AMISR corrections, processing
joabAM
r1417:1ff56cfe5bef
@@ -1,657 +1,659
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """API to create signal chain projects
6 6
7 7 The API is provided through the class: Project
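
A minimal usage sketch (hedged example: the reader datatype and file paths are
placeholders; SpectraProc and removeDC are defined in this changeset):

    prj = Project()
    prj.setup(id='1', name='example', description='test signal chain')
    read = prj.addReadUnit(datatype='Voltage', path='/path/to/data',
                           startDate='2020/01/01', endDate='2020/01/01',
                           startTime='00:00:00', endTime='23:59:59')
    proc = prj.addProcUnit(datatype='SpectraProc', inputId=read.getId())
    proc.addParameter(name='nFFTPoints', value='128')
    op = proc.addOperation(name='removeDC')
    op.addParameter(name='mode', value='2')
    prj.start()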
8 8 """
9 9
10 10 import re
11 11 import sys
12 12 import ast
13 13 import datetime
14 14 import traceback
15 15 import time
16 16 import multiprocessing
17 17 from multiprocessing import Process, Queue
18 18 from threading import Thread
19 19 from xml.etree.ElementTree import ElementTree, Element, SubElement
20 20
21 21 from schainpy.admin import Alarm, SchainWarning
22 22 from schainpy.model import *
23 23 from schainpy.utils import log
24 24
25 25 if 'darwin' in sys.platform and sys.version_info[0] == 3 and sys.version_info[1] > 7:
26 26 multiprocessing.set_start_method('fork')
27 27
28 28 class ConfBase():
29 29
30 30 def __init__(self):
31 31
32 32 self.id = '0'
33 33 self.name = None
34 34 self.priority = None
35 35 self.parameters = {}
36 36 self.object = None
37 37 self.operations = []
38 38
39 39 def getId(self):
40 40
41 41 return self.id
42 42
43 43 def getNewId(self):
44 44
45 45 return int(self.id) * 10 + len(self.operations) + 1
46 46
47 47 def updateId(self, new_id):
48 48
49 49 self.id = str(new_id)
50 50
51 51 n = 1
52 52 for conf in self.operations:
53 53 conf_id = str(int(new_id) * 10 + n)
54 54 conf.updateId(conf_id)
55 55 n += 1
56 56
57 57 def getKwargs(self):
58 58
59 59 params = {}
60 60
61 61 for key, value in self.parameters.items():
62 62 if value not in (None, '', ' '):
63 63 params[key] = value
64 64
65 65 return params
66 66
67 67 def update(self, **kwargs):
68 68
69 69 for key, value in kwargs.items():
70 70 self.addParameter(name=key, value=value)
71 71
72 72 def addParameter(self, name, value, format=None):
73 73 '''
Store a parameter: 'Y/M/D' strings become datetime.date, 'H:M:S' strings become
datetime.time, other values go through ast.literal_eval, falling back to a
comma-split list or the raw string.
74 74 '''
75 75
76 76 if isinstance(value, str) and re.search(r'(\d+/\d+/\d+)', value):
77 77 self.parameters[name] = datetime.date(*[int(x) for x in value.split('/')])
78 78 elif isinstance(value, str) and re.search(r'(\d+:\d+:\d+)', value):
79 79 self.parameters[name] = datetime.time(*[int(x) for x in value.split(':')])
80 80 else:
81 81 try:
82 82 self.parameters[name] = ast.literal_eval(value)
83 83 except:
84 84 if isinstance(value, str) and ',' in value:
85 85 self.parameters[name] = value.split(',')
86 86 else:
87 87 self.parameters[name] = value
88 88
89 89 def getParameters(self):
90 90
91 91 params = {}
92 92 for key, value in self.parameters.items():
93 93 s = type(value).__name__
94 94 if s == 'date':
95 95 params[key] = value.strftime('%Y/%m/%d')
96 96 elif s == 'time':
97 97 params[key] = value.strftime('%H:%M:%S')
98 98 else:
99 99 params[key] = str(value)
100 100
101 101 return params
102 102
103 103 def makeXml(self, element):
104 104
105 105 xml = SubElement(element, self.ELEMENTNAME)
106 106 for label in self.xml_labels:
107 107 xml.set(label, str(getattr(self, label)))
108 108
109 109 for key, value in self.getParameters().items():
110 110 xml_param = SubElement(xml, 'Parameter')
111 111 xml_param.set('name', key)
112 112 xml_param.set('value', value)
113 113
114 114 for conf in self.operations:
115 115 conf.makeXml(xml)
116 116
117 117 def __str__(self):
118 118
119 119 if self.ELEMENTNAME == 'Operation':
120 120 s = ' {}[id={}]\n'.format(self.name, self.id)
121 121 else:
122 122 s = '{}[id={}, inputId={}]\n'.format(self.name, self.id, self.inputId)
123 123
124 124 for key, value in self.parameters.items():
125 125 if self.ELEMENTNAME == 'Operation':
126 126 s += ' {}: {}\n'.format(key, value)
127 127 else:
128 128 s += ' {}: {}\n'.format(key, value)
129 129
130 130 for conf in self.operations:
131 131 s += str(conf)
132 132
133 133 return s
134 134
135 135 class OperationConf(ConfBase):
136 136
137 137 ELEMENTNAME = 'Operation'
138 138 xml_labels = ['id', 'name']
139 139
140 140 def setup(self, id, name, priority, project_id, err_queue):
141 141
142 142 self.id = str(id)
143 143 self.project_id = project_id
144 144 self.name = name
145 145 self.type = 'other'
146 146 self.err_queue = err_queue
147 147
148 148 def readXml(self, element, project_id, err_queue):
149 149
150 150 self.id = element.get('id')
151 151 self.name = element.get('name')
152 152 self.type = 'other'
153 153 self.project_id = str(project_id)
154 154 self.err_queue = err_queue
155 155
156 156 for elm in element.iter('Parameter'):
157 157 self.addParameter(elm.get('name'), elm.get('value'))
158 158
159 159 def createObject(self):
160 160
161 161 className = eval(self.name)
162 162
163 163 if 'Plot' in self.name or 'Writer' in self.name or 'Send' in self.name or 'print' in self.name:
164 164 kwargs = self.getKwargs()
165 165 opObj = className(self.name, self.id, self.project_id, self.err_queue, **kwargs)
166 166 opObj.start()
167 167 self.type = 'external'
168 168 else:
169 169 opObj = className()
170 170
171 171 self.object = opObj
172 172 return opObj
173 173
174 174 class ProcUnitConf(ConfBase):
175 175
176 176 ELEMENTNAME = 'ProcUnit'
177 177 xml_labels = ['id', 'inputId', 'name']
178 178
179 179 def setup(self, project_id, id, name, datatype, inputId, err_queue):
180 180 '''
181 181 '''
182 182
183 183 if datatype == None and name == None:
184 184 raise ValueError('datatype or name should be defined')
185 185
186 186 if name == None:
187 187 if 'Proc' in datatype:
188 188 name = datatype
189 189 else:
190 190 name = '%sProc' % (datatype)
191 191
192 192 if datatype == None:
193 193 datatype = name.replace('Proc', '')
194 194
195 195 self.id = str(id)
196 196 self.project_id = project_id
197 197 self.name = name
198 198 self.datatype = datatype
199 199 self.inputId = inputId
200 200 self.err_queue = err_queue
201 201 self.operations = []
202 202 self.parameters = {}
203 203
204 204 def removeOperation(self, id):
205 205
206 206 i = [1 if x.id==id else 0 for x in self.operations]
207 207 self.operations.pop(i.index(1))
208 208
209 209 def getOperation(self, id):
210 210
211 211 for conf in self.operations:
212 212 if conf.id == id:
213 213 return conf
214 214
215 215 def addOperation(self, name, optype='self'):
216 216 '''
217 217 '''
218 218
219 219 id = self.getNewId()
220 220 conf = OperationConf()
221 221 conf.setup(id, name=name, priority='0', project_id=self.project_id, err_queue=self.err_queue)
222 222 self.operations.append(conf)
223 223
224 224 return conf
225 225
226 226 def readXml(self, element, project_id, err_queue):
227 227
228 228 self.id = element.get('id')
229 229 self.name = element.get('name')
230 230 self.inputId = None if element.get('inputId') == 'None' else element.get('inputId')
231 231 self.datatype = element.get('datatype', self.name.replace(self.ELEMENTNAME.replace('Unit', ''), ''))
232 232 self.project_id = str(project_id)
233 233 self.err_queue = err_queue
234 234 self.operations = []
235 235 self.parameters = {}
236 236
237 237 for elm in element:
238 238 if elm.tag == 'Parameter':
239 239 self.addParameter(elm.get('name'), elm.get('value'))
240 240 elif elm.tag == 'Operation':
241 241 conf = OperationConf()
242 242 conf.readXml(elm, project_id, err_queue)
243 243 self.operations.append(conf)
244 244
245 245 def createObjects(self):
246 246 '''
247 247 Instantiates the processing unit and its operations.
248 248 '''
249 249
250 250 className = eval(self.name)
251 251 kwargs = self.getKwargs()
252 252 procUnitObj = className()
253 253 procUnitObj.name = self.name
254 254 log.success('creating process... id: {}, inputId: {}'.format(self.id,self.inputId), self.name)
255 255
256 256 for conf in self.operations:
257 257
258 258 opObj = conf.createObject()
259 259
260 260 log.success('adding operation: {}, type:{}'.format(conf.name,conf.type), self.name)
261 261
262 262 procUnitObj.addOperation(conf, opObj)
263 263
264 264 self.object = procUnitObj
265 265
266 266 def run(self):
267 267 '''
268 268 '''
269 269
270 270 return self.object.call(**self.getKwargs())
271 271
272 272
273 273 class ReadUnitConf(ProcUnitConf):
274 274
275 275 ELEMENTNAME = 'ReadUnit'
276 276
277 277 def __init__(self):
278 278
279 279 self.id = None
280 280 self.datatype = None
281 281 self.name = None
282 282 self.inputId = None
283 283 self.operations = []
284 284 self.parameters = {}
285 285
286 286 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
287 287 startTime='', endTime='', server=None, **kwargs):
288 288
289 289 if datatype == None and name == None:
290 290 raise ValueError('datatype or name should be defined')
291 291 if name == None:
292 292 if 'Reader' in datatype:
293 293 name = datatype
294 294 datatype = name.replace('Reader','')
295 295 else:
296 296 name = '{}Reader'.format(datatype)
297 297 if datatype == None:
298 298 if 'Reader' in name:
299 299 datatype = name.replace('Reader','')
300 300 else:
301 301 datatype = name
302 302 name = '{}Reader'.format(name)
303 303
304 304 self.id = id
305 305 self.project_id = project_id
306 306 self.name = name
307 307 self.datatype = datatype
308 308 self.err_queue = err_queue
309 309
310 310 self.addParameter(name='path', value=path)
311 311 self.addParameter(name='startDate', value=startDate)
312 312 self.addParameter(name='endDate', value=endDate)
313 313 self.addParameter(name='startTime', value=startTime)
314 314 self.addParameter(name='endTime', value=endTime)
315 315
316 316 for key, value in kwargs.items():
317 317 self.addParameter(name=key, value=value)
318 318
319 319
320 320 class Project(Process):
321 321 """API to create signal chain projects"""
322 322
323 323 ELEMENTNAME = 'Project'
324 324
325 325 def __init__(self, name=''):
326 326
327 327 Process.__init__(self)
328 328 self.id = '1'
329 329 if name:
330 330 self.name = '{} ({})'.format(Process.__name__, name)
331 331 self.filename = None
332 332 self.description = None
333 333 self.email = None
334 334 self.alarm = []
335 335 self.configurations = {}
336 336 # self.err_queue = Queue()
337 337 self.err_queue = None
338 338 self.started = False
339 339
340 340 def getNewId(self):
341 341
342 342 idList = list(self.configurations.keys())
343 343 id = int(self.id) * 10
344 344
345 345 while True:
346 346 id += 1
347 347
348 348 if str(id) in idList:
349 349 continue
350 350
351 351 break
352 352
353 353 return str(id)
354 354
355 355 def updateId(self, new_id):
356 356
357 357 self.id = str(new_id)
358 358
359 359 keyList = list(self.configurations.keys())
360 360 keyList.sort()
361 361
362 362 n = 1
363 363 new_confs = {}
364 364
365 365 for procKey in keyList:
366 366
367 367 conf = self.configurations[procKey]
368 368 idProcUnit = str(int(self.id) * 10 + n)
369 369 conf.updateId(idProcUnit)
370 370 new_confs[idProcUnit] = conf
371 371 n += 1
372 372
373 373 self.configurations = new_confs
374 374
375 375 def setup(self, id=1, name='', description='', email=None, alarm=[]):
376 376
377 377 self.id = str(id)
378 378 self.description = description
379 379 self.email = email
380 380 self.alarm = alarm
381 381 if name:
382 382 self.name = '{} ({})'.format(Process.__name__, name)
383 383
384 384 def update(self, **kwargs):
385 385
386 386 for key, value in kwargs.items():
387 387 setattr(self, key, value)
388 388
389 389 def clone(self):
390 390
391 391 p = Project()
392 392 p.id = self.id
393 393 p.name = self.name
394 394 p.description = self.description
395 395 p.configurations = self.configurations.copy()
396 396
397 397 return p
398 398
399 399 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
400 400
401 401 '''
402 402 '''
403 403
404 404 if id is None:
405 405 idReadUnit = self.getNewId()
406 406 else:
407 407 idReadUnit = str(id)
408 408
409 409 conf = ReadUnitConf()
410 410 conf.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
411 411 self.configurations[conf.id] = conf
412 412
413 413 return conf
414 414
415 415 def addProcUnit(self, id=None, inputId='0', datatype=None, name=None):
416 416
417 417 '''
418 418 '''
419 419
420 420 if id is None:
421 421 idProcUnit = self.getNewId()
422 422 else:
423 423 idProcUnit = id
424 424
425 425 conf = ProcUnitConf()
426 426 conf.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue)
427 427 self.configurations[conf.id] = conf
428 428
429 429 return conf
430 430
431 431 def removeProcUnit(self, id):
432 432
433 433 if id in self.configurations:
434 434 self.configurations.pop(id)
435 435
436 436 def getReadUnit(self):
437 437
438 438 for obj in list(self.configurations.values()):
439 439 if obj.ELEMENTNAME == 'ReadUnit':
440 440 return obj
441 441
442 442 return None
443 443
444 444 def getProcUnit(self, id):
445 445
446 446 return self.configurations[id]
447 447
448 448 def getUnits(self):
449 449
450 450 keys = list(self.configurations)
451 451 keys.sort()
452 452
453 453 for key in keys:
454 454 yield self.configurations[key]
455 455
456 456 def updateUnit(self, id, **kwargs):
457 457
458 458 conf = self.configurations[id].update(**kwargs)
459 459
460 460 def makeXml(self):
461 461
462 462 xml = Element('Project')
463 463 xml.set('id', str(self.id))
464 464 xml.set('name', self.name)
465 465 xml.set('description', self.description)
466 466
467 467 for conf in self.configurations.values():
468 468 conf.makeXml(xml)
469 469
470 470 self.xml = xml
471 471
472 472 def writeXml(self, filename=None):
473 473
474 474 if filename == None:
475 475 if self.filename:
476 476 filename = self.filename
477 477 else:
478 478 filename = 'schain.xml'
479 479
480 480 if not filename:
481 481 print('filename has not been defined. Use setFilename(filename) to set it.')
482 482 return 0
483 483
484 484 abs_file = os.path.abspath(filename)
485 485
486 486 if not os.access(os.path.dirname(abs_file), os.W_OK):
487 487 print('No write permission on %s' % os.path.dirname(abs_file))
488 488 return 0
489 489
490 490 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
491 491 print('File %s already exists and it could not be overwritten' % abs_file)
492 492 return 0
493 493
494 494 self.makeXml()
495 495
496 496 ElementTree(self.xml).write(abs_file, method='xml')
497 497
498 498 self.filename = abs_file
499 499
500 500 return 1
501 501
502 502 def readXml(self, filename):
503 503
504 504 abs_file = os.path.abspath(filename)
505 505
506 506 self.configurations = {}
507 507
508 508 try:
509 509 self.xml = ElementTree().parse(abs_file)
510 510 except:
511 511 log.error('Error reading %s, verify file format' % filename)
512 512 return 0
513 513
514 514 self.id = self.xml.get('id')
515 515 self.name = self.xml.get('name')
516 516 self.description = self.xml.get('description')
517 517
518 518 for element in self.xml:
519 519 if element.tag == 'ReadUnit':
520 520 conf = ReadUnitConf()
521 521 conf.readXml(element, self.id, self.err_queue)
522 522 self.configurations[conf.id] = conf
523 523 elif element.tag == 'ProcUnit':
524 524 conf = ProcUnitConf()
525 525 input_proc = self.configurations[element.get('inputId')]
526 526 conf.readXml(element, self.id, self.err_queue)
527 527 self.configurations[conf.id] = conf
528 528
529 529 self.filename = abs_file
530 530
531 531 return 1
532 532
533 533 def __str__(self):
534 534
535 535 text = '\nProject[id=%s, name=%s, description=%s]\n\n' % (
536 536 self.id,
537 537 self.name,
538 538 self.description,
539 539 )
540 540
541 541 for conf in self.configurations.values():
542 542 text += '{}'.format(conf)
543 543
544 544 return text
545 545
546 546 def createObjects(self):
547 547
548 548 keys = list(self.configurations.keys())
549 549 keys.sort()
550 550 for key in keys:
551 551 conf = self.configurations[key]
552 552 conf.createObjects()
553 553 if conf.inputId is not None:
554 554 conf.object.setInput(self.configurations[conf.inputId].object)
555 555
556 556 def monitor(self):
557 557
558 558 t = Thread(target=self._monitor, args=(self.err_queue, self.ctx))
559 559 t.start()
560 560
561 561 def _monitor(self, queue, ctx):
562 562
563 563 import socket
564 564
565 565 procs = 0
566 566 err_msg = ''
567 567
568 568 while True:
569 569 msg = queue.get()
570 570 if '#_start_#' in msg:
571 571 procs += 1
572 572 elif '#_end_#' in msg:
573 573 procs -=1
574 574 else:
575 575 err_msg = msg
576 576
577 577 if procs == 0 or 'Traceback' in err_msg:
578 578 break
579 579 time.sleep(0.1)
580 580
581 581 if '|' in err_msg:
582 582 name, err = err_msg.split('|')
583 583 if 'SchainWarning' in err:
584 584 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
585 585 elif 'SchainError' in err:
586 586 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
587 587 else:
588 588 log.error(err, name)
589 589 else:
590 590 name, err = self.name, err_msg
591 591
592 592 time.sleep(1)
593 593
594 594 ctx.term()
595 595
596 596 message = ''.join(err)
597 597
598 598 if err_msg:
599 599 subject = 'SChain v%s: Error running %s\n' % (
600 600 schainpy.__version__, self.name)
601 601
602 602 subtitle = 'Hostname: %s\n' % socket.gethostbyname(
603 603 socket.gethostname())
604 604 subtitle += 'Working directory: %s\n' % os.path.abspath('./')
605 605 subtitle += 'Configuration file: %s\n' % self.filename
606 606 subtitle += 'Time: %s\n' % str(datetime.datetime.now())
607 607
608 608 readUnitConfObj = self.getReadUnit()
609 609 if readUnitConfObj:
610 610 subtitle += '\nInput parameters:\n'
611 611 subtitle += '[Data path = %s]\n' % readUnitConfObj.parameters['path']
612 612 subtitle += '[Start date = %s]\n' % readUnitConfObj.parameters['startDate']
613 613 subtitle += '[End date = %s]\n' % readUnitConfObj.parameters['endDate']
614 614 subtitle += '[Start time = %s]\n' % readUnitConfObj.parameters['startTime']
615 615 subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
616 616
617 617 a = Alarm(
618 618 modes=self.alarm,
619 619 email=self.email,
620 620 message=message,
621 621 subject=subject,
622 622 subtitle=subtitle,
623 623 filename=self.filename
624 624 )
625 625
626 626 a.start()
627 627
628 628 def setFilename(self, filename):
629 629
630 630 self.filename = filename
631 631
632 632 def runProcs(self):
633 633
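# Note (descriptive comment added for clarity): n starts at the number of configured units and
# is decremented each time a unit's run() returns 'Error'; a falsy return from a unit ends the
# current pass, and the loop stops once n reaches zero.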
634 634 err = False
635 635 n = len(self.configurations)
636 636
637 637 while not err:
638 #print("STAR")
638 639 for conf in self.getUnits():
640 #print("CONF: ",conf)
639 641 ok = conf.run()
640 642 if ok == 'Error':
641 643 n -= 1
642 644 continue
643 645 elif not ok:
644 646 break
645 647 if n == 0:
646 648 err = True
647 649
648 650 def run(self):
649 651
650 652 log.success('\nStarting Project {} [id={}]'.format(self.name, self.id), tag='')
651 653 self.started = True
652 654 self.start_time = time.time()
653 655 self.createObjects()
654 656 self.runProcs()
655 657 log.success('{} Done (Time: {:4.2f}s)'.format(
656 658 self.name,
657 659 time.time()-self.start_time), '')
@@ -1,209 +1,212
1 1 '''
2 2 Base classes to create processing units and operations. The MPDecorator
3 3 must be used in plotting and writing operations to allow them to run as an
4 4 external process.
5 5 '''
6 6 import os
7 7 import inspect
8 8 import zmq
9 9 import time
10 10 import pickle
11 11 import traceback
12 12 from threading import Thread
13 13 from multiprocessing import Process, Queue
14 14 from schainpy.utils import log
15 15 import copy
16 16 QUEUE_SIZE = int(os.environ.get('QUEUE_MAX_SIZE', '10'))
17 17 class ProcessingUnit(object):
18 18 '''
19 19 Base class to create Signal Chain Units
20 20 '''
21 21
22 22 proc_type = 'processing'
23 23
24 24 def __init__(self):
25 25
26 26 self.dataIn = None
27 27 self.dataOut = None
28 28 self.isConfig = False
29 29 self.operations = []
30 30
31 31 def setInput(self, unit):
32 32
33 33 self.dataIn = unit.dataOut
34 34
35 35
36 36 def getAllowedArgs(self):
37 37 if hasattr(self, '__attrs__'):
38 38 return self.__attrs__
39 39 else:
40 40 return inspect.getargspec(self.run).args
41 41
42 42 def addOperation(self, conf, operation):
43 43 '''
44 44 '''
45 45
46 46 self.operations.append((operation, conf.type, conf.getKwargs()))
47 47
48 48 def getOperationObj(self, objId):
49 49
50 50 if objId not in list(self.operations.keys()):
51 51 return None
52 52
53 53 return self.operations[objId]
54 54
55 55 def call(self, **kwargs):
56 56 '''
57 57 '''
58 58
59 59 try:
60 if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
61 return self.dataIn.isReady()
62 elif self.dataIn is None or not self.dataIn.error:
60 # if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
61 # return self.dataIn.isReady()
62 # dataIn=None corresponds to reading units; the second part applies to processing units
63 if self.dataIn is None or (not self.dataIn.error and not self.dataIn.flagNoData):
63 64 self.run(**kwargs)
64 65 elif self.dataIn.error:
65 66 self.dataOut.error = self.dataIn.error
66 67 self.dataOut.flagNoData = True
68
67 69 except:
68 70
69 71 err = traceback.format_exc()
70 72 if 'SchainWarning' in err:
71 73 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
72 74 elif 'SchainError' in err:
73 75 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
74 76 else:
75 77 log.error(err, self.name)
76 78 self.dataOut.error = True
77 79
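# Note (descriptive comment added for clarity): 'other' operations run inline and may replace
# self.dataOut, while 'external' operations (plots/writers wrapped by MPDecorator) receive a
# deep copy of dataOut through their multiprocessing queue.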
78 80 for op, optype, opkwargs in self.operations:
79 if optype == 'other' and not self.dataOut.flagNoData:
81 if optype == 'other' and self.dataOut.isReady():
80 82 try:
81 83 self.dataOut = op.run(self.dataOut, **opkwargs)
82 84 except Exception as e:
85 print(e)
83 86 self.dataOut.error = True
84 87 return 'Error'
85 88 elif optype == 'external' and self.dataOut.isReady() :
86 89 op.queue.put(copy.deepcopy(self.dataOut))
87 90 elif optype == 'external' and self.dataOut.error:
88 op.queue.put(self.dataOut)
91 op.queue.put(copy.deepcopy(self.dataOut))
89 92
90 return 'Error' if self.dataOut.error else self.dataOut.isReady()
93 return 'Error' if self.dataOut.error else True  # self.dataOut.isReady()
91 94
92 95 def setup(self):
93 96
94 97 raise NotImplementedError
95 98
96 99 def run(self):
97 100
98 101 raise NotImplementedError
99 102
100 103 def close(self):
101 104
102 105 return
103 106
104 107
105 108 class Operation(object):
106 109
107 110 '''
108 111 '''
109 112
110 113 proc_type = 'operation'
111 114
112 115 def __init__(self):
113 116
114 117 self.id = None
115 118 self.isConfig = False
116 119
117 120 if not hasattr(self, 'name'):
118 121 self.name = self.__class__.__name__
119 122
120 123 def getAllowedArgs(self):
121 124 if hasattr(self, '__attrs__'):
122 125 return self.__attrs__
123 126 else:
124 127 return inspect.getargspec(self.run).args
125 128
126 129 def setup(self):
127 130
128 131 self.isConfig = True
129 132
130 133 raise NotImplementedError
131 134
132 135 def run(self, dataIn, **kwargs):
133 136 """
134 137 Performs the required operations on dataIn.data and updates the
135 138 attributes of the dataIn object.
136 139
137 140 Input:
138 141
139 142 dataIn : object of type JROData
140 143
141 144 Return:
142 145
143 146 None
144 147
145 148 Affected:
146 149 __buffer : data reception buffer.
147 150
148 151 """
149 152 if not self.isConfig:
150 153 self.setup(**kwargs)
151 154
152 155 raise NotImplementedError
153 156
154 157 def close(self):
155 158
156 159 return
157 160
158 161
159 162 def MPDecorator(BaseClass):
160 163 """
161 164 Multiprocessing class decorator
162 165
163 166 This function adds multiprocessing features to a BaseClass.
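
A hedged usage sketch (the class below is a placeholder): the decorator is
applied at class-definition time, and the controller later instantiates the
decorated class as cls(name, id, project_id, err_queue, **kwargs) and calls
start() (see OperationConf.createObject in the controller).

    @MPDecorator
    class ExampleWriter(Operation):
        def run(self, dataOut, **kwargs):
            pass  # consume each dataOut pulled from the queue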
164 167 """
165 168
166 169 class MPClass(BaseClass, Process):
167 170
168 171 def __init__(self, *args, **kwargs):
169 172 super(MPClass, self).__init__()
170 173 Process.__init__(self)
171 174
172 175 self.args = args
173 176 self.kwargs = kwargs
174 177 self.t = time.time()
175 178 self.op_type = 'external'
176 179 self.name = BaseClass.__name__
177 180 self.__doc__ = BaseClass.__doc__
178 181
179 182 if 'plot' in self.name.lower() and not self.name.endswith('_'):
180 183 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
181 184
182 185 self.start_time = time.time()
183 186 self.err_queue = args[3]
184 187 self.queue = Queue(maxsize=QUEUE_SIZE)
185 188 self.myrun = BaseClass.run
186 189
187 190 def run(self):
188 191
189 192 while True:
190 193
191 194 dataOut = self.queue.get()
192 195
193 196 if not dataOut.error:
194 197 try:
195 198 BaseClass.run(self, dataOut, **self.kwargs)
196 199 except:
197 200 err = traceback.format_exc()
198 201 log.error(err, self.name)
199 202 else:
200 203 break
201 204
202 205 self.close()
203 206
204 207 def close(self):
205 208
206 209 BaseClass.close(self)
207 210 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
208 211
209 212 return MPClass
@@ -1,1689 +1,1689
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24 class SpectraProc(ProcessingUnit):
25 25
26 26 def __init__(self):
27 27
28 28 ProcessingUnit.__init__(self)
29 29
30 30 self.buffer = None
31 31 self.firstdatatime = None
32 32 self.profIndex = 0
33 33 self.dataOut = Spectra()
34 34 self.id_min = None
35 35 self.id_max = None
36 36 self.setupReq = False # Add to every processing unit
37 37
38 38 def __updateSpecFromVoltage(self):
39 39
40 40 self.dataOut.timeZone = self.dataIn.timeZone
41 41 self.dataOut.dstFlag = self.dataIn.dstFlag
42 42 self.dataOut.errorCount = self.dataIn.errorCount
43 43 self.dataOut.useLocalTime = self.dataIn.useLocalTime
44 44 try:
45 45 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
46 46 except:
47 47 pass
48 48 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
49 49 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
50 50 self.dataOut.channelList = self.dataIn.channelList
51 51 self.dataOut.heightList = self.dataIn.heightList
52 52 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
53 53 self.dataOut.nProfiles = self.dataOut.nFFTPoints
54 54 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
55 55 self.dataOut.utctime = self.firstdatatime
56 56 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
57 57 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
58 58 self.dataOut.flagShiftFFT = False
59 59 self.dataOut.nCohInt = self.dataIn.nCohInt
60 60 self.dataOut.nIncohInt = 1
61 61 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
62 62 self.dataOut.frequency = self.dataIn.frequency
63 63 self.dataOut.realtime = self.dataIn.realtime
64 64 self.dataOut.azimuth = self.dataIn.azimuth
65 65 self.dataOut.zenith = self.dataIn.zenith
66 66 self.dataOut.codeList = self.dataIn.codeList
67 67 self.dataOut.azimuthList = self.dataIn.azimuthList
68 68 self.dataOut.elevationList = self.dataIn.elevationList
69 69
70 70
71 71
72 72 def __getFft(self):
73 73 """
74 74 Converts Voltage values into Spectra
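
(Summary added for clarity: the buffered profiles are FFT'd along the profile
axis, self-spectra are computed as V(f)*conj(V(f)), cross-spectra as
Va(f)*conj(Vb(f)) for each pair in pairsList, and the DC term is taken from
FFT bin 0 before the fftshift.)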
75 75
76 76 Affected:
77 77 self.dataOut.data_spc
78 78 self.dataOut.data_cspc
79 79 self.dataOut.data_dc
80 80 self.dataOut.heightList
81 81 self.profIndex
82 82 self.buffer
83 83 self.dataOut.flagNoData
84 84 """
85 85 fft_volt = numpy.fft.fft(
86 86 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
87 87 fft_volt = fft_volt.astype(numpy.dtype('complex'))
88 88 dc = fft_volt[:, 0, :]
89 89
90 90 # self-spectra computation
91 91 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
92 92 spc = fft_volt * numpy.conjugate(fft_volt)
93 93 spc = spc.real
94 94
95 95 blocksize = 0
96 96 blocksize += dc.size
97 97 blocksize += spc.size
98 98
99 99 cspc = None
100 100 pairIndex = 0
101 101 if self.dataOut.pairsList != None:
102 102 # cross-spectra computation
103 103 cspc = numpy.zeros(
104 104 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
105 105 for pair in self.dataOut.pairsList:
106 106 if pair[0] not in self.dataOut.channelList:
107 107 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
108 108 str(pair), str(self.dataOut.channelList)))
109 109 if pair[1] not in self.dataOut.channelList:
110 110 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
111 111 str(pair), str(self.dataOut.channelList)))
112 112
113 113 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
114 114 numpy.conjugate(fft_volt[pair[1], :, :])
115 115 pairIndex += 1
116 116 blocksize += cspc.size
117 117
118 118 self.dataOut.data_spc = spc
119 119 self.dataOut.data_cspc = cspc
120 120 self.dataOut.data_dc = dc
121 121 self.dataOut.blockSize = blocksize
122 122 self.dataOut.flagShiftFFT = False
123 123
124 124 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
125 125
126 126 if self.dataIn.type == "Spectra":
127 127
128 128 try:
129 129 self.dataOut.copy(self.dataIn)
130 130
131 131 except Exception as e:
132 132 print(e)
133 133
134 134 if shift_fft:
135 135 # shift a given number of positions to the right along the FFT axis (axis 1)
136 136 shift = int(self.dataOut.nFFTPoints/2)
137 137 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
138 138
139 139 if self.dataOut.data_cspc is not None:
140 140 # shift a given number of positions to the right along the FFT axis (axis 1)
141 141 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
142 142 if pairsList:
143 143 self.__selectPairs(pairsList)
144 144
145 145
146 146 elif self.dataIn.type == "Voltage":
147 147
148 148 self.dataOut.flagNoData = True
149 149
150 150 if nFFTPoints == None:
151 151 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
152 152
153 153 if nProfiles == None:
154 154 nProfiles = nFFTPoints
155 155
156 156 if ippFactor == None:
157 157 self.dataOut.ippFactor = 1
158 158
159 159 self.dataOut.nFFTPoints = nFFTPoints
160 160
161 161 if self.buffer is None:
162 162 self.buffer = numpy.zeros((self.dataIn.nChannels,
163 163 nProfiles,
164 164 self.dataIn.nHeights),
165 165 dtype='complex')
166 166
167 167 if self.dataIn.flagDataAsBlock:
168 168 nVoltProfiles = self.dataIn.data.shape[1]
169 169
170 170 if nVoltProfiles == nProfiles:
171 171 self.buffer = self.dataIn.data.copy()
172 172 self.profIndex = nVoltProfiles
173 173
174 174 elif nVoltProfiles < nProfiles:
175 175
176 176 if self.profIndex == 0:
177 177 self.id_min = 0
178 178 self.id_max = nVoltProfiles
179 179
180 180 self.buffer[:, self.id_min:self.id_max,
181 181 :] = self.dataIn.data
182 182 self.profIndex += nVoltProfiles
183 183 self.id_min += nVoltProfiles
184 184 self.id_max += nVoltProfiles
185 185 else:
186 186 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
187 187 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
188 188 self.dataOut.flagNoData = True
189 189 else:
190 190 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
191 191 self.profIndex += 1
192 192
193 193 if self.firstdatatime == None:
194 194 self.firstdatatime = self.dataIn.utctime
195 195
196 196 if self.profIndex == nProfiles:
197 197 self.__updateSpecFromVoltage()
198 198 if pairsList == None:
199 199 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
200 200 else:
201 201 self.dataOut.pairsList = pairsList
202 202 self.__getFft()
203 203 self.dataOut.flagNoData = False
204 204 self.firstdatatime = None
205 205 self.profIndex = 0
206 206 else:
207 207 raise ValueError("The type of input object '%s' is not valid".format(
208 208 self.dataIn.type))
209 209
210 210 def __selectPairs(self, pairsList):
211 211
212 212 if not pairsList:
213 213 return
214 214
215 215 pairs = []
216 216 pairsIndex = []
217 217
218 218 for pair in pairsList:
219 219 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
220 220 continue
221 221 pairs.append(pair)
222 222 pairsIndex.append(pairs.index(pair))
223 223
224 224 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
225 225 self.dataOut.pairsList = pairs
226 226
227 227 return
228 228
229 229 def selectFFTs(self, minFFT, maxFFT ):
230 230 """
231 231 Selects a block of data based on a range of FFT-point values such that
232 232 minFFT <= FFT <= maxFFT
233 233 """
234 234
235 235 if (minFFT > maxFFT):
236 236 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
237 237
238 238 if (minFFT < self.dataOut.getFreqRange()[0]):
239 239 minFFT = self.dataOut.getFreqRange()[0]
240 240
241 241 if (maxFFT > self.dataOut.getFreqRange()[-1]):
242 242 maxFFT = self.dataOut.getFreqRange()[-1]
243 243
244 244 minIndex = 0
245 245 maxIndex = 0
246 246 FFTs = self.dataOut.getFreqRange()
247 247
248 248 inda = numpy.where(FFTs >= minFFT)
249 249 indb = numpy.where(FFTs <= maxFFT)
250 250
251 251 try:
252 252 minIndex = inda[0][0]
253 253 except:
254 254 minIndex = 0
255 255
256 256 try:
257 257 maxIndex = indb[0][-1]
258 258 except:
259 259 maxIndex = len(FFTs)
260 260
261 261 self.selectFFTsByIndex(minIndex, maxIndex)
262 262
263 263 return 1
264 264
265 265 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
266 266 newheis = numpy.where(
267 267 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
268 268
269 269 if hei_ref != None:
270 270 newheis = numpy.where(self.dataOut.heightList > hei_ref)
271 271
272 272 minIndex = min(newheis[0])
273 273 maxIndex = max(newheis[0])
274 274 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
275 275 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
276 276
277 277 # determine indices
278 278 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
279 279 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
280 280 avg_dB = 10 * \
281 281 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
282 282 beacon_dB = numpy.sort(avg_dB)[-nheis:]
283 283 beacon_heiIndexList = []
284 284 for val in avg_dB.tolist():
285 285 if val >= beacon_dB[0]:
286 286 beacon_heiIndexList.append(avg_dB.tolist().index(val))
287 287
288 288 #data_spc = data_spc[:,:,beacon_heiIndexList]
289 289 data_cspc = None
290 290 if self.dataOut.data_cspc is not None:
291 291 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
292 292 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
293 293
294 294 data_dc = None
295 295 if self.dataOut.data_dc is not None:
296 296 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
297 297 #data_dc = data_dc[:,beacon_heiIndexList]
298 298
299 299 self.dataOut.data_spc = data_spc
300 300 self.dataOut.data_cspc = data_cspc
301 301 self.dataOut.data_dc = data_dc
302 302 self.dataOut.heightList = heightList
303 303 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
304 304
305 305 return 1
306 306
307 307 def selectFFTsByIndex(self, minIndex, maxIndex):
308 308 """
309 309
310 310 """
311 311
312 312 if (minIndex < 0) or (minIndex > maxIndex):
313 313 raise ValueError("Error selecting FFTs: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
314 314
315 315 if (maxIndex >= self.dataOut.nProfiles):
316 316 maxIndex = self.dataOut.nProfiles-1
317 317
318 318 #Spectra
319 319 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
320 320
321 321 data_cspc = None
322 322 if self.dataOut.data_cspc is not None:
323 323 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
324 324
325 325 data_dc = None
326 326 if self.dataOut.data_dc is not None:
327 327 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
328 328
329 329 self.dataOut.data_spc = data_spc
330 330 self.dataOut.data_cspc = data_cspc
331 331 self.dataOut.data_dc = data_dc
332 332
333 333 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
334 334 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
335 335 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
336 336
337 337 return 1
338 338
339 339 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
340 340 # height-range validation
341 341 if minHei == None:
342 342 minHei = self.dataOut.heightList[0]
343 343
344 344 if maxHei == None:
345 345 maxHei = self.dataOut.heightList[-1]
346 346
347 347 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
348 348 print('minHei: %.2f is out of the heights range' % (minHei))
349 349 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
350 350 minHei = self.dataOut.heightList[0]
351 351
352 352 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
353 353 print('maxHei: %.2f is out of the heights range' % (maxHei))
354 354 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
355 355 maxHei = self.dataOut.heightList[-1]
356 356
357 357 # velocity validation
358 358 velrange = self.dataOut.getVelRange(1)
359 359
360 360 if minVel == None:
361 361 minVel = velrange[0]
362 362
363 363 if maxVel == None:
364 364 maxVel = velrange[-1]
365 365
366 366 if (minVel < velrange[0]) or (minVel > maxVel):
367 367 print('minVel: %.2f is out of the velocity range' % (minVel))
368 368 print('minVel is set to %.2f' % (velrange[0]))
369 369 minVel = velrange[0]
370 370
371 371 if (maxVel > velrange[-1]) or (maxVel < minVel):
372 372 print('maxVel: %.2f is out of the velocity range' % (maxVel))
373 373 print('maxVel is set to %.2f' % (velrange[-1]))
374 374 maxVel = velrange[-1]
375 375
376 376 # index selection for the height range
377 377 minIndex = 0
378 378 maxIndex = 0
379 379 heights = self.dataOut.heightList
380 380
381 381 inda = numpy.where(heights >= minHei)
382 382 indb = numpy.where(heights <= maxHei)
383 383
384 384 try:
385 385 minIndex = inda[0][0]
386 386 except:
387 387 minIndex = 0
388 388
389 389 try:
390 390 maxIndex = indb[0][-1]
391 391 except:
392 392 maxIndex = len(heights)
393 393
394 394 if (minIndex < 0) or (minIndex > maxIndex):
395 395 raise ValueError("some value in (%d,%d) is not valid" % (
396 396 minIndex, maxIndex))
397 397
398 398 if (maxIndex >= self.dataOut.nHeights):
399 399 maxIndex = self.dataOut.nHeights - 1
400 400
401 401 # index selection for the velocities
402 402 indminvel = numpy.where(velrange >= minVel)
403 403 indmaxvel = numpy.where(velrange <= maxVel)
404 404 try:
405 405 minIndexVel = indminvel[0][0]
406 406 except:
407 407 minIndexVel = 0
408 408
409 409 try:
410 410 maxIndexVel = indmaxvel[0][-1]
411 411 except:
412 412 maxIndexVel = len(velrange)
413 413
414 414 # spectrum selection
415 415 data_spc = self.dataOut.data_spc[:,
416 416 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
417 417 # noise estimation
418 418 noise = numpy.zeros(self.dataOut.nChannels)
419 419
420 420 for channel in range(self.dataOut.nChannels):
421 421 daux = data_spc[channel, :, :]
422 422 sortdata = numpy.sort(daux, axis=None)
423 423 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
424 424
425 425 self.dataOut.noise_estimation = noise.copy()
426 426
427 427 return 1
428 428
429 429 class removeDC(Operation):
430 430
431 431 def run(self, dataOut, mode=2):
432 432 self.dataOut = dataOut
433 433 jspectra = self.dataOut.data_spc
434 434 jcspectra = self.dataOut.data_cspc
435 435
436 436 num_chan = jspectra.shape[0]
437 437 num_hei = jspectra.shape[2]
438 438
439 439 if jcspectra is not None:
440 440 jcspectraExist = True
441 441 num_pairs = jcspectra.shape[0]
442 442 else:
443 443 jcspectraExist = False
444 444
445 445 freq_dc = int(jspectra.shape[1] / 2)
446 446 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
447 447 ind_vel = ind_vel.astype(int)
448 448
449 449 if ind_vel[0] < 0:
450 450 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
451 451
452 452 if mode == 1:
453 453 jspectra[:, freq_dc, :] = (
454 454 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
455 455
456 456 if jcspectraExist:
457 457 jcspectra[:, freq_dc, :] = (
458 458 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
459 459
460 460 if mode == 2:
461 461
462 462 vel = numpy.array([-2, -1, 1, 2])
463 463 xx = numpy.zeros([4, 4])
464 464
465 465 for fil in range(4):
466 466 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
467 467
468 468 xx_inv = numpy.linalg.inv(xx)
469 469 xx_aux = xx_inv[0, :]
470 470
471 471 for ich in range(num_chan):
472 472 yy = jspectra[ich, ind_vel, :]
473 473 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
474 474
475 475 junkid = jspectra[ich, freq_dc, :] <= 0
476 476 cjunkid = sum(junkid)
477 477
478 478 if cjunkid.any():
479 479 jspectra[ich, freq_dc, junkid.nonzero()] = (
480 480 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
481 481
482 482 if jcspectraExist:
483 483 for ip in range(num_pairs):
484 484 yy = jcspectra[ip, ind_vel, :]
485 485 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
486 486
487 487 self.dataOut.data_spc = jspectra
488 488 self.dataOut.data_cspc = jcspectra
489 489
490 490 return self.dataOut
491 491
492 492 # import matplotlib.pyplot as plt
493 493
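# Bell-shaped model used by CleanRayleigh's histogram fit below: a0 ~ amplitude, a1 ~ centre,
# a2 ~ width (as written, the exponent is -((x - a1)/a2)**2 / a2).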
494 494 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
495 495 z = (x - a1) / a2
496 496 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
497 497 return y
498 498
499 499
500 500 class CleanRayleigh(Operation):
501 501
502 502 def __init__(self):
503 503
504 504 Operation.__init__(self)
505 505 self.i=0
506 506 self.isConfig = False
507 507 self.__dataReady = False
508 508 self.__profIndex = 0
509 509 self.byTime = False
510 510 self.byProfiles = False
511 511
512 512 self.bloques = None
513 513 self.bloque0 = None
514 514
515 515 self.index = 0
516 516
517 517 self.buffer = 0
518 518 self.buffer2 = 0
519 519 self.buffer3 = 0
520 520
521 521
522 522 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
523 523
524 524 self.nChannels = dataOut.nChannels
525 525 self.nProf = dataOut.nProfiles
526 526 self.nPairs = dataOut.data_cspc.shape[0]
527 527 self.pairsArray = numpy.array(dataOut.pairsList)
528 528 self.spectra = dataOut.data_spc
529 529 self.cspectra = dataOut.data_cspc
530 530 self.heights = dataOut.heightList # all heights
531 531 self.nHeights = len(self.heights)
532 532 self.min_hei = min_hei
533 533 self.max_hei = max_hei
534 534 if (self.min_hei == None):
535 535 self.min_hei = 0
536 536 if (self.max_hei == None):
537 537 self.max_hei = dataOut.heightList[-1]
538 538 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
539 539 self.heightsClean = self.heights[self.hval] # filtered heights
540 540 self.hval = self.hval[0] # shape (N,), only N elements -> height indices
541 541 self.nHeightsClean = len(self.heightsClean)
542 542 self.channels = dataOut.channelList
543 543 self.nChan = len(self.channels)
544 544 self.nIncohInt = dataOut.nIncohInt
545 545 self.__initime = dataOut.utctime
546 546 self.maxAltInd = self.hval[-1]+1
547 547 self.minAltInd = self.hval[0]
548 548
549 549 self.crosspairs = dataOut.pairsList
550 550 self.nPairs = len(self.crosspairs)
551 551 self.normFactor = dataOut.normFactor
552 552 self.nFFTPoints = dataOut.nFFTPoints
553 553 self.ippSeconds = dataOut.ippSeconds
554 554 self.currentTime = self.__initime
555 555 self.pairsArray = numpy.array(dataOut.pairsList)
556 556 self.factor_stdv = factor_stdv
557 557
558 558 if n != None :
559 559 self.byProfiles = True
560 560 self.nIntProfiles = n
561 561 else:
562 562 self.__integrationtime = timeInterval
563 563
564 564 self.__dataReady = False
565 565 self.isConfig = True
566 566
567 567
568 568
569 569 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
570 570
571 571 if not self.isConfig :
572 572
573 573 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
574 574
575 575 tini=dataOut.utctime
576 576
577 577 if self.byProfiles:
578 578 if self.__profIndex == self.nIntProfiles:
579 579 self.__dataReady = True
580 580 else:
581 581 if (tini - self.__initime) >= self.__integrationtime:
582 582
583 583 self.__dataReady = True
584 584 self.__initime = tini
585 585
586 586 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
587 587
588 588 if self.__dataReady:
589 589
590 590 self.__profIndex = 0
591 591 jspc = self.buffer
592 592 jcspc = self.buffer2
593 593 #jnoise = self.buffer3
594 594 self.buffer = dataOut.data_spc
595 595 self.buffer2 = dataOut.data_cspc
596 596 #self.buffer3 = dataOut.noise
597 597 self.currentTime = dataOut.utctime
598 598 if numpy.any(jspc) :
599 599 #print( jspc.shape, jcspc.shape)
600 600 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
601 601 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
602 602 self.__dataReady = False
603 603 #print( jspc.shape, jcspc.shape)
604 604 dataOut.flagNoData = False
605 605 else:
606 606 dataOut.flagNoData = True
607 607 self.__dataReady = False
608 608 return dataOut
609 609 else:
610 610 #print( len(self.buffer))
611 611 if numpy.any(self.buffer):
612 612 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
613 613 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
614 614 self.buffer3 += dataOut.data_dc
615 615 else:
616 616 self.buffer = dataOut.data_spc
617 617 self.buffer2 = dataOut.data_cspc
618 618 self.buffer3 = dataOut.data_dc
619 619 #print self.index, self.fint
620 620 #print self.buffer2.shape
621 621 dataOut.flagNoData = True ## NOTE: ?? review LATER
622 622 self.__profIndex += 1
623 623 return dataOut ## NOTE: REV
624 624
625 625
626 626 #index = tini.tm_hour*12+tini.tm_min/5
627 627 '''REVIEW'''
628 628 # jspc = jspc/self.nFFTPoints/self.normFactor
629 629 # jcspc = jcspc/self.nFFTPoints/self.normFactor
630 630
631 631
632 632
633 633 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
634 634 dataOut.data_spc = tmp_spectra
635 635 dataOut.data_cspc = tmp_cspectra
636 636
637 637 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
638 638
639 639 dataOut.data_dc = self.buffer3
640 640 dataOut.nIncohInt *= self.nIntProfiles
641 641 dataOut.utctime = self.currentTime # averaged time
642 642 #print("Time: ",time.localtime(dataOut.utctime))
643 643 # dataOut.data_spc = sat_spectra
644 644 # dataOut.data_cspc = sat_cspectra
645 645 self.buffer = 0
646 646 self.buffer2 = 0
647 647 self.buffer3 = 0
648 648
649 649 return dataOut
650 650
651 651 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
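# Summary (descriptive comment added for clarity): for every selected height, FFT bin and
# channel, the power (dB) of the buffered blocks is histogrammed and fitted with fit_func;
# samples deviating from the fitted mode by more than factor_stdv*stdv are replaced by the mean
# of the surviving samples (in the self-spectra and in the matching cross-spectra pair), and the
# cleaned blocks are then summed into out_spectra / out_cspectra.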
652 652 #print("OP cleanRayleigh")
653 653 #import matplotlib.pyplot as plt
654 654 #for k in range(149):
655 655 #channelsProcssd = []
656 656 #channelA_ok = False
657 657 #rfunc = cspectra.copy() #self.bloques
658 658 rfunc = spectra.copy()
659 659 #rfunc = cspectra
660 660 #val_spc = spectra*0.0 #self.bloque0*0.0
661 661 #val_cspc = cspectra*0.0 #self.bloques*0.0
662 662 #in_sat_spectra = spectra.copy() #self.bloque0
663 663 #in_sat_cspectra = cspectra.copy() #self.bloques
664 664
665 665
666 666 ###ONLY FOR TEST:
667 667 raxs = math.ceil(math.sqrt(self.nPairs))
668 668 caxs = math.ceil(self.nPairs/raxs)
669 669 if self.nPairs <4:
670 670 raxs = 2
671 671 caxs = 2
672 672 #print(raxs, caxs)
673 673 fft_rev = 14 #nFFT to plot
674 674 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
675 675 hei_rev = hei_rev[0]
676 676 #print(hei_rev)
677 677
678 678 #print numpy.absolute(rfunc[:,0,0,14])
679 679
680 680 gauss_fit, covariance = None, None
681 681 for ih in range(self.minAltInd,self.maxAltInd):
682 682 for ifreq in range(self.nFFTPoints):
683 683 '''
684 684 ###ONLY FOR TEST:
685 685 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
686 686 fig, axs = plt.subplots(raxs, caxs)
687 687 fig2, axs2 = plt.subplots(raxs, caxs)
688 688 col_ax = 0
689 689 row_ax = 0
690 690 '''
691 691 #print(self.nPairs)
692 692 for ii in range(self.nChan): # SELF and CROSS channel pairs
693 693 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APPLY ONLY TO CONTIGUOUS PAIRS
694 694 # continue
695 695 # if not self.crosspairs[ii][0] in channelsProcssd:
696 696 # channelA_ok = True
697 697 #print("pair: ",self.crosspairs[ii])
698 698 '''
699 699 ###ONLY FOR TEST:
700 700 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
701 701 col_ax = 0
702 702 row_ax += 1
703 703 '''
704 704 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) # power (dB)?
705 705 #print(func2clean.shape)
706 706 val = (numpy.isfinite(func2clean)==True).nonzero()
707 707
708 708 if len(val)>0: # limiter
709 709 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
710 710 if min_val <= -40 :
711 711 min_val = -40
712 712 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
713 713 if max_val >= 200 :
714 714 max_val = 200
715 715 #print min_val, max_val
716 716 step = 1
717 717 #print("Getting bins and the histogram")
718 718 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
719 719 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
720 720 #print(len(y_dist),len(binstep[:-1]))
721 721 #print(row_ax,col_ax, " ..")
722 722 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
723 723 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
724 724 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
725 725 parg = [numpy.amax(y_dist),mean,sigma]
726 726
727 727 newY = None
728 728
729 729 try :
730 730 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
731 731 mode = gauss_fit[1]
732 732 stdv = gauss_fit[2]
733 733 #print(" FIT OK",gauss_fit)
734 734 '''
735 735 ###ONLY FOR TEST:
736 736 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
737 737 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
738 738 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
739 739 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
740 740 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
741 741 '''
742 742 except:
743 743 mode = mean
744 744 stdv = sigma
745 745 #print("FIT FAIL")
746 746 #continue
747 747
748 748
749 749 #print(mode,stdv)
750 750 #Removing echoes greater than mode + std_factor*stdv
751 751 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
752 752 # noval holds the indices that will be removed
753 753 #print("Chan ",ii," novals: ",len(noval[0]))
754 754 if len(noval[0]) > 0: # an array of shape (N,) has length N
755 755 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
756 756 #print(novall)
757 757 #print(" ",self.pairsArray[ii])
758 758 #cross_pairs = self.pairsArray[ii]
759 759 #Getting coherent echoes which are removed.
760 760 # if len(novall[0]) > 0:
761 761 #
762 762 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
763 763 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
764 764 # val_cspc[novall[0],ii,ifreq,ih] = 1
765 765 #print("OUT NOVALL 1")
766 766 try:
767 767 pair = (self.channels[ii],self.channels[ii + 1])
768 768 except:
769 769 pair = (99,99)
770 770 #print("par ", pair)
771 771 if ( pair in self.crosspairs):
772 772 q = self.crosspairs.index(pair)
773 773 #print("estΓ‘ aqui: ", q, (ii,ii + 1))
774 774 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
775 775 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
776 776
777 777 #if channelA_ok:
778 778 #chA = self.channels.index(cross_pairs[0])
779 779 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
780 780 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
781 781 #channelA_ok = False
782 782
783 783 # chB = self.channels.index(cross_pairs[1])
784 784 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
785 785 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
786 786 #
787 787 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
788 788 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
789 789 '''
790 790 ###ONLY FOR TEST:
791 791 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
792 792 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
793 793 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
794 794 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
795 795 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
796 796 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
797 797 '''
798 798 '''
799 799 ###ONLY FOR TEST:
800 800 col_ax += 1 # plot column counter
801 801 ##print(col_ax)
802 802 ###ONLY FOR TEST:
803 803 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
804 804 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
805 805 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
806 806 fig.suptitle(title)
807 807 fig2.suptitle(title2)
808 808 plt.show()
809 809 '''
810 810 ##################################################################################################
811 811
812 812 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
813 813 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
814 814 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
815 815 for ih in range(self.nHeights):
816 816 for ifreq in range(self.nFFTPoints):
817 817 for ich in range(self.nChan):
818 818 tmp = spectra[:,ich,ifreq,ih]
819 819 valid = (numpy.isfinite(tmp[:])==True).nonzero()
820 820
821 821 if len(valid[0]) >0 :
822 822 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
823 823
824 824 for icr in range(self.nPairs):
825 825 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
826 826 valid = (numpy.isfinite(tmp)==True).nonzero()
827 827 if len(valid[0]) > 0:
828 828 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
829 829
830 830 return out_spectra, out_cspectra
831 831
832 832 def REM_ISOLATED_POINTS(self,array,rth):
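# Summary (descriptive comment added for clarity): for each 2-D block (profiles x heights) the
# finite, positive points are located; any point with fewer than 4 neighbours within a radius
# rth (in index units) is dropped, and a block left with fewer than 4 valid points is zeroed.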
833 833 # import matplotlib.pyplot as plt
834 834 if rth == None :
835 835 rth = 4
836 836 #print("REM ISO")
837 837 num_prof = len(array[0,:,0])
838 838 num_hei = len(array[0,0,:])
839 839 n2d = len(array[:,0,0])
840 840
841 841 for ii in range(n2d) :
842 842 #print ii,n2d
843 843 tmp = array[ii,:,:]
844 844 #print tmp.shape, array[ii,101,:],array[ii,102,:]
845 845
846 846 # fig = plt.figure(figsize=(6,5))
847 847 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
848 848 # ax = fig.add_axes([left, bottom, width, height])
849 849 # x = range(num_prof)
850 850 # y = range(num_hei)
851 851 # cp = ax.contour(y,x,tmp)
852 852 # ax.clabel(cp, inline=True,fontsize=10)
853 853 # plt.show()
854 854
855 855 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
856 856 tmp = numpy.reshape(tmp,num_prof*num_hei)
857 857 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
858 858 indxs2 = (tmp > 0).nonzero()
859 859
860 860 indxs1 = (indxs1[0])
861 861 indxs2 = indxs2[0]
862 862 #indxs1 = numpy.array(indxs1[0])
863 863 #indxs2 = numpy.array(indxs2[0])
864 864 indxs = None
865 865 #print indxs1 , indxs2
866 866 for iv in range(len(indxs2)):
867 867 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
868 868 #print len(indxs2), indv
869 869 if len(indv[0]) > 0 :
870 870 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
871 871 # print indxs
872 872 indxs = indxs[1:]
873 873 #print(indxs, len(indxs))
874 874 if len(indxs) < 4 :
875 875 array[ii,:,:] = 0.
876 876 return
877 877
878 878 xpos = numpy.mod(indxs ,num_hei)
879 879 ypos = (indxs / num_hei)
880 880 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
881 881 #print sx
882 882 xpos = xpos[sx]
883 883 ypos = ypos[sx]
884 884
885 885 # *********************************** Cleaning isolated points **********************************
886 886 ic = 0
887 887 while True :
888 888 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
889 889 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
890 890 #plt.plot(r)
891 891 #plt.show()
892 892 no_coh1 = (numpy.isfinite(r)==True).nonzero()
893 893 no_coh2 = (r <= rth).nonzero()
894 894 #print r, no_coh1, no_coh2
895 895 no_coh1 = numpy.array(no_coh1[0])
896 896 no_coh2 = numpy.array(no_coh2[0])
897 897 no_coh = None
898 898 #print valid1 , valid2
899 899 for iv in range(len(no_coh2)):
900 900 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
901 901 if len(indv[0]) > 0 :
902 902 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
903 903 no_coh = no_coh[1:]
904 904 #print len(no_coh), no_coh
905 905 if len(no_coh) < 4 :
906 906 #print xpos[ic], ypos[ic], ic
907 907 # plt.plot(r)
908 908 # plt.show()
909 909 xpos[ic] = numpy.nan
910 910 ypos[ic] = numpy.nan
911 911
912 912 ic = ic + 1
913 913 if (ic == len(indxs)) :
914 914 break
915 915 #print( xpos, ypos)
916 916
917 917 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
918 918 #print indxs[0]
919 919 if len(indxs[0]) < 4 :
920 920 array[ii,:,:] = 0.
921 921 return
922 922
923 923 xpos = xpos[indxs[0]]
924 924 ypos = ypos[indxs[0]]
925 925 for i in range(0,len(ypos)):
926 926 ypos[i]=int(ypos[i])
927 927 junk = tmp
928 928 tmp = junk*0.0
929 929
930 930 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
931 931 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
932 932
933 933 #print array.shape
934 934 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
935 935 #print tmp.shape
936 936
937 937 # fig = plt.figure(figsize=(6,5))
938 938 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
939 939 # ax = fig.add_axes([left, bottom, width, height])
940 940 # x = range(num_prof)
941 941 # y = range(num_hei)
942 942 # cp = ax.contour(y,x,array[ii,:,:])
943 943 # ax.clabel(cp, inline=True,fontsize=10)
944 944 # plt.show()
945 945 return array
946 946
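# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# REM_ISOLATED_POINTS keeps only the finite, positive spectral points that have
# at least 4 neighbours (counting the point itself) within a radius `rth` in
# (profile, height) index space. The neighbour test on a set of 2-D positions
# can be written compactly with numpy broadcasting; the coordinates below are
# made up for the example:

import numpy

xpos = numpy.array([1, 2, 2, 3, 40], dtype=float)
ypos = numpy.array([1, 1, 2, 2, 40], dtype=float)
rth = 4

dist = numpy.sqrt((xpos[:, None] - xpos) ** 2 + (ypos[:, None] - ypos) ** 2)
neighbours = (dist <= rth).sum(axis=1)   # each point counts itself once
keep = neighbours >= 4                   # the isolated point at (40, 40) is dropped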
947 947
948 948 class IntegrationFaradaySpectra(Operation):
949 949
950 950 __profIndex = 0
951 951 __withOverapping = False
952 952
953 953 __byTime = False
954 954 __initime = None
955 955 __lastdatatime = None
956 956 __integrationtime = None
957 957
958 958 __buffer_spc = None
959 959 __buffer_cspc = None
960 960 __buffer_dc = None
961 961
962 962 __dataReady = False
963 963
964 964 __timeInterval = None
965 965
966 966 n = None
967 967
968 968 def __init__(self):
969 969
970 970 Operation.__init__(self)
971 971
972 972 def setup(self, dataOut,n=None, timeInterval=None, overlapping=False, DPL=None):
973 973 """
974 974 Set the parameters of the integration class.
975 975
976 976 Inputs:
977 977
978 978 n : Number of incoherent integrations
979 979 timeInterval : Integration time. Ignored when the parameter "n" is given
980 980 overlapping :
981 981
982 982 """
983 983
984 984 self.__initime = None
985 985 self.__lastdatatime = 0
986 986
987 987 self.__buffer_spc = []
988 988 self.__buffer_cspc = []
989 989 self.__buffer_dc = 0
990 990
991 991 self.__profIndex = 0
992 992 self.__dataReady = False
993 993 self.__byTime = False
994 994
995 995 #self.ByLags = dataOut.ByLags ###REDEFINIR
996 996 self.ByLags = False
997 997
998 998 if DPL != None:
999 999 self.DPL=DPL
1000 1000 else:
1001 1001 #self.DPL=dataOut.DPL ###REDEFINIR
1002 1002 self.DPL=0
1003 1003
1004 1004 if n is None and timeInterval is None:
1005 1005 raise ValueError("n or timeInterval should be specified ...")
1006 1006
1007 1007 if n is not None:
1008 1008 self.n = int(n)
1009 1009 else:
1010 1010
1011 1011 self.__integrationtime = int(timeInterval)
1012 1012 self.n = None
1013 1013 self.__byTime = True
1014 1014
1015 1015 def putData(self, data_spc, data_cspc, data_dc):
1016 1016 """
1017 1017 Add a profile to the __buffer_spc and increase the __profileIndex by one
1018 1018
1019 1019 """
1020 1020
1021 1021 self.__buffer_spc.append(data_spc)
1022 1022
1023 1023 if data_cspc is None:
1024 1024 self.__buffer_cspc = None
1025 1025 else:
1026 1026 self.__buffer_cspc.append(data_cspc)
1027 1027
1028 1028 if data_dc is None:
1029 1029 self.__buffer_dc = None
1030 1030 else:
1031 1031 self.__buffer_dc += data_dc
1032 1032
1033 1033 self.__profIndex += 1
1034 1034
1035 1035 return
1036 1036
1037 1037 def hildebrand_sekhon_Integration(self,data,navg):
1038 1038
1039 1039 sortdata = numpy.sort(data, axis=None)
1040 1040 sortID=data.argsort()
1041 1041 lenOfData = len(sortdata)
1042 1042 nums_min = lenOfData*0.75
1043 1043 if nums_min <= 5:
1044 1044 nums_min = 5
1045 1045 sump = 0.
1046 1046 sumq = 0.
1047 1047 j = 0
1048 1048 cont = 1
1049 1049 while((cont == 1)and(j < lenOfData)):
1050 1050 sump += sortdata[j]
1051 1051 sumq += sortdata[j]**2
1052 1052 if j > nums_min:
1053 1053 rtest = float(j)/(j-1) + 1.0/navg
1054 1054 if ((sumq*j) > (rtest*sump**2)):
1055 1055 j = j - 1
1056 1056 sump = sump - sortdata[j]
1057 1057 sumq = sumq - sortdata[j]**2
1058 1058 cont = 0
1059 1059 j += 1
1060 1060 #lnoise = sump / j
1061 1061
1062 1062 return j,sortID
1063 1063
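# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# hildebrand_sekhon_Integration applies the Hildebrand & Sekhon criterion:
# samples are sorted and accumulated until their variance exceeds what white
# noise with `navg` averages would allow; the returned index marks the
# noise/signal boundary and the sorted IDs beyond it are treated as outliers.
# A minimal standalone version of the same test, assuming a 1-D numpy array of
# spectral powers, could look like this:

import numpy

def hs_noise_boundary(samples, navg=1):
    """Return the index that splits the sorted samples into noise and signal."""
    sortdata = numpy.sort(samples, axis=None)
    sump = 0.0
    sumq = 0.0
    for j, value in enumerate(sortdata):
        sump += value
        sumq += value ** 2
        if j > 1:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if (sumq * j) > (rtest * sump ** 2):
                return j
    return len(sortdata)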
1064 1064 def pushData(self):
1065 1065 """
1066 1066 Return the sum of the last profiles and the profiles used in the sum.
1067 1067
1068 1068 Affected:
1069 1069
1070 1070 self.__profileIndex
1071 1071
1072 1072 """
1073 1073 bufferH=None
1074 1074 buffer=None
1075 1075 buffer1=None
1076 1076 buffer_cspc=None
1077 1077 self.__buffer_spc=numpy.array(self.__buffer_spc)
1078 1078 self.__buffer_cspc=numpy.array(self.__buffer_cspc)
1079 1079 freq_dc = int(self.__buffer_spc.shape[2] / 2)
1080 1080 #print("FREQ_DC",freq_dc,self.__buffer_spc.shape,self.nHeights)
1081 1081 for k in range(7,self.nHeights):
1082 1082 buffer_cspc=numpy.copy(self.__buffer_cspc[:,:,:,k])
1083 1083 outliers_IDs_cspc=[]
1084 1084 cspc_outliers_exist=False
1085 1085 for i in range(self.nChannels):#dataOut.nChannels):
1086 1086
1087 1087 buffer1=numpy.copy(self.__buffer_spc[:,i,:,k])
1088 1088 indexes=[]
1089 1089 #sortIDs=[]
1090 1090 outliers_IDs=[]
1091 1091
1092 1092 for j in range(self.nProfiles):
1093 1093 # if i==0 and j==freq_dc: #NOT CONSIDERING DC PROFILE AT CHANNEL 0
1094 1094 # continue
1095 1095 # if i==1 and j==0: #NOT CONSIDERING DC PROFILE AT CHANNEL 1
1096 1096 # continue
1097 1097 buffer=buffer1[:,j]
1098 1098 index,sortID=self.hildebrand_sekhon_Integration(buffer,1)
1099 1099
1100 1100 indexes.append(index)
1101 1101 #sortIDs.append(sortID)
1102 1102 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1103 1103
1104 1104 outliers_IDs=numpy.array(outliers_IDs)
1105 1105 outliers_IDs=outliers_IDs.ravel()
1106 1106 outliers_IDs=numpy.unique(outliers_IDs)
1107 1107 outliers_IDs=outliers_IDs.astype(numpy.dtype('int64'))
1108 1108 indexes=numpy.array(indexes)
1109 1109 indexmin=numpy.min(indexes)
1110 1110
1111 1111 if indexmin != buffer1.shape[0]:
1112 1112 cspc_outliers_exist=True
1113 1113 ###sortdata=numpy.sort(buffer1,axis=0)
1114 1114 ###avg2=numpy.mean(sortdata[:indexmin,:],axis=0)
1115 1115 lt=outliers_IDs
1116 1116 avg=numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt],:],axis=0)
1117 1117
1118 1118 for p in list(outliers_IDs):
1119 1119 buffer1[p,:]=avg
1120 1120
1121 1121 self.__buffer_spc[:,i,:,k]=numpy.copy(buffer1)
1122 1122 ###cspc IDs
1123 1123 #indexmin_cspc+=indexmin_cspc
1124 1124 outliers_IDs_cspc=numpy.append(outliers_IDs_cspc,outliers_IDs)
1125 1125
1126 1126 #if not breakFlag:
1127 1127 outliers_IDs_cspc=outliers_IDs_cspc.astype(numpy.dtype('int64'))
1128 1128 if cspc_outliers_exist:
1129 1129 #sortdata=numpy.sort(buffer_cspc,axis=0)
1130 1130 #avg=numpy.mean(sortdata[:indexmin_cpsc,:],axis=0)
1131 1131 lt=outliers_IDs_cspc
1132 1132
1133 1133 avg=numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt],:],axis=0)
1134 1134 for p in list(outliers_IDs_cspc):
1135 1135 buffer_cspc[p,:]=avg
1136 1136
1137 1137 self.__buffer_cspc[:,:,:,k]=numpy.copy(buffer_cspc)
1138 1138 #else:
1139 1139 #break
1140 1140
1141 1141
1142 1142
1143 1143
1144 1144 buffer=None
1145 1145 bufferH=None
1146 1146 buffer1=None
1147 1147 buffer_cspc=None
1148 1148
1149 1149 #print("cpsc",self.__buffer_cspc[:,0,0,0,0])
1150 1150 #print(self.__profIndex)
1151 1151 #exit()
1152 1152
1153 1153 buffer=None
1154 1154 #print(self.__buffer_spc[:,1,3,20,0])
1155 1155 #print(self.__buffer_spc[:,1,5,37,0])
1156 1156 data_spc = numpy.sum(self.__buffer_spc,axis=0)
1157 1157 data_cspc = numpy.sum(self.__buffer_cspc,axis=0)
1158 1158
1159 1159 #print(numpy.shape(data_spc))
1160 1160 #data_spc[1,4,20,0]=numpy.nan
1161 1161
1162 1162 #data_cspc = self.__buffer_cspc
1163 1163 data_dc = self.__buffer_dc
1164 1164 n = self.__profIndex
1165 1165
1166 1166 self.__buffer_spc = []
1167 1167 self.__buffer_cspc = []
1168 1168 self.__buffer_dc = 0
1169 1169 self.__profIndex = 0
1170 1170
1171 1171 return data_spc, data_cspc, data_dc, n
1172 1172
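# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# Inside pushData() the integrations flagged as outliers by Hildebrand-Sekhon
# are replaced, per frequency bin, by the mean of the non-flagged integrations
# before the buffer is summed. The replacement step alone, on a hypothetical
# buffer of shape [n_integrations, nFFTPoints]:

import numpy

buffer1 = numpy.random.rand(8, 16)       # 8 accumulated spectra, 16 bins
outliers_IDs = numpy.array([2, 5])       # rows flagged as outliers

good_rows = [t for t in range(buffer1.shape[0]) if t not in outliers_IDs]
avg = numpy.mean(buffer1[good_rows, :], axis=0)
buffer1[list(outliers_IDs), :] = avg     # outlier rows get the clean average
integrated = buffer1.sum(axis=0)         # the buffer is then summed over time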
1173 1173 def byProfiles(self, *args):
1174 1174
1175 1175 self.__dataReady = False
1176 1176 avgdata_spc = None
1177 1177 avgdata_cspc = None
1178 1178 avgdata_dc = None
1179 1179
1180 1180 self.putData(*args)
1181 1181
1182 1182 if self.__profIndex == self.n:
1183 1183
1184 1184 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1185 1185 self.n = n
1186 1186 self.__dataReady = True
1187 1187
1188 1188 return avgdata_spc, avgdata_cspc, avgdata_dc
1189 1189
1190 1190 def byTime(self, datatime, *args):
1191 1191
1192 1192 self.__dataReady = False
1193 1193 avgdata_spc = None
1194 1194 avgdata_cspc = None
1195 1195 avgdata_dc = None
1196 1196
1197 1197 self.putData(*args)
1198 1198
1199 1199 if (datatime - self.__initime) >= self.__integrationtime:
1200 1200 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1201 1201 self.n = n
1202 1202 self.__dataReady = True
1203 1203
1204 1204 return avgdata_spc, avgdata_cspc, avgdata_dc
1205 1205
1206 1206 def integrate(self, datatime, *args):
1207 1207
1208 1208 if self.__profIndex == 0:
1209 1209 self.__initime = datatime
1210 1210
1211 1211 if self.__byTime:
1212 1212 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1213 1213 datatime, *args)
1214 1214 else:
1215 1215 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1216 1216
1217 1217 if not self.__dataReady:
1218 1218 return None, None, None, None
1219 1219
1220 1220 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1221 1221
1222 1222 def run(self, dataOut, n=None, DPL = None,timeInterval=None, overlapping=False):
1223 1223 if n == 1:
1224 1224 return dataOut
1225 1225
1226 1226 dataOut.flagNoData = True
1227 1227
1228 1228 if not self.isConfig:
1229 1229 self.setup(dataOut, n, timeInterval, overlapping,DPL )
1230 1230 self.isConfig = True
1231 1231
1232 1232 if not self.ByLags:
1233 1233 self.nProfiles=dataOut.nProfiles
1234 1234 self.nChannels=dataOut.nChannels
1235 1235 self.nHeights=dataOut.nHeights
1236 1236 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1237 1237 dataOut.data_spc,
1238 1238 dataOut.data_cspc,
1239 1239 dataOut.data_dc)
1240 1240 else:
1241 1241 self.nProfiles=dataOut.nProfiles
1242 1242 self.nChannels=dataOut.nChannels
1243 1243 self.nHeights=dataOut.nHeights
1244 1244 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1245 1245 dataOut.dataLag_spc,
1246 1246 dataOut.dataLag_cspc,
1247 1247 dataOut.dataLag_dc)
1248 1248
1249 1249 if self.__dataReady:
1250 1250
1251 1251 if not self.ByLags:
1252 1252
1253 1253 dataOut.data_spc = numpy.squeeze(avgdata_spc)
1254 1254 dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
1255 1255 dataOut.data_dc = avgdata_dc
1256 1256 else:
1257 1257 dataOut.dataLag_spc = avgdata_spc
1258 1258 dataOut.dataLag_cspc = avgdata_cspc
1259 1259 dataOut.dataLag_dc = avgdata_dc
1260 1260
1261 1261 dataOut.data_spc=dataOut.dataLag_spc[:,:,:,dataOut.LagPlot]
1262 1262 dataOut.data_cspc=dataOut.dataLag_cspc[:,:,:,dataOut.LagPlot]
1263 1263 dataOut.data_dc=dataOut.dataLag_dc[:,:,dataOut.LagPlot]
1264 1264
1265 1265
1266 1266 dataOut.nIncohInt *= self.n
1267 1267 dataOut.utctime = avgdatatime
1268 1268 dataOut.flagNoData = False
1269 1269
1270 1270 return dataOut
1271 1271
1272 1272 class removeInterference(Operation):
1273 1273
1274 1274 def removeInterference2(self):
1275 1275
1276 1276 cspc = self.dataOut.data_cspc
1277 1277 spc = self.dataOut.data_spc
1278 1278 Heights = numpy.arange(cspc.shape[2])
1279 1279 realCspc = numpy.abs(cspc)
1280 1280
1281 1281 for i in range(cspc.shape[0]):
1282 1282 LinePower= numpy.sum(realCspc[i], axis=0)
1283 1283 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1284 1284 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1285 1285 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1286 1286 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1287 1287 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1288 1288
1289 1289
1290 1290 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1291 1291 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1292 1292 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1293 1293 cspc[i,InterferenceRange,:] = numpy.NaN
1294 1294
1295 1295 self.dataOut.data_cspc = cspc
1296 1296
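# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# removeInterference2 first selects the heights whose summed cross-spectral
# power falls below a threshold, then flags the Doppler profiles whose power
# summed over those heights lies above roughly the 98th percentile. The
# percentile-based flagging alone, on synthetic data:

import numpy

InterferenceSum = numpy.random.rand(64)   # per-profile power over the selected heights
threshold = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.98)]
flagged_profiles = numpy.where(InterferenceSum > threshold)[0]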
1297 1297 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
1298 1298
1299 1299 jspectra = self.dataOut.data_spc
1300 1300 jcspectra = self.dataOut.data_cspc
1301 1301 jnoise = self.dataOut.getNoise()
1302 1302 num_incoh = self.dataOut.nIncohInt
1303 1303
1304 1304 num_channel = jspectra.shape[0]
1305 1305 num_prof = jspectra.shape[1]
1306 1306 num_hei = jspectra.shape[2]
1307 1307
1308 1308 # hei_interf
1309 1309 count_hei = int(num_hei / 2) # defined up front; it is also needed when hei_interf is given
1310 1310 if hei_interf is None:
1311 1311 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1312 1312 hei_interf = numpy.asarray(hei_interf)[0]
1313 1313 # nhei_interf
1314 1314 if (nhei_interf == None):
1315 1315 nhei_interf = 5
1316 1316 if (nhei_interf < 1):
1317 1317 nhei_interf = 1
1318 1318 if (nhei_interf > count_hei):
1319 1319 nhei_interf = count_hei
1320 1320 if (offhei_interf == None):
1321 1321 offhei_interf = 0
1322 1322
1323 1323 ind_hei = list(range(num_hei))
1324 1324 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1325 1325 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1326 1326 mask_prof = numpy.asarray(list(range(num_prof)))
1327 1327 num_mask_prof = mask_prof.size
1328 1328 comp_mask_prof = [0, num_prof / 2]
1329 1329
1330 1330 # noise_exist: determines whether jnoise has been defined and holds the noise information of each channel
1331 1331 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1332 1332 jnoise = numpy.full(num_channel, numpy.nan) # keep it indexable so the check below yields False
1333 1333 noise_exist = jnoise[0] < numpy.Inf
1334 1334
1335 1335 # Interference removal subroutine
1336 1336 for ich in range(num_channel):
1337 1337 # Sort the spectra by power (lowest to highest)
1338 1338 power = jspectra[ich, mask_prof, :]
1339 1339 power = power[:, hei_interf]
1340 1340 power = power.sum(axis=0)
1341 1341 psort = power.ravel().argsort()
1342 1342
1343 1343 # Estimate the average interference in the power spectra using the lowest-power heights
1344 1344 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1345 1345 offhei_interf, nhei_interf + offhei_interf))]]]
1346 1346
1347 1347 if noise_exist:
1348 1348 # tmp_noise = jnoise[ich] / num_prof
1349 1349 tmp_noise = jnoise[ich]
1350 1350 junkspc_interf = junkspc_interf - tmp_noise
1351 1351 #junkspc_interf[:,comp_mask_prof] = 0
1352 1352
1353 1353 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1354 1354 jspc_interf = jspc_interf.transpose()
1355 1355 # Compute the average interference spectrum
1356 1356 noiseid = numpy.where(
1357 1357 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1358 1358 noiseid = noiseid[0]
1359 1359 cnoiseid = noiseid.size
1360 1360 interfid = numpy.where(
1361 1361 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1362 1362 interfid = interfid[0]
1363 1363 cinterfid = interfid.size
1364 1364
1365 1365 if (cnoiseid > 0):
1366 1366 jspc_interf[noiseid] = 0
1367 1367
1368 1368 # Expand the set of profiles to clean
1369 1369 if (cinterfid > 0):
1370 1370 new_interfid = (
1371 1371 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1372 1372 new_interfid = numpy.asarray(new_interfid)
1373 1373 new_interfid = {x for x in new_interfid}
1374 1374 new_interfid = numpy.array(list(new_interfid))
1375 1375 new_cinterfid = new_interfid.size
1376 1376 else:
1377 1377 new_cinterfid = 0
1378 1378
1379 1379 for ip in range(new_cinterfid):
1380 1380 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1381 1381 jspc_interf[new_interfid[ip]
1382 1382 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1383 1383
1384 1384 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1385 1385 ind_hei] - jspc_interf # fix indices
1386 1386
1387 1387 # Remove the interference from the most-interfered profile
1388 1388 ListAux = jspc_interf[mask_prof].tolist()
1389 1389 maxid = ListAux.index(max(ListAux))
1390 1390
1391 1391 if cinterfid > 0:
1392 1392 for ip in range(cinterfid * (interf == 2) - 1):
1393 1393 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1394 1394 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1395 1395 cind = len(ind)
1396 1396
1397 1397 if (cind > 0):
1398 1398 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1399 1399 (1 + (numpy.random.uniform(cind) - 0.5) /
1400 1400 numpy.sqrt(num_incoh))
1401 1401
1402 1402 ind = numpy.array([-2, -1, 1, 2])
1403 1403 xx = numpy.zeros([4, 4])
1404 1404
1405 1405 for id1 in range(4):
1406 1406 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1407 1407
1408 1408 xx_inv = numpy.linalg.inv(xx)
1409 1409 xx = xx_inv[:, 0]
1410 1410 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1411 1411 yy = jspectra[ich, mask_prof[ind], :]
1412 1412 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1413 1413 yy.transpose(), xx)
1414 1414
1415 1415 indAux = (jspectra[ich, :, :] < tmp_noise *
1416 1416 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1417 1417 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1418 1418 (1 - 1 / numpy.sqrt(num_incoh))
1419 1419
1420 1420 # Interference removal in the cross-spectra
1421 1421 if jcspectra is None:
1422 1422 return jspectra, jcspectra
1423 1423 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1424 1424 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1425 1425
1426 1426 for ip in range(num_pairs):
1427 1427
1428 1428 #-------------------------------------------
1429 1429
1430 1430 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1431 1431 cspower = cspower[:, hei_interf]
1432 1432 cspower = cspower.sum(axis=0)
1433 1433
1434 1434 cspsort = cspower.ravel().argsort()
1435 1435 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1436 1436 offhei_interf, nhei_interf + offhei_interf))]]]
1437 1437 junkcspc_interf = junkcspc_interf.transpose()
1438 1438 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1439 1439
1440 1440 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1441 1441
1442 1442 median_real = int(numpy.median(numpy.real(
1443 1443 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1444 1444 median_imag = int(numpy.median(numpy.imag(
1445 1445 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1446 1446 comp_mask_prof = [int(e) for e in comp_mask_prof]
1447 1447 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
1448 1448 median_real, median_imag)
1449 1449
1450 1450 for iprof in range(num_prof):
1451 1451 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1452 1452 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1453 1453
1454 1454 # Remove the interference
1455 1455 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1456 1456 :, ind_hei] - jcspc_interf
1457 1457
1458 1458 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1459 1459 maxid = ListAux.index(max(ListAux))
1460 1460
1461 1461 ind = numpy.array([-2, -1, 1, 2])
1462 1462 xx = numpy.zeros([4, 4])
1463 1463
1464 1464 for id1 in range(4):
1465 1465 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1466 1466
1467 1467 xx_inv = numpy.linalg.inv(xx)
1468 1468 xx = xx_inv[:, 0]
1469 1469
1470 1470 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1471 1471 yy = jcspectra[ip, mask_prof[ind], :]
1472 1472 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1473 1473
1474 1474 # Save results
1475 1475 self.dataOut.data_spc = jspectra
1476 1476 self.dataOut.data_cspc = jcspectra
1477 1477
1478 1478 return 1
1479 1479
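# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# In both the spectra and cross-spectra branches above, the profile with the
# strongest interference (maxid) is rebuilt from its neighbours at offsets
# -2, -1, +1, +2 through a cubic fit evaluated at offset 0: the 4x4 Vandermonde
# matrix is inverted and only the weights for the value at 0 are kept.

import numpy

ind = numpy.array([-2, -1, 1, 2])
xx = numpy.zeros([4, 4])
for id1 in range(4):
    xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
weights = numpy.linalg.inv(xx)[:, 0]              # weights applied to the 4 neighbours

neighbours = numpy.array([1.0, 2.0, 4.0, 5.0])    # hypothetical spectrum values
reconstructed = numpy.dot(neighbours, weights)    # cubic estimate at offset 0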
1480 1480 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1481 1481
1482 1482 self.dataOut = dataOut
1483 1483
1484 1484 if mode == 1:
1485 1485 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1486 1486 elif mode == 2:
1487 1487 self.removeInterference2()
1488 1488
1489 1489 return self.dataOut
1490 1490
1491 1491
1492 1492 class IncohInt(Operation):
1493 1493
1494 1494 __profIndex = 0
1495 1495 __withOverapping = False
1496 1496
1497 1497 __byTime = False
1498 1498 __initime = None
1499 1499 __lastdatatime = None
1500 1500 __integrationtime = None
1501 1501
1502 1502 __buffer_spc = None
1503 1503 __buffer_cspc = None
1504 1504 __buffer_dc = None
1505 1505
1506 1506 __dataReady = False
1507 1507
1508 1508 __timeInterval = None
1509 1509
1510 1510 n = None
1511 1511
1512 1512 def __init__(self):
1513 1513
1514 1514 Operation.__init__(self)
1515 1515
1516 1516 def setup(self, n=None, timeInterval=None, overlapping=False):
1517 1517 """
1518 1518 Set the parameters of the integration class.
1519 1519
1520 1520 Inputs:
1521 1521
1522 1522 n : Number of incoherent integrations
1523 1523 timeInterval : Integration time. Ignored when the parameter "n" is given
1524 1524 overlapping :
1525 1525
1526 1526 """
1527 1527
1528 1528 self.__initime = None
1529 1529 self.__lastdatatime = 0
1530 1530
1531 1531 self.__buffer_spc = 0
1532 1532 self.__buffer_cspc = 0
1533 1533 self.__buffer_dc = 0
1534 1534
1535 1535 self.__profIndex = 0
1536 1536 self.__dataReady = False
1537 1537 self.__byTime = False
1538 1538
1539 1539 if n is None and timeInterval is None:
1540 1540 raise ValueError("n or timeInterval should be specified ...")
1541 1541
1542 1542 if n is not None:
1543 1543 self.n = int(n)
1544 1544 else:
1545 1545
1546 1546 self.__integrationtime = int(timeInterval)
1547 1547 self.n = None
1548 1548 self.__byTime = True
1549 1549
1550 1550 def putData(self, data_spc, data_cspc, data_dc):
1551 1551 """
1552 1552 Add a profile to the __buffer_spc and increase the __profileIndex by one
1553 1553
1554 1554 """
1555 1555
1556 1556 self.__buffer_spc += data_spc
1557 1557
1558 1558 if data_cspc is None:
1559 1559 self.__buffer_cspc = None
1560 1560 else:
1561 1561 self.__buffer_cspc += data_cspc
1562 1562
1563 1563 if data_dc is None:
1564 1564 self.__buffer_dc = None
1565 1565 else:
1566 1566 self.__buffer_dc += data_dc
1567 1567
1568 1568 self.__profIndex += 1
1569 1569
1570 1570 return
1571 1571
1572 1572 def pushData(self):
1573 1573 """
1574 1574 Return the sum of the last profiles and the profiles used in the sum.
1575 1575
1576 1576 Affected:
1577 1577
1578 1578 self.__profileIndex
1579 1579
1580 1580 """
1581 1581
1582 1582 data_spc = self.__buffer_spc
1583 1583 data_cspc = self.__buffer_cspc
1584 1584 data_dc = self.__buffer_dc
1585 1585 n = self.__profIndex
1586 1586
1587 1587 self.__buffer_spc = 0
1588 1588 self.__buffer_cspc = 0
1589 1589 self.__buffer_dc = 0
1590 1590 self.__profIndex = 0
1591 1591
1592 1592 return data_spc, data_cspc, data_dc, n
1593 1593
1594 1594 def byProfiles(self, *args):
1595 1595
1596 1596 self.__dataReady = False
1597 1597 avgdata_spc = None
1598 1598 avgdata_cspc = None
1599 1599 avgdata_dc = None
1600 1600
1601 1601 self.putData(*args)
1602 1602
1603 1603 if self.__profIndex == self.n:
1604 1604
1605 1605 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1606 1606 self.n = n
1607 1607 self.__dataReady = True
1608 1608
1609 1609 return avgdata_spc, avgdata_cspc, avgdata_dc
1610 1610
1611 1611 def byTime(self, datatime, *args):
1612 1612
1613 1613 self.__dataReady = False
1614 1614 avgdata_spc = None
1615 1615 avgdata_cspc = None
1616 1616 avgdata_dc = None
1617 1617
1618 1618 self.putData(*args)
1619 1619
1620 1620 if (datatime - self.__initime) >= self.__integrationtime:
1621 1621 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1622 1622 self.n = n
1623 1623 self.__dataReady = True
1624 1624
1625 1625 return avgdata_spc, avgdata_cspc, avgdata_dc
1626 1626
1627 1627 def integrate(self, datatime, *args):
1628 1628
1629 1629 if self.__profIndex == 0:
1630 1630 self.__initime = datatime
1631 1631
1632 1632 if self.__byTime:
1633 1633 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1634 1634 datatime, *args)
1635 1635 else:
1636 1636 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1637 1637
1638 1638 if not self.__dataReady:
1639 1639 return None, None, None, None
1640 1640
1641 1641 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1642 1642
1643 1643 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1644 1644 if n == 1:
1645 1645 return dataOut
1646 1646
1647 1647 dataOut.flagNoData = True
1648 1648
1649 1649 if not self.isConfig:
1650 1650 self.setup(n, timeInterval, overlapping)
1651 1651 self.isConfig = True
1652 1652
1653 1653 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1654 1654 dataOut.data_spc,
1655 1655 dataOut.data_cspc,
1656 1656 dataOut.data_dc)
1657 1657
1658 1658 if self.__dataReady:
1659 1659
1660 1660 dataOut.data_spc = avgdata_spc
1661 1661 dataOut.data_cspc = avgdata_cspc
1662 1662 dataOut.data_dc = avgdata_dc
1663 1663 dataOut.nIncohInt *= self.n
1664 1664 dataOut.utctime = avgdatatime
1665 1665 dataOut.flagNoData = False
1666
1666
1667 1667 return dataOut
1668 1668
1669 1669 class dopplerFlip(Operation):
1670 1670
1671 1671 def run(self, dataOut):
1672 1672 # array 1: (num_chan, num_profiles, num_heights)
1673 1673 self.dataOut = dataOut
1674 1674 # JULIA-oblique, index 2
1675 1675 # array 2: (num_profiles, num_heights)
1676 1676 jspectra = self.dataOut.data_spc[2]
1677 1677 jspectra_tmp = numpy.zeros(jspectra.shape)
1678 1678 num_profiles = jspectra.shape[0]
1679 1679 freq_dc = int(num_profiles / 2)
1680 1680 # Flip using a for loop
1681 1681 for j in range(num_profiles):
1682 1682 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1683 1683 # Swap the DC profile with the immediately preceding profile
1684 1684 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1685 1685 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1686 1686 # the modified channel is written back into the channel array
1687 1687 self.dataOut.data_spc[2] = jspectra_tmp
1688 1688
1689 1689 return self.dataOut
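# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# The per-profile loop in dopplerFlip reverses the Doppler axis of channel 2
# while keeping the DC bin and the bin just before it in place. The same
# operation can be expressed with numpy slicing, shown here on a synthetic
# spectrum:

import numpy

num_profiles, num_heights = 8, 4
jspectra = numpy.random.rand(num_profiles, num_heights)
freq_dc = num_profiles // 2

flipped = jspectra[::-1].copy()               # reverse the Doppler/profile axis
flipped[freq_dc - 1] = jspectra[freq_dc - 1]  # restore the bin before DC
flipped[freq_dc] = jspectra[freq_dc]          # restore the DC bin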
@@ -1,1633 +1,1638
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8
8 import numpy
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList=None):
59 59 self.channelList = channelList
60 60 if self.channelList == None:
61 61 print("Missing channelList")
62 62 return dataOut
63 63 channelIndexList = []
64 if type(self.channelList) is list:
65 pass
66 elif type(self.channelList) is tuple:
67 self.channelList = list(self.channelList)
68 else:
69 self.channelList = self.channelList.tolist()
70
71 if type(dataOut.channelList) is not list:
72 dataOut.channelList = dataOut.channelList.tolist()
73 64
65 if type(dataOut.channelList) is not list: #read the array from HDF5
66 try:
67 dataOut.channelList = dataOut.channelList.tolist()
68 except Exception as e:
69 print("Select Channels: ",e)
74 70 for channel in self.channelList:
75 71 if channel not in dataOut.channelList:
76 72 raise ValueError("Channel %d is not in %s" %(channel, str(dataOut.channelList)))
77 73
78 74 index = dataOut.channelList.index(channel)
79 75 channelIndexList.append(index)
80 76 dataOut = self.selectChannelsByIndex(dataOut,channelIndexList)
81 77 return dataOut
82 78
83 79 def selectChannelsByIndex(self, dataOut, channelIndexList):
84 80 """
85 81 Selecciona un bloque de datos en base a canales segun el channelIndexList
86 82
87 83 Input:
88 84 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
89 85
90 86 Affected:
91 87 dataOut.data
92 88 dataOut.channelIndexList
93 89 dataOut.nChannels
94 90 dataOut.m_ProcessingHeader.totalSpectra
95 91 dataOut.systemHeaderObj.numChannels
96 92 dataOut.m_ProcessingHeader.blockSize
97 93
98 94 Return:
99 95 None
100 96 """
101
97 #print("selectChannelsByIndex")
102 98 # for channelIndex in channelIndexList:
103 99 # if channelIndex not in dataOut.channelIndexList:
104 100 # raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
105 101
106 102 if dataOut.type == 'Voltage':
107 103 if dataOut.flagDataAsBlock:
108 104 """
109 105 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
110 106 """
111 107 data = dataOut.data[channelIndexList,:,:]
112 108 else:
113 109 data = dataOut.data[channelIndexList,:]
114 110
115 111 dataOut.data = data
116 112 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
117 113 dataOut.channelList = range(len(channelIndexList))
118 114
119 115 elif dataOut.type == 'Spectra':
120 data_spc = dataOut.data_spc[channelIndexList, :]
121 dataOut.data_spc = data_spc
122 if hasattr(dataOut, 'data_dc') and dataOut.data_dc != None:
123 data_dc = dataOut.data_dc[channelIndexList, :]
124 dataOut.data_dc = data_dc
116 if hasattr(dataOut, 'data_spc'):
117 if dataOut.data_spc is None:
118 raise ValueError("data_spc is None")
119 return dataOut
120 else:
121 data_spc = dataOut.data_spc[channelIndexList, :]
122 dataOut.data_spc = data_spc
123
124 # if hasattr(dataOut, 'data_dc') :# and
125 # if dataOut.data_dc is None:
126 # raise ValueError("data_dc is None")
127 # return dataOut
128 # else:
129 # data_dc = dataOut.data_dc[channelIndexList, :]
130 # dataOut.data_dc = data_dc
125 131 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
126 132 dataOut.channelList = channelIndexList
127 133 dataOut = self.__selectPairsByChannel(dataOut,channelIndexList)
128 134
129 135 return dataOut
130 136
131 137 def __selectPairsByChannel(self, dataOut, channelList=None):
132
138 #print("__selectPairsByChannel")
133 139 if channelList == None:
134 140 return
135 141
136 142 pairsIndexListSelected = []
137 143 for pairIndex in dataOut.pairsIndexList:
138 144 # First pair
139 145 if dataOut.pairsList[pairIndex][0] not in channelList:
140 146 continue
141 147 # Second pair
142 148 if dataOut.pairsList[pairIndex][1] not in channelList:
143 149 continue
144 150
145 151 pairsIndexListSelected.append(pairIndex)
146
147 152 if not pairsIndexListSelected:
148 153 dataOut.data_cspc = None
149 154 dataOut.pairsList = []
150 155 return
151 156
152 157 dataOut.data_cspc = dataOut.data_cspc[pairsIndexListSelected]
153 158 dataOut.pairsList = [dataOut.pairsList[i]
154 159 for i in pairsIndexListSelected]
155 160
156 161 return dataOut
157 162
158 163 class selectHeights(Operation):
159 164
160 165 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
161 166 """
162 167 Selects a block of data given a range of height values such that
163 168 minHei <= height <= maxHei
164 169
165 170 Input:
166 171 minHei : minimum height value to consider
167 172 maxHei : maximum height value to consider
168 173
169 174 Affected:
170 175 Several values are changed indirectly through the selectHeightsByIndex method
171 176
172 177 Return:
173 178 1 if the method ran successfully, otherwise 0
174 179 """
175 180
176 181 self.dataOut = dataOut
177 182
178 183 if minHei and maxHei:
179 184
180 185 if (minHei < dataOut.heightList[0]):
181 186 minHei = dataOut.heightList[0]
182 187
183 188 if (maxHei > dataOut.heightList[-1]):
184 189 maxHei = dataOut.heightList[-1]
185 190
186 191 minIndex = 0
187 192 maxIndex = 0
188 193 heights = dataOut.heightList
189 194
190 195 inda = numpy.where(heights >= minHei)
191 196 indb = numpy.where(heights <= maxHei)
192 197
193 198 try:
194 199 minIndex = inda[0][0]
195 200 except:
196 201 minIndex = 0
197 202
198 203 try:
199 204 maxIndex = indb[0][-1]
200 205 except:
201 206 maxIndex = len(heights)
202 207
203 208 self.selectHeightsByIndex(minIndex, maxIndex)
204 209
205 210 return self.dataOut
206 211
207 212 def selectHeightsByIndex(self, minIndex, maxIndex):
208 213 """
209 214 Selects a block of data given a range of height indexes such that
210 215 minIndex <= index <= maxIndex
211 216
212 217 Input:
213 218 minIndex : minimum height index to consider
214 219 maxIndex : maximum height index to consider
215 220
216 221 Affected:
217 222 self.dataOut.data
218 223 self.dataOut.heightList
219 224
220 225 Return:
221 226 1 if the method ran successfully, otherwise 0
222 227 """
223 228
224 229 if self.dataOut.type == 'Voltage':
225 230 if (minIndex < 0) or (minIndex > maxIndex):
226 231 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
227 232
228 233 if (maxIndex >= self.dataOut.nHeights):
229 234 maxIndex = self.dataOut.nHeights
230 235
231 236 #voltage
232 237 if self.dataOut.flagDataAsBlock:
233 238 """
234 239 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
235 240 """
236 241 data = self.dataOut.data[:,:, minIndex:maxIndex]
237 242 else:
238 243 data = self.dataOut.data[:, minIndex:maxIndex]
239 244
240 245 # firstHeight = self.dataOut.heightList[minIndex]
241 246
242 247 self.dataOut.data = data
243 248 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
244 249
245 250 if self.dataOut.nHeights <= 1:
246 251 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
247 252 elif self.dataOut.type == 'Spectra':
248 253 if (minIndex < 0) or (minIndex > maxIndex):
249 254 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
250 255 minIndex, maxIndex))
251 256
252 257 if (maxIndex >= self.dataOut.nHeights):
253 258 maxIndex = self.dataOut.nHeights - 1
254 259
255 260 # Spectra
256 261 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
257 262
258 263 data_cspc = None
259 264 if self.dataOut.data_cspc is not None:
260 265 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
261 266
262 267 data_dc = None
263 268 if self.dataOut.data_dc is not None:
264 269 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
265 270
266 271 self.dataOut.data_spc = data_spc
267 272 self.dataOut.data_cspc = data_cspc
268 273 self.dataOut.data_dc = data_dc
269 274
270 275 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
271 276
272 277 return 1
273 278
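# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# selectHeights maps a physical range (minHei, maxHei) to index bounds with
# numpy.where on the height list and then slices every data array with them;
# the height values below are made up:

import numpy

heightList = 80.0 + numpy.arange(100) * 1.5          # heights in km
minHei, maxHei = 90.0, 120.0
minIndex = numpy.where(heightList >= minHei)[0][0]
maxIndex = numpy.where(heightList <= maxHei)[0][-1]
selected = heightList[minIndex:maxIndex + 1]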
274 279
275 280 class filterByHeights(Operation):
276 281
277 282 def run(self, dataOut, window):
278 283
279 284 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
280 285
281 286 if window == None:
282 287 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
283 288
284 289 newdelta = deltaHeight * window
285 290 r = dataOut.nHeights % window
286 291 newheights = (dataOut.nHeights-r)/window
287 292
288 293 if newheights <= 1:
289 294 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
290 295
291 296 if dataOut.flagDataAsBlock:
292 297 """
293 298 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
294 299 """
295 300 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
296 301 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
297 302 buffer = numpy.sum(buffer,3)
298 303
299 304 else:
300 305 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
301 306 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
302 307 buffer = numpy.sum(buffer,2)
303 308
304 309 dataOut.data = buffer
305 310 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
306 311 dataOut.windowOfFilter = window
307 312
308 313 return dataOut
309 314
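# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# filterByHeights sums groups of `window` consecutive height samples, so the
# number of heights shrinks and the height spacing grows by the same factor.
# The reshape/sum trick on a small synthetic block:

import numpy

window = 4
data = numpy.arange(2 * 103).reshape(2, 103)         # 2 channels, 103 heights
r = data.shape[1] % window                           # 3 leftover samples are dropped
buffer = data[:, :data.shape[1] - r]
buffer = buffer.reshape(2, (data.shape[1] - r) // window, window)
decimated = buffer.sum(axis=2)                       # shape (2, 25)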
310 315
311 316 class setH0(Operation):
312 317
313 318 def run(self, dataOut, h0, deltaHeight = None):
314 319
315 320 if not deltaHeight:
316 321 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
317 322
318 323 nHeights = dataOut.nHeights
319 324
320 325 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
321 326
322 327 dataOut.heightList = newHeiRange
323 328
324 329 return dataOut
325 330
326 331
327 332 class deFlip(Operation):
328 333
329 334 def run(self, dataOut, channelList = []):
330 335
331 336 data = dataOut.data.copy()
332 337
333 338 if dataOut.flagDataAsBlock:
334 339 flip = self.flip
335 340 profileList = list(range(dataOut.nProfiles))
336 341
337 342 if not channelList:
338 343 for thisProfile in profileList:
339 344 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
340 345 flip *= -1.0
341 346 else:
342 347 for thisChannel in channelList:
343 348 if thisChannel not in dataOut.channelList:
344 349 continue
345 350
346 351 for thisProfile in profileList:
347 352 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
348 353 flip *= -1.0
349 354
350 355 self.flip = flip
351 356
352 357 else:
353 358 if not channelList:
354 359 data[:,:] = data[:,:]*self.flip
355 360 else:
356 361 for thisChannel in channelList:
357 362 if thisChannel not in dataOut.channelList:
358 363 continue
359 364
360 365 data[thisChannel,:] = data[thisChannel,:]*self.flip
361 366
362 367 self.flip *= -1.
363 368
364 369 dataOut.data = data
365 370
366 371 return dataOut
367 372
368 373
369 374 class setAttribute(Operation):
370 375 '''
371 376 Set an arbitrary attribute(s) to dataOut
372 377 '''
373 378
374 379 def __init__(self):
375 380
376 381 Operation.__init__(self)
377 382 self._ready = False
378 383
379 384 def run(self, dataOut, **kwargs):
380 385
381 386 for key, value in kwargs.items():
382 387 setattr(dataOut, key, value)
383 388
384 389 return dataOut
385 390
386 391
387 392 @MPDecorator
388 393 class printAttribute(Operation):
389 394 '''
390 395 Print an arbitrary attribute of dataOut
391 396 '''
392 397
393 398 def __init__(self):
394 399
395 400 Operation.__init__(self)
396 401
397 402 def run(self, dataOut, attributes):
398 403
399 404 if isinstance(attributes, str):
400 405 attributes = [attributes]
401 406 for attr in attributes:
402 407 if hasattr(dataOut, attr):
403 408 log.log(getattr(dataOut, attr), attr)
404 409
405 410
406 411 class interpolateHeights(Operation):
407 412
408 413 def run(self, dataOut, topLim, botLim):
409 414 #69 to 72 for julia
410 415 #82-84 for meteors
411 416 if len(numpy.shape(dataOut.data))==2:
412 417 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
413 418 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
414 419 #dataOut.data[:,botLim:limSup+1] = sampInterp
415 420 dataOut.data[:,botLim:topLim+1] = sampInterp
416 421 else:
417 422 nHeights = dataOut.data.shape[2]
418 423 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
419 424 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
420 425 f = interpolate.interp1d(x, y, axis = 2)
421 426 xnew = numpy.arange(botLim,topLim+1)
422 427 ynew = f(xnew)
423 428 dataOut.data[:,:,botLim:topLim+1] = ynew
424 429
425 430 return dataOut
426 431
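# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# interpolateHeights rebuilds the gap botLim..topLim from the samples on either
# side. For block data it uses scipy's interp1d along the height axis; the
# shapes below are made up:

import numpy
from scipy import interpolate

botLim, topLim = 3, 5
data = numpy.random.rand(2, 8, 10)                   # channels, profiles, heights
x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, data.shape[2])))
f = interpolate.interp1d(x, data[:, :, x], axis=2)
data[:, :, botLim:topLim + 1] = f(numpy.arange(botLim, topLim + 1))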
427 432
428 433 class CohInt(Operation):
429 434
430 435 isConfig = False
431 436 __profIndex = 0
432 437 __byTime = False
433 438 __initime = None
434 439 __lastdatatime = None
435 440 __integrationtime = None
436 441 __buffer = None
437 442 __bufferStride = []
438 443 __dataReady = False
439 444 __profIndexStride = 0
440 445 __dataToPutStride = False
441 446 n = None
442 447
443 448 def __init__(self, **kwargs):
444 449
445 450 Operation.__init__(self, **kwargs)
446 451
447 452 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
448 453 """
449 454 Set the parameters of the integration class.
450 455
451 456 Inputs:
452 457
453 458 n : Number of coherent integrations
454 459 timeInterval : Integration time. Ignored when the parameter "n" is given
455 460 overlapping :
456 461 """
457 462
458 463 self.__initime = None
459 464 self.__lastdatatime = 0
460 465 self.__buffer = None
461 466 self.__dataReady = False
462 467 self.byblock = byblock
463 468 self.stride = stride
464 469
465 470 if n == None and timeInterval == None:
466 471 raise ValueError("n or timeInterval should be specified ...")
467 472
468 473 if n != None:
469 474 self.n = n
470 475 self.__byTime = False
471 476 else:
472 477 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
473 478 self.n = 9999
474 479 self.__byTime = True
475 480
476 481 if overlapping:
477 482 self.__withOverlapping = True
478 483 self.__buffer = None
479 484 else:
480 485 self.__withOverlapping = False
481 486 self.__buffer = 0
482 487
483 488 self.__profIndex = 0
484 489
485 490 def putData(self, data):
486 491
487 492 """
488 493 Add a profile to the __buffer and increase the __profileIndex by one
489 494
490 495 """
491 496
492 497 if not self.__withOverlapping:
493 498 self.__buffer += data.copy()
494 499 self.__profIndex += 1
495 500 return
496 501
497 502 #Overlapping data
498 503 nChannels, nHeis = data.shape
499 504 data = numpy.reshape(data, (1, nChannels, nHeis))
500 505
501 506 #If the buffer is empty then it takes the data value
502 507 if self.__buffer is None:
503 508 self.__buffer = data
504 509 self.__profIndex += 1
505 510 return
506 511
508 513 #If the buffer length is lower than n then stack the data value
508 513 if self.__profIndex < self.n:
509 514 self.__buffer = numpy.vstack((self.__buffer, data))
510 515 self.__profIndex += 1
511 516 return
512 517
513 518 #If the buffer length is equal to n then replace the last buffer value with the data value
514 519 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
515 520 self.__buffer[self.n-1] = data
516 521 self.__profIndex = self.n
517 522 return
518 523
519 524
520 525 def pushData(self):
521 526 """
522 527 Return the sum of the last profiles and the profiles used in the sum.
523 528
524 529 Affected:
525 530
526 531 self.__profileIndex
527 532
528 533 """
529 534
530 535 if not self.__withOverlapping:
531 536 data = self.__buffer
532 537 n = self.__profIndex
533 538
534 539 self.__buffer = 0
535 540 self.__profIndex = 0
536 541
537 542 return data, n
538 543
539 544 #Integration with Overlapping
540 545 data = numpy.sum(self.__buffer, axis=0)
541 546 # print data
542 547 # raise
543 548 n = self.__profIndex
544 549
545 550 return data, n
546 551
547 552 def byProfiles(self, data):
548 553
549 554 self.__dataReady = False
550 555 avgdata = None
551 556 # n = None
552 557 # print data
553 558 # raise
554 559 self.putData(data)
555 560
556 561 if self.__profIndex == self.n:
557 562 avgdata, n = self.pushData()
558 563 self.__dataReady = True
559 564
560 565 return avgdata
561 566
562 567 def byTime(self, data, datatime):
563 568
564 569 self.__dataReady = False
565 570 avgdata = None
566 571 n = None
567 572
568 573 self.putData(data)
569 574
570 575 if (datatime - self.__initime) >= self.__integrationtime:
571 576 avgdata, n = self.pushData()
572 577 self.n = n
573 578 self.__dataReady = True
574 579
575 580 return avgdata
576 581
577 582 def integrateByStride(self, data, datatime):
578 583 # print data
579 584 if self.__profIndex == 0:
580 585 self.__buffer = [[data.copy(), datatime]]
581 586 else:
582 587 self.__buffer.append([data.copy(),datatime])
583 588 self.__profIndex += 1
584 589 self.__dataReady = False
585 590
586 591 if self.__profIndex == self.n * self.stride :
587 592 self.__dataToPutStride = True
588 593 self.__profIndexStride = 0
589 594 self.__profIndex = 0
590 595 self.__bufferStride = []
591 596 for i in range(self.stride):
592 597 current = self.__buffer[i::self.stride]
593 598 data = numpy.sum([t[0] for t in current], axis=0)
594 599 avgdatatime = numpy.average([t[1] for t in current])
595 600 # print data
596 601 self.__bufferStride.append((data, avgdatatime))
597 602
598 603 if self.__dataToPutStride:
599 604 self.__dataReady = True
600 605 self.__profIndexStride += 1
601 606 if self.__profIndexStride == self.stride:
602 607 self.__dataToPutStride = False
603 608 # print self.__bufferStride[self.__profIndexStride - 1]
604 609 # raise
605 610 return self.__bufferStride[self.__profIndexStride - 1]
606 611
607 612
608 613 return None, None
609 614
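# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# integrateByStride interleaves the buffered profiles: with stride s and n
# integrations it collects n*s profiles and then sums every s-th one together,
# producing s independent integrated outputs per cycle. The grouping step on
# synthetic profiles:

import numpy

n, stride = 4, 3
profiles = [numpy.full((2, 5), k, dtype=float) for k in range(n * stride)]

outputs = []
for i in range(stride):
    current = profiles[i::stride]                # every stride-th profile
    outputs.append(numpy.sum(current, axis=0))   # one integrated block per phase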
610 615 def integrate(self, data, datatime=None):
611 616
612 617 if self.__initime == None:
613 618 self.__initime = datatime
614 619
615 620 if self.__byTime:
616 621 avgdata = self.byTime(data, datatime)
617 622 else:
618 623 avgdata = self.byProfiles(data)
619 624
620 625
621 626 self.__lastdatatime = datatime
622 627
623 628 if avgdata is None:
624 629 return None, None
625 630
626 631 avgdatatime = self.__initime
627 632
628 633 deltatime = datatime - self.__lastdatatime
629 634
630 635 if not self.__withOverlapping:
631 636 self.__initime = datatime
632 637 else:
633 638 self.__initime += deltatime
634 639
635 640 return avgdata, avgdatatime
636 641
637 642 def integrateByBlock(self, dataOut):
638 643
639 644 times = int(dataOut.data.shape[1]/self.n)
640 645 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
641 646
642 647 id_min = 0
643 648 id_max = self.n
644 649
645 650 for i in range(times):
646 651 junk = dataOut.data[:,id_min:id_max,:]
647 652 avgdata[:,i,:] = junk.sum(axis=1)
648 653 id_min += self.n
649 654 id_max += self.n
650 655
651 656 timeInterval = dataOut.ippSeconds*self.n
652 657 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
653 658 self.__dataReady = True
654 659 return avgdata, avgdatatime
655 660
656 661 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
657 662
658 663 if not self.isConfig:
659 664 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
660 665 self.isConfig = True
661 666
662 667 if dataOut.flagDataAsBlock:
663 668 """
664 669 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
665 670 """
666 671 avgdata, avgdatatime = self.integrateByBlock(dataOut)
667 672 dataOut.nProfiles /= self.n
668 673 else:
669 674 if stride is None:
670 675 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
671 676 else:
672 677 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
673 678
674 679
675 680 # dataOut.timeInterval *= n
676 681 dataOut.flagNoData = True
677 682
678 683 if self.__dataReady:
679 684 dataOut.data = avgdata
680 685 if not dataOut.flagCohInt:
681 686 dataOut.nCohInt *= self.n
682 687 dataOut.flagCohInt = True
683 688 dataOut.utctime = avgdatatime
684 689 # print avgdata, avgdatatime
685 690 # raise
686 691 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
687 692 dataOut.flagNoData = False
688 693 return dataOut
689 694
690 695 class Decoder(Operation):
691 696
692 697 isConfig = False
693 698 __profIndex = 0
694 699
695 700 code = None
696 701
697 702 nCode = None
698 703 nBaud = None
699 704
700 705 def __init__(self, **kwargs):
701 706
702 707 Operation.__init__(self, **kwargs)
703 708
704 709 self.times = None
705 710 self.osamp = None
706 711 # self.__setValues = False
707 712 self.isConfig = False
708 713 self.setupReq = False
709 714 def setup(self, code, osamp, dataOut):
710 715
711 716 self.__profIndex = 0
712 717
713 718 self.code = code
714 719
715 720 self.nCode = len(code)
716 721 self.nBaud = len(code[0])
717 722 if (osamp != None) and (osamp >1):
718 723 self.osamp = osamp
719 724 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
720 725 self.nBaud = self.nBaud*self.osamp
721 726
722 727 self.__nChannels = dataOut.nChannels
723 728 self.__nProfiles = dataOut.nProfiles
724 729 self.__nHeis = dataOut.nHeights
725 730
726 731 if self.__nHeis < self.nBaud:
727 732 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
728 733
729 734 #Frequency
730 735 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
731 736
732 737 __codeBuffer[:,0:self.nBaud] = self.code
733 738
734 739 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
735 740
736 741 if dataOut.flagDataAsBlock:
737 742
738 743 self.ndatadec = self.__nHeis #- self.nBaud + 1
739 744
740 745 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
741 746
742 747 else:
743 748
744 749 #Time
745 750 self.ndatadec = self.__nHeis #- self.nBaud + 1
746 751
747 752 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
748 753
749 754 def __convolutionInFreq(self, data):
750 755
751 756 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
752 757
753 758 fft_data = numpy.fft.fft(data, axis=1)
754 759
755 760 conv = fft_data*fft_code
756 761
757 762 data = numpy.fft.ifft(conv,axis=1)
758 763
759 764 return data
760 765
761 766 def __convolutionInFreqOpt(self, data):
762 767
763 768 raise NotImplementedError
764 769
765 770 def __convolutionInTime(self, data):
766 771
767 772 code = self.code[self.__profIndex]
768 773 for i in range(self.__nChannels):
769 774 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
770 775
771 776 return self.datadecTime
772 777
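# --- Illustrative sketch (editor's annotation, not part of this changeset) ---
# Time-domain decoding correlates each received profile with its pulse code and
# keeps the causal part of the result, exactly as in __convolutionInTime above.
# On a synthetic profile with a single coded echo:

import numpy

code = numpy.array([1, 1, -1, 1], dtype=float)      # 4-baud code
profile = numpy.zeros(32, dtype=complex)
profile[10:14] = code * (2 + 1j)                    # echo placed at range gate 10

decoded = numpy.correlate(profile, code, mode='full')[len(code) - 1:]
# decoded peaks at index 10 with amplitude len(code) * (2 + 1j)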
773 778 def __convolutionByBlockInTime(self, data):
774 779
775 780 repetitions = int(self.__nProfiles / self.nCode)
776 781 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
777 782 junk = junk.flatten()
778 783 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
779 784 profilesList = range(self.__nProfiles)
780 785
781 786 for i in range(self.__nChannels):
782 787 for j in profilesList:
783 788 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
784 789 return self.datadecTime
785 790
786 791 def __convolutionByBlockInFreq(self, data):
787 792
788 793 raise NotImplementedError("Decoder by frequency for blocks not implemented")
789 794
790 795
791 796 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
792 797
793 798 fft_data = numpy.fft.fft(data, axis=2)
794 799
795 800 conv = fft_data*fft_code
796 801
797 802 data = numpy.fft.ifft(conv,axis=2)
798 803
799 804 return data
800 805
801 806
802 807 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
803 808
804 809 if dataOut.flagDecodeData:
805 810 print("This data is already decoded, recoding again ...")
806 811
807 812 if not self.isConfig:
808 813
809 814 if code is None:
810 815 if dataOut.code is None:
811 816 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
812 817
813 818 code = dataOut.code
814 819 else:
815 820 code = numpy.array(code).reshape(nCode,nBaud)
816 821 self.setup(code, osamp, dataOut)
817 822
818 823 self.isConfig = True
819 824
820 825 if mode == 3:
821 826 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
822 827
823 828 if times != None:
824 829 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
825 830
826 831 if self.code is None:
827 832 print("Fail decoding: Code is not defined.")
828 833 return
829 834
830 835 self.__nProfiles = dataOut.nProfiles
831 836 datadec = None
832 837
833 838 if mode == 3:
834 839 mode = 0
835 840
836 841 if dataOut.flagDataAsBlock:
837 842 """
838 843 Decoding when data have been read as block,
839 844 """
840 845
841 846 if mode == 0:
842 847 datadec = self.__convolutionByBlockInTime(dataOut.data)
843 848 if mode == 1:
844 849 datadec = self.__convolutionByBlockInFreq(dataOut.data)
845 850 else:
846 851 """
847 852 Decoding when data have been read profile by profile
848 853 """
849 854 if mode == 0:
850 855 datadec = self.__convolutionInTime(dataOut.data)
851 856
852 857 if mode == 1:
853 858 datadec = self.__convolutionInFreq(dataOut.data)
854 859
855 860 if mode == 2:
856 861 datadec = self.__convolutionInFreqOpt(dataOut.data)
857 862
858 863 if datadec is None:
859 864 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
860 865
861 866 dataOut.code = self.code
862 867 dataOut.nCode = self.nCode
863 868 dataOut.nBaud = self.nBaud
864 869
865 870 dataOut.data = datadec
866 871
867 872 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
868 873
869 874 dataOut.flagDecodeData = True # assume the data is decoded
870 875
871 876 if self.__profIndex == self.nCode-1:
872 877 self.__profIndex = 0
873 878 return dataOut
874 879
875 880 self.__profIndex += 1
876 881
877 882 return dataOut
878 883 # dataOut.flagDeflipData = True # assume the data is not un-flipped
879 884
880 885
881 886 class ProfileConcat(Operation):
882 887
883 888 isConfig = False
884 889 buffer = None
885 890
886 891 def __init__(self, **kwargs):
887 892
888 893 Operation.__init__(self, **kwargs)
889 894 self.profileIndex = 0
890 895
891 896 def reset(self):
892 897 self.buffer = numpy.zeros_like(self.buffer)
893 898 self.start_index = 0
894 899 self.times = 1
895 900
896 901 def setup(self, data, m, n=1):
897 902 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
898 903 self.nHeights = data.shape[1]#.nHeights
899 904 self.start_index = 0
900 905 self.times = 1
901 906
902 907 def concat(self, data):
903 908
904 909 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
905 910 self.start_index = self.start_index + self.nHeights
906 911
907 912 def run(self, dataOut, m):
908 913 dataOut.flagNoData = True
909 914
910 915 if not self.isConfig:
911 916 self.setup(dataOut.data, m, 1)
912 917 self.isConfig = True
913 918
914 919 if dataOut.flagDataAsBlock:
915 920 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
916 921
917 922 else:
918 923 self.concat(dataOut.data)
919 924 self.times += 1
920 925 if self.times > m:
921 926 dataOut.data = self.buffer
922 927 self.reset()
923 928 dataOut.flagNoData = False
924 929 # more header and dataOut properties should be updated, for example the heights
925 930 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
926 931 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
927 932 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
928 933 dataOut.ippSeconds *= m
929 934 return dataOut
930 935
931 936 class ProfileSelector(Operation):
932 937
933 938 profileIndex = None
934 939 # Total number of profiles
935 940 nProfiles = None
936 941
937 942 def __init__(self, **kwargs):
938 943
939 944 Operation.__init__(self, **kwargs)
940 945 self.profileIndex = 0
941 946
942 947 def incProfileIndex(self):
943 948
944 949 self.profileIndex += 1
945 950
946 951 if self.profileIndex >= self.nProfiles:
947 952 self.profileIndex = 0
948 953
949 954 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
950 955
951 956 if profileIndex < minIndex:
952 957 return False
953 958
954 959 if profileIndex > maxIndex:
955 960 return False
956 961
957 962 return True
958 963
959 964 def isThisProfileInList(self, profileIndex, profileList):
960 965
961 966 if profileIndex not in profileList:
962 967 return False
963 968
964 969 return True
965 970
966 971 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
967 972
968 973 """
969 974 ProfileSelector:
970 975
971 976 Inputs:
972 977 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
973 978
974 979 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
975 980
976 981 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
977 982
978 983 """
979 984
980 985 if rangeList is not None:
981 986 if type(rangeList[0]) not in (tuple, list):
982 987 rangeList = [rangeList]
983 988
984 989 dataOut.flagNoData = True
985 990
986 991 if dataOut.flagDataAsBlock:
987 992 """
988 993 data dimension = [nChannels, nProfiles, nHeis]
989 994 """
990 995 if profileList != None:
991 996 dataOut.data = dataOut.data[:,profileList,:]
992 997
993 998 if profileRangeList != None:
994 999 minIndex = profileRangeList[0]
995 1000 maxIndex = profileRangeList[1]
996 1001 profileList = list(range(minIndex, maxIndex+1))
997 1002
998 1003 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
999 1004
1000 1005 if rangeList != None:
1001 1006
1002 1007 profileList = []
1003 1008
1004 1009 for thisRange in rangeList:
1005 1010 minIndex = thisRange[0]
1006 1011 maxIndex = thisRange[1]
1007 1012
1008 1013 profileList.extend(list(range(minIndex, maxIndex+1)))
1009 1014
1010 1015 dataOut.data = dataOut.data[:,profileList,:]
1011 1016
1012 1017 dataOut.nProfiles = len(profileList)
1013 1018 dataOut.profileIndex = dataOut.nProfiles - 1
1014 1019 dataOut.flagNoData = False
1015 1020
1016 1021 return dataOut
1017 1022
1018 1023 """
1019 1024 data dimension = [nChannels, nHeis]
1020 1025 """
1021 1026
1022 1027 if profileList != None:
1023 1028
1024 1029 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1025 1030
1026 1031 self.nProfiles = len(profileList)
1027 1032 dataOut.nProfiles = self.nProfiles
1028 1033 dataOut.profileIndex = self.profileIndex
1029 1034 dataOut.flagNoData = False
1030 1035
1031 1036 self.incProfileIndex()
1032 1037 return dataOut
1033 1038
1034 1039 if profileRangeList != None:
1035 1040
1036 1041 minIndex = profileRangeList[0]
1037 1042 maxIndex = profileRangeList[1]
1038 1043
1039 1044 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1040 1045
1041 1046 self.nProfiles = maxIndex - minIndex + 1
1042 1047 dataOut.nProfiles = self.nProfiles
1043 1048 dataOut.profileIndex = self.profileIndex
1044 1049 dataOut.flagNoData = False
1045 1050
1046 1051 self.incProfileIndex()
1047 1052 return dataOut
1048 1053
1049 1054 if rangeList != None:
1050 1055
1051 1056 nProfiles = 0
1052 1057
1053 1058 for thisRange in rangeList:
1054 1059 minIndex = thisRange[0]
1055 1060 maxIndex = thisRange[1]
1056 1061
1057 1062 nProfiles += maxIndex - minIndex + 1
1058 1063
1059 1064 for thisRange in rangeList:
1060 1065
1061 1066 minIndex = thisRange[0]
1062 1067 maxIndex = thisRange[1]
1063 1068
1064 1069 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1065 1070
1066 1071 self.nProfiles = nProfiles
1067 1072 dataOut.nProfiles = self.nProfiles
1068 1073 dataOut.profileIndex = self.profileIndex
1069 1074 dataOut.flagNoData = False
1070 1075
1071 1076 self.incProfileIndex()
1072 1077
1073 1078 break
1074 1079
1075 1080 return dataOut
1076 1081
1077 1082
1078 1083 if beam != None: #beam is only for AMISR data
1079 1084 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1080 1085 dataOut.flagNoData = False
1081 1086 dataOut.profileIndex = self.profileIndex
1082 1087
1083 1088 self.incProfileIndex()
1084 1089
1085 1090 return dataOut
1086 1091
1087 1092 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1088 1093
1089 1094
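# A hedged usage sketch for ProfileSelector through this package's project API
# (assumption: 'proc_unit' stands for a voltage processing unit created elsewhere
# with addProcUnit/addOperation; the parameter values are only examples):
#
#   op = proc_unit.addOperation(name='ProfileSelector', optype='other')
#   op.addParameter(name='profileRangeList', value='120,183')
#   # or several disjoint ranges:
#   # op.addParameter(name='rangeList', value='(0,63),(128,191)')
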
1090 1095 class Reshaper(Operation):
1091 1096
1092 1097 def __init__(self, **kwargs):
1093 1098
1094 1099 Operation.__init__(self, **kwargs)
1095 1100
1096 1101 self.__buffer = None
1097 1102 self.__nitems = 0
1098 1103
1099 1104 def __appendProfile(self, dataOut, nTxs):
1100 1105
1101 1106 if self.__buffer is None:
1102 1107 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1103 1108 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1104 1109
1105 1110 ini = dataOut.nHeights * self.__nitems
1106 1111 end = ini + dataOut.nHeights
1107 1112
1108 1113 self.__buffer[:, ini:end] = dataOut.data
1109 1114
1110 1115 self.__nitems += 1
1111 1116
1112 1117 return int(self.__nitems*nTxs)
1113 1118
1114 1119 def __getBuffer(self):
1115 1120
1116 1121 if self.__nitems == int(1./self.__nTxs):
1117 1122
1118 1123 self.__nitems = 0
1119 1124
1120 1125 return self.__buffer.copy()
1121 1126
1122 1127 return None
1123 1128
1124 1129 def __checkInputs(self, dataOut, shape, nTxs):
1125 1130
1126 1131 if shape is None and nTxs is None:
1127 1132 raise ValueError("Reshaper: shape or nTxs factor should be defined")
1128 1133
1129 1134 if nTxs:
1130 1135 if nTxs < 0:
1131 1136 raise ValueError("nTxs should be greater than 0")
1132 1137
1133 1138 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1134 1139 raise ValueError("nProfiles = %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1135 1140
1136 1141 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1137 1142
1138 1143 return shape, nTxs
1139 1144
1140 1145 if len(shape) != 2 and len(shape) != 3:
1141 1146 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Current data shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1142 1147
1143 1148 if len(shape) == 2:
1144 1149 shape_tuple = [dataOut.nChannels]
1145 1150 shape_tuple.extend(shape)
1146 1151 else:
1147 1152 shape_tuple = list(shape)
1148 1153
1149 1154 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1150 1155
1151 1156 return shape_tuple, nTxs
1152 1157
1153 1158 def run(self, dataOut, shape=None, nTxs=None):
1154 1159
1155 1160 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1156 1161
1157 1162 dataOut.flagNoData = True
1158 1163 profileIndex = None
1159 1164
1160 1165 if dataOut.flagDataAsBlock:
1161 1166
1162 1167 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1163 1168 dataOut.flagNoData = False
1164 1169
1165 1170 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1166 1171
1167 1172 else:
1168 1173
1169 1174 if self.__nTxs < 1:
1170 1175
1171 1176 self.__appendProfile(dataOut, self.__nTxs)
1172 1177 new_data = self.__getBuffer()
1173 1178
1174 1179 if new_data is not None:
1175 1180 dataOut.data = new_data
1176 1181 dataOut.flagNoData = False
1177 1182
1178 1183 profileIndex = int(dataOut.profileIndex*self.__nTxs)
1179 1184
1180 1185 else:
1181 1186 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1182 1187
1183 1188 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1184 1189
1185 1190 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1186 1191
1187 1192 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1188 1193
1189 1194 dataOut.profileIndex = profileIndex
1190 1195
1191 1196 dataOut.ippSeconds /= self.__nTxs
1192 1197
1193 1198 return dataOut
1194 1199
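# Sketch of the shape/nTxs relation that Reshaper enforces above (illustrative
# numbers; nTxs > 1 splits each profile into nTxs shorter ones, so ippSeconds
# is divided by nTxs):
#
#   import numpy
#   nChannels, nProfiles, nHeights, nTxs = 2, 100, 1000, 2
#   data = numpy.zeros((nChannels, nProfiles, nHeights), dtype=complex)
#   reshaped = numpy.reshape(data, (nChannels, nProfiles * nTxs, nHeights // nTxs))
#   assert reshaped.shape == (2, 200, 500)
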
1195 1200 class SplitProfiles(Operation):
1196 1201
1197 1202 def __init__(self, **kwargs):
1198 1203
1199 1204 Operation.__init__(self, **kwargs)
1200 1205
1201 1206 def run(self, dataOut, n):
1202 1207
1203 1208 dataOut.flagNoData = True
1204 1209 profileIndex = None
1205 1210
1206 1211 if dataOut.flagDataAsBlock:
1207 1212
1208 1213 #nchannels, nprofiles, nsamples
1209 1214 shape = dataOut.data.shape
1210 1215
1211 1216 if shape[2] % n != 0:
1212 1217 raise ValueError("Could not split the data: the number of samples (%d) has to be a multiple of n=%d" %(shape[2], n))
1213 1218
1214 1219 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1215 1220
1216 1221 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1217 1222 dataOut.flagNoData = False
1218 1223
1219 1224 profileIndex = int(dataOut.nProfiles/n) - 1
1220 1225
1221 1226 else:
1222 1227
1223 1228 raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1224 1229
1225 1230 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1226 1231
1227 1232 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1228 1233
1229 1234 dataOut.nProfiles = int(dataOut.nProfiles*n)
1230 1235
1231 1236 dataOut.profileIndex = profileIndex
1232 1237
1233 1238 dataOut.ippSeconds /= n
1234 1239
1235 1240 return dataOut
1236 1241
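# Quick check of the SplitProfiles reshape above (illustrative values; the
# number of samples per profile must be divisible by n):
#
#   import numpy
#   n = 4
#   data = numpy.zeros((3, 10, 80))                          # nchannels, nprofiles, nsamples
#   split = numpy.reshape(data, (3, 10 * n, 80 // n))
#   assert split.shape == (3, 40, 20)
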
1237 1242 class CombineProfiles(Operation):
1238 1243 def __init__(self, **kwargs):
1239 1244
1240 1245 Operation.__init__(self, **kwargs)
1241 1246
1242 1247 self.__remData = None
1243 1248 self.__profileIndex = 0
1244 1249
1245 1250 def run(self, dataOut, n):
1246 1251
1247 1252 dataOut.flagNoData = True
1248 1253 profileIndex = None
1249 1254
1250 1255 if dataOut.flagDataAsBlock:
1251 1256
1252 1257 #nchannels, nprofiles, nsamples
1253 1258 shape = dataOut.data.shape
1254 1259 new_shape = shape[0], int(shape[1]/n), shape[2]*n
1255 1260
1256 1261 if shape[1] % n != 0:
1257 1262 raise ValueError("Could not combine the data: the number of profiles (%d) has to be a multiple of n=%d" %(shape[1], n))
1258 1263
1259 1264 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1260 1265 dataOut.flagNoData = False
1261 1266
1262 1267 profileIndex = int(dataOut.nProfiles*n) - 1
1263 1268
1264 1269 else:
1265 1270
1266 1271 #nchannels, nsamples
1267 1272 if self.__remData is None:
1268 1273 newData = dataOut.data
1269 1274 else:
1270 1275 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1271 1276
1272 1277 self.__profileIndex += 1
1273 1278
1274 1279 if self.__profileIndex < n:
1275 1280 self.__remData = newData
1276 1281 #continue
1277 1282 return
1278 1283
1279 1284 self.__profileIndex = 0
1280 1285 self.__remData = None
1281 1286
1282 1287 dataOut.data = newData
1283 1288 dataOut.flagNoData = False
1284 1289
1285 1290 profileIndex = int(dataOut.profileIndex/n)
1286 1291
1287 1292
1288 1293 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1289 1294
1290 1295 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1291 1296
1292 1297 dataOut.nProfiles = int(dataOut.nProfiles/n)
1293 1298
1294 1299 dataOut.profileIndex = profileIndex
1295 1300
1296 1301 dataOut.ippSeconds *= n
1297 1302
1298 1303 return dataOut
1299 1304
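# A hedged sketch of the profile-by-profile branch of CombineProfiles: n
# consecutive profiles are concatenated along the height axis before being
# released as a single profile (names below are illustrative only):
#
#   import numpy
#   n, rem = 3, None
#   for k in range(n):
#       profile = numpy.zeros((2, 50), dtype=complex)        # nchannels, nsamples
#       rem = profile if rem is None else numpy.concatenate((rem, profile), axis=1)
#   assert rem.shape == (2, 150)                             # heights grow n times
#   # ippSeconds is multiplied by n and nProfiles divided by n, as in run() above.
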
1300 1305 class PulsePairVoltage(Operation):
1301 1306 '''
1302 1307 Function PulsePair(Signal Power, Velocity)
1303 1308 The real component of Lag[0] provides Intensity Information
1304 1309 The phase (angle) of Lag[1] provides Velocity Information
1305 1310
1306 1311 Configuration Parameters:
1307 1312 nPRF = Number of PRF pulses (profiles) used per estimate
1308 1313 theta = Azimuth angle boundaries in degrees
1309 1314
1310 1315 Input:
1311 1316 self.dataOut
1312 1317 lag[N]
1313 1318 Affected:
1314 1319 self.dataOut.dataPP_POW, dataPP_POWER, dataPP_DOP, dataPP_SNR, dataPP_WIDTH
1315 1320 '''
1316 1321 isConfig = False
1317 1322 __profIndex = 0
1318 1323 __initime = None
1319 1324 __lastdatatime = None
1320 1325 __buffer = None
1321 1326 noise = None
1322 1327 __dataReady = False
1323 1328 n = None
1324 1329 __nch = 0
1325 1330 __nHeis = 0
1326 1331 removeDC = False
1327 1332 ipp = None
1328 1333 lambda_ = 0
1329 1334
1330 1335 def __init__(self,**kwargs):
1331 1336 Operation.__init__(self,**kwargs)
1332 1337
1333 1338 def setup(self, dataOut, n = None, removeDC=False):
1334 1339 '''
1335 1340 n = number of input PRFs (profiles to accumulate)
1336 1341 '''
1337 1342 self.__initime = None
1338 1343 self.__lastdatatime = 0
1339 1344 self.__dataReady = False
1340 1345 self.__buffer = 0
1341 1346 self.__profIndex = 0
1342 1347 self.noise = None
1343 1348 self.__nch = dataOut.nChannels
1344 1349 self.__nHeis = dataOut.nHeights
1345 1350 self.removeDC = removeDC
1346 1351 self.lambda_ = 3.0e8/(9345.0e6)
1347 1352 self.ippSec = dataOut.ippSeconds
1348 1353 self.nCohInt = dataOut.nCohInt
1349 1354
1350 1355 if n == None:
1351 1356 raise ValueError("n should be specified.")
1352 1357
1353 1358 if n != None:
1354 1359 if n<2:
1355 1360 raise ValueError("n should be greater than or equal to 2")
1356 1361
1357 1362 self.n = n
1358 1363 self.__nProf = n
1359 1364
1360 1365 self.__buffer = numpy.zeros((dataOut.nChannels,
1361 1366 n,
1362 1367 dataOut.nHeights),
1363 1368 dtype='complex')
1364 1369
1365 1370 def putData(self,data):
1366 1371 '''
1367 1372 Add a profile to the __buffer and increase the __profIndex by one
1368 1373 '''
1369 1374 self.__buffer[:,self.__profIndex,:]= data
1370 1375 self.__profIndex += 1
1371 1376 return
1372 1377
1373 1378 def pushData(self,dataOut):
1374 1379 '''
1375 1380 Return the PULSEPAIR and the profiles used in the operation
1376 1381 Affected : self.__profIndex, self.__buffer
1377 1382 '''
1378 1383 #----------------- Remove DC-----------------------------------
1379 1384 if self.removeDC==True:
1380 1385 mean = numpy.mean(self.__buffer,1)
1381 1386 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1382 1387 dc= numpy.tile(tmp,[1,self.__nProf,1])
1383 1388 self.__buffer = self.__buffer - dc
1384 1389 #------------------ Power computation ------------------------
1385 1390 pair0 = self.__buffer*numpy.conj(self.__buffer)
1386 1391 pair0 = pair0.real
1387 1392 lag_0 = numpy.sum(pair0,1)
1388 1393 #------------------ Noise estimate per channel ----------------
1389 1394 self.noise = numpy.zeros(self.__nch)
1390 1395 for i in range(self.__nch):
1391 1396 daux = numpy.sort(pair0[i,:,:],axis= None)
1392 1397 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1393 1398
1394 1399 self.noise = self.noise.reshape(self.__nch,1)
1395 1400 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1396 1401 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1397 1402 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1398 1403 #------------------ Received power = P, Signal power = S, Noise = N --
1399 1404 #------------------ P = S + N , P = lag_0/N ---------------------------------
1400 1405 #-------------------- Power --------------------------------------------------
1401 1406 data_power = lag_0/(self.n*self.nCohInt)
1402 1407 #------------------ Signal ---------------------------------------------------
1403 1408 data_intensity = pair0 - noise_buffer
1404 1409 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1405 1410 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1406 1411 for i in range(self.__nch):
1407 1412 for j in range(self.__nHeis):
1408 1413 if data_intensity[i][j] < 0:
1409 1414 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1410 1415
1411 1416 #----------------- Doppler frequency and velocity computation --------
1412 1417 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1413 1418 lag_1 = numpy.sum(pair1,1)
1414 1419 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1415 1420 data_velocity = (self.lambda_/2.0)*data_freq
1416 1421
1417 1422 #---------------- Estimated mean signal power -----------
1418 1423 lag_0 = lag_0/self.n
1419 1424 S = lag_0-self.noise
1420 1425
1421 1426 #---------------- Mean Doppler frequency ---------------------
1422 1427 lag_1 = lag_1/(self.n-1)
1423 1428 R1 = numpy.abs(lag_1)
1424 1429
1425 1430 #---------------- SNR computation ----------------------------------
1426 1431 data_snrPP = S/self.noise
1427 1432 for i in range(self.__nch):
1428 1433 for j in range(self.__nHeis):
1429 1434 if data_snrPP[i][j] < 1.e-20:
1430 1435 data_snrPP[i][j] = 1.e-20
1431 1436
1432 1437 #----------------- Spectral width computation ----------------------
1433 1438 L = S/R1
1434 1439 L = numpy.where(L<0,1,L)
1435 1440 L = numpy.log(L)
1436 1441 tmp = numpy.sqrt(numpy.absolute(L))
1437 1442 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1438 1443 n = self.__profIndex
1439 1444
1440 1445 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1441 1446 self.__profIndex = 0
1442 1447 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1443 1448
1444 1449
1445 1450 def pulsePairbyProfiles(self,dataOut):
1446 1451
1447 1452 self.__dataReady = False
1448 1453 data_power = None
1449 1454 data_intensity = None
1450 1455 data_velocity = None
1451 1456 data_specwidth = None
1452 1457 data_snrPP = None
1453 1458 self.putData(data=dataOut.data)
1454 1459 if self.__profIndex == self.n:
1455 1460 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1456 1461 self.__dataReady = True
1457 1462
1458 1463 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1459 1464
1460 1465
1461 1466 def pulsePairOp(self, dataOut, datatime= None):
1462 1467
1463 1468 if self.__initime == None:
1464 1469 self.__initime = datatime
1465 1470 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1466 1471 self.__lastdatatime = datatime
1467 1472
1468 1473 if data_power is None:
1469 1474 return None, None, None,None,None,None
1470 1475
1471 1476 avgdatatime = self.__initime
1472 1477 deltatime = datatime - self.__lastdatatime
1473 1478 self.__initime = datatime
1474 1479
1475 1480 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1476 1481
1477 1482 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1478 1483
1479 1484 if not self.isConfig:
1480 1485 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1481 1486 self.isConfig = True
1482 1487 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1483 1488 dataOut.flagNoData = True
1484 1489
1485 1490 if self.__dataReady:
1486 1491 dataOut.nCohInt *= self.n
1487 1492 dataOut.dataPP_POW = data_intensity # S
1488 1493 dataOut.dataPP_POWER = data_power # P
1489 1494 dataOut.dataPP_DOP = data_velocity
1490 1495 dataOut.dataPP_SNR = data_snrPP
1491 1496 dataOut.dataPP_WIDTH = data_specwidth
1492 1497 dataOut.PRFbyAngle = self.n # number of PRFs for each rotated angle, which corresponds to one time step.
1493 1498 dataOut.utctime = avgdatatime
1494 1499 dataOut.flagNoData = False
1495 1500 return dataOut
1496 1501
1497 1502
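# A self-contained sketch of the pulse-pair estimators computed in pushData
# above (synthetic data; ippSec, nCohInt and lambda_ are illustrative values):
#
#   import math
#   import numpy
#   nProf, nHeis = 100, 64
#   ippSec, nCohInt, lambda_ = 1e-3, 1, 3.0e8 / 9345.0e6
#   v = numpy.random.randn(nProf, nHeis) + 1j * numpy.random.randn(nProf, nHeis)
#   lag_0 = numpy.sum((v * numpy.conj(v)).real, axis=0)      # power per height
#   lag_1 = numpy.sum(v[:-1] * numpy.conj(v[1:]), axis=0)    # lag-1 autocorrelation
#   power = lag_0 / (nProf * nCohInt)
#   freq = (-1.0 / (2.0 * math.pi * ippSec * nCohInt)) * numpy.angle(lag_1)
#   velocity = (lambda_ / 2.0) * freq                        # Doppler velocity [m/s]
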
1498 1503
1499 1504 # import collections
1500 1505 # from scipy.stats import mode
1501 1506 #
1502 1507 # class Synchronize(Operation):
1503 1508 #
1504 1509 # isConfig = False
1505 1510 # __profIndex = 0
1506 1511 #
1507 1512 # def __init__(self, **kwargs):
1508 1513 #
1509 1514 # Operation.__init__(self, **kwargs)
1510 1515 # # self.isConfig = False
1511 1516 # self.__powBuffer = None
1512 1517 # self.__startIndex = 0
1513 1518 # self.__pulseFound = False
1514 1519 #
1515 1520 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1516 1521 #
1517 1522 # #Read data
1518 1523 #
1519 1524 # powerdB = dataOut.getPower(channel = channel)
1520 1525 # noisedB = dataOut.getNoise(channel = channel)[0]
1521 1526 #
1522 1527 # self.__powBuffer.extend(powerdB.flatten())
1523 1528 #
1524 1529 # dataArray = numpy.array(self.__powBuffer)
1525 1530 #
1526 1531 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1527 1532 #
1528 1533 # maxValue = numpy.nanmax(filteredPower)
1529 1534 #
1530 1535 # if maxValue < noisedB + 10:
1531 1536 # #No transmission pulse was found
1532 1537 # return None
1533 1538 #
1534 1539 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1535 1540 #
1536 1541 # if len(maxValuesIndex) < 2:
1537 1542 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1538 1543 # return None
1539 1544 #
1540 1545 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1541 1546 #
1542 1547 # #Keep only values spaced nSamples apart
1543 1548 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1544 1549 #
1545 1550 # if len(pulseIndex) < 2:
1546 1551 # #Only one transmission pulse wider than one baud was found
1547 1552 # return None
1548 1553 #
1549 1554 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1550 1555 #
1551 1556 # #remove signals spaced less than 10 units or samples apart
1552 1557 # #(no IPP shorter than 10 units should exist)
1553 1558 #
1554 1559 # realIndex = numpy.where(spacing > 10 )[0]
1555 1560 #
1556 1561 # if len(realIndex) < 2:
1557 1562 # #Only one transmission pulse wider than one baud was found
1558 1563 # return None
1559 1564 #
1560 1565 # #Discard wide pulses (keep only the spacing between IPPs)
1561 1566 # realPulseIndex = pulseIndex[realIndex]
1562 1567 #
1563 1568 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1564 1569 #
1565 1570 # print("IPP = %d samples" %period)
1566 1571 #
1567 1572 # self.__newNSamples = dataOut.nHeights #int(period)
1568 1573 # self.__startIndex = int(realPulseIndex[0])
1569 1574 #
1570 1575 # return 1
1571 1576 #
1572 1577 #
1573 1578 # def setup(self, nSamples, nChannels, buffer_size = 4):
1574 1579 #
1575 1580 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1576 1581 # maxlen = buffer_size*nSamples)
1577 1582 #
1578 1583 # bufferList = []
1579 1584 #
1580 1585 # for i in range(nChannels):
1581 1586 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1582 1587 # maxlen = buffer_size*nSamples)
1583 1588 #
1584 1589 # bufferList.append(bufferByChannel)
1585 1590 #
1586 1591 # self.__nSamples = nSamples
1587 1592 # self.__nChannels = nChannels
1588 1593 # self.__bufferList = bufferList
1589 1594 #
1590 1595 # def run(self, dataOut, channel = 0):
1591 1596 #
1592 1597 # if not self.isConfig:
1593 1598 # nSamples = dataOut.nHeights
1594 1599 # nChannels = dataOut.nChannels
1595 1600 # self.setup(nSamples, nChannels)
1596 1601 # self.isConfig = True
1597 1602 #
1598 1603 # #Append new data to internal buffer
1599 1604 # for thisChannel in range(self.__nChannels):
1600 1605 # bufferByChannel = self.__bufferList[thisChannel]
1601 1606 # bufferByChannel.extend(dataOut.data[thisChannel])
1602 1607 #
1603 1608 # if self.__pulseFound:
1604 1609 # self.__startIndex -= self.__nSamples
1605 1610 #
1606 1611 # #Finding Tx Pulse
1607 1612 # if not self.__pulseFound:
1608 1613 # indexFound = self.__findTxPulse(dataOut, channel)
1609 1614 #
1610 1615 # if indexFound == None:
1611 1616 # dataOut.flagNoData = True
1612 1617 # return
1613 1618 #
1614 1619 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1615 1620 # self.__pulseFound = True
1616 1621 # self.__startIndex = indexFound
1617 1622 #
1618 1623 # #If pulse was found ...
1619 1624 # for thisChannel in range(self.__nChannels):
1620 1625 # bufferByChannel = self.__bufferList[thisChannel]
1621 1626 # #print self.__startIndex
1622 1627 # x = numpy.array(bufferByChannel)
1623 1628 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1624 1629 #
1625 1630 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1626 1631 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1627 1632 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1628 1633 #
1629 1634 # dataOut.data = self.__arrayBuffer
1630 1635 #
1631 1636 # self.__startIndex += self.__newNSamples
1632 1637 #
1633 1638 # return