Modified kmamisr to run on version 3 and created scripts with a v3 suffix to distinguish them. Line #720 of JroIO_param.py was commented out because it reset the file list, which caused the HDF5 file to be rewritten. Any other apparent modification is the result of spacing variations from using the print() function.
joabAM -
r1279:c53fe2a4a291
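The file-list reset mentioned in the commit message lives in JroIO_param.py, which is not part of this diff. As a rough sketch only (the names ParamWriter, filenameList and setup below are hypothetical and not the real API of that module), the behaviour described is: clearing the file list on every call makes the writer forget the HDF5 file it already created and rewrite it, while keeping the list lets it append to the existing file.

    # Hypothetical illustration of the change described above; not taken from
    # JroIO_param.py. Line ~720 of that file performed a reset like the one
    # commented out in setup().
    class ParamWriter(object):

        def __init__(self):
            self.filenameList = []          # HDF5 files created so far

        def setup(self, filename):
            # Before the fix the list was cleared here on every call:
            # self.filenameList = []        # <- commented out, as in the commit
            if filename not in self.filenameList:
                self.filenameList.append(filename)   # create the file once
                return 'created'
            return 'appended'                        # keep writing to the same file

    writer = ParamWriter()
    print(writer.setup('d2018001.hdf5'))   # created
    print(writer.setup('d2018001.hdf5'))   # appended (file is not rewritten)

With the reset in place, the second call would report 'created' again and the existing HDF5 output would be overwritten, which is the symptom the commit fixes.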
@@ -1,1290 +1,1295
1 1 '''
2 2 Updated on January , 2018, for multiprocessing purposes
3 3 Author: Sergio Cortez
4 4 Created on September , 2012
5 5 '''
6 6 from platform import python_version
7 7 import sys
8 8 import ast
9 9 import datetime
10 10 import traceback
11 11 import math
12 12 import time
13 13 import zmq
14 14 from multiprocessing import Process, Queue, Event, Value, cpu_count
15 15 from threading import Thread
16 16 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
17 17 from xml.dom import minidom
18 18
19 19
20 20 from schainpy.admin import Alarm, SchainWarning
21 21 from schainpy.model import *
22 22 from schainpy.utils import log
23 23
24 24
25 25 DTYPES = {
26 26 'Voltage': '.r',
27 27 'Spectra': '.pdata'
28 28 }
29 29
30 30
31 31 def MPProject(project, n=cpu_count()):
32 32 '''
33 33 Project wrapper to run schain in n processes
34 34 '''
35 35
36 36 rconf = project.getReadUnitObj()
37 37 op = rconf.getOperationObj('run')
38 38 dt1 = op.getParameterValue('startDate')
39 39 dt2 = op.getParameterValue('endDate')
40 40 tm1 = op.getParameterValue('startTime')
41 41 tm2 = op.getParameterValue('endTime')
42 42 days = (dt2 - dt1).days
43 43
44 44 for day in range(days + 1):
45 45 skip = 0
46 46 cursor = 0
47 47 processes = []
48 48 dt = dt1 + datetime.timedelta(day)
49 49 dt_str = dt.strftime('%Y/%m/%d')
50 50 reader = JRODataReader()
51 51 paths, files = reader.searchFilesOffLine(path=rconf.path,
52 52 startDate=dt,
53 53 endDate=dt,
54 54 startTime=tm1,
55 55 endTime=tm2,
56 56 ext=DTYPES[rconf.datatype])
57 57 nFiles = len(files)
58 58 if nFiles == 0:
59 59 continue
60 skip = int(math.ceil(nFiles / n))
60 skip = int(math.ceil(nFiles / n))
61 61 while nFiles > cursor * skip:
62 62 rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor,
63 63 skip=skip)
64 64 p = project.clone()
65 65 p.start()
66 66 processes.append(p)
67 67 cursor += 1
68 68
69 69 def beforeExit(exctype, value, trace):
70 70 for process in processes:
71 71 process.terminate()
72 72 process.join()
73 73 print(traceback.print_tb(trace))
74 74
75 75 sys.excepthook = beforeExit
76 76
77 77 for process in processes:
78 78 process.join()
79 79 process.terminate()
80 80
81 81 time.sleep(3)
82 82
83 83 def wait(context):
84
84
85 85 time.sleep(1)
86 86 c = zmq.Context()
87 87 receiver = c.socket(zmq.SUB)
88 receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id))
88 receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id))
89 89 receiver.setsockopt(zmq.SUBSCRIBE, self.id.encode())
90 90 msg = receiver.recv_multipart()[1]
91 91 context.terminate()
92 92
93 93 class ParameterConf():
94 94
95 95 id = None
96 96 name = None
97 97 value = None
98 98 format = None
99 99
100 100 __formated_value = None
101 101
102 102 ELEMENTNAME = 'Parameter'
103 103
104 104 def __init__(self):
105 105
106 106 self.format = 'str'
107 107
108 108 def getElementName(self):
109 109
110 110 return self.ELEMENTNAME
111 111
112 112 def getValue(self):
113 113
114 114 value = self.value
115 115 format = self.format
116 116
117 117 if self.__formated_value != None:
118 118
119 119 return self.__formated_value
120 120
121 121 if format == 'obj':
122 122 return value
123 123
124 124 if format == 'str':
125 125 self.__formated_value = str(value)
126 126 return self.__formated_value
127 127
128 128 if value == '':
129 129 raise ValueError('%s: This parameter value is empty' % self.name)
130 130
131 131 if format == 'list':
132 132 strList = [s.strip() for s in value.split(',')]
133 133 self.__formated_value = strList
134 134
135 135 return self.__formated_value
136 136
137 137 if format == 'intlist':
138 138 '''
139 139 Example:
140 140 value = (0,1,2)
141 141 '''
142 142
143 143 new_value = ast.literal_eval(value)
144 144
145 145 if type(new_value) not in (tuple, list):
146 146 new_value = [int(new_value)]
147 147
148 148 self.__formated_value = new_value
149 149
150 150 return self.__formated_value
151 151
152 152 if format == 'floatlist':
153 153 '''
154 154 Example:
155 155 value = (0.5, 1.4, 2.7)
156 156 '''
157 157
158 158 new_value = ast.literal_eval(value)
159 159
160 160 if type(new_value) not in (tuple, list):
161 161 new_value = [float(new_value)]
162 162
163 163 self.__formated_value = new_value
164 164
165 165 return self.__formated_value
166 166
167 167 if format == 'date':
168 168 strList = value.split('/')
169 169 intList = [int(x) for x in strList]
170 170 date = datetime.date(intList[0], intList[1], intList[2])
171 171
172 172 self.__formated_value = date
173 173
174 174 return self.__formated_value
175 175
176 176 if format == 'time':
177 177 strList = value.split(':')
178 178 intList = [int(x) for x in strList]
179 179 time = datetime.time(intList[0], intList[1], intList[2])
180 180
181 181 self.__formated_value = time
182 182
183 183 return self.__formated_value
184 184
185 185 if format == 'pairslist':
186 186 '''
187 187 Example:
188 188 value = (0,1),(1,2)
189 189 '''
190 190
191 191 new_value = ast.literal_eval(value)
192 192
193 193 if type(new_value) not in (tuple, list):
194 194 raise ValueError('%s has to be a tuple or list of pairs' % value)
195 195
196 196 if type(new_value[0]) not in (tuple, list):
197 197 if len(new_value) != 2:
198 198 raise ValueError('%s has to be a tuple or list of pairs' % value)
199 199 new_value = [new_value]
200 200
201 201 for thisPair in new_value:
202 202 if len(thisPair) != 2:
203 203 raise ValueError('%s has to be a tuple or list of pairs' % value)
204 204
205 205 self.__formated_value = new_value
206 206
207 207 return self.__formated_value
208 208
209 209 if format == 'multilist':
210 210 '''
211 211 Example:
212 212 value = (0,1,2),(3,4,5)
213 213 '''
214 214 multiList = ast.literal_eval(value)
215 215
216 216 if type(multiList[0]) == int:
217 217 multiList = ast.literal_eval('(' + value + ')')
218 218
219 219 self.__formated_value = multiList
220 220
221 221 return self.__formated_value
222 222
223 223 if format == 'bool':
224 224 value = int(value)
225 225
226 226 if format == 'int':
227 227 value = float(value)
228 228
229 229 format_func = eval(format)
230 230
231 231 self.__formated_value = format_func(value)
232 232
233 233 return self.__formated_value
234 234
235 235 def updateId(self, new_id):
236 236
237 237 self.id = str(new_id)
238 238
239 239 def setup(self, id, name, value, format='str'):
240 240 self.id = str(id)
241 241 self.name = name
242 242 if format == 'obj':
243 243 self.value = value
244 244 else:
245 245 self.value = str(value)
246 246 self.format = str.lower(format)
247 247
248 248 self.getValue()
249 249
250 250 return 1
251 251
252 252 def update(self, name, value, format='str'):
253 253
254 254 self.name = name
255 255 self.value = str(value)
256 256 self.format = format
257 257
258 258 def makeXml(self, opElement):
259 259 if self.name not in ('queue',):
260 260 parmElement = SubElement(opElement, self.ELEMENTNAME)
261 261 parmElement.set('id', str(self.id))
262 262 parmElement.set('name', self.name)
263 263 parmElement.set('value', self.value)
264 264 parmElement.set('format', self.format)
265
265
266 266 def readXml(self, parmElement):
267 267
268 268 self.id = parmElement.get('id')
269 269 self.name = parmElement.get('name')
270 270 self.value = parmElement.get('value')
271 271 self.format = str.lower(parmElement.get('format'))
272 272
273 273 # Compatible with old signal chain version
274 274 if self.format == 'int' and self.name == 'idfigure':
275 275 self.name = 'id'
276 276
277 277 def printattr(self):
278 278
279 279 print('Parameter[%s]: name = %s, value = %s, format = %s, project_id = %s' % (self.id, self.name, self.value, self.format, self.project_id))
280 280
281 281 class OperationConf():
282 282
283 283 ELEMENTNAME = 'Operation'
284 284
285 285 def __init__(self):
286 286
287 287 self.id = '0'
288 288 self.name = None
289 289 self.priority = None
290 290 self.topic = None
291 291
292 292 def __getNewId(self):
293 293
294 294 return int(self.id) * 10 + len(self.parmConfObjList) + 1
295 295
296 296 def getId(self):
297 297 return self.id
298 298
299 299 def updateId(self, new_id):
300 300
301 301 self.id = str(new_id)
302 302
303 303 n = 1
304 304 for parmObj in self.parmConfObjList:
305 305
306 306 idParm = str(int(new_id) * 10 + n)
307 307 parmObj.updateId(idParm)
308 308
309 309 n += 1
310 310
311 311 def getElementName(self):
312 312
313 313 return self.ELEMENTNAME
314 314
315 315 def getParameterObjList(self):
316 316
317 317 return self.parmConfObjList
318 318
319 319 def getParameterObj(self, parameterName):
320 320
321 321 for parmConfObj in self.parmConfObjList:
322 322
323 323 if parmConfObj.name != parameterName:
324 324 continue
325 325
326 326 return parmConfObj
327 327
328 328 return None
329 329
330 330 def getParameterObjfromValue(self, parameterValue):
331 331
332 332 for parmConfObj in self.parmConfObjList:
333 333
334 334 if parmConfObj.getValue() != parameterValue:
335 335 continue
336 336
337 337 return parmConfObj.getValue()
338 338
339 339 return None
340 340
341 341 def getParameterValue(self, parameterName):
342 342
343 343 parameterObj = self.getParameterObj(parameterName)
344 344
345 345 # if not parameterObj:
346 346 # return None
347 347
348 348 value = parameterObj.getValue()
349 349
350 350 return value
351 351
352 352 def getKwargs(self):
353 353
354 354 kwargs = {}
355 355
356 356 for parmConfObj in self.parmConfObjList:
357 357 if self.name == 'run' and parmConfObj.name == 'datatype':
358 358 continue
359 359
360 360 kwargs[parmConfObj.name] = parmConfObj.getValue()
361 361
362 362 return kwargs
363 363
364 364 def setup(self, id, name, priority, type, project_id, err_queue, lock):
365 365
366 366 self.id = str(id)
367 367 self.project_id = project_id
368 368 self.name = name
369 369 self.type = type
370 370 self.priority = priority
371 371 self.err_queue = err_queue
372 372 self.lock = lock
373 373 self.parmConfObjList = []
374 374
375 375 def removeParameters(self):
376 376
377 377 for obj in self.parmConfObjList:
378 378 del obj
379 379
380 380 self.parmConfObjList = []
381 381
382 382 def addParameter(self, name, value, format='str'):
383 383
384 384 if value is None:
385 385 return None
386 386 id = self.__getNewId()
387 387
388 388 parmConfObj = ParameterConf()
389 389 if not parmConfObj.setup(id, name, value, format):
390 390 return None
391 391
392 392 self.parmConfObjList.append(parmConfObj)
393 393
394 394 return parmConfObj
395 395
396 396 def changeParameter(self, name, value, format='str'):
397 397
398 398 parmConfObj = self.getParameterObj(name)
399 399 parmConfObj.update(name, value, format)
400 400
401 401 return parmConfObj
402 402
403 403 def makeXml(self, procUnitElement):
404 404
405 405 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
406 406 opElement.set('id', str(self.id))
407 407 opElement.set('name', self.name)
408 408 opElement.set('type', self.type)
409 409 opElement.set('priority', str(self.priority))
410 410
411 411 for parmConfObj in self.parmConfObjList:
412 412 parmConfObj.makeXml(opElement)
413 413
414 414 def readXml(self, opElement, project_id):
415 415
416 416 self.id = opElement.get('id')
417 417 self.name = opElement.get('name')
418 418 self.type = opElement.get('type')
419 419 self.priority = opElement.get('priority')
420 self.project_id = str(project_id)
420 self.project_id = str(project_id)
421 421
422 422 # Compatible with old signal chain version
423 423 # Use of 'run' method instead 'init'
424 424 if self.type == 'self' and self.name == 'init':
425 425 self.name = 'run'
426 426
427 427 self.parmConfObjList = []
428 428
429 429 parmElementList = opElement.iter(ParameterConf().getElementName())
430 430
431 431 for parmElement in parmElementList:
432 432 parmConfObj = ParameterConf()
433 433 parmConfObj.readXml(parmElement)
434 434
435 435 # Compatible with old signal chain version
436 436 # If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER
437 437 if self.type != 'self' and self.name == 'Plot':
438 438 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
439 439 self.name = parmConfObj.value
440 440 continue
441 441
442 442 self.parmConfObjList.append(parmConfObj)
443 443
444 444 def printattr(self):
445 445
446 446 print('%s[%s]: name = %s, type = %s, priority = %s, project_id = %s' % (self.ELEMENTNAME,
447 447 self.id,
448 448 self.name,
449 449 self.type,
450 450 self.priority,
451 451 self.project_id))
452 452
453 453 for parmConfObj in self.parmConfObjList:
454 454 parmConfObj.printattr()
455 455
456 456 def createObject(self):
457 457
458 458 className = eval(self.name)
459 459
460 460 if self.type == 'other':
461 461 opObj = className()
462 462 elif self.type == 'external':
463 463 kwargs = self.getKwargs()
464 464 opObj = className(self.id, self.id, self.project_id, self.err_queue, self.lock, 'Operation', **kwargs)
465 465 opObj.start()
466 466 self.opObj = opObj
467 467
468 468 return opObj
469 469
470 470 class ProcUnitConf():
471 471
472 472 ELEMENTNAME = 'ProcUnit'
473 473
474 474 def __init__(self):
475 475
476 476 self.id = None
477 477 self.datatype = None
478 478 self.name = None
479 self.inputId = None
479 self.inputId = None
480 480 self.opConfObjList = []
481 481 self.procUnitObj = None
482 482 self.opObjDict = {}
483 483
484 484 def __getPriority(self):
485 485
486 486 return len(self.opConfObjList) + 1
487 487
488 488 def __getNewId(self):
489 489
490 490 return int(self.id) * 10 + len(self.opConfObjList) + 1
491 491
492 492 def getElementName(self):
493 493
494 494 return self.ELEMENTNAME
495 495
496 496 def getId(self):
497 497
498 498 return self.id
499 499
500 def updateId(self, new_id):
500 def updateId(self, new_id):
501 501 '''
502 502 new_id = int(parentId) * 10 + (int(self.id) % 10)
503 503 new_inputId = int(parentId) * 10 + (int(self.inputId) % 10)
504 504
505 505 # If this proc unit has not inputs
506 506 #if self.inputId == '0':
507 507 #new_inputId = 0
508 508
509 509 n = 1
510 510 for opConfObj in self.opConfObjList:
511 511
512 512 idOp = str(int(new_id) * 10 + n)
513 513 opConfObj.updateId(idOp)
514 514
515 515 n += 1
516 516
517 517 self.parentId = str(parentId)
518 518 self.id = str(new_id)
519 519 #self.inputId = str(new_inputId)
520 520 '''
521 521 n = 1
522 522
523 523 def getInputId(self):
524 524
525 525 return self.inputId
526 526
527 527 def getOperationObjList(self):
528 528
529 529 return self.opConfObjList
530 530
531 531 def getOperationObj(self, name=None):
532 532
533 533 for opConfObj in self.opConfObjList:
534 534
535 535 if opConfObj.name != name:
536 536 continue
537 537
538 538 return opConfObj
539 539
540 540 return None
541 541
542 542 def getOpObjfromParamValue(self, value=None):
543 543
544 544 for opConfObj in self.opConfObjList:
545 545 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
546 546 continue
547 547 return opConfObj
548 548 return None
549 549
550 550 def getProcUnitObj(self):
551 551
552 552 return self.procUnitObj
553 553
554 554 def setup(self, project_id, id, name, datatype, inputId, err_queue, lock):
555 555 '''
556 556 id sera el topico a publicar
557 557 inputId sera el topico a subscribirse
558 558 '''
559
559
560 560 # Compatible with old signal chain version
561 561 if datatype == None and name == None:
562 562 raise ValueError('datatype or name should be defined')
563 563
564 564 #Definir una condicion para inputId cuando sea 0
565 565
566 566 if name == None:
567 567 if 'Proc' in datatype:
568 568 name = datatype
569 569 else:
570 570 name = '%sProc' % (datatype)
571 571
572 572 if datatype == None:
573 573 datatype = name.replace('Proc', '')
574 574
575 575 self.id = str(id)
576 576 self.project_id = project_id
577 577 self.name = name
578 578 self.datatype = datatype
579 579 self.inputId = inputId
580 580 self.err_queue = err_queue
581 581 self.lock = lock
582 582 self.opConfObjList = []
583 583
584 self.addOperation(name='run', optype='self')
584 self.addOperation(name='run', optype='self')
585 585
586 586 def removeOperations(self):
587 587
588 588 for obj in self.opConfObjList:
589 589 del obj
590 590
591 591 self.opConfObjList = []
592 592 self.addOperation(name='run')
593 593
594 594 def addParameter(self, **kwargs):
595 595 '''
596 596 Add parameters to 'run' operation
597 597 '''
598 598 opObj = self.opConfObjList[0]
599 599
600 600 opObj.addParameter(**kwargs)
601 601
602 602 return opObj
603 603
604 604 def addOperation(self, name, optype='self'):
605 605 '''
606 606 Actualizacion - > proceso comunicacion
607 607 En el caso de optype='self', elminar. DEfinir comuncacion IPC -> Topic
608 608 definir el tipoc de socket o comunicacion ipc++
609 609
610 610 '''
611 611
612 612 id = self.__getNewId()
613 613 priority = self.__getPriority() # Sin mucho sentido, pero puede usarse
614 614 opConfObj = OperationConf()
615 615 opConfObj.setup(id, name=name, priority=priority, type=optype, project_id=self.project_id, err_queue=self.err_queue, lock=self.lock)
616 616 self.opConfObjList.append(opConfObj)
617 617
618 618 return opConfObj
619 619
620 620 def makeXml(self, projectElement):
621 621
622 622 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
623 623 procUnitElement.set('id', str(self.id))
624 624 procUnitElement.set('name', self.name)
625 625 procUnitElement.set('datatype', self.datatype)
626 626 procUnitElement.set('inputId', str(self.inputId))
627 627
628 628 for opConfObj in self.opConfObjList:
629 629 opConfObj.makeXml(procUnitElement)
630 630
631 631 def readXml(self, upElement, project_id):
632 632
633 633 self.id = upElement.get('id')
634 634 self.name = upElement.get('name')
635 635 self.datatype = upElement.get('datatype')
636 636 self.inputId = upElement.get('inputId')
637 637 self.project_id = str(project_id)
638 638
639 639 if self.ELEMENTNAME == 'ReadUnit':
640 640 self.datatype = self.datatype.replace('Reader', '')
641 641
642 642 if self.ELEMENTNAME == 'ProcUnit':
643 643 self.datatype = self.datatype.replace('Proc', '')
644 644
645 645 if self.inputId == 'None':
646 646 self.inputId = '0'
647 647
648 648 self.opConfObjList = []
649 649
650 650 opElementList = upElement.iter(OperationConf().getElementName())
651 651
652 652 for opElement in opElementList:
653 653 opConfObj = OperationConf()
654 654 opConfObj.readXml(opElement, project_id)
655 655 self.opConfObjList.append(opConfObj)
656 656
657 657 def printattr(self):
658 658
659 659 print('%s[%s]: name = %s, datatype = %s, inputId = %s, project_id = %s' % (self.ELEMENTNAME,
660 660 self.id,
661 661 self.name,
662 662 self.datatype,
663 663 self.inputId,
664 664 self.project_id))
665 665
666 666 for opConfObj in self.opConfObjList:
667 667 opConfObj.printattr()
668 668
669 669 def getKwargs(self):
670 670
671 671 opObj = self.opConfObjList[0]
672 672 kwargs = opObj.getKwargs()
673 673
674 674 return kwargs
675 675
676 676 def createObjects(self):
677 677 '''
678 678 Instancia de unidades de procesamiento.
679 679 '''
680 680
681 681 className = eval(self.name)
682 #print(self.name)
682 683 kwargs = self.getKwargs()
684 #print(kwargs)
685 #print("mark_a")
683 686 procUnitObj = className(self.id, self.inputId, self.project_id, self.err_queue, self.lock, 'ProcUnit', **kwargs)
687 #print("mark_b")
684 688 log.success('creating process...', self.name)
685 689
686 690 for opConfObj in self.opConfObjList:
687
691
688 692 if opConfObj.type == 'self' and opConfObj.name == 'run':
689 693 continue
690 694 elif opConfObj.type == 'self':
691 695 opObj = getattr(procUnitObj, opConfObj.name)
692 696 else:
693 697 opObj = opConfObj.createObject()
694
698
695 699 log.success('adding operation: {}, type:{}'.format(
696 700 opConfObj.name,
697 701 opConfObj.type), self.name)
698
702
699 703 procUnitObj.addOperation(opConfObj, opObj)
700
704
701 705 procUnitObj.start()
702 706 self.procUnitObj = procUnitObj
703
707
704 708 def close(self):
705 709
706 710 for opConfObj in self.opConfObjList:
707 711 if opConfObj.type == 'self':
708 712 continue
709 713
710 714 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
711 715 opObj.close()
712 716
713 717 self.procUnitObj.close()
714 718
715 719 return
716 720
717 721
718 722 class ReadUnitConf(ProcUnitConf):
719 723
720 724 ELEMENTNAME = 'ReadUnit'
721 725
722 726 def __init__(self):
723 727
724 728 self.id = None
725 729 self.datatype = None
726 730 self.name = None
727 731 self.inputId = None
728 732 self.opConfObjList = []
729 733 self.lock = Event()
730 734 self.lock.set()
731 735 self.lock.n = Value('d', 0)
732 736
733 737 def getElementName(self):
734 738
735 return self.ELEMENTNAME
736
739 return self.ELEMENTNAME
740
737 741 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
738 742 startTime='', endTime='', server=None, **kwargs):
739 743
740 744
741 745 '''
742 746 *****el id del proceso sera el Topico
743 747
744 748 Adicion de {topic}, si no esta presente -> error
745 749 kwargs deben ser trasmitidos en la instanciacion
746 750
747 751 '''
748
752
749 753 # Compatible with old signal chain version
750 754 if datatype == None and name == None:
751 755 raise ValueError('datatype or name should be defined')
752 756 if name == None:
753 757 if 'Reader' in datatype:
754 758 name = datatype
755 759 datatype = name.replace('Reader','')
756 760 else:
757 761 name = '{}Reader'.format(datatype)
758 762 if datatype == None:
759 763 if 'Reader' in name:
760 764 datatype = name.replace('Reader','')
761 765 else:
762 766 datatype = name
763 767 name = '{}Reader'.format(name)
764 768
765 769 self.id = id
766 770 self.project_id = project_id
767 771 self.name = name
768 772 self.datatype = datatype
769 773 if path != '':
770 774 self.path = os.path.abspath(path)
775 print (self.path)
771 776 self.startDate = startDate
772 777 self.endDate = endDate
773 778 self.startTime = startTime
774 779 self.endTime = endTime
775 780 self.server = server
776 self.err_queue = err_queue
781 self.err_queue = err_queue
777 782 self.addRunOperation(**kwargs)
778 783
779 784 def update(self, **kwargs):
780 785
781 786 if 'datatype' in kwargs:
782 787 datatype = kwargs.pop('datatype')
783 788 if 'Reader' in datatype:
784 789 self.name = datatype
785 790 else:
786 791 self.name = '%sReader' % (datatype)
787 792 self.datatype = self.name.replace('Reader', '')
788 793
789 794 attrs = ('path', 'startDate', 'endDate',
790 795 'startTime', 'endTime')
791 796
792 797 for attr in attrs:
793 798 if attr in kwargs:
794 799 setattr(self, attr, kwargs.pop(attr))
795 800
796 801 self.updateRunOperation(**kwargs)
797 802
798 803 def removeOperations(self):
799 804
800 805 for obj in self.opConfObjList:
801 806 del obj
802 807
803 808 self.opConfObjList = []
804 809
805 810 def addRunOperation(self, **kwargs):
806 811
807 opObj = self.addOperation(name='run', optype='self')
812 opObj = self.addOperation(name='run', optype='self')
808 813
809 814 if self.server is None:
810 815 opObj.addParameter(
811 816 name='datatype', value=self.datatype, format='str')
812 817 opObj.addParameter(name='path', value=self.path, format='str')
813 818 opObj.addParameter(
814 819 name='startDate', value=self.startDate, format='date')
815 820 opObj.addParameter(
816 821 name='endDate', value=self.endDate, format='date')
817 822 opObj.addParameter(
818 823 name='startTime', value=self.startTime, format='time')
819 824 opObj.addParameter(
820 825 name='endTime', value=self.endTime, format='time')
821 826
822 827 for key, value in list(kwargs.items()):
823 828 opObj.addParameter(name=key, value=value,
824 829 format=type(value).__name__)
825 830 else:
826 831 opObj.addParameter(name='server', value=self.server, format='str')
827 832
828 833 return opObj
829 834
830 835 def updateRunOperation(self, **kwargs):
831 836
832 837 opObj = self.getOperationObj(name='run')
833 838 opObj.removeParameters()
834 839
835 840 opObj.addParameter(name='datatype', value=self.datatype, format='str')
836 841 opObj.addParameter(name='path', value=self.path, format='str')
837 842 opObj.addParameter(
838 843 name='startDate', value=self.startDate, format='date')
839 844 opObj.addParameter(name='endDate', value=self.endDate, format='date')
840 845 opObj.addParameter(
841 846 name='startTime', value=self.startTime, format='time')
842 847 opObj.addParameter(name='endTime', value=self.endTime, format='time')
843 848
844 849 for key, value in list(kwargs.items()):
845 850 opObj.addParameter(name=key, value=value,
846 851 format=type(value).__name__)
847 852
848 853 return opObj
849 854
850 855 def readXml(self, upElement, project_id):
851 856
852 857 self.id = upElement.get('id')
853 858 self.name = upElement.get('name')
854 859 self.datatype = upElement.get('datatype')
855 860 self.project_id = str(project_id) #yong
856 861
857 862 if self.ELEMENTNAME == 'ReadUnit':
858 863 self.datatype = self.datatype.replace('Reader', '')
859 864
860 865 self.opConfObjList = []
861 866
862 867 opElementList = upElement.iter(OperationConf().getElementName())
863 868
864 869 for opElement in opElementList:
865 870 opConfObj = OperationConf()
866 871 opConfObj.readXml(opElement, project_id)
867 872 self.opConfObjList.append(opConfObj)
868 873
869 874 if opConfObj.name == 'run':
870 875 self.path = opConfObj.getParameterValue('path')
871 876 self.startDate = opConfObj.getParameterValue('startDate')
872 877 self.endDate = opConfObj.getParameterValue('endDate')
873 878 self.startTime = opConfObj.getParameterValue('startTime')
874 879 self.endTime = opConfObj.getParameterValue('endTime')
875 880
876 881
877 882 class Project(Process):
878 883
879 884 ELEMENTNAME = 'Project'
880 885
881 886 def __init__(self):
882 887
883 888 Process.__init__(self)
884 889 self.id = None
885 890 self.filename = None
886 891 self.description = None
887 892 self.email = None
888 893 self.alarm = None
889 894 self.procUnitConfObjDict = {}
890 895 self.err_queue = Queue()
891 896
892 897 def __getNewId(self):
893 898
894 899 idList = list(self.procUnitConfObjDict.keys())
895 900 id = int(self.id) * 10
896 901
897 902 while True:
898 903 id += 1
899 904
900 905 if str(id) in idList:
901 906 continue
902 907
903 908 break
904 909
905 910 return str(id)
906 911
907 912 def getElementName(self):
908 913
909 914 return self.ELEMENTNAME
910 915
911 916 def getId(self):
912 917
913 918 return self.id
914 919
915 920 def updateId(self, new_id):
916 921
917 922 self.id = str(new_id)
918 923
919 924 keyList = list(self.procUnitConfObjDict.keys())
920 925 keyList.sort()
921 926
922 927 n = 1
923 928 newProcUnitConfObjDict = {}
924 929
925 930 for procKey in keyList:
926 931
927 932 procUnitConfObj = self.procUnitConfObjDict[procKey]
928 933 idProcUnit = str(int(self.id) * 10 + n)
929 934 procUnitConfObj.updateId(idProcUnit)
930 935 newProcUnitConfObjDict[idProcUnit] = procUnitConfObj
931 936 n += 1
932 937
933 938 self.procUnitConfObjDict = newProcUnitConfObjDict
934 939
935 940 def setup(self, id=1, name='', description='', email=None, alarm=[]):
936 941
937 942 print(' ')
938 943 print('*' * 60)
939 944 print('* Starting SIGNAL CHAIN PROCESSING (Multiprocessing) v%s *' % schainpy.__version__)
940 945 print('*' * 60)
941 946 print("* Python " + python_version() + " *")
942 947 print('*' * 19)
943 948 print(' ')
944 949 self.id = str(id)
945 self.description = description
950 self.description = description
946 951 self.email = email
947 952 self.alarm = alarm
948 953 if name:
949 954 self.name = '{} ({})'.format(Process.__name__, name)
950 955
951 956 def update(self, **kwargs):
952 957
953 958 for key, value in list(kwargs.items()):
954 959 setattr(self, key, value)
955 960
956 961 def clone(self):
957 962
958 963 p = Project()
959 964 p.procUnitConfObjDict = self.procUnitConfObjDict
960 965 return p
961 966
962 967 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
963 968
964 969 '''
965 970 Actualizacion:
966 971 Se agrego un nuevo argumento: topic -relativo a la forma de comunicar los procesos simultaneos
967 972
968 973 * El id del proceso sera el topico al que se deben subscribir los procUnits para recibir la informacion(data)
969 974
970 975 '''
971 976
972 977 if id is None:
973 978 idReadUnit = self.__getNewId()
974 979 else:
975 980 idReadUnit = str(id)
976 981
977 982 readUnitConfObj = ReadUnitConf()
978 983 readUnitConfObj.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
979 984 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
980
985
981 986 return readUnitConfObj
982 987
983 988 def addProcUnit(self, inputId='0', datatype=None, name=None):
984 989
985 990 '''
986 991 Actualizacion:
987 992 Se agrego dos nuevos argumentos: topic_read (lee data de otro procUnit) y topic_write(escribe o envia data a otro procUnit)
988 993 Deberia reemplazar a "inputId"
989 994
990 995 ** A fin de mantener el inputID, este sera la representaacion del topicoal que deben subscribirse. El ID propio de la intancia
991 996 (proceso) sera el topico de la publicacion, todo sera asignado de manera dinamica.
992 997
993 998 '''
994 999
995 1000 idProcUnit = self.__getNewId()
996 1001 procUnitConfObj = ProcUnitConf()
997 input_proc = self.procUnitConfObjDict[inputId]
1002 input_proc = self.procUnitConfObjDict[inputId]
998 1003 procUnitConfObj.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue, input_proc.lock)
999 1004 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1000 1005
1001 1006 return procUnitConfObj
1002 1007
1003 1008 def removeProcUnit(self, id):
1004 1009
1005 1010 if id in list(self.procUnitConfObjDict.keys()):
1006 1011 self.procUnitConfObjDict.pop(id)
1007 1012
1008 1013 def getReadUnitId(self):
1009 1014
1010 1015 readUnitConfObj = self.getReadUnitObj()
1011 1016
1012 1017 return readUnitConfObj.id
1013 1018
1014 1019 def getReadUnitObj(self):
1015 1020
1016 1021 for obj in list(self.procUnitConfObjDict.values()):
1017 1022 if obj.getElementName() == 'ReadUnit':
1018 1023 return obj
1019 1024
1020 1025 return None
1021 1026
1022 1027 def getProcUnitObj(self, id=None, name=None):
1023 1028
1024 1029 if id != None:
1025 1030 return self.procUnitConfObjDict[id]
1026 1031
1027 1032 if name != None:
1028 1033 return self.getProcUnitObjByName(name)
1029 1034
1030 1035 return None
1031 1036
1032 1037 def getProcUnitObjByName(self, name):
1033 1038
1034 1039 for obj in list(self.procUnitConfObjDict.values()):
1035 1040 if obj.name == name:
1036 1041 return obj
1037 1042
1038 1043 return None
1039 1044
1040 1045 def procUnitItems(self):
1041 1046
1042 1047 return list(self.procUnitConfObjDict.items())
1043 1048
1044 1049 def makeXml(self):
1045 1050
1046 1051 projectElement = Element('Project')
1047 1052 projectElement.set('id', str(self.id))
1048 1053 projectElement.set('name', self.name)
1049 1054 projectElement.set('description', self.description)
1050 1055
1051 1056 for procUnitConfObj in list(self.procUnitConfObjDict.values()):
1052 1057 procUnitConfObj.makeXml(projectElement)
1053 1058
1054 1059 self.projectElement = projectElement
1055 1060
1056 1061 def writeXml(self, filename=None):
1057 1062
1058 1063 if filename == None:
1059 1064 if self.filename:
1060 1065 filename = self.filename
1061 1066 else:
1062 1067 filename = 'schain.xml'
1063 1068
1064 1069 if not filename:
1065 1070 print('filename has not been defined. Use setFilename(filename) for do it.')
1066 1071 return 0
1067 1072
1068 1073 abs_file = os.path.abspath(filename)
1069 1074
1070 1075 if not os.access(os.path.dirname(abs_file), os.W_OK):
1071 1076 print('No write permission on %s' % os.path.dirname(abs_file))
1072 1077 return 0
1073 1078
1074 1079 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1075 1080 print('File %s already exists and it could not be overwriten' % abs_file)
1076 1081 return 0
1077 1082
1078 1083 self.makeXml()
1079 1084
1080 1085 ElementTree(self.projectElement).write(abs_file, method='xml')
1081 1086
1082 1087 self.filename = abs_file
1083 1088
1084 1089 return 1
1085 1090
1086 1091 def readXml(self, filename=None):
1087 1092
1088 1093 if not filename:
1089 1094 print('filename is not defined')
1090 1095 return 0
1091 1096
1092 1097 abs_file = os.path.abspath(filename)
1093 1098
1094 1099 if not os.path.isfile(abs_file):
1095 1100 print('%s file does not exist' % abs_file)
1096 1101 return 0
1097 1102
1098 1103 self.projectElement = None
1099 1104 self.procUnitConfObjDict = {}
1100 1105
1101 1106 try:
1102 1107 self.projectElement = ElementTree().parse(abs_file)
1103 1108 except:
1104 1109 print('Error reading %s, verify file format' % filename)
1105 1110 return 0
1106 1111
1107 1112 self.project = self.projectElement.tag
1108 1113
1109 1114 self.id = self.projectElement.get('id')
1110 1115 self.name = self.projectElement.get('name')
1111 1116 self.description = self.projectElement.get('description')
1112 1117
1113 1118 readUnitElementList = self.projectElement.iter(
1114 1119 ReadUnitConf().getElementName())
1115 1120
1116 1121 for readUnitElement in readUnitElementList:
1117 1122 readUnitConfObj = ReadUnitConf()
1118 1123 readUnitConfObj.readXml(readUnitElement, self.id)
1119 1124 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
1120 1125
1121 1126 procUnitElementList = self.projectElement.iter(
1122 1127 ProcUnitConf().getElementName())
1123 1128
1124 1129 for procUnitElement in procUnitElementList:
1125 1130 procUnitConfObj = ProcUnitConf()
1126 1131 procUnitConfObj.readXml(procUnitElement, self.id)
1127 1132 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1128 1133
1129 1134 self.filename = abs_file
1130 1135
1131 1136 return 1
1132 1137
1133 1138 def __str__(self):
1134 1139
1135 1140 print('Project: name = %s, description = %s, id = %s' % (
1136 1141 self.name,
1137 1142 self.description,
1138 1143 self.id))
1139 1144
1140 1145 for procUnitConfObj in self.procUnitConfObjDict.values():
1141 1146 print(procUnitConfObj)
1142 1147
1143 1148 def createObjects(self):
1144 1149
1145 1150
1146 1151 keys = list(self.procUnitConfObjDict.keys())
1147 1152 keys.sort()
1148 1153 for key in keys:
1149 1154 self.procUnitConfObjDict[key].createObjects()
1150 1155
1151 1156 def monitor(self):
1152 1157
1153 1158 t = Thread(target=self.__monitor, args=(self.err_queue, self.ctx))
1154 1159 t.start()
1155
1160
1156 1161 def __monitor(self, queue, ctx):
1157 1162
1158 1163 import socket
1159
1164
1160 1165 procs = 0
1161 1166 err_msg = ''
1162
1167
1163 1168 while True:
1164 1169 msg = queue.get()
1165 1170 if '#_start_#' in msg:
1166 1171 procs += 1
1167 1172 elif '#_end_#' in msg:
1168 1173 procs -=1
1169 1174 else:
1170 1175 err_msg = msg
1171
1172 if procs == 0 or 'Traceback' in err_msg:
1176
1177 if procs == 0 or 'Traceback' in err_msg:
1173 1178 break
1174 1179 time.sleep(0.1)
1175
1180
1176 1181 if '|' in err_msg:
1177 1182 name, err = err_msg.split('|')
1178 1183 if 'SchainWarning' in err:
1179 1184 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
1180 1185 elif 'SchainError' in err:
1181 1186 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
1182 1187 else:
1183 1188 log.error(err, name)
1184 else:
1189 else:
1185 1190 name, err = self.name, err_msg
1186
1191
1187 1192 time.sleep(2)
1188 1193
1189 1194 for conf in self.procUnitConfObjDict.values():
1190 1195 for confop in conf.opConfObjList:
1191 1196 if confop.type == 'external':
1192 1197 confop.opObj.terminate()
1193 1198 conf.procUnitObj.terminate()
1194
1199
1195 1200 ctx.term()
1196 1201
1197 1202 message = ''.join(err)
1198 1203
1199 1204 if err_msg:
1200 1205 subject = 'SChain v%s: Error running %s\n' % (
1201 1206 schainpy.__version__, self.name)
1202 1207
1203 1208 subtitle = 'Hostname: %s\n' % socket.gethostbyname(
1204 1209 socket.gethostname())
1205 1210 subtitle += 'Working directory: %s\n' % os.path.abspath('./')
1206 1211 subtitle += 'Configuration file: %s\n' % self.filename
1207 1212 subtitle += 'Time: %s\n' % str(datetime.datetime.now())
1208 1213
1209 1214 readUnitConfObj = self.getReadUnitObj()
1210 1215 if readUnitConfObj:
1211 1216 subtitle += '\nInput parameters:\n'
1212 1217 subtitle += '[Data path = %s]\n' % readUnitConfObj.path
1213 1218 subtitle += '[Data type = %s]\n' % readUnitConfObj.datatype
1214 1219 subtitle += '[Start date = %s]\n' % readUnitConfObj.startDate
1215 1220 subtitle += '[End date = %s]\n' % readUnitConfObj.endDate
1216 1221 subtitle += '[Start time = %s]\n' % readUnitConfObj.startTime
1217 1222 subtitle += '[End time = %s]\n' % readUnitConfObj.endTime
1218 1223
1219 1224 a = Alarm(
1220 modes=self.alarm,
1225 modes=self.alarm,
1221 1226 email=self.email,
1222 1227 message=message,
1223 1228 subject=subject,
1224 1229 subtitle=subtitle,
1225 1230 filename=self.filename
1226 1231 )
1227 1232
1228 1233 a.start()
1229 1234
1230 1235 def isPaused(self):
1231 1236 return 0
1232 1237
1233 1238 def isStopped(self):
1234 1239 return 0
1235 1240
1236 1241 def runController(self):
1237 1242 '''
1238 1243 returns 0 when this process has been stopped, 1 otherwise
1239 1244 '''
1240 1245
1241 1246 if self.isPaused():
1242 1247 print('Process suspended')
1243 1248
1244 1249 while True:
1245 1250 time.sleep(0.1)
1246 1251
1247 1252 if not self.isPaused():
1248 1253 break
1249 1254
1250 1255 if self.isStopped():
1251 1256 break
1252 1257
1253 1258 print('Process reinitialized')
1254 1259
1255 1260 if self.isStopped():
1256 1261 print('Process stopped')
1257 1262 return 0
1258 1263
1259 1264 return 1
1260 1265
1261 1266 def setFilename(self, filename):
1262 1267
1263 1268 self.filename = filename
1264 1269
1265 1270 def setProxy(self):
1266 1271
1267 1272 if not os.path.exists('/tmp/schain'):
1268 1273 os.mkdir('/tmp/schain')
1269
1274
1270 1275 self.ctx = zmq.Context()
1271 1276 xpub = self.ctx.socket(zmq.XPUB)
1272 1277 xpub.bind('ipc:///tmp/schain/{}_pub'.format(self.id))
1273 1278 xsub = self.ctx.socket(zmq.XSUB)
1274 1279 xsub.bind('ipc:///tmp/schain/{}_sub'.format(self.id))
1275 1280 self.monitor()
1276 1281 try:
1277 1282 zmq.proxy(xpub, xsub)
1278 1283 except zmq.ContextTerminated:
1279 1284 xpub.close()
1280 1285 xsub.close()
1281 1286
1282 1287 def run(self):
1283 1288
1284 1289 log.success('Starting {}: {}'.format(self.name, self.id), tag='')
1285 self.start_time = time.time()
1286 self.createObjects()
1287 self.setProxy()
1290 self.start_time = time.time()
1291 self.createObjects()
1292 self.setProxy()
1288 1293 log.success('{} Done (Time: {}s)'.format(
1289 1294 self.name,
1290 1295 time.time()-self.start_time), '')
@@ -1,1372 +1,1372
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import copy
8 8 import numpy
9 9 import datetime
10 10 import json
11 11
12 12 from schainpy.utils import log
13 13 from .jroheaderIO import SystemHeader, RadarControllerHeader
14 14
15 15
16 16 def getNumpyDtype(dataTypeCode):
17 17
18 18 if dataTypeCode == 0:
19 19 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
20 20 elif dataTypeCode == 1:
21 21 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
22 22 elif dataTypeCode == 2:
23 23 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
24 24 elif dataTypeCode == 3:
25 25 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
26 26 elif dataTypeCode == 4:
27 27 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
28 28 elif dataTypeCode == 5:
29 29 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
30 30 else:
31 31 raise ValueError('dataTypeCode was not defined')
32 32
33 33 return numpyDtype
34 34
35 35
36 36 def getDataTypeCode(numpyDtype):
37 37
38 38 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
39 39 datatype = 0
40 40 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
41 41 datatype = 1
42 42 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
43 43 datatype = 2
44 44 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
45 45 datatype = 3
46 46 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
47 47 datatype = 4
48 48 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
49 49 datatype = 5
50 50 else:
51 51 datatype = None
52 52
53 53 return datatype
54 54
55 55
56 56 def hildebrand_sekhon(data, navg):
57 57 """
58 58 This method is for the objective determination of the noise level in Doppler spectra. This
59 59 implementation technique is based on the fact that the standard deviation of the spectral
60 60 densities is equal to the mean spectral density for white Gaussian noise
61 61
62 62 Inputs:
63 63 Data : heights
64 64 navg : numbers of averages
65 65
66 66 Return:
67 67 mean : noise's level
68 68 """
69 69
70 70 sortdata = numpy.sort(data, axis=None)
71 71 lenOfData = len(sortdata)
72 72 nums_min = lenOfData*0.2
73 73
74 74 if nums_min <= 5:
75 75
76 76 nums_min = 5
77 77
78 78 sump = 0.
79 79 sumq = 0.
80 80
81 81 j = 0
82 82 cont = 1
83 83
84 84 while((cont == 1)and(j < lenOfData)):
85 85
86 86 sump += sortdata[j]
87 87 sumq += sortdata[j]**2
88 88
89 89 if j > nums_min:
90 90 rtest = float(j)/(j-1) + 1.0/navg
91 91 if ((sumq*j) > (rtest*sump**2)):
92 92 j = j - 1
93 93 sump = sump - sortdata[j]
94 94 sumq = sumq - sortdata[j]**2
95 95 cont = 0
96 96
97 97 j += 1
98 98
99 99 lnoise = sump / j
100 100
101 101 return lnoise
102 102
103 103
104 104 class Beam:
105 105
106 106 def __init__(self):
107 107 self.codeList = []
108 108 self.azimuthList = []
109 109 self.zenithList = []
110 110
111 111
112 112 class GenericData(object):
113 113
114 114 flagNoData = True
115 115
116 116 def copy(self, inputObj=None):
117
117
118 118 if inputObj == None:
119 119 return copy.deepcopy(self)
120 120
121 121 for key in list(inputObj.__dict__.keys()):
122 122
123 123 attribute = inputObj.__dict__[key]
124 124
125 125 # If this attribute is a tuple or list
126 126 if type(inputObj.__dict__[key]) in (tuple, list):
127 127 self.__dict__[key] = attribute[:]
128 128 continue
129 129
130 130 # If this attribute is another object or instance
131 131 if hasattr(attribute, '__dict__'):
132 132 self.__dict__[key] = attribute.copy()
133 133 continue
134 134
135 135 self.__dict__[key] = inputObj.__dict__[key]
136 136
137 137 def deepcopy(self):
138 138
139 139 return copy.deepcopy(self)
140 140
141 141 def isEmpty(self):
142 142
143 143 return self.flagNoData
144 144
145 145
146 146 class JROData(GenericData):
147 147
148 148 # m_BasicHeader = BasicHeader()
149 149 # m_ProcessingHeader = ProcessingHeader()
150 150
151 151 systemHeaderObj = SystemHeader()
152 152 radarControllerHeaderObj = RadarControllerHeader()
153 153 # data = None
154 154 type = None
155 155 datatype = None # dtype but in string
156 156 # dtype = None
157 157 # nChannels = None
158 158 # nHeights = None
159 159 nProfiles = None
160 160 heightList = None
161 161 channelList = None
162 162 flagDiscontinuousBlock = False
163 163 useLocalTime = False
164 164 utctime = None
165 165 timeZone = None
166 166 dstFlag = None
167 167 errorCount = None
168 168 blocksize = None
169 169 # nCode = None
170 170 # nBaud = None
171 171 # code = None
172 172 flagDecodeData = False # asumo q la data no esta decodificada
173 173 flagDeflipData = False # asumo q la data no esta sin flip
174 174 flagShiftFFT = False
175 175 # ippSeconds = None
176 176 # timeInterval = None
177 177 nCohInt = None
178 178 # noise = None
179 179 windowOfFilter = 1
180 180 # Speed of ligth
181 181 C = 3e8
182 182 frequency = 49.92e6
183 183 realtime = False
184 184 beacon_heiIndexList = None
185 185 last_block = None
186 186 blocknow = None
187 187 azimuth = None
188 188 zenith = None
189 189 beam = Beam()
190 190 profileIndex = None
191 191 error = None
192 192 data = None
193 193 nmodes = None
194 194
195 195 def __str__(self):
196 196
197 197 return '{} - {}'.format(self.type, self.getDatatime())
198 198
199 199 def getNoise(self):
200 200
201 201 raise NotImplementedError
202 202
203 203 def getNChannels(self):
204 204
205 205 return len(self.channelList)
206 206
207 207 def getChannelIndexList(self):
208 208
209 209 return list(range(self.nChannels))
210 210
211 211 def getNHeights(self):
212 212
213 213 return len(self.heightList)
214 214
215 215 def getHeiRange(self, extrapoints=0):
216 216
217 217 heis = self.heightList
218 218 # deltah = self.heightList[1] - self.heightList[0]
219 219 #
220 220 # heis.append(self.heightList[-1])
221 221
222 222 return heis
223 223
224 224 def getDeltaH(self):
225 225
226 226 delta = self.heightList[1] - self.heightList[0]
227 227
228 228 return delta
229 229
230 230 def getltctime(self):
231 231
232 232 if self.useLocalTime:
233 233 return self.utctime - self.timeZone * 60
234 234
235 235 return self.utctime
236 236
237 237 def getDatatime(self):
238 238
239 239 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
240 240 return datatimeValue
241 241
242 242 def getTimeRange(self):
243 243
244 244 datatime = []
245 245
246 246 datatime.append(self.ltctime)
247 247 datatime.append(self.ltctime + self.timeInterval + 1)
248 248
249 249 datatime = numpy.array(datatime)
250 250
251 251 return datatime
252 252
253 253 def getFmaxTimeResponse(self):
254 254
255 255 period = (10**-6) * self.getDeltaH() / (0.15)
256 256
257 257 PRF = 1. / (period * self.nCohInt)
258 258
259 259 fmax = PRF
260 260
261 261 return fmax
262 262
263 263 def getFmax(self):
264 264 PRF = 1. / (self.ippSeconds * self.nCohInt)
265 265
266 266 fmax = PRF
267 267 return fmax
268 268
269 269 def getVmax(self):
270 270
271 271 _lambda = self.C / self.frequency
272 272
273 273 vmax = self.getFmax() * _lambda / 2
274 274
275 275 return vmax
276 276
277 277 def get_ippSeconds(self):
278 278 '''
279 279 '''
280 280 return self.radarControllerHeaderObj.ippSeconds
281 281
282 282 def set_ippSeconds(self, ippSeconds):
283 283 '''
284 284 '''
285 285
286 286 self.radarControllerHeaderObj.ippSeconds = ippSeconds
287 287
288 288 return
289 289
290 290 def get_dtype(self):
291 291 '''
292 292 '''
293 293 return getNumpyDtype(self.datatype)
294 294
295 295 def set_dtype(self, numpyDtype):
296 296 '''
297 297 '''
298 298
299 299 self.datatype = getDataTypeCode(numpyDtype)
300 300
301 301 def get_code(self):
302 302 '''
303 303 '''
304 304 return self.radarControllerHeaderObj.code
305 305
306 306 def set_code(self, code):
307 307 '''
308 308 '''
309 309 self.radarControllerHeaderObj.code = code
310 310
311 311 return
312 312
313 313 def get_ncode(self):
314 314 '''
315 315 '''
316 316 return self.radarControllerHeaderObj.nCode
317 317
318 318 def set_ncode(self, nCode):
319 319 '''
320 320 '''
321 321 self.radarControllerHeaderObj.nCode = nCode
322 322
323 323 return
324 324
325 325 def get_nbaud(self):
326 326 '''
327 327 '''
328 328 return self.radarControllerHeaderObj.nBaud
329 329
330 330 def set_nbaud(self, nBaud):
331 331 '''
332 332 '''
333 333 self.radarControllerHeaderObj.nBaud = nBaud
334 334
335 335 return
336 336
337 337 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
338 338 channelIndexList = property(
339 339 getChannelIndexList, "I'm the 'channelIndexList' property.")
340 340 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
341 341 #noise = property(getNoise, "I'm the 'nHeights' property.")
342 342 datatime = property(getDatatime, "I'm the 'datatime' property")
343 343 ltctime = property(getltctime, "I'm the 'ltctime' property")
344 344 ippSeconds = property(get_ippSeconds, set_ippSeconds)
345 345 dtype = property(get_dtype, set_dtype)
346 346 # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
347 347 code = property(get_code, set_code)
348 348 nCode = property(get_ncode, set_ncode)
349 349 nBaud = property(get_nbaud, set_nbaud)
350 350
351 351
352 352 class Voltage(JROData):
353 353
354 354 # data es un numpy array de 2 dmensiones (canales, alturas)
355 355 data = None
356 356
357 357 def __init__(self):
358 358 '''
359 359 Constructor
360 360 '''
361 361
362 362 self.useLocalTime = True
363 363 self.radarControllerHeaderObj = RadarControllerHeader()
364 364 self.systemHeaderObj = SystemHeader()
365 365 self.type = "Voltage"
366 366 self.data = None
367 367 # self.dtype = None
368 368 # self.nChannels = 0
369 369 # self.nHeights = 0
370 370 self.nProfiles = None
371 371 self.heightList = None
372 372 self.channelList = None
373 373 # self.channelIndexList = None
374 374 self.flagNoData = True
375 375 self.flagDiscontinuousBlock = False
376 376 self.utctime = None
377 377 self.timeZone = None
378 378 self.dstFlag = None
379 379 self.errorCount = None
380 380 self.nCohInt = None
381 381 self.blocksize = None
382 382 self.flagDecodeData = False # asumo q la data no esta decodificada
383 383 self.flagDeflipData = False # asumo q la data no esta sin flip
384 384 self.flagShiftFFT = False
385 385 self.flagDataAsBlock = False # Asumo que la data es leida perfil a perfil
386 386 self.profileIndex = 0
387 387
388 388 def getNoisebyHildebrand(self, channel=None):
389 389 """
390 390 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
391 391
392 392 Return:
393 393 noiselevel
394 394 """
395 395
396 396 if channel != None:
397 397 data = self.data[channel]
398 398 nChannels = 1
399 399 else:
400 400 data = self.data
401 401 nChannels = self.nChannels
402 402
403 403 noise = numpy.zeros(nChannels)
404 404 power = data * numpy.conjugate(data)
405 405
406 406 for thisChannel in range(nChannels):
407 407 if nChannels == 1:
408 408 daux = power[:].real
409 409 else:
410 410 daux = power[thisChannel, :].real
411 411 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
412 412
413 413 return noise
414 414
415 415 def getNoise(self, type=1, channel=None):
416 416
417 417 if type == 1:
418 418 noise = self.getNoisebyHildebrand(channel)
419 419
420 420 return noise
421 421
422 422 def getPower(self, channel=None):
423 423
424 424 if channel != None:
425 425 data = self.data[channel]
426 426 else:
427 427 data = self.data
428 428
429 429 power = data * numpy.conjugate(data)
430 430 powerdB = 10 * numpy.log10(power.real)
431 431 powerdB = numpy.squeeze(powerdB)
432 432
433 433 return powerdB
434 434
435 435 def getTimeInterval(self):
436 436
437 437 timeInterval = self.ippSeconds * self.nCohInt
438 438
439 439 return timeInterval
440 440
441 441 noise = property(getNoise, "I'm the 'nHeights' property.")
442 442 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
443 443
444 444
445 445 class Spectra(JROData):
446 446
447 447 # data spc es un numpy array de 2 dmensiones (canales, perfiles, alturas)
448 448 data_spc = None
449 449 # data cspc es un numpy array de 2 dmensiones (canales, pares, alturas)
450 450 data_cspc = None
451 451 # data dc es un numpy array de 2 dmensiones (canales, alturas)
452 452 data_dc = None
453 453 # data power
454 454 data_pwr = None
455 455 nFFTPoints = None
456 456 # nPairs = None
457 457 pairsList = None
458 458 nIncohInt = None
459 459 wavelength = None # Necesario para cacular el rango de velocidad desde la frecuencia
460 460 nCohInt = None # se requiere para determinar el valor de timeInterval
461 461 ippFactor = None
462 462 profileIndex = 0
463 463 plotting = "spectra"
464 464
465 465 def __init__(self):
466 466 '''
467 467 Constructor
468 468 '''
469 469
470 470 self.useLocalTime = True
471 471 self.radarControllerHeaderObj = RadarControllerHeader()
472 472 self.systemHeaderObj = SystemHeader()
473 473 self.type = "Spectra"
474 474 # self.data = None
475 475 # self.dtype = None
476 476 # self.nChannels = 0
477 477 # self.nHeights = 0
478 478 self.nProfiles = None
479 479 self.heightList = None
480 480 self.channelList = None
481 481 # self.channelIndexList = None
482 482 self.pairsList = None
483 483 self.flagNoData = True
484 484 self.flagDiscontinuousBlock = False
485 485 self.utctime = None
486 486 self.nCohInt = None
487 487 self.nIncohInt = None
488 488 self.blocksize = None
489 489 self.nFFTPoints = None
490 490 self.wavelength = None
491 491 self.flagDecodeData = False # asumo q la data no esta decodificada
492 492 self.flagDeflipData = False # asumo q la data no esta sin flip
493 493 self.flagShiftFFT = False
494 494 self.ippFactor = 1
495 495 #self.noise = None
496 496 self.beacon_heiIndexList = []
497 497 self.noise_estimation = None
498 498
499 499 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
500 500 """
501 501 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
502 502
503 503 Return:
504 504 noiselevel
505 505 """
506 506
507 507 noise = numpy.zeros(self.nChannels)
508 508
509 509 for channel in range(self.nChannels):
510 510 daux = self.data_spc[channel,
511 511 xmin_index:xmax_index, ymin_index:ymax_index]
512 512 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
513 513
514 514 return noise
515 515
516 516 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
517 517
518 518 if self.noise_estimation is not None:
519 519 # this was estimated by getNoise Operation defined in jroproc_spectra.py
520 520 return self.noise_estimation
521 521 else:
522 522 noise = self.getNoisebyHildebrand(
523 523 xmin_index, xmax_index, ymin_index, ymax_index)
524 524 return noise
525 525
526 526 def getFreqRangeTimeResponse(self, extrapoints=0):
527 527
528 528 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
529 529 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
530 530
531 531 return freqrange
532 532
533 533 def getAcfRange(self, extrapoints=0):
534 534
535 535 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
536 536 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
537 537
538 538 return freqrange
539 539
540 540 def getFreqRange(self, extrapoints=0):
541 541
542 542 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
543 543 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
544 544
545 545 return freqrange
546 546
547 547 def getVelRange(self, extrapoints=0):
548 548
549 549 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
550 550 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
551
551
552 552 if self.nmodes:
553 553 return velrange/self.nmodes
554 554 else:
555 555 return velrange
556 556
557 557 def getNPairs(self):
558 558
559 559 return len(self.pairsList)
560 560
561 561 def getPairsIndexList(self):
562 562
563 563 return list(range(self.nPairs))
564 564
565 565 def getNormFactor(self):
566 566
567 567 pwcode = 1
568 568
569 569 if self.flagDecodeData:
570 570 pwcode = numpy.sum(self.code[0]**2)
571 571 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
572 572 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
573 573
574 574 return normFactor
575 575
576 576 def getFlagCspc(self):
577 577
578 578 if self.data_cspc is None:
579 579 return True
580 580
581 581 return False
582 582
583 583 def getFlagDc(self):
584 584
585 585 if self.data_dc is None:
586 586 return True
587 587
588 588 return False
589 589
590 590 def getTimeInterval(self):
591 591
592 592 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
593 593 if self.nmodes:
594 594 return self.nmodes*timeInterval
595 595 else:
596 596 return timeInterval
597 597
598 598 def getPower(self):
599 599
600 600 factor = self.normFactor
601 601 z = self.data_spc / factor
602 602 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
603 603 avg = numpy.average(z, axis=1)
604 604
605 605 return 10 * numpy.log10(avg)
606 606
607 607 def getCoherence(self, pairsList=None, phase=False):
608 608
609 609 z = []
610 610 if pairsList is None:
611 611 pairsIndexList = self.pairsIndexList
612 612 else:
613 613 pairsIndexList = []
614 614 for pair in pairsList:
615 615 if pair not in self.pairsList:
616 616 raise ValueError("Pair %s is not in dataOut.pairsList" % (
617 617 pair))
618 618 pairsIndexList.append(self.pairsList.index(pair))
619 619 for i in range(len(pairsIndexList)):
620 620 pair = self.pairsList[pairsIndexList[i]]
621 621 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
622 622 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
623 623 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
624 624 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
625 625 if phase:
626 626 data = numpy.arctan2(avgcoherenceComplex.imag,
627 627 avgcoherenceComplex.real) * 180 / numpy.pi
628 628 else:
629 629 data = numpy.abs(avgcoherenceComplex)
630 630
631 631 z.append(data)
632 632
633 633 return numpy.array(z)
634 634
635 635 def setValue(self, value):
636 636
637 637 print("This property should not be initialized")
638 638
639 639 return
640 640
641 641 nPairs = property(getNPairs, setValue, "I'm the 'nPairs' property.")
642 642 pairsIndexList = property(
643 643 getPairsIndexList, setValue, "I'm the 'pairsIndexList' property.")
644 644 normFactor = property(getNormFactor, setValue,
645 645 "I'm the 'normFactor' property.")
646 646 flag_cspc = property(getFlagCspc, setValue)
647 647 flag_dc = property(getFlagDc, setValue)
648 648 noise = property(getNoise, setValue, "I'm the 'noise' property.")
649 649 timeInterval = property(getTimeInterval, setValue,
650 650 "I'm the 'timeInterval' property")
651 651
652 652
653 653 class SpectraHeis(Spectra):
654 654
655 655 data_spc = None
656 656 data_cspc = None
657 657 data_dc = None
658 658 nFFTPoints = None
659 659 # nPairs = None
660 660 pairsList = None
661 661 nCohInt = None
662 662 nIncohInt = None
663 663
664 664 def __init__(self):
665 665
666 666 self.radarControllerHeaderObj = RadarControllerHeader()
667 667
668 668 self.systemHeaderObj = SystemHeader()
669 669
670 670 self.type = "SpectraHeis"
671 671
672 672 # self.dtype = None
673 673
674 674 # self.nChannels = 0
675 675
676 676 # self.nHeights = 0
677 677
678 678 self.nProfiles = None
679 679
680 680 self.heightList = None
681 681
682 682 self.channelList = None
683 683
684 684 # self.channelIndexList = None
685 685
686 686 self.flagNoData = True
687 687
688 688 self.flagDiscontinuousBlock = False
689 689
690 690 # self.nPairs = 0
691 691
692 692 self.utctime = None
693 693
694 694 self.blocksize = None
695 695
696 696 self.profileIndex = 0
697 697
698 698 self.nCohInt = 1
699 699
700 700 self.nIncohInt = 1
701 701
702 702 def getNormFactor(self):
703 703 pwcode = 1
704 704 if self.flagDecodeData:
705 705 pwcode = numpy.sum(self.code[0]**2)
706 706
707 707 normFactor = self.nIncohInt * self.nCohInt * pwcode
708 708
709 709 return normFactor
710 710
711 711 def getTimeInterval(self):
712 712
713 713 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
714 714
715 715 return timeInterval
716 716
717 717 normFactor = property(getNormFactor, doc="I'm the 'normFactor' property.")
718 718 timeInterval = property(getTimeInterval, doc="I'm the 'timeInterval' property")
719 719
720 720
721 721 class Fits(JROData):
722 722
723 723 heightList = None
724 724 channelList = None
725 725 flagNoData = True
726 726 flagDiscontinuousBlock = False
727 727 useLocalTime = False
728 728 utctime = None
729 729 timeZone = None
730 730 # ippSeconds = None
731 731 # timeInterval = None
732 732 nCohInt = None
733 733 nIncohInt = None
734 734 noise = None
735 735 windowOfFilter = 1
736 736 # Speed of ligth
737 737 C = 3e8
738 738 frequency = 49.92e6
739 739 realtime = False
740 740
741 741 def __init__(self):
742 742
743 743 self.type = "Fits"
744 744
745 745 self.nProfiles = None
746 746
747 747 self.heightList = None
748 748
749 749 self.channelList = None
750 750
751 751 # self.channelIndexList = None
752 752
753 753 self.flagNoData = True
754 754
755 755 self.utctime = None
756 756
757 757 self.nCohInt = 1
758 758
759 759 self.nIncohInt = 1
760 760
761 761 self.useLocalTime = True
762 762
763 763 self.profileIndex = 0
764 764
765 765 # self.utctime = None
766 766 # self.timeZone = None
767 767 # self.ltctime = None
768 768 # self.timeInterval = None
769 769 # self.header = None
770 770 # self.data_header = None
771 771 # self.data = None
772 772 # self.datatime = None
773 773 # self.flagNoData = False
774 774 # self.expName = ''
775 775 # self.nChannels = None
776 776 # self.nSamples = None
777 777 # self.dataBlocksPerFile = None
778 778 # self.comments = ''
779 779 #
780 780
781 781 def getltctime(self):
782 782
783 783 if self.useLocalTime:
784 784 return self.utctime - self.timeZone * 60
785 785
786 786 return self.utctime
787 787
788 788 def getDatatime(self):
789 789
790 790 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
791 791 return datatime
792 792
793 793 def getTimeRange(self):
794 794
795 795 datatime = []
796 796
797 797 datatime.append(self.ltctime)
798 798 datatime.append(self.ltctime + self.timeInterval)
799 799
800 800 datatime = numpy.array(datatime)
801 801
802 802 return datatime
803 803
804 804 def getHeiRange(self):
805 805
806 806 heis = self.heightList
807 807
808 808 return heis
809 809
810 810 def getNHeights(self):
811 811
812 812 return len(self.heightList)
813 813
814 814 def getNChannels(self):
815 815
816 816 return len(self.channelList)
817 817
818 818 def getChannelIndexList(self):
819 819
820 820 return list(range(self.nChannels))
821 821
822 822 def getNoise(self, type=1):
823 823
824 824 #noise = numpy.zeros(self.nChannels)
825 825
826 826 if type == 1:
827 827 noise = self.getNoisebyHildebrand()
828 828
829 829 if type == 2:
830 830 noise = self.getNoisebySort()
831 831
832 832 if type == 3:
833 833 noise = self.getNoisebyWindow()
834 834
835 835 return noise
836 836
837 837 def getTimeInterval(self):
838 838
839 839 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
840 840
841 841 return timeInterval
842 842
843 843 def get_ippSeconds(self):
844 844 '''
845 845 '''
846 846 return self.ipp_sec
847 847
848 848
849 849 datatime = property(getDatatime, doc="I'm the 'datatime' property")
850 850 nHeights = property(getNHeights, doc="I'm the 'nHeights' property.")
851 851 nChannels = property(getNChannels, doc="I'm the 'nChannels' property.")
852 852 channelIndexList = property(
853 853 getChannelIndexList, doc="I'm the 'channelIndexList' property.")
854 854 noise = property(getNoise, doc="I'm the 'noise' property.")
855 855
856 856 ltctime = property(getltctime, doc="I'm the 'ltctime' property")
857 857 timeInterval = property(getTimeInterval, doc="I'm the 'timeInterval' property")
858 858 ippSeconds = property(get_ippSeconds)
859 859
860 860 class Correlation(JROData):
861 861
862 862 noise = None
863 863 SNR = None
864 864 #--------------------------------------------------
865 865 mode = None
866 866 split = False
867 867 data_cf = None
868 868 lags = None
869 869 lagRange = None
870 870 pairsList = None
871 871 normFactor = None
872 872 #--------------------------------------------------
873 873 # calculateVelocity = None
874 874 nLags = None
875 875 nPairs = None
876 876 nAvg = None
877 877
878 878 def __init__(self):
879 879 '''
880 880 Constructor
881 881 '''
882 882 self.radarControllerHeaderObj = RadarControllerHeader()
883 883
884 884 self.systemHeaderObj = SystemHeader()
885 885
886 886 self.type = "Correlation"
887 887
888 888 self.data = None
889 889
890 890 self.dtype = None
891 891
892 892 self.nProfiles = None
893 893
894 894 self.heightList = None
895 895
896 896 self.channelList = None
897 897
898 898 self.flagNoData = True
899 899
900 900 self.flagDiscontinuousBlock = False
901 901
902 902 self.utctime = None
903 903
904 904 self.timeZone = None
905 905
906 906 self.dstFlag = None
907 907
908 908 self.errorCount = None
909 909
910 910 self.blocksize = None
911 911
912 912 self.flagDecodeData = False # assume the data has not been decoded
913 913
914 914 self.flagDeflipData = False # assume the data has not been de-flipped
915 915
916 916 self.pairsList = None
917 917
918 918 self.nPoints = None
919 919
920 920 def getPairsList(self):
921 921
922 922 return self.pairsList
923 923
924 924 def getNoise(self, mode=2):
925 925
926 926 indR = numpy.where(self.lagR == 0)[0][0]
927 927 indT = numpy.where(self.lagT == 0)[0][0]
928 928
929 929 jspectra0 = self.data_corr[:, :, indR, :]
930 930 jspectra = copy.copy(jspectra0)
931 931
932 932 num_chan = jspectra.shape[0]
933 933 num_hei = jspectra.shape[2]
934 934
935 935 freq_dc = jspectra.shape[1] // 2
936 936 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
937 937
938 938 if ind_vel[0] < 0:
939 939 ind_vel[list(range(0, 1))] = ind_vel[list(
940 940 range(0, 1))] + self.num_prof
941 941
942 942 if mode == 1:
943 943 jspectra[:, freq_dc, :] = (
944 944 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
945 945
946 946 if mode == 2:
947 947
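# Interpolate the DC bin from the four neighbouring Doppler bins (relative
# velocities -2, -1, 1, 2) with a cubic polynomial fit: the first row of the
# inverted Vandermonde matrix gives the fitted value at zero velocity, and
# the excess of the measured DC over this fit is taken as the noise.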
948 948 vel = numpy.array([-2, -1, 1, 2])
949 949 xx = numpy.zeros([4, 4])
950 950
951 951 for fil in range(4):
952 952 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
953 953
954 954 xx_inv = numpy.linalg.inv(xx)
955 955 xx_aux = xx_inv[0, :]
956 956
957 957 for ich in range(num_chan):
958 958 yy = jspectra[ich, ind_vel, :]
959 959 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
960 960
961 961 junkid = jspectra[ich, freq_dc, :] <= 0
962 962 cjunkid = sum(junkid)
963 963
964 964 if cjunkid.any():
965 965 jspectra[ich, freq_dc, junkid.nonzero()] = (
966 966 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
967 967
968 968 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
969 969
970 970 return noise
971 971
972 972 def getTimeInterval(self):
973 973
974 974 timeInterval = self.ippSeconds * self.nCohInt * self.nProfiles
975 975
976 976 return timeInterval
977 977
978 978 def splitFunctions(self):
979 979
980 980 pairsList = self.pairsList
981 981 ccf_pairs = []
982 982 acf_pairs = []
983 983 ccf_ind = []
984 984 acf_ind = []
985 985 for l in range(len(pairsList)):
986 986 chan0 = pairsList[l][0]
987 987 chan1 = pairsList[l][1]
988 988
989 989 # Obtaining the autocorrelation pairs
990 990 if chan0 == chan1:
991 991 acf_pairs.append(chan0)
992 992 acf_ind.append(l)
993 993 else:
994 994 ccf_pairs.append(pairsList[l])
995 995 ccf_ind.append(l)
996 996
997 997 data_acf = self.data_cf[acf_ind]
998 998 data_ccf = self.data_cf[ccf_ind]
999 999
1000 1000 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
1001 1001
1002 1002 def getNormFactor(self):
1003 1003 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
1004 1004 acf_pairs = numpy.array(acf_pairs)
1005 1005 normFactor = numpy.zeros((self.nPairs, self.nHeights))
1006 1006
1007 1007 for p in range(self.nPairs):
1008 1008 pair = self.pairsList[p]
1009 1009
1010 1010 ch0 = pair[0]
1011 1011 ch1 = pair[1]
1012 1012
1013 1013 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
1014 1014 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
1015 1015 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
1016 1016
1017 1017 return normFactor
1018 1018
1019 1019 timeInterval = property(getTimeInterval, doc="I'm the 'timeInterval' property")
1020 1020 normFactor = property(getNormFactor, doc="I'm the 'normFactor' property")
1021 1021
1022 1022
1023 1023 class Parameters(Spectra):
1024 1024
1025 1025 experimentInfo = None # Information about the experiment
1026 1026 # Information from previous data
1027 1027 inputUnit = None # Type of data to be processed
1028 1028 operation = None # Type of operation to parametrize
1029 1029 # normFactor = None #Normalization Factor
1030 1030 groupList = None # List of Pairs, Groups, etc
1031 1031 # Parameters
1032 1032 data_param = None # Parameters obtained
1033 1033 data_pre = None # Data Pre Parametrization
1034 1034 data_SNR = None # Signal to Noise Ratio
1035 1035 # heightRange = None #Heights
1036 1036 abscissaList = None # Abscissa, can be velocities, lags or time
1037 1037 # noise = None #Noise Potency
1038 1038 utctimeInit = None # Initial UTC time
1039 1039 paramInterval = None # Time interval to calculate Parameters in seconds
1040 1040 useLocalTime = True
1041 1041 # Fitting
1042 1042 data_error = None # Error of the estimation
1043 1043 constants = None
1044 1044 library = None
1045 1045 # Output signal
1046 1046 outputInterval = None # Time interval to calculate output signal in seconds
1047 1047 data_output = None # Out signal
1048 1048 nAvg = None
1049 1049 noise_estimation = None
1050 1050 GauSPC = None # Fit gaussian SPC
1051 1051
1052 1052 def __init__(self):
1053 1053 '''
1054 1054 Constructor
1055 1055 '''
1056 1056 self.radarControllerHeaderObj = RadarControllerHeader()
1057 1057
1058 1058 self.systemHeaderObj = SystemHeader()
1059 1059
1060 1060 self.type = "Parameters"
1061 1061
1062 1062 def getTimeRange1(self, interval):
1063 1063
1064 1064 datatime = []
1065 1065
1066 1066 if self.useLocalTime:
1067 1067 time1 = self.utctimeInit - self.timeZone * 60
1068 1068 else:
1069 1069 time1 = self.utctimeInit
1070 1070
1071 1071 datatime.append(time1)
1072 1072 datatime.append(time1 + interval)
1073 1073 datatime = numpy.array(datatime)
1074 1074
1075 1075 return datatime
1076 1076
1077 1077 def getTimeInterval(self):
1078 1078
1079 1079 if hasattr(self, 'timeInterval1'):
1080 1080 return self.timeInterval1
1081 1081 else:
1082 1082 return self.paramInterval
1083 1083
1084 1084 def setValue(self, value):
1085 1085
1086 1086 print("This property should not be initialized")
1087 1087
1088 1088 return
1089 1089
1090 1090 def getNoise(self):
1091 1091
1092 1092 return self.spc_noise
1093 1093
1094 1094 timeInterval = property(getTimeInterval)
1095 1095 noise = property(getNoise, setValue, "I'm the 'Noise' property.")
1096 1096
1097 1097
1098 1098 class PlotterData(object):
1099 1099 '''
1100 1100 Object to hold data to be plotted
1101 1101 '''
1102 1102
1103 1103 MAXNUMX = 100
1104 1104 MAXNUMY = 100
1105 1105
1106 1106 def __init__(self, code, throttle_value, exp_code, buffering=True, snr=False):
1107
1107
1108 1108 self.key = code
1109 1109 self.throttle = throttle_value
1110 1110 self.exp_code = exp_code
1111 1111 self.buffering = buffering
1112 1112 self.ready = False
1113 1113 self.localtime = False
1114 1114 self.data = {}
1115 1115 self.meta = {}
1116 1116 self.__times = []
1117 1117 self.__heights = []
1118 1118
1119 1119 if 'snr' in code:
1120 1120 self.plottypes = ['snr']
1121 1121 elif code == 'spc':
1122 1122 self.plottypes = ['spc', 'noise', 'rti']
1123 1123 elif code == 'rti':
1124 1124 self.plottypes = ['noise', 'rti']
1125 1125 else:
1126 1126 self.plottypes = [code]
1127 1127
1128 1128 if 'snr' not in self.plottypes and snr:
1129 1129 self.plottypes.append('snr')
1130 1130
1131 1131 for plot in self.plottypes:
1132 1132 self.data[plot] = {}
1133 1133
1134 1134 def __str__(self):
1135 1135 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
1136 1136 return 'Data[{}][{}]'.format(';'.join(dum), len(self.__times))
1137 1137
1138 1138 def __len__(self):
1139 1139 return len(self.__times)
1140 1140
1141 1141 def __getitem__(self, key):
1142
1142
1143 1143 if key not in self.data:
1144 1144 raise KeyError(log.error('Missing key: {}'.format(key)))
1145 1145 if 'spc' in key or not self.buffering:
1146 1146 ret = self.data[key]
1147 1147 elif 'scope' in key:
1148 1148 ret = numpy.array(self.data[key][float(self.tm)])
1149 1149 else:
1150 1150 ret = numpy.array([self.data[key][x] for x in self.times])
1151 1151 if ret.ndim > 1:
1152 1152 ret = numpy.swapaxes(ret, 0, 1)
1153 1153 return ret
1154 1154
1155 1155 def __contains__(self, key):
1156 1156 return key in self.data
1157 1157
1158 1158 def setup(self):
1159 1159 '''
1160 1160 Configure object
1161 1161 '''
1162 1162
1163 1163 self.type = ''
1164 1164 self.ready = False
1165 1165 self.data = {}
1166 1166 self.__times = []
1167 1167 self.__heights = []
1168 1168 self.__all_heights = set()
1169 1169 for plot in self.plottypes:
1170 1170 if 'snr' in plot:
1171 1171 plot = 'snr'
1172 1172 elif 'spc_moments' == plot:
1173 1173 plot = 'moments'
1174 1174 self.data[plot] = {}
1175
1175
1176 1176 if 'spc' in self.data or 'rti' in self.data or 'cspc' in self.data or 'moments' in self.data:
1177 1177 self.data['noise'] = {}
1178 1178 self.data['rti'] = {}
1179 1179 if 'noise' not in self.plottypes:
1180 1180 self.plottypes.append('noise')
1181 1181 if 'rti' not in self.plottypes:
1182 1182 self.plottypes.append('rti')
1183
1183
1184 1184 def shape(self, key):
1185 1185 '''
1186 1186 Get the shape of the one-element data for the given key
1187 1187 '''
1188 1188
1189 1189 if len(self.data[key]):
1190 1190 if 'spc' in key or not self.buffering:
1191 1191 return self.data[key].shape
1192 1192 return self.data[key][self.__times[0]].shape
1193 1193 return (0,)
1194 1194
1195 1195 def update(self, dataOut, tm):
1196 1196 '''
1197 1197 Update data object with new dataOut
1198 1198 '''
1199
1199
1200 1200 if tm in self.__times:
1201 1201 return
1202 1202 self.profileIndex = dataOut.profileIndex
1203 1203 self.tm = tm
1204 1204 self.type = dataOut.type
1205 1205 self.parameters = getattr(dataOut, 'parameters', [])
1206
1206
1207 1207 if hasattr(dataOut, 'meta'):
1208 1208 self.meta.update(dataOut.meta)
1209
1209
1210 1210 self.pairs = dataOut.pairsList
1211 1211 self.interval = dataOut.getTimeInterval()
1212 1212 self.localtime = dataOut.useLocalTime
1213 1213 if 'spc' in self.plottypes or 'cspc' in self.plottypes or 'spc_moments' in self.plottypes:
1214 1214 self.xrange = (dataOut.getFreqRange(1)/1000.,
1215 1215 dataOut.getAcfRange(1), dataOut.getVelRange(1))
1216 1216 self.factor = dataOut.normFactor
1217 1217 self.__heights.append(dataOut.heightList)
1218 1218 self.__all_heights.update(dataOut.heightList)
1219 1219 self.__times.append(tm)
1220
1220
1221 1221 for plot in self.plottypes:
1222 1222 if plot in ('spc', 'spc_moments'):
1223 1223 z = dataOut.data_spc/dataOut.normFactor
1224 1224 buffer = 10*numpy.log10(z)
1225 1225 if plot == 'cspc':
1226 1226 z = dataOut.data_spc/dataOut.normFactor
1227 1227 buffer = (dataOut.data_spc, dataOut.data_cspc)
1228 1228 if plot == 'noise':
1229 1229 buffer = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
1230 1230 if plot == 'rti':
1231 1231 buffer = dataOut.getPower()
1232 1232 if plot == 'snr_db':
1233 1233 buffer = dataOut.data_SNR
1234 1234 if plot == 'snr':
1235 1235 buffer = 10*numpy.log10(dataOut.data_SNR)
1236 1236 if plot == 'dop':
1237 1237 buffer = dataOut.data_DOP
1238 1238 if plot == 'pow':
1239 1239 buffer = 10*numpy.log10(dataOut.data_POW)
1240 1240 if plot == 'width':
1241 1241 buffer = dataOut.data_WIDTH
1242 1242 if plot == 'coh':
1243 1243 buffer = dataOut.getCoherence()
1244 1244 if plot == 'phase':
1245 1245 buffer = dataOut.getCoherence(phase=True)
1246 1246 if plot == 'output':
1247 1247 buffer = dataOut.data_output
1248 1248 if plot == 'param':
1249 1249 buffer = dataOut.data_param
1250 1250 if plot == 'scope':
1251 1251 buffer = dataOut.data
1252 1252 self.flagDataAsBlock = dataOut.flagDataAsBlock
1253 self.nProfiles = dataOut.nProfiles
1254
1253 self.nProfiles = dataOut.nProfiles
1254
1255 1255 if plot == 'spc':
1256 1256 self.data['spc'] = buffer
1257 1257 elif plot == 'cspc':
1258 1258 self.data['spc'] = buffer[0]
1259 1259 self.data['cspc'] = buffer[1]
1260 1260 elif plot == 'spc_moments':
1261 1261 self.data['spc'] = buffer
1262 1262 self.data['moments'][tm] = dataOut.moments
1263 1263 else:
1264 1264 if self.buffering:
1265 1265 self.data[plot][tm] = buffer
1266 1266 else:
1267 1267 self.data[plot] = buffer
1268 1268
1269 1269 if dataOut.channelList is None:
1270 1270 self.channels = range(buffer.shape[0])
1271 1271 else:
1272 1272 self.channels = dataOut.channelList
1273 1273
1274 1274 def normalize_heights(self):
1275 1275 '''
1276 1276 Ensure the data has the same dimensions for different heightList values
1277 1277 '''
1278 1278
1279 1279 H = numpy.array(list(self.__all_heights))
1280 1280 H.sort()
1281 1281 for key in self.data:
1282 1282 shape = self.shape(key)[:-1] + H.shape
1283 1283 for tm, obj in list(self.data[key].items()):
1284 1284 h = self.__heights[self.__times.index(tm)]
1285 1285 if H.size == h.size:
1286 1286 continue
1287 1287 index = numpy.where(numpy.in1d(H, h))[0]
1288 1288 dummy = numpy.zeros(shape) + numpy.nan
1289 1289 if len(shape) == 2:
1290 1290 dummy[:, index] = obj
1291 1291 else:
1292 1292 dummy[index] = obj
1293 1293 self.data[key][tm] = dummy
1294 1294
1295 1295 self.__heights = [H for tm in self.__times]
1296 1296
1297 1297 def jsonify(self, plot_name, plot_type, decimate=False):
1298 1298 '''
1299 1299 Convert data to json
1300 1300 '''
1301 1301
1302 1302 tm = self.times[-1]
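# Decimation steps so that at most about MAXNUMX x MAXNUMY samples per
# variable are serialized into the JSON payload.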
1303 1303 dy = int(self.heights.size/self.MAXNUMY) + 1
1304 1304 if self.key in ('spc', 'cspc') or not self.buffering:
1305 1305 dx = int(self.data[self.key].shape[1]/self.MAXNUMX) + 1
1306 1306 data = self.roundFloats(
1307 1307 self.data[self.key][::, ::dx, ::dy].tolist())
1308 1308 else:
1309 1309 data = self.roundFloats(self.data[self.key][tm].tolist())
1310 1310 if self.key == 'noise':
1311 1311 data = [[x] for x in data]
1312 1312
1313 1313 meta = {}
1314 1314 ret = {
1315 1315 'plot': plot_name,
1316 1316 'code': self.exp_code,
1317 1317 'time': float(tm),
1318 1318 'data': data,
1319 1319 }
1320 1320 meta['type'] = plot_type
1321 1321 meta['interval'] = float(self.interval)
1322 1322 meta['localtime'] = self.localtime
1323 1323 meta['yrange'] = self.roundFloats(self.heights[::dy].tolist())
1324 1324 if 'spc' in self.data or 'cspc' in self.data:
1325 1325 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1326 1326 else:
1327 1327 meta['xrange'] = []
1328 1328
1329 meta.update(self.meta)
1329 meta.update(self.meta)
1330 1330 ret['metadata'] = meta
1331 1331 return json.dumps(ret)
1332 1332
1333 1333 @property
1334 1334 def times(self):
1335 1335 '''
1336 1336 Return the list of times of the current data
1337 1337 '''
1338 1338
1339 1339 ret = numpy.array(self.__times)
1340 1340 ret.sort()
1341 1341 return ret
1342 1342
1343 1343 @property
1344 1344 def min_time(self):
1345 1345 '''
1346 1346 Return the minimum time value
1347 1347 '''
1348 1348
1349 1349 return self.times[0]
1350 1350
1351 1351 @property
1352 1352 def max_time(self):
1353 1353 '''
1354 1354 Return the maximum time value
1355 1355 '''
1356 1356
1357 1357 return self.times[-1]
1358 1358
1359 1359 @property
1360 1360 def heights(self):
1361 1361 '''
1362 1362 Return the list of heights of the current data
1363 1363 '''
1364 1364
1365 1365 return numpy.array(self.__heights[-1])
1366 1366
1367 1367 @staticmethod
1368 1368 def roundFloats(obj):
1369 1369 if isinstance(obj, list):
1370 1370 return list(map(PlotterData.roundFloats, obj))
1371 1371 elif isinstance(obj, float):
1372 1372 return round(obj, 2)
@@ -1,906 +1,906
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import inspect
11 11 from schainpy.utils import log
12 12
13 13 SPEED_OF_LIGHT = 299792458
14 14 SPEED_OF_LIGHT = 3e8
15 15
16 16 BASIC_STRUCTURE = numpy.dtype([
17 17 ('nSize', '<u4'),
18 18 ('nVersion', '<u2'),
19 19 ('nDataBlockId', '<u4'),
20 20 ('nUtime', '<u4'),
21 21 ('nMilsec', '<u2'),
22 22 ('nTimezone', '<i2'),
23 23 ('nDstflag', '<i2'),
24 24 ('nErrorCount', '<u4')
25 25 ])
26 26
27 27 SYSTEM_STRUCTURE = numpy.dtype([
28 28 ('nSize', '<u4'),
29 29 ('nNumSamples', '<u4'),
30 30 ('nNumProfiles', '<u4'),
31 31 ('nNumChannels', '<u4'),
32 32 ('nADCResolution', '<u4'),
33 33 ('nPCDIOBusWidth', '<u4'),
34 34 ])
35 35
36 36 RADAR_STRUCTURE = numpy.dtype([
37 37 ('nSize', '<u4'),
38 38 ('nExpType', '<u4'),
39 39 ('nNTx', '<u4'),
40 40 ('fIpp', '<f4'),
41 41 ('fTxA', '<f4'),
42 42 ('fTxB', '<f4'),
43 43 ('nNumWindows', '<u4'),
44 44 ('nNumTaus', '<u4'),
45 45 ('nCodeType', '<u4'),
46 46 ('nLine6Function', '<u4'),
47 47 ('nLine5Function', '<u4'),
48 48 ('fClock', '<f4'),
49 49 ('nPrePulseBefore', '<u4'),
50 50 ('nPrePulseAfter', '<u4'),
51 51 ('sRangeIPP', '<a20'),
52 52 ('sRangeTxA', '<a20'),
53 53 ('sRangeTxB', '<a20'),
54 54 ])
55 55
56 56 SAMPLING_STRUCTURE = numpy.dtype(
57 57 [('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')])
58 58
59 59
60 60 PROCESSING_STRUCTURE = numpy.dtype([
61 61 ('nSize', '<u4'),
62 62 ('nDataType', '<u4'),
63 63 ('nSizeOfDataBlock', '<u4'),
64 64 ('nProfilesperBlock', '<u4'),
65 65 ('nDataBlocksperFile', '<u4'),
66 66 ('nNumWindows', '<u4'),
67 67 ('nProcessFlags', '<u4'),
68 68 ('nCoherentIntegrations', '<u4'),
69 69 ('nIncoherentIntegrations', '<u4'),
70 70 ('nTotalSpectra', '<u4')
71 71 ])
72 72
73 73
74 74 class Header(object):
75 75
76 76 def __init__(self):
77 77 raise NotImplementedError
78 78
79 79 def copy(self):
80 80 return copy.deepcopy(self)
81 81
82 82 def read(self):
83 83
84 84 raise NotImplementedError
85 85
86 86 def write(self):
87 87
88 88 raise NotImplementedError
89 89
90 90 def getAllowedArgs(self):
91 91 args = inspect.getargspec(self.__init__).args
92 92 try:
93 93 args.remove('self')
94 94 except:
95 95 pass
96 96 return args
97 97
98 98 def getAsDict(self):
99 99 args = self.getAllowedArgs()
100 100 asDict = {}
101 101 for x in args:
102 102 asDict[x] = self[x]
103 103 return asDict
104 104
105 105 def __getitem__(self, name):
106 106 return getattr(self, name)
107 107
108 108 def printInfo(self):
109 109
110 110 message = "#" * 50 + "\n"
111 111 message += self.__class__.__name__.upper() + "\n"
112 112 message += "#" * 50 + "\n"
113 113
114 114 keyList = list(self.__dict__.keys())
115 115 keyList.sort()
116 116
117 117 for key in keyList:
118 118 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
119 119
120 120 if "size" not in keyList:
121 121 attr = getattr(self, "size")
122 122
123 123 if attr:
124 124 message += "%s = %s" % ("size", attr) + "\n"
125 125
126 126 print(message)
127 127
128 128
129 129 class BasicHeader(Header):
130 130
131 131 size = None
132 132 version = None
133 133 dataBlock = None
134 134 utc = None
135 135 ltc = None
136 136 miliSecond = None
137 137 timeZone = None
138 138 dstFlag = None
139 139 errorCount = None
140 140 datatime = None
141 141 structure = BASIC_STRUCTURE
142 142 __LOCALTIME = None
143 143
144 144 def __init__(self, useLocalTime=True):
145 145
146 146 self.size = 24
147 147 self.version = 0
148 148 self.dataBlock = 0
149 149 self.utc = 0
150 150 self.miliSecond = 0
151 151 self.timeZone = 0
152 152 self.dstFlag = 0
153 153 self.errorCount = 0
154 154
155 155 self.useLocalTime = useLocalTime
156 156
157 157 def read(self, fp):
158 158
159 159 self.length = 0
160 160 try:
161 161 if hasattr(fp, 'read'):
162 162 header = numpy.fromfile(fp, BASIC_STRUCTURE, 1)
163 163 else:
164 164 header = numpy.fromstring(fp, BASIC_STRUCTURE, 1)
165 165 except Exception as e:
166 166 print("BasicHeader: ")
167 167 print(e)
168 168 return 0
169 169
170 170 self.size = int(header['nSize'][0])
171 171 self.version = int(header['nVersion'][0])
172 172 self.dataBlock = int(header['nDataBlockId'][0])
173 173 self.utc = int(header['nUtime'][0])
174 174 self.miliSecond = int(header['nMilsec'][0])
175 175 self.timeZone = int(header['nTimezone'][0])
176 176 self.dstFlag = int(header['nDstflag'][0])
177 177 self.errorCount = int(header['nErrorCount'][0])
178 178
179 179 if self.size < 24:
180 180 return 0
181 181
182 182 self.length = header.nbytes
183 183 return 1
184 184
185 185 def write(self, fp):
186 186
187 187 headerTuple = (self.size, self.version, self.dataBlock, self.utc,
188 188 self.miliSecond, self.timeZone, self.dstFlag, self.errorCount)
189 189 header = numpy.array(headerTuple, BASIC_STRUCTURE)
190 190 header.tofile(fp)
191 191
192 192 return 1
193 193
194 194 def get_ltc(self):
195 195
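# timeZone is stored in minutes, so the local-time offset is timeZone * 60 seconds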
196 196 return self.utc - self.timeZone * 60
197 197
198 198 def set_ltc(self, value):
199 199
200 200 self.utc = value + self.timeZone * 60
201 201
202 202 def get_datatime(self):
203 203
204 204 return datetime.datetime.utcfromtimestamp(self.ltc)
205 205
206 206 ltc = property(get_ltc, set_ltc)
207 207 datatime = property(get_datatime)
208 208
209 209
210 210 class SystemHeader(Header):
211 211
212 212 size = None
213 213 nSamples = None
214 214 nProfiles = None
215 215 nChannels = None
216 216 adcResolution = None
217 217 pciDioBusWidth = None
218 218 structure = SYSTEM_STRUCTURE
219 219
220 220 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0):
221
221
222 222 self.size = 24
223 223 self.nSamples = nSamples
224 224 self.nProfiles = nProfiles
225 225 self.nChannels = nChannels
226 226 self.adcResolution = adcResolution
227 227 self.pciDioBusWidth = pciDioBusWidth
228 228
229 229 def read(self, fp):
230 230 self.length = 0
231 231 try:
232 232 startFp = fp.tell()
233 233 except Exception as e:
234 234 startFp = None
235 235 pass
236 236
237 237 try:
238 238 if hasattr(fp, 'read'):
239 239 header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
240 240 else:
241 241 header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1)
242 242 except Exception as e:
243 243 print("System Header: " + str(e))
244 244 return 0
245 245
246 246 self.size = header['nSize'][0]
247 247 self.nSamples = header['nNumSamples'][0]
248 248 self.nProfiles = header['nNumProfiles'][0]
249 249 self.nChannels = header['nNumChannels'][0]
250 250 self.adcResolution = header['nADCResolution'][0]
251 251 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
252 252
253 253 if startFp is not None:
254 254 endFp = self.size + startFp
255 255
256 256 if fp.tell() > endFp:
257 257 sys.stderr.write(
258 258 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
259 259 return 0
260 260
261 261 if fp.tell() < endFp:
262 262 sys.stderr.write(
263 263 "Warning %s: Size value read from System Header is greater than it has to be\n" % fp.name)
264 264 return 0
265 265
266 266 self.length = header.nbytes
267 267 return 1
268 268
269 269 def write(self, fp):
270 270
271 271 headerTuple = (self.size, self.nSamples, self.nProfiles,
272 272 self.nChannels, self.adcResolution, self.pciDioBusWidth)
273 273 header = numpy.array(headerTuple, SYSTEM_STRUCTURE)
274 274 header.tofile(fp)
275 275
276 276 return 1
277 277
278 278
279 279 class RadarControllerHeader(Header):
280 280
281 281 expType = None
282 282 nTx = None
283 283 ipp = None
284 284 txA = None
285 285 txB = None
286 286 nWindows = None
287 287 numTaus = None
288 288 codeType = None
289 289 line6Function = None
290 290 line5Function = None
291 291 fClock = None
292 292 prePulseBefore = None
293 293 prePulseAfter = None
294 294 rangeIpp = None
295 295 rangeTxA = None
296 296 rangeTxB = None
297 297 structure = RADAR_STRUCTURE
298 298 __size = None
299 299
300 300 def __init__(self, expType=2, nTx=1,
301 301 ipp=None, txA=0, txB=0,
302 302 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
303 303 numTaus=0, line6Function=0, line5Function=0, fClock=None,
304 304 prePulseBefore=0, prePulseAfter=0,
305 305 codeType=0, nCode=0, nBaud=0, code=None,
306 306 flip1=0, flip2=0):
307 307
308 308 # self.size = 116
309 309 self.expType = expType
310 310 self.nTx = nTx
311 311 self.ipp = ipp
312 312 self.txA = txA
313 313 self.txB = txB
314 314 self.rangeIpp = ipp
315 315 self.rangeTxA = txA
316 316 self.rangeTxB = txB
317 317
318 318 self.nWindows = nWindows
319 319 self.numTaus = numTaus
320 320 self.codeType = codeType
321 321 self.line6Function = line6Function
322 322 self.line5Function = line5Function
323 323 self.fClock = fClock
324 324 self.prePulseBefore = prePulseBefore
325 325 self.prePulseAfter = prePulseAfter
326 326
327 327 self.nHeights = nHeights
328 328 self.firstHeight = firstHeight
329 329 self.deltaHeight = deltaHeight
330 330 self.samplesWin = nHeights
331 331
332 332 self.nCode = nCode
333 333 self.nBaud = nBaud
334 334 self.code = code
335 335 self.flip1 = flip1
336 336 self.flip2 = flip2
337 337
338 338 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
339 339 # self.dynamic = numpy.array([],numpy.dtype('byte'))
340 340
341 341 if self.fClock is None and self.deltaHeight is not None:
342 342 self.fClock = 0.15 / (deltaHeight * 1e-6) # 0.15Km / (height * 1u)
343 343
344 344 def read(self, fp):
345 345 self.length = 0
346 346 try:
347 347 startFp = fp.tell()
348 348 except Exception as e:
349 349 startFp = None
350 350 pass
351 351
352 352 try:
353 353 if hasattr(fp, 'read'):
354 354 header = numpy.fromfile(fp, RADAR_STRUCTURE, 1)
355 355 else:
356 356 header = numpy.fromstring(fp, RADAR_STRUCTURE, 1)
357 357 self.length += header.nbytes
358 358 except Exception as e:
359 359 print("RadarControllerHeader: " + str(e))
360 360 return 0
361 361
362 362 size = int(header['nSize'][0])
363 363 self.expType = int(header['nExpType'][0])
364 364 self.nTx = int(header['nNTx'][0])
365 365 self.ipp = float(header['fIpp'][0])
366 366 self.txA = float(header['fTxA'][0])
367 367 self.txB = float(header['fTxB'][0])
368 368 self.nWindows = int(header['nNumWindows'][0])
369 369 self.numTaus = int(header['nNumTaus'][0])
370 370 self.codeType = int(header['nCodeType'][0])
371 371 self.line6Function = int(header['nLine6Function'][0])
372 372 self.line5Function = int(header['nLine5Function'][0])
373 373 self.fClock = float(header['fClock'][0])
374 374 self.prePulseBefore = int(header['nPrePulseBefore'][0])
375 375 self.prePulseAfter = int(header['nPrePulseAfter'][0])
376 376 self.rangeIpp = header['sRangeIPP'][0]
377 377 self.rangeTxA = header['sRangeTxA'][0]
378 378 self.rangeTxB = header['sRangeTxB'][0]
379 379
380 380 try:
381 381 if hasattr(fp, 'read'):
382 382 samplingWindow = numpy.fromfile(
383 383 fp, SAMPLING_STRUCTURE, self.nWindows)
384 384 else:
385 385 samplingWindow = numpy.fromstring(
386 386 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
387 387 self.length += samplingWindow.nbytes
388 388 except Exception as e:
389 389 print("RadarControllerHeader: " + str(e))
390 390 return 0
391 391 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
392 392 self.firstHeight = samplingWindow['h0']
393 393 self.deltaHeight = samplingWindow['dh']
394 394 self.samplesWin = samplingWindow['nsa']
395 395
396 396 try:
397 397 if hasattr(fp, 'read'):
398 398 self.Taus = numpy.fromfile(fp, '<f4', self.numTaus)
399 399 else:
400 400 self.Taus = numpy.fromstring(
401 401 fp[self.length:], '<f4', self.numTaus)
402 402 self.length += self.Taus.nbytes
403 403 except Exception as e:
404 404 print("RadarControllerHeader: " + str(e))
405 405 return 0
406 406
407 407 self.code_size = 0
408 408 if self.codeType != 0:
409 409
410 410 try:
411 411 if hasattr(fp, 'read'):
412 412 self.nCode = numpy.fromfile(fp, '<u4', 1)[0]
413 413 self.length += self.nCode.nbytes
414 414 self.nBaud = numpy.fromfile(fp, '<u4', 1)[0]
415 415 self.length += self.nBaud.nbytes
416 416 else:
417 417 self.nCode = numpy.fromstring(
418 418 fp[self.length:], '<u4', 1)[0]
419 419 self.length += self.nCode.nbytes
420 420 self.nBaud = numpy.fromstring(
421 421 fp[self.length:], '<u4', 1)[0]
422 422 self.length += self.nBaud.nbytes
423 423 except Exception as e:
424 424 print("RadarControllerHeader: " + str(e))
425 425 return 0
426 426 code = numpy.empty([self.nCode, self.nBaud], dtype='i1')
427 427
428 428 for ic in range(self.nCode):
429 429 try:
430 430 if hasattr(fp, 'read'):
431 431 temp = numpy.fromfile(fp, 'u4', int(
432 432 numpy.ceil(self.nBaud / 32.)))
433 433 else:
434 434 temp = numpy.fromstring(
435 435 fp, 'u4', int(numpy.ceil(self.nBaud / 32.)))
436 436 self.length += temp.nbytes
437 437 except Exception as e:
438 438 print("RadarControllerHeader: " + str(e))
439 439 return 0
440 440
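# Unpack each packed 32-bit word into individual bauds: bits are taken from
# the least-significant end upwards (ib runs backwards), so the last baud of
# a word sits in its LSB; the 0/1 values are mapped to -1/+1 below.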
441 441 for ib in range(self.nBaud - 1, -1, -1):
442 442 code[ic, ib] = temp[int(ib / 32)] % 2
443 443 temp[int(ib / 32)] = temp[int(ib / 32)] / 2
444 444
445 445 self.code = 2.0 * code - 1.0
446 446 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
447 447
448 448 # if self.line5Function == RCfunction.FLIP:
449 449 # self.flip1 = numpy.fromfile(fp,'<u4',1)
450 450 #
451 451 # if self.line6Function == RCfunction.FLIP:
452 452 # self.flip2 = numpy.fromfile(fp,'<u4',1)
453 453 if startFp is not None:
454 454 endFp = size + startFp
455 455
456 456 if fp.tell() != endFp:
457 457 # fp.seek(endFp)
458 458 print("%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" % (fp.name, fp.tell() - startFp, size))
459 459 # return 0
460 460
461 461 if fp.tell() > endFp:
462 462 sys.stderr.write(
463 463 "Warning %s: Size value read from Radar Controller header is lower than it has to be\n" % fp.name)
464 464 # return 0
465 465
466 466 if fp.tell() < endFp:
467 467 sys.stderr.write(
468 468 "Warning %s: Size value read from Radar Controller header is greater than it has to be\n" % fp.name)
469 469
470 470 return 1
471 471
472 472 def write(self, fp):
473 473
474 474 headerTuple = (self.size,
475 475 self.expType,
476 476 self.nTx,
477 477 self.ipp,
478 478 self.txA,
479 479 self.txB,
480 480 self.nWindows,
481 481 self.numTaus,
482 482 self.codeType,
483 483 self.line6Function,
484 484 self.line5Function,
485 485 self.fClock,
486 486 self.prePulseBefore,
487 487 self.prePulseAfter,
488 488 self.rangeIpp,
489 489 self.rangeTxA,
490 490 self.rangeTxB)
491 491
492 492 header = numpy.array(headerTuple, RADAR_STRUCTURE)
493 493 header.tofile(fp)
494 494
495 495 sampleWindowTuple = (
496 496 self.firstHeight, self.deltaHeight, self.samplesWin)
497 497 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
498 498 samplingWindow.tofile(fp)
499 499
500 500 if self.numTaus > 0:
501 501 self.Taus.tofile(fp)
502 502
503 503 if self.codeType != 0:
504 504 nCode = numpy.array(self.nCode, '<u4')
505 505 nCode.tofile(fp)
506 506 nBaud = numpy.array(self.nBaud, '<u4')
507 507 nBaud.tofile(fp)
508 508 code1 = (self.code + 1.0) / 2.
509 509
510 510 for ic in range(self.nCode):
511 511 tempx = numpy.zeros(int(numpy.ceil(self.nBaud / 32.)))
512 512 start = 0
513 513 end = 32
514 514 for i in range(len(tempx)):
515 515 code_selected = code1[ic, start:end]
516 516 for j in range(len(code_selected) - 1, -1, -1):
517 517 if code_selected[j] == 1:
518 518 tempx[i] = tempx[i] + \
519 519 2**(len(code_selected) - 1 - j)
520 520 start = start + 32
521 521 end = end + 32
522 522
523 523 tempx = tempx.astype('u4')
524 524 tempx.tofile(fp)
525 525
526 526 # if self.line5Function == RCfunction.FLIP:
527 527 # self.flip1.tofile(fp)
528 528 #
529 529 # if self.line6Function == RCfunction.FLIP:
530 530 # self.flip2.tofile(fp)
531 531
532 532 return 1
533 533
534 534 def get_ippSeconds(self):
535 535 '''
536 536 '''
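# ipp is expressed in km, so the inter-pulse period in seconds is the
# round-trip time 2 * 1000 * ipp / c; for example (hypothetical value),
# ipp = 300 km gives ippSeconds = 2 * 1000 * 300 / 3e8 = 2 ms.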
537 537 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
538 538
539 539 return ippSeconds
540 540
541 541 def set_ippSeconds(self, ippSeconds):
542 542 '''
543 543 '''
544 544
545 545 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0 * 1000)
546 546
547 547 return
548 548
549 549 def get_size(self):
550 550
551 551 self.__size = 116 + 12 * self.nWindows + 4 * self.numTaus
552 552
553 553 if self.codeType != 0:
554 554 self.__size += 4 + 4 + 4 * self.nCode * \
555 555 numpy.ceil(self.nBaud / 32.)
556 556
557 557 return self.__size
558 558
559 559 def set_size(self, value):
560 560
561 561 raise IOError("size is a property and it cannot be set, just read")
562 562
563 563 return
564 564
565 565 ippSeconds = property(get_ippSeconds, set_ippSeconds)
566 566 size = property(get_size, set_size)
567 567
568 568
569 569 class ProcessingHeader(Header):
570 570
571 571 # size = None
572 572 dtype = None
573 573 blockSize = None
574 574 profilesPerBlock = None
575 575 dataBlocksPerFile = None
576 576 nWindows = None
577 577 processFlags = None
578 578 nCohInt = None
579 579 nIncohInt = None
580 580 totalSpectra = None
581 581 structure = PROCESSING_STRUCTURE
582 582 flag_dc = None
583 583 flag_cspc = None
584 584
585 585 def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
586 586 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
587 587 code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False
588 588 ):
589 589
590 590 # self.size = 0
591 591 self.dtype = dtype
592 592 self.blockSize = blockSize
593 593 self.profilesPerBlock = 0
594 594 self.dataBlocksPerFile = 0
595 595 self.nWindows = 0
596 596 self.processFlags = 0
597 597 self.nCohInt = 0
598 598 self.nIncohInt = 0
599 599 self.totalSpectra = 0
600 600
601 601 self.nHeights = 0
602 602 self.firstHeight = 0
603 603 self.deltaHeight = 0
604 604 self.samplesWin = 0
605 605 self.spectraComb = 0
606 606 self.nCode = None
607 607 self.code = None
608 608 self.nBaud = None
609 609
610 610 self.shif_fft = False
611 611 self.flag_dc = False
612 612 self.flag_cspc = False
613 613 self.flag_decode = False
614 614 self.flag_deflip = False
615 615 self.length = 0
616 616
617 617 def read(self, fp):
618 618 self.length = 0
619 619 try:
620 620 startFp = fp.tell()
621 621 except Exception as e:
622 622 startFp = None
623 623 pass
624 624
625 625 try:
626 626 if hasattr(fp, 'read'):
627 627 header = numpy.fromfile(fp, PROCESSING_STRUCTURE, 1)
628 628 else:
629 629 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
630 630 self.length += header.nbytes
631 631 except Exception as e:
632 632 print("ProcessingHeader: " + str(e))
633 633 return 0
634 634
635 635 size = int(header['nSize'][0])
636 636 self.dtype = int(header['nDataType'][0])
637 637 self.blockSize = int(header['nSizeOfDataBlock'][0])
638 638 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
639 639 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
640 640 self.nWindows = int(header['nNumWindows'][0])
641 641 self.processFlags = header['nProcessFlags']
642 642 self.nCohInt = int(header['nCoherentIntegrations'][0])
643 643 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
644 644 self.totalSpectra = int(header['nTotalSpectra'][0])
645 645
646 646 try:
647 647 if hasattr(fp, 'read'):
648 648 samplingWindow = numpy.fromfile(
649 649 fp, SAMPLING_STRUCTURE, self.nWindows)
650 650 else:
651 651 samplingWindow = numpy.fromstring(
652 652 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
653 653 self.length += samplingWindow.nbytes
654 654 except Exception as e:
655 655 print("ProcessingHeader: " + str(e))
656 656 return 0
657 657
658 658 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
659 659 self.firstHeight = float(samplingWindow['h0'][0])
660 660 self.deltaHeight = float(samplingWindow['dh'][0])
661 661 self.samplesWin = samplingWindow['nsa'][0]
662 662
663 663 try:
664 664 if hasattr(fp, 'read'):
665 665 self.spectraComb = numpy.fromfile(
666 666 fp, 'u1', 2 * self.totalSpectra)
667 667 else:
668 668 self.spectraComb = numpy.fromstring(
669 669 fp[self.length:], 'u1', 2 * self.totalSpectra)
670 670 self.length += self.spectraComb.nbytes
671 671 except Exception as e:
672 672 print("ProcessingHeader: " + str(e))
673 673 return 0
674 674
675 675 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
676 676 self.nCode = int(numpy.fromfile(fp, '<u4', 1))
677 677 self.nBaud = int(numpy.fromfile(fp, '<u4', 1))
678 678 self.code = numpy.fromfile(
679 679 fp, '<f4', self.nCode * self.nBaud).reshape(self.nCode, self.nBaud)
680 680
681 681 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
682 682 exp_name_len = int(numpy.fromfile(fp, '<u4', 1))
683 683 exp_name = numpy.fromfile(fp, 'u1', exp_name_len + 1)
684 684
685 685 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
686 686 self.shif_fft = True
687 687 else:
688 688 self.shif_fft = False
689 689
690 690 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
691 691 self.flag_dc = True
692 692 else:
693 693 self.flag_dc = False
694 694
695 695 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
696 696 self.flag_decode = True
697 697 else:
698 698 self.flag_decode = False
699 699
700 700 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
701 701 self.flag_deflip = True
702 702 else:
703 703 self.flag_deflip = False
704 704
705 705 nChannels = 0
706 706 nPairs = 0
707 707 pairList = []
708 708
709 709 for i in range(0, self.totalSpectra * 2, 2):
710 710 if self.spectraComb[i] == self.spectraComb[i + 1]:
711 711 nChannels = nChannels + 1 # pair of equal channels (self-spectrum)
712 712 else:
713 713 nPairs = nPairs + 1 # pair of different channels (cross-spectrum)
714 714 pairList.append((self.spectraComb[i], self.spectraComb[i + 1]))
715 715
716 716 self.flag_cspc = False
717 717 if nPairs > 0:
718 718 self.flag_cspc = True
719 719
720 720 if startFp is not None:
721 721 endFp = size + startFp
722 722 if fp.tell() > endFp:
723 723 sys.stderr.write(
724 724 "Warning: Processing header size is lower than it has to be")
725 725 return 0
726 726
727 727 if fp.tell() < endFp:
728 728 sys.stderr.write(
729 729 "Warning: Processing header size is greater than it is considered")
730 730
731 731 return 1
732 732
733 733 def write(self, fp):
734 734 # Clear DEFINE_PROCESS_CODE
735 735 self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
736 736
737 737 headerTuple = (self.size,
738 738 self.dtype,
739 739 self.blockSize,
740 740 self.profilesPerBlock,
741 741 self.dataBlocksPerFile,
742 742 self.nWindows,
743 743 self.processFlags,
744 744 self.nCohInt,
745 745 self.nIncohInt,
746 746 self.totalSpectra)
747 747
748 748 header = numpy.array(headerTuple, PROCESSING_STRUCTURE)
749 749 header.tofile(fp)
750 750
751 751 if self.nWindows != 0:
752 752 sampleWindowTuple = (
753 753 self.firstHeight, self.deltaHeight, self.samplesWin)
754 754 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
755 755 samplingWindow.tofile(fp)
756 756
757 757 if self.totalSpectra != 0:
758 758 # spectraComb = numpy.array([],numpy.dtype('u1'))
759 759 spectraComb = self.spectraComb
760 760 spectraComb.tofile(fp)
761 761
762 762 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
763 763 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Test with data that actually stores a code; this has not been verified yet
764 764 # nCode.tofile(fp)
765 765 #
766 766 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
767 767 # nBaud.tofile(fp)
768 768 #
769 769 # code = self.code.reshape(self.nCode*self.nBaud)
770 770 # code = code.astype(numpy.dtype('<f4'))
771 771 # code.tofile(fp)
772 772
773 773 return 1
774 774
775 775 def get_size(self):
776 776
777 777 self.__size = 40 + 12 * self.nWindows + 2 * self.totalSpectra
778 778
779 779 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
780 780 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
781 781 # self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
782 782
783 783 return self.__size
784 784
785 785 def set_size(self, value):
786 786
787 787 raise IOError("size is a property and it cannot be set, just read")
788 788
789 789 return
790 790
791 791 size = property(get_size, set_size)
792 792
793 793
794 794 class RCfunction:
795 795 NONE = 0
796 796 FLIP = 1
797 797 CODE = 2
798 798 SAMPLING = 3
799 799 LIN6DIV256 = 4
800 800 SYNCHRO = 5
801 801
802 802
803 803 class nCodeType:
804 804 NONE = 0
805 805 USERDEFINE = 1
806 806 BARKER2 = 2
807 807 BARKER3 = 3
808 808 BARKER4 = 4
809 809 BARKER5 = 5
810 810 BARKER7 = 6
811 811 BARKER11 = 7
812 812 BARKER13 = 8
813 813 AC128 = 9
814 814 COMPLEMENTARYCODE2 = 10
815 815 COMPLEMENTARYCODE4 = 11
816 816 COMPLEMENTARYCODE8 = 12
817 817 COMPLEMENTARYCODE16 = 13
818 818 COMPLEMENTARYCODE32 = 14
819 819 COMPLEMENTARYCODE64 = 15
820 820 COMPLEMENTARYCODE128 = 16
821 821 CODE_BINARY28 = 17
822 822
823 823
824 824 class PROCFLAG:
825 825
826 826 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
827 827 DECODE_DATA = numpy.uint32(0x00000002)
828 828 SPECTRA_CALC = numpy.uint32(0x00000004)
829 829 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
830 830 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
831 831 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
832 832
833 833 DATATYPE_CHAR = numpy.uint32(0x00000040)
834 834 DATATYPE_SHORT = numpy.uint32(0x00000080)
835 835 DATATYPE_LONG = numpy.uint32(0x00000100)
836 836 DATATYPE_INT64 = numpy.uint32(0x00000200)
837 837 DATATYPE_FLOAT = numpy.uint32(0x00000400)
838 838 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
839 839
840 840 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
841 841 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
842 842 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
843 843
844 844 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
845 845 DEFLIP_DATA = numpy.uint32(0x00010000)
846 846 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
847 847
848 848 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
849 849 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
850 850 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
851 851 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
852 852 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
853 853
854 854 EXP_NAME_ESP = numpy.uint32(0x00200000)
855 855 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
856 856
857 857 OPERATION_MASK = numpy.uint32(0x0000003F)
858 858 DATATYPE_MASK = numpy.uint32(0x00000FC0)
859 859 DATAARRANGE_MASK = numpy.uint32(0x00007000)
860 860 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
861 861
862 862
863 863 dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
864 864 dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
865 865 dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
866 866 dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
867 867 dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
868 868 dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
869 869
870 870 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
871 871
872 872 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
873 873 PROCFLAG.DATATYPE_SHORT,
874 874 PROCFLAG.DATATYPE_LONG,
875 875 PROCFLAG.DATATYPE_INT64,
876 876 PROCFLAG.DATATYPE_FLOAT,
877 877 PROCFLAG.DATATYPE_DOUBLE]
878 878
879 879 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
880 880
881 881
882 882 def get_dtype_index(numpy_dtype):
883 883
884 884 index = None
885 885
886 886 for i in range(len(NUMPY_DTYPE_LIST)):
887 887 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
888 888 index = i
889 889 break
890 890
891 891 return index
892 892
893 893
894 894 def get_numpy_dtype(index):
895 895
896 896 return NUMPY_DTYPE_LIST[index]
897 897
898 898
899 899 def get_procflag_dtype(index):
900 900
901 901 return PROCFLAG_DTYPE_LIST[index]
902 902
903 903
904 904 def get_dtype_width(index):
905 905
906 return DTYPE_WIDTH[index] No newline at end of file
906 return DTYPE_WIDTH[index]
@@ -1,810 +1,808
1 1
2 2 import os
3 3 import sys
4 4 import zmq
5 5 import time
6 6 import numpy
7 7 import datetime
8 8 from functools import wraps
9 9 from threading import Thread
10 10 import matplotlib
11 11
12 12 if 'BACKEND' in os.environ:
13 13 matplotlib.use(os.environ['BACKEND'])
14 14 elif 'linux' in sys.platform:
15 15 matplotlib.use("TkAgg")
16 16 elif 'darwin' in sys.platform:
17 17 matplotlib.use('WxAgg')
18 18 else:
19 19 from schainpy.utils import log
20 20 log.warning('Using default Backend="Agg"', 'INFO')
21 21 matplotlib.use('Agg')
22 22
23 23 import matplotlib.pyplot as plt
24 24 from matplotlib.patches import Polygon
25 25 from mpl_toolkits.axes_grid1 import make_axes_locatable
26 26 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
27 27
28 28 from schainpy.model.data.jrodata import PlotterData
29 29 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
30 30 from schainpy.utils import log
31 31
32 32 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
33 33 blu_values = matplotlib.pyplot.get_cmap(
34 34 'seismic_r', 20)(numpy.arange(20))[10:15]
35 35 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
36 36 'jro', numpy.vstack((blu_values, jet_values)))
37 37 matplotlib.pyplot.register_cmap(cmap=ncmap)
38 38
39 39 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
40 40 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
41 41
42 42 EARTH_RADIUS = 6.3710e3
43 43
44 44 def ll2xy(lat1, lon1, lat2, lon2):
45 45
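# Approximate lat/lon to local Cartesian conversion: the haversine formula
# gives the great-circle distance (12742 km is twice EARTH_RADIUS) and the
# initial bearing is rotated so the returned pair is approximately the
# (east, north) offset in km of point 2 relative to point 1.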
46 46 p = 0.017453292519943295
47 47 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
48 48 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
49 49 r = 12742 * numpy.arcsin(numpy.sqrt(a))
50 50 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
51 51 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
52 52 theta = -theta + numpy.pi/2
53 53 return r*numpy.cos(theta), r*numpy.sin(theta)
54 54
55 55
56 56 def km2deg(km):
57 57 '''
58 58 Convert distance in km to degrees
59 59 '''
60 60
61 61 return numpy.rad2deg(km/EARTH_RADIUS)
62 62
63 63
64 64 def figpause(interval):
65 65 backend = plt.rcParams['backend']
66 66 if backend in matplotlib.rcsetup.interactive_bk:
67 67 figManager = matplotlib._pylab_helpers.Gcf.get_active()
68 68 if figManager is not None:
69 69 canvas = figManager.canvas
70 70 if canvas.figure.stale:
71 71 canvas.draw()
72 72 try:
73 73 canvas.start_event_loop(interval)
74 74 except:
75 75 pass
76 76 return
77 77
78 78
79 79 def popup(message):
80 80 '''
81 81 '''
82 82
83 83 fig = plt.figure(figsize=(12, 8), facecolor='r')
84 84 text = '\n'.join([s.strip() for s in message.split(':')])
85 85 fig.text(0.01, 0.5, text, ha='left', va='center',
86 86 size='20', weight='heavy', color='w')
87 87 fig.show()
88 88 figpause(1000)
89 89
90 90
91 91 class Throttle(object):
92 92 '''
93 93 Decorator that prevents a function from being called more than once every
94 94 time period.
95 95 To create a function that cannot be called more than once a minute
96 96 (extra calls made within the throttle period are silently skipped):
97 97 @Throttle(minutes=1)
98 98 def foo():
99 99 pass
100 100
101 101 for i in range(10):
102 102 foo()
103 103 print("This function has run %s times." % i)
104 104 '''
105 105
106 106 def __init__(self, seconds=0, minutes=0, hours=0):
107 107 self.throttle_period = datetime.timedelta(
108 108 seconds=seconds, minutes=minutes, hours=hours
109 109 )
110 110
111 111 self.time_of_last_call = datetime.datetime.min
112 112
113 113 def __call__(self, fn):
114 114 @wraps(fn)
115 115 def wrapper(*args, **kwargs):
116 116 coerce = kwargs.pop('coerce', None)
117 117 if coerce:
118 118 self.time_of_last_call = datetime.datetime.now()
119 119 return fn(*args, **kwargs)
120 120 else:
121 121 now = datetime.datetime.now()
122 122 time_since_last_call = now - self.time_of_last_call
123 123 time_left = self.throttle_period - time_since_last_call
124 124
125 125 if time_left > datetime.timedelta(seconds=0):
126 126 return
127 127
128 128 self.time_of_last_call = datetime.datetime.now()
129 129 return fn(*args, **kwargs)
130 130
131 131 return wrapper
132 132
133 133 def apply_throttle(value):
134 134
135 135 @Throttle(seconds=value)
136 136 def fnThrottled(fn):
137 137 fn()
138 138
139 139 return fnThrottled
140 140
141 141
142 142 @MPDecorator
143 143 class Plot(Operation):
144 144 '''
145 145 Base class for Schain plotting operations
146 146 '''
147 147
148 148 CODE = 'Figure'
149 149 colormap = 'jet'
150 150 bgcolor = 'white'
151 151 __missing = 1E30
152 152
153 153 __attrs__ = ['show', 'save', 'xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax',
154 154 'zlimits', 'xlabel', 'ylabel', 'xaxis', 'cb_label', 'title',
155 155 'colorbar', 'bgcolor', 'width', 'height', 'localtime', 'oneFigure',
156 156 'showprofile', 'decimation', 'pause']
157 157
158 158 def __init__(self):
159 159
160 160 Operation.__init__(self)
161 161 self.isConfig = False
162 162 self.isPlotConfig = False
163 163 self.save_counter = 1
164 164 self.sender_counter = 1
165 165 self.data = None
166 166
167 167 def __fmtTime(self, x, pos):
168 168 '''
169 169 '''
170 170
171 171 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
172 172
173 173 def __setup(self, **kwargs):
174 174 '''
175 175 Initialize variables
176 176 '''
177 177
178 178 self.figures = []
179 179 self.axes = []
180 180 self.cb_axes = []
181 181 self.localtime = kwargs.pop('localtime', True)
182 182 self.show = kwargs.get('show', True)
183 183 self.save = kwargs.get('save', False)
184 184 self.save_period = kwargs.get('save_period', 1)
185 185 self.ftp = kwargs.get('ftp', False)
186 186 self.colormap = kwargs.get('colormap', self.colormap)
187 187 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
188 188 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
189 189 self.colormaps = kwargs.get('colormaps', None)
190 190 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
191 191 self.showprofile = kwargs.get('showprofile', False)
192 192 self.title = kwargs.get('wintitle', self.CODE.upper())
193 193 self.cb_label = kwargs.get('cb_label', None)
194 194 self.cb_labels = kwargs.get('cb_labels', None)
195 195 self.labels = kwargs.get('labels', None)
196 196 self.xaxis = kwargs.get('xaxis', 'frequency')
197 197 self.zmin = kwargs.get('zmin', None)
198 198 self.zmax = kwargs.get('zmax', None)
199 199 self.zlimits = kwargs.get('zlimits', None)
200 200 self.xmin = kwargs.get('xmin', None)
201 201 self.xmax = kwargs.get('xmax', None)
202 202 self.xrange = kwargs.get('xrange', 24)
203 203 self.xscale = kwargs.get('xscale', None)
204 204 self.ymin = kwargs.get('ymin', None)
205 205 self.ymax = kwargs.get('ymax', None)
206 206 self.yscale = kwargs.get('yscale', None)
207 207 self.xlabel = kwargs.get('xlabel', None)
208 208 self.decimation = kwargs.get('decimation', None)
209 209 self.showSNR = kwargs.get('showSNR', False)
210 210 self.oneFigure = kwargs.get('oneFigure', True)
211 211 self.width = kwargs.get('width', None)
212 212 self.height = kwargs.get('height', None)
213 213 self.colorbar = kwargs.get('colorbar', True)
214 214 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
215 215 self.channels = kwargs.get('channels', None)
216 216 self.titles = kwargs.get('titles', [])
217 217 self.polar = False
218 218 self.type = kwargs.get('type', 'iq')
219 219 self.grid = kwargs.get('grid', False)
220 220 self.pause = kwargs.get('pause', False)
221 221 self.save_labels = kwargs.get('save_labels', None)
222 222 self.realtime = kwargs.get('realtime', True)
223 223 self.buffering = kwargs.get('buffering', True)
224 224 self.throttle = kwargs.get('throttle', 2)
225 225 self.exp_code = kwargs.get('exp_code', None)
226 226 self.plot_server = kwargs.get('plot_server', False)
227 227 self.sender_period = kwargs.get('sender_period', 1)
228 228 self.__throttle_plot = apply_throttle(self.throttle)
229 229 self.data = PlotterData(
230 230 self.CODE, self.throttle, self.exp_code, self.buffering, snr=self.showSNR)
231
231
232 232 if self.plot_server:
233 233 if not self.plot_server.startswith('tcp://'):
234 234 self.plot_server = 'tcp://{}'.format(self.plot_server)
235 235 log.success(
236 236 'Sending to server: {}'.format(self.plot_server),
237 237 self.name
238 238 )
239 239 if 'plot_name' in kwargs:
240 240 self.plot_name = kwargs['plot_name']
241 241
242 242 def __setup_plot(self):
243 243 '''
244 244 Common setup for all figures, here figures and axes are created
245 245 '''
246 246
247 247 self.setup()
248 248
249 self.time_label = 'LT' if self.localtime else 'UTC'
249 self.time_label = 'LT' if self.localtime else 'UTC'
250 250
251 251 if self.width is None:
252 252 self.width = 8
253 253
254 254 self.figures = []
255 255 self.axes = []
256 256 self.cb_axes = []
257 257 self.pf_axes = []
258 258 self.cmaps = []
259 259
260 260 size = '15%' if self.ncols == 1 else '30%'
261 261 pad = '4%' if self.ncols == 1 else '8%'
262 262
263 263 if self.oneFigure:
264 264 if self.height is None:
265 265 self.height = 1.4 * self.nrows + 1
266 266 fig = plt.figure(figsize=(self.width, self.height),
267 267 edgecolor='k',
268 268 facecolor='w')
269 269 self.figures.append(fig)
270 270 for n in range(self.nplots):
271 271 ax = fig.add_subplot(self.nrows, self.ncols,
272 272 n + 1, polar=self.polar)
273 273 ax.tick_params(labelsize=8)
274 274 ax.firsttime = True
275 275 ax.index = 0
276 276 ax.press = None
277 277 self.axes.append(ax)
278 278 if self.showprofile:
279 279 cax = self.__add_axes(ax, size=size, pad=pad)
280 280 cax.tick_params(labelsize=8)
281 281 self.pf_axes.append(cax)
282 282 else:
283 283 if self.height is None:
284 284 self.height = 3
285 285 for n in range(self.nplots):
286 286 fig = plt.figure(figsize=(self.width, self.height),
287 287 edgecolor='k',
288 288 facecolor='w')
289 289 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
290 290 ax.tick_params(labelsize=8)
291 291 ax.firsttime = True
292 292 ax.index = 0
293 293 ax.press = None
294 294 self.figures.append(fig)
295 295 self.axes.append(ax)
296 296 if self.showprofile:
297 297 cax = self.__add_axes(ax, size=size, pad=pad)
298 298 cax.tick_params(labelsize=8)
299 299 self.pf_axes.append(cax)
300 300
301 301 for n in range(self.nrows):
302 302 if self.colormaps is not None:
303 303 cmap = plt.get_cmap(self.colormaps[n])
304 304 else:
305 305 cmap = plt.get_cmap(self.colormap)
306 306 cmap.set_bad(self.bgcolor, 1.)
307 307 self.cmaps.append(cmap)
308
308
309 309 for fig in self.figures:
310 310 fig.canvas.mpl_connect('key_press_event', self.OnKeyPress)
311 311 fig.canvas.mpl_connect('scroll_event', self.OnBtnScroll)
312 312 fig.canvas.mpl_connect('button_press_event', self.onBtnPress)
313 313 fig.canvas.mpl_connect('motion_notify_event', self.onMotion)
314 314 fig.canvas.mpl_connect('button_release_event', self.onBtnRelease)
315 315
316 316 def OnKeyPress(self, event):
317 317 '''
318 318 Event for pressing keys (up, down) change colormap
319 319 '''
320 320 ax = event.inaxes
321 321 if ax in self.axes:
322 322 if event.key == 'down':
323 323 ax.index += 1
324 324 elif event.key == 'up':
325 325 ax.index -= 1
326 326 if ax.index < 0:
327 327 ax.index = len(CMAPS) - 1
328 328 elif ax.index == len(CMAPS):
329 329 ax.index = 0
330 330 cmap = CMAPS[ax.index]
331 331 ax.cbar.set_cmap(cmap)
332 332 ax.cbar.draw_all()
333 333 ax.plt.set_cmap(cmap)
334 334 ax.cbar.patch.figure.canvas.draw()
335 335 self.colormap = cmap.name
336 336
337 337 def OnBtnScroll(self, event):
338 338 '''
339 339 Event for scrolling, scale figure
340 340 '''
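        # Rescale the colorbar limits around the value under the cursor:
        # scrolling up doubles the displayed span, scrolling down halves it.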
341 341 cb_ax = event.inaxes
342 342 if cb_ax in [ax.cbar.ax for ax in self.axes if ax.cbar]:
343 343 ax = [ax for ax in self.axes if cb_ax == ax.cbar.ax][0]
344 344 pt = ax.cbar.ax.bbox.get_points()[:, 1]
345 345 nrm = ax.cbar.norm
346 346 vmin, vmax, p0, p1, pS = (
347 347 nrm.vmin, nrm.vmax, pt[0], pt[1], event.y)
348 348 scale = 2 if event.step == 1 else 0.5
349 349 point = vmin + (vmax - vmin) / (p1 - p0) * (pS - p0)
350 350 ax.cbar.norm.vmin = point - scale * (point - vmin)
351 351 ax.cbar.norm.vmax = point - scale * (point - vmax)
352 352 ax.plt.set_norm(ax.cbar.norm)
353 353 ax.cbar.draw_all()
354 354 ax.cbar.patch.figure.canvas.draw()
355 355
356 356 def onBtnPress(self, event):
357 357 '''
358 358 Event for mouse button press
359 359 '''
360 360 cb_ax = event.inaxes
361 361 if cb_ax is None:
362 362 return
363 363
364 364 if cb_ax in [ax.cbar.ax for ax in self.axes if ax.cbar]:
365 365 cb_ax.press = event.x, event.y
366 366 else:
367 367 cb_ax.press = None
368 368
369 369 def onMotion(self, event):
370 370 '''
371 371 Event for move inside colorbar
372 372 '''
373 373 cb_ax = event.inaxes
374 374 if cb_ax is None:
375 375 return
376 376 if cb_ax not in [ax.cbar.ax for ax in self.axes if ax.cbar]:
377 377 return
378 378 if cb_ax.press is None:
379 379 return
380 380
381 381 ax = [ax for ax in self.axes if cb_ax == ax.cbar.ax][0]
382 382 xprev, yprev = cb_ax.press
383 383 dx = event.x - xprev
384 384 dy = event.y - yprev
385 385 cb_ax.press = event.x, event.y
386 386 scale = ax.cbar.norm.vmax - ax.cbar.norm.vmin
387 387 perc = 0.03
388 388
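        # Dragging with the left button shifts both colorbar limits by 3% of the
        # current span per motion event; the right button widens or narrows the
        # span symmetrically.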
389 389 if event.button == 1:
390 390 ax.cbar.norm.vmin -= (perc * scale) * numpy.sign(dy)
391 391 ax.cbar.norm.vmax -= (perc * scale) * numpy.sign(dy)
392 392 elif event.button == 3:
393 393 ax.cbar.norm.vmin -= (perc * scale) * numpy.sign(dy)
394 394 ax.cbar.norm.vmax += (perc * scale) * numpy.sign(dy)
395 395
396 396 ax.cbar.draw_all()
397 397 ax.plt.set_norm(ax.cbar.norm)
398 398 ax.cbar.patch.figure.canvas.draw()
399 399
400 400 def onBtnRelease(self, event):
401 401 '''
402 402 Event for mouse button release
403 403 '''
404 404 cb_ax = event.inaxes
405 405 if cb_ax is not None:
406 406 cb_ax.press = None
407 407
408 408 def __add_axes(self, ax, size='30%', pad='8%'):
409 409 '''
410 410 Add new axes to the given figure
411 411 '''
412 412 divider = make_axes_locatable(ax)
413 413 nax = divider.new_horizontal(size=size, pad=pad)
414 414 ax.figure.add_axes(nax)
415 415 return nax
416 416
417 417 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
418 418 '''
419 419 Create a masked array for missing data
420 420 '''
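        # Gaps are detected as time steps larger than 5x the median step; the
        # affected z columns are set to a large sentinel and then masked, so the
        # plot shows the background color instead of stretching adjacent data.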
421 421 if x_buffer.shape[0] < 2:
422 422 return x_buffer, y_buffer, z_buffer
423 423
424 424 deltas = x_buffer[1:] - x_buffer[0:-1]
425 425 x_median = numpy.median(deltas)
426 426
427 427 index = numpy.where(deltas > 5 * x_median)
428 428
429 429 if len(index[0]) != 0:
430 430 z_buffer[::, index[0], ::] = self.__missing
431 431 z_buffer = numpy.ma.masked_inside(z_buffer,
432 432 0.99 * self.__missing,
433 433 1.01 * self.__missing)
434 434
435 435 return x_buffer, y_buffer, z_buffer
436 436
437 437 def decimate(self):
438 438
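        # Subsample the height axis (y, and the last dimension of z) so that
        # roughly self.decimation points per profile are drawn; x is left untouched.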
439 439 # dx = int(len(self.x)/self.__MAXNUMX) + 1
440 440 dy = int(len(self.y) / self.decimation) + 1
441 441
442 442 # x = self.x[::dx]
443 443 x = self.x
444 444 y = self.y[::dy]
445 445 z = self.z[::, ::, ::dy]
446 446
447 447 return x, y, z
448 448
449 449 def format(self):
450 450 '''
451 451 Set min and max values, labels, ticks and titles
452 452 '''
453 453
454 454 if self.xmin is None:
455 455 xmin = self.data.min_time
456 456 else:
457 457 if self.xaxis == 'time':
458 458 dt = self.getDateTime(self.data.min_time)
459 459 xmin = (dt.replace(hour=int(self.xmin), minute=0, second=0) -
460 460 datetime.datetime(1970, 1, 1)).total_seconds()
461 461 if self.data.localtime:
462 462 xmin += time.timezone
463 463 else:
464 464 xmin = self.xmin
465 465
466 466 if self.xmax is None:
467 467 xmax = xmin + self.xrange * 60 * 60
468 468 else:
469 469 if self.xaxis == 'time':
470 470 dt = self.getDateTime(self.data.max_time)
471 471 xmax = (dt.replace(hour=int(self.xmax), minute=59, second=59) -
472 472 datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=1)).total_seconds()
473 473 if self.data.localtime:
474 474 xmax += time.timezone
475 475 else:
476 476 xmax = self.xmax
477
477
478 478 ymin = self.ymin if self.ymin else numpy.nanmin(self.y)
479 479 ymax = self.ymax if self.ymax else numpy.nanmax(self.y)
480 480 #Y = numpy.array([1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000])
481
481
482 482 #i = 1 if numpy.where(
483 483 # abs(ymax-ymin) <= Y)[0][0] < 0 else numpy.where(abs(ymax-ymin) <= Y)[0][0]
484 484 #ystep = Y[i] / 10.
485 485 dig = int(numpy.log10(ymax))
486 486 if dig == 0:
487 487 digD = len(str(ymax)) - 2
488 488 ydec = ymax*(10**digD)
489 489
490 490 dig = int(numpy.log10(ydec))
491 491 ystep = ((ydec + (10**(dig)))//10**(dig))*(10**(dig))
492 492 ystep = ystep/5
493 493 ystep = ystep/(10**digD)
494 494
495 else:
495 else:
496 496 ystep = ((ymax + (10**(dig)))//10**(dig))*(10**(dig))
497 497 ystep = ystep/5
498
498
499 499 if self.xaxis != 'time':
500
500
501 501 dig = int(numpy.log10(xmax))
502
502
503 503 if dig <= 0:
504 504 digD = len(str(xmax)) - 2
505 505 xdec = xmax*(10**digD)
506 506
507 507 dig = int(numpy.log10(xdec))
508 508 xstep = ((xdec + (10**(dig)))//10**(dig))*(10**(dig))
509 509 xstep = xstep*0.5
510 510 xstep = xstep/(10**digD)
511
512 else:
511
512 else:
513 513 xstep = ((xmax + (10**(dig)))//10**(dig))*(10**(dig))
514 514 xstep = xstep/5
515
515
516 516 for n, ax in enumerate(self.axes):
517 517 if ax.firsttime:
518 518 ax.set_facecolor(self.bgcolor)
519 519 ax.yaxis.set_major_locator(MultipleLocator(ystep))
520 520 if self.xscale:
521 521 ax.xaxis.set_major_formatter(FuncFormatter(
522 522 lambda x, pos: '{0:g}'.format(x*self.xscale)))
523 523 if self.yscale:
524 524 ax.yaxis.set_major_formatter(FuncFormatter(
525 525 lambda x, pos: '{0:g}'.format(x*self.yscale)))
526 526 if self.xaxis == 'time':
527 527 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
528 528 ax.xaxis.set_major_locator(LinearLocator(9))
529 529 else:
530 530 ax.xaxis.set_major_locator(MultipleLocator(xstep))
531 531 if self.xlabel is not None:
532 532 ax.set_xlabel(self.xlabel)
533 533 ax.set_ylabel(self.ylabel)
534 534 ax.firsttime = False
535 535 if self.showprofile:
536 536 self.pf_axes[n].set_ylim(ymin, ymax)
537 537 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
538 538 self.pf_axes[n].set_xlabel('dB')
539 539 self.pf_axes[n].grid(b=True, axis='x')
540 540 [tick.set_visible(False)
541 541 for tick in self.pf_axes[n].get_yticklabels()]
542 542 if self.colorbar:
543 543 ax.cbar = plt.colorbar(
544 544 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
545 545 ax.cbar.ax.tick_params(labelsize=8)
546 546 ax.cbar.ax.press = None
547 547 if self.cb_label:
548 548 ax.cbar.set_label(self.cb_label, size=8)
549 549 elif self.cb_labels:
550 550 ax.cbar.set_label(self.cb_labels[n], size=8)
551 551 else:
552 552 ax.cbar = None
553 553 if self.grid:
554 554 ax.grid(True)
555 555
556 556 if not self.polar:
557 557 ax.set_xlim(xmin, xmax)
558 558 ax.set_ylim(ymin, ymax)
559 559 ax.set_title('{} {} {}'.format(
560 560 self.titles[n],
561 561 self.getDateTime(self.data.max_time).strftime(
562 562 '%Y-%m-%d %H:%M:%S'),
563 563 self.time_label),
564 564 size=8)
565 565 else:
566 566 ax.set_title('{}'.format(self.titles[n]), size=8)
567 567 ax.set_ylim(0, 90)
568 568 ax.set_yticks(numpy.arange(0, 90, 20))
569 569 ax.yaxis.labelpad = 40
570 570
571 571 def clear_figures(self):
572 572 '''
573 573 Reset axes for redraw plots
574 574 '''
575 575
576 576 for ax in self.axes:
577 577 ax.clear()
578 578 ax.firsttime = True
579 579 if ax.cbar:
580 580 ax.cbar.remove()
581 581
582 582 def __plot(self):
583 583 '''
584 584 Main function to plot, format and save figures
585 585 '''
586 586
587 587 try:
588 588 self.plot()
589 589 self.format()
590 590 except Exception as e:
591 591 log.warning('{} Plot could not be updated... check data'.format(
592 592 self.CODE), self.name)
593 593 log.error(str(e), '')
594 594 return
595 595
596 596 for n, fig in enumerate(self.figures):
597 597 if self.nrows == 0 or self.nplots == 0:
598 598 log.warning('No data', self.name)
599 599 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
600 600 fig.canvas.manager.set_window_title(self.CODE)
601 601 continue
602 602
603 603 fig.tight_layout()
604 604 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
605 605 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
606 606 fig.canvas.draw()
607 607 if self.show:
608 608 fig.show()
609 609 figpause(0.1)
610 610
611 611 if self.save:
612 612 self.save_figure(n)
613
613
614 614 if self.plot_server:
615 615 self.send_to_server()
616 616 # t = Thread(target=self.send_to_server)
617 617 # t.start()
618 618
619 619 def save_figure(self, n):
620 620 '''
621 621 '''
622 622
623 623 if self.save_counter < self.save_period:
624 624 self.save_counter += 1
625 625 return
626 626
627 627 self.save_counter = 1
628 628
629 629 fig = self.figures[n]
630 630
631 631 if self.save_labels:
632 632 labels = self.save_labels
633 633 else:
634 634 labels = list(range(self.nrows))
635 635
636 636 if self.oneFigure:
637 637 label = ''
638 638 else:
639 639 label = '-{}'.format(labels[n])
640 640 figname = os.path.join(
641 641 self.save,
642 642 self.CODE,
643 643 '{}{}_{}.png'.format(
644 644 self.CODE,
645 645 label,
646 self.getDateTime(self.data.max_time).strftime(
647 '%Y%m%d_%H%M%S'
648 ),
646 self.getDateTime(self.data.max_time).strftime('%Y%m%d_%H%M%S'),
649 647 )
650 648 )
649
651 650 log.log('Saving figure: {}'.format(figname), self.name)
652 651 if not os.path.isdir(os.path.dirname(figname)):
653 652 os.makedirs(os.path.dirname(figname))
654 653 fig.savefig(figname)
655 654
656 655 if self.realtime:
657 656 figname = os.path.join(
658 657 self.save,
659 658 '{}{}_{}.png'.format(
660 659 self.CODE,
661 660 label,
662 661 self.getDateTime(self.data.min_time).strftime(
663 662 '%Y%m%d'
664 663 ),
665 664 )
666 665 )
667 666 fig.savefig(figname)
668 667
669 668 def send_to_server(self):
670 669 '''
671 670 '''
672 671
673 672 if self.sender_counter < self.sender_period:
674 673 self.sender_counter += 1
675 674 return
676 675
677 676 self.sender_counter = 1
678 677 self.data.meta['titles'] = self.titles
679 678 retries = 2
680 679 while True:
681 680 self.socket.send_string(self.data.jsonify(self.plot_name, self.plot_type))
682 681 socks = dict(self.poll.poll(5000))
683 682 if socks.get(self.socket) == zmq.POLLIN:
684 683 reply = self.socket.recv_string()
685 684 if reply == 'ok':
686 685 log.log("Response from server ok", self.name)
687 686 break
688 687 else:
689 688 log.warning(
690 689 "Malformed reply from server: {}".format(reply), self.name)
691 690
692 691 else:
693 692 log.warning(
694 693 "No response from server, retrying...", self.name)
695 694 self.socket.setsockopt(zmq.LINGER, 0)
696 695 self.socket.close()
697 696 self.poll.unregister(self.socket)
698 697 retries -= 1
699 698 if retries == 0:
700 699 log.error(
701 700 "Server seems to be offline, abandoning", self.name)
702 701 self.socket = self.context.socket(zmq.REQ)
703 702 self.socket.connect(self.plot_server)
704 703 self.poll.register(self.socket, zmq.POLLIN)
705 704 time.sleep(1)
706 705 break
707 706 self.socket = self.context.socket(zmq.REQ)
708 707 self.socket.connect(self.plot_server)
709 708 self.poll.register(self.socket, zmq.POLLIN)
710 709 time.sleep(0.5)
711 710
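    # Sketch of the receiving side (an assumption, not part of this module): the
    # code above uses a REQ socket and expects the plain string 'ok' back for
    # every JSON payload, so a matching server could be as simple as:
    #
    #   import zmq
    #   context = zmq.Context()
    #   sock = context.socket(zmq.REP)
    #   sock.bind('tcp://*:4444')         # address/port are hypothetical
    #   while True:
    #       payload = sock.recv_string()  # JSON built by PlotterData.jsonify()
    #       sock.send_string('ok')
    #
    # Only the REQ/REP handshake and the 'ok' reply are implied by send_to_server().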
712 711 def setup(self):
713 712 '''
714 713 This method should be implemented in the child class, the following
715 714 attributes should be set:
716 715
717 716 self.nrows: number of rows
718 717 self.ncols: number of cols
719 718 self.nplots: number of plots (channels or pairs)
720 719 self.ylabel: label for Y axes
721 self.titles: list of axes title
720 self.titles: list of axes title
722 721
723 722 '''
724 723 raise NotImplementedError
725 724
726 725 def plot(self):
727 726 '''
728 727 Must be defined in the child class
729 728 '''
730 729 raise NotImplementedError
731
730
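    # Minimal sketch of a concrete plot (illustrative only; the class name, CODE
    # and data source are hypothetical, the attributes follow the setup() contract above):
    #
    #   class ExamplePlot(Plot):
    #       CODE = 'example'
    #       def setup(self):
    #           self.nrows = 1
    #           self.ncols = 1
    #           self.nplots = 1
    #           self.ylabel = 'Range [km]'
    #           self.titles = ['Example Channel 0']
    #       def plot(self):
    #           # assign ax.plt so format() can attach the colorbar to it
    #           for ax in self.axes:
    #               ax.plt = ax.pcolormesh(self.x, self.y, self.z[0],
    #                                      vmin=self.zmin, vmax=self.zmax,
    #                                      cmap=self.cmaps[0])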
732 731 def run(self, dataOut, **kwargs):
733 732 '''
734 733 Main plotting routine
735 734 '''
736
735
737 736 if self.isConfig is False:
738 737 self.__setup(**kwargs)
739 738 if dataOut.type == 'Parameters':
740 739 t = dataOut.utctimeInit
741 740 else:
742 t = dataOut.utctime
741 t = dataOut.utctime
743 742
744 743 if dataOut.useLocalTime:
745 744 self.getDateTime = datetime.datetime.fromtimestamp
746 745 if not self.localtime:
747 746 t += time.timezone
748 747 else:
749 748 self.getDateTime = datetime.datetime.utcfromtimestamp
750 749 if self.localtime:
751 750 t -= time.timezone
752
751
753 752 if 'buffer' in self.plot_type:
754 753 if self.xmin is None:
755 754 self.tmin = t
756 755 else:
757 756 self.tmin = (
758 757 self.getDateTime(t).replace(
759 hour=self.xmin,
760 minute=0,
758 hour=self.xmin,
759 minute=0,
761 760 second=0) - self.getDateTime(0)).total_seconds()
762 761
763 762 self.data.setup()
764 763 self.isConfig = True
765 764 if self.plot_server:
766 765 self.context = zmq.Context()
767 766 self.socket = self.context.socket(zmq.REQ)
768 767 self.socket.connect(self.plot_server)
769 768 self.poll = zmq.Poller()
770 769 self.poll.register(self.socket, zmq.POLLIN)
771 770
772 771 if dataOut.type == 'Parameters':
773 772 tm = dataOut.utctimeInit
774 773 else:
775 774 tm = dataOut.utctime
776 775
777 776 if not dataOut.useLocalTime and self.localtime:
778 777 tm -= time.timezone
779 778 if dataOut.useLocalTime and not self.localtime:
780 779 tm += time.timezone
781 780
782 if self.xaxis == 'time' and self.data and (tm - self.tmin) >= self.xrange*60*60:
781 if self.xaxis == 'time' and self.data and (tm - self.tmin) >= self.xrange*60*60:
783 782 self.save_counter = self.save_period
784 783 self.__plot()
785 784 self.xmin += self.xrange
786 785 if self.xmin >= 24:
787 786 self.xmin -= 24
788 787 self.tmin += self.xrange*60*60
789 788 self.data.setup()
790 789 self.clear_figures()
791 790
792 791 self.data.update(dataOut, tm)
793 792
794 793 if self.isPlotConfig is False:
795 794 self.__setup_plot()
796 795 self.isPlotConfig = True
797 796
798 797 if self.realtime:
799 798 self.__plot()
800 799 else:
801 800 self.__throttle_plot(self.__plot)#, coerce=coerce)
802 801
803 802 def close(self):
804 803
805 804 if self.data:
806 805 self.save_counter = self.save_period
807 806 self.__plot()
808 807 if self.data and self.pause:
809 808 figpause(10)
810
@@ -1,629 +1,649
1 1 '''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import glob
10 10 import fnmatch
11 11 import datetime
12 12 import time
13 13 import re
14 14 import h5py
15 15 import numpy
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
23 23 from schainpy.model.data.jrodata import Voltage
24 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
24 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
25 25 from numpy import imag
26 26
27 @MPDecorator
27 28 class AMISRReader(ProcessingUnit):
28 29 '''
29 30 classdocs
30 31 '''
31 32
32 33 def __init__(self):
33 34 '''
34 35 Constructor
35 36 '''
36
37
37 38 ProcessingUnit.__init__(self)
38
39
39 40 self.set = None
40 41 self.subset = None
41 42 self.extension_file = '.h5'
42 43 self.dtc_str = 'dtc'
43 44 self.dtc_id = 0
44 45 self.status = True
45 46 self.isConfig = False
46 47 self.dirnameList = []
47 48 self.filenameList = []
48 49 self.fileIndex = None
49 50 self.flagNoMoreFiles = False
50 51 self.flagIsNewFile = 0
51 52 self.filename = ''
52 53 self.amisrFilePointer = None
53
54
55 self.dataset = None
56
57
58
54
55
56 #self.dataset = None
57
58
59
59 60
60 61 self.profileIndex = 0
61
62
62
63
63 64 self.beamCodeByFrame = None
64 65 self.radacTimeByFrame = None
65
66
66 67 self.dataset = None
67
68
69
70
68
69
70
71
71 72 self.__firstFile = True
72
73
73 74 self.buffer = None
74
75
75
76
76 77 self.timezone = 'ut'
77
78
78 79 self.__waitForNewFile = 20
79 self.__filename_online = None
80 self.__filename_online = None
80 81 #Is really necessary create the output object in the initializer
81 82 self.dataOut = Voltage()
82
83 self.dataOut.error=False
84
83 85 def setup(self,path=None,
84 startDate=None,
85 endDate=None,
86 startTime=None,
86 startDate=None,
87 endDate=None,
88 startTime=None,
87 89 endTime=None,
88 90 walk=True,
89 91 timezone='ut',
90 92 all=0,
91 93 code = None,
92 94 nCode = 0,
93 95 nBaud = 0,
94 96 online=False):
95
97
98 #print ("T",path)
99
96 100 self.timezone = timezone
97 101 self.all = all
98 102 self.online = online
99
103
100 104 self.code = code
101 105 self.nCode = int(nCode)
102 106 self.nBaud = int(nBaud)
103
104
105
107
108
109
106 110 #self.findFiles()
107 111 if not(online):
108 112 #Offline file search
109 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
110 114 else:
111 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
112
116
113 117 if not(self.filenameList):
114 118 print("There is no files into the folder: %s"%(path))
115
116 119 sys.exit(-1)
117
120
118 121 self.fileIndex = -1
119
120 self.readNextFile(online)
121
122
123 self.readNextFile(online)
124
122 125 '''
123 126 Add code
124 '''
127 '''
125 128 self.isConfig = True
126
129
127 130 pass
128
129
131
132
130 133 def readAMISRHeader(self,fp):
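        # Read per-pulse metadata from the AMISR HDF5 layout
        # (Raw11/Data/RadacHeader/*, Raw11/Data/Samples/Range, Rx/Frequency)
        # and derive block/profile/sample counts plus the radar controller and
        # system header parameters used by fillJROHeader().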
131 134 header = 'Raw11/Data/RadacHeader'
132 135 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 136 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
134 137 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
135 138 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
136 139 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
137 140 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
138 141 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
140 143 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
140 143 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
141 144 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
142 145 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
143 146 self.frequency = fp.get('Rx/Frequency')
144 147 txAus = fp.get('Raw11/Data/Pulsewidth')
145
146
148
149
147 150 self.nblocks = self.pulseCount.shape[0] #nblocks
148
151
149 152 self.nprofiles = self.pulseCount.shape[1] #nprofile
150 153 self.nsa = self.nsamplesPulse[0,0] #ngates
151 154 self.nchannels = self.beamCode.shape[1]
152 155 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
153 156 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 157 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
155
158
156 159 #filling radar controller header parameters
157 160 self.__ippKm = self.ippSeconds *.15*1e6 # in km
158 161 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
159 162 self.__txB = 0
160 163 nWindows=1
161 self.__nSamples = self.nsa
164 self.__nSamples = self.nsa
162 165 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
164
166 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
167
165 168 #for now, until we understand why the saved code is different (code included even though the code is not in the tuf file)
166 169 #self.__codeType = 0
167 170 # self.__nCode = None
168 171 # self.__nBaud = None
169 172 self.__code = self.code
170 173 self.__codeType = 0
171 174 if self.code != None:
172 175 self.__codeType = 1
173 176 self.__nCode = self.nCode
174 177 self.__nBaud = self.nBaud
175 178 #self.__code = 0
176
179
177 180 #filling system header parameters
178 181 self.__nSamples = self.nsa
179 self.newProfiles = self.nprofiles/self.nchannels
182 self.newProfiles = self.nprofiles/self.nchannels
180 183 self.__channelList = list(range(self.nchannels))
181
184
182 185 self.__frequency = self.frequency[0][0]
183
184 186
185
187
188
186 189 def createBuffers(self):
187
188 pass
189
190
191 pass
192
190 193 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
191 194 self.path = path
192 195 self.startDate = startDate
193 196 self.endDate = endDate
194 197 self.startTime = startTime
195 198 self.endTime = endTime
196 199 self.walk = walk
197
200
198 201 def __checkPath(self):
199 202 if os.path.exists(self.path):
200 203 self.status = 1
201 204 else:
202 205 self.status = 0
203 206 print('Path:%s does not exists'%self.path)
204
207
205 208 return
206
207
209
210
208 211 def __selDates(self, amisr_dirname_format):
209 212 try:
210 213 year = int(amisr_dirname_format[0:4])
211 214 month = int(amisr_dirname_format[4:6])
212 215 dom = int(amisr_dirname_format[6:8])
213 216 thisDate = datetime.date(year,month,dom)
214
217
215 218 if (thisDate>=self.startDate and thisDate <= self.endDate):
216 219 return amisr_dirname_format
217 220 except:
218 221 return None
219
220
222
223
221 224 def __findDataForDates(self,online=False):
222
225
223 226 if not(self.status):
224 227 return None
225
228
226 229 pat = r'\d+.\d+'
227 230 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 231 dirnameList = [x for x in dirnameList if x!=None]
229 232 dirnameList = [x.string for x in dirnameList]
230 233 if not(online):
231 234 dirnameList = [self.__selDates(x) for x in dirnameList]
232 235 dirnameList = [x for x in dirnameList if x!=None]
233 236 if len(dirnameList)>0:
234 237 self.status = 1
235 238 self.dirnameList = dirnameList
236 239 self.dirnameList.sort()
237 240 else:
238 241 self.status = 0
239 242 return None
240
243
241 244 def __getTimeFromData(self):
242 245 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 246 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
244 247
245 248 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
246 249 print('........................................')
247 250 filter_filenameList = []
248 251 self.filenameList.sort()
249 252 #for i in range(len(self.filenameList)-1):
250 253 for i in range(len(self.filenameList)):
251 254 filename = self.filenameList[i]
252 255 fp = h5py.File(filename,'r')
253 256 time_str = fp.get('Time/RadacTimeString')
254
255 startDateTimeStr_File = time_str[0][0].split('.')[0]
257
258 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
259 #startDateTimeStr_File = "2019-12-16 09:21:11"
256 260 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
257 261 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
258
259 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
262
263 #endDateTimeStr_File = "2019-12-16 11:10:11"
264 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
260 265 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
261 266 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
262
267
263 268 fp.close()
264
269
270 #print("check time", startDateTime_File)
265 271 if self.timezone == 'lt':
266 272 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
267 273 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
268
269 274 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
270 275 #self.filenameList.remove(filename)
271 276 filter_filenameList.append(filename)
272
277
273 278 if (endDateTime_File>=endDateTime_Reader):
274 279 break
275
276
280
281
277 282 filter_filenameList.sort()
278 283 self.filenameList = filter_filenameList
279 284 return 1
280
285
281 286 def __filterByGlob1(self, dirName):
282 287 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
283 288 filter_files.sort()
284 289 filterDict = {}
285 290 filterDict.setdefault(dirName)
286 291 filterDict[dirName] = filter_files
287 292 return filterDict
288
293
289 294 def __getFilenameList(self, fileListInKeys, dirList):
290 295 for value in fileListInKeys:
291 296 dirName = list(value.keys())[0]
292 297 for file in value[dirName]:
293 298 filename = os.path.join(dirName, file)
294 299 self.filenameList.append(filename)
295
296
300
301
297 302 def __selectDataForTimes(self, online=False):
298 303 #aun no esta implementado el filtro for tiempo
299 304 if not(self.status):
300 305 return None
301
306
302 307 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
303
308
304 309 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
305
310
306 311 self.__getFilenameList(fileListInKeys, dirList)
307 312 if not(online):
308 313 #filter by time
309 314 if not(self.all):
310 315 self.__getTimeFromData()
311 316
312 317 if len(self.filenameList)>0:
313 318 self.status = 1
314 319 self.filenameList.sort()
315 320 else:
316 321 self.status = 0
317 322 return None
318
323
319 324 else:
320 325 #get the last file - 1
321 326 self.filenameList = [self.filenameList[-2]]
322
327
323 328 new_dirnameList = []
324 329 for dirname in self.dirnameList:
325 330 junk = numpy.array([dirname in x for x in self.filenameList])
326 331 junk_sum = junk.sum()
327 332 if junk_sum > 0:
328 333 new_dirnameList.append(dirname)
329 334 self.dirnameList = new_dirnameList
330 335 return 1
331
336
332 337 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
333 338 endTime=datetime.time(23,59,59),walk=True):
334
339
335 340 if endDate ==None:
336 341 startDate = datetime.datetime.utcnow().date()
337 342 endDate = datetime.datetime.utcnow().date()
338
343
339 344 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
340
345
341 346 self.__checkPath()
342
347
343 348 self.__findDataForDates(online=True)
344
349
345 350 self.dirnameList = [self.dirnameList[-1]]
346
351
347 352 self.__selectDataForTimes(online=True)
348
353
349 354 return
350
351
355
356
352 357 def searchFilesOffLine(self,
353 358 path,
354 359 startDate,
355 360 endDate,
356 361 startTime=datetime.time(0,0,0),
357 362 endTime=datetime.time(23,59,59),
358 363 walk=True):
359
364
360 365 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
361
366
362 367 self.__checkPath()
363
368
364 369 self.__findDataForDates()
365
370
366 371 self.__selectDataForTimes()
367
372
368 373 for i in range(len(self.filenameList)):
369 374 print("%s" %(self.filenameList[i]))
370
371 return
372
375
376 return
377
373 378 def __setNextFileOffline(self):
374 379 idFile = self.fileIndex
375 380
376 381 while (True):
377 382 idFile += 1
378 383 if not(idFile < len(self.filenameList)):
379 384 self.flagNoMoreFiles = 1
380 385 print("No more Files")
386 self.dataOut.error = True
381 387 return 0
382 388
383 389 filename = self.filenameList[idFile]
384 390
385 391 amisrFilePointer = h5py.File(filename,'r')
386
392
387 393 break
388 394
389 395 self.flagIsNewFile = 1
390 396 self.fileIndex = idFile
391 397 self.filename = filename
392 398
393 399 self.amisrFilePointer = amisrFilePointer
394 400
395 401 print("Setting the file: %s"%self.filename)
396 402
397 403 return 1
398
399
404
405
400 406 def __setNextFileOnline(self):
401 407 filename = self.filenameList[0]
402 408 if self.__filename_online != None:
403 409 self.__selectDataForTimes(online=True)
404 410 filename = self.filenameList[0]
405 411 wait = 0
406 412 while self.__filename_online == filename:
407 413 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
408 414 if wait == 5:
409 415 return 0
410 416 sleep(self.__waitForNewFile)
411 417 self.__selectDataForTimes(online=True)
412 418 filename = self.filenameList[0]
413 419 wait += 1
414
420
415 421 self.__filename_online = filename
416
422
417 423 self.amisrFilePointer = h5py.File(filename,'r')
418 424 self.flagIsNewFile = 1
419 425 self.filename = filename
420 426 print("Setting the file: %s"%self.filename)
421 427 return 1
422
423
428
429
424 430 def readData(self):
425 431 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
426 432 re = buffer[:,:,:,0]
427 433 im = buffer[:,:,:,1]
428 434 dataset = re + im*1j
435
429 436 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
430 437 timeset = self.radacTime[:,0]
438
431 439 return dataset,timeset
432
440
433 441 def reshapeData(self):
434 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
442 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
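        # Demultiplex beams: for each channel keep only the profiles whose entry
        # in beamCodeByPulse matches that channel's beam code, then merge blocks
        # so the result has shape (nchannels, nblocks*newProfiles, nsa).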
435 443 channels = self.beamCodeByPulse[0,:]
436 444 nchan = self.nchannels
437 445 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
438 446 nblocks = self.nblocks
439 447 nsamples = self.nsa
440
448
441 449 #Dimensions : nChannels, nProfiles, nSamples
442 new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
450 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
443 451 ############################################
444
452
445 453 for thisChannel in range(nchan):
446 454 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
447 455
448
456
449 457 new_block = numpy.transpose(new_block, (1,0,2,3))
450 458 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
451
452 return new_block
453
459
460 return new_block
461
454 462 def updateIndexes(self):
455
463
456 464 pass
457
465
458 466 def fillJROHeader(self):
459
467
460 468 #fill radar controller header
461 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
469 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
462 470 txA=self.__txA,
463 471 txB=0,
464 472 nWindows=1,
465 473 nHeights=self.__nSamples,
466 474 firstHeight=self.__firstHeight,
467 475 deltaHeight=self.__deltaHeight,
468 476 codeType=self.__codeType,
469 477 nCode=self.__nCode, nBaud=self.__nBaud,
470 478 code = self.__code,
471 479 fClock=1)
472
473
474
480
475 481 #fill system header
476 482 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
477 483 nProfiles=self.newProfiles,
478 484 nChannels=len(self.__channelList),
479 485 adcResolution=14,
480 pciDioBusWith=32)
481
486 pciDioBusWidth=32)
487
482 488 self.dataOut.type = "Voltage"
483
489
484 490 self.dataOut.data = None
485
491
486 492 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
487
493
488 494 # self.dataOut.nChannels = 0
489
495
490 496 # self.dataOut.nHeights = 0
491
497
492 498 self.dataOut.nProfiles = self.newProfiles*self.nblocks
493
499
494 500 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 501 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
496 502 self.dataOut.heightList = ranges/1000.0 #km
497
498
503
504
499 505 self.dataOut.channelList = self.__channelList
500
506
501 507 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
502
508
503 509 # self.dataOut.channelIndexList = None
504
510
505 511 self.dataOut.flagNoData = True
506
507 #Set to TRUE if the data is discontinuous
512
513 #Set to TRUE if the data is discontinuous
508 514 self.dataOut.flagDiscontinuousBlock = False
509
515
510 516 self.dataOut.utctime = None
511
517
512 518 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
513 519 if self.timezone == 'lt':
514 520 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
515 else:
521 else:
516 522 self.dataOut.timeZone = 0 #by default time is UTC
517 523
518 524 self.dataOut.dstFlag = 0
519
525
520 526 self.dataOut.errorCount = 0
521
527
522 528 self.dataOut.nCohInt = 1
523
529
524 530 self.dataOut.flagDecodeData = False #assume the data is already decoded
525
531
526 532 self.dataOut.flagDeflipData = False #assume the data has not been flipped
527
533
528 534 self.dataOut.flagShiftFFT = False
529
535
530 536 self.dataOut.ippSeconds = self.ippSeconds
531
532 #Time interval between profiles
537
538 #Time interval between profiles
533 539 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
534
540
535 541 self.dataOut.frequency = self.__frequency
536
537 542 self.dataOut.realtime = self.online
538 543 pass
539
544
540 545 def readNextFile(self,online=False):
541
546
542 547 if not(online):
543 548 newFile = self.__setNextFileOffline()
544 549 else:
545 newFile = self.__setNextFileOnline()
546
550 newFile = self.__setNextFileOnline()
551
547 552 if not(newFile):
548 553 return 0
549
550 554 #if self.__firstFile:
551 555 self.readAMISRHeader(self.amisrFilePointer)
556
552 557 self.createBuffers()
558
553 559 self.fillJROHeader()
560
554 561 #self.__firstFile = False
555
556
557
562
563
564
558 565 self.dataset,self.timeset = self.readData()
559
566
560 567 if self.endDate!=None:
561 568 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
562 569 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
563 startDateTimeStr_File = time_str[0][0].split('.')[0]
570 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
564 571 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
565 572 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
566 573 if self.timezone == 'lt':
567 574 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
568 575 if (startDateTime_File>endDateTime_Reader):
569 576 return 0
570
577
571 578 self.jrodataset = self.reshapeData()
572 579 #----self.updateIndexes()
573 580 self.profileIndex = 0
574
581
575 582 return 1
576
577
583
584
578 585 def __hasNotDataInBuffer(self):
579 586 if self.profileIndex >= (self.newProfiles*self.nblocks):
580 587 return 1
581 588 return 0
582
583
589
590
584 591 def getData(self):
585
592
586 593 if self.flagNoMoreFiles:
587 594 self.dataOut.flagNoData = True
588 595 return 0
589
596
590 597 if self.__hasNotDataInBuffer():
591 598 if not (self.readNextFile(self.online)):
592 599 return 0
593 600
594
595 if self.dataset is None: # set this condition when there is no data left to read
596 self.dataOut.flagNoData = True
601
602 if self.dataset is None: # set this condition when there is no data left to read
603 self.dataOut.flagNoData = True
597 604 return 0
598
605
599 606 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
600
607
601 608 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
602
609
610 #print("R_t",self.timeset)
611
603 612 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
604 613 #check the jro data basic header and see whether this value is compatible with it
605 614 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
606 615 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
607 616 indexblock = self.profileIndex/self.newProfiles
608 #print indexblock, indexprof
609 self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
617 #print (indexblock, indexprof)
618 diffUTC = 1.8e4 #UTC difference from Peru in seconds --Joab
619 diffUTC = 0
620 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
621 #possible change 18/02/2020
622
623
624
625 #print("utc :",indexblock," __ ",t_comp)
626 #print(numpy.shape(self.timeset))
627 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
628 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
629 #print(self.dataOut.utctime)
610 630 self.dataOut.profileIndex = self.profileIndex
611 631 self.dataOut.flagNoData = False
612 632 # if indexprof == 0:
613 633 # print self.dataOut.utctime
614
634
615 635 self.profileIndex += 1
616
636
617 637 return self.dataOut.data
618
619
638
639
620 640 def run(self, **kwargs):
621 641 '''
622 642 This method will be called many times so here you should put all your code
623 643 '''
624
644
625 645 if not self.isConfig:
626 646 self.setup(**kwargs)
627 647 self.isConfig = True
628
648
629 649 self.getData()
@@ -1,1435 +1,1435
1 1 import numpy
2 2 import time
3 3 import os
4 4 import h5py
5 5 import re
6 6 import datetime
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14 @MPDecorator
15 15 class ParamReader(JRODataReader,ProcessingUnit):
16 16 '''
17 17 Reads HDF5 format files
18 18 path
19 19 startDate
20 20 endDate
21 21 startTime
22 22 endTime
23 23 '''
24 24
25 25 ext = ".hdf5"
26 26 optchar = "D"
27 27 timezone = None
28 28 startTime = None
29 29 endTime = None
30 30 fileIndex = None
31 31 utcList = None #To select data in the utctime list
32 32 blockList = None #List to blocks to be read from the file
33 33 blocksPerFile = None #Number of blocks to be read
34 34 blockIndex = None
35 35 path = None
36 36 #List of Files
37 37 filenameList = None
38 38 datetimeList = None
39 39 #Hdf5 File
40 40 listMetaname = None
41 41 listMeta = None
42 42 listDataname = None
43 43 listData = None
44 44 listShapes = None
45 45 fp = None
46 46 #dataOut reconstruction
47 47 dataOut = None
48 48
49 49 def __init__(self):#, **kwargs):
50 50 ProcessingUnit.__init__(self) #, **kwargs)
51 51 self.dataOut = Parameters()
52 52 return
53 53
54 54 def setup(self, **kwargs):
55 55
56 56 path = kwargs['path']
57 57 startDate = kwargs['startDate']
58 58 endDate = kwargs['endDate']
59 59 startTime = kwargs['startTime']
60 60 endTime = kwargs['endTime']
61 61 walk = kwargs['walk']
62 62 if 'ext' in kwargs:
63 63 ext = kwargs['ext']
64 64 else:
65 65 ext = '.hdf5'
66 66 if 'timezone' in kwargs:
67 67 self.timezone = kwargs['timezone']
68 68 else:
69 69 self.timezone = 'lt'
70 70
71 71 print("[Reading] Searching files in offline mode ...")
72 72 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
73 73 startTime=startTime, endTime=endTime,
74 74 ext=ext, walk=walk)
75 75
76 76 if not(filenameList):
77 77 print("There is no files into the folder: %s"%(path))
78 78 sys.exit(-1)
79 79
80 80 self.fileIndex = -1
81 81 self.startTime = startTime
82 82 self.endTime = endTime
83 83
84 84 self.__readMetadata()
85 85
86 86 self.__setNextFileOffline()
87 87
88 88 return
89 89
90 90 def searchFilesOffLine(self,
91 91 path,
92 92 startDate=None,
93 93 endDate=None,
94 94 startTime=datetime.time(0,0,0),
95 95 endTime=datetime.time(23,59,59),
96 96 ext='.hdf5',
97 97 walk=True):
98 98
99 99 expLabel = ''
100 100 self.filenameList = []
101 101 self.datetimeList = []
102 102
103 103 pathList = []
104 104
105 105 JRODataObj = JRODataReader()
106 106 dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
107 107
108 108 if dateList == []:
109 109 print("[Reading] No *%s files in %s from %s to %s)"%(ext, path,
110 110 datetime.datetime.combine(startDate,startTime).ctime(),
111 111 datetime.datetime.combine(endDate,endTime).ctime()))
112 112
113 113 return None, None
114 114
115 115 if len(dateList) > 1:
116 116 print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate))
117 117 else:
118 118 print("[Reading] data was found for the date %s" %(dateList[0]))
119 119
120 120 filenameList = []
121 121 datetimeList = []
122 122
123 123 #----------------------------------------------------------------------------------
124 124
125 125 for thisPath in pathList:
126 126
127 127 fileList = glob.glob1(thisPath, "*%s" %ext)
128 128 fileList.sort()
129 129
130 130 for file in fileList:
131 131
132 132 filename = os.path.join(thisPath,file)
133 133
134 134 if not isFileInDateRange(filename, startDate, endDate):
135 135 continue
136 136
137 137 thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
138 138
139 139 if not(thisDatetime):
140 140 continue
141 141
142 142 filenameList.append(filename)
143 143 datetimeList.append(thisDatetime)
144 144
145 145 if not(filenameList):
146 146 print("[Reading] No files were found in time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
147 147 return None, None
148 148
149 149 print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime))
150 150 print()
151 151
152 152 self.filenameList = filenameList
153 153 self.datetimeList = datetimeList
154 154
155 155 return pathList, filenameList
156 156
157 157 def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime):
158 158
159 159 """
160 160 Returns 1 if the data file falls within the specified time range.
161 161
162 162 Inputs:
163 163 filename : full name of the data file, in Jicamarca format (.r)
164 164 startDate : start date of the selected range, as datetime.date
165 165 endDate : end date of the selected range, as datetime.date
166 166 startTime : start time of the selected range, as datetime.time
167 167 endTime : end time of the selected range, as datetime.time
168 168
169 169 Return:
170 170 Boolean : returns True if the data file contains data within the specified
171 171 date range, otherwise returns False.
172 172
173 173 Exceptions:
174 174 If the file does not exist or cannot be opened.
175 175 If the header cannot be read.
176 176
177 177 """
178 178
179 179 try:
180 180 fp = h5py.File(filename,'r')
181 181 grp1 = fp['Data']
182 182
183 183 except IOError:
184 184 traceback.print_exc()
185 185 raise IOError("The file %s can't be opened" %(filename))
186
186
187 187 #In case has utctime attribute
188 188 grp2 = grp1['utctime']
189 189 # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time
190 190 thisUtcTime = grp2.value[0]
191 191
192 192 fp.close()
193 193
194 194 if self.timezone == 'lt':
195 195 thisUtcTime -= 5*3600
196 196
197 197 thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
198 198 thisDate = thisDatetime.date()
199 199 thisTime = thisDatetime.time()
200 200
201 201 startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds()
202 202 endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds()
203 203
204 204 #General case
205 205 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
206 206 #-----------o----------------------------o-----------
207 207 # startTime endTime
208 208
209 209 if endTime >= startTime:
210 210 thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
211 211 if numpy.any(thisUtcLog): #If there is one block between the hours mentioned
212 212 return thisDatetime
213 213 return None
214 214
215 215 #If endTime < startTime then endTime belongs to the next day
216 216 #<<<<<<<<<<<o o>>>>>>>>>>>
217 217 #-----------o----------------------------o-----------
218 218 # endTime startTime
219 219
220 220 if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
221 221 return None
222 222
223 223 if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
224 224 return None
225 225
226 226 if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
227 227 return None
228 228
229 229 return thisDatetime
230 230
231 231 def __setNextFileOffline(self):
232 232
233 233 self.fileIndex += 1
234 234 idFile = self.fileIndex
235 235
236 236 if not(idFile < len(self.filenameList)):
237 237 raise schainpy.admin.SchainError("No more Files")
238 238 return 0
239 239
240 240 filename = self.filenameList[idFile]
241 241 filePointer = h5py.File(filename,'r')
242 242 self.filename = filename
243 243 self.fp = filePointer
244 244
245 245 print("Setting the file: %s"%self.filename)
246 246
247 247 self.__setBlockList()
248 248 self.__readData()
249 249 self.blockIndex = 0
250 250 return 1
251 251
252 252 def __setBlockList(self):
253 253 '''
254 254 Selects the data within the times defined
255 255
256 256 self.fp
257 257 self.startTime
258 258 self.endTime
259 259
260 260 self.blockList
261 261 self.blocksPerFile
262 262
263 263 '''
264 264 fp = self.fp
265 265 startTime = self.startTime
266 266 endTime = self.endTime
267 267
268 268 grp = fp['Data']
269 269 thisUtcTime = grp['utctime'].value.astype(numpy.float)[0]
270 270
271 271 #ERROOOOR
272 272 if self.timezone == 'lt':
273 273 thisUtcTime -= 5*3600
274 274
275 275 thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
276 276
277 277 thisDate = thisDatetime.date()
278 278 thisTime = thisDatetime.time()
279 279
280 280 startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
281 281 endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
282 282
283 283 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
284 284
285 285 self.blockList = ind
286 286 self.blocksPerFile = len(ind)
287 287
288 288 return
289 289
290 290 def __readMetadata(self):
291 291 '''
292 292 Reads Metadata
293 293
294 294 self.pathMeta
295 295 self.listShapes
296 296 self.listMetaname
297 297 self.listMeta
298 298
299 299 '''
300 300
301 301 filename = self.filenameList[0]
302 302 fp = h5py.File(filename,'r')
303 303 gp = fp['Metadata']
304 304
305 305 listMetaname = []
306 306 listMetadata = []
307 307 for item in list(gp.items()):
308 308 name = item[0]
309 309
310 310 if name=='array dimensions':
311 311 table = gp[name][:]
312 312 listShapes = {}
313 313 for shapes in table:
314 314 listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4],shapes[5]])
315 315 else:
316 316 data = gp[name].value
317 317 listMetaname.append(name)
318 318 listMetadata.append(data)
319 319
320 320 self.listShapes = listShapes
321 321 self.listMetaname = listMetaname
322 322 self.listMeta = listMetadata
323 323
324 324 fp.close()
325 325 return
326 326
327 327 def __readData(self):
328 328 grp = self.fp['Data']
329 329 listdataname = []
330 330 listdata = []
331 331
332 332 for item in list(grp.items()):
333 333 name = item[0]
334 334 listdataname.append(name)
335 335
336 336 array = self.__setDataArray(grp[name],self.listShapes[name])
337 337 listdata.append(array)
338 338
339 339 self.listDataname = listdataname
340 340 self.listData = listdata
341 341 return
342 342
343 343 def __setDataArray(self, dataset, shapes):
344 344
345 345 nDims = shapes[0]
346 346 nDim2 = shapes[1] #Dimension 0
347 347 nDim1 = shapes[2] #Dimension 1, number of Points or Parameters
348 348 nDim0 = shapes[3] #Dimension 2, number of samples or ranges
349 349 mode = shapes[4] #Mode of storing
350 350 blockList = self.blockList
351 351 blocksPerFile = self.blocksPerFile
352 352
353 353 #Depending on what mode the data was stored
354 354 if mode == 0: #Divided in channels
355 355 arrayData = dataset.value.astype(numpy.float)[0][blockList]
356 356 if mode == 1: #Divided in parameter
357 357 strds = 'table'
358 358 nDatas = nDim1
359 359 newShapes = (blocksPerFile,nDim2,nDim0)
360 360 elif mode==2: #Concatenated in a table
361 361 strds = 'table0'
362 362 arrayData = dataset[strds].value
363 363 #Selecting part of the dataset
364 364 utctime = arrayData[:,0]
365 365 u, indices = numpy.unique(utctime, return_index=True)
366 366
367 367 if blockList.size != indices.size:
368 368 indMin = indices[blockList[0]]
369 369 if blockList[1] + 1 >= indices.size:
370 370 arrayData = arrayData[indMin:,:]
371 371 else:
372 372 indMax = indices[blockList[1] + 1]
373 373 arrayData = arrayData[indMin:indMax,:]
374 374 return arrayData
375 375
376 376 # One dimension
377 377 if nDims == 0:
378 378 arrayData = dataset.value.astype(numpy.float)[0][blockList]
379 379
380 380 # Two dimensions
381 381 elif nDims == 2:
382 382 arrayData = numpy.zeros((blocksPerFile,nDim1,nDim0))
383 383 newShapes = (blocksPerFile,nDim0)
384 384 nDatas = nDim1
385 385
386 386 for i in range(nDatas):
387 387 data = dataset[strds + str(i)].value
388 388 arrayData[:,i,:] = data[blockList,:]
389 389
390 390 # Three dimensions
391 391 else:
392 392 arrayData = numpy.zeros((blocksPerFile,nDim2,nDim1,nDim0))
393 393 for i in range(nDatas):
394 394
395 395 data = dataset[strds + str(i)].value
396 396
397 397 for b in range(blockList.size):
398 398 arrayData[b,:,i,:] = data[:,:,blockList[b]]
399 399
400 400 return arrayData
401 401
402 402 def __setDataOut(self):
403 403 listMeta = self.listMeta
404 404 listMetaname = self.listMetaname
405 405 listDataname = self.listDataname
406 406 listData = self.listData
407 407 listShapes = self.listShapes
408 408
409 409 blockIndex = self.blockIndex
410 410 # blockList = self.blockList
411 411
412 412 for i in range(len(listMeta)):
413 413 setattr(self.dataOut,listMetaname[i],listMeta[i])
414 414
415 415 for j in range(len(listData)):
416 416 nShapes = listShapes[listDataname[j]][0]
417 417 mode = listShapes[listDataname[j]][4]
418 418 if nShapes == 1:
419 419 setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
420 420 elif nShapes > 1:
421 421 setattr(self.dataOut,listDataname[j],listData[j][blockIndex,:])
422 422 elif mode==0:
423 423 setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
424 424 #Mode Meteors
425 425 elif mode ==2:
426 426 selectedData = self.__selectDataMode2(listData[j], blockIndex)
427 427 setattr(self.dataOut, listDataname[j], selectedData)
428 428 return
429 429
430 430 def __selectDataMode2(self, data, blockIndex):
431 431 utctime = data[:,0]
432 432 aux, indices = numpy.unique(utctime, return_inverse=True)
433 433 selInd = numpy.where(indices == blockIndex)[0]
434 434 selData = data[selInd,:]
435 435
436 436 return selData
437 437
438 438 def getData(self):
439 439
440 440 if self.blockIndex==self.blocksPerFile:
441 441 if not( self.__setNextFileOffline() ):
442 442 self.dataOut.flagNoData = True
443 443 return 0
444 444
445 445 self.__setDataOut()
446 446 self.dataOut.flagNoData = False
447 447
448 448 self.blockIndex += 1
449 449
450 450 return
451 451
452 452 def run(self, **kwargs):
453 453
454 454 if not(self.isConfig):
455 455 self.setup(**kwargs)
456 456 self.isConfig = True
457 457
458 458 self.getData()
459 459
460 460 return
461 461
462 462 @MPDecorator
463 463 class ParamWriter(Operation):
464 464 '''
465 465 HDF5 Writer, stores parameters data in HDF5 format files
466 466
467 467 path: path where the files will be stored
468 468 blocksPerFile: number of blocks that will be saved in per HDF5 format file
469 469 mode: selects the data stacking mode: '0' channels, '1' parameters, '2' table (for meteors)
470 470 metadataList: list of attributes that will be stored as metadata
471 471 dataList: list of attributes that will be stored as data
472 472 '''
473 473
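    # Typical call to setup() (attribute names are illustrative; they must exist
    # on the incoming dataOut object):
    #   setup(dataOut, path='/out/hdf5', blocksPerFile=100,
    #         metadataList=['heightList', 'timeZone'],
    #         dataList=['data_param', 'utctime'], mode=1)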
474 474 ext = ".hdf5"
475 475 optchar = "D"
476 476 metaoptchar = "M"
477 477 metaFile = None
478 478 filename = None
479 479 path = None
480 480 setFile = None
481 481 fp = None
482 482 grp = None
483 483 ds = None
484 484 firsttime = True
485 485 #Configurations
486 486 blocksPerFile = None
487 487 blockIndex = None
488 488 dataOut = None
489 489 #Data Arrays
490 490 dataList = None
491 491 metadataList = None
492 492 dsList = None #List of dictionaries with dataset properties
493 493 tableDim = None
494 494 dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
495 495 currentDay = None
496 496 lastTime = None
497 497 setType = None
498 498
499 499 def __init__(self):
500
500
501 501 Operation.__init__(self)
502 502 return
503 503
504 504 def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
505 505 self.path = path
506 506 self.blocksPerFile = blocksPerFile
507 507 self.metadataList = metadataList
508 508 self.dataList = dataList
509 509 self.dataOut = dataOut
510 510 self.mode = mode
511 511 if self.mode is not None:
512 512 self.mode = numpy.zeros(len(self.dataList)) + mode
513 513 else:
514 514 self.mode = numpy.ones(len(self.dataList))
515 515
516 516 self.setType = setType
517 517
518 518 arrayDim = numpy.zeros((len(self.dataList),5))
519 519
520 520 #Table dimensions
521 521 dtype0 = self.dtype
522 522 tableList = []
523 523
524 524 #Dictionary and list of tables
525 525 dsList = []
526 526
527 527 for i in range(len(self.dataList)):
528 528 dsDict = {}
529 529 dataAux = getattr(self.dataOut, self.dataList[i])
530 530 dsDict['variable'] = self.dataList[i]
531 531 #--------------------- Conditionals ------------------------
532 532 #There is no data
533
533
534 534 if dataAux is None:
535
535
536 536 return 0
537 537
538 538 if isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
539 539 dsDict['mode'] = 0
540 540 dsDict['nDim'] = 0
541 541 arrayDim[i,0] = 0
542 542 dsList.append(dsDict)
543 543
544 544 #Mode 2: meteors
545 545 elif self.mode[i] == 2:
546 546 dsDict['dsName'] = 'table0'
547 547 dsDict['mode'] = 2 # Mode meteors
548 548 dsDict['shape'] = dataAux.shape[-1]
549 549 dsDict['nDim'] = 0
550 550 dsDict['dsNumber'] = 1
551 551 arrayDim[i,3] = dataAux.shape[-1]
552 552 arrayDim[i,4] = self.mode[i] #Mode the data was stored
553 553 dsList.append(dsDict)
554 554
555 555 #Mode 1
556 556 else:
557 557 arrayDim0 = dataAux.shape #Data dimensions
558 558 arrayDim[i,0] = len(arrayDim0) #Number of array dimensions
559 559 arrayDim[i,4] = self.mode[i] #Mode the data was stored
560 560 strtable = 'table'
561 561 dsDict['mode'] = 1 # Mode parameters
562 562
563 563 # Three-dimension arrays
564 564 if len(arrayDim0) == 3:
565 565 arrayDim[i,1:-1] = numpy.array(arrayDim0)
566 566 nTables = int(arrayDim[i,2])
567 567 dsDict['dsNumber'] = nTables
568 568 dsDict['shape'] = arrayDim[i,2:4]
569 569 dsDict['nDim'] = 3
570 570
571 571 for j in range(nTables):
572 572 dsDict = dsDict.copy()
573 573 dsDict['dsName'] = strtable + str(j)
574 574 dsList.append(dsDict)
575 575
576 576 # Two-dimension arrays
577 577 elif len(arrayDim0) == 2:
578 578 arrayDim[i,2:-1] = numpy.array(arrayDim0)
579 579 nTables = int(arrayDim[i,2])
580 580 dsDict['dsNumber'] = nTables
581 581 dsDict['shape'] = arrayDim[i,3]
582 582 dsDict['nDim'] = 2
583 583
584 584 for j in range(nTables):
585 585 dsDict = dsDict.copy()
586 586 dsDict['dsName'] = strtable + str(j)
587 587 dsList.append(dsDict)
588 588
589 589 # One-dimension arrays
590 590 elif len(arrayDim0) == 1:
591 591 arrayDim[i,3] = arrayDim0[0]
592 592 dsDict['shape'] = arrayDim0[0]
593 593 dsDict['dsNumber'] = 1
594 594 dsDict['dsName'] = strtable + str(0)
595 595 dsDict['nDim'] = 1
596 596 dsList.append(dsDict)
597 597
598 598 table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0)
599 599 tableList.append(table)
600 600
601 601 self.dsList = dsList
602 602 self.tableDim = numpy.array(tableList, dtype = dtype0)
603 603 self.blockIndex = 0
604 604 timeTuple = time.localtime(dataOut.utctime)
605 605 self.currentDay = timeTuple.tm_yday
606 606
607 607 def putMetadata(self):
608 608
609 609 fp = self.createMetadataFile()
610 610 self.writeMetadata(fp)
611 611 fp.close()
612 612 return
613 613
614 614 def createMetadataFile(self):
615 615 ext = self.ext
616 616 path = self.path
617 617 setFile = self.setFile
618 618
619 619 timeTuple = time.localtime(self.dataOut.utctime)
620 620
621 621 subfolder = ''
622 622 fullpath = os.path.join( path, subfolder )
623 623
624 624 if not( os.path.exists(fullpath) ):
625 625 os.mkdir(fullpath)
626 626 setFile = -1 #initialize the set counter
627 627
628 628 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
629 629 fullpath = os.path.join( path, subfolder )
630 630
631 631 if not( os.path.exists(fullpath) ):
632 632 os.mkdir(fullpath)
633 633 setFile = -1 #initialize the set counter
634 634
635 635 else:
636 636 filesList = os.listdir( fullpath )
637 637 filesList = sorted( filesList, key=str.lower )
638 638 if len( filesList ) > 0:
639 639 filesList = [k for k in filesList if k.startswith(self.metaoptchar)]
640 640 filen = filesList[-1]
641 641 # the filename must have the following format
642 642 # 0 1234 567 89A BCDE (hex)
643 643 # x YYYY DDD SSS .ext
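# For illustration (hypothetical values): with metaoptchar 'M', year 2018, day-of-year 302
# and setFile 3, the pattern below yields 'M2018302003.hdf5'; when setType is not None the
# set field is hour*60+minute and is written with four digits, e.g. 'M20183020045.hdf5'.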
644 644 if isNumber( filen[8:11] ):
645 645 setFile = int( filen[8:11] ) #initialize the set counter from the last file's set number
646 646 else:
647 647 setFile = -1
648 648 else:
649 649 setFile = -1 #initialize the set counter
650 650
651 651 if self.setType is None:
652 652 setFile += 1
653 653 file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
654 654 timeTuple.tm_year,
655 655 timeTuple.tm_yday,
656 656 setFile,
657 657 ext )
658 658 else:
659 659 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
660 660 file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
661 661 timeTuple.tm_year,
662 662 timeTuple.tm_yday,
663 663 setFile,
664 664 ext )
665 665
666 666 filename = os.path.join( path, subfolder, file )
667 667 self.metaFile = file
668 668 #Setting HDF5 File
669 669 fp = h5py.File(filename,'w')
670 670
671 671 return fp
672 672
673 673 def writeMetadata(self, fp):
674 674
675 675 grp = fp.create_group("Metadata")
676 676 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
677 677
678 678 for i in range(len(self.metadataList)):
679 679 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
680 680 return
681 681
682 682 def timeFlag(self):
683 683 currentTime = self.dataOut.utctime
684 684
685 685 if self.lastTime is None:
686 686 self.lastTime = currentTime
687 687
688 688 #Day
689 689 timeTuple = time.localtime(currentTime)
690 690 dataDay = timeTuple.tm_yday
691 691
692 692 #Time
693 693 timeDiff = currentTime - self.lastTime
694 694
695 695 #If the day changed or the gap between samples exceeds the time limit
696 696 if dataDay != self.currentDay:
697 697 self.currentDay = dataDay
698 698 return True
699 699 elif timeDiff > 3*60*60:
700 700 self.lastTime = currentTime
701 701 return True
702 702 else:
703 703 self.lastTime = currentTime
704 704 return False
705 705
706 706 def setNextFile(self):
707
707
708 708 ext = self.ext
709 709 path = self.path
710 710 setFile = self.setFile
711 711 mode = self.mode
712 712
713 713 timeTuple = time.localtime(self.dataOut.utctime)
714 714 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
715 715
716 716 fullpath = os.path.join( path, subfolder )
717 717
718 718 if os.path.exists(fullpath):
719 719 filesList = os.listdir( fullpath )
720 filesList = [k for k in filesList if 'M' in k]
720 ##filesList = [k for k in filesList if 'M' in k]
721 721 if len( filesList ) > 0:
722 722 filesList = sorted( filesList, key=str.lower )
723 723 filen = filesList[-1]
724 724 # the filename must have the following format
725 725 # 0 1234 567 89A BCDE (hex)
726 726 # x YYYY DDD SSS .ext
727 727 if isNumber( filen[8:11] ):
728 728 setFile = int( filen[8:11] ) #initialize the set counter from the last file's set number
729 729 else:
730 730 setFile = -1
731 731 else:
732 732 setFile = -1 #initialize the set counter
733 733 else:
734 734 os.makedirs(fullpath)
735 735 setFile = -1 #initialize the set counter
736 736
737 737 if self.setType is None:
738 738 setFile += 1
739 739 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
740 740 timeTuple.tm_year,
741 741 timeTuple.tm_yday,
742 742 setFile,
743 743 ext )
744 744 else:
745 745 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
746 746 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
747 747 timeTuple.tm_year,
748 748 timeTuple.tm_yday,
749 749 setFile,
750 750 ext )
751 751
752 752 filename = os.path.join( path, subfolder, file )
753 753
754 754 #Setting HDF5 File
755 755 fp = h5py.File(filename,'w')
756 756 #write metadata
757 757 self.writeMetadata(fp)
758 758 #Write data
759 759 grp = fp.create_group("Data")
760 760 ds = []
761 761 data = []
762 762 dsList = self.dsList
763 763 i = 0
764 764 while i < len(dsList):
765 765 dsInfo = dsList[i]
766 766 #One-dimension data
767 767 if dsInfo['mode'] == 0:
768 768 ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64)
769 769 ds.append(ds0)
770 770 data.append([])
771 771 i += 1
772 772 continue
773 773
774 774 elif dsInfo['mode'] == 2:
775 775 grp0 = grp.create_group(dsInfo['variable'])
776 776 ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data = numpy.zeros((1,dsInfo['shape'])) , maxshape=(None,dsInfo['shape']), chunks=True)
777 777 ds.append(ds0)
778 778 data.append([])
779 779 i += 1
780 780 continue
781 781
782 782 elif dsInfo['mode'] == 1:
783 783 grp0 = grp.create_group(dsInfo['variable'])
784 784
785 785 for j in range(dsInfo['dsNumber']):
786 786 dsInfo = dsList[i]
787 787 tableName = dsInfo['dsName']
788
788
789 789
790 790 if dsInfo['nDim'] == 3:
791 791 shape = dsInfo['shape'].astype(int)
792 792 ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1) , data = numpy.zeros((shape[0],shape[1],1)), maxshape = (None,shape[1],None), chunks=True)
793 793 else:
794 794 shape = int(dsInfo['shape'])
795 795 ds0 = grp0.create_dataset(tableName, (1,shape), data = numpy.zeros((1,shape)) , maxshape=(None,shape), chunks=True)
796 796
797 797 ds.append(ds0)
798 798 data.append([])
799 799 i += 1
800 800
801 801 fp.flush()
802 802 fp.close()
803 803
804 804 log.log('creating file: {}'.format(filename), 'Writing')
805 805 self.filename = filename
806 806 self.ds = ds
807 807 self.data = data
808 808 self.firsttime = True
809 809 self.blockIndex = 0
810 810 return
811 811
812 812 def putData(self):
813 813
814 814 if self.blockIndex == self.blocksPerFile or self.timeFlag():
815 815 self.setNextFile()
816 816
817 817 self.readBlock()
818 818 self.setBlock() #Prepare data to be written
819 819 self.writeBlock() #Write data
820 820
821 821 return
822 822
823 823 def readBlock(self):
824 824
825 825 '''
826 826 Refreshes the HDF5 dataset handles
827 827
828 828 Reopens the current file in 'r+' mode and updates
829 829 self.fp, self.grp and self.ds
830 830 '''
831 831 dsList = self.dsList
832 832 ds = self.ds
833 833 #Setting HDF5 File
834 834 fp = h5py.File(self.filename,'r+')
835 835 grp = fp["Data"]
836 836 ind = 0
837 837
838 838 while ind < len(dsList):
839 839 dsInfo = dsList[ind]
840 840
841 841 if dsInfo['mode'] == 0:
842 842 ds0 = grp[dsInfo['variable']]
843 843 ds[ind] = ds0
844 844 ind += 1
845 845 else:
846 846
847 847 grp0 = grp[dsInfo['variable']]
848 848
849 849 for j in range(dsInfo['dsNumber']):
850 850 dsInfo = dsList[ind]
851 851 ds0 = grp0[dsInfo['dsName']]
852 852 ds[ind] = ds0
853 853 ind += 1
854 854
855 855 self.fp = fp
856 856 self.grp = grp
857 857 self.ds = ds
858 858
859 859 return
860 860
861 861 def setBlock(self):
862 862 '''
863 863 Prepares the arrays to be written
864 864
865 865 Copies the dataOut attributes listed in dsList into
866 866 self.data, splitting 2D/3D arrays into one entry per dataset
867 867 '''
868 868 #Creating Arrays
869 869 dsList = self.dsList
870 870 data = self.data
871 871 ind = 0
872 872
873 873 while ind < len(dsList):
874 874 dsInfo = dsList[ind]
875 875 dataAux = getattr(self.dataOut, dsInfo['variable'])
876 876
877 877 mode = dsInfo['mode']
878 878 nDim = dsInfo['nDim']
879 879
880 880 if mode == 0 or mode == 2 or nDim == 1:
881 881 data[ind] = dataAux
882 882 ind += 1
883 883 # elif nDim == 1:
884 884 # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
885 885 # ind += 1
886 886 elif nDim == 2:
887 887 for j in range(dsInfo['dsNumber']):
888 888 data[ind] = dataAux[j,:]
889 889 ind += 1
890 890 elif nDim == 3:
891 891 for j in range(dsInfo['dsNumber']):
892 892 data[ind] = dataAux[:,j,:]
893 893 ind += 1
894 894
895 895 self.data = data
896 896 return
897 897
898 898 def writeBlock(self):
899 899 '''
900 900 Saves the block in the HDF5 file
901 901 '''
902 902 dsList = self.dsList
903 903
904 904 for i in range(len(self.ds)):
905 905 dsInfo = dsList[i]
906 906 nDim = dsInfo['nDim']
907 907 mode = dsInfo['mode']
908 908
909 909 # First time
910 910 if self.firsttime:
911 911 if type(self.data[i]) == numpy.ndarray:
912 912
913 913 if nDim == 3:
914 914 self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1))
915 915 self.ds[i].resize(self.data[i].shape)
916 916 if mode == 2:
917 917 self.ds[i].resize(self.data[i].shape)
918 918 self.ds[i][:] = self.data[i]
919 919 else:
920 920
921 921 # From second time
922 922 # Meteors!
923 923 if mode == 2:
924 924 dataShape = self.data[i].shape
925 925 dsShape = self.ds[i].shape
926 926 self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1]))
927 927 self.ds[i][dsShape[0]:,:] = self.data[i]
928 928 # No dimension
929 929 elif mode == 0:
930 930 self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
931 931 self.ds[i][0,-1] = self.data[i]
932 932 # One dimension
933 933 elif nDim == 1:
934 934 self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
935 935 self.ds[i][-1,:] = self.data[i]
936 936 # Two dimension
937 937 elif nDim == 2:
938 938 self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1]))
939 939 self.ds[i][self.blockIndex,:] = self.data[i]
940 940 # Three dimensions
941 941 elif nDim == 3:
942 942 self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1))
943 943 self.ds[i][:,:,-1] = self.data[i]
944 944
945 945 self.firsttime = False
946 946 self.blockIndex += 1
947 947
948 948 #Close to save changes
949 949 self.fp.flush()
950 950 self.fp.close()
951 951 return
952 952
953 953 def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
954 954
955 955 self.dataOut = dataOut
956 956 if not(self.isConfig):
957 self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
957 self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
958 958 metadataList=metadataList, dataList=dataList, mode=mode,
959 959 setType=setType)
960 960
961 961 self.isConfig = True
962 962 self.setNextFile()
963 963
964 964 self.putData()
965 965 return
966
966
967 967
968 968 @MPDecorator
969 969 class ParameterReader(Reader, ProcessingUnit):
970 970 '''
971 971 Reads HDF5 format files
972 972 '''
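A standalone inspection sketch of the kind of file this reader consumes (the file name is hypothetical; the 'Metadata/variables' table and the 'Data' group layout mirror what __readMetadata()/__readData() below look for):

    import h5py

    with h5py.File('D2018302000.hdf5', 'r') as fp:
        for name, nDim in fp['Metadata']['variables'][:]:
            print(name.decode(), 'nDim =', nDim)
        for name, item in fp['Data'].items():
            # scalar variables are datasets, array variables are groups of 'tableNN' datasets
            print(name, list(item.keys()) if isinstance(item, h5py.Group) else item.shape)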
973 973
974 974 def __init__(self):
975 975 ProcessingUnit.__init__(self)
976 976 self.dataOut = Parameters()
977 977 self.ext = ".hdf5"
978 978 self.optchar = "D"
979 979 self.timezone = "lt"
980 980 self.listMetaname = []
981 981 self.listMeta = []
982 982 self.listDataname = []
983 983 self.listData = []
984 984 self.listShapes = []
985 985 self.open_file = h5py.File
986 986 self.open_mode = 'r'
987 987 self.metadata = False
988 988 self.filefmt = "*%Y%j***"
989 989 self.folderfmt = "*%Y%j"
990 990
991 991 def setup(self, **kwargs):
992 992
993 993 self.set_kwargs(**kwargs)
994 994 if not self.ext.startswith('.'):
995 self.ext = '.{}'.format(self.ext)
995 self.ext = '.{}'.format(self.ext)
996 996
997 997 if self.online:
998 998 log.log("Searching files in online mode...", self.name)
999 999
1000 1000 for nTries in range(self.nTries):
1001 1001 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1002 self.endDate, self.expLabel, self.ext, self.walk,
1002 self.endDate, self.expLabel, self.ext, self.walk,
1003 1003 self.filefmt, self.folderfmt)
1004 1004
1005 1005 try:
1006 1006 fullpath = next(fullpath)
1007 1007 except:
1008 1008 fullpath = None
1009
1009
1010 1010 if fullpath:
1011 1011 break
1012 1012
1013 1013 log.warning(
1014 1014 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1015 self.delay, self.path, nTries + 1),
1015 self.delay, self.path, nTries + 1),
1016 1016 self.name)
1017 1017 time.sleep(self.delay)
1018 1018
1019 1019 if not(fullpath):
1020 1020 raise schainpy.admin.SchainError(
1021 'There isn\'t any valid file in {}'.format(self.path))
1021 'There isn\'t any valid file in {}'.format(self.path))
1022 1022
1023 1023 pathname, filename = os.path.split(fullpath)
1024 1024 self.year = int(filename[1:5])
1025 1025 self.doy = int(filename[5:8])
1026 self.set = int(filename[8:11]) - 1
1026 self.set = int(filename[8:11]) - 1
1027 1027 else:
1028 1028 log.log("Searching files in {}".format(self.path), self.name)
1029 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1029 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1030 1030 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1031
1031
1032 1032 self.setNextFile()
1033 1033
1034 1034 return
1035 1035
1036 1036 def readFirstHeader(self):
1037 1037 '''Read metadata and data'''
1038 1038
1039 self.__readMetadata()
1039 self.__readMetadata()
1040 1040 self.__readData()
1041 1041 self.__setBlockList()
1042 1042 self.blockIndex = 0
1043
1043
1044 1044 return
1045 1045
1046 1046 def __setBlockList(self):
1047 1047 '''
1048 1048 Selects the data within the times defined
1049 1049
1050 1050 self.fp
1051 1051 self.startTime
1052 1052 self.endTime
1053 1053 self.blockList
1054 1054 self.blocksPerFile
1055 1055
1056 1056 '''
1057 1057
1058 1058 startTime = self.startTime
1059 1059 endTime = self.endTime
1060 1060
1061 1061 index = self.listDataname.index('utctime')
1062 1062 thisUtcTime = self.listData[index]
1063 1063 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
1064 1064
1065 1065 if self.timezone == 'lt':
1066 1066 thisUtcTime -= 5*3600
1067 1067
1068 1068 thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
1069 1069
1070 1070 thisDate = thisDatetime.date()
1071 1071 thisTime = thisDatetime.time()
1072 1072
1073 1073 startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
1074 1074 endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
1075 1075
1076 1076 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
1077 1077
1078 1078 self.blockList = ind
1079 1079 self.blocksPerFile = len(ind)
1080 1080 return
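A worked example of the epoch arithmetic above (illustrative values): for a first block dated 2018-10-29 and startTime 05:00:00,

    import datetime

    thisDate = datetime.date(2018, 10, 29)
    startTime = datetime.time(5, 0, 0)
    startUtcTime = (datetime.datetime.combine(thisDate, startTime)
                    - datetime.datetime(1970, 1, 1)).total_seconds()
    print(startUtcTime)   # 1540789200.0

and only the blocks whose (timezone-shifted) utctime falls in [startUtcTime, endUtcTime) are kept in self.blockList.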
1081 1081
1082 1082 def __readMetadata(self):
1083 1083 '''
1084 1084 Reads Metadata
1085 1085 '''
1086 1086
1087 1087 listMetaname = []
1088 1088 listMetadata = []
1089 1089 if 'Metadata' in self.fp:
1090 1090 gp = self.fp['Metadata']
1091 1091 for item in list(gp.items()):
1092 1092 name = item[0]
1093 1093
1094 1094 if name=='variables':
1095 1095 table = gp[name][:]
1096 1096 listShapes = {}
1097 1097 for shapes in table:
1098 1098 listShapes[shapes[0].decode()] = numpy.array([shapes[1]])
1099 1099 else:
1100 1100 data = gp[name].value
1101 1101 listMetaname.append(name)
1102 listMetadata.append(data)
1102 listMetadata.append(data)
1103 1103 elif self.metadata:
1104 1104 metadata = json.loads(self.metadata)
1105 1105 listShapes = {}
1106 1106 for tup in metadata:
1107 1107 name, values, dim = tup
1108 1108 if dim == -1:
1109 1109 listMetaname.append(name)
1110 1110 listMetadata.append(self.fp[values].value)
1111 1111 else:
1112 1112 listShapes[name] = numpy.array([dim])
1113 1113 else:
1114 1114 raise IOError('Missing Metadata group in file or metadata info')
1115 1115
1116 1116 self.listShapes = listShapes
1117 1117 self.listMetaname = listMetaname
1118 self.listMeta = listMetadata
1118 self.listMeta = listMetadata
1119 1119
1120 1120 return
1121 1121
1122 1122 def __readData(self):
1123 1123
1124 1124 listdataname = []
1125 1125 listdata = []
1126
1126
1127 1127 if 'Data' in self.fp:
1128 1128 grp = self.fp['Data']
1129 1129 for item in list(grp.items()):
1130 1130 name = item[0]
1131 1131 listdataname.append(name)
1132 1132 dim = self.listShapes[name][0]
1133 1133 if dim == 0:
1134 1134 array = grp[name].value
1135 1135 else:
1136 1136 array = []
1137 1137 for i in range(dim):
1138 1138 array.append(grp[name]['table{:02d}'.format(i)].value)
1139 1139 array = numpy.array(array)
1140
1140
1141 1141 listdata.append(array)
1142 1142 elif self.metadata:
1143 1143 metadata = json.loads(self.metadata)
1144 1144 for tup in metadata:
1145 1145 name, values, dim = tup
1146 1146 listdataname.append(name)
1147 1147 if dim == -1:
1148 1148 continue
1149 1149 elif dim == 0:
1150 1150 array = self.fp[values].value
1151 1151 else:
1152 1152 array = []
1153 1153 for var in values:
1154 1154 array.append(self.fp[var].value)
1155 1155 array = numpy.array(array)
1156 1156 listdata.append(array)
1157 1157 else:
1158 1158 raise IOError('Missing Data group in file or metadata info')
1159 1159
1160 1160 self.listDataname = listdataname
1161 1161 self.listData = listdata
1162 1162 return
1163
1163
1164 1164 def getData(self):
1165 1165
1166 1166 for i in range(len(self.listMeta)):
1167 1167 setattr(self.dataOut, self.listMetaname[i], self.listMeta[i])
1168 1168
1169 1169 for j in range(len(self.listData)):
1170 1170 dim = self.listShapes[self.listDataname[j]][0]
1171 1171 if dim == 0:
1172 1172 setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex])
1173 1173 else:
1174 1174 setattr(self.dataOut, self.listDataname[j], self.listData[j][:,self.blockIndex])
1175 1175
1176 1176 self.dataOut.paramInterval = self.interval
1177 1177 self.dataOut.flagNoData = False
1178 1178 self.blockIndex += 1
1179 1179
1180 1180 return
1181 1181
1182 1182 def run(self, **kwargs):
1183 1183
1184 1184 if not(self.isConfig):
1185 1185 self.setup(**kwargs)
1186 1186 self.isConfig = True
1187 1187
1188 1188 if self.blockIndex == self.blocksPerFile:
1189 1189 self.setNextFile()
1190 1190
1191 1191 self.getData()
1192 1192
1193 1193 return
1194 1194
1195 1195 @MPDecorator
1196 1196 class ParameterWriter(Operation):
1197 1197 '''
1198 1198 HDF5 Writer, stores parameters data in HDF5 format files
1199 1199
1200 1200 path: path where the files will be stored
1201 1201 blocksPerFile: number of blocks that will be saved per HDF5 file
1202 1202 mode: selects the data stacking mode: '0' channels, '1' parameters, '2' table (for meteors)
1203 1203 metadataList: list of attributes that will be stored as metadata
1204 1204 dataList: list of attributes that will be stored as data
1205 1205 '''
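The resulting file layout, reconstructed from writeMetadata()/writeData() below (file and variable names are hypothetical examples):

    D2018302000.hdf5
    +-- Metadata
    |   +-- variables                    (table of (name, nDim), one row per dataList entry)
    |   +-- one dataset per metadataList entry
    +-- Data
        +-- <scalar variable>            (dataset of shape (blocksPerFile,))
        +-- <array variable>             (group)
            +-- table00, table01, ...    (one dataset per channel, shape (blocksPerFile, ...))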
1206 1206
1207 1207
1208 1208 ext = ".hdf5"
1209 1209 optchar = "D"
1210 1210 metaoptchar = "M"
1211 1211 metaFile = None
1212 1212 filename = None
1213 1213 path = None
1214 1214 setFile = None
1215 1215 fp = None
1216 1216 grp = None
1217 1217 ds = None
1218 1218 firsttime = True
1219 1219 #Configurations
1220 1220 blocksPerFile = None
1221 1221 blockIndex = None
1222 1222 dataOut = None
1223 1223 #Data Arrays
1224 1224 dataList = None
1225 1225 metadataList = None
1226 1226 dsList = None #List of dictionaries with dataset properties
1227 1227 tableDim = None
1228 1228 dtype = [('name', 'S20'),('nDim', 'i')]
1229 1229 currentDay = None
1230 1230 lastTime = None
1231 1231
1232 1232 def __init__(self):
1233
1233
1234 1234 Operation.__init__(self)
1235 1235 return
1236 1236
1237 1237 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
1238 1238 self.path = path
1239 1239 self.blocksPerFile = blocksPerFile
1240 1240 self.metadataList = metadataList
1241 1241 self.dataList = dataList
1242 1242 self.setType = setType
1243 1243
1244 1244 tableList = []
1245 1245 dsList = []
1246 1246
1247 1247 for i in range(len(self.dataList)):
1248 1248 dsDict = {}
1249 1249 dataAux = getattr(self.dataOut, self.dataList[i])
1250 1250 dsDict['variable'] = self.dataList[i]
1251 1251
1252 1252 if dataAux is None:
1253 1253 continue
1254 1254 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
1255 1255 dsDict['nDim'] = 0
1256 1256 else:
1257 1257 dsDict['nDim'] = len(dataAux.shape)
1258 1258 dsDict['shape'] = dataAux.shape
1259 1259 dsDict['dsNumber'] = dataAux.shape[0]
1260
1260
1261 1261 dsList.append(dsDict)
1262 1262 tableList.append((self.dataList[i], dsDict['nDim']))
1263 1263
1264 1264 self.dsList = dsList
1265 1265 self.tableDim = numpy.array(tableList, dtype=self.dtype)
1266 1266 self.currentDay = self.dataOut.datatime.date()
1267 1267
1268 1268 def timeFlag(self):
1269 1269 currentTime = self.dataOut.utctime
1270 1270 timeTuple = time.localtime(currentTime)
1271 1271 dataDay = timeTuple.tm_yday
1272 1272
1273 1273 if self.lastTime is None:
1274 1274 self.lastTime = currentTime
1275 1275 self.currentDay = dataDay
1276 1276 return False
1277
1277
1278 1278 timeDiff = currentTime - self.lastTime
1279 1279
1280 1280 #If the day changed or the gap between samples exceeds the time limit
1281 1281 if dataDay != self.currentDay:
1282 1282 self.currentDay = dataDay
1283 1283 return True
1284 1284 elif timeDiff > 3*60*60:
1285 1285 self.lastTime = currentTime
1286 1286 return True
1287 1287 else:
1288 1288 self.lastTime = currentTime
1289 1289 return False
1290 1290
1291 1291 def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
1292 1292
1293 1293 self.dataOut = dataOut
1294 1294 if not(self.isConfig):
1295 self.setup(path=path, blocksPerFile=blocksPerFile,
1295 self.setup(path=path, blocksPerFile=blocksPerFile,
1296 1296 metadataList=metadataList, dataList=dataList,
1297 1297 setType=setType)
1298 1298
1299 1299 self.isConfig = True
1300 1300 self.setNextFile()
1301 1301
1302 1302 self.putData()
1303 1303 return
1304
1304
1305 1305 def setNextFile(self):
1306
1306
1307 1307 ext = self.ext
1308 1308 path = self.path
1309 1309 setFile = self.setFile
1310 1310
1311 1311 timeTuple = time.localtime(self.dataOut.utctime)
1312 1312 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1313 1313 fullpath = os.path.join(path, subfolder)
1314 1314
1315 1315 if os.path.exists(fullpath):
1316 1316 filesList = os.listdir(fullpath)
1317 1317 filesList = [k for k in filesList if k.startswith(self.optchar)]
1318 1318 if len( filesList ) > 0:
1319 1319 filesList = sorted(filesList, key=str.lower)
1320 1320 filen = filesList[-1]
1321 1321 # the filename must have the following format
1322 1322 # 0 1234 567 89A BCDE (hex)
1323 1323 # x YYYY DDD SSS .ext
1324 1324 if isNumber(filen[8:11]):
1325 1325 setFile = int(filen[8:11]) #initialize the set counter from the last file's set number
1326 1326 else:
1327 1327 setFile = -1
1328 1328 else:
1329 1329 setFile = -1 #initialize the set counter
1330 1330 else:
1331 1331 os.makedirs(fullpath)
1332 1332 setFile = -1 #initialize the set counter
1333 1333
1334 1334 if self.setType is None:
1335 1335 setFile += 1
1336 1336 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
1337 1337 timeTuple.tm_year,
1338 1338 timeTuple.tm_yday,
1339 1339 setFile,
1340 1340 ext )
1341 1341 else:
1342 1342 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
1343 1343 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
1344 1344 timeTuple.tm_year,
1345 1345 timeTuple.tm_yday,
1346 1346 setFile,
1347 1347 ext )
1348 1348
1349 1349 self.filename = os.path.join( path, subfolder, file )
1350 1350
1351 1351 #Setting HDF5 File
1352 1352 self.fp = h5py.File(self.filename, 'w')
1353 1353 #write metadata
1354 1354 self.writeMetadata(self.fp)
1355 1355 #Write data
1356 1356 self.writeData(self.fp)
1357 1357
1358 1358 def writeMetadata(self, fp):
1359 1359
1360 1360 grp = fp.create_group("Metadata")
1361 1361 grp.create_dataset('variables', data=self.tableDim, dtype=self.dtype)
1362 1362
1363 1363 for i in range(len(self.metadataList)):
1364 1364 if not hasattr(self.dataOut, self.metadataList[i]):
1365 1365 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
1366 1366 continue
1367 1367 value = getattr(self.dataOut, self.metadataList[i])
1368 1368 grp.create_dataset(self.metadataList[i], data=value)
1369 1369 return
1370 1370
1371 1371 def writeData(self, fp):
1372
1372
1373 1373 grp = fp.create_group("Data")
1374 1374 dtsets = []
1375 1375 data = []
1376
1376
1377 1377 for dsInfo in self.dsList:
1378 1378 if dsInfo['nDim'] == 0:
1379 1379 ds = grp.create_dataset(
1380 dsInfo['variable'],
1380 dsInfo['variable'],
1381 1381 (self.blocksPerFile, ),
1382 chunks=True,
1382 chunks=True,
1383 1383 dtype=numpy.float64)
1384 1384 dtsets.append(ds)
1385 1385 data.append((dsInfo['variable'], -1))
1386 1386 else:
1387 1387 sgrp = grp.create_group(dsInfo['variable'])
1388 1388 for i in range(dsInfo['dsNumber']):
1389 1389 ds = sgrp.create_dataset(
1390 'table{:02d}'.format(i),
1390 'table{:02d}'.format(i),
1391 1391 (self.blocksPerFile, ) + dsInfo['shape'][1:],
1392 1392 chunks=True)
1393 1393 dtsets.append(ds)
1394 1394 data.append((dsInfo['variable'], i))
1395 1395 fp.flush()
1396 1396
1397 1397 log.log('Creating file: {}'.format(fp.filename), self.name)
1398
1398
1399 1399 self.ds = dtsets
1400 1400 self.data = data
1401 1401 self.firsttime = True
1402 1402 self.blockIndex = 0
1403 1403 return
1404 1404
1405 1405 def putData(self):
1406 1406
1407 1407 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
1408 1408 self.closeFile()
1409 1409 self.setNextFile()
1410 1410
1411 1411 for i, ds in enumerate(self.ds):
1412 1412 attr, ch = self.data[i]
1413 1413 if ch == -1:
1414 1414 ds[self.blockIndex] = getattr(self.dataOut, attr)
1415 1415 else:
1416 1416 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
1417 1417
1418 1418 self.fp.flush()
1419 1419 self.blockIndex += 1
1420 1420 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
1421 1421
1422 1422 return
1423 1423
1424 1424 def closeFile(self):
1425 1425
1426 1426 if self.blockIndex != self.blocksPerFile:
1427 1427 for ds in self.ds:
1428 1428 ds.resize(self.blockIndex, axis=0)
1429 1429
1430 1430 self.fp.flush()
1431 1431 self.fp.close()
1432 1432
1433 1433 def close(self):
1434 1434
1435 1435 self.closeFile()
@@ -1,429 +1,426
1 1 '''
2 2 Updated for multiprocessing
3 3 Author : Sergio Cortez
4 4 Jan 2018
5 5 Abstract:
6 6 Base class for processing units and operations. A decorator provides multiprocessing features and interconnects the created processes.
7 The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated.
8 The decorator handle also the methods inside the processing unit to be called from the main script (not as operations) (OPERATION -> type ='self').
7 The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated.
8 The decorator also handles the methods inside the processing unit that are called from the main script (not as operations) (OPERATION -> type ='self').
9 9
10 10 Based on:
11 11 $Author: murco $
12 12 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
13 13 '''
14 14
15 15 import os
16 16 import sys
17 17 import inspect
18 18 import zmq
19 19 import time
20 20 import pickle
21 21 import traceback
22 22 try:
23 23 from queue import Queue
24 24 except:
25 25 from Queue import Queue
26 26 from threading import Thread
27 27 from multiprocessing import Process
28 28
29 29 from schainpy.utils import log
30 30
31 31
32 32 class ProcessingUnit(object):
33 33
34 34 """
35 35 Update - Jan 2018 - MULTIPROCESSING
36 All the "call" methods present in the previous base were removed.
36 All the "call" methods present in the previous base were removed.
37 37 The majority of operations are independent processes, thus
38 the decorator is in charge of communicate the operation processes
38 the decorator is in charge of communicating the operation processes
39 39 with the processing unit via IPC.
40 40
41 41 The constructor does not receive any argument. The remaining methods
42 42 are related to the operations to execute.
43
43
44 44
45 45 """
46 46 proc_type = 'processing'
47 47 __attrs__ = []
48 48
49 49 def __init__(self):
50 50
51 51 self.dataIn = None
52 52 self.dataOut = None
53 53 self.isConfig = False
54 54 self.operations = []
55 55 self.plots = []
56 56
57 57 def getAllowedArgs(self):
58 58 if hasattr(self, '__attrs__'):
59 59 return self.__attrs__
60 60 else:
61 61 return inspect.getargspec(self.run).args
62 62
63 63 def addOperation(self, conf, operation):
64 64 """
65 This method is used in the controller, and update the dictionary containing the operations to execute. The dict
65 This method is used in the controller, and updates the list containing the operations to execute. Each entry
66 66 holds the id of the operation process (IPC purposes)
67 67
68 68 Adds an "Operation" object (opObj) to the list of objects "self.objectList" and returns the
69 69 identifier associated with this object.
70 70
71 71 Input:
72 72
73 73 object : object of the "Operation" class
74 74
75 75 Return:
76 76
77 77 objId : identifier of the object, needed to communicate with the master (procUnit)
78 78 """
79 79
80 80 self.operations.append(
81 81 (operation, conf.type, conf.id, conf.getKwargs()))
82
82
83 83 if 'plot' in self.name.lower():
84 84 self.plots.append(operation.CODE)
85 85
86 86 def getOperationObj(self, objId):
87 87
88 88 if objId not in list(self.operations.keys()):
89 89 return None
90 90
91 91 return self.operations[objId]
92 92
93 93 def operation(self, **kwargs):
94 94 """
95 95 Direct operation on the data (dataOut.data). The values of the dataOut object
96 96 attributes must be updated
97 97
98 98 Input:
99 99
100 100 **kwargs : Dictionary of arguments for the function to execute
101 101 """
102 102
103 103 raise NotImplementedError
104 104
105 105 def setup(self):
106 106
107 107 raise NotImplementedError
108 108
109 109 def run(self):
110 110
111 111 raise NotImplementedError
112 112
113 113 def close(self):
114 114
115 115 return
116 116
117 117
118 118 class Operation(object):
119 119
120 120 """
121 121 Update - Jan 2018 - MULTIPROCESSING
122 122
123 123 Most of the methods remained the same. The decorator parses the arguments and executes the run() method for each process.
124 124 The constructor does not receive any argument, nor does the base class.
125 125
126 126
127 127 Base class to define the additional operations that can be added to the ProcessingUnit class
128 128 and that need to accumulate previous information about the data to process. Preferably use an
129 129 accumulation buffer inside this class
130 130
131 131 Example: coherent integrations, which need the previous information of the n preceding profiles (buffer)
132 132
133 133 """
134 134 proc_type = 'operation'
135 135 __attrs__ = []
136 136
137 137 def __init__(self):
138 138
139 139 self.id = None
140 140 self.isConfig = False
141 141
142 142 if not hasattr(self, 'name'):
143 143 self.name = self.__class__.__name__
144 144
145 145 def getAllowedArgs(self):
146 146 if hasattr(self, '__attrs__'):
147 147 return self.__attrs__
148 148 else:
149 149 return inspect.getargspec(self.run).args
150 150
151 151 def setup(self):
152 152
153 153 self.isConfig = True
154 154
155 155 raise NotImplementedError
156 156
157 157 def run(self, dataIn, **kwargs):
158 158 """
159 159 Performs the required operations on dataIn.data and updates the
160 160 attributes of the dataIn object.
161 161
162 162 Input:
163 163
164 164 dataIn : object of the JROData type
165 165
166 166 Return:
167 167
168 168 None
169 169
170 170 Affected:
171 171 __buffer : data reception buffer.
172 172
173 173 """
174 174 if not self.isConfig:
175 175 self.setup(**kwargs)
176 176
177 177 raise NotImplementedError
178 178
179 179 def close(self):
180 180
181 181 return
182 182
183 183 class InputQueue(Thread):
184
184
185 185 '''
186 186 Class to hold input data for Processing Units and external Operations.
187 187 '''
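A condensed, standalone sketch of the ZMQ pattern this queue implements: a publisher sends [topic, pickled object] frames and a subscriber filters on the topic prefix. The endpoint and the topic 'unit01' are made up for the demo; the real sockets are the 'ipc:///tmp/schain/{project_id}_pub' / '..._sub' endpoints used in run() and set_publisher() below.

    import pickle
    import time
    import zmq

    context = zmq.Context()

    sub = context.socket(zmq.SUB)
    sub.bind('ipc:///tmp/schain_demo')          # demo endpoint only
    sub.setsockopt(zmq.SUBSCRIBE, b'unit01')    # subscribe to one inputId

    pub = context.socket(zmq.PUB)
    pub.connect('ipc:///tmp/schain_demo')
    time.sleep(0.5)                             # give the subscription time to propagate

    pub.send_multipart([b'unit01', pickle.dumps({'utctime': 1540789200.0})])
    topic, payload = sub.recv_multipart()
    print(topic, pickle.loads(payload))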
188 188
189 189 def __init__(self, project_id, inputId, lock=None):
190 190
191 191 Thread.__init__(self)
192 192 self.queue = Queue()
193 193 self.project_id = project_id
194 194 self.inputId = inputId
195 195 self.lock = lock
196 196 self.islocked = False
197 197 self.size = 0
198 198
199 199 def run(self):
200 200
201 201 c = zmq.Context()
202 202 self.receiver = c.socket(zmq.SUB)
203 203 self.receiver.connect(
204 204 'ipc:///tmp/schain/{}_pub'.format(self.project_id))
205 205 self.receiver.setsockopt(zmq.SUBSCRIBE, self.inputId.encode())
206 206
207 207 while True:
208 208 obj = self.receiver.recv_multipart()[1]
209 209 self.size += sys.getsizeof(obj)
210 210 self.queue.put(obj)
211 211
212 212 def get(self):
213 213
214 214 if not self.islocked and self.size/1000000 > 512:
215 self.lock.n.value += 1
215 self.lock.n.value += 1
216 216 self.islocked = True
217 217 self.lock.clear()
218 218 elif self.islocked and self.size/1000000 <= 512:
219 219 self.islocked = False
220 220 self.lock.n.value -= 1
221 221 if self.lock.n.value == 0:
222 self.lock.set()
223
222 self.lock.set()
223
224 224 obj = self.queue.get()
225 225 self.size -= sys.getsizeof(obj)
226 226 return pickle.loads(obj)
227 227
228
228
229 229 def MPDecorator(BaseClass):
230 230 """
231 231 Multiprocessing class decorator
232 232
233 233 This function adds multiprocessing features to a BaseClass. Also, it handles
234 the communication between processes (readers, procUnits and operations).
234 the communication between processes (readers, procUnits and operations).
235 235 """
236 236
237 237 class MPClass(BaseClass, Process):
238 238
239 239 def __init__(self, *args, **kwargs):
240 240 super(MPClass, self).__init__()
241 241 Process.__init__(self)
242 242 self.operationKwargs = {}
243 243 self.args = args
244 244 self.kwargs = kwargs
245 245 self.sender = None
246 246 self.receiver = None
247 247 self.i = 0
248 248 self.t = time.time()
249 249 self.name = BaseClass.__name__
250 250 self.__doc__ = BaseClass.__doc__
251
251
252 252 if 'plot' in self.name.lower() and not self.name.endswith('_'):
253 253 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
254
255 self.start_time = time.time()
254
255 self.start_time = time.time()
256 256 self.id = args[0]
257 257 self.inputId = args[1]
258 258 self.project_id = args[2]
259 259 self.err_queue = args[3]
260 260 self.lock = args[4]
261 261 self.typeProc = args[5]
262 262 self.err_queue.put('#_start_#')
263 263 if self.inputId is not None:
264 264 self.queue = InputQueue(self.project_id, self.inputId, self.lock)
265 265
266 266 def subscribe(self):
267 267 '''
268 268 Start the zmq socket receiver and subscribe to the input ID.
269 269 '''
270 270
271 271 self.queue.start()
272
272
273 273 def listen(self):
274 274 '''
275 275 This function waits for objects
276 276 '''
277
278 return self.queue.get()
277
278 return self.queue.get()
279 279
280 280 def set_publisher(self):
281 281 '''
282 This function create a zmq socket for publishing objects.
282 This function creates a zmq socket for publishing objects.
283 283 '''
284 284
285 285 time.sleep(0.5)
286
286
287 287 c = zmq.Context()
288 288 self.sender = c.socket(zmq.PUB)
289 289 self.sender.connect(
290 290 'ipc:///tmp/schain/{}_sub'.format(self.project_id))
291 291
292 292 def publish(self, data, id):
293 293 '''
294 294 This function publishes an object to a specific topic.
295 295 It blocks publishing when receiver queue is full to avoid data loss
296 '''
297
296 '''
297
298 298 if self.inputId is None:
299 299 self.lock.wait()
300 300 self.sender.send_multipart([str(id).encode(), pickle.dumps(data)])
301
302 301 def runReader(self):
303 302 '''
304 303 Run function for read units
305 304 '''
306 305 while True:
307 306
308 307 try:
309 308 BaseClass.run(self, **self.kwargs)
310 309 except:
311 err = traceback.format_exc()
310 err = traceback.format_exc()
312 311 if 'No more files' in err:
313 312 log.warning('No more files to read', self.name)
314 313 else:
315 314 self.err_queue.put('{}|{}'.format(self.name, err))
316 self.dataOut.error = True
317
315 self.dataOut.error = True
316
318 317 for op, optype, opId, kwargs in self.operations:
319 318 if optype == 'self' and not self.dataOut.flagNoData:
320 319 op(**kwargs)
321 320 elif optype == 'other' and not self.dataOut.flagNoData:
322 321 self.dataOut = op.run(self.dataOut, **self.kwargs)
323 322 elif optype == 'external':
324 323 self.publish(self.dataOut, opId)
325 324
326 325 if self.dataOut.flagNoData and not self.dataOut.error:
327 326 continue
328 327
329 328 self.publish(self.dataOut, self.id)
330
331 if self.dataOut.error:
329 if self.dataOut.error:
332 330 break
333 331
334 332 time.sleep(0.5)
335 333
336 334 def runProc(self):
337 335 '''
338 336 Run function for processing units
339 337 '''
340 338
341 339 while True:
342 self.dataIn = self.listen()
340 self.dataIn = self.listen()
343 341
344 342 if self.dataIn.flagNoData and self.dataIn.error is None:
345 343 continue
346 344 elif not self.dataIn.error:
347 345 try:
348 346 BaseClass.run(self, **self.kwargs)
349 347 except:
350 348 self.err_queue.put('{}|{}'.format(self.name, traceback.format_exc()))
351 349 self.dataOut.error = True
352 350 elif self.dataIn.error:
353 351 self.dataOut.error = self.dataIn.error
354 352 self.dataOut.flagNoData = True
355
353
356 354 for op, optype, opId, kwargs in self.operations:
357 355 if optype == 'self' and not self.dataOut.flagNoData:
358 356 op(**kwargs)
359 357 elif optype == 'other' and not self.dataOut.flagNoData:
360 358 self.dataOut = op.run(self.dataOut, **kwargs)
361 elif optype == 'external' and not self.dataOut.flagNoData:
359 elif optype == 'external' and not self.dataOut.flagNoData:
362 360 self.publish(self.dataOut, opId)
363
361
364 362 self.publish(self.dataOut, self.id)
365 363 for op, optype, opId, kwargs in self.operations:
366 if optype == 'external' and self.dataOut.error:
364 if optype == 'external' and self.dataOut.error:
367 365 self.publish(self.dataOut, opId)
368
366
369 367 if self.dataOut.error:
370 368 break
371
369
372 370 time.sleep(0.5)
373 371
374 372 def runOp(self):
375 373 '''
376 374 Run function for external operations (this operations just receive data
377 375 ex: plots, writers, publishers)
378 376 '''
379
377
380 378 while True:
381 379
382 380 dataOut = self.listen()
383 381
384 382 if not dataOut.error:
385 383 try:
386 384 BaseClass.run(self, dataOut, **self.kwargs)
387 385 except:
388 386 self.err_queue.put('{}|{}'.format(self.name, traceback.format_exc()))
389 387 dataOut.error = True
390 388 else:
391 break
389 break
392 390
393 391 def run(self):
394 392 if self.typeProc == "ProcUnit":
395 393
396 394 if self.inputId is not None:
397 395 self.subscribe()
398
396
399 397 self.set_publisher()
400 398
401 399 if 'Reader' not in BaseClass.__name__:
402 400 self.runProc()
403 401 else:
404 402 self.runReader()
405
406 403 elif self.typeProc == "Operation":
407 404
408 405 self.subscribe()
409 406 self.runOp()
410 407
411 408 else:
412 409 raise ValueError("Unknown type")
413 410
414 411 self.close()
415 412
416 413 def close(self):
417 414
418 415 BaseClass.close(self)
419 416 self.err_queue.put('#_end_#')
420 417
421 418 if self.sender:
422 419 self.sender.close()
423 420
424 421 if self.receiver:
425 422 self.receiver.close()
426 423
427 424 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
428 425
429 426 return MPClass
@@ -1,3857 +1,3857
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 from multiprocessing import Pool, TimeoutError
11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 26
27 27 SPEED_OF_LIGHT = 299792458
28 28
29 29
30 30 '''solving pickling issue'''
31 31
32 32 def _pickle_method(method):
33 33 func_name = method.__func__.__name__
34 34 obj = method.__self__
35 35 cls = method.__self__.__class__
36 36 return _unpickle_method, (func_name, obj, cls)
37 37
38 38 def _unpickle_method(func_name, obj, cls):
39 39 for cls in cls.mro():
40 40 try:
41 41 func = cls.__dict__[func_name]
42 42 except KeyError:
43 43 pass
44 44 else:
45 45 break
46 46 return func.__get__(obj, cls)
47 47
48 48 @MPDecorator
49 49 class ParametersProc(ProcessingUnit):
50
50
51 51 METHODS = {}
52 52 nSeconds = None
53 53
54 54 def __init__(self):
55 55 ProcessingUnit.__init__(self)
56
56
57 57 # self.objectDict = {}
58 58 self.buffer = None
59 59 self.firstdatatime = None
60 60 self.profIndex = 0
61 61 self.dataOut = Parameters()
62 62 self.setupReq = False #Agregar a todas las unidades de proc
63 63
64 64 def __updateObjFromInput(self):
65
65
66 66 self.dataOut.inputUnit = self.dataIn.type
67
67
68 68 self.dataOut.timeZone = self.dataIn.timeZone
69 69 self.dataOut.dstFlag = self.dataIn.dstFlag
70 70 self.dataOut.errorCount = self.dataIn.errorCount
71 71 self.dataOut.useLocalTime = self.dataIn.useLocalTime
72
72
73 73 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
74 74 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
75 75 self.dataOut.channelList = self.dataIn.channelList
76 76 self.dataOut.heightList = self.dataIn.heightList
77 77 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
78 78 # self.dataOut.nHeights = self.dataIn.nHeights
79 79 # self.dataOut.nChannels = self.dataIn.nChannels
80 80 self.dataOut.nBaud = self.dataIn.nBaud
81 81 self.dataOut.nCode = self.dataIn.nCode
82 82 self.dataOut.code = self.dataIn.code
83 83 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
84 84 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
85 85 # self.dataOut.utctime = self.firstdatatime
86 86 self.dataOut.utctime = self.dataIn.utctime
87 87 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
88 88 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data has not been flipped
89 89 self.dataOut.nCohInt = self.dataIn.nCohInt
90 90 # self.dataOut.nIncohInt = 1
91 91 self.dataOut.ippSeconds = self.dataIn.ippSeconds
92 92 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
93 93 self.dataOut.timeInterval1 = self.dataIn.timeInterval
94 self.dataOut.heightList = self.dataIn.getHeiRange()
94 self.dataOut.heightList = self.dataIn.getHeiRange()
95 95 self.dataOut.frequency = self.dataIn.frequency
96 96 # self.dataOut.noise = self.dataIn.noise
97
97
98 98 def run(self):
99 99
100 100
101 101
102 102 #---------------------- Voltage Data ---------------------------
103
103
104 104 if self.dataIn.type == "Voltage":
105 105
106 106 self.__updateObjFromInput()
107 107 self.dataOut.data_pre = self.dataIn.data.copy()
108 108 self.dataOut.flagNoData = False
109 109 self.dataOut.utctimeInit = self.dataIn.utctime
110 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
110 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
111 111 return
112
112
113 113 #---------------------- Spectra Data ---------------------------
114
114
115 115 if self.dataIn.type == "Spectra":
116 116
117 117 self.dataOut.data_pre = (self.dataIn.data_spc, self.dataIn.data_cspc)
118 118 self.dataOut.data_spc = self.dataIn.data_spc
119 119 self.dataOut.data_cspc = self.dataIn.data_cspc
120 120 self.dataOut.nProfiles = self.dataIn.nProfiles
121 121 self.dataOut.nIncohInt = self.dataIn.nIncohInt
122 122 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
123 123 self.dataOut.ippFactor = self.dataIn.ippFactor
124 124 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
125 125 self.dataOut.spc_noise = self.dataIn.getNoise()
126 126 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
127 127 # self.dataOut.normFactor = self.dataIn.normFactor
128 self.dataOut.pairsList = self.dataIn.pairsList
128 self.dataOut.pairsList = self.dataIn.pairsList
129 129 self.dataOut.groupList = self.dataIn.pairsList
130 self.dataOut.flagNoData = False
131
130 self.dataOut.flagNoData = False
131
132 132 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
133 133 self.dataOut.ChanDist = self.dataIn.ChanDist
134 else: self.dataOut.ChanDist = None
135
134 else: self.dataOut.ChanDist = None
135
136 136 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
137 137 # self.dataOut.VelRange = self.dataIn.VelRange
138 138 #else: self.dataOut.VelRange = None
139
139
140 140 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
141 141 self.dataOut.RadarConst = self.dataIn.RadarConst
142
142
143 143 if hasattr(self.dataIn, 'NPW'): #NPW
144 144 self.dataOut.NPW = self.dataIn.NPW
145
145
146 146 if hasattr(self.dataIn, 'COFA'): #COFA
147 147 self.dataOut.COFA = self.dataIn.COFA
148
149
150
148
149
150
151 151 #---------------------- Correlation Data ---------------------------
152
152
153 153 if self.dataIn.type == "Correlation":
154 154 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
155
155
156 156 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
157 157 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
158 158 self.dataOut.groupList = (acf_pairs, ccf_pairs)
159
159
160 160 self.dataOut.abscissaList = self.dataIn.lagRange
161 161 self.dataOut.noise = self.dataIn.noise
162 162 self.dataOut.data_SNR = self.dataIn.SNR
163 163 self.dataOut.flagNoData = False
164 164 self.dataOut.nAvg = self.dataIn.nAvg
165
165
166 166 #---------------------- Parameters Data ---------------------------
167
167
168 168 if self.dataIn.type == "Parameters":
169 169 self.dataOut.copy(self.dataIn)
170 170 self.dataOut.flagNoData = False
171
171
172 172 return True
173
173
174 174 self.__updateObjFromInput()
175 175 self.dataOut.utctimeInit = self.dataIn.utctime
176 176 self.dataOut.paramInterval = self.dataIn.timeInterval
177
177
178
178 179 return
179 180
180 181
181 182 def target(tups):
182
183
183 184 obj, args = tups
184
185
185 186 return obj.FitGau(args)
186
187
187
188
188 189 class SpectralFilters(Operation):
189
190
190 191 '''This class allows the Rainfall / Wind Selection for CLAIRE RADAR
191
192
192 193 LimitR : It is the limit in m/s of Rainfall
193 194 LimitW : It is the limit in m/s for Winds
194
195
195 196 Input:
196
197
197 198 self.dataOut.data_pre : SPC and CSPC
198 199 self.dataOut.spc_range : To select wind and rainfall velocities
199
200
200 201 Affected:
201
202
202 203 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
203 self.dataOut.spcparam_range : Used in SpcParamPlot
204 self.dataOut.spcparam_range : Used in SpcParamPlot
204 205 self.dataOut.SPCparam : Used in PrecipitationProc
205
206
206
207
207 208 '''
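The core trick in run() below is recentering the FFT-ordered spectrum with numpy.roll so the velocity axis increases monotonically before the rain/wind split. A minimal standalone illustration (synthetic values, not radar data; run() uses Vmax = 2*max(VelRange) for the unwrap offset):

    import numpy

    num_bin = 8
    vel = numpy.linspace(-4, 4, num_bin, endpoint=False)   # [-4, -3, ..., 3]
    spc = numpy.arange(num_bin, dtype=float)               # synthetic power values

    # move the first half of the axis to the end, as run() does with numpy.roll(..., -Num_Bin/2)
    vel_rolled = numpy.roll(vel, -(num_bin // 2))          # [0, 1, 2, 3, -4, -3, -2, -1]
    spc_rolled = numpy.roll(spc, -(num_bin // 2))

    # unwrap the negative half so the axis is monotonic (the offset here is the axis span)
    vel_rolled[-(num_bin // 2):] += num_bin                # [0, 1, 2, 3, 4, 5, 6, 7]
    print(vel_rolled, spc_rolled)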
208
209
209 210 def __init__(self):
210 211 Operation.__init__(self)
211 212 self.i=0
212
213 def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5):
214
215
216 #Limite de vientos
213
214 def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5):
215
216
217 #Limite de vientos
217 218 LimitR = PositiveLimit
218 219 LimitN = NegativeLimit
219
220
220 221 self.spc = dataOut.data_pre[0].copy()
221 222 self.cspc = dataOut.data_pre[1].copy()
222
223
223 224 self.Num_Hei = self.spc.shape[2]
224 225 self.Num_Bin = self.spc.shape[1]
225 226 self.Num_Chn = self.spc.shape[0]
226
227
227 228 VelRange = dataOut.spc_range[2]
228 229 TimeRange = dataOut.spc_range[1]
229 230 FrecRange = dataOut.spc_range[0]
230
231
231 232 Vmax= 2*numpy.max(dataOut.spc_range[2])
232 233 Tmax= 2*numpy.max(dataOut.spc_range[1])
233 234 Fmax= 2*numpy.max(dataOut.spc_range[0])
234
235
235 236 Breaker1R=VelRange[numpy.abs(VelRange-(-LimitN)).argmin()]
236 237 Breaker1R=numpy.where(VelRange == Breaker1R)
237
238 Delta = self.Num_Bin/2 - Breaker1R[0]
239
240
238
239 Delta = self.Num_Bin/2 - Breaker1R[0]
240
241
241 242 '''Reacomodando SPCrange'''
242 243
243 244 VelRange=numpy.roll(VelRange,-(int(self.Num_Bin/2)) ,axis=0)
244
245
245 246 VelRange[-(int(self.Num_Bin/2)):]+= Vmax
246
247
247 248 FrecRange=numpy.roll(FrecRange,-(int(self.Num_Bin/2)),axis=0)
248
249
249 250 FrecRange[-(int(self.Num_Bin/2)):]+= Fmax
250
251
251 252 TimeRange=numpy.roll(TimeRange,-(int(self.Num_Bin/2)),axis=0)
252
253
253 254 TimeRange[-(int(self.Num_Bin/2)):]+= Tmax
254
255
255 256 ''' ------------------ '''
256
257
257 258 Breaker2R=VelRange[numpy.abs(VelRange-(LimitR)).argmin()]
258 259 Breaker2R=numpy.where(VelRange == Breaker2R)
259
260
260
261
261 262 SPCroll = numpy.roll(self.spc,-(int(self.Num_Bin/2)) ,axis=1)
262
263
263 264 SPCcut = SPCroll.copy()
264 265 for i in range(self.Num_Chn):
265
266
266 267 SPCcut[i,0:int(Breaker2R[0]),:] = dataOut.noise[i]
267 268 SPCcut[i,-int(Delta):,:] = dataOut.noise[i]
268
269
269 270 SPCcut[i]=SPCcut[i]- dataOut.noise[i]
270 271 SPCcut[ numpy.where( SPCcut<0 ) ] = 1e-20
271
272
272 273 SPCroll[i]=SPCroll[i]-dataOut.noise[i]
273 274 SPCroll[ numpy.where( SPCroll<0 ) ] = 1e-20
274
275
275 276 SPC_ch1 = SPCroll
276
277
277 278 SPC_ch2 = SPCcut
278
279
279 280 SPCparam = (SPC_ch1, SPC_ch2, self.spc)
280 dataOut.SPCparam = numpy.asarray(SPCparam)
281
282
281 dataOut.SPCparam = numpy.asarray(SPCparam)
282
283
283 284 dataOut.spcparam_range=numpy.zeros([self.Num_Chn,self.Num_Bin+1])
284
285
285 286 dataOut.spcparam_range[2]=VelRange
286 287 dataOut.spcparam_range[1]=TimeRange
287 288 dataOut.spcparam_range[0]=FrecRange
288 289 return dataOut
289
290
290 291 class GaussianFit(Operation):
291
292
292 293 '''
293 Function that fit of one and two generalized gaussians (gg) based
294 on the PSD shape across an "power band" identified from a cumsum of
294 Function that fits one or two generalized gaussians (gg) based
295 on the PSD shape across a "power band" identified from a cumsum of
295 296 the measured spectrum - noise.
296
297
297 298 Input:
298 299 self.dataOut.data_pre : SelfSpectra
299
300
300 301 Output:
301 302 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
302
303
303 304 '''
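A self-contained sketch of the kind of fit FitGau() performs below: a single generalized Gaussian (shift, width, amplitude, power, noise floor) adjusted with scipy's fmin_l_bfgs_b on synthetic data. The parameter ordering follows the state0 vector used in FitGau(); the model function, data and bounds are made up for the example.

    import numpy
    from scipy.optimize import fmin_l_bfgs_b

    def gen_gauss(x, shift, width, amp, power, noise):
        # generalized Gaussian plus a flat noise floor
        return amp * numpy.exp(-0.5 * numpy.abs((x - shift) / width) ** power) + noise

    x = numpy.arange(64, dtype=float)
    true = (30.0, 5.0, 8.0, 2.0, 1.0)                      # shift, width, amplitude, power, noise
    y = gen_gauss(x, *true) + 0.1 * numpy.random.randn(x.size)

    def misfit(state, y_data, x):
        return numpy.sum((gen_gauss(x, *state) - y_data) ** 2)

    state0 = [25.0, 3.0, 5.0, 2.0, 0.5]                    # initial guess
    bounds = [(0, 63), (1, 20), (0, None), (0.5, 3.0), (0, 5)]
    best, chi2, info = fmin_l_bfgs_b(misfit, state0, args=(y, x),
                                     bounds=bounds, approx_grad=True)
    print(best, chi2)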
304 305 def __init__(self):
305 306 Operation.__init__(self)
306 307 self.i=0
307
308
308
309
309 310 def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
310 311 """This routine will find a couple of generalized Gaussians to a power spectrum
311 312 input: spc
312 313 output:
313 314 Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise
314 315 """
315
316
316 317 self.spc = dataOut.data_pre[0].copy()
317 318 self.Num_Hei = self.spc.shape[2]
318 319 self.Num_Bin = self.spc.shape[1]
319 320 self.Num_Chn = self.spc.shape[0]
320 321 Vrange = dataOut.abscissaList
321
322
322 323 GauSPC = numpy.empty([self.Num_Chn,self.Num_Bin,self.Num_Hei])
323 324 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
324 325 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
325 326 SPC_ch1[:] = numpy.NaN
326 327 SPC_ch2[:] = numpy.NaN
327 328
328
329
329 330 start_time = time.time()
330
331
331 332 noise_ = dataOut.spc_noise[0].copy()
332
333
334 pool = Pool(processes=self.Num_Chn)
333
334
335 pool = Pool(processes=self.Num_Chn)
335 336 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
336 objs = [self for __ in range(self.Num_Chn)]
337 attrs = list(zip(objs, args))
337 objs = [self for __ in range(self.Num_Chn)]
338 attrs = list(zip(objs, args))
338 339 gauSPC = pool.map(target, attrs)
339 340 dataOut.SPCparam = numpy.asarray(gauSPC)
340
341
341 342 ''' Parameters:
342 343 1. Amplitude
343 344 2. Shift
344 345 3. Width
345 346 4. Power
346 347 '''
347
348
348 349 def FitGau(self, X):
349
350
350 351 Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X
351
352
352 353 SPCparam = []
353 354 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
354 355 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
355 356 SPC_ch1[:] = 0#numpy.NaN
356 357 SPC_ch2[:] = 0#numpy.NaN
357
358
359
358
359
360
360 361 for ht in range(self.Num_Hei):
361
362
362
363
363 364 spc = numpy.asarray(self.spc)[ch,:,ht]
364
365
365 366 #############################################
366 367 # normalizing spc and noise
367 368 # This part differs from gg1
368 369 spc_norm_max = max(spc)
369 370 #spc = spc / spc_norm_max
370 371 pnoise = pnoise #/ spc_norm_max
371 372 #############################################
372
373
373 374 fatspectra=1.0
374
375
375 376 wnoise = noise_ #/ spc_norm_max
376 377 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
377 #if wnoise>1.1*pnoise: # to be tested later
378 #if wnoise>1.1*pnoise: # to be tested later
378 379 # wnoise=pnoise
379 noisebl=wnoise*0.9;
380 noisebl=wnoise*0.9;
380 381 noisebh=wnoise*1.1
381 382 spc=spc-wnoise
382
383
383 384 minx=numpy.argmin(spc)
384 #spcs=spc.copy()
385 #spcs=spc.copy()
385 386 spcs=numpy.roll(spc,-minx)
386 387 cum=numpy.cumsum(spcs)
387 388 tot_noise=wnoise * self.Num_Bin #64;
388
389
389 390 snr = sum(spcs)/tot_noise
390 391 snrdB=10.*numpy.log10(snr)
391
392
392 393 if snrdB < SNRlimit :
393 394 snr = numpy.NaN
394 395 SPC_ch1[:,ht] = 0#numpy.NaN
395 396 SPC_ch2[:,ht] = 0#numpy.NaN
396 397 SPCparam = (SPC_ch1,SPC_ch2)
397 398 continue
398
399
399
400
400 401 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
401 402 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
402
403 cummax=max(cum);
403
404 cummax=max(cum);
404 405 epsi=0.08*fatspectra # cumsum to narrow down the energy region
405 cumlo=cummax*epsi;
406 cumlo=cummax*epsi;
406 407 cumhi=cummax*(1-epsi)
407 408 powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
408
409
409
410
410 411 if len(powerindex) < 1:# case for powerindex 0
411 412 continue
412 413 powerlo=powerindex[0]
413 414 powerhi=powerindex[-1]
414 415 powerwidth=powerhi-powerlo
415
416
416 417 firstpeak=powerlo+powerwidth/10.# first gaussian energy location
417 418 secondpeak=powerhi-powerwidth/10.#second gaussian energy location
418 419 midpeak=(firstpeak+secondpeak)/2.
419 420 firstamp=spcs[int(firstpeak)]
420 421 secondamp=spcs[int(secondpeak)]
421 422 midamp=spcs[int(midpeak)]
422
423
423 424 x=numpy.arange( self.Num_Bin )
424 425 y_data=spc+wnoise
425
426
426 427 ''' single Gaussian '''
427 428 shift0=numpy.mod(midpeak+minx, self.Num_Bin )
428 429 width0=powerwidth/4.#Initialization entire power of spectrum divided by 4
429 430 power0=2.
430 431 amplitude0=midamp
431 432 state0=[shift0,width0,amplitude0,power0,wnoise]
432 433 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
433 434 lsq1=fmin_l_bfgs_b(self.misfit1,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
434
435 chiSq1=lsq1[1];
436 435
437
436 chiSq1=lsq1[1];
437
438
438 439 if fatspectra<1.0 and powerwidth<4:
439 440 choice=0
440 441 Amplitude0=lsq1[0][2]
441 442 shift0=lsq1[0][0]
442 443 width0=lsq1[0][1]
443 444 p0=lsq1[0][3]
444 445 Amplitude1=0.
445 446 shift1=0.
446 447 width1=0.
447 448 p1=0.
448 449 noise=lsq1[0][4]
449 450 #return (numpy.array([shift0,width0,Amplitude0,p0]),
450 451 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
451
452
452 453 ''' two gaussians '''
453 454 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
454 shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
455 shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
455 456 shift1=numpy.mod(secondpeak+minx, self.Num_Bin )
456 width0=powerwidth/6.;
457 width0=powerwidth/6.;
457 458 width1=width0
458 power0=2.;
459 power0=2.;
459 460 power1=power0
460 amplitude0=firstamp;
461 amplitude0=firstamp;
461 462 amplitude1=secondamp
462 463 state0=[shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
463 464 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
464 465 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
465 466 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
466
467
467 468 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
468
469
470 chiSq2=lsq2[1];
471
472
473
469
470
471 chiSq2=lsq2[1];
472
473
474
474 475 oneG=(chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
475
476
476 477 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
477 478 if oneG:
478 479 choice=0
479 480 else:
480 481 w1=lsq2[0][1]; w2=lsq2[0][5]
481 482 a1=lsq2[0][2]; a2=lsq2[0][6]
482 483 p1=lsq2[0][3]; p2=lsq2[0][7]
483 s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
484 s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
484 485 s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
485 486 gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
486
487
487 488 if gp1>gp2:
488 489 if a1>0.7*a2:
489 490 choice=1
490 491 else:
491 492 choice=2
492 493 elif gp2>gp1:
493 494 if a2>0.7*a1:
494 495 choice=2
495 496 else:
496 497 choice=1
497 498 else:
498 499 choice=numpy.argmax([a1,a2])+1
499 500 #else:
500 501 #choice=argmin([std2a,std2b])+1
501
502
502 503 else: # with low SNR go to the most energetic peak
503 504 choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
504
505
506 shift0=lsq2[0][0];
505
506
507 shift0=lsq2[0][0];
507 508 vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0])
508 shift1=lsq2[0][4];
509 shift1=lsq2[0][4];
509 510 vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0])
510
511
511 512 max_vel = 1.0
512
513
513 514 #first peak will be 0, second peak will be 1
514 515 if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range
515 516 shift0=lsq2[0][0]
516 517 width0=lsq2[0][1]
517 518 Amplitude0=lsq2[0][2]
518 519 p0=lsq2[0][3]
519
520
520 521 shift1=lsq2[0][4]
521 522 width1=lsq2[0][5]
522 523 Amplitude1=lsq2[0][6]
523 524 p1=lsq2[0][7]
524 noise=lsq2[0][8]
525 noise=lsq2[0][8]
525 526 else:
526 527 shift1=lsq2[0][0]
527 528 width1=lsq2[0][1]
528 529 Amplitude1=lsq2[0][2]
529 530 p1=lsq2[0][3]
530
531
531 532 shift0=lsq2[0][4]
532 533 width0=lsq2[0][5]
533 534 Amplitude0=lsq2[0][6]
534 p0=lsq2[0][7]
535 noise=lsq2[0][8]
536
535 p0=lsq2[0][7]
536 noise=lsq2[0][8]
537
537 538 if Amplitude0<0.05: # in case the peak is noise
538 shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN]
539 shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN]
539 540 if Amplitude1<0.05:
540 shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN]
541
542
541 shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN]
542
543
543 544 SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0
544 545 SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1
545 546 SPCparam = (SPC_ch1,SPC_ch2)
546
547
547
548
548 549 return SPCparam
549
550
550 551 def y_model1(self,x,state):
551 552 shift0,width0,amplitude0,power0,noise=state
552 553 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
553
554
554 555 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
555
556
556 557 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
557 558 return model0+model0u+model0d+noise
558
559
560 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
560 561 shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state
561 562 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
562
563
563 564 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
564
565
565 566 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
566 567 model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1)
567
568
568 569 model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1)
569
570
570 571 model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1)
571 572 return model0+model0u+model0d+model1+model1u+model1d+noise
572
573
574 def misfit1(self,state,y_data,x,num_intg): # Measures how close the model is to the measured data; the smaller the value, the better the fit.
574 575
575 576 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
576
577
577 578 def misfit2(self,state,y_data,x,num_intg):
578 579 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
579
580
580
581
581 582
582 583 class PrecipitationProc(Operation):
583
584
584 585 '''
585 586 Operator that estimates Reflectivity factor (Z), and estimates rainfall Rate (R)
587
588 Input:
588 589 self.dataOut.data_pre : SelfSpectra
590
591 Output:
592
593 self.dataOut.data_output : Reflectivity factor, rainfall Rate
594
595
596 Parameters affected:
596 597 '''
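# Illustrative sketch (added, not part of the original chain): run() relates
# reflectivity and rain rate through the Z-R power law used below,
# Z = 200 * R**1.6, i.e. R = (Z / 200)**(1 / 1.6). For example, assuming numpy
# is available and a hypothetical reflectivity factor Z of 300 mm^6/m^3:
#
#     Z = 300.0
#     R = (Z / 200.0) ** (1.0 / 1.6)      # rain rate in mm/h
#     dBZ = 10.0 * numpy.log10(Z)         # as stored in dataOut.data_param[0]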
597
598
598 599 def __init__(self):
599 600 Operation.__init__(self)
600 601 self.i=0
601
602
602
603
603 604 def gaus(self,xSamples,Amp,Mu,Sigma):
604 605 return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
605
606
607
606
607
608
608 609 def Moments(self, ySamples, xSamples):
609 610 Pot = numpy.nansum( ySamples ) # Power, zeroth moment
610 611 yNorm = ySamples / Pot
611
612
612 613 Vr = numpy.nansum( yNorm * xSamples ) # Radial velocity (mu), Doppler shift, first moment
614 Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Second moment
614 615 Desv = Sigma2**0.5 # Standard deviation, spectral width
615
616
617 return numpy.array([Pot, Vr, Desv])
618
619 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
619 620 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km = 0.93, Altitude=3350):
620
621
621
622
622 623 Velrange = dataOut.spcparam_range[2]
623 624 FrecRange = dataOut.spcparam_range[0]
624
625
625 626 dV= Velrange[1]-Velrange[0]
626 627 dF= FrecRange[1]-FrecRange[0]
627
628
628 629 if radar == "MIRA35C" :
629
630
630 631 self.spc = dataOut.data_pre[0].copy()
631 632 self.Num_Hei = self.spc.shape[2]
632 633 self.Num_Bin = self.spc.shape[1]
633 634 self.Num_Chn = self.spc.shape[0]
634 635 Ze = self.dBZeMODE2(dataOut)
635
636
636 637 else:
637
638
638 639 self.spc = dataOut.SPCparam[1].copy() #dataOut.data_pre[0].copy() #
639
640
640 641 """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
641
642 self.spc[:,:,0:7]= numpy.NaN
643
642
643 self.spc[:,:,0:7]= numpy.NaN
644
644 645 """##########################################"""
645
646
646 647 self.Num_Hei = self.spc.shape[2]
647 648 self.Num_Bin = self.spc.shape[1]
648 649 self.Num_Chn = self.spc.shape[0]
649
650
650 651 ''' The radar constant is obtained '''
651
652
652 653 self.Pt = Pt
653 654 self.Gt = Gt
654 655 self.Gr = Gr
655 656 self.Lambda = Lambda
656 657 self.aL = aL
657 658 self.tauW = tauW
658 659 self.ThetaT = ThetaT
659 660 self.ThetaR = ThetaR
660
661
661 662 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
662 663 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
663 664 RadarConstant = 10e-26 * Numerator / Denominator #
664
665
665 666 ''' ============================= '''
666
667 self.spc[0] = (self.spc[0]-dataOut.noise[0])
668 self.spc[1] = (self.spc[1]-dataOut.noise[1])
669 self.spc[2] = (self.spc[2]-dataOut.noise[2])
670
667
668 self.spc[0] = (self.spc[0]-dataOut.noise[0])
669 self.spc[1] = (self.spc[1]-dataOut.noise[1])
670 self.spc[2] = (self.spc[2]-dataOut.noise[2])
671
671 672 self.spc[ numpy.where(self.spc < 0)] = 0
672
673 SPCmean = (numpy.mean(self.spc,0) - numpy.mean(dataOut.noise))
673
674 SPCmean = (numpy.mean(self.spc,0) - numpy.mean(dataOut.noise))
674 675 SPCmean[ numpy.where(SPCmean < 0)] = 0
675
676
676 677 ETAn = numpy.zeros([self.Num_Bin,self.Num_Hei])
677 678 ETAv = numpy.zeros([self.Num_Bin,self.Num_Hei])
678 679 ETAd = numpy.zeros([self.Num_Bin,self.Num_Hei])
679
680
680 681 Pr = SPCmean[:,:]
681
682
682 683 VelMeteoro = numpy.mean(SPCmean,axis=0)
683
684
684 685 D_range = numpy.zeros([self.Num_Bin,self.Num_Hei])
685 686 SIGMA = numpy.zeros([self.Num_Bin,self.Num_Hei])
686 687 N_dist = numpy.zeros([self.Num_Bin,self.Num_Hei])
687 688 V_mean = numpy.zeros(self.Num_Hei)
688 689 del_V = numpy.zeros(self.Num_Hei)
689 690 Z = numpy.zeros(self.Num_Hei)
690 691 Ze = numpy.zeros(self.Num_Hei)
691 692 RR = numpy.zeros(self.Num_Hei)
692
693
693 694 Range = dataOut.heightList*1000.
694
695
695 696 for R in range(self.Num_Hei):
696
697
697 698 h = Range[R] + Altitude #Range from ground to radar pulse altitude
698 699 del_V[R] = 1 + 3.68 * 10**-5 * h + 1.71 * 10**-9 * h**2 #Density change correction for velocity
699
700
700 701 D_range[:,R] = numpy.log( (9.65 - (Velrange[0:self.Num_Bin] / del_V[R])) / 10.3 ) / -0.6 #Diameter range [m]x10**-3
701
702
702 703 '''NOTE: ETA(n) dn = ETA(f) df
703
704
704 705 dn = 1 sampling differential
705 706 df = ETA(n) / ETA(f)
706
707
707 708 '''
708
709
709 710 ETAn[:,R] = RadarConstant * Pr[:,R] * (Range[R] )**2 #Reflectivity (ETA)
710
711
711 712 ETAv[:,R]=ETAn[:,R]/dV
712
713
713 714 ETAd[:,R]=ETAv[:,R]*6.18*numpy.exp(-0.6*D_range[:,R])
714
715
715 716 SIGMA[:,R] = Km * (D_range[:,R] * 1e-3 )**6 * numpy.pi**5 / Lambda**4 #Equivalent Section of drops (sigma)
716
717 N_dist[:,R] = ETAn[:,R] / SIGMA[:,R]
718
717
718 N_dist[:,R] = ETAn[:,R] / SIGMA[:,R]
719
719 720 DMoments = self.Moments(Pr[:,R], Velrange[0:self.Num_Bin])
720
721
721 722 try:
722 723 popt01,pcov = curve_fit(self.gaus, Velrange[0:self.Num_Bin] , Pr[:,R] , p0=DMoments)
723 except:
724 except:
724 725 popt01=numpy.zeros(3)
725 726 popt01[1]= DMoments[1]
726
727
727 728 if popt01[1]<0 or popt01[1]>20:
728 729 popt01[1]=numpy.NaN
729
730
730
731
731 732 V_mean[R]=popt01[1]
732
733
733 734 Z[R] = numpy.nansum( N_dist[:,R] * (D_range[:,R])**6 )#*10**-18
734
735
735 736 RR[R] = 0.0006*numpy.pi * numpy.nansum( D_range[:,R]**3 * N_dist[:,R] * Velrange[0:self.Num_Bin] ) #Rainfall rate
736
737
737 738 Ze[R] = (numpy.nansum( ETAn[:,R]) * Lambda**4) / ( 10**-18*numpy.pi**5 * Km)
738
739
740
739
740
741
741 742 RR2 = (Z/200)**(1/1.6)
742 743 dBRR = 10*numpy.log10(RR)
743 744 dBRR2 = 10*numpy.log10(RR2)
744
745
745 746 dBZe = 10*numpy.log10(Ze)
746 747 dBZ = 10*numpy.log10(Z)
747
748
748 749 dataOut.data_output = RR[8]
749 750 dataOut.data_param = numpy.ones([3,self.Num_Hei])
750 751 dataOut.channelList = [0,1,2]
751
752
752 753 dataOut.data_param[0]=dBZ
753 754 dataOut.data_param[1]=V_mean
754 755 dataOut.data_param[2]=RR
755 756
756 757 return dataOut
757
758
758 759 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
759
760
760 761 NPW = dataOut.NPW
761 762 COFA = dataOut.COFA
762
763
763 764 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
764 765 RadarConst = dataOut.RadarConst
765 766 #frequency = 34.85*10**9
766
767
767 768 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
768 769 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
769
770
770 771 ETA = numpy.sum(SNR,1)
771
772
772 773 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
773
774
774 775 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
775
776
776 777 for r in range(self.Num_Hei):
777
778
778 779 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
779 780 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
780
781
781 782 return Ze
782
783
783 784 # def GetRadarConstant(self):
784 #
785 # """
785 #
786 # """
786 787 # Constants:
787 #
788 #
788 789 # Pt: Transmission Power dB 5kW 5000
789 790 # Gt: Transmission Gain dB 24.7 dB 295.1209
790 791 # Gr: Reception Gain dB 18.5 dB 70.7945
791 792 # Lambda: Wavelenght m 0.6741 m 0.6741
792 793 # aL: Attenuation loses dB 4dB 2.5118
793 794 # tauW: Width of transmission pulse s 4us 4e-6
794 795 # ThetaT: Transmission antenna bean angle rad 0.1656317 rad 0.1656317
795 796 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
796 #
797 #
797 798 # """
798 #
799 #
799 800 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
800 801 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
801 802 # RadarConstant = Numerator / Denominator
802 #
803 #
803 804 # return RadarConstant
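# Illustrative sketch (added): evaluating the radar-constant expression used in
# run() with its default arguments (Pt=5000, Gt=295.1209, Gr=70.7945,
# Lambda=0.6741, aL=2.5118, tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087),
# assuming SPEED_OF_LIGHT is defined elsewhere in this module:
#
#     Numerator = (4 * numpy.pi)**3 * aL**2 * 16 * numpy.log(2)
#     Denominator = Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR
#     RadarConstant = 10e-26 * Numerator / Denominator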
804
805
806
805
806
807
808 class FullSpectralAnalysis(Operation):
809
809 810 """
810 811 Function that implements the Full Spectral Analysis technique.
812
813 Input:
813 814 self.dataOut.data_pre : SelfSpectra and CrossSPectra data
814 815 self.dataOut.groupList : Pairlist of channels
815 816 self.dataOut.ChanDist : Physical distance between receivers
816
817
817
818
819 Output:
820
821 self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind
822
823
823 824 Parameters affected: Winds, height range, SNR
824
825
825 826 """
826 827 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRlimit=7):
827
828 self.indice=int(numpy.random.rand()*1000)
829
828
829 self.indice=int(numpy.random.rand()*1000)
830
830 831 spc = dataOut.data_pre[0].copy()
831 832 cspc = dataOut.data_pre[1]
832
833
833 834 """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
834
835
835 836 SNRspc = spc.copy()
836 837 SNRspc[:,:,0:7]= numpy.NaN
837
838
838 839 """##########################################"""
839
840
840
841
841 842 nChannel = spc.shape[0]
842 843 nProfiles = spc.shape[1]
843 844 nHeights = spc.shape[2]
844
845
845 846 pairsList = dataOut.groupList
846 847 if dataOut.ChanDist is not None :
847 848 ChanDist = dataOut.ChanDist
848 849 else:
849 850 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
850
851
851 852 FrecRange = dataOut.spc_range[0]
852
853
853 854 ySamples=numpy.ones([nChannel,nProfiles])
854 855 phase=numpy.ones([nChannel,nProfiles])
855 856 CSPCSamples=numpy.ones([nChannel,nProfiles],dtype=numpy.complex_)
856 857 coherence=numpy.ones([nChannel,nProfiles])
857 858 PhaseSlope=numpy.ones(nChannel)
858 859 PhaseInter=numpy.ones(nChannel)
859 860 data_SNR=numpy.zeros([nProfiles])
860
861
861 862 data = dataOut.data_pre
862 863 noise = dataOut.noise
863
864
864 865 dataOut.data_SNR = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0]
865
866
866 867 dataOut.data_SNR[numpy.where( dataOut.data_SNR <0 )] = 1e-20
867
868
868
869
869 870 data_output=numpy.ones([spc.shape[0],spc.shape[2]])*numpy.NaN
870
871
871 872 velocityX=[]
872 873 velocityY=[]
873 874 velocityV=[]
874 875 PhaseLine=[]
875
876
876 877 dbSNR = 10*numpy.log10(dataOut.data_SNR)
877 878 dbSNR = numpy.average(dbSNR,0)
878
879
879 880 for Height in range(nHeights):
880
881
881 882 [Vzon,Vmer,Vver, GaussCenter, PhaseSlope, FitGaussCSPC]= self.WindEstimation(spc, cspc, pairsList, ChanDist, Height, noise, dataOut.spc_range, dbSNR[Height], SNRlimit)
882 883 PhaseLine = numpy.append(PhaseLine, PhaseSlope)
883
884
884 885 if abs(Vzon)<100. and abs(Vzon)> 0.:
885 886 velocityX=numpy.append(velocityX, Vzon)#Vmag
886
887
887 888 else:
888 889 velocityX=numpy.append(velocityX, numpy.NaN)
889
890
890 891 if abs(Vmer)<100. and abs(Vmer) > 0.:
891 892 velocityY=numpy.append(velocityY, -Vmer)#Vang
892
893
893 894 else:
894 895 velocityY=numpy.append(velocityY, numpy.NaN)
895
896
896 897 if dbSNR[Height] > SNRlimit:
897 898 velocityV=numpy.append(velocityV, -Vver)#FirstMoment[Height])
898 899 else:
899 900 velocityV=numpy.append(velocityV, numpy.NaN)
900 901
901
902
902
903
903 904 '''Note: change the sign of numpy.array(velocityX) when trying to process BLTR data'''
904 905 data_output[0] = numpy.array(velocityX) #self.moving_average(numpy.array(velocityX) , N=1)
905 906 data_output[1] = numpy.array(velocityY) #self.moving_average(numpy.array(velocityY) , N=1)
906 907 data_output[2] = velocityV#FirstMoment
907
908
908 909 xFrec=FrecRange[0:spc.shape[1]]
909
910
910 911 dataOut.data_output=data_output
911
912
912 913 return dataOut
913
914
914
915
915 916 def moving_average(self,x, N=2):
916 917 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
917
918
918 919 def gaus(self,xSamples,Amp,Mu,Sigma):
919 920 return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
920
921
922
921
922
923
923 924 def Moments(self, ySamples, xSamples):
924 925 Pot = numpy.nansum( ySamples ) # Power, zeroth moment
925 926 yNorm = ySamples / Pot
926 927 Vr = numpy.nansum( yNorm * xSamples ) # Radial velocity (mu), Doppler shift, first moment
928 Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Second moment
928 929 Desv = Sigma2**0.5 # Standard deviation, spectral width
929
930
930 931 return numpy.array([Pot, Vr, Desv])
931
932
932 933 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit):
933
934 934
935
935
936
936 937 ySamples=numpy.ones([spc.shape[0],spc.shape[1]])
937 938 phase=numpy.ones([spc.shape[0],spc.shape[1]])
938 939 CSPCSamples=numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_)
939 940 coherence=numpy.ones([spc.shape[0],spc.shape[1]])
940 941 PhaseSlope=numpy.zeros(spc.shape[0])
941 942 PhaseInter=numpy.ones(spc.shape[0])
942 943 xFrec=AbbsisaRange[0][0:spc.shape[1]]
943 944 xVel =AbbsisaRange[2][0:spc.shape[1]]
944 945 Vv=numpy.empty(spc.shape[2])*0
945 946 SPCav = numpy.average(spc, axis=0)-numpy.average(noise) #spc[0]-noise[0]#
946
947 SPCmoments = self.Moments(SPCav[:,Height], xVel )
947
948 SPCmoments = self.Moments(SPCav[:,Height], xVel )
948 949 CSPCmoments = []
949 950 cspcNoise = numpy.empty(3)
950
951
951 952 '''Getting Eij and Nij'''
952
953
953 954 Xi01=ChanDist[0][0]
954 955 Eta01=ChanDist[0][1]
955
956
956 957 Xi02=ChanDist[1][0]
957 958 Eta02=ChanDist[1][1]
958
959
959 960 Xi12=ChanDist[2][0]
960 961 Eta12=ChanDist[2][1]
961
962
962 963 z = spc.copy()
963 964 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
964
965 for i in range(spc.shape[0]):
966
965
966 for i in range(spc.shape[0]):
967
967 968 '''****** Line of Data SPC ******'''
968 969 zline=z[i,:,Height].copy() - noise[i] # noise is subtracted
969
970
970 971 '''****** SPC is normalized ******'''
971 972 SmoothSPC =self.moving_average(zline.copy(),N=1) # the noise is smoothed
973 FactNorm = SmoothSPC/numpy.nansum(SmoothSPC) # normalized and smoothed SPC
974
974 975 xSamples = xFrec # the frequency range is taken
975 976 ySamples[i] = FactNorm # the normalized SPC values are taken
976
977
977 978 for i in range(spc.shape[0]):
978
979
979 980 '''****** Line of Data CSPC ******'''
980 981 cspcLine = ( cspc[i,:,Height].copy())# - noise[i] ) # no! noise is not subtracted here
981 982 SmoothCSPC =self.moving_average(cspcLine,N=1) # the noise is smoothed
982 983 cspcNorm = SmoothCSPC/numpy.nansum(SmoothCSPC) # normalized and smoothed CSPC
983
984
984 985 '''****** CSPC is normalized with respect to Briggs and Vincent ******'''
985 986 chan_index0 = pairsList[i][0]
986 987 chan_index1 = pairsList[i][1]
987
988 CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2
988
989 CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2
989 990 CSPCNorm = cspcNorm / numpy.sqrt(CSPCFactor)
990
991
991 992 CSPCSamples[i] = CSPCNorm
992
993
993 994 coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
994
995
995 996 #coherence[i]= self.moving_average(coherence[i],N=1)
996
997
997 998 phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
998
999
999 1000 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPCSamples[0]), xSamples),
1000 1001 self.Moments(numpy.abs(CSPCSamples[1]), xSamples),
1001 self.Moments(numpy.abs(CSPCSamples[2]), xSamples)])
1002
1003
1002 self.Moments(numpy.abs(CSPCSamples[2]), xSamples)])
1003
1004
1004 1005 popt=[1e-10,0,1e-10]
1005 popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10]
1006 popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10]
1006 1007 FitGauss01, FitGauss02, FitGauss12 = numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0
1007
1008
1008 1009 CSPCMask01 = numpy.abs(CSPCSamples[0])
1009 1010 CSPCMask02 = numpy.abs(CSPCSamples[1])
1010 1011 CSPCMask12 = numpy.abs(CSPCSamples[2])
1011
1012
1012 1013 mask01 = ~numpy.isnan(CSPCMask01)
1013 1014 mask02 = ~numpy.isnan(CSPCMask02)
1014 1015 mask12 = ~numpy.isnan(CSPCMask12)
1015
1016
1016 1017 #mask = ~numpy.isnan(CSPCMask01)
1017 1018 CSPCMask01 = CSPCMask01[mask01]
1018 1019 CSPCMask02 = CSPCMask02[mask02]
1019 1020 CSPCMask12 = CSPCMask12[mask12]
1020 1021 #CSPCMask01 = numpy.ma.masked_invalid(CSPCMask01)
1021
1022
1023
1022
1023
1024
1024 1025 '''***Fit Gauss CSPC01***'''
1025 1026 if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3 :
1026 1027 try:
1027 1028 popt01,pcov = curve_fit(self.gaus,xSamples[mask01],numpy.abs(CSPCMask01),p0=CSPCmoments[0])
1028 1029 popt02,pcov = curve_fit(self.gaus,xSamples[mask02],numpy.abs(CSPCMask02),p0=CSPCmoments[1])
1029 1030 popt12,pcov = curve_fit(self.gaus,xSamples[mask12],numpy.abs(CSPCMask12),p0=CSPCmoments[2])
1030 1031 FitGauss01 = self.gaus(xSamples,*popt01)
1031 1032 FitGauss02 = self.gaus(xSamples,*popt02)
1032 1033 FitGauss12 = self.gaus(xSamples,*popt12)
1033 1034 except:
1034 1035 FitGauss01=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[0]))
1035 1036 FitGauss02=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[1]))
1036 1037 FitGauss12=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[2]))
1037
1038
1038
1039
1039 1040 CSPCopt = numpy.vstack([popt01,popt02,popt12])
1040
1041
1041 1042 '''****** Getting fij width ******'''
1042
1043 yMean = numpy.average(ySamples, axis=0) # ySamples[0]
1044
1043
1044 yMean = numpy.average(ySamples, axis=0) # ySamples[0]
1045
1045 1046 '''******* Getting fitting Gaussian *******'''
1046 meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, velocidad radial (frecuencia)
1047 sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Varianza, Ancho espectral (frecuencia)
1048
1047 meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, velocidad radial (frecuencia)
1048 sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Varianza, Ancho espectral (frecuencia)
1049
1049 1050 yMoments = self.Moments(yMean, xSamples)
1050
1051
1051 1052 if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3: # and abs(meanGauss/sigma2) > 0.00001:
1052 1053 try:
1053 1054 popt,pcov = curve_fit(self.gaus,xSamples,yMean,p0=yMoments)
1054 1055 FitGauss=self.gaus(xSamples,*popt)
1055
1056
1056 1057 except :#RuntimeError:
1057 1058 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1058
1059
1059
1060
1060 1061 else:
1061 1062 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1062
1063
1064
1063
1064
1065
1065 1066 '''****** Getting Fij ******'''
1066 1067 Fijcspc = CSPCopt[:,2]/2*3
1067
1068
1068
1069
1069 1070 GaussCenter = popt[1] #xFrec[GCpos]
1070 1071 #Point on the Gaussian X axis where the center is located
1071 1072 ClosestCenter = xSamples[numpy.abs(xSamples-GaussCenter).argmin()]
1072 1073 PointGauCenter = numpy.where(xSamples==ClosestCenter)[0][0]
1073
1074
1075 #e^-1 point located on the Gaussian
1075 1076 PeMinus1 = numpy.max(FitGauss)* numpy.exp(-1)
1076 1077 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # the point in "FitGauss" closest to "PeMinus1"
1077 1078 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1078
1079
1079 1080 if xSamples[PointFij] > xSamples[PointGauCenter]:
1080 1081 Fij = xSamples[PointFij] - xSamples[PointGauCenter]
1081
1082
1082 1083 else:
1083 1084 Fij = xSamples[PointGauCenter] - xSamples[PointFij]
1084
1085
1085
1086
1086 1087 '''****** Taking frequency ranges from SPCs ******'''
1087
1088
1088
1089
1089 1090 #GaussCenter = popt[1] #Primer momento 01
1090 1091 GauWidth = popt[2] *3/2 #Ancho de banda de Gau01
1091 1092 Range = numpy.empty(2)
1092 1093 Range[0] = GaussCenter - GauWidth
1094 Range[1] = GaussCenter + GauWidth
1095 #Points on the Gaussian X axis where the bandwidth lies (min:max)
1095 1096 ClosRangeMin = xSamples[numpy.abs(xSamples-Range[0]).argmin()]
1096 1097 ClosRangeMax = xSamples[numpy.abs(xSamples-Range[1]).argmin()]
1097
1098
1098 1099 PointRangeMin = numpy.where(xSamples==ClosRangeMin)[0][0]
1099 1100 PointRangeMax = numpy.where(xSamples==ClosRangeMax)[0][0]
1100
1101
1101 1102 Range=numpy.array([ PointRangeMin, PointRangeMax ])
1102
1103
1103 1104 FrecRange = xFrec[ Range[0] : Range[1] ]
1104 1105 VelRange = xVel[ Range[0] : Range[1] ]
1105
1106
1106
1107
1107 1108 '''****** Getting SCPC Slope ******'''
1108
1109
1109 1110 for i in range(spc.shape[0]):
1110
1111
1111 1112 if len(FrecRange)>5 and len(FrecRange)<spc.shape[1]*0.3:
1112 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1113
1113 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1114
1114 1115 '''***********************VelRange******************'''
1115
1116
1116 1117 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1117
1118
1118 1119 if len(FrecRange) == len(PhaseRange):
1119 1120 try:
1120 1121 slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange[mask], PhaseRange[mask])
1121 1122 PhaseSlope[i]=slope
1122 1123 PhaseInter[i]=intercept
1123 1124 except:
1124 1125 PhaseSlope[i]=0
1125 1126 PhaseInter[i]=0
1126 1127 else:
1127 1128 PhaseSlope[i]=0
1128 1129 PhaseInter[i]=0
1129 1130 else:
1130 1131 PhaseSlope[i]=0
1131 1132 PhaseInter[i]=0
1132
1133
1133
1134
1134 1135 '''Getting constant C'''
1135 1136 cC=(Fij*numpy.pi)**2
1136
1137
1137 1138 '''****** Getting constants F and G ******'''
1138 1139 MijEijNij=numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1139 1140 MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi)
1140 MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
1141 MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
1141 1142 MijResults=numpy.array([MijResult0,MijResult1])
1142 1143 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1143
1144
1144 1145 '''****** Getting constants A, B and H ******'''
1145 1146 W01=numpy.nanmax( FitGauss01 ) #numpy.abs(CSPCSamples[0]))
1146 1147 W02=numpy.nanmax( FitGauss02 ) #numpy.abs(CSPCSamples[1]))
1147 1148 W12=numpy.nanmax( FitGauss12 ) #numpy.abs(CSPCSamples[2]))
1148
1149
1149 1150 WijResult0=((cF*Xi01+cG*Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC))
1150 1151 WijResult1=((cF*Xi02+cG*Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC))
1151 1152 WijResult2=((cF*Xi12+cG*Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC))
1152
1153
1153 1154 WijResults=numpy.array([WijResult0, WijResult1, WijResult2])
1154
1155 WijEijNij=numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1155
1156 WijEijNij=numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1156 1157 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1157
1158
1158 1159 VxVy=numpy.array([[cA,cH],[cH,cB]])
1159 1160 VxVyResults=numpy.array([-cF,-cG])
1160 1161 (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
1161
1162
1162 1163 Vzon = Vy
1163 1164 Vmer = Vx
1164 1165 Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1165 1166 Vang=numpy.arctan2(Vmer,Vzon)
1166 1167 if numpy.abs( popt[1] ) < 3.5 and len(FrecRange)>4:
1167 1168 Vver=popt[1]
1168 1169 else:
1169 1170 Vver=numpy.NaN
1170 1171 FitGaussCSPC = numpy.array([FitGauss01,FitGauss02,FitGauss12])
1171
1172
1172
1173
1173 1174 return Vzon, Vmer, Vver, GaussCenter, PhaseSlope, FitGaussCSPC
1174
1175
1175 1176 class SpectralMoments(Operation):
1176
1177
1177 1178 '''
1178 1179 Function SpectralMoments()
1179
1180
1180 1181 Calculates moments (power, mean, standard deviation) and SNR of the signal
1181
1182
1182 1183 Type of dataIn: Spectra
1183
1184
1184 1185 Configuration Parameters:
1185
1186
1186 1187 dirCosx : Cosine director in X axis
1187 1188 dirCosy : Cosine director in Y axis
1188
1189
1189 1190 elevation :
1190 1191 azimuth :
1191
1192
1192 1193 Input:
1194 channelList : simple channel list to select e.g. [2,3,7]
1194 1195 self.dataOut.data_pre : Spectral data
1195 1196 self.dataOut.abscissaList : List of frequencies
1196 1197 self.dataOut.noise : Noise level per channel
1197
1198
1198 1199 Affected:
1199 1200 self.dataOut.moments : Parameters per channel
1200 1201 self.dataOut.data_SNR : SNR per channel
1201
1202
1202 1203 '''
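# Reading aid (added sketch): for each height, __calculateMoments() computes,
# over the valid bins and after removing the noise level n0,
#
#     power = sum( (S - n0) * w )
#     fd    = sum( (S - n0) * f * w ) / power                      # Doppler shift
#     width = sqrt( sum( (S - n0) * w * (f - fd)**2 ) / power )    # spectral width
#     snr   = (mean(S) - n0) / n0
#
# where S is the (optionally smoothed) spectrum, f the abscissa and w the
# frequency window 'fwindow'.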
1203
1204
1204 1205 def run(self, dataOut):
1205
1206
1206 1207 #dataOut.data_pre = dataOut.data_pre[0]
1207 1208 data = dataOut.data_pre[0]
1208 1209 absc = dataOut.abscissaList[:-1]
1209 1210 noise = dataOut.noise
1210 1211 nChannel = data.shape[0]
1211 1212 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1212
1213
1213 1214 for ind in range(nChannel):
1214 1215 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1215
1216
1216 1217 dataOut.moments = data_param[:,1:,:]
1217 1218 dataOut.data_SNR = data_param[:,0]
1218 1219 dataOut.data_POW = data_param[:,1]
1219 1220 dataOut.data_DOP = data_param[:,2]
1220 1221 dataOut.data_WIDTH = data_param[:,3]
1221 1222 return dataOut
1222
1223 def __calculateMoments(self, oldspec, oldfreq, n0,
1223
1224 def __calculateMoments(self, oldspec, oldfreq, n0,
1224 1225 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1225
1226
1226 1227 if (nicoh is None): nicoh = 1
1228 if (graph is None): graph = 0
1228 1229 if (smooth is None): smooth = 0
1229 1230 elif (smooth < 3): smooth = 0
1230 1231
1231 1232 if (type1 is None): type1 = 0
1232 1233 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1233 1234 if (snrth is None): snrth = -3
1234 1235 if (dc is None): dc = 0
1235 1236 if (aliasing is None): aliasing = 0
1236 1237 if (oldfd is None): oldfd = 0
1237 1238 if (wwauto is None): wwauto = 0
1238
1239
1239 1240 if (n0 < 1.e-20): n0 = 1.e-20
1240
1241
1241 1242 freq = oldfreq
1242 1243 vec_power = numpy.zeros(oldspec.shape[1])
1243 1244 vec_fd = numpy.zeros(oldspec.shape[1])
1244 1245 vec_w = numpy.zeros(oldspec.shape[1])
1245 1246 vec_snr = numpy.zeros(oldspec.shape[1])
1246
1247
1247 1248 oldspec = numpy.ma.masked_invalid(oldspec)
1248 1249
1249 1250 for ind in range(oldspec.shape[1]):
1250
1251
1251 1252 spec = oldspec[:,ind]
1252 1253 aux = spec*fwindow
1253 1254 max_spec = aux.max()
1254 1255 m = list(aux).index(max_spec)
1255
1256 #Smooth
1256
1257 #Smooth
1257 1258 if (smooth == 0): spec2 = spec
1258 1259 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1259
1260
1260 1261 # Moments computation
1261 1262 bb = spec2[list(range(m,spec2.size))]
1262 1263 bb = (bb<n0).nonzero()
1263 1264 bb = bb[0]
1264
1265
1265 1266 ss = spec2[list(range(0,m + 1))]
1266 1267 ss = (ss<n0).nonzero()
1267 1268 ss = ss[0]
1268
1269
1269 1270 if (bb.size == 0):
1270 1271 bb0 = spec.size - 1 - m
1271 else:
1272 else:
1272 1273 bb0 = bb[0] - 1
1273 1274 if (bb0 < 0):
1274 1275 bb0 = 0
1275
1276
1276 1277 if (ss.size == 0): ss1 = 1
1277 1278 else: ss1 = max(ss) + 1
1278
1279
1279 1280 if (ss1 > m): ss1 = m
1280
1281 valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
1281
1282 valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
1282 1283 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1283 1284 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1284 1285 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
1285 snr = (spec2.mean()-n0)/n0
1286
1287 if (snr < 1.e-20) :
1286 snr = (spec2.mean()-n0)/n0
1287
1288 if (snr < 1.e-20) :
1288 1289 snr = 1.e-20
1289
1290
1290 1291 vec_power[ind] = power
1291 1292 vec_fd[ind] = fd
1292 1293 vec_w[ind] = w
1293 1294 vec_snr[ind] = snr
1294
1295
1295 1296 moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1296 1297 return moments
1297
1298
1298 1299 #------------------ Get SA Parameters --------------------------
1299
1300
1300 1301 def GetSAParameters(self):
1301 1302 #SA en frecuencia
1302 1303 pairslist = self.dataOut.groupList
1303 1304 num_pairs = len(pairslist)
1304
1305
1305 1306 vel = self.dataOut.abscissaList
1306 1307 spectra = self.dataOut.data_pre
1307 1308 cspectra = self.dataIn.data_cspc
1308 delta_v = vel[1] - vel[0]
1309
1309 delta_v = vel[1] - vel[0]
1310
1310 1311 #Calculating the power spectrum
1311 1312 spc_pow = numpy.sum(spectra, 3)*delta_v
1312 1313 #Normalizing Spectra
1313 1314 norm_spectra = spectra/spc_pow
1314 1315 #Calculating the norm_spectra at peak
1315 max_spectra = numpy.max(norm_spectra, 3)
1316
1316 max_spectra = numpy.max(norm_spectra, 3)
1317
1317 1318 #Normalizing Cross Spectra
1318 1319 norm_cspectra = numpy.zeros(cspectra.shape)
1319
1320
1320 1321 for i in range(num_chan):
1321 1322 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1322
1323
1323 1324 max_cspectra = numpy.max(norm_cspectra,2)
1324 1325 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1325
1326
1326 1327 for i in range(num_pairs):
1327 1328 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1328 1329 #------------------- Get Lags ----------------------------------
1329
1330
1330 1331 class SALags(Operation):
1331 1332 '''
1332 1333 Function GetMoments()
1333 1334
1334 1335 Input:
1335 1336 self.dataOut.data_pre
1336 1337 self.dataOut.abscissaList
1337 1338 self.dataOut.noise
1338 1339 self.dataOut.normFactor
1339 1340 self.dataOut.data_SNR
1340 1341 self.dataOut.groupList
1341 1342 self.dataOut.nChannels
1342
1343
1343 1344 Affected:
1344 1345 self.dataOut.data_param
1345
1346
1346 1347 '''
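# Reading aid (added sketch): after normalizing the ACF/CCF by their normFactor,
# __calculateTaus() takes TauCross at the lag of the CCF maximum and TauAuto at
# the lag where the mean ACF is closest to the CCF value at lag zero;
# __calculateLag1Phase() then returns the phase of the mean ACF one lag past zero.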
1348 def run(self, dataOut):
1348 1349 data_acf = dataOut.data_pre[0]
1349 1350 data_ccf = dataOut.data_pre[1]
1350 1351 normFactor_acf = dataOut.normFactor[0]
1351 1352 normFactor_ccf = dataOut.normFactor[1]
1352 1353 pairs_acf = dataOut.groupList[0]
1353 1354 pairs_ccf = dataOut.groupList[1]
1354
1355
1355 1356 nHeights = dataOut.nHeights
1356 1357 absc = dataOut.abscissaList
1357 1358 noise = dataOut.noise
1358 1359 SNR = dataOut.data_SNR
1359 1360 nChannels = dataOut.nChannels
1360 1361 # pairsList = dataOut.groupList
1361 1362 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1362 1363
1363 1364 for l in range(len(pairs_acf)):
1364 1365 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1365
1366
1366 1367 for l in range(len(pairs_ccf)):
1367 1368 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1368
1369
1369 1370 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1370 1371 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1371 1372 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1372 1373 return
1373
1374
1374 1375 # def __getPairsAutoCorr(self, pairsList, nChannels):
1375 #
1376 #
1376 1377 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1377 #
1378 # for l in range(len(pairsList)):
1378 #
1379 # for l in range(len(pairsList)):
1379 1380 # firstChannel = pairsList[l][0]
1380 1381 # secondChannel = pairsList[l][1]
1381 #
1382 # #Obteniendo pares de Autocorrelacion
1382 #
1383 # #Obteniendo pares de Autocorrelacion
1383 1384 # if firstChannel == secondChannel:
1384 1385 # pairsAutoCorr[firstChannel] = int(l)
1385 #
1386 #
1386 1387 # pairsAutoCorr = pairsAutoCorr.astype(int)
1387 #
1388 #
1388 1389 # pairsCrossCorr = range(len(pairsList))
1389 1390 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1390 #
1391 #
1391 1392 # return pairsAutoCorr, pairsCrossCorr
1392
1393
1393 1394 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1394
1395
1395 1396 lag0 = data_acf.shape[1]//2
1396 1397 #Autocorrelation function
1397 1398 mean_acf = stats.nanmean(data_acf, axis = 0)
1398
1399
1399 1400 #Obtaining the TauCross index
1400 1401 ind_ccf = data_ccf.argmax(axis = 1)
1401 1402 #Obtaining the TauAuto index
1402 1403 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1403 1404 ccf_lag0 = data_ccf[:,lag0,:]
1404
1405
1405 1406 for i in range(ccf_lag0.shape[0]):
1406 1407 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1407
1408
1408 1409 #Obtaining TauCross and TauAuto
1409 1410 tau_ccf = lagRange[ind_ccf]
1410 1411 tau_acf = lagRange[ind_acf]
1411
1412
1412 1413 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1413
1414
1414 1415 tau_ccf[Nan1,Nan2] = numpy.nan
1415 1416 tau_acf[Nan1,Nan2] = numpy.nan
1416 1417 tau = numpy.vstack((tau_ccf,tau_acf))
1417
1418
1418 1419 return tau
1419
1420
1420 1421 def __calculateLag1Phase(self, data, lagTRange):
1421 1422 data1 = stats.nanmean(data, axis = 0)
1422 1423 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1423 1424
1424 1425 phase = numpy.angle(data1[lag1,:])
1425
1426
1426 1427 return phase
1427
1428
1428 1429 class SpectralFitting(Operation):
1429 1430 '''
1430 1431 Function GetMoments()
1431
1432
1432 1433 Input:
1433 1434 Output:
1434 1435 Variables modified:
1435 1436 '''
1436
1437 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1438
1439
1437
1438 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1439
1440
1440 1441 if path != None:
1441 1442 sys.path.append(path)
1442 1443 self.dataOut.library = importlib.import_module(file)
1443
1444
1444 1445 #To be inserted as a parameter
1445 1446 groupArray = numpy.array(groupList)
1446 # groupArray = numpy.array([[0,1],[2,3]])
1447 # groupArray = numpy.array([[0,1],[2,3]])
1447 1448 self.dataOut.groupList = groupArray
1448
1449
1449 1450 nGroups = groupArray.shape[0]
1450 1451 nChannels = self.dataIn.nChannels
1451 1452 nHeights=self.dataIn.heightList.size
1452
1453
1453 1454 #Parameters Array
1454 1455 self.dataOut.data_param = None
1455
1456
1456 1457 #Set constants
1457 1458 constants = self.dataOut.library.setConstants(self.dataIn)
1458 1459 self.dataOut.constants = constants
1459 1460 M = self.dataIn.normFactor
1460 1461 N = self.dataIn.nFFTPoints
1461 1462 ippSeconds = self.dataIn.ippSeconds
1462 1463 K = self.dataIn.nIncohInt
1463 1464 pairsArray = numpy.array(self.dataIn.pairsList)
1464
1465
1465 1466 #List of possible combinations
1466 1467 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2)) # materialized so it can be iterated once per group below
1467 1468 indCross = numpy.zeros(len(listComb), dtype = 'int')
1468
1469
1469 1470 if getSNR:
1470 1471 listChannels = groupArray.reshape((groupArray.size))
1471 1472 listChannels.sort()
1472 1473 noise = self.dataIn.getNoise()
1473 1474 self.dataOut.data_SNR = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1474
1475 for i in range(nGroups):
1475
1476 for i in range(nGroups):
1476 1477 coord = groupArray[i,:]
1477
1478
1478 1479 #Input data array
1479 1480 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1480 1481 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1481
1482
1482 1483 #Cross Spectra data array for Covariance Matrixes
1483 1484 ind = 0
1484 1485 for pairs in listComb:
1485 1486 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1486 1487 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1487 1488 ind += 1
1488 1489 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1489 1490 dataCross = dataCross**2/K
1490
1491
1491 1492 for h in range(nHeights):
1492
1493
1493 1494 #Input
1494 1495 d = data[:,h]
1495 1496
1496 1497 #Covariance Matrix
1497 1498 D = numpy.diag(d**2/K)
1498 1499 ind = 0
1499 1500 for pairs in listComb:
1500 1501 #Coordinates in Covariance Matrix
1501 x = pairs[0]
1502 x = pairs[0]
1502 1503 y = pairs[1]
1503 1504 #Channel Index
1504 1505 S12 = dataCross[ind,:,h]
1505 1506 D12 = numpy.diag(S12)
1506 1507 #Completing Covariance Matrix with Cross Spectras
1507 1508 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1508 1509 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1509 1510 ind += 1
1510 1511 Dinv=numpy.linalg.inv(D)
1511 1512 L=numpy.linalg.cholesky(Dinv)
1512 1513 LT=L.T
1513 1514
1514 1515 dp = numpy.dot(LT,d)
1515
1516
1516 1517 #Initial values
1517 1518 data_spc = self.dataIn.data_spc[coord,:,h]
1518
1519
1519 1520 if (h>0)and(error1[3]<5):
1520 1521 p0 = self.dataOut.data_param[i,:,h-1]
1521 1522 else:
1522 1523 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1523
1524
1524 1525 try:
1525 1526 #Least Squares
1526 1527 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1527 1528 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1528 1529 #Chi square error
1529 1530 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1530 1531 #Error with Jacobian
1531 1532 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1532 1533 except:
1533 1534 minp = p0*numpy.nan
1534 1535 error0 = numpy.nan
1535 1536 error1 = p0*numpy.nan
1536
1537
1537 1538 #Save
1538 1539 if self.dataOut.data_param is None:
1539 1540 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1540 1541 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1541
1542
1542 1543 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1543 1544 self.dataOut.data_param[i,:,h] = minp
1544 1545 return
1545
1546
1546 1547 def __residFunction(self, p, dp, LT, constants):
1547 1548
1548 1549 fm = self.dataOut.library.modelFunction(p, constants)
1549 1550 fmp=numpy.dot(LT,fm)
1550
1551
1551 1552 return dp-fmp
1552 1553
1553 1554 def __getSNR(self, z, noise):
1554
1555
1555 1556 avg = numpy.average(z, axis=1)
1556 1557 SNR = (avg.T-noise)/noise
1557 1558 SNR = SNR.T
1558 1559 return SNR
1559
1560
1560 1561 def __chisq(p,chindex,hindex):
1561 1562 #similar to Resid but calculates CHI**2
1562 1563 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1563 1564 dp=numpy.dot(LT,d)
1564 1565 fmp=numpy.dot(LT,fm)
1565 1566 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1566 1567 return chisq
1567
1568
1568 1569 class WindProfiler(Operation):
1569
1570
1570 1571 __isConfig = False
1571
1572
1572 1573 __initime = None
1573 1574 __lastdatatime = None
1574 1575 __integrationtime = None
1575
1576
1576 1577 __buffer = None
1577
1578
1578 1579 __dataReady = False
1579
1580
1580 1581 __firstdata = None
1581
1582
1582 1583 n = None
1583
1584 def __init__(self):
1584
1585 def __init__(self):
1585 1586 Operation.__init__(self)
1586
1587
1587 1588 def __calculateCosDir(self, elev, azim):
1588 1589 zen = (90 - elev)*numpy.pi/180
1589 1590 azim = azim*numpy.pi/180
1590 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1591 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1591 1592 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1592
1593
1593 1594 signX = numpy.sign(numpy.cos(azim))
1594 1595 signY = numpy.sign(numpy.sin(azim))
1595
1596
1596 1597 cosDirX = numpy.copysign(cosDirX, signX)
1597 1598 cosDirY = numpy.copysign(cosDirY, signY)
1598 1599 return cosDirX, cosDirY
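# Illustrative usage (added, with hypothetical values): for a beam at 80 deg
# elevation and 30 deg azimuth,
#
#     cosDirX, cosDirY = self.__calculateCosDir(80., 30.)
#
# returns the direction cosines consumed by __calculateAngles()/techniqueDBS() below.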
1599
1600
1600 1601 def __calculateAngles(self, theta_x, theta_y, azimuth):
1601
1602
1602 1603 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1603 1604 zenith_arr = numpy.arccos(dir_cosw)
1604 1605 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1605
1606
1606 1607 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1607 1608 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1608
1609
1609 1610 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1610 1611
1611 1612 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1612
1613 #
1613
1614 #
1614 1615 if horOnly:
1615 1616 A = numpy.c_[dir_cosu,dir_cosv]
1616 1617 else:
1617 1618 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1618 1619 A = numpy.asmatrix(A)
1619 1620 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1620 1621
1621 1622 return A1
1622 1623
1623 1624 def __correctValues(self, heiRang, phi, velRadial, SNR):
1624 1625 listPhi = phi.tolist()
1625 1626 maxid = listPhi.index(max(listPhi))
1626 1627 minid = listPhi.index(min(listPhi))
1627
1628 rango = list(range(len(phi)))
1628
1629 rango = list(range(len(phi)))
1629 1630 # rango = numpy.delete(rango,maxid)
1630
1631
1631 1632 heiRang1 = heiRang*math.cos(phi[maxid])
1632 1633 heiRangAux = heiRang*math.cos(phi[minid])
1633 1634 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1634 1635 heiRang1 = numpy.delete(heiRang1,indOut)
1635
1636
1636 1637 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1637 1638 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1638
1639
1639 1640 for i in rango:
1640 1641 x = heiRang*math.cos(phi[i])
1641 1642 y1 = velRadial[i,:]
1642 1643 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1643
1644
1644 1645 x1 = heiRang1
1645 1646 y11 = f1(x1)
1646
1647
1647 1648 y2 = SNR[i,:]
1648 1649 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1649 1650 y21 = f2(x1)
1650
1651
1651 1652 velRadial1[i,:] = y11
1652 1653 SNR1[i,:] = y21
1653
1654
1654 1655 return heiRang1, velRadial1, SNR1
1655 1656
1656 1657 def __calculateVelUVW(self, A, velRadial):
1657
1658
1658 1659 #Matrix operation
1659 1660 # velUVW = numpy.zeros((velRadial.shape[1],3))
1660 1661 # for ind in range(velRadial.shape[1]):
1661 1662 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1662 1663 # velUVW = velUVW.transpose()
1663 1664 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1664 1665 velUVW[:,:] = numpy.dot(A,velRadial)
1665
1666
1666
1667
1667 1668 return velUVW
1668
1669
1669 1670 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1670
1671
1671 1672 def techniqueDBS(self, kwargs):
1672 1673 """
1673 1674 Function that implements Doppler Beam Swinging (DBS) technique.
1674
1675
1675 1676 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1676 1677 Direction correction (if necessary), Ranges and SNR
1677
1678
1678 1679 Output: Winds estimation (Zonal, Meridional and Vertical)
1679
1680
1680 1681 Parameters affected: Winds, height range, SNR
1681 1682 """
1682 1683 velRadial0 = kwargs['velRadial']
1683 1684 heiRang = kwargs['heightList']
1684 1685 SNR0 = kwargs['SNR']
1685
1686
1686 1687 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1687 1688 theta_x = numpy.array(kwargs['dirCosx'])
1688 1689 theta_y = numpy.array(kwargs['dirCosy'])
1689 1690 else:
1690 1691 elev = numpy.array(kwargs['elevation'])
1691 1692 azim = numpy.array(kwargs['azimuth'])
1692 1693 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1693 azimuth = kwargs['correctAzimuth']
1694 azimuth = kwargs['correctAzimuth']
1694 1695 if 'horizontalOnly' in kwargs:
1695 1696 horizontalOnly = kwargs['horizontalOnly']
1696 1697 else: horizontalOnly = False
1697 1698 if 'correctFactor' in kwargs:
1698 1699 correctFactor = kwargs['correctFactor']
1699 1700 else: correctFactor = 1
1700 1701 if 'channelList' in kwargs:
1701 1702 channelList = kwargs['channelList']
1702 1703 if len(channelList) == 2:
1703 1704 horizontalOnly = True
1704 1705 arrayChannel = numpy.array(channelList)
1705 1706 param = param[arrayChannel,:,:]
1706 1707 theta_x = theta_x[arrayChannel]
1707 1708 theta_y = theta_y[arrayChannel]
1708
1709 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1710 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1709
1710 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1711 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1711 1712 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1712
1713
1713 1714 #Calculo de Componentes de la velocidad con DBS
1714 1715 winds = self.__calculateVelUVW(A,velRadial1)
1715
1716
1716 1717 return winds, heiRang1, SNR1
1717
1718
1718 1719 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1719
1720
1720 1721 nPairs = len(pairs_ccf)
1721 1722 posx = numpy.asarray(posx)
1722 1723 posy = numpy.asarray(posy)
1723
1724
1724 1725 #Inverse rotation to align with the azimuth
1725 1726 if azimuth!= None:
1726 1727 azimuth = azimuth*math.pi/180
1727 1728 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1728 1729 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1729 1730 else:
1730 1731 posx1 = posx
1731 1732 posy1 = posy
1732
1733
1733 1734 #Distance computation
1734 1735 distx = numpy.zeros(nPairs)
1735 1736 disty = numpy.zeros(nPairs)
1736 1737 dist = numpy.zeros(nPairs)
1737 1738 ang = numpy.zeros(nPairs)
1738
1739
1739 1740 for i in range(nPairs):
1740 1741 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1741 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1742 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1742 1743 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1743 1744 ang[i] = numpy.arctan2(disty[i],distx[i])
1744
1745
1745 1746 return distx, disty, dist, ang
1746 #Calculo de Matrices
1747 #Calculo de Matrices
1747 1748 # nPairs = len(pairs)
1748 1749 # ang1 = numpy.zeros((nPairs, 2, 1))
1749 1750 # dist1 = numpy.zeros((nPairs, 2, 1))
1750 #
1751 #
1751 1752 # for j in range(nPairs):
1752 1753 # dist1[j,0,0] = dist[pairs[j][0]]
1753 1754 # dist1[j,1,0] = dist[pairs[j][1]]
1754 1755 # ang1[j,0,0] = ang[pairs[j][0]]
1755 1756 # ang1[j,1,0] = ang[pairs[j][1]]
1756 #
1757 #
1757 1758 # return distx,disty, dist1,ang1
1758 1759
1759
1760
1760 1761 def __calculateVelVer(self, phase, lagTRange, _lambda):
1761 1762
1762 1763 Ts = lagTRange[1] - lagTRange[0]
1763 1764 velW = -_lambda*phase/(4*math.pi*Ts)
1764
1765
1765 1766 return velW
1766
1767
1767 1768 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1768 1769 nPairs = tau1.shape[0]
1769 1770 nHeights = tau1.shape[1]
1770 vel = numpy.zeros((nPairs,3,nHeights))
1771 vel = numpy.zeros((nPairs,3,nHeights))
1771 1772 dist1 = numpy.reshape(dist, (dist.size,1))
1772
1773
1773 1774 angCos = numpy.cos(ang)
1774 1775 angSin = numpy.sin(ang)
1775
1776 vel0 = dist1*tau1/(2*tau2**2)
1776
1777 vel0 = dist1*tau1/(2*tau2**2)
1777 1778 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1778 1779 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1779
1780
1780 1781 ind = numpy.where(numpy.isinf(vel))
1781 1782 vel[ind] = numpy.nan
1782
1783
1783 1784 return vel
1784
1785
1785 1786 # def __getPairsAutoCorr(self, pairsList, nChannels):
1786 #
1787 #
1787 1788 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1788 #
1789 # for l in range(len(pairsList)):
1789 #
1790 # for l in range(len(pairsList)):
1790 1791 # firstChannel = pairsList[l][0]
1791 1792 # secondChannel = pairsList[l][1]
1792 #
1793 # #Obteniendo pares de Autocorrelacion
1793 #
1794 # #Obteniendo pares de Autocorrelacion
1794 1795 # if firstChannel == secondChannel:
1795 1796 # pairsAutoCorr[firstChannel] = int(l)
1796 #
1797 #
1797 1798 # pairsAutoCorr = pairsAutoCorr.astype(int)
1798 #
1799 #
1799 1800 # pairsCrossCorr = range(len(pairsList))
1800 1801 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1801 #
1802 #
1802 1803 # return pairsAutoCorr, pairsCrossCorr
1803
1804
1804 1805 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1805 1806 def techniqueSA(self, kwargs):
1806
1807 """
1807
1808 """
1808 1809 Function that implements Spaced Antenna (SA) technique.
1809
1810
1810 1811 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1811 1812 Direction correction (if necessary), Ranges and SNR
1812
1813
1813 1814 Output: Winds estimation (Zonal, Meridional and Vertical)
1814
1815
1815 1816 Parameters affected: Winds
1816 1817 """
1817 1818 position_x = kwargs['positionX']
1818 1819 position_y = kwargs['positionY']
1819 1820 azimuth = kwargs['azimuth']
1820
1821
1821 1822 if 'correctFactor' in kwargs:
1822 1823 correctFactor = kwargs['correctFactor']
1823 1824 else:
1824 1825 correctFactor = 1
1825
1826
1826 1827 groupList = kwargs['groupList']
1827 1828 pairs_ccf = groupList[1]
1828 1829 tau = kwargs['tau']
1829 1830 _lambda = kwargs['_lambda']
1830
1831
1831 1832 #Cross Correlation pairs obtained
1832 1833 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1833 1834 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1834 1835 # pairsSelArray = numpy.array(pairsSelected)
1835 1836 # pairs = []
1836 #
1837 #
1837 1838 # #Wind estimation pairs obtained
1838 1839 # for i in range(pairsSelArray.shape[0]/2):
1839 1840 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1840 1841 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1841 1842 # pairs.append((ind1,ind2))
1842
1843
1843 1844 indtau = tau.shape[0]//2 #integer division so it can be used as a slice index (Python 3)
1844 1845 tau1 = tau[:indtau,:]
1845 1846 tau2 = tau[indtau:-1,:]
1846 1847 # tau1 = tau1[pairs,:]
1847 1848 # tau2 = tau2[pairs,:]
1848 1849 phase1 = tau[-1,:]
1849
1850
1850 1851 #---------------------------------------------------------------------
1852 #Direct method
1852 1853 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1853 1854 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1854 1855 winds = stats.nanmean(winds, axis=0)
1855 1856 #---------------------------------------------------------------------
1856 1857 #General method
1857 1858 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1858 1859 # #Correlation function coefficients calculation
1859 1860 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1860 1861 # #Velocity calculation
1861 1862 # winds = self.calculateVelUV(F,G,A,B,H)
1862 1863
1863 1864 #---------------------------------------------------------------------
1864 1865 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1865 1866 winds = correctFactor*winds
1866 1867 return winds
1867
1868
1868 1869 def __checkTime(self, currentTime, paramInterval, outputInterval):
1869
1870
1870 1871 dataTime = currentTime + paramInterval
1871 1872 deltaTime = dataTime - self.__initime
1872
1873
1873 1874 if deltaTime >= outputInterval or deltaTime < 0:
1874 1875 self.__dataReady = True
1876
1876 return
1877
1877 1878 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1878 1879 '''
1879 1880 Function that implements winds estimation technique with detected meteors.
1880
1881
1881 1882 Input: Detected meteors, Minimum meteor quantity to wind estimation
1882
1883
1883 1884 Output: Winds estimation (Zonal and Meridional)
1884
1885
1885 1886 Parameters affected: Winds
1887 '''
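# Note: per height bin with at least meteorThresh detections, the measured radial velocities
# are fit in the least-squares sense to vel = u*l + v*m, where l = sin(zen)*sin(azim) and
# m = sin(zen)*cos(azim) are the direction cosines of each echo; the normal-equation solution
# (A^T A)^-1 A^T vel used below gives the zonal and meridional wind components.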
1887 1888 #Settings
1888 1889 nInt = (heightMax - heightMin)/2
1889 1890 nInt = int(nInt)
1891
1891 winds = numpy.zeros((2,nInt))*numpy.nan
1892
1892 1893 #Filter errors
1893 1894 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1894 1895 finalMeteor = arrayMeteor[error,:]
1895
1896
1896 1897 #Meteor Histogram
1897 1898 finalHeights = finalMeteor[:,2]
1898 1899 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1899 1900 nMeteorsPerI = hist[0]
1900 1901 heightPerI = hist[1]
1901
1902
1902 1903 #Sort of meteors
1903 1904 indSort = finalHeights.argsort()
1904 1905 finalMeteor2 = finalMeteor[indSort,:]
1905
1906
1906 1907 # Calculating winds
1907 1908 ind1 = 0
1909 ind2 = 0
1910
1910 1911 for i in range(nInt):
1911 1912 nMet = nMeteorsPerI[i]
1912 1913 ind1 = ind2
1913 1914 ind2 = ind1 + nMet
1914
1915
1915 1916 meteorAux = finalMeteor2[ind1:ind2,:]
1916
1917
1917 1918 if meteorAux.shape[0] >= meteorThresh:
1918 1919 vel = meteorAux[:, 6]
1919 1920 zen = meteorAux[:, 4]*numpy.pi/180
1920 1921 azim = meteorAux[:, 3]*numpy.pi/180
1921
1922
1922 1923 n = numpy.cos(zen)
1923 1924 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1924 1925 # l = m*numpy.tan(azim)
1925 1926 l = numpy.sin(zen)*numpy.sin(azim)
1926 1927 m = numpy.sin(zen)*numpy.cos(azim)
1927
1928
1928 1929 A = numpy.vstack((l, m)).transpose()
1929 1930 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1930 1931 windsAux = numpy.dot(A1, vel)
1931
1932
1932 1933 winds[0,i] = windsAux[0]
1933 1934 winds[1,i] = windsAux[1]
1934
1935
1935 1936 return winds, heightPerI[:-1]
1936
1937
1937 1938 def techniqueNSM_SA(self, **kwargs):
1938 1939 metArray = kwargs['metArray']
1939 1940 heightList = kwargs['heightList']
1940 1941 timeList = kwargs['timeList']
1941
1942
1942 1943 rx_location = kwargs['rx_location']
1943 1944 groupList = kwargs['groupList']
1944 1945 azimuth = kwargs['azimuth']
1945 1946 dfactor = kwargs['dfactor']
1946 1947 k = kwargs['k']
1947
1948
1948 1949 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1949 1950 d = dist*dfactor
1950 1951 #Phase calculation
1951 1952 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1952
1953
1953 1954 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
1954
1955
1955 1956 velEst = numpy.zeros((heightList.size,2))*numpy.nan
1956 1957 azimuth1 = azimuth1*numpy.pi/180
1957
1958
1958 1959 for i in range(heightList.size):
1959 1960 h = heightList[i]
1960 1961 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
1961 1962 metHeight = metArray1[indH,:]
1962 1963 if metHeight.shape[0] >= 2:
1963 1964 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
1964 1965 iazim = metHeight[:,1].astype(int)
1965 1966 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
1966 1967 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
1967 1968 A = numpy.asmatrix(A)
1968 1969 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
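# Note: least-squares fit of the per-meteor radial velocities to a horizontal wind; each row
# of A holds (cos(az), sin(az)) for the baseline azimuth of that detection, and the
# pseudo-inverse solves A * velHor ~= velAux for the two horizontal components at this height.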
1969 1970 velHor = numpy.dot(A1,velAux)
1970
1971
1971 1972 velEst[i,:] = numpy.squeeze(velHor)
1972 1973 return velEst
1973
1974
1974 1975 def __getPhaseSlope(self, metArray, heightList, timeList):
1975 1976 meteorList = []
1976 1977 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
1977 1978 #Putting back together the meteor matrix
1978 1979 utctime = metArray[:,0]
1979 1980 uniqueTime = numpy.unique(utctime)
1980
1981
1981 1982 phaseDerThresh = 0.5
1982 1983 ippSeconds = timeList[1] - timeList[0]
1983 1984 sec = numpy.where(timeList>1)[0][0]
1984 1985 nPairs = metArray.shape[1] - 6
1985 1986 nHeights = len(heightList)
1986
1987
1987 1988 for t in uniqueTime:
1988 1989 metArray1 = metArray[utctime==t,:]
1989 1990 # phaseDerThresh = numpy.pi/4 #reduce the phase threshold
1990 1991 tmet = metArray1[:,1].astype(int)
1991 1992 hmet = metArray1[:,2].astype(int)
1992
1993
1993 1994 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
1994 1995 metPhase[:,:] = numpy.nan
1995 1996 metPhase[:,hmet,tmet] = metArray1[:,6:].T
1996
1997
1997 1998 #Delete short trails
1998 1999 metBool = ~numpy.isnan(metPhase[0,:,:])
1999 2000 heightVect = numpy.sum(metBool, axis = 1)
2000 2001 metBool[heightVect<sec,:] = False
2001 2002 metPhase[:,heightVect<sec,:] = numpy.nan
2002
2003
2003 2004 #Derivative
2004 2005 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2005 2006 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2006 2007 metPhase[phDerAux] = numpy.nan
2007
2008
2008 2009 #--------------------------METEOR DETECTION -----------------------------------------
2009 2010 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2010
2011
2011 2012 for p in numpy.arange(nPairs):
2012 2013 phase = metPhase[p,:,:]
2013 2014 phDer = metDer[p,:,:]
2014
2015
2015 2016 for h in indMet:
2016 2017 height = heightList[h]
2017 2018 phase1 = phase[h,:] #82
2018 2019 phDer1 = phDer[h,:]
2019
2020
2020 2021 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2021
2022
2022 2023 indValid = numpy.where(~numpy.isnan(phase1))[0]
2023 2024 initMet = indValid[0]
2024 2025 endMet = 0
2025
2026
2026 2027 for i in range(len(indValid)-1):
2027
2028
2028 2029 #Time difference
2029 2030 inow = indValid[i]
2030 2031 inext = indValid[i+1]
2031 2032 idiff = inext - inow
2032 2033 #Phase difference
2034 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2035
2035 2036 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2036 2037 sizeTrail = inow - initMet + 1
2037 2038 if sizeTrail>3*sec: #Too short meteors
2038 2039 x = numpy.arange(initMet,inow+1)*ippSeconds
2039 2040 y = phase1[initMet:inow+1]
2040 2041 ynnan = ~numpy.isnan(y)
2041 2042 x = x[ynnan]
2042 2043 y = y[ynnan]
2043 2044 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
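# Note: a straight line is fit to the unwrapped trail phase versus time; the slope (rad/s) is
# kept as the estimate only when r^2 > 0.5, and is converted into a radial velocity later by
# the caller (techniqueNSM_SA) using the echo height, the baseline length d and the wavenumber k.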
2044 2045 ylin = x*slope + intercept
2045 2046 rsq = r_value**2
2046 2047 if rsq > 0.5:
2047 2048 vel = slope#*height*1000/(k*d)
2048 2049 estAux = numpy.array([utctime,p,height, vel, rsq])
2049 2050 meteorList.append(estAux)
2051 initMet = inext
2051 2052 metArray2 = numpy.array(meteorList)
2052
2053
2053 2054 return metArray2
2054
2055
2055 2056 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2056
2057
2057 2058 azimuth1 = numpy.zeros(len(pairslist))
2058 2059 dist = numpy.zeros(len(pairslist))
2059
2060
2060 2061 for i in range(len(rx_location)):
2061 2062 ch0 = pairslist[i][0]
2062 2063 ch1 = pairslist[i][1]
2063
2064
2064 2065 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2065 2066 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2066 2067 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2067 2068 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2068
2069
2069 2070 azimuth1 -= azimuth0
2070 2071 return azimuth1, dist
2071
2072
2072 2073 def techniqueNSM_DBS(self, **kwargs):
2073 2074 metArray = kwargs['metArray']
2074 2075 heightList = kwargs['heightList']
2076 timeList = kwargs['timeList']
2076 2077 azimuth = kwargs['azimuth']
2077 2078 theta_x = numpy.array(kwargs['theta_x'])
2078 2079 theta_y = numpy.array(kwargs['theta_y'])
2079
2080
2080 2081 utctime = metArray[:,0]
2081 2082 cmet = metArray[:,1].astype(int)
2082 2083 hmet = metArray[:,3].astype(int)
2083 2084 SNRmet = metArray[:,4]
2084 2085 vmet = metArray[:,5]
2085 2086 spcmet = metArray[:,6]
2086
2087
2087 2088 nChan = numpy.max(cmet) + 1
2088 2089 nHeights = len(heightList)
2089 2090
2090 2091 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2091 2092 hmet = heightList[hmet]
2092 2093 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
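# Note: meteor heights are projected onto the vertical with cos(zenith) of the beam (channel)
# that detected them, so oblique-beam detections can be binned on the common height grid.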
2093 2094
2094 2095 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2095 2096
2096 2097 for i in range(nHeights - 1):
2097 2098 hmin = heightList[i]
2098 2099 hmax = heightList[i + 1]
2099 2100
2100 2101 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2101 2102 indthisH = numpy.where(thisH)
2102
2103
2103 2104 if numpy.size(indthisH) > 3:
2104
2105
2105 2106 vel_aux = vmet[thisH]
2106 2107 chan_aux = cmet[thisH]
2107 2108 cosu_aux = dir_cosu[chan_aux]
2108 2109 cosv_aux = dir_cosv[chan_aux]
2109 2110 cosw_aux = dir_cosw[chan_aux]
2110
2111
2112 nch = numpy.size(numpy.unique(chan_aux))
2112 2113 if nch > 1:
2113 2114 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2114 2115 velEst[i,:] = numpy.dot(A,vel_aux)
2115
2116
2116 2117 return velEst
2117 2118
2118 2119 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2119 2120
2120 2121 param = dataOut.data_param
2121 2122 if dataOut.abscissaList is not None:
2122 2123 absc = dataOut.abscissaList[:-1]
2123 2124 # noise = dataOut.noise
2124 2125 heightList = dataOut.heightList
2125 2126 SNR = dataOut.data_SNR
2126
2127
2127 2128 if technique == 'DBS':
2128
2129
2130 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2130 2131 kwargs['heightList'] = heightList
2131 2132 kwargs['SNR'] = SNR
2132
2133
2133 2134 dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
2134 2135 dataOut.utctimeInit = dataOut.utctime
2135 2136 dataOut.outputInterval = dataOut.paramInterval
2136
2137
2137 2138 elif technique == 'SA':
2138
2139
2139 2140 #Parameters
2140 2141 # position_x = kwargs['positionX']
2141 2142 # position_y = kwargs['positionY']
2142 2143 # azimuth = kwargs['azimuth']
2143 #
2144 #
2144 2145 # if kwargs.has_key('crosspairsList'):
2145 2146 # pairs = kwargs['crosspairsList']
2146 2147 # else:
2148 # pairs = None
2149 #
2149 2150 # if kwargs.has_key('correctFactor'):
2150 2151 # correctFactor = kwargs['correctFactor']
2151 2152 # else:
2152 2153 # correctFactor = 1
2153
2154
2154 2155 # tau = dataOut.data_param
2155 2156 # _lambda = dataOut.C/dataOut.frequency
2156 2157 # pairsList = dataOut.groupList
2157 2158 # nChannels = dataOut.nChannels
2158
2159
2159 2160 kwargs['groupList'] = dataOut.groupList
2160 2161 kwargs['tau'] = dataOut.data_param
2161 2162 kwargs['_lambda'] = dataOut.C/dataOut.frequency
2162 2163 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2163 2164 dataOut.data_output = self.techniqueSA(kwargs)
2164 2165 dataOut.utctimeInit = dataOut.utctime
2165 2166 dataOut.outputInterval = dataOut.timeInterval
2166
2167
2168 elif technique == 'Meteors':
2168 2169 dataOut.flagNoData = True
2169 2170 self.__dataReady = False
2170
2171
2171 2172 if 'nHours' in kwargs:
2172 2173 nHours = kwargs['nHours']
2174 else:
2174 2175 nHours = 1
2175
2176
2176 2177 if 'meteorsPerBin' in kwargs:
2177 2178 meteorThresh = kwargs['meteorsPerBin']
2178 2179 else:
2179 2180 meteorThresh = 6
2180
2181
2181 2182 if 'hmin' in kwargs:
2182 2183 hmin = kwargs['hmin']
2183 2184 else: hmin = 70
2184 2185 if 'hmax' in kwargs:
2185 2186 hmax = kwargs['hmax']
2186 2187 else: hmax = 110
2187
2188
2188 2189 dataOut.outputInterval = nHours*3600
2189
2190
2190 2191 if self.__isConfig == False:
2191 2192 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2192 2193 #Get Initial LTC time
2193 2194 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2194 2195 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2195 2196
2196 2197 self.__isConfig = True
2197
2198
2198 2199 if self.__buffer is None:
2199 2200 self.__buffer = dataOut.data_param
2200 2201 self.__firstdata = copy.copy(dataOut)
2201 2202
2202 2203 else:
2203 2204 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2204
2205
2205 2206 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2206
2207
2207 2208 if self.__dataReady:
2208 2209 dataOut.utctimeInit = self.__initime
2209
2210
2210 2211 self.__initime += dataOut.outputInterval #to erase time offset
2211
2212
2212 2213 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2213 2214 dataOut.flagNoData = False
2214 2215 self.__buffer = None
2215
2216
2216 2217 elif technique == 'Meteors1':
2217 2218 dataOut.flagNoData = True
2218 2219 self.__dataReady = False
2219
2220
2220 2221 if 'nMins' in kwargs:
2221 2222 nMins = kwargs['nMins']
2222 2223 else: nMins = 20
2223 2224 if 'rx_location' in kwargs:
2224 2225 rx_location = kwargs['rx_location']
2225 2226 else: rx_location = [(0,1),(1,1),(1,0)]
2226 2227 if 'azimuth' in kwargs:
2227 2228 azimuth = kwargs['azimuth']
2228 2229 else: azimuth = 51.06
2229 2230 if 'dfactor' in kwargs:
2230 2231 dfactor = kwargs['dfactor']
2231 2232 if 'mode' in kwargs:
2232 2233 mode = kwargs['mode']
2233 2234 if 'theta_x' in kwargs:
2235 theta_x = kwargs['theta_x']
2235 2236 if 'theta_y' in kwargs:
2236 2237 theta_y = kwargs['theta_y']
2237 2238 else: mode = 'SA'
2238 2239
2239 2240 #Remove this later
2240 2241 if dataOut.groupList is None:
2241 2242 dataOut.groupList = [(0,1),(0,2),(1,2)]
2242 2243 groupList = dataOut.groupList
2243 2244 C = 3e8
2244 2245 freq = 50e6
2245 2246 lamb = C/freq
2246 2247 k = 2*numpy.pi/lamb
2247
2248
2248 2249 timeList = dataOut.abscissaList
2249 2250 heightList = dataOut.heightList
2250
2251
2251 2252 if self.__isConfig == False:
2252 2253 dataOut.outputInterval = nMins*60
2253 2254 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2254 2255 #Get Initial LTC time
2255 2256 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2256 2257 minuteAux = initime.minute
2257 2258 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2258 2259 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2259 2260
2260 2261 self.__isConfig = True
2261
2262
2262 2263 if self.__buffer is None:
2263 2264 self.__buffer = dataOut.data_param
2264 2265 self.__firstdata = copy.copy(dataOut)
2265 2266
2266 2267 else:
2267 2268 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2268
2269
2269 2270 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2270
2271
2271 2272 if self.__dataReady:
2272 2273 dataOut.utctimeInit = self.__initime
2273 2274 self.__initime += dataOut.outputInterval #to erase time offset
2274
2275
2275 2276 metArray = self.__buffer
2276 2277 if mode == 'SA':
2277 2278 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2278 2279 elif mode == 'DBS':
2279 2280 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2280 2281 dataOut.data_output = dataOut.data_output.T
2281 2282 dataOut.flagNoData = False
2282 2283 self.__buffer = None
2283 2284
2284 2285 return
2285
2286
2286 2287 class EWDriftsEstimation(Operation):
2287
2288
2289 def __init__(self):
2290 Operation.__init__(self)
2291
2291 2292 def __correctValues(self, heiRang, phi, velRadial, SNR):
2292 2293 listPhi = phi.tolist()
2293 2294 maxid = listPhi.index(max(listPhi))
2294 2295 minid = listPhi.index(min(listPhi))
2295
2296
2297 rango = list(range(len(phi)))
2297 2298 # rango = numpy.delete(rango,maxid)
2298
2299
2299 2300 heiRang1 = heiRang*math.cos(phi[maxid])
2300 2301 heiRangAux = heiRang*math.cos(phi[minid])
2301 2302 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2302 2303 heiRang1 = numpy.delete(heiRang1,indOut)
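# Note: the slant ranges of every beam are projected onto vertical heights with cos(zenith);
# the common grid heiRang1 comes from the beam with the largest |zenith| and is trimmed to the
# overlap, then each beam's radial velocity and SNR are cubic-interpolated onto that grid below.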
2303
2304
2304 2305 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2305 2306 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2306
2307
2307 2308 for i in rango:
2308 2309 x = heiRang*math.cos(phi[i])
2309 2310 y1 = velRadial[i,:]
2310 2311 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2311
2312
2312 2313 x1 = heiRang1
2313 2314 y11 = f1(x1)
2314
2315
2315 2316 y2 = SNR[i,:]
2316 2317 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2317 2318 y21 = f2(x1)
2318
2319
2319 2320 velRadial1[i,:] = y11
2320 2321 SNR1[i,:] = y21
2321
2322
2322 2323 return heiRang1, velRadial1, SNR1
2323 2324
2324 2325 def run(self, dataOut, zenith, zenithCorrection):
2325 2326 heiRang = dataOut.heightList
2326 2327 velRadial = dataOut.data_param[:,3,:]
2327 2328 SNR = dataOut.data_SNR
2328
2329
2329 2330 zenith = numpy.array(zenith)
2331 zenith -= zenithCorrection
2331 2332 zenith *= numpy.pi/180
2332
2333
2333 2334 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2334
2335
2335 2336 alp = zenith[0]
2336 2337 bet = zenith[1]
2337
2338
2338 2339 w_w = velRadial1[0,:]
2339 2340 w_e = velRadial1[1,:]
2340
2341
2342 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2343 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
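# Note: the two expressions above invert, via Cramer's rule, the 2x2 system
#   w_w = w*cos(alp) + u*sin(alp)
#   w_e = w*cos(bet) + u*sin(bet)
# relating the radial velocities of the two oblique beams (zenith angles alp, bet) to the
# vertical (w) and zonal (u) wind components.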
2344
2344 2345 winds = numpy.vstack((u,w))
2345
2346
2346 2347 dataOut.heightList = heiRang1
2347 2348 dataOut.data_output = winds
2348 2349 dataOut.data_SNR = SNR1
2349
2350
2350 2351 dataOut.utctimeInit = dataOut.utctime
2351 2352 dataOut.outputInterval = dataOut.timeInterval
2352 2353 return
2353 2354
2354 2355 #--------------- Non Specular Meteor ----------------
2355 2356
2356 2357 class NonSpecularMeteorDetection(Operation):
2357 2358
2358 2359 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2359 2360 data_acf = dataOut.data_pre[0]
2360 2361 data_ccf = dataOut.data_pre[1]
2361 2362 pairsList = dataOut.groupList[1]
2362
2363
2363 2364 lamb = dataOut.C/dataOut.frequency
2364 2365 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2365 2366 paramInterval = dataOut.paramInterval
2366
2367
2367 2368 nChannels = data_acf.shape[0]
2368 2369 nLags = data_acf.shape[1]
2369 2370 nProfiles = data_acf.shape[2]
2370 2371 nHeights = dataOut.nHeights
2371 2372 nCohInt = dataOut.nCohInt
2372 2373 sec = numpy.round(nProfiles/dataOut.paramInterval)
2373 2374 heightList = dataOut.heightList
2374 2375 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2375 2376 utctime = dataOut.utctime
2376
2377
2377 2378 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2378 2379
2379 2380 #------------------------ SNR --------------------------------------
2380 2381 power = data_acf[:,0,:,:].real
2381 2382 noise = numpy.zeros(nChannels)
2382 2383 SNR = numpy.zeros(power.shape)
2383 2384 for i in range(nChannels):
2384 2385 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2385 2386 SNR[i] = (power[i]-noise[i])/noise[i]
2386 2387 SNRm = numpy.nanmean(SNR, axis = 0)
2387 2388 SNRdB = 10*numpy.log10(SNR)
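# Note: the noise of each channel is estimated with the Hildebrand-Sekhon criterion on the
# zero-lag power, SNR is defined as (power - noise)/noise, SNRm averages it over channels,
# and SNRdB is that value in decibels.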
2388
2389
2389 2390 if mode == 'SA':
2390 2391 dataOut.groupList = dataOut.groupList[1]
2391 2392 nPairs = data_ccf.shape[0]
2392 2393 #---------------------- Coherence and Phase --------------------------
2393 2394 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2394 2395 # phase1 = numpy.copy(phase)
2395 2396 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2396
2397
2397 2398 for p in range(nPairs):
2398 2399 ch0 = pairsList[p][0]
2399 2400 ch1 = pairsList[p][1]
2400 2401 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2402 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2403 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2404 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2405 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2405 2406 coh = numpy.nanmax(coh1, axis = 0)
2406 2407 # struc = numpy.ones((5,1))
2407 2408 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2408 2409 #---------------------- Radial Velocity ----------------------------
2409 2410 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2410 2411 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2411
2412
2412 2413 if allData:
2413 2414 boolMetFin = ~numpy.isnan(SNRm)
2414 2415 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2415 2416 else:
2416 2417 #------------------------ Meteor mask ---------------------------------
2417 2418 # #SNR mask
2418 2419 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2419 #
2420 #
2420 2421 # #Erase small objects
2422 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2423 #
2423 2424 # auxEEJ = numpy.sum(boolMet1,axis=0)
2424 2425 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2425 2426 # indEEJ = numpy.where(indOver)[0]
2426 2427 # indNEEJ = numpy.where(~indOver)[0]
2427 #
2428 #
2428 2429 # boolMetFin = boolMet1
2429 #
2430 #
2430 2431 # if indEEJ.size > 0:
2432 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2433 #
2433 2434 # boolMet2 = coh > cohThresh
2434 2435 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2435 #
2436 #
2436 2437 # #Final Meteor mask
2437 2438 # boolMetFin = boolMet1|boolMet2
2438
2439
2439 2440 #Coherence mask
2440 2441 boolMet1 = coh > 0.75
2441 2442 struc = numpy.ones((30,1))
2442 2443 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2443
2444
2444 2445 #Derivative mask
2445 2446 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2446 2447 boolMet2 = derPhase < 0.2
2447 2448 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2448 2449 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2449 2450 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2450 2451 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2451 2452 # #Final mask
2452 2453 # boolMetFin = boolMet2
2453 2454 boolMetFin = boolMet1&boolMet2
2454 2455 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2455 2456 #Creating data_param
2456 2457 coordMet = numpy.where(boolMetFin)
2457 2458
2458 2459 tmet = coordMet[0]
2459 2460 hmet = coordMet[1]
2460
2461
2461 2462 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2462 2463 data_param[:,0] = utctime
2463 2464 data_param[:,1] = tmet
2464 2465 data_param[:,2] = hmet
2465 2466 data_param[:,3] = SNRm[tmet,hmet]
2466 2467 data_param[:,4] = velRad[tmet,hmet]
2467 2468 data_param[:,5] = coh[tmet,hmet]
2468 2469 data_param[:,6:] = phase[:,tmet,hmet].T
2469
2470
2470 2471 elif mode == 'DBS':
2471 2472 dataOut.groupList = numpy.arange(nChannels)
2472 2473
2473 2474 #Radial Velocities
2474 2475 phase = numpy.angle(data_acf[:,1,:,:])
2475 2476 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2476 2477 velRad = phase*lamb/(4*numpy.pi*tSamp)
2477
2478
2478 2479 #Spectral width
2479 2480 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2480 2481 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2481 2482 acf1 = data_acf[:,1,:,:]
2482 2483 acf2 = data_acf[:,2,:,:]
2483 2484
2484 2485 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2485 2486 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2486 2487 if allData:
2487 2488 boolMetFin = ~numpy.isnan(SNRdB)
2488 2489 else:
2489 2490 #SNR
2490 2491 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2491 2492 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2492
2493
2493 2494 #Radial velocity
2494 2495 boolMet2 = numpy.abs(velRad) < 20
2495 2496 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2496
2497
2497 2498 #Spectral Width
2498 2499 boolMet3 = spcWidth < 30
2499 2500 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2500 2501 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2501 2502 boolMetFin = boolMet1&boolMet2&boolMet3
2502
2503
2503 2504 #Creating data_param
2504 2505 coordMet = numpy.where(boolMetFin)
2505 2506
2506 2507 cmet = coordMet[0]
2507 2508 tmet = coordMet[1]
2508 2509 hmet = coordMet[2]
2509
2510
2510 2511 data_param = numpy.zeros((tmet.size, 7))
2511 2512 data_param[:,0] = utctime
2512 2513 data_param[:,1] = cmet
2513 2514 data_param[:,2] = tmet
2514 2515 data_param[:,3] = hmet
2515 2516 data_param[:,4] = SNR[cmet,tmet,hmet].T
2516 2517 data_param[:,5] = velRad[cmet,tmet,hmet].T
2517 2518 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2518
2519
2519 2520 # self.dataOut.data_param = data_int
2520 2521 if len(data_param) == 0:
2521 2522 dataOut.flagNoData = True
2522 2523 else:
2523 2524 dataOut.data_param = data_param
2524 2525
2525 2526 def __erase_small(self, binArray, threshX, threshY):
2526 2527 labarray, numfeat = ndimage.measurements.label(binArray)
2527 2528 binArray1 = numpy.copy(binArray)
2528
2529
2529 2530 for i in range(1,numfeat + 1):
2530 2531 auxBin = (labarray==i)
2531 2532 auxSize = auxBin.sum()
2532
2533
2533 2534 x,y = numpy.where(auxBin)
2534 2535 widthX = x.max() - x.min()
2535 2536 widthY = y.max() - y.min()
2536
2537
2537 2538 #width X: 3 seg -> 12.5*3
2539 #width Y:
2540
2540 2541 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2541 2542 binArray1[auxBin] = False
2542
2543
2543 2544 return binArray1
2544 2545
2545 2546 #--------------- Specular Meteor ----------------
2546 2547
2547 2548 class SMDetection(Operation):
2548 2549 '''
2549 2550 Function DetectMeteors()
2550 2551 Project developed with paper:
2551 2552 HOLDSWORTH ET AL. 2004
2552
2553
2553 2554 Input:
2554 2555 self.dataOut.data_pre
2555
2556
2556 2557 centerReceiverIndex: From the channels, which is the center receiver
2557
2558
2558 2559 hei_ref: Height reference for the Beacon signal extraction
2559 2560 tauindex:
2560 2561 predefinedPhaseShifts: Predefined phase offset for the voltage signals
2561
2562
2562 2563 cohDetection: Whether to use coherent detection or not
2563 2564 cohDet_timeStep: Coherent Detection calculation time step
2564 2565 cohDet_thresh: Coherent Detection phase threshold to correct phases
2565
2566
2566 2567 noise_timeStep: Noise calculation time step
2567 2568 noise_multiple: Noise multiple to define signal threshold
2568
2569
2569 2570 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2570 2571 multDet_rangeLimit: Multiple Detection Removal range limit in km
2571
2572
2572 2573 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2574 SNRThresh: Minimum SNR threshold of the meteor signal to be considered a meteor
2575
2575 2576 hmin: Minimum Height of the meteor to use it in the further wind estimations
2576 2577 hmax: Maximum Height of the meteor to use it in the further wind estimations
2577 2578 azimuth: Azimuth angle correction
2578
2579
2579 2580 Affected:
2580 2581 self.dataOut.data_param
2581
2582
2582 2583 Rejection Criteria (Errors):
2583 2584 0: No error; analysis OK
2584 2585 1: SNR < SNR threshold
2585 2586 2: angle of arrival (AOA) ambiguously determined
2586 2587 3: AOA estimate not feasible
2587 2588 4: Large difference in AOAs obtained from different antenna baselines
2588 2589 5: echo at start or end of time series
2589 2590 6: echo less than 5 examples long; too short for analysis
2590 2591 7: echo rise exceeds 0.3s
2591 2592 8: echo decay time less than twice rise time
2592 2593 9: large power level before echo
2593 2594 10: large power level after echo
2594 2595 11: poor fit to amplitude for estimation of decay time
2595 2596 12: poor fit to CCF phase variation for estimation of radial drift velocity
2596 2597 13: height unresolvable echo: not valid height within 70 to 110 km
2597 2598 14: height ambiguous echo: more then one possible height within 70 to 110 km
2598 2599 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2599 2600 16: oscillatory echo, indicating the event is most likely not an underdense echo
2600
2601
2601 2602 17: phase difference in meteor Reestimation
2602
2603
2603 2604 Data Storage:
2604 2605 Meteors for Wind Estimation (8):
2605 2606 Utc Time | Range Height
2606 2607 Azimuth Zenith errorCosDir
2607 2608 VelRad errorVelRad
2608 2609 Phase0 Phase1 Phase2 Phase3
2609 2610 TypeError
2610
2611 '''
2612
2611
2612 '''
2613
2613 2614 def run(self, dataOut, hei_ref = None, tauindex = 0,
2614 2615 phaseOffsets = None,
2616 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2616 2617 noise_timeStep = 4, noise_multiple = 4,
2617 2618 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2618 2619 phaseThresh = 20, SNRThresh = 5,
2619 2620 hmin = 50, hmax=150, azimuth = 0,
2620 2621 channelPositions = None) :
2621
2622
2622
2623
2623 2624 #Getting Pairslist
2624 2625 if channelPositions is None:
2625 2626 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2626 2627 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2627 2628 meteorOps = SMOperations()
2628 2629 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2629 2630 heiRang = dataOut.getHeiRange()
2630 2631 #Get Beacon signal - No Beacon signal anymore
2631 2632 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2632 #
2633 #
2633 2634 # if hei_ref != None:
2634 2635 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2635 #
2636
2637
2636 #
2637
2638
2638 2639 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2639 2640 # see if the user put in pre defined phase shifts
2640 2641 voltsPShift = dataOut.data_pre.copy()
2641
2642
2642 2643 # if predefinedPhaseShifts != None:
2643 2644 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2644 #
2645 #
2645 2646 # # elif beaconPhaseShifts:
2646 2647 # # #get hardware phase shifts using beacon signal
2647 2648 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2648 2649 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2649 #
2650 #
2650 2651 # else:
2652 # hardwarePhaseShifts = numpy.zeros(5)
2653 #
2653 2654 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2654 2655 # for i in range(self.dataOut.data_pre.shape[0]):
2655 2656 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2656 2657
2657 2658 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2658
2659
2659 2660 #Remove DC
2660 2661 voltsDC = numpy.mean(voltsPShift,1)
2661 2662 voltsDC = numpy.mean(voltsDC,1)
2662 2663 for i in range(voltsDC.shape[0]):
2663 2664 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2664
2665 #Don't considerate last heights, theyre used to calculate Hardware Phase Shift
2665
2666 #Don't considerate last heights, theyre used to calculate Hardware Phase Shift
2666 2667 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2667
2668
2668 2669 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2669 2670 #Coherent Detection
2670 2671 if cohDetection:
2671 2672 #use coherent detection to get the net power
2672 2673 cohDet_thresh = cohDet_thresh*numpy.pi/180
2673 2674 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2674
2675
2675 2676 #Non-coherent detection!
2676 2677 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2677 2678 #********** END OF COH/NON-COH POWER CALCULATION**********************
2678
2679
2679 2680 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2680 2681 #Get noise
2681 2682 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2682 2683 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2683 2684 #Get signal threshold
2684 2685 signalThresh = noise_multiple*noise
2685 2686 #Meteor echoes detection
2686 2687 listMeteors = self.__findMeteors(powerNet, signalThresh)
2687 2688 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
2688
2689
2689 2690 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2690 2691 #Parameters
2691 2692 heiRange = dataOut.getHeiRange()
2692 2693 rangeInterval = heiRange[1] - heiRange[0]
2693 2694 rangeLimit = multDet_rangeLimit/rangeInterval
2694 2695 timeLimit = multDet_timeLimit/dataOut.timeInterval
2695 2696 #Multiple detection removals
2696 2697 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2697 2698 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2698
2699
2699 2700 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2700 2701 #Parameters
2701 2702 phaseThresh = phaseThresh*numpy.pi/180
2702 2703 thresh = [phaseThresh, noise_multiple, SNRThresh]
2703 2704 #Meteor reestimation (Errors N 1, 6, 12, 17)
2704 2705 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2705 2706 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2706 2707 #Estimation of decay times (Errors N 7, 8, 11)
2707 2708 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2708 2709 #******************* END OF METEOR REESTIMATION *******************
2709
2710
2710 2711 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2711 2712 #Calculating Radial Velocity (Error N 15)
2712 2713 radialStdThresh = 10
2714 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2714 2715
2715 2716 if len(listMeteors4) > 0:
2716 2717 #Setting New Array
2717 2718 date = dataOut.utctime
2718 2719 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2719
2720
2720 2721 #Correcting phase offset
2721 2722 if phaseOffsets is not None:
2722 2723 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2723 2724 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2724
2725
2725 2726 #Second Pairslist
2726 2727 pairsList = []
2727 2728 pairx = (0,1)
2728 2729 pairy = (2,3)
2729 2730 pairsList.append(pairx)
2730 2731 pairsList.append(pairy)
2731
2732
2732 2733 jph = numpy.array([0,0,0,0])
2733 2734 h = (hmin,hmax)
2734 2735 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2735
2736
2736 2737 # #Calculate AOA (Error N 3, 4)
2737 2738 # #JONES ET AL. 1998
2738 2739 # error = arrayParameters[:,-1]
2739 2740 # AOAthresh = numpy.pi/8
2740 2741 # phases = -arrayParameters[:,9:13]
2741 2742 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2742 #
2743 #
2743 2744 # #Calculate Heights (Error N 13 and 14)
2744 2745 # error = arrayParameters[:,-1]
2745 2746 # Ranges = arrayParameters[:,2]
2746 2747 # zenith = arrayParameters[:,5]
2747 2748 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2748 2749 # error = arrayParameters[:,-1]
2749 2750 #********************* END OF PARAMETERS CALCULATION **************************
2750
2751 #***************************+ PASS DATA TO NEXT STEP **********************
2751
2752 #***************************+ PASS DATA TO NEXT STEP **********************
2752 2753 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2753 2754 dataOut.data_param = arrayParameters
2754
2755
2755 2756 if arrayParameters is None:
2756 2757 dataOut.flagNoData = True
2757 2758 else:
2758 2759 dataOut.flagNoData = False #data_param was set, so data is available
2759
2760
2760 2761 return
2761
2762
2762 2763 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2763
2764
2764 2765 minIndex = min(newheis[0])
2765 2766 maxIndex = max(newheis[0])
2766
2767
2767 2768 voltage = voltage0[:,:,minIndex:maxIndex+1]
2768 2769 nLength = voltage.shape[1]//n #integer division so the block limits remain valid indices (Python 3)
2769 2770 nMin = 0
2770 2771 nMax = 0
2771 2772 phaseOffset = numpy.zeros((len(pairslist),n))
2772
2773
2773 2774 for i in range(n):
2774 2775 nMax += nLength
2775 2776 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2776 2777 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2778 phaseOffset[:,i] = phaseCCF.transpose()
2778 2779 nMin = nMax
2779 2780 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2780
2781
2781 2782 #Remove Outliers
2782 2783 factor = 2
2783 2784 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2784 2785 dw = numpy.std(wt,axis = 1)
2785 2786 dw = dw.reshape((dw.size,1))
2787 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2787 2788 phaseOffset[ind] = numpy.nan
2789
2789 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2790
2790 2791 return phaseOffset
2791
2792
2792 2793 def __shiftPhase(self, data, phaseShift):
2793 2794 #this will shift the phase of a complex number
2795 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2795 2796 return dataShifted
2796
2797
2797 2798 def __estimatePhaseDifference(self, array, pairslist):
2798 2799 nChannel = array.shape[0]
2799 2800 nHeights = array.shape[2]
2800 2801 numPairs = len(pairslist)
2801 2802 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2802 2803 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2803
2804
2804 2805 #Correct phases
2805 2806 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2806 2807 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2807
2808 if indDer[0].shape[0] > 0:
2808
2809 if indDer[0].shape[0] > 0:
2809 2810 for i in range(indDer[0].shape[0]):
2810 2811 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2811 2812 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2812
2813
2813 2814 # for j in range(numSides):
2814 2815 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2815 2816 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2816 #
2817 #
2817 2818 #Linear
2818 2819 phaseInt = numpy.zeros((numPairs,1))
2819 2820 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2820 2821 for j in range(numPairs):
2821 2822 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2822 2823 phaseInt[j] = fit[1]
2823 2824 #Phase Differences
2824 2825 phaseDiff = phaseInt - phaseCCF[:,2,:]
2825 2826 phaseArrival = phaseInt.reshape(phaseInt.size)
2826
2827
2827 2828 #Dealias
2828 2829 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2829 2830 # indAlias = numpy.where(phaseArrival > numpy.pi)
2830 2831 # phaseArrival[indAlias] -= 2*numpy.pi
2831 2832 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2832 2833 # phaseArrival[indAlias] += 2*numpy.pi
2833
2834
2834 2835 return phaseDiff, phaseArrival
2835
2836
2836 2837 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2837 2838 #this function will run the coherent detection used in Holdworth et al. 2004 and return the net power
2838 2839 #find the phase shifts of each channel over 1 second intervals
2839 2840 #only look at ranges below the beacon signal
2840 2841 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2841 2842 numBlocks = int(volts.shape[1]/numProfPerBlock)
2842 2843 numHeights = volts.shape[2]
2843 2844 nChannel = volts.shape[0]
2844 2845 voltsCohDet = volts.copy()
2845
2846
2846 2847 pairsarray = numpy.array(pairslist)
2847 2848 indSides = pairsarray[:,1]
2848 2849 # indSides = numpy.array(range(nChannel))
2849 2850 # indSides = numpy.delete(indSides, indCenter)
2850 #
2851 #
2851 2852 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2852 2853 listBlocks = numpy.array_split(volts, numBlocks, 1)
2853
2854
2854 2855 startInd = 0
2855 2856 endInd = 0
2856
2857
2857 2858 for i in range(numBlocks):
2858 2859 startInd = endInd
2860
2860 endInd = endInd + listBlocks[i].shape[1]
2861
2861 2862 arrayBlock = listBlocks[i]
2862 2863 # arrayBlockCenter = listCenter[i]
2863
2864
2864 2865 #Estimate the Phase Difference
2865 2866 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2866 2867 #Phase Difference RMS
2867 2868 arrayPhaseRMS = numpy.abs(phaseDiff)
2868 2869 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2869 2870 indPhase = numpy.where(phaseRMSaux==4)
2870 2871 #Shifting
2871 2872 if indPhase[0].shape[0] > 0:
2872 2873 for j in range(indSides.size):
2873 2874 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2874 2875 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2875
2876
2876 2877 return voltsCohDet
2877
2878
2878 2879 def __calculateCCF(self, volts, pairslist ,laglist):
2879
2880
2880 2881 nHeights = volts.shape[2]
2882 nPoints = volts.shape[1]
2882 2883 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
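# Note: for every channel pair and every lag in laglist, the lagged cross-correlation is the
# sum over profiles of conj(volts1) * volts2 shifted by that lag (zero-padded at the edges),
# evaluated independently at each height.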
2883
2884
2884 2885 for i in range(len(pairslist)):
2885 2886 volts1 = volts[pairslist[i][0]]
2887
2887 volts2 = volts[pairslist[i][1]]
2888
2888 2889 for t in range(len(laglist)):
2890 idxT = laglist[t]
2890 2891 if idxT >= 0:
2891 2892 vStacked = numpy.vstack((volts2[idxT:,:],
2892 2893 numpy.zeros((idxT, nHeights),dtype='complex')))
2893 2894 else:
2894 2895 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2895 2896 volts2[:(nPoints + idxT),:]))
2896 2897 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2897
2898
2898 2899 vStacked = None
2899 2900 return voltsCCF
2900
2901
2901 2902 def __getNoise(self, power, timeSegment, timeInterval):
2902 2903 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2903 2904 numBlocks = int(power.shape[0]/numProfPerBlock)
2904 2905 numHeights = power.shape[1]
2905 2906
2906 2907 listPower = numpy.array_split(power, numBlocks, 0)
2907 2908 noise = numpy.zeros((power.shape[0], power.shape[1]))
2908 2909 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2909
2910
2910 2911 startInd = 0
2911 2912 endInd = 0
2912
2913
2913 2914 for i in range(numBlocks): #split per channel
2914 2915 startInd = endInd
2916
2916 endInd = endInd + listPower[i].shape[0]
2917
2917 2918 arrayBlock = listPower[i]
2918 2919 noiseAux = numpy.mean(arrayBlock, 0)
2919 2920 # noiseAux = numpy.median(noiseAux)
2920 2921 # noiseAux = numpy.mean(arrayBlock)
2922
2922 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2923
2923 2924 noiseAux1 = numpy.mean(arrayBlock)
2925
2925 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2926
2926 2927 return noise, noise1
2927
2928
2928 2929 def __findMeteors(self, power, thresh):
2929 2930 nProf = power.shape[0]
2930 2931 nHeights = power.shape[1]
2931 2932 listMeteors = []
2932
2933
2933 2934 for i in range(nHeights):
2934 2935 powerAux = power[:,i]
2935 2936 threshAux = thresh[:,i]
2936
2937
2937 2938 indUPthresh = numpy.where(powerAux > threshAux)[0]
2938 2939 indDNthresh = numpy.where(powerAux <= threshAux)[0]
2939
2940
2940 2941 j = 0
2941
2942
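# Note: a candidate meteor starts where at least three consecutive profiles exceed the
# threshold (indUPthresh[j + 2] == indUPthresh[j] + 2) and ends just before the next
# sub-threshold profile; each stored entry is [height index, start, peak, end, FLA].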
2942 2943 while (j < indUPthresh.size - 2):
2943 2944 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
2944 2945 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
2945 2946 indDNthresh = indDNthresh[indDNAux]
2946
2947
2947 2948 if (indDNthresh.size > 0):
2948 2949 indEnd = indDNthresh[0] - 1
2949 2950 indInit = indUPthresh[j]
2950
2951
2951 2952 meteor = powerAux[indInit:indEnd + 1]
2952 2953 indPeak = meteor.argmax() + indInit
2953 2954 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
2954
2955
2955 2956 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK!!!!!
2956 2957 j = numpy.where(indUPthresh == indEnd)[0] + 1
2957 2958 else: j+=1
2958 2959 else: j+=1
2959
2960
2960 2961 return listMeteors
2961
2962
2962 2963 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
2963
2964 arrayMeteors = numpy.asarray(listMeteors)
2964
2965 arrayMeteors = numpy.asarray(listMeteors)
2965 2966 listMeteors1 = []
2966
2967
2967 2968 while arrayMeteors.shape[0] > 0:
2968 2969 FLAs = arrayMeteors[:,4]
2969 2970 maxFLA = FLAs.argmax()
2970 2971 listMeteors1.append(arrayMeteors[maxFLA,:])
2971
2972
2972 2973 MeteorInitTime = arrayMeteors[maxFLA,1]
2973 2974 MeteorEndTime = arrayMeteors[maxFLA,3]
2974 2975 MeteorHeight = arrayMeteors[maxFLA,0]
2975
2976
2976 2977 #Check neighborhood
2977 2978 maxHeightIndex = MeteorHeight + rangeLimit
2978 2979 minHeightIndex = MeteorHeight - rangeLimit
2979 2980 minTimeIndex = MeteorInitTime - timeLimit
2980 2981 maxTimeIndex = MeteorEndTime + timeLimit
2981
2982
2982 2983 #Check Heights
2983 2984 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
2984 2985 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
2985 2986 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
2986
2987
2987 2988 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
2988
2989
2989 2990 return listMeteors1
2990
2991
2991 2992 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
2992 2993 numHeights = volts.shape[2]
2993 2994 nChannel = volts.shape[0]
2994
2995
2995 2996 thresholdPhase = thresh[0]
2996 2997 thresholdNoise = thresh[1]
2997 2998 thresholdDB = float(thresh[2])
2998
2999
2999 3000 thresholdDB1 = 10**(thresholdDB/10)
3000 3001 pairsarray = numpy.array(pairslist)
3001 3002 indSides = pairsarray[:,1]
3002
3003
3003 3004 pairslist1 = list(pairslist)
3004 3005 pairslist1.append((0,1))
3005 3006 pairslist1.append((3,4))
3006 3007
3007 3008 listMeteors1 = []
3008 3009 listPowerSeries = []
3009 3010 listVoltageSeries = []
3010 3011 #volts has the raw data
3011
3012
3012 3013 if frequency == 30e6:
3013 3014 timeLag = 45*10**-3
3014 3015 else:
3015 3016 timeLag = 15*10**-3
3016 3017 lag = numpy.ceil(timeLag/timeInterval)
3017
3018
3018 3019 for i in range(len(listMeteors)):
3019
3020
3020 3021 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3021 3022 meteorAux = numpy.zeros(16)
3022
3023
3023 3024 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3024 3025 mHeight = listMeteors[i][0]
3025 3026 mStart = listMeteors[i][1]
3026 3027 mPeak = listMeteors[i][2]
3027 3028 mEnd = listMeteors[i][3]
3028
3029
3029 3030 #get the volt data between the start and end times of the meteor
3030 3031 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3031 3032 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3032
3033
3033 3034 #3.6. Phase Difference estimation
3034 3035 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3035
3036
3036 3037 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3037 3038 #meteorVolts0.- all Channels, all Profiles
3038 3039 meteorVolts0 = volts[:,:,mHeight]
3039 3040 meteorThresh = noise[:,mHeight]*thresholdNoise
3040 3041 meteorNoise = noise[:,mHeight]
3041 3042 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3042 3043 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3043
3044
3044 3045 #Times reestimation
3045 3046 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3046 3047 if mStart1.size > 0:
3047 3048 mStart1 = mStart1[-1] + 1
3048
3049
3050 else:
3050 3051 mStart1 = mPeak
3051
3052
3052 3053 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3053 3054 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3054 3055 if mEndDecayTime1.size == 0:
3055 3056 mEndDecayTime1 = powerNet0.size
3056 3057 else:
3057 3058 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3058 3059 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3059
3060
3060 3061 #meteorVolts1.- all Channels, from start to end
3061 3062 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3062 3063 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3063 3064 if meteorVolts2.shape[1] == 0:
3064 3065 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3065 3066 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3066 3067 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3067 3068 ##################### END PARAMETERS REESTIMATION #########################
3068
3069
3069 3070 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3070 3071 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3072 if meteorVolts2.shape[1] > 0:
3072 3073 #Phase Difference re-estimation
3073 3074 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3074 3075 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3075 3076 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3076 3077 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3077 3078 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3078
3079
3079 3080 #Phase Difference RMS
3080 3081 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3081 3082 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3082 3083 #Data from Meteor
3083 3084 mPeak1 = powerNet1.argmax() + mStart1
3084 3085 mPeakPower1 = powerNet1.max()
3085 3086 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3086 3087 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3087 3088 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3088 3089 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3089 3090 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3090 3091 #Vectorize
3091 3092 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3092 3093 meteorAux[7:11] = phaseDiffint[0:4]
3093
3094
3094 3095 #Rejection Criterions
3095 3096 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3096 3097 meteorAux[-1] = 17
3097 3098 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3098 3099 meteorAux[-1] = 1
3099
3100
3101 else:
3100
3101
3102 else:
3102 3103 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3103 3104 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3104 3105 PowerSeries = 0
3105
3106
3106 3107 listMeteors1.append(meteorAux)
3107 3108 listPowerSeries.append(PowerSeries)
3108 3109 listVoltageSeries.append(meteorVolts1)
3109
3110
3111 return listMeteors1, listPowerSeries, listVoltageSeries
3112
3112 3113 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3113
3114
3114 3115 threshError = 10
3115 3116 #Depending if it is 30 or 50 MHz
3116 3117 if frequency == 30e6:
3117 3118 timeLag = 45*10**-3
3118 3119 else:
3119 3120 timeLag = 15*10**-3
3120 3121 lag = numpy.ceil(timeLag/timeInterval)
3121
3122
3122 3123 listMeteors1 = []
3123
3124
3124 3125 for i in range(len(listMeteors)):
3125 3126 meteorPower = listPower[i]
3126 3127 meteorAux = listMeteors[i]
3127
3128
3128 3129 if meteorAux[-1] == 0:
3129 3130
3131 try:
3131 3132 indmax = meteorPower.argmax()
3132 3133 indlag = indmax + lag
3133
3134
3134 3135 y = meteorPower[indlag:]
3135 3136 x = numpy.arange(0, y.size)*timeLag
3136
3137
3137 3138 #first guess
3138 3139 a = y[0]
3139 3140 tau = timeLag
3140 3141 #exponential fit
3141 3142 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3142 3143 y1 = self.__exponential_function(x, *popt)
3143 3144 #error estimation
3144 3145 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3145
3146
3146 3147 decayTime = popt[1]
3147 3148 riseTime = indmax*timeInterval
3148 3149 meteorAux[11:13] = [decayTime, error]
3149
3150
3150 3151 #Table items 7, 8 and 11
3151 3152 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3152 meteorAux[-1] = 7
3153 meteorAux[-1] = 7
3153 3154 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3154 3155 meteorAux[-1] = 8
3155 3156 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3156 meteorAux[-1] = 11
3157
3158
3157 meteorAux[-1] = 11
3158
3159
3159 3160 except:
3160 meteorAux[-1] = 11
3161
3162
3161 meteorAux[-1] = 11
3162
3163
3163 3164 listMeteors1.append(meteorAux)
3164
3165
3165 3166 return listMeteors1
3166 3167
3167 3168 #Exponential Function
3168 3169
3169 3170 def __exponential_function(self, x, a, tau):
3170 3171 y = a*numpy.exp(-x/tau)
3171 3172 return y
3172
3173
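# A standalone sketch of the decay-time fit used in __estimateDecayTime above: an
# exponential a*exp(-x/tau) is fitted to the post-peak meteor power with
# scipy.optimize.curve_fit. The sample data below is synthetic and only illustrative.
import numpy
from scipy import optimize

def exponential_function(x, a, tau):
    return a * numpy.exp(-x / tau)

timeInterval = 0.002                                               # seconds per sample (assumed)
power = 5.0 * numpy.exp(-numpy.arange(50) * timeInterval / 0.03)   # synthetic decaying echo power
x = numpy.arange(power.size) * timeInterval
popt, pcov = optimize.curve_fit(exponential_function, x, power, p0=[power[0], timeInterval])
decayTime = popt[1]                                                # tau, in seconds
fitError = sum((power - exponential_function(x, *popt))**2) / (numpy.var(power) * (power.size - popt.size))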
3173 3174 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3174
3175
3175 3176 pairslist1 = list(pairslist)
3176 3177 pairslist1.append((0,1))
3177 3178 pairslist1.append((3,4))
3178 3179 numPairs = len(pairslist1)
3179 3180 #Time Lag
3180 3181 timeLag = 45*10**-3
3181 3182 c = 3e8
3182 3183 lag = numpy.ceil(timeLag/timeInterval)
3183 3184 freq = 30e6
3184
3185
3185 3186 listMeteors1 = []
3186
3187
3187 3188 for i in range(len(listMeteors)):
3188 3189 meteorAux = listMeteors[i]
3189 3190 if meteorAux[-1] == 0:
3190 3191 mStart = listMeteors[i][1]
3191 mPeak = listMeteors[i][2]
3192 mPeak = listMeteors[i][2]
3192 3193 mLag = mPeak - mStart + lag
3193
3194
3194 3195 #get the volt data between the start and end times of the meteor
3195 3196 meteorVolts = listVolts[i]
3196 3197 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3197 3198
3198 3199 #Get CCF
3199 3200 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3200
3201
3201 3202 #Method 2
3202 3203 slopes = numpy.zeros(numPairs)
3203 3204 time = numpy.array([-2,-1,1,2])*timeInterval
3204 3205 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3205
3206
3206 3207 #Correct phases
3207 3208 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3208 3209 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3209
3210 if indDer[0].shape[0] > 0:
3210
3211 if indDer[0].shape[0] > 0:
3211 3212 for i in range(indDer[0].shape[0]):
3212 3213 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3213 3214 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3214 3215
3215 3216 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3216 3217 for j in range(numPairs):
3217 3218 fit = stats.linregress(time, angAllCCF[j,:])
3218 3219 slopes[j] = fit[0]
3219
3220
3220 3221 #Remove Outlier
3221 3222 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3222 3223 # slopes = numpy.delete(slopes,indOut)
3223 3224 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3224 3225 # slopes = numpy.delete(slopes,indOut)
3225
3226
3226 3227 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3227 3228 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3228 3229 meteorAux[-2] = radialError
3229 3230 meteorAux[-3] = radialVelocity
3230
3231
3231 3232 #Setting Error
3232 3233 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3233 if numpy.abs(radialVelocity) > 200:
3234 if numpy.abs(radialVelocity) > 200:
3234 3235 meteorAux[-1] = 15
3235 3236 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3236 3237 elif radialError > radialStdThresh:
3237 3238 meteorAux[-1] = 12
3238
3239
3239 3240 listMeteors1.append(meteorAux)
3240 3241 return listMeteors1
3241
3242
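# Sketch of the Doppler estimate in __getRadialVelocity above: the phase of the
# correlation at a few lags is fitted with a straight line, and the slope (rad/s) is
# mapped to a radial velocity via v = -slope * lambda / (4*pi). Values are illustrative.
import numpy
from scipy import stats

c = 3e8
freq = 30e6                                          # assumed operating frequency (Hz)
timeInterval = 0.002                                 # seconds between lags (assumed)
lags = numpy.array([-2, -1, 1, 2]) * timeInterval
ccfPhase = numpy.array([-0.40, -0.20, 0.20, 0.40])   # unwrapped CCF phases (rad), synthetic

slope = stats.linregress(lags, ccfPhase)[0]          # rad/s
radialVelocity = -slope * (0.25 / numpy.pi) * (c / freq)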
3242 3243 def __setNewArrays(self, listMeteors, date, heiRang):
3243
3244
3244 3245 #New arrays
3245 3246 arrayMeteors = numpy.array(listMeteors)
3246 3247 arrayParameters = numpy.zeros((len(listMeteors), 13))
3247
3248
3248 3249 #Date inclusion
3249 3250 # date = re.findall(r'\((.*?)\)', date)
3250 3251 # date = date[0].split(',')
3251 3252 # date = map(int, date)
3252 #
3253 #
3253 3254 # if len(date)<6:
3254 3255 # date.append(0)
3255 #
3256 #
3256 3257 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3257 3258 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3258 3259 arrayDate = numpy.tile(date, (len(listMeteors)))
3259
3260
3260 3261 #Meteor array
3261 3262 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3262 3263 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3263
3264
3264 3265 #Parameters Array
3265 3266 arrayParameters[:,0] = arrayDate #Date
3266 3267 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3267 3268 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3268 3269 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3269 3270 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3270 3271
3271
3272
3272 3273 return arrayParameters
3273
3274
3274 3275 class CorrectSMPhases(Operation):
3275
3276
3276 3277 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3277
3278
3278 3279 arrayParameters = dataOut.data_param
3279 3280 pairsList = []
3280 3281 pairx = (0,1)
3281 3282 pairy = (2,3)
3282 3283 pairsList.append(pairx)
3283 3284 pairsList.append(pairy)
3284 3285 jph = numpy.zeros(4)
3285
3286
3286 3287 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3287 3288 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3288 3289 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3289
3290
3290 3291 meteorOps = SMOperations()
3291 3292 if channelPositions is None:
3292 3293 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3293 3294 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3294
3295
3295 3296 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3296 3297 h = (hmin,hmax)
3297
3298
3298 3299 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3299
3300
3300 3301 dataOut.data_param = arrayParameters
3301 3302 return
3302 3303
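# The phase correction above relies on the wrap-to-(-pi, pi] idiom
# numpy.angle(numpy.exp(1j*phase)); a minimal illustration with made-up values:
import numpy

phases = numpy.array([170.0, -170.0, 10.0]) * numpy.pi / 180    # measured phases (rad)
offsets = numpy.array([30.0, -30.0, 5.0]) * numpy.pi / 180      # calibration offsets (rad)
corrected = numpy.angle(numpy.exp(1j * (phases + offsets)))     # stays inside (-pi, pi]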
3303 3304 class SMPhaseCalibration(Operation):
3304
3305
3305 3306 __buffer = None
3306 3307
3307 3308 __initime = None
3308 3309
3309 3310 __dataReady = False
3310
3311
3311 3312 __isConfig = False
3312
3313
3313 3314 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3314
3315
3315 3316 dataTime = currentTime + paramInterval
3316 3317 deltaTime = dataTime - initTime
3317
3318
3318 3319 if deltaTime >= outputInterval or deltaTime < 0:
3319 3320 return True
3320
3321
3321 3322 return False
3322
3323
3323 3324 def __getGammas(self, pairs, d, phases):
3324 3325 gammas = numpy.zeros(2)
3325
3326
3326 3327 for i in range(len(pairs)):
3327
3328
3328 3329 pairi = pairs[i]
3329 3330
3330 3331 phip3 = phases[:,pairi[0]]
3331 3332 d3 = d[pairi[0]]
3332 3333 phip2 = phases[:,pairi[1]]
3333 3334 d2 = d[pairi[1]]
3334 3335 #Calculating gamma
3335 3336 # jdcos = alp1/(k*d1)
3336 3337 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3337 3338 jgamma = -phip2*d3/d2 - phip3
3338 3339 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3339 3340 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3340 3341 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3341
3342
3342 3343 #Revised distribution
3343 3344 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3344 3345
3345 3346 #Histogram
3346 3347 nBins = 64
3347 3348 rmin = -0.5*numpy.pi
3348 3349 rmax = 0.5*numpy.pi
3349 3350 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3350
3351
3351 3352 meteorsY = phaseHisto[0]
3352 3353 phasesX = phaseHisto[1][:-1]
3353 3354 width = phasesX[1] - phasesX[0]
3354 3355 phasesX += width/2
3355
3356
3356 3357 #Gaussian approximation
3357 3358 bpeak = meteorsY.argmax()
3358 3359 peak = meteorsY.max()
3359 3360 jmin = bpeak - 5
3360 3361 jmax = bpeak + 5 + 1
3361
3362
3362 3363 if jmin<0:
3363 3364 jmin = 0
3364 3365 jmax = 6
3365 3366 elif jmax > meteorsY.size:
3366 3367 jmin = meteorsY.size - 6
3367 3368 jmax = meteorsY.size
3368
3369
3369 3370 x0 = numpy.array([peak,bpeak,50])
3370 3371 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3371
3372
3372 3373 #Gammas
3373 3374 gammas[i] = coeff[0][1]
3374
3375
3375 3376 return gammas
3376
3377
3377 3378 def __residualFunction(self, coeffs, y, t):
3378
3379
3379 3380 return y - self.__gauss_function(t, coeffs)
3380 3381
3381 3382 def __gauss_function(self, t, coeffs):
3382
3383
3383 3384 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3384 3385
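# Sketch of the histogram + Gaussian least-squares step used in __getGammas above:
# the peak position of the fitted Gaussian is taken as the phase offset (gamma).
# The histogram below is synthetic; the leastsq usage mirrors the call above.
import numpy
from scipy import optimize

def gauss(t, coeffs):
    return coeffs[0] * numpy.exp(-0.5 * ((t - coeffs[1]) / coeffs[2])**2)

def residual(coeffs, y, t):
    return y - gauss(t, coeffs)

phasesX = numpy.linspace(-0.5 * numpy.pi, 0.5 * numpy.pi, 64)
meteorsY = gauss(phasesX, [50.0, 0.3, 0.2]) + numpy.random.rand(64)   # synthetic histogram counts
x0 = numpy.array([meteorsY.max(), phasesX[meteorsY.argmax()], 0.2])   # peak, centre, width guess
coeff = optimize.leastsq(residual, x0, args=(meteorsY, phasesX))
gamma = coeff[0][1]                                                   # fitted peak position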
3385 3386 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3386 3387 meteorOps = SMOperations()
3387 3388 nchan = 4
3388 3389 pairx = pairsList[0] #x es 0
3389 3390 pairy = pairsList[1] #y es 1
3390 3391 center_xangle = 0
3391 3392 center_yangle = 0
3392 3393 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3393 3394 ntimes = len(range_angle)
3394 3395
3395 3396 nstepsx = 20
3396 3397 nstepsy = 20
3397 3398
3398 3399 for iz in range(ntimes):
3399 3400 min_xangle = -range_angle[iz]/2 + center_xangle
3400 3401 max_xangle = range_angle[iz]/2 + center_xangle
3401 3402 min_yangle = -range_angle[iz]/2 + center_yangle
3402 3403 max_yangle = range_angle[iz]/2 + center_yangle
3403
3404
3404 3405 inc_x = (max_xangle-min_xangle)/nstepsx
3405 3406 inc_y = (max_yangle-min_yangle)/nstepsy
3406
3407
3407 3408 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3408 3409 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3409 3410 penalty = numpy.zeros((nstepsx,nstepsy))
3410 3411 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3411 3412 jph = numpy.zeros(nchan)
3412
3413
3413 3414 # Iterations looking for the offset
3414 3415 for iy in range(int(nstepsy)):
3415 3416 for ix in range(int(nstepsx)):
3416 3417 d3 = d[pairsList[1][0]]
3417 3418 d2 = d[pairsList[1][1]]
3418 3419 d5 = d[pairsList[0][0]]
3419 3420 d4 = d[pairsList[0][1]]
3420
3421
3421 3422 alp2 = alpha_y[iy] #gamma 1
3422 alp4 = alpha_x[ix] #gamma 0
3423
3423 alp4 = alpha_x[ix] #gamma 0
3424
3424 3425 alp3 = -alp2*d3/d2 - gammas[1]
3425 3426 alp5 = -alp4*d5/d4 - gammas[0]
3426 3427 # jph[pairy[1]] = alpha_y[iy]
3427 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3428
3428 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3429
3429 3430 # jph[pairx[1]] = alpha_x[ix]
3430 3431 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3431 3432 jph[pairsList[0][1]] = alp4
3432 3433 jph[pairsList[0][0]] = alp5
3433 3434 jph[pairsList[1][0]] = alp3
3434 jph[pairsList[1][1]] = alp2
3435 jph[pairsList[1][1]] = alp2
3435 3436 jph_array[:,ix,iy] = jph
3436 3437 # d = [2.0,2.5,2.5,2.0]
3437 #still need to check whether the meteors are read correctly
3438 #still need to check whether the meteors are read correctly
3438 3439 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3439 3440 error = meteorsArray1[:,-1]
3440 3441 ind1 = numpy.where(error==0)[0]
3441 3442 penalty[ix,iy] = ind1.size
3442
3443
3443 3444 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3444 3445 phOffset = jph_array[:,i,j]
3445
3446
3446 3447 center_xangle = phOffset[pairx[1]]
3447 3448 center_yangle = phOffset[pairy[1]]
3448
3449
3449 3450 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3450 phOffset = phOffset*180/numpy.pi
3451 phOffset = phOffset*180/numpy.pi
3451 3452 return phOffset
3452
3453
3453
3454
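# A minimal sketch of the coarse-to-fine 2-D search pattern used in __getPhases above:
# each pass evaluates a score on an nsteps x nsteps grid, re-centres on the best cell
# and shrinks the range. The score function here is a stand-in for the number of
# meteors that come out of getMeteorParams with error == 0.
import numpy

def score(ax, ay):                      # stand-in for "number of valid meteors"
    return -((ax - 0.7)**2 + (ay + 0.3)**2)

range_angle = numpy.array([10*numpy.pi, numpy.pi, numpy.pi/2, numpy.pi/4])
nsteps = 20
cx = cy = 0.0
for r in range_angle:
    ax = numpy.linspace(cx - r/2, cx + r/2, nsteps)
    ay = numpy.linspace(cy - r/2, cy + r/2, nsteps)
    penalty = numpy.array([[score(x, y) for y in ay] for x in ax])
    i, j = numpy.unravel_index(penalty.argmax(), penalty.shape)
    cx, cy = ax[i], ay[j]               # re-centre for the next, finer pass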
3454 3455 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3455
3456
3456 3457 dataOut.flagNoData = True
3457 self.__dataReady = False
3458 self.__dataReady = False
3458 3459 dataOut.outputInterval = nHours*3600
3459
3460
3460 3461 if self.__isConfig == False:
3461 3462 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3462 3463 #Get Initial LTC time
3463 3464 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3464 3465 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3465 3466
3466 3467 self.__isConfig = True
3467
3468
3468 3469 if self.__buffer is None:
3469 3470 self.__buffer = dataOut.data_param.copy()
3470 3471
3471 3472 else:
3472 3473 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3473
3474
3474 3475 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3475
3476
3476 3477 if self.__dataReady:
3477 3478 dataOut.utctimeInit = self.__initime
3478 3479 self.__initime += dataOut.outputInterval #to erase time offset
3479
3480
3480 3481 freq = dataOut.frequency
3481 3482 c = dataOut.C #m/s
3482 3483 lamb = c/freq
3483 3484 k = 2*numpy.pi/lamb
3484 3485 azimuth = 0
3485 3486 h = (hmin, hmax)
3486 3487 # pairs = ((0,1),(2,3)) #Estrella
3487 3488 # pairs = ((1,0),(2,3)) #T
3488 3489
3489 3490 if channelPositions is None:
3490 3491 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3491 3492 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3492 3493 meteorOps = SMOperations()
3493 3494 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3494 3495
3495 3496 #Checking correct order of pairs
3496 3497 pairs = []
3497 3498 if distances[1] > distances[0]:
3498 3499 pairs.append((1,0))
3499 3500 else:
3500 3501 pairs.append((0,1))
3501
3502
3502 3503 if distances[3] > distances[2]:
3503 3504 pairs.append((3,2))
3504 3505 else:
3505 3506 pairs.append((2,3))
3506 3507 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3507
3508
3508 3509 meteorsArray = self.__buffer
3509 3510 error = meteorsArray[:,-1]
3510 3511 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3511 3512 ind1 = numpy.where(boolError)[0]
3512 3513 meteorsArray = meteorsArray[ind1,:]
3513 3514 meteorsArray[:,-1] = 0
3514 3515 phases = meteorsArray[:,8:12]
3515
3516
3516 3517 #Calculate Gammas
3517 3518 gammas = self.__getGammas(pairs, distances, phases)
3518 3519 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3519 3520 #Calculate Phases
3520 3521 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3521 3522 phasesOff = phasesOff.reshape((1,phasesOff.size))
3522 3523 dataOut.data_output = -phasesOff
3523 3524 dataOut.flagNoData = False
3524 3525 self.__buffer = None
3525
3526
3526
3527
3527 3528 return
3528
3529
3529 3530 class SMOperations():
3530
3531
3531 3532 def __init__(self):
3532
3533
3533 3534 return
3534
3535
3535 3536 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3536
3537
3537 3538 arrayParameters = arrayParameters0.copy()
3538 3539 hmin = h[0]
3539 3540 hmax = h[1]
3540
3541
3541 3542 #Calculate AOA (Error N 3, 4)
3542 3543 #JONES ET AL. 1998
3543 3544 AOAthresh = numpy.pi/8
3544 3545 error = arrayParameters[:,-1]
3545 3546 phases = -arrayParameters[:,8:12] + jph
3546 3547 # phases = numpy.unwrap(phases)
3547 3548 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3548
3549
3549 3550 #Calculate Heights (Error N 13 and 14)
3550 3551 error = arrayParameters[:,-1]
3551 3552 Ranges = arrayParameters[:,1]
3552 3553 zenith = arrayParameters[:,4]
3553 3554 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3554
3555
3555 3556 #----------------------- Get Final data ------------------------------------
3556 3557 # error = arrayParameters[:,-1]
3557 3558 # ind1 = numpy.where(error==0)[0]
3558 3559 # arrayParameters = arrayParameters[ind1,:]
3559
3560
3560 3561 return arrayParameters
3561
3562
3562 3563 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3563
3564
3564 3565 arrayAOA = numpy.zeros((phases.shape[0],3))
3565 3566 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3566
3567
3567 3568 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3568 3569 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3569 3570 arrayAOA[:,2] = cosDirError
3570
3571
3571 3572 azimuthAngle = arrayAOA[:,0]
3572 3573 zenithAngle = arrayAOA[:,1]
3573
3574
3574 3575 #Setting Error
3575 3576 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3576 3577 error[indError] = 0
3577 3578 #Number 3: AOA not feasible
3578 3579 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3579 error[indInvalid] = 3
3580 error[indInvalid] = 3
3580 3581 #Number 4: Large difference in AOAs obtained from different antenna baselines
3581 3582 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3582 error[indInvalid] = 4
3583 error[indInvalid] = 4
3583 3584 return arrayAOA, error
3584
3585
3585 3586 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3586
3587
3587 3588 #Initializing some variables
3588 3589 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3589 3590 ang_aux = ang_aux.reshape(1,ang_aux.size)
3590
3591
3591 3592 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3592 3593 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3593
3594
3594
3595
3595 3596 for i in range(2):
3596 3597 ph0 = arrayPhase[:,pairsList[i][0]]
3597 3598 ph1 = arrayPhase[:,pairsList[i][1]]
3598 3599 d0 = distances[pairsList[i][0]]
3599 3600 d1 = distances[pairsList[i][1]]
3600
3601 ph0_aux = ph0 + ph1
3601
3602 ph0_aux = ph0 + ph1
3602 3603 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3603 3604 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3604 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3605 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3605 3606 #First Estimation
3606 3607 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3607
3608
3608 3609 #Most-Accurate Second Estimation
3609 3610 phi1_aux = ph0 - ph1
3610 3611 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3611 3612 #Direction Cosine 1
3612 3613 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3613
3614
3614 3615 #Searching the correct Direction Cosine
3615 3616 cosdir0_aux = cosdir0[:,i]
3616 3617 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3617 3618 #Minimum Distance
3618 3619 cosDiff = (cosdir1 - cosdir0_aux)**2
3619 3620 indcos = cosDiff.argmin(axis = 1)
3620 3621 #Saving Value obtained
3621 3622 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3622
3623
3623 3624 return cosdir0, cosdir
3624
3625
3625 3626 def __calculateAOA(self, cosdir, azimuth):
3626 3627 cosdirX = cosdir[:,0]
3627 3628 cosdirY = cosdir[:,1]
3628
3629
3629 3630 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3630 3631 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3631 3632 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3632
3633
3633 3634 return angles
3634
3635
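# Sketch of the direction-cosine to angle conversion in __calculateAOA above
# (0 deg = north, 90 deg = east); the cosines below are arbitrary example values.
import numpy

cosdirX, cosdirY = 0.3, 0.4
zenith = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2)) * 180 / numpy.pi
azimuth = numpy.arctan2(cosdirX, cosdirY) * 180 / numpy.pi    # plus any pointing offset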
3635 3636 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3636
3637
3637 3638 Ramb = 375 #Ramb = c/(2*PRF)
3638 3639 Re = 6371 #Earth Radius
3639 3640 heights = numpy.zeros(Ranges.shape)
3640
3641
3641 3642 R_aux = numpy.array([0,1,2])*Ramb
3642 3643 R_aux = R_aux.reshape(1,R_aux.size)
3643 3644
3644 3645 Ranges = Ranges.reshape(Ranges.size,1)
3645
3646
3646 3647 Ri = Ranges + R_aux
3647 3648 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3648
3649
3649 3650 #Check if there is a height between 70 and 110 km
3650 3651 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3651 3652 ind_h = numpy.where(h_bool == 1)[0]
3652
3653
3653 3654 hCorr = hi[ind_h, :]
3654 3655 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3655
3656
3656 3657 hCorr = hi[ind_hCorr][:len(ind_h)]
3657 3658 heights[ind_h] = hCorr
3658
3659
3659 3660 #Setting Error
3660 3661 #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3661 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3662 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3662 3663 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3663 3664 error[indError] = 0
3664 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3665 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3665 3666 error[indInvalid2] = 14
3666 3667 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3667 error[indInvalid1] = 13
3668
3668 error[indInvalid1] = 13
3669
3669 3670 return heights, error
3670
3671
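# Sketch of the height-dealiasing step in __getHeights above: each measured range is
# tried with 0, 1 and 2 range ambiguities (Ramb = c/(2*PRF)) and the candidate whose
# geometric height falls in the meteor region (70-110 km) is kept. Example values only.
import numpy

Ramb, Re = 375.0, 6371.0                      # km, as in the method above
zenith = 45.0                                 # deg, example value
Range = 120.0                                 # measured (possibly aliased) range, km

Ri = Range + numpy.arange(3) * Ramb
hi = numpy.sqrt(Re**2 + Ri**2 + 2*Re*numpy.cos(zenith*numpy.pi/180)*Ri) - Re
valid = (hi > 70) & (hi < 110)
height = hi[valid][0] if valid.sum() == 1 else numpy.nan   # ambiguous/unresolvable otherwise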
3671 3672 def getPhasePairs(self, channelPositions):
3672 3673 chanPos = numpy.array(channelPositions)
3673 3674 listOper = list(itertools.combinations(list(range(5)),2))
3674
3675
3675 3676 distances = numpy.zeros(4)
3676 3677 axisX = []
3677 3678 axisY = []
3678 3679 distX = numpy.zeros(3)
3679 3680 distY = numpy.zeros(3)
3680 3681 ix = 0
3681 3682 iy = 0
3682
3683
3683 3684 pairX = numpy.zeros((2,2))
3684 3685 pairY = numpy.zeros((2,2))
3685
3686
3686 3687 for i in range(len(listOper)):
3687 3688 pairi = listOper[i]
3688
3689
3689 3690 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3690
3691
3691 3692 if posDif[0] == 0:
3692 3693 axisY.append(pairi)
3693 3694 distY[iy] = posDif[1]
3694 3695 iy += 1
3695 3696 elif posDif[1] == 0:
3696 3697 axisX.append(pairi)
3697 3698 distX[ix] = posDif[0]
3698 3699 ix += 1
3699
3700
3700 3701 for i in range(2):
3701 3702 if i==0:
3702 3703 dist0 = distX
3703 3704 axis0 = axisX
3704 3705 else:
3705 3706 dist0 = distY
3706 3707 axis0 = axisY
3707
3708
3708 3709 side = numpy.argsort(dist0)[:-1]
3709 3710 axis0 = numpy.array(axis0)[side,:]
3710 3711 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3711 3712 axis1 = numpy.unique(numpy.reshape(axis0,4))
3712 3713 side = axis1[axis1 != chanC]
3713 3714 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3714 3715 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3715 if diff1<0:
3716 if diff1<0:
3716 3717 chan2 = side[0]
3717 3718 d2 = numpy.abs(diff1)
3718 3719 chan1 = side[1]
3719 3720 d1 = numpy.abs(diff2)
3720 3721 else:
3721 3722 chan2 = side[1]
3722 3723 d2 = numpy.abs(diff2)
3723 3724 chan1 = side[0]
3724 3725 d1 = numpy.abs(diff1)
3725
3726
3726 3727 if i==0:
3727 3728 chanCX = chanC
3728 3729 chan1X = chan1
3729 3730 chan2X = chan2
3730 3731 distances[0:2] = numpy.array([d1,d2])
3731 3732 else:
3732 3733 chanCY = chanC
3733 3734 chan1Y = chan1
3734 3735 chan2Y = chan2
3735 3736 distances[2:4] = numpy.array([d1,d2])
3736 3737 # axisXsides = numpy.reshape(axisX[ix,:],4)
3737 #
3738 #
3738 3739 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3739 3740 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3740 #
3741 #
3741 3742 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3742 3743 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3743 3744 # channel25X = int(pairX[0,ind25X])
3744 3745 # channel20X = int(pairX[1,ind20X])
3745 3746 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3746 3747 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3747 3748 # channel25Y = int(pairY[0,ind25Y])
3748 3749 # channel20Y = int(pairY[1,ind20Y])
3749
3750
3750 3751 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3751 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3752
3752 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3753
3753 3754 return pairslist, distances
3754 3755 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3755 #
3756 #
3756 3757 # arrayAOA = numpy.zeros((phases.shape[0],3))
3757 3758 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3758 #
3759 #
3759 3760 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3760 3761 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3761 3762 # arrayAOA[:,2] = cosDirError
3762 #
3763 #
3763 3764 # azimuthAngle = arrayAOA[:,0]
3764 3765 # zenithAngle = arrayAOA[:,1]
3765 #
3766 #
3766 3767 # #Setting Error
3767 3768 # #Number 3: AOA not fesible
3768 3769 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3769 # error[indInvalid] = 3
3770 # error[indInvalid] = 3
3770 3771 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3771 3772 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3772 # error[indInvalid] = 4
3773 # error[indInvalid] = 4
3773 3774 # return arrayAOA, error
3774 #
3775 #
3775 3776 # def __getDirectionCosines(self, arrayPhase, pairsList):
3776 #
3777 #
3777 3778 # #Initializing some variables
3778 3779 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3779 3780 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3780 #
3781 #
3781 3782 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3782 3783 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3783 #
3784 #
3784 #
3785 #
3785 3786 # for i in range(2):
3786 3787 # #First Estimation
3787 3788 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3788 3789 # #Dealias
3789 3790 # indcsi = numpy.where(phi0_aux > numpy.pi)
3790 # phi0_aux[indcsi] -= 2*numpy.pi
3791 # phi0_aux[indcsi] -= 2*numpy.pi
3791 3792 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3792 # phi0_aux[indcsi] += 2*numpy.pi
3793 # phi0_aux[indcsi] += 2*numpy.pi
3793 3794 # #Direction Cosine 0
3794 3795 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3795 #
3796 #
3796 3797 # #Most-Accurate Second Estimation
3797 3798 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3798 3799 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3799 3800 # #Direction Cosine 1
3800 3801 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3801 #
3802 #
3802 3803 # #Searching the correct Direction Cosine
3803 3804 # cosdir0_aux = cosdir0[:,i]
3804 3805 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3805 3806 # #Minimum Distance
3806 3807 # cosDiff = (cosdir1 - cosdir0_aux)**2
3807 3808 # indcos = cosDiff.argmin(axis = 1)
3808 3809 # #Saving Value obtained
3809 3810 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3810 #
3811 #
3811 3812 # return cosdir0, cosdir
3812 #
3813 #
3813 3814 # def __calculateAOA(self, cosdir, azimuth):
3814 3815 # cosdirX = cosdir[:,0]
3815 3816 # cosdirY = cosdir[:,1]
3816 #
3817 #
3817 3818 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3818 3819 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3819 3820 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3820 #
3821 #
3821 3822 # return angles
3822 #
3823 #
3823 3824 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3824 #
3825 #
3825 3826 # Ramb = 375 #Ramb = c/(2*PRF)
3826 3827 # Re = 6371 #Earth Radius
3827 3828 # heights = numpy.zeros(Ranges.shape)
3828 #
3829 #
3829 3830 # R_aux = numpy.array([0,1,2])*Ramb
3830 3831 # R_aux = R_aux.reshape(1,R_aux.size)
3831 #
3832 #
3832 3833 # Ranges = Ranges.reshape(Ranges.size,1)
3833 #
3834 #
3834 3835 # Ri = Ranges + R_aux
3835 3836 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3836 #
3837 #
3837 3838 # #Check if there is a height between 70 and 110 km
3838 3839 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3839 3840 # ind_h = numpy.where(h_bool == 1)[0]
3840 #
3841 #
3841 3842 # hCorr = hi[ind_h, :]
3842 3843 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3843 #
3844 # hCorr = hi[ind_hCorr]
3844 #
3845 # hCorr = hi[ind_hCorr]
3845 3846 # heights[ind_h] = hCorr
3846 #
3847 #
3847 3848 # #Setting Error
3848 3849 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3849 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3850 #
3851 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3850 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3851 #
3852 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3852 3853 # error[indInvalid2] = 14
3853 3854 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3854 # error[indInvalid1] = 13
3855 #
3856 # return heights, error
3857 No newline at end of file
3855 # error[indInvalid1] = 13
3856 #
3857 # return heights, error
@@ -1,1056 +1,1056
1 1 import itertools
2 2
3 3 import numpy
4 4
5 5 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
6 6 from schainpy.model.data.jrodata import Spectra
7 7 from schainpy.model.data.jrodata import hildebrand_sekhon
8 8 from schainpy.utils import log
9 9
10 10 @MPDecorator
11 11 class SpectraProc(ProcessingUnit):
12 12
13 13
14 14 def __init__(self):
15 15
16 16 ProcessingUnit.__init__(self)
17 17
18 18 self.buffer = None
19 19 self.firstdatatime = None
20 20 self.profIndex = 0
21 21 self.dataOut = Spectra()
22 22 self.id_min = None
23 23 self.id_max = None
24 24 self.setupReq = False #Agregar a todas las unidades de proc
25 25
26 26 def __updateSpecFromVoltage(self):
27 27
28 28 self.dataOut.timeZone = self.dataIn.timeZone
29 29 self.dataOut.dstFlag = self.dataIn.dstFlag
30 30 self.dataOut.errorCount = self.dataIn.errorCount
31 31 self.dataOut.useLocalTime = self.dataIn.useLocalTime
32 32 try:
33 33 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
34 34 except:
35 35 pass
36 36 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
37 37 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
38 38 self.dataOut.channelList = self.dataIn.channelList
39 39 self.dataOut.heightList = self.dataIn.heightList
40 40 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
41 41
42 42 self.dataOut.nBaud = self.dataIn.nBaud
43 43 self.dataOut.nCode = self.dataIn.nCode
44 44 self.dataOut.code = self.dataIn.code
45 45 self.dataOut.nProfiles = self.dataOut.nFFTPoints
46 46
47 47 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
48 48 self.dataOut.utctime = self.firstdatatime
49 49 # assume the data is already decoded
50 50 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
51 51 # assume the data has not been flipped
52 52 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
53 53 self.dataOut.flagShiftFFT = False
54 54
55 55 self.dataOut.nCohInt = self.dataIn.nCohInt
56 56 self.dataOut.nIncohInt = 1
57 57
58 58 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
59 59
60 60 self.dataOut.frequency = self.dataIn.frequency
61 61 self.dataOut.realtime = self.dataIn.realtime
62 62
63 63 self.dataOut.azimuth = self.dataIn.azimuth
64 64 self.dataOut.zenith = self.dataIn.zenith
65 65
66 66 self.dataOut.beam.codeList = self.dataIn.beam.codeList
67 67 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
68 68 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
69 69
70 70 def __getFft(self):
71 71 """
72 72 Converts Voltage values into Spectra
73 73
74 74 Affected:
75 75 self.dataOut.data_spc
76 76 self.dataOut.data_cspc
77 77 self.dataOut.data_dc
78 78 self.dataOut.heightList
79 79 self.profIndex
80 80 self.buffer
81 81 self.dataOut.flagNoData
82 82 """
83 83 fft_volt = numpy.fft.fft(
84 84 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
85 85 fft_volt = fft_volt.astype(numpy.dtype('complex'))
86 86 dc = fft_volt[:, 0, :]
87 87
88 88 # calculo de self-spectra
89 89 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
90 90 spc = fft_volt * numpy.conjugate(fft_volt)
91 91 spc = spc.real
92 92
93 93 blocksize = 0
94 94 blocksize += dc.size
95 95 blocksize += spc.size
96 96
97 97 cspc = None
98 98 pairIndex = 0
99 99 if self.dataOut.pairsList != None:
100 100 # calculo de cross-spectra
101 101 cspc = numpy.zeros(
102 102 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
103 103 for pair in self.dataOut.pairsList:
104 104 if pair[0] not in self.dataOut.channelList:
105 105 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
106 106 str(pair), str(self.dataOut.channelList)))
107 107 if pair[1] not in self.dataOut.channelList:
108 108 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
109 109 str(pair), str(self.dataOut.channelList)))
110 110
111 111 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
112 112 numpy.conjugate(fft_volt[pair[1], :, :])
113 113 pairIndex += 1
114 114 blocksize += cspc.size
115 115
116 116 self.dataOut.data_spc = spc
117 117 self.dataOut.data_cspc = cspc
118 118 self.dataOut.data_dc = dc
119 119 self.dataOut.blockSize = blocksize
120 120 self.dataOut.flagShiftFFT = True
121 121
122 122 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None, shift_fft=False):
123 123
124 124 if self.dataIn.type == "Spectra":
125 125 self.dataOut.copy(self.dataIn)
126 126 if shift_fft:
127 127 #shifts the data a given number of positions to the right along the frequency axis
128 128 shift = int(self.dataOut.nFFTPoints/2)
129 129 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
130 130
131 131 if self.dataOut.data_cspc is not None:
132 132 #shifts the data a given number of positions to the right along the frequency axis
133 133 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
134 134
135 135 return True
136 136
137 137 if self.dataIn.type == "Voltage":
138 138
139 139 self.dataOut.flagNoData = True
140 140
141 141 if nFFTPoints == None:
142 142 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
143 143
144 144 if nProfiles == None:
145 145 nProfiles = nFFTPoints
146 146
147 147 if ippFactor == None:
148 148 ippFactor = 1
149 149
150 150 self.dataOut.ippFactor = ippFactor
151 151
152 152 self.dataOut.nFFTPoints = nFFTPoints
153 153 self.dataOut.pairsList = pairsList
154 154
155 155 if self.buffer is None:
156 156 self.buffer = numpy.zeros((self.dataIn.nChannels,
157 157 nProfiles,
158 158 self.dataIn.nHeights),
159 159 dtype='complex')
160 160
161 161 if self.dataIn.flagDataAsBlock:
162 162 nVoltProfiles = self.dataIn.data.shape[1]
163 163
164 164 if nVoltProfiles == nProfiles:
165 165 self.buffer = self.dataIn.data.copy()
166 166 self.profIndex = nVoltProfiles
167 167
168 168 elif nVoltProfiles < nProfiles:
169 169
170 170 if self.profIndex == 0:
171 171 self.id_min = 0
172 172 self.id_max = nVoltProfiles
173 173
174 174 self.buffer[:, self.id_min:self.id_max,
175 175 :] = self.dataIn.data
176 176 self.profIndex += nVoltProfiles
177 177 self.id_min += nVoltProfiles
178 178 self.id_max += nVoltProfiles
179 179 else:
180 180 raise ValueError("The type object %s has %d profiles, it should have just %d profiles" % (
181 181 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
182 182 self.dataOut.flagNoData = True
183 183 return 0
184 184 else:
185 185 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
186 186 self.profIndex += 1
187 187
188 188 if self.firstdatatime == None:
189 189 self.firstdatatime = self.dataIn.utctime
190 190
191 191 if self.profIndex == nProfiles:
192 192 self.__updateSpecFromVoltage()
193 193 self.__getFft()
194 194
195 195 self.dataOut.flagNoData = False
196 196 self.firstdatatime = None
197 197 self.profIndex = 0
198 198
199 199 return True
200 200
201 201 raise ValueError("The type of input object '%s' is not valid" % (
202 202 self.dataIn.type))
203 203
204 204 def __selectPairs(self, pairsList):
205 205
206 206 if not pairsList:
207 207 return
208 208
209 209 pairs = []
210 210 pairsIndex = []
211 211
212 212 for pair in pairsList:
213 213 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
214 214 continue
215 215 pairs.append(pair)
216 216 pairsIndex.append(pairs.index(pair))
217 217
218 218 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
219 219 self.dataOut.pairsList = pairs
220 220
221 221 return
222 222
223 223 def __selectPairsByChannel(self, channelList=None):
224 224
225 225 if channelList == None:
226 226 return
227 227
228 228 pairsIndexListSelected = []
229 229 for pairIndex in self.dataOut.pairsIndexList:
230 230 # First pair
231 231 if self.dataOut.pairsList[pairIndex][0] not in channelList:
232 232 continue
233 233 # Second pair
234 234 if self.dataOut.pairsList[pairIndex][1] not in channelList:
235 235 continue
236 236
237 237 pairsIndexListSelected.append(pairIndex)
238 238
239 239 if not pairsIndexListSelected:
240 240 self.dataOut.data_cspc = None
241 241 self.dataOut.pairsList = []
242 242 return
243 243
244 244 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
245 245 self.dataOut.pairsList = [self.dataOut.pairsList[i]
246 246 for i in pairsIndexListSelected]
247 247
248 248 return
249 249
250 250 def selectChannels(self, channelList):
251 251
252 252 channelIndexList = []
253 253
254 254 for channel in channelList:
255 255 if channel not in self.dataOut.channelList:
256 256 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (
257 257 channel, str(self.dataOut.channelList)))
258 258
259 259 index = self.dataOut.channelList.index(channel)
260 260 channelIndexList.append(index)
261 261
262 262 self.selectChannelsByIndex(channelIndexList)
263 263
264 264 def selectChannelsByIndex(self, channelIndexList):
265 265 """
266 266 Selects a block of data by channel according to channelIndexList
267 267
268 268 Input:
269 269 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
270 270
271 271 Affected:
272 272 self.dataOut.data_spc
273 273 self.dataOut.channelIndexList
274 274 self.dataOut.nChannels
275 275
276 276 Return:
277 277 None
278 278 """
279 279
280 280 for channelIndex in channelIndexList:
281 281 if channelIndex not in self.dataOut.channelIndexList:
282 282 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = %s" % (
283 283 channelIndex, self.dataOut.channelIndexList))
284 284
285 285 data_spc = self.dataOut.data_spc[channelIndexList, :]
286 286 data_dc = self.dataOut.data_dc[channelIndexList, :]
287 287
288 288 self.dataOut.data_spc = data_spc
289 289 self.dataOut.data_dc = data_dc
290 290
291 291 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
292 292 self.dataOut.channelList = range(len(channelIndexList))
293 293 self.__selectPairsByChannel(channelIndexList)
294
294
295 295 return 1
296
297
296
297
298 298 def selectFFTs(self, minFFT, maxFFT ):
299 299 """
300 Selects a block of data based on a set of FFT point values in the range
300 Selects a block of data based on a set of FFT point values in the range
301 301 minFFT<= FFT <= maxFFT
302 302 """
303
303
304 304 if (minFFT > maxFFT):
305 305 raise ValueError("Error selecting FFTs: FFT range (%d,%d) is not valid" % (minFFT, maxFFT))
306 306
307 307 if (minFFT < self.dataOut.getFreqRange()[0]):
308 308 minFFT = self.dataOut.getFreqRange()[0]
309 309
310 310 if (maxFFT > self.dataOut.getFreqRange()[-1]):
311 311 maxFFT = self.dataOut.getFreqRange()[-1]
312 312
313 313 minIndex = 0
314 314 maxIndex = 0
315 315 FFTs = self.dataOut.getFreqRange()
316 316
317 317 inda = numpy.where(FFTs >= minFFT)
318 318 indb = numpy.where(FFTs <= maxFFT)
319 319
320 320 try:
321 321 minIndex = inda[0][0]
322 322 except:
323 323 minIndex = 0
324 324
325 325 try:
326 326 maxIndex = indb[0][-1]
327 327 except:
328 328 maxIndex = len(FFTs)
329 329
330 330 self.selectFFTsByIndex(minIndex, maxIndex)
331 331
332 332 return 1
333
334
333
334
335 335 def setH0(self, h0, deltaHeight = None):
336
336
337 337 if not deltaHeight:
338 338 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
339
339
340 340 nHeights = self.dataOut.nHeights
341
341
342 342 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
343
343
344 344 self.dataOut.heightList = newHeiRange
345
346
345
346
347 347 def selectHeights(self, minHei, maxHei):
348 348 """
349 349 Selects a block of data based on a set of height values in the range
350 350 minHei <= height <= maxHei
351 351
352 352 Input:
353 353 minHei : minimum height value to consider
354 354 maxHei : maximum height value to consider
355 355
356 356 Affected:
357 357 Several values are changed indirectly through the selectHeightsByIndex method
358 358
359 359 Return:
360 360 1 if the method executed successfully, otherwise returns 0
361 361 """
362 362
363
363
364 364 if (minHei > maxHei):
365 365 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei))
366 366
367 367 if (minHei < self.dataOut.heightList[0]):
368 368 minHei = self.dataOut.heightList[0]
369 369
370 370 if (maxHei > self.dataOut.heightList[-1]):
371 371 maxHei = self.dataOut.heightList[-1]
372 372
373 373 minIndex = 0
374 374 maxIndex = 0
375 375 heights = self.dataOut.heightList
376 376
377 377 inda = numpy.where(heights >= minHei)
378 378 indb = numpy.where(heights <= maxHei)
379 379
380 380 try:
381 381 minIndex = inda[0][0]
382 382 except:
383 383 minIndex = 0
384 384
385 385 try:
386 386 maxIndex = indb[0][-1]
387 387 except:
388 388 maxIndex = len(heights)
389 389
390 390 self.selectHeightsByIndex(minIndex, maxIndex)
391
391
392 392
393 393 return 1
394 394
395 395 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
396 396 newheis = numpy.where(
397 397 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
398 398
399 399 if hei_ref != None:
400 400 newheis = numpy.where(self.dataOut.heightList > hei_ref)
401 401
402 402 minIndex = min(newheis[0])
403 403 maxIndex = max(newheis[0])
404 404 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
405 405 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
406 406
407 407 # determine indexes
408 408 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
409 409 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
410 410 avg_dB = 10 * \
411 411 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
412 412 beacon_dB = numpy.sort(avg_dB)[-nheis:]
413 413 beacon_heiIndexList = []
414 414 for val in avg_dB.tolist():
415 415 if val >= beacon_dB[0]:
416 416 beacon_heiIndexList.append(avg_dB.tolist().index(val))
417 417
418 418 #data_spc = data_spc[:,:,beacon_heiIndexList]
419 419 data_cspc = None
420 420 if self.dataOut.data_cspc is not None:
421 421 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
422 422 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
423 423
424 424 data_dc = None
425 425 if self.dataOut.data_dc is not None:
426 426 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
427 427 #data_dc = data_dc[:,beacon_heiIndexList]
428 428
429 429 self.dataOut.data_spc = data_spc
430 430 self.dataOut.data_cspc = data_cspc
431 431 self.dataOut.data_dc = data_dc
432 432 self.dataOut.heightList = heightList
433 433 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
434 434
435 435 return 1
436 436
437 437 def selectFFTsByIndex(self, minIndex, maxIndex):
438 438 """
439
439
440 440 """
441 441
442 442 if (minIndex < 0) or (minIndex > maxIndex):
443 443 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
444 444
445 445 if (maxIndex >= self.dataOut.nProfiles):
446 446 maxIndex = self.dataOut.nProfiles-1
447 447
448 448 #Spectra
449 449 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
450 450
451 451 data_cspc = None
452 452 if self.dataOut.data_cspc is not None:
453 453 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
454 454
455 455 data_dc = None
456 456 if self.dataOut.data_dc is not None:
457 457 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
458 458
459 459 self.dataOut.data_spc = data_spc
460 460 self.dataOut.data_cspc = data_cspc
461 461 self.dataOut.data_dc = data_dc
462
462
463 463 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
464 464 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
465 465 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
466 466
467 467 return 1
468 468
469 469
470 470
471 471 def selectHeightsByIndex(self, minIndex, maxIndex):
472 472 """
473 473 Selects a block of data based on a set of height indexes in the range
474 474 minIndex <= index <= maxIndex
475 475
476 476 Input:
477 477 minIndex : minimum height index to consider
478 478 maxIndex : maximum height index to consider
479 479
480 480 Affected:
481 481 self.dataOut.data_spc
482 482 self.dataOut.data_cspc
483 483 self.dataOut.data_dc
484 484 self.dataOut.heightList
485 485
486 486 Return:
487 487 1 si el metodo se ejecuto con exito caso contrario devuelve 0
488 488 """
489 489
490 490 if (minIndex < 0) or (minIndex > maxIndex):
491 491 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
492 492 minIndex, maxIndex))
493 493
494 494 if (maxIndex >= self.dataOut.nHeights):
495 495 maxIndex = self.dataOut.nHeights - 1
496 496
497 497 # Spectra
498 498 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
499 499
500 500 data_cspc = None
501 501 if self.dataOut.data_cspc is not None:
502 502 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
503 503
504 504 data_dc = None
505 505 if self.dataOut.data_dc is not None:
506 506 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
507 507
508 508 self.dataOut.data_spc = data_spc
509 509 self.dataOut.data_cspc = data_cspc
510 510 self.dataOut.data_dc = data_dc
511 511
512 512 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
513 513
514 514 return 1
515 515
516 516 def removeDC(self, mode=2):
517 517 jspectra = self.dataOut.data_spc
518 518 jcspectra = self.dataOut.data_cspc
519 519
520 520 num_chan = jspectra.shape[0]
521 521 num_hei = jspectra.shape[2]
522 522
523 523 if jcspectra is not None:
524 524 jcspectraExist = True
525 525 num_pairs = jcspectra.shape[0]
526 526 else:
527 527 jcspectraExist = False
528 528
529 529 freq_dc = int(jspectra.shape[1] / 2)
530 530 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
531 531 ind_vel = ind_vel.astype(int)
532 532
533 533 if ind_vel[0] < 0:
534 534 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
535 535
536 536 if mode == 1:
537 537 jspectra[:, freq_dc, :] = (
538 538 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
539 539
540 540 if jcspectraExist:
541 541 jcspectra[:, freq_dc, :] = (
542 542 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
543 543
544 544 if mode == 2:
545 545
546 546 vel = numpy.array([-2, -1, 1, 2])
547 547 xx = numpy.zeros([4, 4])
548 548
549 549 for fil in range(4):
550 550 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
551 551
552 552 xx_inv = numpy.linalg.inv(xx)
553 553 xx_aux = xx_inv[0, :]
554 554
555 for ich in range(num_chan):
555 for ich in range(num_chan):
556 556 yy = jspectra[ich, ind_vel, :]
557 557 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
558 558
559 559 junkid = jspectra[ich, freq_dc, :] <= 0
560 560 cjunkid = sum(junkid)
561 561
562 562 if cjunkid.any():
563 563 jspectra[ich, freq_dc, junkid.nonzero()] = (
564 564 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
565 565
566 566 if jcspectraExist:
567 567 for ip in range(num_pairs):
568 568 yy = jcspectra[ip, ind_vel, :]
569 569 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
570 570
571 571 self.dataOut.data_spc = jspectra
572 572 self.dataOut.data_cspc = jcspectra
573 573
574 574 return 1
575 575
576 576 def removeInterference2(self):
577
577
578 578 cspc = self.dataOut.data_cspc
579 579 spc = self.dataOut.data_spc
580 Heights = numpy.arange(cspc.shape[2])
580 Heights = numpy.arange(cspc.shape[2])
581 581 realCspc = numpy.abs(cspc)
582
582
583 583 for i in range(cspc.shape[0]):
584 584 LinePower= numpy.sum(realCspc[i], axis=0)
585 585 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
586 586 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
587 587 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
588 588 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
589 589 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
590
591
590
591
592 592 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
593 593 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
594 594 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
595 595 cspc[i,InterferenceRange,:] = numpy.NaN
596
597
598
596
597
598
599 599 self.dataOut.data_cspc = cspc
600
600
601 601 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
602 602
603 603 jspectra = self.dataOut.data_spc
604 604 jcspectra = self.dataOut.data_cspc
605 605 jnoise = self.dataOut.getNoise()
606 606 num_incoh = self.dataOut.nIncohInt
607 607
608 608 num_channel = jspectra.shape[0]
609 609 num_prof = jspectra.shape[1]
610 610 num_hei = jspectra.shape[2]
611 611
612 612 # hei_interf
613 613 if hei_interf is None:
614 614 count_hei = int(num_hei / 2)
615 615 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
616 616 hei_interf = numpy.asarray(hei_interf)[0]
617 617 # nhei_interf
618 618 if (nhei_interf == None):
619 619 nhei_interf = 5
620 620 if (nhei_interf < 1):
621 621 nhei_interf = 1
622 622 if (nhei_interf > count_hei):
623 623 nhei_interf = count_hei
624 624 if (offhei_interf == None):
625 625 offhei_interf = 0
626 626
627 627 ind_hei = list(range(num_hei))
628 628 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
629 629 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
630 630 mask_prof = numpy.asarray(list(range(num_prof)))
631 631 num_mask_prof = mask_prof.size
632 632 comp_mask_prof = [0, num_prof / 2]
633 633
634 634 # noise_exist: determines whether the jnoise variable has been defined and holds the noise information for each channel
635 635 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
636 636 jnoise = numpy.nan
637 637 noise_exist = jnoise[0] < numpy.Inf
638 638
639 639 # Interference removal subroutine
640 640 for ich in range(num_channel):
641 641 # Sort the spectra by power (lowest to highest)
642 642 power = jspectra[ich, mask_prof, :]
643 643 power = power[:, hei_interf]
644 644 power = power.sum(axis=0)
645 645 psort = power.ravel().argsort()
646 646
647 647 # Estimate the average interference in the power spectra using
648 648 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
649 649 offhei_interf, nhei_interf + offhei_interf))]]]
650 650
651 651 if noise_exist:
652 652 # tmp_noise = jnoise[ich] / num_prof
653 653 tmp_noise = jnoise[ich]
654 654 junkspc_interf = junkspc_interf - tmp_noise
655 655 #junkspc_interf[:,comp_mask_prof] = 0
656 656
657 657 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
658 658 jspc_interf = jspc_interf.transpose()
659 659 # Computing the average interference spectrum
660 660 noiseid = numpy.where(
661 661 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
662 662 noiseid = noiseid[0]
663 663 cnoiseid = noiseid.size
664 664 interfid = numpy.where(
665 665 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
666 666 interfid = interfid[0]
667 667 cinterfid = interfid.size
668 668
669 669 if (cnoiseid > 0):
670 670 jspc_interf[noiseid] = 0
671 671
672 672 # Expanding the profiles to be cleaned
673 673 if (cinterfid > 0):
674 674 new_interfid = (
675 675 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
676 676 new_interfid = numpy.asarray(new_interfid)
677 677 new_interfid = {x for x in new_interfid}
678 678 new_interfid = numpy.array(list(new_interfid))
679 679 new_cinterfid = new_interfid.size
680 680 else:
681 681 new_cinterfid = 0
682 682
683 683 for ip in range(new_cinterfid):
684 684 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
685 685 jspc_interf[new_interfid[ip]
686 686 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
687 687
688 688 jspectra[ich, :, ind_hei] = jspectra[ich, :,
689 689 ind_hei] - jspc_interf # fix indexes
690 690
691 691 # Removing the interference at the point of strongest interference
692 692 ListAux = jspc_interf[mask_prof].tolist()
693 693 maxid = ListAux.index(max(ListAux))
694 694
695 695 if cinterfid > 0:
696 696 for ip in range(cinterfid * (interf == 2) - 1):
697 697 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
698 698 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
699 699 cind = len(ind)
700 700
701 701 if (cind > 0):
702 702 jspectra[ich, interfid[ip], ind] = tmp_noise * \
703 703 (1 + (numpy.random.uniform(cind) - 0.5) /
704 704 numpy.sqrt(num_incoh))
705 705
706 706 ind = numpy.array([-2, -1, 1, 2])
707 707 xx = numpy.zeros([4, 4])
708 708
709 709 for id1 in range(4):
710 710 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
711 711
712 712 xx_inv = numpy.linalg.inv(xx)
713 713 xx = xx_inv[:, 0]
714 714 ind = (ind + maxid + num_mask_prof) % num_mask_prof
715 715 yy = jspectra[ich, mask_prof[ind], :]
716 716 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
717 717 yy.transpose(), xx)
718 718
719 719 indAux = (jspectra[ich, :, :] < tmp_noise *
720 720 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
721 721 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
722 722 (1 - 1 / numpy.sqrt(num_incoh))
723 723
724 724 # Interference removal in the cross-spectra
725 725 if jcspectra is None:
726 726 return jspectra, jcspectra
727 727 num_pairs = int(jcspectra.size / (num_prof * num_hei))
728 728 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
729 729
730 730 for ip in range(num_pairs):
731 731
732 732 #-------------------------------------------
733 733
734 734 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
735 735 cspower = cspower[:, hei_interf]
736 736 cspower = cspower.sum(axis=0)
737 737
738 738 cspsort = cspower.ravel().argsort()
739 739 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
740 740 offhei_interf, nhei_interf + offhei_interf))]]]
741 741 junkcspc_interf = junkcspc_interf.transpose()
742 742 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
743 743
744 744 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
745 745
746 746 median_real = int(numpy.median(numpy.real(
747 747 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
748 748 median_imag = int(numpy.median(numpy.imag(
749 749 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
750 750 comp_mask_prof = [int(e) for e in comp_mask_prof]
751 751 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
752 752 median_real, median_imag)
753 753
754 754 for iprof in range(num_prof):
755 755 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
756 756 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
757 757
758 758 # Removiendo la Interferencia
759 759 jcspectra[ip, :, ind_hei] = jcspectra[ip,
760 760 :, ind_hei] - jcspc_interf
761 761
762 762 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
763 763 maxid = ListAux.index(max(ListAux))
764 764
765 765 ind = numpy.array([-2, -1, 1, 2])
766 766 xx = numpy.zeros([4, 4])
767 767
768 768 for id1 in range(4):
769 769 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
770 770
771 771 xx_inv = numpy.linalg.inv(xx)
772 772 xx = xx_inv[:, 0]
773 773
774 774 ind = (ind + maxid + num_mask_prof) % num_mask_prof
775 775 yy = jcspectra[ip, mask_prof[ind], :]
776 776 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
777 777
778 778 # Save results
779 779 self.dataOut.data_spc = jspectra
780 780 self.dataOut.data_cspc = jcspectra
781 781
782 782 return 1
783 783
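The block above that builds xx and xx_inv replaces the most contaminated Doppler profile (maxid) with the value of a cubic polynomial fitted through its four neighbours at offsets -2, -1, 1 and 2. A minimal, self-contained numpy sketch of that interpolation step (illustrative only, not part of this file):

import numpy

def fill_from_neighbors(neighbors):
    # neighbors: (4, nHeights) samples taken at offsets -2, -1, 1, 2
    offsets = numpy.array([-2, -1, 1, 2])
    vander = numpy.column_stack([offsets ** p for p in range(4)])  # Vandermonde matrix
    weights = numpy.linalg.inv(vander)[0]    # row that evaluates the cubic fit at x = 0
    return neighbors.T @ weights             # one interpolated value per height

neighbors = numpy.array([[1.0, 10.0], [2.0, 11.0], [4.0, 13.0], [5.0, 14.0]])
print(fill_from_neighbors(neighbors))        # [ 3. 12.] for these linear ramps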
784 784 def setRadarFrequency(self, frequency=None):
785 785
786 786 if frequency != None:
787 787 self.dataOut.frequency = frequency
788 788
789 789 return 1
790 790
791 791 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
792 792 # height range validation
793 793 if minHei == None:
794 794 minHei = self.dataOut.heightList[0]
795 795
796 796 if maxHei == None:
797 797 maxHei = self.dataOut.heightList[-1]
798 798
799 799 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
800 800 print('minHei: %.2f is out of the height range' % (minHei))
801 801 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
802 802 minHei = self.dataOut.heightList[0]
803 803
804 804 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
805 805 print('maxHei: %.2f is out of the height range' % (maxHei))
806 806 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
807 807 maxHei = self.dataOut.heightList[-1]
808 808
809 809 # velocity range validation
810 810 velrange = self.dataOut.getVelRange(1)
811 811
812 812 if minVel == None:
813 813 minVel = velrange[0]
814 814
815 815 if maxVel == None:
816 816 maxVel = velrange[-1]
817 817
818 818 if (minVel < velrange[0]) or (minVel > maxVel):
819 819 print('minVel: %.2f is out of the velocity range' % (minVel))
820 820 print('minVel is set to %.2f' % (velrange[0]))
821 821 minVel = velrange[0]
822 822
823 823 if (maxVel > velrange[-1]) or (maxVel < minVel):
824 824 print('maxVel: %.2f is out of the velocity range' % (maxVel))
825 825 print('maxVel is set to %.2f' % (velrange[-1]))
826 826 maxVel = velrange[-1]
827 827
828 828 # index selection for the height range
829 829 minIndex = 0
830 830 maxIndex = 0
831 831 heights = self.dataOut.heightList
832 832
833 833 inda = numpy.where(heights >= minHei)
834 834 indb = numpy.where(heights <= maxHei)
835 835
836 836 try:
837 837 minIndex = inda[0][0]
838 838 except:
839 839 minIndex = 0
840 840
841 841 try:
842 842 maxIndex = indb[0][-1]
843 843 except:
844 844 maxIndex = len(heights)
845 845
846 846 if (minIndex < 0) or (minIndex > maxIndex):
847 847 raise ValueError("some value in (%d,%d) is not valid" % (
848 848 minIndex, maxIndex))
849 849
850 850 if (maxIndex >= self.dataOut.nHeights):
851 851 maxIndex = self.dataOut.nHeights - 1
852 852
853 853 # index selection for the velocity range
854 854 indminvel = numpy.where(velrange >= minVel)
855 855 indmaxvel = numpy.where(velrange <= maxVel)
856 856 try:
857 857 minIndexVel = indminvel[0][0]
858 858 except:
859 859 minIndexVel = 0
860 860
861 861 try:
862 862 maxIndexVel = indmaxvel[0][-1]
863 863 except:
864 864 maxIndexVel = len(velrange)
865 865
866 866 # spectrum selection
867 867 data_spc = self.dataOut.data_spc[:,
868 868 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
869 869 # noise estimation
870 870 noise = numpy.zeros(self.dataOut.nChannels)
871 871
872 872 for channel in range(self.dataOut.nChannels):
873 873 daux = data_spc[channel, :, :]
874 874 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
875 875
876 876 self.dataOut.noise_estimation = noise.copy()
877 877
878 878 return 1
879 879
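getNoise above delegates the actual noise-floor computation to hildebrand_sekhon(daux, nIncohInt). As a rough reminder of what that classic criterion does (accept the sorted spectral points while they still look like white noise, i.e. while var <= mean**2 / navg), here is a small illustrative numpy sketch; it is an approximation for reading purposes, not the library routine:

import numpy

def hildebrand_sekhon_sketch(spectrum, navg):
    # sort the spectral points and grow the candidate noise set from the bottom
    data = numpy.sort(spectrum.ravel())
    noise = data.mean()
    for k in range(2, data.size + 1):
        subset = data[:k]
        mean, var = subset.mean(), subset.var()
        if var > mean ** 2 / navg:    # signal starts to contaminate the set
            break
        noise = mean
    return noise

spc = numpy.random.gamma(10.0, 0.1, 512)   # noise-like spectrum for navg = 10 integrations
spc[200:210] += 50.0                       # strong echo in a few Doppler bins
print(hildebrand_sekhon_sketch(spc, navg=10))   # close to the simulated noise level (~1)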
880 880
881 881 class IncohInt(Operation):
882 882
883 883 __profIndex = 0
884 884 __withOverlapping = False
885 885
886 886 __byTime = False
887 887 __initime = None
888 888 __lastdatatime = None
889 889 __integrationtime = None
890 890
891 891 __buffer_spc = None
892 892 __buffer_cspc = None
893 893 __buffer_dc = None
894 894
895 895 __dataReady = False
896 896
897 897 __timeInterval = None
898 898
899 899 n = None
900 900
901 901 def __init__(self):
902 902
903 903 Operation.__init__(self)
904 904
905 905 def setup(self, n=None, timeInterval=None, overlapping=False):
906 906 """
907 907 Set the parameters of the integration class.
908 908
909 909 Inputs:
910 910
911 911 n : Number of incoherent integrations
912 912 timeInterval : Integration time in seconds; ignored if the parameter "n" is given
913 913 overlapping : (currently unused)
914 914
915 915 """
916 916
917 917 self.__initime = None
918 918 self.__lastdatatime = 0
919 919
920 920 self.__buffer_spc = 0
921 921 self.__buffer_cspc = 0
922 922 self.__buffer_dc = 0
923 923
924 924 self.__profIndex = 0
925 925 self.__dataReady = False
926 926 self.__byTime = False
927 927
928 928 if n is None and timeInterval is None:
929 929 raise ValueError("n or timeInterval should be specified ...")
930 930
931 931 if n is not None:
932 932 self.n = int(n)
933 933 else:
934
934
935 935 self.__integrationtime = int(timeInterval)
936 936 self.n = None
937 937 self.__byTime = True
938 938
939 939 def putData(self, data_spc, data_cspc, data_dc):
940 940 """
941 941 Add a profile to __buffer_spc and increase __profIndex by one
942 942
943 943 """
944 944
945 945 self.__buffer_spc += data_spc
946 946
947 947 if data_cspc is None:
948 948 self.__buffer_cspc = None
949 949 else:
950 950 self.__buffer_cspc += data_cspc
951 951
952 952 if data_dc is None:
953 953 self.__buffer_dc = None
954 954 else:
955 955 self.__buffer_dc += data_dc
956 956
957 957 self.__profIndex += 1
958 958
959 959 return
960 960
961 961 def pushData(self):
962 962 """
963 963 Return the sum of the last profiles and the profiles used in the sum.
964 964
965 965 Affected:
966 966
967 967 self.__profIndex
968 968
969 969 """
970 970
971 971 data_spc = self.__buffer_spc
972 972 data_cspc = self.__buffer_cspc
973 973 data_dc = self.__buffer_dc
974 974 n = self.__profIndex
975 975
976 976 self.__buffer_spc = 0
977 977 self.__buffer_cspc = 0
978 978 self.__buffer_dc = 0
979 979 self.__profIndex = 0
980 980
981 981 return data_spc, data_cspc, data_dc, n
982 982
983 983 def byProfiles(self, *args):
984 984
985 985 self.__dataReady = False
986 986 avgdata_spc = None
987 987 avgdata_cspc = None
988 988 avgdata_dc = None
989 989
990 990 self.putData(*args)
991 991
992 992 if self.__profIndex == self.n:
993 993
994 994 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
995 995 self.n = n
996 996 self.__dataReady = True
997 997
998 998 return avgdata_spc, avgdata_cspc, avgdata_dc
999 999
1000 1000 def byTime(self, datatime, *args):
1001 1001
1002 1002 self.__dataReady = False
1003 1003 avgdata_spc = None
1004 1004 avgdata_cspc = None
1005 1005 avgdata_dc = None
1006 1006
1007 1007 self.putData(*args)
1008 1008
1009 1009 if (datatime - self.__initime) >= self.__integrationtime:
1010 1010 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1011 1011 self.n = n
1012 1012 self.__dataReady = True
1013 1013
1014 1014 return avgdata_spc, avgdata_cspc, avgdata_dc
1015 1015
1016 1016 def integrate(self, datatime, *args):
1017 1017
1018 1018 if self.__profIndex == 0:
1019 1019 self.__initime = datatime
1020 1020
1021 1021 if self.__byTime:
1022 1022 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1023 1023 datatime, *args)
1024 1024 else:
1025 1025 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1026 1026
1027 1027 if not self.__dataReady:
1028 1028 return None, None, None, None
1029 1029
1030 1030 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1031 1031
1032 1032 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1033 1033 if n == 1:
1034 1034 return
1035
1035
1036 1036 dataOut.flagNoData = True
1037 1037
1038 1038 if not self.isConfig:
1039 1039 self.setup(n, timeInterval, overlapping)
1040 1040 self.isConfig = True
1041 1041
1042 1042 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1043 1043 dataOut.data_spc,
1044 1044 dataOut.data_cspc,
1045 1045 dataOut.data_dc)
1046 1046
1047 1047 if self.__dataReady:
1048 1048
1049 1049 dataOut.data_spc = avgdata_spc
1050 1050 dataOut.data_cspc = avgdata_cspc
1051 dataOut.data_dc = avgdata_dc
1051 dataOut.data_dc = avgdata_dc
1052 1052 dataOut.nIncohInt *= self.n
1053 1053 dataOut.utctime = avgdatatime
1054 1054 dataOut.flagNoData = False
1055 1055
1056 return dataOut No newline at end of file
1056 return dataOut
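IncohInt, in short, accumulates the incoming spectra (and cross-spectra / DC buffers) until either n profiles or timeInterval seconds have been collected, then hands back the sum and multiplies nIncohInt by n. The bookkeeping reduced to a toy class (the names here are invented for illustration; they are not the operation's private attributes):

import numpy

class SumByProfiles:
    # minimal sketch: accumulate arrays and release the sum every n calls
    def __init__(self, n):
        self.n = n
        self.buffer = 0
        self.count = 0

    def push(self, spc):
        self.buffer = self.buffer + spc
        self.count += 1
        if self.count == self.n:
            total, self.buffer, self.count = self.buffer, 0, 0
            return total          # the caller would also multiply nIncohInt by n
        return None               # not ready yet

integ = SumByProfiles(n=4)
for k in range(8):
    out = integ.push(numpy.full((2, 8), float(k)))
    if out is not None:
        print(out[0, 0])          # 0+1+2+3 = 6.0, then 4+5+6+7 = 22.0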
@@ -1,1328 +1,1327
1 1 import sys
2 2 import numpy
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10 @MPDecorator
11 class VoltageProc(ProcessingUnit):
12
11 class VoltageProc(ProcessingUnit):
12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 # self.dataOut.copy(self.dataIn)
30 30
31 31 def __updateObjFromAmisrInput(self):
32 32
33 33 self.dataOut.timeZone = self.dataIn.timeZone
34 34 self.dataOut.dstFlag = self.dataIn.dstFlag
35 35 self.dataOut.errorCount = self.dataIn.errorCount
36 36 self.dataOut.useLocalTime = self.dataIn.useLocalTime
37 37
38 38 self.dataOut.flagNoData = self.dataIn.flagNoData
39 39 self.dataOut.data = self.dataIn.data
40 40 self.dataOut.utctime = self.dataIn.utctime
41 41 self.dataOut.channelList = self.dataIn.channelList
42 42 #self.dataOut.timeInterval = self.dataIn.timeInterval
43 43 self.dataOut.heightList = self.dataIn.heightList
44 44 self.dataOut.nProfiles = self.dataIn.nProfiles
45 45
46 46 self.dataOut.nCohInt = self.dataIn.nCohInt
47 47 self.dataOut.ippSeconds = self.dataIn.ippSeconds
48 48 self.dataOut.frequency = self.dataIn.frequency
49 49
50 50 self.dataOut.azimuth = self.dataIn.azimuth
51 51 self.dataOut.zenith = self.dataIn.zenith
52 52
53 53 self.dataOut.beam.codeList = self.dataIn.beam.codeList
54 54 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
55 55 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
56 56 #
57 57 # pass#
58 58 #
59 59 # def init(self):
60 60 #
61 61 #
62 62 # if self.dataIn.type == 'AMISR':
63 63 # self.__updateObjFromAmisrInput()
64 64 #
65 65 # if self.dataIn.type == 'Voltage':
66 66 # self.dataOut.copy(self.dataIn)
67 67 # # No necesita copiar en cada init() los atributos de dataIn
68 68 # # la copia deberia hacerse por cada nuevo bloque de datos
69 69
70 70 def selectChannels(self, channelList):
71 71
72 72 channelIndexList = []
73 73
74 74 for channel in channelList:
75 75 if channel not in self.dataOut.channelList:
76 76 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
77 77
78 78 index = self.dataOut.channelList.index(channel)
79 79 channelIndexList.append(index)
80 80
81 81 self.selectChannelsByIndex(channelIndexList)
82 82
83 83 def selectChannelsByIndex(self, channelIndexList):
84 84 """
85 85 Selects a block of data by channel according to channelIndexList
86 86
87 87 Input:
88 88 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
89 89
90 90 Affected:
91 91 self.dataOut.data
92 92 self.dataOut.channelIndexList
93 93 self.dataOut.nChannels
94 94 self.dataOut.m_ProcessingHeader.totalSpectra
95 95 self.dataOut.systemHeaderObj.numChannels
96 96 self.dataOut.m_ProcessingHeader.blockSize
97 97
98 98 Return:
99 99 None
100 100 """
101 101
102 102 for channelIndex in channelIndexList:
103 103 if channelIndex not in self.dataOut.channelIndexList:
104 104 print(channelIndexList)
105 105 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
106 106
107 107 if self.dataOut.flagDataAsBlock:
108 108 """
109 109 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
110 110 """
111 111 data = self.dataOut.data[channelIndexList,:,:]
112 112 else:
113 113 data = self.dataOut.data[channelIndexList,:]
114 114
115 115 self.dataOut.data = data
116 116 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
117 117 self.dataOut.channelList = range(len(channelIndexList))
118
118
119 119 return 1
120 120
121 121 def selectHeights(self, minHei=None, maxHei=None):
122 122 """
123 123 Selects a block of data from a group of height values within the range
124 124 minHei <= height <= maxHei
125 125
126 126 Input:
127 127 minHei : minimum height value to consider
128 128 maxHei : maximum height value to consider
129 129
130 130 Affected:
131 131 Several values are changed indirectly through the selectHeightsByIndex method
132 132
133 133 Return:
134 134 1 if the method ran successfully, otherwise 0
135 135 """
136 136
137 137 if minHei == None:
138 138 minHei = self.dataOut.heightList[0]
139 139
140 140 if maxHei == None:
141 141 maxHei = self.dataOut.heightList[-1]
142 142
143 143 if (minHei < self.dataOut.heightList[0]):
144 144 minHei = self.dataOut.heightList[0]
145 145
146 146 if (maxHei > self.dataOut.heightList[-1]):
147 147 maxHei = self.dataOut.heightList[-1]
148 148
149 149 minIndex = 0
150 150 maxIndex = 0
151 151 heights = self.dataOut.heightList
152 152
153 153 inda = numpy.where(heights >= minHei)
154 154 indb = numpy.where(heights <= maxHei)
155 155
156 156 try:
157 157 minIndex = inda[0][0]
158 158 except:
159 159 minIndex = 0
160 160
161 161 try:
162 162 maxIndex = indb[0][-1]
163 163 except:
164 164 maxIndex = len(heights)
165 165
166 166 self.selectHeightsByIndex(minIndex, maxIndex)
167 167
168 168 return 1
169 169
170 170
171 171 def selectHeightsByIndex(self, minIndex, maxIndex):
172 172 """
173 173 Selecciona un bloque de datos en base a un grupo indices de alturas segun el rango
174 174 minIndex <= index <= maxIndex
175 175
176 176 Input:
177 177 minIndex : valor de indice minimo de altura a considerar
178 178 maxIndex : valor de indice maximo de altura a considerar
179 179
180 180 Affected:
181 181 self.dataOut.data
182 182 self.dataOut.heightList
183 183
184 184 Return:
185 185 1 si el metodo se ejecuto con exito caso contrario devuelve 0
186 186 """
187 187
188 188 if (minIndex < 0) or (minIndex > maxIndex):
189 189 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
190 190
191 191 if (maxIndex >= self.dataOut.nHeights):
192 192 maxIndex = self.dataOut.nHeights
193 193
194 194 #voltage
195 195 if self.dataOut.flagDataAsBlock:
196 196 """
197 197 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
198 198 """
199 199 data = self.dataOut.data[:,:, minIndex:maxIndex]
200 200 else:
201 201 data = self.dataOut.data[:, minIndex:maxIndex]
202 202
203 203 # firstHeight = self.dataOut.heightList[minIndex]
204 204
205 205 self.dataOut.data = data
206 206 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
207 207
208 208 if self.dataOut.nHeights <= 1:
209 209 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
210 210
211 211 return 1
212 212
213 213
214 214 def filterByHeights(self, window):
215 215
216 216 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
217 217
218 218 if window == None:
219 219 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
220 220
221 221 newdelta = deltaHeight * window
222 222 r = self.dataOut.nHeights % window
223 223 newheights = (self.dataOut.nHeights-r)/window
224 224
225 225 if newheights <= 1:
226 226 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window))
227 227
228 228 if self.dataOut.flagDataAsBlock:
229 229 """
230 230 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
231 231 """
232 buffer = self.dataOut.data[:, :, 0:int(self.dataOut.nHeights-r)]
232 buffer = self.dataOut.data[:, :, 0:int(self.dataOut.nHeights-r)]
233 233 buffer = buffer.reshape(self.dataOut.nChannels, self.dataOut.nProfiles, int(self.dataOut.nHeights/window), window)
234 234 buffer = numpy.sum(buffer,3)
235 235
236 236 else:
237 237 buffer = self.dataOut.data[:,0:int(self.dataOut.nHeights-r)]
238 238 buffer = buffer.reshape(self.dataOut.nChannels,int(self.dataOut.nHeights/window),int(window))
239 239 buffer = numpy.sum(buffer,2)
240 240
241 241 self.dataOut.data = buffer
242 242 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
243 243 self.dataOut.windowOfFilter = window
244 244
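filterByHeights decimates the range axis by summing groups of window consecutive heights (the remainder r that does not fill a complete group is dropped) and widens the height spacing by the same factor. The reshape-and-sum trick in isolation (illustrative sketch):

import numpy

def sum_by_height_window(data, window):
    # data: (nChannels, nHeights); returns (nChannels, nHeights // window)
    n_channels, n_heights = data.shape
    r = n_heights % window                      # leftover heights are discarded
    trimmed = data[:, :n_heights - r] if r else data
    return trimmed.reshape(n_channels, -1, window).sum(axis=2)

data = numpy.arange(2 * 10).reshape(2, 10).astype(float)
print(sum_by_height_window(data, window=3))     # three summed heights per channel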
245 245 def setH0(self, h0, deltaHeight = None):
246 246
247 247 if not deltaHeight:
248 248 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
249 249
250 250 nHeights = self.dataOut.nHeights
251 251
252 252 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
253 253
254 254 self.dataOut.heightList = newHeiRange
255 255
256 256 def deFlip(self, channelList = []):
257 257
258 258 data = self.dataOut.data.copy()
259 259
260 260 if self.dataOut.flagDataAsBlock:
261 261 flip = self.flip
262 262 profileList = list(range(self.dataOut.nProfiles))
263 263
264 264 if not channelList:
265 265 for thisProfile in profileList:
266 266 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
267 267 flip *= -1.0
268 268 else:
269 269 for thisChannel in channelList:
270 270 if thisChannel not in self.dataOut.channelList:
271 271 continue
272 272
273 273 for thisProfile in profileList:
274 274 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
275 275 flip *= -1.0
276 276
277 277 self.flip = flip
278 278
279 279 else:
280 280 if not channelList:
281 281 data[:,:] = data[:,:]*self.flip
282 282 else:
283 283 for thisChannel in channelList:
284 284 if thisChannel not in self.dataOut.channelList:
285 285 continue
286 286
287 287 data[thisChannel,:] = data[thisChannel,:]*self.flip
288 288
289 289 self.flip *= -1.
290 290
291 291 self.dataOut.data = data
292 292
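deFlip multiplies consecutive profiles by +1, -1, +1, ... on the selected channels to undo a transmitter phase flip, and keeps the running sign in self.flip so the pattern continues across blocks. The per-profile pattern on its own (illustrative sketch):

import numpy

def deflip_block(block, start_flip=1.0):
    # block: (nChannels, nProfiles, nHeights); alternate the sign per profile
    n_profiles = block.shape[1]
    signs = start_flip * (-1.0) ** numpy.arange(n_profiles)   # +1, -1, +1, ...
    next_flip = signs[-1] * -1.0                               # sign to start the next block with
    return block * signs[None, :, None], next_flip

block = numpy.ones((1, 4, 3))
deflipped, next_flip = deflip_block(block)
print(deflipped[0, :, 0], next_flip)                           # [ 1. -1.  1. -1.] 1.0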
293 293 def setRadarFrequency(self, frequency=None):
294 294
295 295 if frequency != None:
296 296 self.dataOut.frequency = frequency
297 297
298 298 return 1
299 299
300 300 def interpolateHeights(self, topLim, botLim):
301 301 #69 to 72 for julia
302 302 #82-84 for meteors
303 303 if len(numpy.shape(self.dataOut.data))==2:
304 304 sampInterp = (self.dataOut.data[:,botLim-1] + self.dataOut.data[:,topLim+1])/2
305 305 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
306 306 #self.dataOut.data[:,botLim:limSup+1] = sampInterp
307 307 self.dataOut.data[:,botLim:topLim+1] = sampInterp
308 308 else:
309 309 nHeights = self.dataOut.data.shape[2]
310 310 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
311 311 y = self.dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
312 312 f = interpolate.interp1d(x, y, axis = 2)
313 313 xnew = numpy.arange(botLim,topLim+1)
314 314 ynew = f(xnew)
315 315
316 316 self.dataOut.data[:,:,botLim:topLim+1] = ynew
317 317
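interpolateHeights patches a known bad height interval [botLim, topLim] by interpolating across it from the surrounding good heights, using scipy's interp1d along the range axis. The same idea on a single profile (illustrative sketch):

import numpy
from scipy import interpolate

profile = numpy.array([0.0, 1.0, 2.0, 99.0, 99.0, 5.0, 6.0])   # heights 3-4 are bad
bot_lim, top_lim = 3, 4

good = numpy.hstack((numpy.arange(bot_lim), numpy.arange(top_lim + 1, profile.size)))
f = interpolate.interp1d(good, profile[good])
profile[bot_lim:top_lim + 1] = f(numpy.arange(bot_lim, top_lim + 1))
print(profile)                                                   # [0. 1. 2. 3. 4. 5. 6.]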
318 318 # import collections
319 319
320 320 class CohInt(Operation):
321 321
322 322 isConfig = False
323 323 __profIndex = 0
324 324 __byTime = False
325 325 __initime = None
326 326 __lastdatatime = None
327 327 __integrationtime = None
328 328 __buffer = None
329 329 __bufferStride = []
330 330 __dataReady = False
331 331 __profIndexStride = 0
332 332 __dataToPutStride = False
333 333 n = None
334 334
335 335 def __init__(self, **kwargs):
336 336
337 337 Operation.__init__(self, **kwargs)
338 338
339 339 # self.isConfig = False
340 340
341 341 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
342 342 """
343 343 Set the parameters of the integration class.
344 344
345 345 Inputs:
346 346
347 347 n : Number of coherent integrations
348 348 timeInterval : Integration time in seconds; ignored if the parameter "n" is given
349 349 overlapping : If True, keep a rolling buffer of the last n profiles
350 350 """
351 351
352 352 self.__initime = None
353 353 self.__lastdatatime = 0
354 354 self.__buffer = None
355 355 self.__dataReady = False
356 356 self.byblock = byblock
357 357 self.stride = stride
358 358
359 359 if n == None and timeInterval == None:
360 360 raise ValueError("n or timeInterval should be specified ...")
361 361
362 362 if n != None:
363 363 self.n = n
364 364 self.__byTime = False
365 365 else:
366 366 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
367 367 self.n = 9999
368 368 self.__byTime = True
369 369
370 370 if overlapping:
371 371 self.__withOverlapping = True
372 372 self.__buffer = None
373 373 else:
374 374 self.__withOverlapping = False
375 375 self.__buffer = 0
376 376
377 377 self.__profIndex = 0
378 378
379 379 def putData(self, data):
380 380
381 381 """
382 382 Add a profile to __buffer and increase __profIndex by one
383 383
384 384 """
385 385
386 386 if not self.__withOverlapping:
387 387 self.__buffer += data.copy()
388 388 self.__profIndex += 1
389 389 return
390 390
391 391 #Overlapping data
392 392 nChannels, nHeis = data.shape
393 393 data = numpy.reshape(data, (1, nChannels, nHeis))
394 394
395 395 #If the buffer is empty then it takes the data value
396 396 if self.__buffer is None:
397 397 self.__buffer = data
398 398 self.__profIndex += 1
399 399 return
400 400
401 401 #If the buffer length is lower than n then stacking the data value
402 402 if self.__profIndex < self.n:
403 403 self.__buffer = numpy.vstack((self.__buffer, data))
404 404 self.__profIndex += 1
405 405 return
406 406
407 407 #If the buffer length is equal to n then replacing the last buffer value with the data value
408 408 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
409 409 self.__buffer[self.n-1] = data
410 410 self.__profIndex = self.n
411 411 return
412 412
413 413
414 414 def pushData(self):
415 415 """
416 416 Return the sum of the last profiles and the profiles used in the sum.
417 417
418 418 Affected:
419 419
420 420 self.__profIndex
421 421
422 422 """
423 423
424 424 if not self.__withOverlapping:
425 425 data = self.__buffer
426 426 n = self.__profIndex
427 427
428 428 self.__buffer = 0
429 429 self.__profIndex = 0
430 430
431 431 return data, n
432 432
433 433 #Integration with Overlapping
434 434 data = numpy.sum(self.__buffer, axis=0)
435 435 # print data
436 436 # raise
437 437 n = self.__profIndex
438 438
439 439 return data, n
440 440
441 441 def byProfiles(self, data):
442 442
443 443 self.__dataReady = False
444 444 avgdata = None
445 445 # n = None
446 446 # print data
447 447 # raise
448 448 self.putData(data)
449 449
450 450 if self.__profIndex == self.n:
451 451 avgdata, n = self.pushData()
452 452 self.__dataReady = True
453 453
454 454 return avgdata
455 455
456 456 def byTime(self, data, datatime):
457 457
458 458 self.__dataReady = False
459 459 avgdata = None
460 460 n = None
461 461
462 462 self.putData(data)
463 463
464 464 if (datatime - self.__initime) >= self.__integrationtime:
465 465 avgdata, n = self.pushData()
466 466 self.n = n
467 467 self.__dataReady = True
468 468
469 469 return avgdata
470 470
471 471 def integrateByStride(self, data, datatime):
472 472 # print data
473 473 if self.__profIndex == 0:
474 474 self.__buffer = [[data.copy(), datatime]]
475 475 else:
476 476 self.__buffer.append([data.copy(),datatime])
477 477 self.__profIndex += 1
478 478 self.__dataReady = False
479 479
480 480 if self.__profIndex == self.n * self.stride :
481 481 self.__dataToPutStride = True
482 482 self.__profIndexStride = 0
483 483 self.__profIndex = 0
484 484 self.__bufferStride = []
485 485 for i in range(self.stride):
486 486 current = self.__buffer[i::self.stride]
487 487 data = numpy.sum([t[0] for t in current], axis=0)
488 488 avgdatatime = numpy.average([t[1] for t in current])
489 489 # print data
490 490 self.__bufferStride.append((data, avgdatatime))
491 491
492 492 if self.__dataToPutStride:
493 493 self.__dataReady = True
494 494 self.__profIndexStride += 1
495 495 if self.__profIndexStride == self.stride:
496 496 self.__dataToPutStride = False
497 497 # print self.__bufferStride[self.__profIndexStride - 1]
498 498 # raise
499 499 return self.__bufferStride[self.__profIndexStride - 1]
500
501
500
501
502 502 return None, None
503 503
504 504 def integrate(self, data, datatime=None):
505 505
506 506 if self.__initime == None:
507 507 self.__initime = datatime
508 508
509 509 if self.__byTime:
510 510 avgdata = self.byTime(data, datatime)
511 511 else:
512 512 avgdata = self.byProfiles(data)
513 513
514 514
515 515 self.__lastdatatime = datatime
516 516
517 517 if avgdata is None:
518 518 return None, None
519 519
520 520 avgdatatime = self.__initime
521 521
522 522 deltatime = datatime - self.__lastdatatime
523
523
524 524 if not self.__withOverlapping:
525 525 self.__initime = datatime
526 526 else:
527 527 self.__initime += deltatime
528 528
529 529 return avgdata, avgdatatime
530 530
531 531 def integrateByBlock(self, dataOut):
532 532
533 533 times = int(dataOut.data.shape[1]/self.n)
534 534 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
535 535
536 536 id_min = 0
537 537 id_max = self.n
538 538
539 539 for i in range(times):
540 540 junk = dataOut.data[:,id_min:id_max,:]
541 541 avgdata[:,i,:] = junk.sum(axis=1)
542 542 id_min += self.n
543 543 id_max += self.n
544 544
545 545 timeInterval = dataOut.ippSeconds*self.n
546 546 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
547 547 self.__dataReady = True
548 548 return avgdata, avgdatatime
549
549
550 550 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
551 551
552 552 if not self.isConfig:
553 553 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
554 554 self.isConfig = True
555 555
556 556 if dataOut.flagDataAsBlock:
557 557 """
558 558 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
559 559 """
560 560 avgdata, avgdatatime = self.integrateByBlock(dataOut)
561 561 dataOut.nProfiles = int(dataOut.nProfiles / self.n)
562 562 else:
563 if stride is None:
563 if stride is None:
564 564 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
565 565 else:
566 566 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
567 567
568
568
569 569 # dataOut.timeInterval *= n
570 570 dataOut.flagNoData = True
571 571
572 572 if self.__dataReady:
573 573 dataOut.data = avgdata
574 574 dataOut.nCohInt *= self.n
575 575 dataOut.utctime = avgdatatime
576 576 # print avgdata, avgdatatime
577 577 # raise
578 578 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
579 579 dataOut.flagNoData = False
580 580 return dataOut
581 581
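CohInt adds complex voltage profiles coherently. Without overlapping it keeps a running sum and resets it every n profiles; with overlapping it holds the last n profiles in a rolling buffer so every new profile yields an updated sum. The overlapping case, reduced to a few lines (illustrative sketch, not the class API):

import numpy
from collections import deque

def overlapping_cohint(profiles, n):
    # yield the coherent sum of the last n profiles, once n have been seen
    window = deque(maxlen=n)
    for profile in profiles:
        window.append(profile)
        if len(window) == n:
            yield numpy.sum(list(window), axis=0)

profiles = [numpy.full(4, k, dtype=complex) for k in range(6)]
for integrated in overlapping_cohint(profiles, n=3):
    print(integrated[0])    # (3+0j), (6+0j), (9+0j), (12+0j)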
582 582 class Decoder(Operation):
583 583
584 584 isConfig = False
585 585 __profIndex = 0
586 586
587 587 code = None
588 588
589 589 nCode = None
590 590 nBaud = None
591 591
592 592 def __init__(self, **kwargs):
593 593
594 594 Operation.__init__(self, **kwargs)
595 595
596 596 self.times = None
597 597 self.osamp = None
598 598 # self.__setValues = False
599 599 self.isConfig = False
600 600 self.setupReq = False
601 601 def setup(self, code, osamp, dataOut):
602 602
603 603 self.__profIndex = 0
604 604
605 605 self.code = code
606 606
607 607 self.nCode = len(code)
608 608 self.nBaud = len(code[0])
609
610 609 if (osamp != None) and (osamp >1):
611 610 self.osamp = osamp
612 611 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
613 612 self.nBaud = self.nBaud*self.osamp
614 613
615 614 self.__nChannels = dataOut.nChannels
616 615 self.__nProfiles = dataOut.nProfiles
617 616 self.__nHeis = dataOut.nHeights
618 617
619 618 if self.__nHeis < self.nBaud:
620 619 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
621 620
622 621 #Frequency
623 622 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
624
623
625 624 __codeBuffer[:,0:self.nBaud] = self.code
626 625
627 626 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
628 627
629 628 if dataOut.flagDataAsBlock:
630 629
631 630 self.ndatadec = self.__nHeis #- self.nBaud + 1
632 631
633 632 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
634 633
635 634 else:
636 635
637 636 #Time
638 637 self.ndatadec = self.__nHeis #- self.nBaud + 1
639 638
640 639 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
641 640
642 641 def __convolutionInFreq(self, data):
643 642
644 643 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
645 644
646 645 fft_data = numpy.fft.fft(data, axis=1)
647 646
648 647 conv = fft_data*fft_code
649 648
650 649 data = numpy.fft.ifft(conv,axis=1)
651 650
652 651 return data
653 652
654 653 def __convolutionInFreqOpt(self, data):
655 654
656 655 raise NotImplementedError
657 656
658 657 def __convolutionInTime(self, data):
659 658
660 659 code = self.code[self.__profIndex]
661 660 for i in range(self.__nChannels):
662 661 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
663 662
664 663 return self.datadecTime
665 664
666 665 def __convolutionByBlockInTime(self, data):
667 666
668 667 repetitions = int(self.__nProfiles / self.nCode)
669 668 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
670 669 junk = junk.flatten()
671 670 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
672 671 profilesList = range(self.__nProfiles)
673
674 for i in range(self.__nChannels):
675 for j in profilesList:
676 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
677 return self.datadecTime
672
673 for i in range(self.__nChannels):
674 for j in profilesList:
675 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
676 return self.datadecTime
678 677
679 678 def __convolutionByBlockInFreq(self, data):
680 679
681 680 raise NotImplementedError("Decoder by frequency for blocks not implemented")
682 681
683 682
684 683 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
685 684
686 685 fft_data = numpy.fft.fft(data, axis=2)
687 686
688 687 conv = fft_data*fft_code
689 688
690 689 data = numpy.fft.ifft(conv,axis=2)
691 690
692 691 return data
693 692
694
693
695 694 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
696 695
697 696 if dataOut.flagDecodeData:
698 697 print("This data is already decoded, recoding again ...")
699 698
700 699 if not self.isConfig:
701 700
702 701 if code is None:
703 702 if dataOut.code is None:
704 703 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
705 704
706 705 code = dataOut.code
707 706 else:
708 707 code = numpy.array(code).reshape(nCode,nBaud)
709 708 self.setup(code, osamp, dataOut)
710 709
711 710 self.isConfig = True
712 711
713 712 if mode == 3:
714 713 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
715 714
716 715 if times != None:
717 716 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
718 717
719 718 if self.code is None:
720 719 print("Fail decoding: Code is not defined.")
721 720 return
722 721
723 722 self.__nProfiles = dataOut.nProfiles
724 723 datadec = None
725
724
726 725 if mode == 3:
727 726 mode = 0
728 727
729 728 if dataOut.flagDataAsBlock:
730 729 """
731 730 Decoding when data have been read as block,
732 731 """
733 732
734 733 if mode == 0:
735 734 datadec = self.__convolutionByBlockInTime(dataOut.data)
736 735 if mode == 1:
737 736 datadec = self.__convolutionByBlockInFreq(dataOut.data)
738 737 else:
739 738 """
740 739 Decoding when data have been read profile by profile
741 740 """
742 741 if mode == 0:
743 742 datadec = self.__convolutionInTime(dataOut.data)
744 743
745 744 if mode == 1:
746 745 datadec = self.__convolutionInFreq(dataOut.data)
747 746
748 747 if mode == 2:
749 748 datadec = self.__convolutionInFreqOpt(dataOut.data)
750 749
751 750 if datadec is None:
752 751 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
753 752
754 753 dataOut.code = self.code
755 754 dataOut.nCode = self.nCode
756 755 dataOut.nBaud = self.nBaud
757 756
758 757 dataOut.data = datadec
759 758
760 759 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
761 760
762 761 dataOut.flagDecodeData = True #assume the data is now decoded
763 762
764 763 if self.__profIndex == self.nCode-1:
765 764 self.__profIndex = 0
766 765 return dataOut
767 766
768 767 self.__profIndex += 1
769 768
770 769 return dataOut
771 770 # dataOut.flagDeflipData = True #assume the data still carries the flip
772 771
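Both decoding paths in Decoder compute the cross-correlation of each received profile with the transmitted code: __convolutionInTime calls numpy.correlate directly, while __convolutionInFreq multiplies the data FFT by the conjugated FFT of the zero-padded code and transforms back. A self-contained check that the two give the same pulse-compressed profile (illustrative sketch):

import numpy

code = numpy.array([1, 1, -1, 1], dtype=complex)            # 4-baud code
data = numpy.zeros(16, dtype=complex)
data[5:9] = code                                             # echo starting at height 5

# time domain: keep the last nHeights lags, as the Decoder does
dec_time = numpy.correlate(data, code, mode='full')[len(code) - 1:]

# frequency domain: conjugated FFT of the zero-padded code
code_padded = numpy.zeros_like(data)
code_padded[:len(code)] = code
dec_freq = numpy.fft.ifft(numpy.fft.fft(data) * numpy.conj(numpy.fft.fft(code_padded)))

print(numpy.allclose(dec_time, dec_freq))                    # True
print(int(numpy.argmax(numpy.abs(dec_time))))                # 5: compressed peak at the echo range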
773 772
774 773 class ProfileConcat(Operation):
775 774
776 775 isConfig = False
777 776 buffer = None
778 777
779 778 def __init__(self, **kwargs):
780 779
781 780 Operation.__init__(self, **kwargs)
782 781 self.profileIndex = 0
783 782
784 783 def reset(self):
785 784 self.buffer = numpy.zeros_like(self.buffer)
786 785 self.start_index = 0
787 786 self.times = 1
788 787
789 788 def setup(self, data, m, n=1):
790 789 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
791 790 self.nHeights = data.shape[1]#.nHeights
792 791 self.start_index = 0
793 792 self.times = 1
794 793
795 794 def concat(self, data):
796 795
797 796 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
798 797 self.start_index = self.start_index + self.nHeights
799 798
800 799 def run(self, dataOut, m):
801 800 dataOut.flagNoData = True
802 801
803 802 if not self.isConfig:
804 803 self.setup(dataOut.data, m, 1)
805 804 self.isConfig = True
806 805
807 806 if dataOut.flagDataAsBlock:
809 808 raise ValueError("ProfileConcat can only be used when voltage data have been read profile by profile (getBlock = False)")
809 808
810 809 else:
811 810 self.concat(dataOut.data)
812 811 self.times += 1
813 812 if self.times > m:
814 813 dataOut.data = self.buffer
815 814 self.reset()
816 815 dataOut.flagNoData = False
817 816 # more header and dataOut attributes should be updated here, for example the heights
818 817 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
819 818 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
820 819 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
821 820 dataOut.ippSeconds *= m
822 821 return dataOut
823 822
824 823 class ProfileSelector(Operation):
825 824
826 825 profileIndex = None
827 826 # Total number of profiles
828 827 nProfiles = None
829 828
830 829 def __init__(self, **kwargs):
831 830
832 831 Operation.__init__(self, **kwargs)
833 832 self.profileIndex = 0
834 833
835 834 def incProfileIndex(self):
836 835
837 836 self.profileIndex += 1
838 837
839 838 if self.profileIndex >= self.nProfiles:
840 839 self.profileIndex = 0
841 840
842 841 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
843 842
844 843 if profileIndex < minIndex:
845 844 return False
846 845
847 846 if profileIndex > maxIndex:
848 847 return False
849 848
850 849 return True
851 850
852 851 def isThisProfileInList(self, profileIndex, profileList):
853 852
854 853 if profileIndex not in profileList:
855 854 return False
856 855
857 856 return True
858 857
859 858 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
860 859
861 860 """
862 861 ProfileSelector:
863 862
864 863 Inputs:
865 864 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
866 865
867 866 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
868 867
869 868 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
870 869
871 870 """
872 871
873 872 if rangeList is not None:
874 873 if type(rangeList[0]) not in (tuple, list):
875 874 rangeList = [rangeList]
876 875
877 876 dataOut.flagNoData = True
878 877
879 878 if dataOut.flagDataAsBlock:
880 879 """
881 880 data dimension = [nChannels, nProfiles, nHeis]
882 881 """
883 882 if profileList != None:
884 883 dataOut.data = dataOut.data[:,profileList,:]
885 884
886 885 if profileRangeList != None:
887 886 minIndex = profileRangeList[0]
888 887 maxIndex = profileRangeList[1]
889 888 profileList = list(range(minIndex, maxIndex+1))
890 889
891 890 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
892 891
893 892 if rangeList != None:
894 893
895 894 profileList = []
896 895
897 896 for thisRange in rangeList:
898 897 minIndex = thisRange[0]
899 898 maxIndex = thisRange[1]
900 899
901 900 profileList.extend(list(range(minIndex, maxIndex+1)))
902 901
903 902 dataOut.data = dataOut.data[:,profileList,:]
904 903
905 904 dataOut.nProfiles = len(profileList)
906 905 dataOut.profileIndex = dataOut.nProfiles - 1
907 906 dataOut.flagNoData = False
908 907
909 908 return dataOut
910 909
911 910 """
912 911 data dimension = [nChannels, nHeis]
913 912 """
914 913
915 914 if profileList != None:
916 915
917 916 if self.isThisProfileInList(dataOut.profileIndex, profileList):
918 917
919 918 self.nProfiles = len(profileList)
920 919 dataOut.nProfiles = self.nProfiles
921 920 dataOut.profileIndex = self.profileIndex
922 921 dataOut.flagNoData = False
923 922
924 923 self.incProfileIndex()
925 924 return dataOut
926 925
927 926 if profileRangeList != None:
928 927
929 928 minIndex = profileRangeList[0]
930 929 maxIndex = profileRangeList[1]
931 930
932 931 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
933 932
934 933 self.nProfiles = maxIndex - minIndex + 1
935 934 dataOut.nProfiles = self.nProfiles
936 935 dataOut.profileIndex = self.profileIndex
937 936 dataOut.flagNoData = False
938 937
939 938 self.incProfileIndex()
940 939 return dataOut
941 940
942 941 if rangeList != None:
943 942
944 943 nProfiles = 0
945 944
946 945 for thisRange in rangeList:
947 946 minIndex = thisRange[0]
948 947 maxIndex = thisRange[1]
949 948
950 949 nProfiles += maxIndex - minIndex + 1
951 950
952 951 for thisRange in rangeList:
953 952
954 953 minIndex = thisRange[0]
955 954 maxIndex = thisRange[1]
956 955
957 956 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
958 957
959 958 self.nProfiles = nProfiles
960 959 dataOut.nProfiles = self.nProfiles
961 960 dataOut.profileIndex = self.profileIndex
962 961 dataOut.flagNoData = False
963 962
964 963 self.incProfileIndex()
965 964
966 965 break
967 966
968 967 return dataOut
969 968
970 969
971 970 if beam != None: #beam is only for AMISR data
972 971 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
973 972 dataOut.flagNoData = False
974 973 dataOut.profileIndex = self.profileIndex
975 974
976 975 self.incProfileIndex()
977 976
978 977 return dataOut
979 978
980 979 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
981 980
982 981 #return False
983 982 return dataOut
984 983
985 984 class Reshaper(Operation):
986 985
987 986 def __init__(self, **kwargs):
988 987
989 988 Operation.__init__(self, **kwargs)
990 989
991 990 self.__buffer = None
992 991 self.__nitems = 0
993 992
994 993 def __appendProfile(self, dataOut, nTxs):
995 994
996 995 if self.__buffer is None:
997 996 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
998 997 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
999 998
1000 999 ini = dataOut.nHeights * self.__nitems
1001 1000 end = ini + dataOut.nHeights
1002 1001
1003 1002 self.__buffer[:, ini:end] = dataOut.data
1004 1003
1005 1004 self.__nitems += 1
1006 1005
1007 1006 return int(self.__nitems*nTxs)
1008 1007
1009 1008 def __getBuffer(self):
1010 1009
1011 1010 if self.__nitems == int(1./self.__nTxs):
1012 1011
1013 1012 self.__nitems = 0
1014 1013
1015 1014 return self.__buffer.copy()
1016 1015
1017 1016 return None
1018 1017
1019 1018 def __checkInputs(self, dataOut, shape, nTxs):
1020 1019
1021 1020 if shape is None and nTxs is None:
1022 1021 raise ValueError("Reshaper: shape or nTxs factor should be defined")
1023 1022
1024 1023 if nTxs:
1025 1024 if nTxs < 0:
1026 1025 raise ValueError("nTxs should be greater than 0")
1027 1026
1028 1027 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1029 1028 raise ValueError("nProfiles= %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1030 1029
1031 1030 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1032 1031
1033 1032 return shape, nTxs
1034 1033
1035 1034 if len(shape) != 2 and len(shape) != 3:
1036 1035 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1037 1036
1038 1037 if len(shape) == 2:
1039 1038 shape_tuple = [dataOut.nChannels]
1040 1039 shape_tuple.extend(shape)
1041 1040 else:
1042 1041 shape_tuple = list(shape)
1043 1042
1044 1043 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1045 1044
1046 1045 return shape_tuple, nTxs
1047 1046
1048 1047 def run(self, dataOut, shape=None, nTxs=None):
1049 1048
1050 1049 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1051 1050
1052 1051 dataOut.flagNoData = True
1053 1052 profileIndex = None
1054 1053
1055 1054 if dataOut.flagDataAsBlock:
1056 1055
1057 1056 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1058 1057 dataOut.flagNoData = False
1059 1058
1060 1059 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1061 1060
1062 1061 else:
1063 1062
1064 1063 if self.__nTxs < 1:
1065 1064
1066 1065 self.__appendProfile(dataOut, self.__nTxs)
1067 1066 new_data = self.__getBuffer()
1068 1067
1069 1068 if new_data is not None:
1070 1069 dataOut.data = new_data
1071 1070 dataOut.flagNoData = False
1072 1071
1073 1072 profileIndex = dataOut.profileIndex*nTxs
1074 1073
1075 1074 else:
1076 1075 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1077 1076
1078 1077 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1079 1078
1080 1079 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1081 1080
1082 1081 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1083 1082
1084 1083 dataOut.profileIndex = profileIndex
1085 1084
1086 1085 dataOut.ippSeconds /= self.__nTxs
1087 1086
1088 1087 return dataOut
1089 1088
1090 1089 class SplitProfiles(Operation):
1091 1090
1092 1091 def __init__(self, **kwargs):
1093 1092
1094 1093 Operation.__init__(self, **kwargs)
1095 1094
1096 1095 def run(self, dataOut, n):
1097 1096
1098 1097 dataOut.flagNoData = True
1099 1098 profileIndex = None
1100 1099
1101 1100 if dataOut.flagDataAsBlock:
1102 1101
1103 1102 #nchannels, nprofiles, nsamples
1104 1103 shape = dataOut.data.shape
1105 1104
1106 1105 if shape[2] % n != 0:
1107 1106 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
1108
1107
1109 1108 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1110
1109
1111 1110 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1112 1111 dataOut.flagNoData = False
1113 1112
1114 1113 profileIndex = int(dataOut.nProfiles/n) - 1
1115 1114
1116 1115 else:
1117 1116
1118 1117 raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1119 1118
1120 1119 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1121 1120
1122 1121 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1123 1122
1124 1123 dataOut.nProfiles = int(dataOut.nProfiles*n)
1125 1124
1126 1125 dataOut.profileIndex = profileIndex
1127 1126
1128 1127 dataOut.ippSeconds /= n
1129 1128
1130 1129 return dataOut
1131 1130
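SplitProfiles and CombineProfiles are inverse reshapes of the same voltage block: splitting by n turns (nChannels, nProfiles, nHeights) into (nChannels, nProfiles*n, nHeights/n), and combining undoes it. A quick round-trip check (illustrative sketch):

import numpy

block = numpy.arange(2 * 4 * 6).reshape(2, 4, 6)              # channels, profiles, heights

split = block.reshape(2, 4 * 3, 6 // 3)                        # n = 3: more profiles, fewer heights
combined = split.reshape(2, 4, 6)                              # combining with the same n

print(split.shape, combined.shape)                             # (2, 12, 2) (2, 4, 6)
print(numpy.array_equal(block, combined))                      # True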
1132 1131 class CombineProfiles(Operation):
1133 1132 def __init__(self, **kwargs):
1134 1133
1135 1134 Operation.__init__(self, **kwargs)
1136 1135
1137 1136 self.__remData = None
1138 1137 self.__profileIndex = 0
1139 1138
1140 1139 def run(self, dataOut, n):
1141 1140
1142 1141 dataOut.flagNoData = True
1143 1142 profileIndex = None
1144 1143
1145 1144 if dataOut.flagDataAsBlock:
1146 1145
1147 1146 #nchannels, nprofiles, nsamples
1148 1147 shape = dataOut.data.shape
1149 1148 new_shape = shape[0], int(shape[1]/n), shape[2]*n
1150 1149
1151 1150 if shape[1] % n != 0:
1152 1151 raise ValueError("Could not combine the data, n=%d has to be a divisor of %d" %(n, shape[1]))
1153 1152
1154 1153 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1155 1154 dataOut.flagNoData = False
1156 1155
1157 1156 profileIndex = int(dataOut.nProfiles*n) - 1
1158 1157
1159 1158 else:
1160 1159
1161 1160 #nchannels, nsamples
1162 1161 if self.__remData is None:
1163 1162 newData = dataOut.data
1164 1163 else:
1165 1164 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1166 1165
1167 1166 self.__profileIndex += 1
1168 1167
1169 1168 if self.__profileIndex < n:
1170 1169 self.__remData = newData
1171 1170 #continue
1172 1171 return
1173 1172
1174 1173 self.__profileIndex = 0
1175 1174 self.__remData = None
1176 1175
1177 1176 dataOut.data = newData
1178 1177 dataOut.flagNoData = False
1179 1178
1180 1179 profileIndex = dataOut.profileIndex/n
1181 1180
1182 1181
1183 1182 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1184 1183
1185 1184 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1186 1185
1187 1186 dataOut.nProfiles = int(dataOut.nProfiles/n)
1188 1187
1189 1188 dataOut.profileIndex = profileIndex
1190 1189
1191 1190 dataOut.ippSeconds *= n
1192 1191
1193 1192 return dataOut
1194 1193 # import collections
1195 1194 # from scipy.stats import mode
1196 1195 #
1197 1196 # class Synchronize(Operation):
1198 1197 #
1199 1198 # isConfig = False
1200 1199 # __profIndex = 0
1201 1200 #
1202 1201 # def __init__(self, **kwargs):
1203 1202 #
1204 1203 # Operation.__init__(self, **kwargs)
1205 1204 # # self.isConfig = False
1206 1205 # self.__powBuffer = None
1207 1206 # self.__startIndex = 0
1208 1207 # self.__pulseFound = False
1209 1208 #
1210 1209 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1211 1210 #
1212 1211 # #Read data
1213 1212 #
1214 1213 # powerdB = dataOut.getPower(channel = channel)
1215 1214 # noisedB = dataOut.getNoise(channel = channel)[0]
1216 1215 #
1217 1216 # self.__powBuffer.extend(powerdB.flatten())
1218 1217 #
1219 1218 # dataArray = numpy.array(self.__powBuffer)
1220 1219 #
1221 1220 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1222 1221 #
1223 1222 # maxValue = numpy.nanmax(filteredPower)
1224 1223 #
1225 1224 # if maxValue < noisedB + 10:
1226 1225 # #No se encuentra ningun pulso de transmision
1227 1226 # return None
1228 1227 #
1229 1228 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1230 1229 #
1231 1230 # if len(maxValuesIndex) < 2:
1232 1231 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1233 1232 # return None
1234 1233 #
1235 1234 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1236 1235 #
1237 1236 # #Seleccionar solo valores con un espaciamiento de nSamples
1238 1237 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1239 1238 #
1240 1239 # if len(pulseIndex) < 2:
1241 1240 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1242 1241 # return None
1243 1242 #
1244 1243 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1245 1244 #
1246 1245 # #remover senales que se distancien menos de 10 unidades o muestras
1247 1246 # #(No deberian existir IPP menor a 10 unidades)
1248 1247 #
1249 1248 # realIndex = numpy.where(spacing > 10 )[0]
1250 1249 #
1251 1250 # if len(realIndex) < 2:
1252 1251 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1253 1252 # return None
1254 1253 #
1255 1254 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1256 1255 # realPulseIndex = pulseIndex[realIndex]
1257 1256 #
1258 1257 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1259 1258 #
1260 1259 # print "IPP = %d samples" %period
1261 1260 #
1262 1261 # self.__newNSamples = dataOut.nHeights #int(period)
1263 1262 # self.__startIndex = int(realPulseIndex[0])
1264 1263 #
1265 1264 # return 1
1266 1265 #
1267 1266 #
1268 1267 # def setup(self, nSamples, nChannels, buffer_size = 4):
1269 1268 #
1270 1269 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1271 1270 # maxlen = buffer_size*nSamples)
1272 1271 #
1273 1272 # bufferList = []
1274 1273 #
1275 1274 # for i in range(nChannels):
1276 1275 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1277 1276 # maxlen = buffer_size*nSamples)
1278 1277 #
1279 1278 # bufferList.append(bufferByChannel)
1280 1279 #
1281 1280 # self.__nSamples = nSamples
1282 1281 # self.__nChannels = nChannels
1283 1282 # self.__bufferList = bufferList
1284 1283 #
1285 1284 # def run(self, dataOut, channel = 0):
1286 1285 #
1287 1286 # if not self.isConfig:
1288 1287 # nSamples = dataOut.nHeights
1289 1288 # nChannels = dataOut.nChannels
1290 1289 # self.setup(nSamples, nChannels)
1291 1290 # self.isConfig = True
1292 1291 #
1293 1292 # #Append new data to internal buffer
1294 1293 # for thisChannel in range(self.__nChannels):
1295 1294 # bufferByChannel = self.__bufferList[thisChannel]
1296 1295 # bufferByChannel.extend(dataOut.data[thisChannel])
1297 1296 #
1298 1297 # if self.__pulseFound:
1299 1298 # self.__startIndex -= self.__nSamples
1300 1299 #
1301 1300 # #Finding Tx Pulse
1302 1301 # if not self.__pulseFound:
1303 1302 # indexFound = self.__findTxPulse(dataOut, channel)
1304 1303 #
1305 1304 # if indexFound == None:
1306 1305 # dataOut.flagNoData = True
1307 1306 # return
1308 1307 #
1309 1308 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1310 1309 # self.__pulseFound = True
1311 1310 # self.__startIndex = indexFound
1312 1311 #
1313 1312 # #If pulse was found ...
1314 1313 # for thisChannel in range(self.__nChannels):
1315 1314 # bufferByChannel = self.__bufferList[thisChannel]
1316 1315 # #print self.__startIndex
1317 1316 # x = numpy.array(bufferByChannel)
1318 1317 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1319 1318 #
1320 1319 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1321 1320 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1322 1321 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1323 1322 #
1324 1323 # dataOut.data = self.__arrayBuffer
1325 1324 #
1326 1325 # self.__startIndex += self.__newNSamples
1327 1326 #
1328 1327 # return
@@ -1,1008 +1,1008
1 1 '''
2 2 @author: Daniel Suarez
3 3 '''
4 4 import os
5 5 import glob
6 6 import ftplib
7 7
8 8 try:
9 9 import paramiko
10 10 import scp
11 11 except:
12 12 print("You should install paramiko and scp libraries \nif you want to use SSH protocol to upload files to the server")
13 13
14 14 import time
15 15
16 16 import threading
17 17 Thread = threading.Thread
18 18
19 19 # try:
20 20 # from gevent import sleep
21 21 # except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
25 25
26 26 class Remote(Thread):
27 27 """
28 28 Remote is a parent class used to define the behaviour of the FTP and SSH classes. These classes are
29 29 used to upload or download files remotely.
30 30
31 31 Non-standard Python modules used:
32 32 None
33 33
34 34 Written by:
35 35 "Miguel Urco":mailto:miguel.urco@jro.igp.gob.pe Jun. 03, 2015
36 36 """
37 37
38 38 server = None
39 39 username = None
40 40 password = None
41 41 remotefolder = None
42 42
43 43 period = 60
44 44 fileList = []
45 45 bussy = False
46 46
47 47 def __init__(self, server, username, password, remotefolder, period=60):
48 48
49 49 Thread.__init__(self)
50 50
51 51 self.setDaemon(True)
52 52
53 53 self.status = 0
54 54
55 55 self.__server = server
56 56 self.__username = username
57 57 self.__password = password
58 58 self.__remotefolder = remotefolder
59 59
60 60 self.period = period
61 61
62 62 self.fileList = []
63 63 self.bussy = False
64 64
65 65 self.stopFlag = False
66 66
67 67 print("[Remote Server] Opening server: %s" %self.__server)
68 68 if self.open(self.__server, self.__username, self.__password, self.__remotefolder):
69 69 print("[Remote Server] %s server was opened successfully" %self.__server)
70 70
71 71 self.close()
72 72
73 73 self.mutex = threading.Lock()
74 74
75 75 def stop(self):
76 76
77 77 self.stopFlag = True
78 78 self.join(10)
79 79
80 80 def open(self):
81 81 """
82 82 Connect to server and create a connection class (FTP or SSH) to remote server.
83 83 """
84 84 raise NotImplementedError("Implement this method in child class")
85 85
86 86 def close(self):
87 87 """
88 88 Close connection to server
89 89 """
90 90 raise NotImplementedError("Implement this method in child class")
91 91
92 92 def mkdir(self, remotefolder):
93 93 """
94 94 Create a folder remotely
95 95 """
96 96 raise NotImplementedError("Implement this method in child class")
97 97
98 98 def cd(self, remotefolder):
99 99 """
100 100 Change working directory in remote server
101 101 """
102 102 raise NotImplementedError("Implement this method in child class")
103 103
104 104 def download(self, filename, localfolder=None):
105 105 """
106 106 Download a file from server to local host
107 107 """
108 108 raise NotImplementedError("Implement this method in child class")
109 109
110 110 def sendFile(self, fullfilename):
111 111 """
112 112 sendFile method is used to upload a local file to the current directory in remote server
113 113
114 114 Inputs:
115 115 fullfilename - full path name of local file to store in remote directory
116 116
117 117 Returns:
118 118 0 in error case else 1
119 119 """
120 120 raise NotImplementedError("Implement this method in child class")
121 121
122 122 def upload(self, fullfilename, remotefolder=None):
123 123 """
124 124 upload method is used to upload a local file to remote directory. This method changes
125 125 working directory before sending a file.
126 126
127 127 Inputs:
128 128 fullfilename - full path name of local file to store in remote directory
129 129
130 130 remotefolder - remote directory
131 131
132 132 Returns:
133 133 0 in error case else 1
134 134 """
135 135 print("[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder))
136 136
137 137 if not self.status:
138 138 return 0
139 139
140 140 if remotefolder == None:
141 141 remotefolder = self.remotefolder
142 142
143 143 if not self.cd(remotefolder):
144 144 return 0
145 145
146 146 if not self.sendFile(fullfilename):
147 147 print("[Remote Server] Error uploading file %s" %fullfilename)
148 148 return 0
149 149
150 150 print("[Remote Server] upload finished successfully")
151 151
152 152 return 1
153 153
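The upload() sequence above (cd to the remote folder, then sendFile) maps onto a handful of standard-library ftplib calls. A standalone sketch of the same steps; the server, credentials and file names below are placeholders, not values used by the chain:

import ftplib
import os

def ftp_upload_sketch(server, username, password, remotefolder, fullfilename):
    # connect, change directory and store one file; returns 1 on success, 0 on error
    try:
        ftp = ftplib.FTP(server)
        ftp.login(username, password)
        ftp.cwd(remotefolder)
        with open(fullfilename, 'rb') as fp:
            ftp.storbinary('STOR ' + os.path.basename(fullfilename), fp)
        ftp.quit()
        return 1
    except ftplib.all_errors as e:
        print('[FTP Server]: upload failed:', e)
        return 0

# ftp_upload_sketch('ftp.example.org', 'user', 'secret', '/incoming', './data.hdf5')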
154 154 def delete(self, filename):
155 155 """
156 156 Remove a file from remote server
157 157 """
158 158 pass
159 159
160 160 def updateFileList(self, fileList):
161 161 """
162 162 Update the list of files to be uploaded to the remote server
163 163 """
164 164
165 165 if fileList == self.fileList:
166 166 return 0
167 167
168 168 self.mutex.acquire()
169 169 # init = time.time()
170 #
170 #
171 171 # while(self.bussy):
172 172 # sleep(0.1)
173 173 # if time.time() - init > 2*self.period:
174 174 # return 0
175
175
176 176 self.fileList = fileList
177 177 self.mutex.release()
178 178 return 1
179 179
180 180 def run(self):
181 181
182 182 if not self.status:
183 183 print("Finishing FTP service")
184 184 return
185 185
186 186 if not self.cd(self.remotefolder):
187 187 raise ValueError("Could not access the new remote directory: %s" %self.remotefolder)
188 188
189 189 while True:
190 190
191 191 for i in range(self.period):
192 192 if self.stopFlag:
193 193 break
194 194 sleep(1)
195 195
196 196 if self.stopFlag:
197 197 break
198
198
199 199 # self.bussy = True
200 200 self.mutex.acquire()
201 201
202 202 print("[Remote Server] Opening %s" %self.__server)
203 203 if not self.open(self.__server, self.__username, self.__password, self.__remotefolder):
204 204 self.mutex.release()
205 205 continue
206 206
207 207 for thisFile in self.fileList:
208 208 self.upload(thisFile, self.remotefolder)
209 209
210 210 print("[Remote Server] Closing %s" %self.__server)
211 211 self.close()
212 212
213 213 self.mutex.release()
214 214 # self.bussy = False
215 215
216 216 print("[Remote Server] Thread stopped successfully")
217 217
218 218 class FTPClient(Remote):
219 219
220 220 __ftpClientObj = None
221 221
222 222 def __init__(self, server, username, password, remotefolder, period=60):
223 223 """
224 224 """
225 225 Remote.__init__(self, server, username, password, remotefolder, period)
226 226
227 227 def open(self, server, username, password, remotefolder):
228 228
229 229 """
230 230 This method is used to set FTP parameters and establish a connection to remote server
231 231
232 232 Inputs:
233 233 server - remote server IP Address
234 234
235 235 username - remote server Username
236 236
237 237 password - remote server password
238 238
239 239 remotefolder - remote server current working directory
240 240
241 241 Return:
242 242 Boolean - Returns 1 if a connection has been established, 0 otherwise
243 243
244 244 Affects:
245 245 self.status - in case of error or fail connection this parameter is set to 0 else 1
246 246
247 247 """
248 248
249 249 if server == None:
250 250 raise ValueError("FTP server should be defined")
251 251
252 252 if username == None:
253 253 raise ValueError("FTP username should be defined")
254 254
255 255 if password == None:
256 256 raise ValueError("FTP password should be defined")
257 257
258 258 if remotefolder == None:
259 259 raise ValueError("FTP remote folder should be defined")
260 260
261 261 try:
262 262 ftpClientObj = ftplib.FTP(server)
263 263 except ftplib.all_errors as e:
264 264 print("[FTP Server]: FTP server connection fail: %s" %server)
265 265 print("[FTP Server]:", e)
266 266 self.status = 0
267 267 return 0
268 268
269 269 try:
270 270 ftpClientObj.login(username, password)
271 271 except ftplib.all_errors:
272 272 print("[FTP Server]: FTP username or password are incorrect")
273 273 self.status = 0
274 274 return 0
275 275
276 276 if remotefolder == None:
277 277 remotefolder = ftpClientObj.pwd()
278 278 else:
279 279 try:
280 280 ftpClientObj.cwd(remotefolder)
281 281 except ftplib.all_errors:
282 282 print("[FTP Server]: FTP remote folder is invalid: %s" %remotefolder)
283 283 remotefolder = ftpClientObj.pwd()
284 284
285 285 self.server = server
286 286 self.username = username
287 287 self.password = password
288 288 self.remotefolder = remotefolder
289 289 self.__ftpClientObj = ftpClientObj
290 290 self.status = 1
291 291
292 292 return 1
293 293
294 294 def close(self):
295 295 """
296 296 Close connection to remote server
297 297 """
298 298 if not self.status:
299 299 return 0
300 300
301 301 self.__ftpClientObj.close()
302 302
303 303 def mkdir(self, remotefolder):
304 304 """
305 305 mkdir is used to make a new directory in remote server
306 306
307 307 Input:
308 308 remotefolder - directory name
309 309
310 310 Return:
311 311 0 in error case else 1
312 312 """
313 313 if not self.status:
314 314 return 0
315 315
316 316 try:
317 317 self.__ftpClientObj.mkd(remotefolder)
318 318 except ftplib.all_errors:
319 319 print("[FTP Server]: Error creating remote folder: %s" %remotefolder)
320 320 return 0
321 321
322 322 return 1
323 323
324 324 def cd(self, remotefolder):
325 325 """
326 326 cd is used to change remote working directory on server
327 327
328 328 Input:
329 329 remotefolder - current working directory
330 330
331 331 Affects:
332 332 self.remotefolder
333 333
334 334 Return:
335 335 0 in case of error else 1
336 336 """
337 337 if not self.status:
338 338 return 0
339 339
340 340 if remotefolder == self.remotefolder:
341 341 return 1
342 342
343 343 try:
344 344 self.__ftpClientObj.cwd(remotefolder)
345 345 except ftplib.all_errors:
346 346 print('[FTP Server]: Error changing to %s' %remotefolder)
347 347 print('[FTP Server]: Trying to create remote folder')
348 348
349 349 if not self.mkdir(remotefolder):
350 350 print('[FTP Server]: Remote folder could not be created')
351 351 return 0
352 352
353 353 try:
354 354 self.__ftpClientObj.cwd(remotefolder)
355 355 except ftplib.all_errors:
356 356 return 0
357 357
358 358 self.remotefolder = remotefolder
359 359
360 360 return 1
361 361
362 362 def sendFile(self, fullfilename):
363 363
364 364 if not self.status:
365 365 return 0
366 366
367 367 fp = open(fullfilename, 'rb')
368 368
369 369 filename = os.path.basename(fullfilename)
370 370
371 371 command = "STOR %s" %filename
372 372
373 373 try:
374 374 self.__ftpClientObj.storbinary(command, fp)
375 375 except ftplib.all_errors as e:
376 376 print("[FTP Server]:", e)
377 377 return 0
378 378
379 379 try:
380 380 self.__ftpClientObj.sendcmd('SITE CHMOD 755 ' + filename)
381 381 except ftplib.all_errors as e:
382 382 print("[FTP Server]:", e)
383 383
384 384 fp.close()
385 385
386 386 return 1
387 387
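For reference, a hedged sketch of driving FTPClient directly instead of through the thread loop; the server, credentials and paths are placeholders:

ftp = FTPClient('ftp.example.com', 'user', 'secret', '/remote/plots')
if ftp.open('ftp.example.com', 'user', 'secret', '/remote/plots'):   # returns 1 on success
    ftp.cd('/remote/plots/2018')          # creates the folder when it does not exist yet
    ftp.sendFile('/local/plots/rti_20180101_000000.png')
    ftp.close()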
388 388 class SSHClient(Remote):
389 389
390 390 __sshClientObj = None
391 391 __scpClientObj = None
392 392
393 393 def __init__(self, server, username, password, remotefolder, period=60):
394 394 """
395 395 """
396 396 Remote.__init__(self, server, username, password, remotefolder, period)
397 397
398 398 def open(self, server, username, password, remotefolder, port=22):
399 399
400 400 """
401 401 This method is used to set SSH parameters and establish a connection to a remote server
402
402
403 403 Inputs:
404 server - remote server IP Address
405
406 username - remote server Username
407
404 server - remote server IP Address
405
406 username - remote server Username
407
408 408 password - remote server password
409
409
410 410 remotefolder - remote server current working directory
411
411
412 412 Return: 0 on error, otherwise 1
413
414 Affects:
413
414 Affects:
415 415 self.status - in case of error or fail connection this parameter is set to 0 else 1
416 416
417 417 """
418 418 import socket
419 419
420 420 if server == None:
421 421 raise ValueError("SSH server should be defined")
422 422
423 423 if username == None:
424 424 raise ValueError("SSH username should be defined")
425 425
426 426 if password == None:
427 427 raise ValueError("SSH password should be defined")
428 428
429 429 if remotefolder == None:
430 430 raise ValueError("SSH remote folder should be defined")
431 431
432 432 sshClientObj = paramiko.SSHClient()
433 433
434 434 sshClientObj.load_system_host_keys()
435 435 sshClientObj.set_missing_host_key_policy(paramiko.WarningPolicy())
436 436
437 437 self.status = 0
438 438 try:
439 439 sshClientObj.connect(server, username=username, password=password, port=port)
440 440 except paramiko.AuthenticationException as e:
441 441 # print "SSH username or password are incorrect: %s"
442 442 print("[SSH Server]:", e)
443 443 return 0
444 444 except paramiko.SSHException as e:
445 445 print("[SSH Server]:", e)
446 446 return 0
447 447 except socket.error as e:
448 448 self.status = 0
449 449 print("[SSH Server]:", e)
450 450 return 0
451 451
452 452 self.status = 1
453 453 scpClientObj = scp.SCPClient(sshClientObj.get_transport(), socket_timeout=30)
454 454
455 455 if remotefolder == None:
456 456 remotefolder = self.pwd()
457 457
458 458 self.server = server
459 459 self.username = username
460 460 self.password = password
461 461 self.__sshClientObj = sshClientObj
462 462 self.__scpClientObj = scpClientObj
463 463 self.status = 1
464 464
465 465 if not self.cd(remotefolder):
466 466 raise ValueError("[SSH Server]: Could not access remote folder: %s" %remotefolder)
467 467 return 0
468 468
469 469 self.remotefolder = remotefolder
470 470
471 471 return 1
472 472
473 473 def close(self):
474 474 """
475 475 Close connection to remote server
476 476 """
477 477 if not self.status:
478 478 return 0
479 479
480 480 self.__scpClientObj.close()
481 481 self.__sshClientObj.close()
482 482
483 483 def __execute(self, command):
484 484 """
485 485 __execute is used to run a command on the remote server
486
486
487 487 Input:
488 488 command - Example: 'ls -l'
489
489
490 490 Return:
491 491 0 in error case else 1
492 492 """
493 493 if not self.status:
494 494 return 0
495 495
496 496 stdin, stdout, stderr = self.__sshClientObj.exec_command(command)
497 497
498 498 result = stderr.readlines()
499 499 if len(result) > 1:
500 500 return 0
501 501
502 502 result = stdout.readlines()
503 503 if len(result) > 1:
504 504 return result[0][:-1]
505 505
506 506 return 1
507 507
508 508 def mkdir(self, remotefolder):
509 509 """
510 510 mkdir is used to make a new directory in remote server
511
511
512 512 Input:
513 513 remotefolder - directory name
514
514
515 515 Return:
516 516 0 in error case else 1
517 517 """
518 518
519 519 command = 'mkdir %s' %remotefolder
520 520
521 521 return self.__execute(command)
522 522
523 523 def pwd(self):
524 524
525 525 command = 'pwd'
526 526
527 527 return self.__execute(command)
528 528
529 529 def cd(self, remotefolder):
530 530 """
531 531 cd is used to change remote working directory on server
532
532
533 533 Input:
534 534 remotefolder - current working directory
535
535
536 536 Affects:
537 537 self.remotefolder
538
539 Return:
538
539 Return:
540 540 0 in case of error else 1
541 541 """
542 542 if not self.status:
543 543 return 0
544 544
545 545 if remotefolder == self.remotefolder:
546 546 return 1
547 547
548 548 chk_command = "cd %s; pwd" %remotefolder
549 549 mkdir_command = "mkdir %s" %remotefolder
550 550
551 551 if not self.__execute(chk_command):
552 552 if not self.__execute(mkdir_command):
553 553 self.remotefolder = None
554 554 return 0
555 555
556 556 self.remotefolder = remotefolder
557 557
558 558 return 1
559 559
560 560 def sendFile(self, fullfilename):
561 561
562 562 if not self.status:
563 563 return 0
564 564
565 565 try:
566 566 self.__scpClientObj.put(fullfilename, remote_path=self.remotefolder)
567 567 except scp.ScpError as e:
568 568 print("[SSH Server]", str(e))
569 569 return 0
570 570
571 571 remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1])
572 572 command = 'chmod 775 %s' %remotefile
573 573
574 574 return self.__execute(command)
575 575
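The SSH variant follows the same contract through paramiko/scp; a minimal sketch with placeholder host and credentials (both third-party modules must be installed):

ssh = SSHClient('host.example.com', 'user', 'secret', '/remote/plots', period=60)
if ssh.open('host.example.com', 'user', 'secret', '/remote/plots', port=22):   # returns 1 on success
    ssh.sendFile('/local/plots/rti_20180101_000000.png')    # scp put followed by chmod 775
    ssh.close()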
576 576 class SendToServer(ProcessingUnit):
577 577
578 578 def __init__(self, **kwargs):
579 579
580 580 ProcessingUnit.__init__(self, **kwargs)
581 581
582 582 self.isConfig = False
583 self.clientObj = None
584
583 self.clientObj = None
584
585 585 def setup(self, server, username, password, remotefolder, localfolder, ext='.png', period=60, protocol='ftp', **kwargs):
586 586
587 587 self.clientObj = None
588 588 self.localfolder = localfolder
589 589 self.ext = ext
590 590 self.period = period
591 591
592 592 if str.lower(protocol) == 'ftp':
593 593 self.clientObj = FTPClient(server, username, password, remotefolder, period)
594 594
595 595 if str.lower(protocol) == 'ssh':
596 596 self.clientObj = SSHClient(server, username, password, remotefolder, period)
597 597
598 598 if not self.clientObj:
599 599 raise ValueError("%s has been chosen as remote access protocol but it is not valid" %protocol)
600 600
601 601 self.clientObj.start()
602 602
603 603 def findFiles(self):
604 604
605 605 if not type(self.localfolder) == list:
606 606 folderList = [self.localfolder]
607 607 else:
608 608 folderList = self.localfolder
609 609
610 610 #Remove duplicate items
611 611 folderList = list(set(folderList))
612 612
613 613 fullfilenameList = []
614 614
615 615 for thisFolder in folderList:
616 616
617 617 print("[Remote Server]: Searching files on %s" %thisFolder)
618 618
619 619 filenameList = glob.glob1(thisFolder, '*%s' %self.ext)
620 620
621 621 if len(filenameList) < 1:
622 622
623 623 continue
624 624
625 625 for thisFile in filenameList:
626 626 fullfilename = os.path.join(thisFolder, thisFile)
627 627
628 628 if fullfilename in fullfilenameList:
629 629 continue
630 630
631 631 #Only files modified in the last 30 minutes are considered
632 632 if os.path.getmtime(fullfilename) < time.time() - 30*60:
633 633 continue
634 634
635 635 fullfilenameList.append(fullfilename)
636 636
637 637 return fullfilenameList
638 638
639 639 def run(self, **kwargs):
640 640 if not self.isConfig:
641 641 self.init = time.time()
642 642 self.setup(**kwargs)
643 643 self.isConfig = True
644
644
645 645 if not self.clientObj.is_alive():
646 646 print("[Remote Server]: Restarting connection ")
647 647 self.setup(**kwargs)
648
648
649 649 if time.time() - self.init >= self.period:
650 650 fullfilenameList = self.findFiles()
651 651
652 652 if self.clientObj.updateFileList(fullfilenameList):
653 653 print("[Remote Server]: Sending the next files ", str(fullfilenameList))
654 654 self.init = time.time()
655 655
656 656 def close(self):
657 657 print("[Remote Server] Stopping thread")
658 658 self.clientObj.stop()
659 659
660 660
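A hedged sketch of exercising SendToServer by hand, outside a schainpy project file and assuming ProcessingUnit can be instantiated without controller arguments; all parameter values below are placeholders:

sender = SendToServer()
# run() configures the client thread on the first call and re-scans localfolder every `period` seconds.
sender.run(server='ftp.example.com', username='user', password='secret',
           remotefolder='/remote/plots', localfolder='/local/plots',
           ext='.png', period=60, protocol='ftp')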
661 661 class FTP(object):
662 662 """
663 663 FTP is a public class that provides custom File Transfer Protocol operations on top of the Python "ftplib" module
664 664
665 665 Non-standard Python modules used: None
666 666
667 667 Written by "Daniel Suarez":mailto:daniel.suarez@jro.igp.gob.pe Oct. 26, 2010
668 668 """
669 669
670 670 def __init__(self,server = None, username=None, password=None, remotefolder=None):
671 671 """
672 672 This method is used to set FTP parameters and establish a connection to the remote server
673 673
674 674 Inputs:
675 675 server - remote server IP Address
676 676
677 677 username - remote server Username
678 678
679 679 password - remote server password
680 680
681 681 remotefolder - remote server current working directory
682 682
683 683 Return: void
684 684
685 685 Affects:
686 686 self.status - set to 1 on error or failed connection, otherwise 0
687 687
688 688 self.folderList - sub-folder list of remote folder
689 689
690 690 self.fileList - file list of remote folder
691 691
692 692
693 693 """
694 694
695 695 if ((server == None) and (username==None) and (password==None) and (remotefolder==None)):
696 696 server, username, password, remotefolder = self.parmsByDefault()
697 697
698 698 self.server = server
699 699 self.username = username
700 700 self.password = password
701 701 self.remotefolder = remotefolder
702 702 self.file = None
703 703 self.ftp = None
704 704 self.status = 0
705 705
706 706 try:
707 707 self.ftp = ftplib.FTP(self.server)
708 708 self.ftp.login(self.username,self.password)
709 self.ftp.cwd(self.remotefolder)
709 self.ftp.cwd(self.remotefolder)
710 710 # print 'Connect to FTP Server: Successfully'
711
711
712 712 except ftplib.all_errors:
713 713 print('Error FTP Service')
714 714 self.status = 1
715 715 return
716 716
717 717
718 718
719 719 self.dirList = []
720 720
721 721 try:
722 722 self.dirList = self.ftp.nlst()
723 723
724 724 except ftplib.error_perm as resp:
725 725 if str(resp) == "550 No files found":
726 726 print("no files in this directory")
727 727 self.status = 1
728 728 return
729 729
730 730 except ftplib.all_errors:
731 731 print('Error Displaying Dir-Files')
732 732 self.status = 1
733 733 return
734 734
735 735 self.fileList = []
736 736 self.folderList = []
737 737 #only for test
738 738 for f in self.dirList:
739 739 name, ext = os.path.splitext(f)
740 740 if ext != '':
741 741 self.fileList.append(f)
742 742 # print 'filename: %s - size: %d'%(f,self.ftp.size(f))
743 743
744 744 def parmsByDefault(self):
745 745 server = 'jro-app.igp.gob.pe'
746 746 username = 'wmaster'
747 747 password = 'mst2010vhf'
748 748 remotefolder = '/home/wmaster/graficos'
749 749
750 750 return server, username, password, remotefolder
751 751
752 752
753 753 def mkd(self,dirname):
754 754 """
755 755 mkd is used to make directory in remote server
756 756
757 757 Input:
758 758 dirname - directory name
759 759
760 760 Return:
761 761 1 in error case else 0
762 762 """
763 763 try:
764 764 self.ftp.mkd(dirname)
765 765 except:
766 766 print('Error creating remote folder:%s'%dirname)
767 767 return 1
768 768
769 769 return 0
770 770
771 771
772 772 def delete(self,filename):
773 773 """
774 774 delete is used to delete file in current working directory of remote server
775 775
776 776 Input:
777 777 filename - filename to delete in remote folder
778 778
779 779 Return:
780 780 1 in error case else 0
781 781 """
782 782
783 783 try:
784 784 self.ftp.delete(filename)
785 785 except:
786 786 print('Error deleting remote file:%s'%filename)
787 787 return 1
788 788
789 789 return 0
790 790
791 791 def download(self,filename,localfolder):
792 792 """
793 793 download is used to download a file from the remote folder into a local folder
794 794
795 795 Inputs:
796 796 filename - filename to download
797 797
798 798 localfolder - local directory where the downloaded file is stored
799 799
800 800 Returns:
801 801 self.status - 1 in error case else 0
802 802 """
803 803
804 804 self.status = 0
805 805
806 806
807 807 if filename not in self.fileList:
808 808 print('filename: %s does not exist'%filename)
809 809 self.status = 1
810 810 return self.status
811 811
812 812 newfilename = os.path.join(localfolder,filename)
813 813
814 814 self.file = open(newfilename, 'wb')
815 815
816 816 try:
817 817 print('Download: ' + filename)
818 818 self.ftp.retrbinary('RETR ' + filename, self.__handleDownload)
819 819 print('Download Complete')
820 820 except ftplib.all_errors:
821 821 print('Error Downloading ' + filename)
822 822 self.status = 1
823 823 return self.status
824 824
825 825 self.file.close()
826 826
827 827 return self.status
828 828
829 829
830 830 def __handleDownload(self,block):
831 831 """
832 832 __handleDownload is used to handle writing file
833 833 """
834 834 self.file.write(block)
835 835
836 836
837 837 def upload(self,filename,remotefolder=None):
838 838 """
839 839 upload is used to upload a local file to a remote directory
840 840
841 841 Inputs:
842 842 filename - full path name of local file to store in remote directory
843 843
844 844 remotefolder - remote directory
845 845
846 846 Returns:
847 847 self.status - 1 in error case else 0
848 848 """
849 849
850 850 if remotefolder == None:
851 851 remotefolder = self.remotefolder
852 852
853 853 self.status = 0
854 854
855 855 try:
856 856 self.ftp.cwd(remotefolder)
857 857
858 858 self.file = open(filename, 'rb')
859 859
860 860 (head, tail) = os.path.split(filename)
861 861
862 862 command = "STOR " + tail
863 863
864 864 print('Uploading: ' + tail)
865 865 self.ftp.storbinary(command, self.file)
866 866 print('Upload Completed')
867 867
868 868 except ftplib.all_errors:
869 869 print('Error Uploading ' + tail)
870 870 self.status = 1
871 871 return self.status
872 872
873 873 self.file.close()
874 874
875 875 #back to initial directory in __init__()
876 876 self.ftp.cwd(self.remotefolder)
877 877
878 878 return self.status
879 879
880 880
881 881 def dir(self,remotefolder):
882 882 """
883 883 dir is used to change working directory of remote server and get folder and file list
884 884
885 885 Input:
886 886 remotefolder - current working directory
887 887
888 888 Affects:
889 889 self.fileList - file list of working directory
890 890
891 891 Return:
892 892 infoList - list of (filename, size in bytes) tuples
893 893
894 894 self.folderList - folder list
895 895 """
896 896
897 897 self.remotefolder = remotefolder
898 898 print('Change to ' + self.remotefolder)
899 899 try:
900 900 self.ftp.cwd(remotefolder)
901 901 except ftplib.all_errors:
902 902 print('Error Change to ' + self.remotefolder)
903 903 infoList = None
904 904 self.folderList = None
905 905 return infoList,self.folderList
906 906
907 907 self.dirList = []
908 908
909 909 try:
910 910 self.dirList = self.ftp.nlst()
911 911
912 912 except ftplib.error_perm as resp:
913 913 if str(resp) == "550 No files found":
914 914 print("no files in this directory")
915 915 infoList = None
916 916 self.folderList = None
917 917 return infoList,self.folderList
918 918 except ftplib.all_errors:
919 919 print('Error Displaying Dir-Files')
920 920 infoList = None
921 921 self.folderList = None
922 922 return infoList,self.folderList
923 923
924 924 infoList = []
925 925 self.fileList = []
926 926 self.folderList = []
927 927 for f in self.dirList:
928 928 name,ext = os.path.splitext(f)
929 929 if ext != '':
930 930 self.fileList.append(f)
931 931 value = (f,self.ftp.size(f))
932 932 infoList.append(value)
933 933
934 934 if ext == '':
935 935 self.folderList.append(f)
936 936
937 937 return infoList,self.folderList
938 938
939 939
940 940 def close(self):
941 941 """
942 942 close is used to close and end FTP connection
943 943
944 944 Inputs: None
945 945
946 946 Return: void
947 947
948 948 """
949 949 self.ftp.close()
950 950
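A brief sketch of the legacy FTP helper above; note its inverted status convention (0 means success) and that the values below are placeholders (omitting all arguments falls back to parmsByDefault()):

ftp = FTP('ftp.example.com', 'user', 'secret', '/remote/graficos')
if ftp.status == 0:                                   # 0: connection established
    infoList, folderList = ftp.dir('/remote/graficos/2018')
    ftp.download('rti_20180101_000000.png', '/local/plots')
    ftp.upload('/local/plots/other_plot.png', '/remote/graficos/2018')
    ftp.close()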
951 951 class SendByFTP(Operation):
952 952
953 953 def __init__(self, **kwargs):
954 954 Operation.__init__(self, **kwargs)
955 955 self.status = 1
956 956 self.counter = 0
957 957
958 958 def error_print(self, err):
959 959 
960 960 print(err, 'Error FTP')
961 961 print("don't worry, the program is still running...")
962 962
963 963 def worker_ftp(self, server, username, password, remotefolder, filenameList):
964 964
965 965 self.ftpClientObj = FTP(server, username, password, remotefolder)
966 966 for filename in filenameList:
967 967 self.ftpClientObj.upload(filename)
968 968 self.ftpClientObj.close()
969 969
970 970 def ftp_thread(self, server, username, password, remotefolder):
971 971 if not(self.status):
972 972 return
973 973
974 974 import multiprocessing
975 975
976 976 p = multiprocessing.Process(target=self.worker_ftp, args=(server, username, password, remotefolder, self.filenameList,))
977 977 p.start()
978 978
979 979 p.join(3)
980 980
981 981 if p.is_alive():
982 982 p.terminate()
983 983 p.join()
984 984 print('killing ftp process...')
985 985 self.status = 0
986 986 return
987 987
988 988 self.status = 1
989 989 return
990 990
991 991 def filterByExt(self, ext, localfolder):
992 992 fnameList = glob.glob1(localfolder,ext)
993 993 self.filenameList = [os.path.join(localfolder,x) for x in fnameList]
994 994
995 995 if len(self.filenameList) == 0:
996 996 self.status = 0
997 997
998 998 def run(self, dataOut, ext, localfolder, remotefolder, server, username, password, period=1):
999 999
1000 1000 self.counter += 1
1001 1001 if self.counter >= period:
1002 1002 self.filterByExt(ext, localfolder)
1003 1003
1004 1004 self.ftp_thread(server, username, password, remotefolder)
1005 1005
1006 1006 self.counter = 0
1007 1007
1008 self.status = 1
\ No newline at end of file
1008 self.status = 1
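SendByFTP.ftp_thread above bounds each upload with a 3-second join and terminates the worker if it is still alive; a self-contained sketch of that timeout pattern with a placeholder worker:

import time
import multiprocessing

def slow_upload():                     # placeholder for worker_ftp
    time.sleep(10)                     # simulate a hung transfer

if __name__ == '__main__':
    p = multiprocessing.Process(target=slow_upload)
    p.start()
    p.join(3)                          # wait at most 3 seconds
    if p.is_alive():                   # still running: give up, as ftp_thread does
        p.terminate()
        p.join()
        print('killing ftp process...')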
@@ -1,326 +1,326
1 1 '''
2 2 @author: Juan C. Espinoza
3 3 '''
4 4
5 5 import os
6 6 import glob
7 7 import time
8 8 import json
9 9 import numpy
10 10 import zmq
11 11 import datetime
12 12 import ftplib
13 13 from functools import wraps
14 14 from threading import Thread
15 15 from multiprocessing import Process
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit, MPDecorator
18 18 from schainpy.model.data.jrodata import JROData
19 19 from schainpy.utils import log
20 20
21 21 MAXNUMX = 500
22 22 MAXNUMY = 500
23 23
24 24 PLOT_CODES = {
25 25 'rti': 0, # Range time intensity (RTI).
26 26 'spc': 1, # Spectra (and Cross-spectra) information.
27 27 'cspc': 2, # Cross-Correlation information.
28 28 'coh': 3, # Coherence map.
29 29 'base': 4, # Base lines graphic.
30 30 'row': 5, # Row Spectra.
31 31 'total': 6, # Total Power.
32 32 'drift': 7, # Drifts graphics.
33 33 'height': 8, # Height profile.
34 34 'phase': 9, # Signal Phase.
35 35 'power': 16,
36 36 'noise': 17,
37 37 'beacon': 18,
38 38 'wind': 22,
39 39 'skymap': 23,
40 40 'Unknown': 24,
41 41 'V-E': 25, # PIP Velocity.
42 42 'Z-E': 26, # PIP Reflectivity.
43 43 'V-A': 27, # RHI Velocity.
44 44 'Z-A': 28, # RHI Reflectivity.
45 45 }
46 46
47 47 def get_plot_code(s):
48 48 label = s.split('_')[0]
49 49 codes = [key for key in PLOT_CODES if key in label]
50 if codes:
50 if codes:
51 51 return PLOT_CODES[codes[0]]
52 52 else:
53 53 return 24
54 54
55 55 def decimate(z, MAXNUMY):
56 56 dy = int(len(z[0])/MAXNUMY) + 1
57 57
58 58 return z[::, ::dy]
59 59
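A quick worked example of the two helpers above; the array shape and filenames are made up for illustration:

import numpy
z = numpy.zeros((4, 1200))                         # 4 profiles x 1200 heights
# dy = int(1200/500) + 1 = 3, so every third column is kept
print(decimate(z, MAXNUMY).shape)                  # -> (4, 400)
print(get_plot_code('rti_20180101_000000.png'))    # 'rti' is a known label -> 0
print(get_plot_code('foo_20180101_000000.png'))    # unknown label -> 24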
60 60
61 61 class PublishData(Operation):
62 62 '''
63 63 Operation to send data over zmq.
64 64 '''
65 65
66 66 __attrs__ = ['host', 'port', 'delay', 'verbose']
67 67
68 68 def setup(self, server='zmq.pipe', delay=0, verbose=True, **kwargs):
69 69 self.counter = 0
70 70 self.delay = kwargs.get('delay', 0)
71 71 self.cnt = 0
72 self.verbose = verbose
72 self.verbose = verbose
73 73 context = zmq.Context()
74 74 self.zmq_socket = context.socket(zmq.PUSH)
75 75 server = kwargs.get('server', 'zmq.pipe')
76 76
77 77 if 'tcp://' in server:
78 78 address = server
79 79 else:
80 80 address = 'ipc:///tmp/%s' % server
81 81
82 82 self.zmq_socket.connect(address)
83 83 time.sleep(1)
84 84
85 85
86 86 def publish_data(self):
87 87 self.dataOut.finished = False
88
88
89 89 if self.verbose:
90 90 log.log(
91 91 'Sending {} - {}'.format(self.dataOut.type, self.dataOut.datatime),
92 92 self.name
93 93 )
94 94 self.zmq_socket.send_pyobj(self.dataOut)
95 95
96 96 def run(self, dataOut, **kwargs):
97 97 self.dataOut = dataOut
98 98 if not self.isConfig:
99 99 self.setup(**kwargs)
100 100 self.isConfig = True
101 101
102 102 self.publish_data()
103 103 time.sleep(self.delay)
104 104
105 105 def close(self):
106
106
107 107 self.dataOut.finished = True
108 108 self.zmq_socket.send_pyobj(self.dataOut)
109 109 time.sleep(0.1)
110 110 self.zmq_socket.close()
111
111
112 112
113 113 class ReceiverData(ProcessingUnit):
114 114
115 115 __attrs__ = ['server']
116 116
117 117 def __init__(self, **kwargs):
118 118
119 119 ProcessingUnit.__init__(self, **kwargs)
120 120
121 121 self.isConfig = False
122 122 server = kwargs.get('server', 'zmq.pipe')
123 123 if 'tcp://' in server:
124 124 address = server
125 125 else:
126 126 address = 'ipc:///tmp/%s' % server
127 127
128 128 self.address = address
129 129 self.dataOut = JROData()
130 130
131 131 def setup(self):
132 132
133 133 self.context = zmq.Context()
134 134 self.receiver = self.context.socket(zmq.PULL)
135 135 self.receiver.bind(self.address)
136 136 time.sleep(0.5)
137 137 log.success('ReceiverData from {}'.format(self.address))
138 138
139 139
140 140 def run(self):
141 141
142 142 if not self.isConfig:
143 143 self.setup()
144 144 self.isConfig = True
145 145
146 146 self.dataOut = self.receiver.recv_pyobj()
147 147 log.log('{} - {}'.format(self.dataOut.type,
148 148 self.dataOut.datatime.ctime(),),
149 149 'Receiving')
150 150
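PublishData and ReceiverData form a zmq PUSH/PULL pair over an ipc (or tcp) endpoint; a standalone sketch of that transport, independent of the schainpy classes (the ipc path assumes a Unix-like system):

import time
import zmq

context = zmq.Context()

pull = context.socket(zmq.PULL)        # receiver side binds, as ReceiverData.setup() does
pull.bind('ipc:///tmp/zmq.pipe')

push = context.socket(zmq.PUSH)        # sender side connects, as PublishData.setup() does
push.connect('ipc:///tmp/zmq.pipe')
time.sleep(0.5)                        # give the sockets time to connect

push.send_pyobj({'type': 'Spectra'})   # stands in for a dataOut object
print(pull.recv_pyobj())               # -> {'type': 'Spectra'}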
151 151 @MPDecorator
152 152 class SendToFTP(Operation):
153 153
154 154 '''
155 155 Operation to send data over FTP.
156 156 patternX = 'local, remote, ext, period, exp_code, sub_exp_code'
157 157 '''
158 158
159 159 __attrs__ = ['server', 'username', 'password', 'timeout', 'patternX']
160 160
161 161 def __init__(self):
162 162 '''
163 163 '''
164 164 Operation.__init__(self)
165 165 self.ftp = None
166 166 self.ready = False
167 167
168 168 def setup(self, server, username, password, timeout, **kwargs):
169 169 '''
170 170 '''
171 171
172 172 self.server = server
173 173 self.username = username
174 174 self.password = password
175 175 self.timeout = timeout
176 176 self.patterns = []
177 177 self.times = []
178 178 self.latest = []
179 179 for arg, value in kwargs.items():
180 180 if 'pattern' in arg:
181 181 self.patterns.append(value)
182 182 self.times.append(time.time())
183 183 self.latest.append('')
184 184
185 185 def connect(self):
186 186 '''
187 187 '''
188 188
189 189 log.log('Connecting to ftp://{}'.format(self.server), self.name)
190 190 try:
191 191 self.ftp = ftplib.FTP(self.server, timeout=self.timeout)
192 192 except ftplib.all_errors:
193 193 log.error('Server connection fail: {}'.format(self.server), self.name)
194 194 if self.ftp is not None:
195 195 self.ftp.close()
196 196 self.ftp = None
197 197 self.ready = False
198 return
198 return
199 199
200 200 try:
201 201 self.ftp.login(self.username, self.password)
202 202 except ftplib.all_errors:
203 203 log.error('The given username and/or password is incorrect', self.name)
204 204 if self.ftp is not None:
205 205 self.ftp.close()
206 206 self.ftp = None
207 207 self.ready = False
208 208 return
209 209
210 210 log.success('Connection success', self.name)
211 211 self.ready = True
212 212 return
213 213
214 214 def check(self):
215 215
216 216 try:
217 217 self.ftp.voidcmd("NOOP")
218 218 except:
219 219 log.warning('Connection lost... trying to reconnect', self.name)
220 220 if self.ftp is not None:
221 221 self.ftp.close()
222 222 self.ftp = None
223 223 self.connect()
224 224
225 225 def find_files(self, path, ext):
226 226
227 227 files = glob.glob1(path, '*{}'.format(ext))
228 228 files.sort()
229 229 if files:
230 230 return files[-1]
231 231 return None
232 232
233 233 def getftpname(self, filename, exp_code, sub_exp_code):
234 234
235 235 thisDatetime = datetime.datetime.strptime(filename.split('_')[1], '%Y%m%d')
236 236 YEAR_STR = '%4.4d' % thisDatetime.timetuple().tm_year
237 237 DOY_STR = '%3.3d' % thisDatetime.timetuple().tm_yday
238 238 exp_code = '%3.3d' % exp_code
239 239 sub_exp_code = '%2.2d' % sub_exp_code
240 240 plot_code = '%2.2d' % get_plot_code(filename)
241 241 name = YEAR_STR + DOY_STR + '00' + exp_code + sub_exp_code + plot_code + '00.png'
242 242 return name
243 243
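# Worked example (hypothetical filename and codes): getftpname('rti_20180101_000000.png', 100, 1)
#   YEAR_STR='2018', DOY_STR='001', exp_code='100', sub_exp_code='01', plot_code='00' ('rti' -> 0)
#   -> '2018' + '001' + '00' + '100' + '01' + '00' + '00.png' = '201800100100010000.png'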
244 244 def upload(self, src, dst):
245 245
246 246 log.log('Uploading {} -> {} '.format(
247 src.split('/')[-1], dst.split('/')[-1]),
248 self.name,
247 src.split('/')[-1], dst.split('/')[-1]),
248 self.name,
249 249 nl=False
250 250 )
251 251
252 252 fp = open(src, 'rb')
253 253 command = 'STOR {}'.format(dst)
254 254
255 255 try:
256 256 self.ftp.storbinary(command, fp, blocksize=1024)
257 257 except Exception as e:
258 258 log.error('{}'.format(e), self.name)
259 259 if self.ftp is not None:
260 260 self.ftp.close()
261 261 self.ftp = None
262 262 return 0
263 263
264 264 try:
265 265 self.ftp.sendcmd('SITE CHMOD 755 {}'.format(dst))
266 266 except Exception as e:
267 267 log.error('{}'.format(e), self.name)
268 268 if self.ftp is not None:
269 269 self.ftp.close()
270 270 self.ftp = None
271 271 return 0
272 272
273 273 fp.close()
274 274 log.success('OK', tag='')
275 275 return 1
276
276
277 277 def send_files(self):
278 278
279 279 for x, pattern in enumerate(self.patterns):
280 280 local, remote, ext, period, exp_code, sub_exp_code = pattern
281 281 if time.time()-self.times[x] >= int(period):
282 282 srcname = self.find_files(local, ext)
283 283 src = os.path.join(local, srcname)
284 284 if os.path.getmtime(src) < time.time() - 30*60:
285 log.warning('Skipping old file {}'.format(srcname))
285 log.warning('Skipping old file {}'.format(srcname))
286 286 continue
287 287
288 288 if srcname is None or srcname == self.latest[x]:
289 log.warning('File alreday uploaded {}'.format(srcname))
289 log.warning('File alreday uploaded {}'.format(srcname))
290 290 continue
291
291
292 292 if 'png' in ext:
293 293 dstname = self.getftpname(srcname, int(exp_code), int(sub_exp_code))
294 294 else:
295 dstname = srcname
296
295 dstname = srcname
296
297 297 dst = os.path.join(remote, dstname)
298 298
299 299 if self.upload(src, dst):
300 300 self.times[x] = time.time()
301 301 self.latest[x] = srcname
302 else:
302 else:
303 303 self.ready = False
304 break
304 break
305 305
306 306 def run(self, dataOut, server, username, password, timeout=10, **kwargs):
307 307
308 308 if not self.isConfig:
309 309 self.setup(
310 server=server,
311 username=username,
312 password=password,
313 timeout=timeout,
310 server=server,
311 username=username,
312 password=password,
313 timeout=timeout,
314 314 **kwargs
315 315 )
316 316 self.isConfig = True
317 317 if not self.ready:
318 318 self.connect()
319 319 if self.ftp is not None:
320 320 self.check()
321 321 self.send_files()
322 322
323 323 def close(self):
324 324
325 325 if self.ftp is not None:
326 326 self.ftp.close()
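A hedged illustration of the pattern arguments that setup() and send_files() above expect; the folders and codes are placeholders, and each patternN value must unpack into six fields:

kwargs = {
    'pattern1': ('/local/rti', '/remote/plots', '.png', 60, 100, 1),
    'pattern2': ('/local/spc', '/remote/plots', '.png', 300, 100, 2),
}
# setup() keeps every keyword whose name contains 'pattern'; send_files() later unpacks
# each one as: local, remote, ext, period, exp_code, sub_exp_code = pattern
patterns = [value for arg, value in kwargs.items() if 'pattern' in arg]
print(patterns)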