cambios xmax xmin (changes to xmax/xmin)
José Chávez - r1004:3c7daf19daec merge

The requested changes are too big and content was truncated; the diff below is partial.

@@ -1,1324 +1,1329
1 1 '''
2 2 Created on September , 2012
3 3 @author:
4 4 '''
5 5
6 6 import sys
7 7 import ast
8 8 import datetime
9 9 import traceback
10 10 import math
11 11 import time
12 12 from multiprocessing import Process, Queue, cpu_count
13 from profilehooks import profile, coverage
13 14
14 15 import schainpy
15 16 import schainpy.admin
16 17
17 18 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
18 19 from xml.dom import minidom
19 20
20 21 from schainpy.model import *
21 22 from time import sleep
22 23
24
25
23 26 def prettify(elem):
24 27 """Return a pretty-printed XML string for the Element.
25 28 """
26 29 rough_string = tostring(elem, 'utf-8')
27 30 reparsed = minidom.parseString(rough_string)
28 31 return reparsed.toprettyxml(indent=" ")
29 32
30 33 def multiSchain(child, nProcess=cpu_count(), startDate=None, endDate=None, by_day=False):
31 34 skip = 0
32 35 cursor = 0
33 36 nFiles = None
34 37 processes = []
35 38 dt1 = datetime.datetime.strptime(startDate, '%Y/%m/%d')
36 39 dt2 = datetime.datetime.strptime(endDate, '%Y/%m/%d')
37 40 days = (dt2 - dt1).days
38 41
39 42 for day in range(days+1):
40 43 skip = 0
41 44 cursor = 0
42 45 q = Queue()
43 46 processes = []
44 47 dt = (dt1 + datetime.timedelta(day)).strftime('%Y/%m/%d')
45 48 firstProcess = Process(target=child, args=(cursor, skip, q, dt))
46 49 firstProcess.start()
47 50 if by_day:
48 51 continue
49 52 nFiles = q.get()
50 53 firstProcess.terminate()
51 54 skip = int(math.ceil(nFiles/nProcess))
52 55 while True:
53 56 processes.append(Process(target=child, args=(cursor, skip, q, dt)))
54 57 processes[cursor].start()
55 58 if nFiles < cursor*skip:
56 59 break
57 60 cursor += 1
58 61
59 62 def beforeExit(exctype, value, trace):
60 63 for process in processes:
61 64 process.terminate()
62 65 process.join()
63 66 print traceback.print_tb(trace)
64 67
65 68 sys.excepthook = beforeExit
66 69
67 70 for process in processes:
68 71 process.join()
69 72 process.terminate()
70 73 time.sleep(3)
71 74
72 75 class ParameterConf():
73 76
74 77 id = None
75 78 name = None
76 79 value = None
77 80 format = None
78 81
79 82 __formated_value = None
80 83
81 84 ELEMENTNAME = 'Parameter'
82 85
83 86 def __init__(self):
84 87
85 88 self.format = 'str'
86 89
87 90 def getElementName(self):
88 91
89 92 return self.ELEMENTNAME
90 93
91 94 def getValue(self):
92 95
93 96 value = self.value
94 97 format = self.format
95 98
96 99 if self.__formated_value != None:
97 100
98 101 return self.__formated_value
99 102
100 103 if format == 'obj':
101 104 return value
102 105
103 106 if format == 'str':
104 107 self.__formated_value = str(value)
105 108 return self.__formated_value
106 109
107 110 if value == '':
108 111 raise ValueError, "%s: This parameter value is empty" %self.name
109 112
110 113 if format == 'list':
111 114 strList = value.split(',')
112 115
113 116 self.__formated_value = strList
114 117
115 118 return self.__formated_value
116 119
117 120 if format == 'intlist':
118 121 """
119 122 Example:
120 123 value = (0,1,2)
121 124 """
122 125
123 126 new_value = ast.literal_eval(value)
124 127
125 128 if type(new_value) not in (tuple, list):
126 129 new_value = [int(new_value)]
127 130
128 131 self.__formated_value = new_value
129 132
130 133 return self.__formated_value
131 134
132 135 if format == 'floatlist':
133 136 """
134 137 Example:
135 138 value = (0.5, 1.4, 2.7)
136 139 """
137 140
138 141 new_value = ast.literal_eval(value)
139 142
140 143 if type(new_value) not in (tuple, list):
141 144 new_value = [float(new_value)]
142 145
143 146 self.__formated_value = new_value
144 147
145 148 return self.__formated_value
146 149
147 150 if format == 'date':
148 151 strList = value.split('/')
149 152 intList = [int(x) for x in strList]
150 153 date = datetime.date(intList[0], intList[1], intList[2])
151 154
152 155 self.__formated_value = date
153 156
154 157 return self.__formated_value
155 158
156 159 if format == 'time':
157 160 strList = value.split(':')
158 161 intList = [int(x) for x in strList]
159 162 time = datetime.time(intList[0], intList[1], intList[2])
160 163
161 164 self.__formated_value = time
162 165
163 166 return self.__formated_value
164 167
165 168 if format == 'pairslist':
166 169 """
167 170 Example:
168 171 value = (0,1),(1,2)
169 172 """
170 173
171 174 new_value = ast.literal_eval(value)
172 175
173 176 if type(new_value) not in (tuple, list):
174 177 raise ValueError, "%s has to be a tuple or list of pairs" %value
175 178
176 179 if type(new_value[0]) not in (tuple, list):
177 180 if len(new_value) != 2:
178 181 raise ValueError, "%s has to be a tuple or list of pairs" %value
179 182 new_value = [new_value]
180 183
181 184 for thisPair in new_value:
182 185 if len(thisPair) != 2:
183 186 raise ValueError, "%s has to be a tuple or list of pairs" %value
184 187
185 188 self.__formated_value = new_value
186 189
187 190 return self.__formated_value
188 191
189 192 if format == 'multilist':
190 193 """
191 194 Example:
192 195 value = (0,1,2),(3,4,5)
193 196 """
194 197 multiList = ast.literal_eval(value)
195 198
196 199 if type(multiList[0]) == int:
197 200 multiList = ast.literal_eval("(" + value + ")")
198 201
199 202 self.__formated_value = multiList
200 203
201 204 return self.__formated_value
202 205
203 206 if format == 'bool':
204 207 value = int(value)
205 208
206 209 if format == 'int':
207 210 value = float(value)
208 211
209 212 format_func = eval(format)
210 213
211 214 self.__formated_value = format_func(value)
212 215
213 216 return self.__formated_value
214 217
215 218 def updateId(self, new_id):
216 219
217 220 self.id = str(new_id)
218 221
219 222 def setup(self, id, name, value, format='str'):
220 223 self.id = str(id)
221 224 self.name = name
222 225 if format == 'obj':
223 226 self.value = value
224 227 else:
225 228 self.value = str(value)
226 229 self.format = str.lower(format)
227 230
228 231 self.getValue()
229 232
230 233 return 1
231 234
232 235 def update(self, name, value, format='str'):
233 236
234 237 self.name = name
235 238 self.value = str(value)
236 239 self.format = format
237 240
238 241 def makeXml(self, opElement):
239 242 if self.name not in ('queue',):
240 243 parmElement = SubElement(opElement, self.ELEMENTNAME)
241 244 parmElement.set('id', str(self.id))
242 245 parmElement.set('name', self.name)
243 246 parmElement.set('value', self.value)
244 247 parmElement.set('format', self.format)
245 248
246 249 def readXml(self, parmElement):
247 250
248 251 self.id = parmElement.get('id')
249 252 self.name = parmElement.get('name')
250 253 self.value = parmElement.get('value')
251 254 self.format = str.lower(parmElement.get('format'))
252 255
253 256 #Compatible with old signal chain version
254 257 if self.format == 'int' and self.name == 'idfigure':
255 258 self.name = 'id'
256 259
257 260 def printattr(self):
258 261
259 262 print "Parameter[%s]: name = %s, value = %s, format = %s" %(self.id, self.name, self.value, self.format)
260 263
261 class OperationConf():
264 class OperationConf():
262 265
263 266 id = None
264 267 name = None
265 268 priority = None
266 269 type = None
267 270
268 271 parmConfObjList = []
269 272
270 273 ELEMENTNAME = 'Operation'
271 274
272 275 def __init__(self):
273 276
274 277 self.id = '0'
275 278 self.name = None
276 279 self.priority = None
277 280 self.type = 'self'
278 281
279 282
280 283 def __getNewId(self):
281 284
282 285 return int(self.id)*10 + len(self.parmConfObjList) + 1
283 286
284 287 def updateId(self, new_id):
285 288
286 289 self.id = str(new_id)
287 290
288 291 n = 1
289 292 for parmObj in self.parmConfObjList:
290 293
291 294 idParm = str(int(new_id)*10 + n)
292 295 parmObj.updateId(idParm)
293 296
294 297 n += 1
295 298
296 299 def getElementName(self):
297 300
298 301 return self.ELEMENTNAME
299 302
300 303 def getParameterObjList(self):
301 304
302 305 return self.parmConfObjList
303 306
304 307 def getParameterObj(self, parameterName):
305 308
306 309 for parmConfObj in self.parmConfObjList:
307 310
308 311 if parmConfObj.name != parameterName:
309 312 continue
310 313
311 314 return parmConfObj
312 315
313 316 return None
314 317
315 318 def getParameterObjfromValue(self, parameterValue):
316 319
317 320 for parmConfObj in self.parmConfObjList:
318 321
319 322 if parmConfObj.getValue() != parameterValue:
320 323 continue
321 324
322 325 return parmConfObj.getValue()
323 326
324 327 return None
325 328
326 329 def getParameterValue(self, parameterName):
327 330
328 331 parameterObj = self.getParameterObj(parameterName)
329 332
330 # if not parameterObj:
331 # return None
333 # if not parameterObj:
334 # return None
332 335
333 336 value = parameterObj.getValue()
334 337
335 338 return value
336 339
337 340
338 341 def getKwargs(self):
339 342
340 343 kwargs = {}
341 344
342 345 for parmConfObj in self.parmConfObjList:
343 346 if self.name == 'run' and parmConfObj.name == 'datatype':
344 347 continue
345 348
346 349 kwargs[parmConfObj.name] = parmConfObj.getValue()
347 350
348 351 return kwargs
349 352
350 353 def setup(self, id, name, priority, type):
351 354
352 355 self.id = str(id)
353 356 self.name = name
354 357 self.type = type
355 358 self.priority = priority
356 359
357 360 self.parmConfObjList = []
358 361
359 362 def removeParameters(self):
360 363
361 364 for obj in self.parmConfObjList:
362 365 del obj
363 366
364 367 self.parmConfObjList = []
365 368
366 369 def addParameter(self, name, value, format='str'):
367 370
368 371 id = self.__getNewId()
369 372
370 373 parmConfObj = ParameterConf()
371 374 if not parmConfObj.setup(id, name, value, format):
372 375 return None
373 376
374 377 self.parmConfObjList.append(parmConfObj)
375 378
376 379 return parmConfObj
377 380
378 381 def changeParameter(self, name, value, format='str'):
379 382
380 383 parmConfObj = self.getParameterObj(name)
381 384 parmConfObj.update(name, value, format)
382 385
383 386 return parmConfObj
384 387
385 388 def makeXml(self, procUnitElement):
386 389
387 390 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
388 391 opElement.set('id', str(self.id))
389 392 opElement.set('name', self.name)
390 393 opElement.set('type', self.type)
391 394 opElement.set('priority', str(self.priority))
392 395
393 396 for parmConfObj in self.parmConfObjList:
394 397 parmConfObj.makeXml(opElement)
395 398
396 399 def readXml(self, opElement):
397 400
398 401 self.id = opElement.get('id')
399 402 self.name = opElement.get('name')
400 403 self.type = opElement.get('type')
401 404 self.priority = opElement.get('priority')
402 405
403 406 #Compatible with old signal chain version
404 407 #Use of 'run' method instead 'init'
405 408 if self.type == 'self' and self.name == 'init':
406 409 self.name = 'run'
407 410
408 411 self.parmConfObjList = []
409 412
410 413 parmElementList = opElement.iter(ParameterConf().getElementName())
411 414
412 415 for parmElement in parmElementList:
413 416 parmConfObj = ParameterConf()
414 417 parmConfObj.readXml(parmElement)
415 418
416 419 #Compatible with old signal chain version
417 420 #If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER
418 421 if self.type != 'self' and self.name == 'Plot':
419 422 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
420 423 self.name = parmConfObj.value
421 424 continue
422 425
423 426 self.parmConfObjList.append(parmConfObj)
424 427
425 428 def printattr(self):
426 429
427 430 print "%s[%s]: name = %s, type = %s, priority = %s" %(self.ELEMENTNAME,
428 431 self.id,
429 432 self.name,
430 433 self.type,
431 434 self.priority)
432 435
433 436 for parmConfObj in self.parmConfObjList:
434 437 parmConfObj.printattr()
435 438
436 439 def createObject(self, plotter_queue=None):
437 440
438 441
439 442 if self.type == 'self':
440 443 raise ValueError, "This operation type cannot be created"
441 444
442 445 if self.type == 'plotter':
443 446 #Plotter(plotter_name)
444 447 if not plotter_queue:
445 448 raise ValueError, "plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)"
446 449
447 450 opObj = Plotter(self.name, plotter_queue)
448 451
449 452 if self.type == 'external' or self.type == 'other':
450 453
451 454 className = eval(self.name)
452 455 kwargs = self.getKwargs()
453 456
454 457 opObj = className(**kwargs)
455 458
456 459 return opObj
457 460
458 461
459 462 class ProcUnitConf():
460 463
461 464 id = None
462 465 name = None
463 466 datatype = None
464 467 inputId = None
465 468 parentId = None
466 469
467 470 opConfObjList = []
468 471
469 472 procUnitObj = None
470 473 opObjList = []
471 474
472 475 ELEMENTNAME = 'ProcUnit'
473 476
474 477 def __init__(self):
475 478
476 479 self.id = None
477 480 self.datatype = None
478 481 self.name = None
479 482 self.inputId = None
480 483
481 484 self.opConfObjList = []
482 485
483 486 self.procUnitObj = None
484 487 self.opObjDict = {}
485 488
486 489 def __getPriority(self):
487 490
488 491 return len(self.opConfObjList)+1
489 492
490 493 def __getNewId(self):
491 494
492 495 return int(self.id)*10 + len(self.opConfObjList) + 1
493 496
494 497 def getElementName(self):
495 498
496 499 return self.ELEMENTNAME
497 500
498 501 def getId(self):
499 502
500 503 return self.id
501 504
502 505 def updateId(self, new_id, parentId=parentId):
503 506
504 507
505 508 new_id = int(parentId)*10 + (int(self.id) % 10)
506 509 new_inputId = int(parentId)*10 + (int(self.inputId) % 10)
507 510
508 511 #If this proc unit has not inputs
509 512 if self.inputId == '0':
510 513 new_inputId = 0
511 514
512 515 n = 1
513 516 for opConfObj in self.opConfObjList:
514 517
515 518 idOp = str(int(new_id)*10 + n)
516 519 opConfObj.updateId(idOp)
517 520
518 521 n += 1
519 522
520 523 self.parentId = str(parentId)
521 524 self.id = str(new_id)
522 525 self.inputId = str(new_inputId)
523 526
524 527
525 528 def getInputId(self):
526 529
527 530 return self.inputId
528 531
529 532 def getOperationObjList(self):
530 533
531 534 return self.opConfObjList
532 535
533 536 def getOperationObj(self, name=None):
534 537
535 538 for opConfObj in self.opConfObjList:
536 539
537 540 if opConfObj.name != name:
538 541 continue
539 542
540 543 return opConfObj
541 544
542 545 return None
543 546
544 547 def getOpObjfromParamValue(self, value=None):
545 548
546 549 for opConfObj in self.opConfObjList:
547 550 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
548 551 continue
549 552 return opConfObj
550 553 return None
551 554
552 555 def getProcUnitObj(self):
553 556
554 557 return self.procUnitObj
555 558
556 559 def setup(self, id, name, datatype, inputId, parentId=None):
557 560
558 561 #Compatible with old signal chain version
559 562 if datatype==None and name==None:
560 563 raise ValueError, "datatype or name should be defined"
561 564
562 565 if name==None:
563 566 if 'Proc' in datatype:
564 567 name = datatype
565 568 else:
566 569 name = '%sProc' %(datatype)
567 570
568 571 if datatype==None:
569 572 datatype = name.replace('Proc','')
570 573
571 574 self.id = str(id)
572 575 self.name = name
573 576 self.datatype = datatype
574 577 self.inputId = inputId
575 578 self.parentId = parentId
576 579
577 580 self.opConfObjList = []
578 581
579 582 self.addOperation(name='run', optype='self')
580 583
581 584 def removeOperations(self):
582 585
583 586 for obj in self.opConfObjList:
584 587 del obj
585 588
586 589 self.opConfObjList = []
587 590 self.addOperation(name='run')
588 591
589 592 def addParameter(self, **kwargs):
590 593 '''
591 594 Add parameters to "run" operation
592 595 '''
593 596 opObj = self.opConfObjList[0]
594 597
595 598 opObj.addParameter(**kwargs)
596 599
597 600 return opObj
598 601
599 602 def addOperation(self, name, optype='self'):
600 603
601 604 id = self.__getNewId()
602 605 priority = self.__getPriority()
603 606
604 607 opConfObj = OperationConf()
605 608 opConfObj.setup(id, name=name, priority=priority, type=optype)
606 609
607 610 self.opConfObjList.append(opConfObj)
608 611
609 612 return opConfObj
610 613
611 614 def makeXml(self, projectElement):
612 615
613 616 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
614 617 procUnitElement.set('id', str(self.id))
615 618 procUnitElement.set('name', self.name)
616 619 procUnitElement.set('datatype', self.datatype)
617 620 procUnitElement.set('inputId', str(self.inputId))
618 621
619 622 for opConfObj in self.opConfObjList:
620 623 opConfObj.makeXml(procUnitElement)
621 624
622 625 def readXml(self, upElement):
623 626
624 627 self.id = upElement.get('id')
625 628 self.name = upElement.get('name')
626 629 self.datatype = upElement.get('datatype')
627 630 self.inputId = upElement.get('inputId')
628 631
629 632 if self.ELEMENTNAME == "ReadUnit":
630 633 self.datatype = self.datatype.replace("Reader", "")
631 634
632 635 if self.ELEMENTNAME == "ProcUnit":
633 636 self.datatype = self.datatype.replace("Proc", "")
634 637
635 638 if self.inputId == 'None':
636 639 self.inputId = '0'
637 640
638 641 self.opConfObjList = []
639 642
640 643 opElementList = upElement.iter(OperationConf().getElementName())
641 644
642 645 for opElement in opElementList:
643 646 opConfObj = OperationConf()
644 647 opConfObj.readXml(opElement)
645 648 self.opConfObjList.append(opConfObj)
646 649
647 650 def printattr(self):
648 651
649 652 print "%s[%s]: name = %s, datatype = %s, inputId = %s" %(self.ELEMENTNAME,
650 653 self.id,
651 654 self.name,
652 655 self.datatype,
653 656 self.inputId)
654 657
655 658 for opConfObj in self.opConfObjList:
656 659 opConfObj.printattr()
657 660
658 661
659 662 def getKwargs(self):
660 663
661 664 opObj = self.opConfObjList[0]
662 665 kwargs = opObj.getKwargs()
663 666
664 667 return kwargs
665 668
666 669 def createObjects(self, plotter_queue=None):
667 670
668 671 className = eval(self.name)
669 672 kwargs = self.getKwargs()
670 673 procUnitObj = className(**kwargs)
671 674
672 675 for opConfObj in self.opConfObjList:
673 676
674 677 if opConfObj.type=='self' and self.name=='run':
675 678 continue
676 679 elif opConfObj.type=='self':
677 680 procUnitObj.addOperationKwargs(opConfObj.id, **opConfObj.getKwargs())
678 681 continue
679 682
680 683 opObj = opConfObj.createObject(plotter_queue)
681 684
682 685 self.opObjDict[opConfObj.id] = opObj
683 686
684 687 procUnitObj.addOperation(opObj, opConfObj.id)
685 688
686 689 self.procUnitObj = procUnitObj
687 690
688 691 return procUnitObj
689 692
693 ## @profile
690 694 def run(self):
691 695
692 696 is_ok = False
693 697
694 698 for opConfObj in self.opConfObjList:
695 699
696 700 kwargs = {}
697 701 for parmConfObj in opConfObj.getParameterObjList():
698 702 if opConfObj.name == 'run' and parmConfObj.name == 'datatype':
699 703 continue
700 704
701 705 kwargs[parmConfObj.name] = parmConfObj.getValue()
702 706
703 707 #ini = time.time()
704 708
705 709 #print "\tRunning the '%s' operation with %s" %(opConfObj.name, opConfObj.id)
706 710 sts = self.procUnitObj.call(opType = opConfObj.type,
707 711 opName = opConfObj.name,
708 712 opId = opConfObj.id,
709 713 )
710 714
711 # total_time = time.time() - ini
712 #
713 # if total_time > 0.002:
714 # print "%s::%s took %f seconds" %(self.name, opConfObj.name, total_time)
715 # total_time = time.time() - ini
716 #
717 # if total_time > 0.002:
718 # print "%s::%s took %f seconds" %(self.name, opConfObj.name, total_time)
715 719
716 720 is_ok = is_ok or sts
717 721
718 722 return is_ok
719 723
720 724 def close(self):
721 725
722 726 for opConfObj in self.opConfObjList:
723 727 if opConfObj.type == 'self':
724 728 continue
725 729
726 730 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
727 731 opObj.close()
728 732
729 733 self.procUnitObj.close()
730 734
731 735 return
732 736
733 737 class ReadUnitConf(ProcUnitConf):
734 738
735 739 path = None
736 740 startDate = None
737 741 endDate = None
738 742 startTime = None
739 743 endTime = None
740 744
741 745 ELEMENTNAME = 'ReadUnit'
742 746
743 747 def __init__(self):
744 748
745 749 self.id = None
746 750 self.datatype = None
747 751 self.name = None
748 752 self.inputId = None
749 753
750 754 self.parentId = None
751 755
752 756 self.opConfObjList = []
753 757 self.opObjList = []
754 758
755 759 def getElementName(self):
756 760
757 761 return self.ELEMENTNAME
758 762
759 763 def setup(self, id, name, datatype, path='', startDate="", endDate="", startTime="",
760 764 endTime="", parentId=None, queue=None, server=None, **kwargs):
761 765
762 766 #Compatible with old signal chain version
763 767 if datatype==None and name==None:
764 768 raise ValueError, "datatype or name should be defined"
765 769
766 770 if name==None:
767 771 if 'Reader' in datatype:
768 772 name = datatype
769 773 else:
770 774 name = '%sReader' %(datatype)
771 775 if datatype==None:
772 776 datatype = name.replace('Reader','')
773 777
774 778 self.id = id
775 779 self.name = name
776 780 self.datatype = datatype
777 781 if path != '':
778 782 self.path = os.path.abspath(path)
779 783 self.startDate = startDate
780 784 self.endDate = endDate
781 785 self.startTime = startTime
782 786 self.endTime = endTime
783 787
784 788 self.inputId = '0'
785 789 self.parentId = parentId
786 790 self.queue = queue
787 791 self.server = server
788 792 self.addRunOperation(**kwargs)
789 793
790 794 def update(self, datatype, path, startDate, endDate, startTime, endTime, parentId=None, name=None, **kwargs):
791 795
792 796 #Compatible with old signal chain version
793 797 if datatype==None and name==None:
794 798 raise ValueError, "datatype or name should be defined"
795 799
796 800 if name==None:
797 801 if 'Reader' in datatype:
798 802 name = datatype
799 803 else:
800 804 name = '%sReader' %(datatype)
801 805
802 806 if datatype==None:
803 807 datatype = name.replace('Reader','')
804 808
805 809 self.datatype = datatype
806 810 self.name = name
807 811 self.path = path
808 812 self.startDate = startDate
809 813 self.endDate = endDate
810 814 self.startTime = startTime
811 815 self.endTime = endTime
812 816
813 817 self.inputId = '0'
814 818 self.parentId = parentId
815 819
816 820 self.updateRunOperation(**kwargs)
817 821
818 822 def removeOperations(self):
819 823
820 824 for obj in self.opConfObjList:
821 825 del obj
822 826
823 827 self.opConfObjList = []
824 828
825 829 def addRunOperation(self, **kwargs):
826 830
827 831 opObj = self.addOperation(name = 'run', optype = 'self')
828 832
829 833 if self.server is None:
830 834 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
831 835 opObj.addParameter(name='path' , value=self.path, format='str')
832 836 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
833 837 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
834 838 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
835 839 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
836 840 opObj.addParameter(name='queue' , value=self.queue, format='obj')
837 841 for key, value in kwargs.items():
838 842 opObj.addParameter(name=key, value=value, format=type(value).__name__)
839 843 else:
840 844 opObj.addParameter(name='server' , value=self.server, format='str')
841 845
842 846
843 847 return opObj
844 848
845 849 def updateRunOperation(self, **kwargs):
846 850
847 851 opObj = self.getOperationObj(name = 'run')
848 852 opObj.removeParameters()
849 853
850 854 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
851 855 opObj.addParameter(name='path' , value=self.path, format='str')
852 856 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
853 857 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
854 858 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
855 859 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
856 860
857 861 for key, value in kwargs.items():
858 862 opObj.addParameter(name=key, value=value, format=type(value).__name__)
859 863
860 864 return opObj
861 865
862 # def makeXml(self, projectElement):
863 #
864 # procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
865 # procUnitElement.set('id', str(self.id))
866 # procUnitElement.set('name', self.name)
867 # procUnitElement.set('datatype', self.datatype)
868 # procUnitElement.set('inputId', str(self.inputId))
869 #
870 # for opConfObj in self.opConfObjList:
871 # opConfObj.makeXml(procUnitElement)
866 # def makeXml(self, projectElement):
867 #
868 # procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
869 # procUnitElement.set('id', str(self.id))
870 # procUnitElement.set('name', self.name)
871 # procUnitElement.set('datatype', self.datatype)
872 # procUnitElement.set('inputId', str(self.inputId))
873 #
874 # for opConfObj in self.opConfObjList:
875 # opConfObj.makeXml(procUnitElement)
872 876
873 877 def readXml(self, upElement):
874 878
875 879 self.id = upElement.get('id')
876 880 self.name = upElement.get('name')
877 881 self.datatype = upElement.get('datatype')
878 882 self.inputId = upElement.get('inputId')
879 883
880 884 if self.ELEMENTNAME == "ReadUnit":
881 885 self.datatype = self.datatype.replace("Reader", "")
882 886
883 887 if self.inputId == 'None':
884 888 self.inputId = '0'
885 889
886 890 self.opConfObjList = []
887 891
888 892 opElementList = upElement.iter(OperationConf().getElementName())
889 893
890 894 for opElement in opElementList:
891 895 opConfObj = OperationConf()
892 896 opConfObj.readXml(opElement)
893 897 self.opConfObjList.append(opConfObj)
894 898
895 899 if opConfObj.name == 'run':
896 900 self.path = opConfObj.getParameterValue('path')
897 901 self.startDate = opConfObj.getParameterValue('startDate')
898 902 self.endDate = opConfObj.getParameterValue('endDate')
899 903 self.startTime = opConfObj.getParameterValue('startTime')
900 904 self.endTime = opConfObj.getParameterValue('endTime')
901 905
902 906 class Project():
903 907
904 908 id = None
905 909 name = None
906 910 description = None
907 911 filename = None
908 912
909 913 procUnitConfObjDict = None
910 914
911 915 ELEMENTNAME = 'Project'
912 916
913 917 plotterQueue = None
914 918
915 919 def __init__(self, plotter_queue=None):
916 920
917 921 self.id = None
918 922 self.name = None
919 923 self.description = None
920 924
921 925 self.plotterQueue = plotter_queue
922 926
923 927 self.procUnitConfObjDict = {}
924 928
925 929 def __getNewId(self):
926 930
927 931 idList = self.procUnitConfObjDict.keys()
928 932
929 933 id = int(self.id)*10
930 934
931 935 while True:
932 936 id += 1
933 937
934 938 if str(id) in idList:
935 939 continue
936 940
937 941 break
938 942
939 943 return str(id)
940 944
941 945 def getElementName(self):
942 946
943 947 return self.ELEMENTNAME
944 948
945 949 def getId(self):
946 950
947 951 return self.id
948 952
949 953 def updateId(self, new_id):
950 954
951 955 self.id = str(new_id)
952 956
953 957 keyList = self.procUnitConfObjDict.keys()
954 958 keyList.sort()
955 959
956 960 n = 1
957 961 newProcUnitConfObjDict = {}
958 962
959 963 for procKey in keyList:
960 964
961 965 procUnitConfObj = self.procUnitConfObjDict[procKey]
962 966 idProcUnit = str(int(self.id)*10 + n)
963 967 procUnitConfObj.updateId(idProcUnit, parentId = self.id)
964 968
965 969 newProcUnitConfObjDict[idProcUnit] = procUnitConfObj
966 970 n += 1
967 971
968 972 self.procUnitConfObjDict = newProcUnitConfObjDict
969 973
970 974 def setup(self, id, name, description):
971 975
972 976 self.id = str(id)
973 977 self.name = name
974 978 self.description = description
975 979
976 980 def update(self, name, description):
977 981
978 982 self.name = name
979 983 self.description = description
980 984
981 985 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
982 986
983 987 if id is None:
984 988 idReadUnit = self.__getNewId()
985 989 else:
986 990 idReadUnit = str(id)
987 991
988 992 readUnitConfObj = ReadUnitConf()
989 993 readUnitConfObj.setup(idReadUnit, name, datatype, parentId=self.id, **kwargs)
990 994
991 995 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
992 996
993 997 return readUnitConfObj
994 998
995 999 def addProcUnit(self, inputId='0', datatype=None, name=None):
996 1000
997 1001 idProcUnit = self.__getNewId()
998 1002
999 1003 procUnitConfObj = ProcUnitConf()
1000 1004 procUnitConfObj.setup(idProcUnit, name, datatype, inputId, parentId=self.id)
1001 1005
1002 1006 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1003 1007
1004 1008 return procUnitConfObj
1005 1009
1006 1010 def removeProcUnit(self, id):
1007 1011
1008 1012 if id in self.procUnitConfObjDict.keys():
1009 1013 self.procUnitConfObjDict.pop(id)
1010 1014
1011 1015 def getReadUnitId(self):
1012 1016
1013 1017 readUnitConfObj = self.getReadUnitObj()
1014 1018
1015 1019 return readUnitConfObj.id
1016 1020
1017 1021 def getReadUnitObj(self):
1018 1022
1019 1023 for obj in self.procUnitConfObjDict.values():
1020 1024 if obj.getElementName() == "ReadUnit":
1021 1025 return obj
1022 1026
1023 1027 return None
1024 1028
1025 1029 def getProcUnitObj(self, id=None, name=None):
1026 1030
1027 1031 if id != None:
1028 1032 return self.procUnitConfObjDict[id]
1029 1033
1030 1034 if name != None:
1031 1035 return self.getProcUnitObjByName(name)
1032 1036
1033 1037 return None
1034 1038
1035 1039 def getProcUnitObjByName(self, name):
1036 1040
1037 1041 for obj in self.procUnitConfObjDict.values():
1038 1042 if obj.name == name:
1039 1043 return obj
1040 1044
1041 1045 return None
1042 1046
1043 1047 def procUnitItems(self):
1044 1048
1045 1049 return self.procUnitConfObjDict.items()
1046 1050
1047 1051 def makeXml(self):
1048 1052
1049 1053 projectElement = Element('Project')
1050 1054 projectElement.set('id', str(self.id))
1051 1055 projectElement.set('name', self.name)
1052 1056 projectElement.set('description', self.description)
1053 1057
1054 1058 for procUnitConfObj in self.procUnitConfObjDict.values():
1055 1059 procUnitConfObj.makeXml(projectElement)
1056 1060
1057 1061 self.projectElement = projectElement
1058 1062
1059 1063 def writeXml(self, filename=None):
1060 1064
1061 1065 if filename == None:
1062 1066 if self.filename:
1063 1067 filename = self.filename
1064 1068 else:
1065 1069 filename = "schain.xml"
1066 1070
1067 1071 if not filename:
1068 1072 print "filename has not been defined. Use setFilename(filename) for do it."
1069 1073 return 0
1070 1074
1071 1075 abs_file = os.path.abspath(filename)
1072 1076
1073 1077 if not os.access(os.path.dirname(abs_file), os.W_OK):
1074 1078 print "No write permission on %s" %os.path.dirname(abs_file)
1075 1079 return 0
1076 1080
1077 1081 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1078 1082 print "File %s already exists and it could not be overwriten" %abs_file
1079 1083 return 0
1080 1084
1081 1085 self.makeXml()
1082 1086
1083 1087 ElementTree(self.projectElement).write(abs_file, method='xml')
1084 1088
1085 1089 self.filename = abs_file
1086 1090
1087 1091 return 1
1088 1092
1089 1093 def readXml(self, filename = None):
1090 1094
1091 1095 if not filename:
1092 1096 print "filename is not defined"
1093 1097 return 0
1094 1098
1095 1099 abs_file = os.path.abspath(filename)
1096 1100
1097 1101 if not os.path.isfile(abs_file):
1098 1102 print "%s file does not exist" %abs_file
1099 1103 return 0
1100 1104
1101 1105 self.projectElement = None
1102 1106 self.procUnitConfObjDict = {}
1103 1107
1104 1108 try:
1105 1109 self.projectElement = ElementTree().parse(abs_file)
1106 1110 except:
1107 1111 print "Error reading %s, verify file format" %filename
1108 1112 return 0
1109 1113
1110 1114 self.project = self.projectElement.tag
1111 1115
1112 1116 self.id = self.projectElement.get('id')
1113 1117 self.name = self.projectElement.get('name')
1114 1118 self.description = self.projectElement.get('description')
1115 1119
1116 1120 readUnitElementList = self.projectElement.iter(ReadUnitConf().getElementName())
1117 1121
1118 1122 for readUnitElement in readUnitElementList:
1119 1123 readUnitConfObj = ReadUnitConf()
1120 1124 readUnitConfObj.readXml(readUnitElement)
1121 1125
1122 1126 if readUnitConfObj.parentId == None:
1123 1127 readUnitConfObj.parentId = self.id
1124 1128
1125 1129 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
1126 1130
1127 1131 procUnitElementList = self.projectElement.iter(ProcUnitConf().getElementName())
1128 1132
1129 1133 for procUnitElement in procUnitElementList:
1130 1134 procUnitConfObj = ProcUnitConf()
1131 1135 procUnitConfObj.readXml(procUnitElement)
1132 1136
1133 1137 if procUnitConfObj.parentId == None:
1134 1138 procUnitConfObj.parentId = self.id
1135 1139
1136 1140 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1137 1141
1138 1142 self.filename = abs_file
1139 1143
1140 1144 return 1
1141 1145
1142 1146 def printattr(self):
1143 1147
1144 1148 print "Project[%s]: name = %s, description = %s" %(self.id,
1145 1149 self.name,
1146 1150 self.description)
1147 1151
1148 1152 for procUnitConfObj in self.procUnitConfObjDict.values():
1149 1153 procUnitConfObj.printattr()
1150 1154
1151 1155 def createObjects(self):
1152 1156
1153 1157 for procUnitConfObj in self.procUnitConfObjDict.values():
1154 1158 procUnitConfObj.createObjects(self.plotterQueue)
1155 1159
1156 1160 def __connect(self, objIN, thisObj):
1157 1161
1158 1162 thisObj.setInput(objIN.getOutputObj())
1159 1163
1160 1164 def connectObjects(self):
1161 1165
1162 1166 for thisPUConfObj in self.procUnitConfObjDict.values():
1163 1167
1164 1168 inputId = thisPUConfObj.getInputId()
1165 1169
1166 1170 if int(inputId) == 0:
1167 1171 continue
1168 1172
1169 1173 #Get input object
1170 1174 puConfINObj = self.procUnitConfObjDict[inputId]
1171 1175 puObjIN = puConfINObj.getProcUnitObj()
1172 1176
1173 1177 #Get current object
1174 1178 thisPUObj = thisPUConfObj.getProcUnitObj()
1175 1179
1176 1180 self.__connect(puObjIN, thisPUObj)
1177 1181
1178 1182 def __handleError(self, procUnitConfObj, send_email=True):
1179 1183
1180 1184 import socket
1181 1185
1182 1186 err = traceback.format_exception(sys.exc_info()[0],
1183 1187 sys.exc_info()[1],
1184 1188 sys.exc_info()[2])
1185 1189
1186 1190 print "***** Error occurred in %s *****" %(procUnitConfObj.name)
1187 1191 print "***** %s" %err[-1]
1188 1192
1189 1193 message = "".join(err)
1190 1194
1191 1195 sys.stderr.write(message)
1192 1196
1193 1197 if not send_email:
1194 1198 return
1195 1199
1196 1200 subject = "SChain v%s: Error running %s\n" %(schainpy.__version__, procUnitConfObj.name)
1197 1201
1198 1202 subtitle = "%s: %s\n" %(procUnitConfObj.getElementName() ,procUnitConfObj.name)
1199 1203 subtitle += "Hostname: %s\n" %socket.gethostbyname(socket.gethostname())
1200 1204 subtitle += "Working directory: %s\n" %os.path.abspath("./")
1201 1205 subtitle += "Configuration file: %s\n" %self.filename
1202 1206 subtitle += "Time: %s\n" %str(datetime.datetime.now())
1203 1207
1204 1208 readUnitConfObj = self.getReadUnitObj()
1205 1209 if readUnitConfObj:
1206 1210 subtitle += "\nInput parameters:\n"
1207 1211 subtitle += "[Data path = %s]\n" %readUnitConfObj.path
1208 1212 subtitle += "[Data type = %s]\n" %readUnitConfObj.datatype
1209 1213 subtitle += "[Start date = %s]\n" %readUnitConfObj.startDate
1210 1214 subtitle += "[End date = %s]\n" %readUnitConfObj.endDate
1211 1215 subtitle += "[Start time = %s]\n" %readUnitConfObj.startTime
1212 1216 subtitle += "[End time = %s]\n" %readUnitConfObj.endTime
1213 1217
1214 1218 adminObj = schainpy.admin.SchainNotify()
1215 1219 adminObj.sendAlert(message=message,
1216 1220 subject=subject,
1217 1221 subtitle=subtitle,
1218 1222 filename=self.filename)
1219 1223
1220 1224 def isPaused(self):
1221 1225 return 0
1222 1226
1223 1227 def isStopped(self):
1224 1228 return 0
1225 1229
1226 1230 def runController(self):
1227 1231 """
1228 1232 returns 0 when this process has been stopped, 1 otherwise
1229 1233 """
1230 1234
1231 1235 if self.isPaused():
1232 1236 print "Process suspended"
1233 1237
1234 1238 while True:
1235 1239 sleep(0.1)
1236 1240
1237 1241 if not self.isPaused():
1238 1242 break
1239 1243
1240 1244 if self.isStopped():
1241 1245 break
1242 1246
1243 1247 print "Process reinitialized"
1244 1248
1245 1249 if self.isStopped():
1246 1250 print "Process stopped"
1247 1251 return 0
1248 1252
1249 1253 return 1
1250 1254
1251 1255 def setFilename(self, filename):
1252 1256
1253 1257 self.filename = filename
1254 1258
1255 1259 def setPlotterQueue(self, plotter_queue):
1256 1260
1257 1261 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1258 1262
1259 1263 def getPlotterQueue(self):
1260 1264
1261 1265 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1262 1266
1263 1267 def useExternalPlotter(self):
1264 1268
1265 1269 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1266 1270
1271
1267 1272 def run(self):
1268 1273
1269 1274 print
1270 1275 print "*"*60
1271 1276 print " Starting SIGNAL CHAIN PROCESSING v%s " %schainpy.__version__
1272 1277 print "*"*60
1273 1278 print
1274 1279
1275 1280 keyList = self.procUnitConfObjDict.keys()
1276 1281 keyList.sort()
1277 1282
1278 1283 while(True):
1279 1284
1280 1285 is_ok = False
1281 1286
1282 1287 for procKey in keyList:
1283 1288 # print "Running the '%s' process with %s" %(procUnitConfObj.name, procUnitConfObj.id)
1284 1289
1285 1290 procUnitConfObj = self.procUnitConfObjDict[procKey]
1286 1291
1287 1292 try:
1288 1293 sts = procUnitConfObj.run()
1289 1294 is_ok = is_ok or sts
1290 1295 except KeyboardInterrupt:
1291 1296 is_ok = False
1292 1297 break
1293 1298 except ValueError, e:
1294 1299 sleep(0.5)
1295 1300 self.__handleError(procUnitConfObj, send_email=True)
1296 1301 is_ok = False
1297 1302 break
1298 1303 except:
1299 1304 sleep(0.5)
1300 1305 self.__handleError(procUnitConfObj)
1301 1306 is_ok = False
1302 1307 break
1303 1308
1304 1309 #If every process unit finished so end process
1305 1310 if not(is_ok):
1306 1311 # print "Every process unit have finished"
1307 1312 break
1308 1313
1309 1314 if not self.runController():
1310 1315 break
1311 1316
1312 1317 #Closing every process
1313 1318 for procKey in keyList:
1314 1319 procUnitConfObj = self.procUnitConfObjDict[procKey]
1315 1320 procUnitConfObj.close()
1316 1321
1317 1322 print "Process finished"
1318 1323
1319 1324 def start(self, filename=None):
1320 1325
1321 1326 self.writeXml(filename)
1322 1327 self.createObjects()
1323 1328 self.connectObjects()
1324 1329 self.run()
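
For reference, here is a minimal usage sketch of the Project API defined in the controller diff above (assuming the file is schainpy/controller.py); the project id, data path, dates and the CohInt operation are hypothetical placeholders, not taken from this changeset:

    # Minimal usage sketch of the controller API shown above.
    # All ids, paths, dates and operation parameters are hypothetical.
    from schainpy.controller import Project

    controller = Project()
    controller.setup(id='191', name='test_project', description='Basic experiment')

    # Reader unit: datatype, path and date/time ranges are placeholder values
    readUnit = controller.addReadUnit(datatype='Voltage',
                                      path='/data/experiment',
                                      startDate='2017/01/01',
                                      endDate='2017/01/02',
                                      startTime='00:00:00',
                                      endTime='23:59:59')

    # Processing unit fed by the reader unit
    procUnit = controller.addProcUnit(datatype='Voltage', inputId=readUnit.getId())

    # An 'other'-type operation with one integer parameter
    opObj = procUnit.addOperation(name='CohInt', optype='other')
    opObj.addParameter(name='n', value='8', format='int')

    # Writes schain.xml, builds the objects, connects them and runs the chain
    controller.start()
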
@@ -1,12 +1,12
1 1 #from schainpy.model.data.jrodata import *
2 2 # from schainpy.model.io.jrodataIO import *
3 3 # from schainpy.model.proc.jroprocessing import *
4 4 # from schainpy.model.graphics.jroplot import *
5 5 # from schainpy.model.utils.jroutils import *
6 6 # from schainpy.serializer import *
7 7
8 8 from data import *
9 9 from io import *
10 10 from proc import *
11 from graphics import *
11 #from graphics import *
12 12 from utils import *
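
This hunk comments out the wildcard re-export of the graphics subpackage, so the plotting classes are presumably no longer pulled in by "from schainpy.model import *"; a hypothetical explicit import after this change would be:

    # hypothetical explicit import of the plotting classes after this change
    from schainpy.model.graphics import *
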
@@ -1,775 +1,783
1 1
2 2 import os
3 3 import zmq
4 4 import time
5 5 import numpy
6 6 import datetime
7 7 import numpy as np
8 8 import matplotlib
9 9 matplotlib.use('TkAgg')
10 10 import matplotlib.pyplot as plt
11 11 from mpl_toolkits.axes_grid1 import make_axes_locatable
12 12 from matplotlib.ticker import FuncFormatter, LinearLocator
13 13 from multiprocessing import Process
14 14
15 15 from schainpy.model.proc.jroproc_base import Operation
16 16
17 17 plt.ion()
18 18
19 19 func = lambda x, pos: ('%s') %(datetime.datetime.fromtimestamp(x).strftime('%H:%M'))
20 fromtimestamp = lambda x, mintime : (datetime.datetime.utcfromtimestamp(mintime).replace(hour=(x + 5), minute=0) - d1970).total_seconds()
21
20 22
21 23 d1970 = datetime.datetime(1970,1,1)
22 24
23 25 class PlotData(Operation, Process):
24 26
25 27 CODE = 'Figure'
26 28 colormap = 'jro'
27 29 CONFLATE = False
28 30 __MAXNUMX = 80
29 31 __missing = 1E30
30 32
31 33 def __init__(self, **kwargs):
32 34
33 35 Operation.__init__(self, plot=True, **kwargs)
34 36 Process.__init__(self)
35 37 self.kwargs['code'] = self.CODE
36 38 self.mp = False
37 39 self.dataOut = None
38 40 self.isConfig = False
39 41 self.figure = None
40 42 self.axes = []
41 43 self.localtime = kwargs.pop('localtime', True)
42 44 self.show = kwargs.get('show', True)
43 45 self.save = kwargs.get('save', False)
44 46 self.colormap = kwargs.get('colormap', self.colormap)
45 47 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
46 48 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
47 49 self.showprofile = kwargs.get('showprofile', True)
48 50 self.title = kwargs.get('wintitle', '')
49 51 self.xaxis = kwargs.get('xaxis', 'frequency')
50 52 self.zmin = kwargs.get('zmin', None)
51 53 self.zmax = kwargs.get('zmax', None)
52 54 self.xmin = kwargs.get('xmin', None)
53 55 self.xmax = kwargs.get('xmax', None)
54 56 self.xrange = kwargs.get('xrange', 24)
55 57 self.ymin = kwargs.get('ymin', None)
56 58 self.ymax = kwargs.get('ymax', None)
57 self.__MAXNUMY = kwargs.get('decimation', 80)
59 self.__MAXNUMY = kwargs.get('decimation', 5000)
58 60 self.throttle_value = 5
59 61 self.times = []
60 62 #self.interactive = self.kwargs['parent']
61 63
62 64
63 65 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
64 66
65 67 if x_buffer.shape[0] < 2:
66 68 return x_buffer, y_buffer, z_buffer
67 69
68 70 deltas = x_buffer[1:] - x_buffer[0:-1]
69 71 x_median = np.median(deltas)
70 72
71 73 index = np.where(deltas > 5*x_median)
72 74
73 75 if len(index[0]) != 0:
74 76 z_buffer[::, index[0], ::] = self.__missing
75 77 z_buffer = np.ma.masked_inside(z_buffer,
76 78 0.99*self.__missing,
77 79 1.01*self.__missing)
78 80
79 81 return x_buffer, y_buffer, z_buffer
80 82
81 83 def decimate(self):
82 84
83 85 # dx = int(len(self.x)/self.__MAXNUMX) + 1
84 86 dy = int(len(self.y)/self.__MAXNUMY) + 1
85 87
86 88 # x = self.x[::dx]
87 89 x = self.x
88 90 y = self.y[::dy]
89 91 z = self.z[::, ::, ::dy]
90 92
91 93 return x, y, z
92 94
93 95 def __plot(self):
94 96
95 97 print 'plotting...{}'.format(self.CODE)
96 98
97 99 if self.show:
98 100 self.figure.show()
99 101
100 102 self.plot()
101 103 plt.tight_layout()
102 104 self.figure.canvas.manager.set_window_title('{} {} - {}'.format(self.title, self.CODE.upper(),
103 105 datetime.datetime.fromtimestamp(self.max_time).strftime('%Y/%m/%d')))
104 106
105 107 if self.save:
106 108 figname = os.path.join(self.save, '{}_{}.png'.format(self.CODE,
107 109 datetime.datetime.fromtimestamp(self.saveTime).strftime('%y%m%d_%H%M%S')))
108 110 print 'Saving figure: {}'.format(figname)
109 111 self.figure.savefig(figname)
110 112
111 113 self.figure.canvas.draw()
112 114
113 115 def plot(self):
114 116
115 117 print 'plotting...{}'.format(self.CODE.upper())
116 118 return
117 119
118 120 def run(self):
119 121
120 122 print '[Starting] {}'.format(self.name)
121 123
122 124 context = zmq.Context()
123 125 receiver = context.socket(zmq.SUB)
124 126 receiver.setsockopt(zmq.SUBSCRIBE, '')
125 127 receiver.setsockopt(zmq.CONFLATE, self.CONFLATE)
126 128
127 129 if 'server' in self.kwargs['parent']:
128 130 receiver.connect('ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server']))
129 131 else:
130 132 receiver.connect("ipc:///tmp/zmq.plots")
131 133
132 134 seconds_passed = 0
133 135
134 136 while True:
135 137 try:
136 138 self.data = receiver.recv_pyobj(flags=zmq.NOBLOCK)#flags=zmq.NOBLOCK
137 139 self.started = self.data['STARTED']
138 140 self.dataOut = self.data['dataOut']
139 141
140 142 if (len(self.times) < len(self.data['times']) and not self.started and self.data['ENDED']):
141 143 continue
142 144
143 145 self.times = self.data['times']
144 146 self.times.sort()
145 147 self.throttle_value = self.data['throttle']
146 148 self.min_time = self.times[0]
147 149 self.max_time = self.times[-1]
148 150
149 151 if self.isConfig is False:
150 152 print 'setting up'
151 153 self.setup()
152 154 self.isConfig = True
153 155 self.__plot()
154 156
155 157 if self.data['ENDED'] is True:
156 158 print '********GRAPHIC ENDED********'
157 159 self.ended = True
158 160 self.isConfig = False
159 161 self.__plot()
160 162 elif seconds_passed >= self.data['throttle']:
161 163 print 'passed', seconds_passed
162 164 self.__plot()
163 165 seconds_passed = 0
164 166
165 167 except zmq.Again as e:
166 168 print 'Waiting for data...'
167 169 plt.pause(2)
168 170 seconds_passed += 2
169 171
170 172 def close(self):
171 173 if self.dataOut:
172 174 self.__plot()
173 175
174 176
175 177 class PlotSpectraData(PlotData):
176 178
177 179 CODE = 'spc'
178 180 colormap = 'jro'
179 181 CONFLATE = False
180 182
181 183 def setup(self):
182 184
183 185 ncolspan = 1
184 186 colspan = 1
185 187 self.ncols = int(numpy.sqrt(self.dataOut.nChannels)+0.9)
186 188 self.nrows = int(self.dataOut.nChannels*1./self.ncols + 0.9)
187 189 self.width = 3.6*self.ncols
188 190 self.height = 3.2*self.nrows
189 191 if self.showprofile:
190 192 ncolspan = 3
191 193 colspan = 2
192 194 self.width += 1.2*self.ncols
193 195
194 196 self.ylabel = 'Range [Km]'
195 197 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
196 198
197 199 if self.figure is None:
198 200 self.figure = plt.figure(figsize=(self.width, self.height),
199 201 edgecolor='k',
200 202 facecolor='w')
201 203 else:
202 204 self.figure.clf()
203 205
204 206 n = 0
205 207 for y in range(self.nrows):
206 208 for x in range(self.ncols):
207 209 if n >= self.dataOut.nChannels:
208 210 break
209 211 ax = plt.subplot2grid((self.nrows, self.ncols*ncolspan), (y, x*ncolspan), 1, colspan)
210 212 if self.showprofile:
211 213 ax.ax_profile = plt.subplot2grid((self.nrows, self.ncols*ncolspan), (y, x*ncolspan+colspan), 1, 1)
212 214
213 215 ax.firsttime = True
214 216 self.axes.append(ax)
215 217 n += 1
216 218
217 219 def plot(self):
218 220
219 221 if self.xaxis == "frequency":
220 222 x = self.dataOut.getFreqRange(1)/1000.
221 223 xlabel = "Frequency (kHz)"
222 224 elif self.xaxis == "time":
223 225 x = self.dataOut.getAcfRange(1)
224 226 xlabel = "Time (ms)"
225 227 else:
226 228 x = self.dataOut.getVelRange(1)
227 229 xlabel = "Velocity (m/s)"
228 230
229 231 y = self.dataOut.getHeiRange()
230 232 z = self.data[self.CODE]
231 233
232 234 for n, ax in enumerate(self.axes):
233 235
234 236 if ax.firsttime:
235 237 self.xmax = self.xmax if self.xmax else np.nanmax(x)
236 238 self.xmin = self.xmin if self.xmin else -self.xmax
237 239 self.ymin = self.ymin if self.ymin else np.nanmin(y)
238 240 self.ymax = self.ymax if self.ymax else np.nanmax(y)
239 241 self.zmin = self.zmin if self.zmin else np.nanmin(z)
240 242 self.zmax = self.zmax if self.zmax else np.nanmax(z)
241 243 ax.plot = ax.pcolormesh(x, y, z[n].T,
242 244 vmin=self.zmin,
243 245 vmax=self.zmax,
244 246 cmap=plt.get_cmap(self.colormap)
245 247 )
246 248 divider = make_axes_locatable(ax)
247 249 cax = divider.new_horizontal(size='3%', pad=0.05)
248 250 self.figure.add_axes(cax)
249 251 plt.colorbar(ax.plot, cax)
250 252
251 253 ax.set_xlim(self.xmin, self.xmax)
252 254 ax.set_ylim(self.ymin, self.ymax)
253 255
254 256 ax.set_ylabel(self.ylabel)
255 257 ax.set_xlabel(xlabel)
256 258
257 259 ax.firsttime = False
258 260
259 261 if self.showprofile:
260 262 ax.plot_profile= ax.ax_profile.plot(self.data['rti'][self.max_time][n], y)[0]
261 263 ax.ax_profile.set_xlim(self.zmin, self.zmax)
262 264 ax.ax_profile.set_ylim(self.ymin, self.ymax)
263 265 ax.ax_profile.set_xlabel('dB')
264 266 ax.ax_profile.grid(b=True, axis='x')
265 267 ax.plot_noise = ax.ax_profile.plot(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y,
266 268 color="k", linestyle="dashed", lw=2)[0]
267 269 [tick.set_visible(False) for tick in ax.ax_profile.get_yticklabels()]
268 270 else:
269 271 ax.plot.set_array(z[n].T.ravel())
270 272 if self.showprofile:
271 273 ax.plot_profile.set_data(self.data['rti'][self.max_time][n], y)
272 274 ax.plot_noise.set_data(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y)
273 275
274 276 ax.set_title('{} - Noise: {:.2f} dB'.format(self.titles[n], self.data['noise'][self.max_time][n]),
275 277 size=8)
276 278 self.saveTime = self.max_time
277 279
278 280
279 281 class PlotCrossSpectraData(PlotData):
280 282
281 283 CODE = 'cspc'
282 284 zmin_coh = None
283 285 zmax_coh = None
284 286 zmin_phase = None
285 287 zmax_phase = None
286 288 CONFLATE = False
287 289
288 290 def setup(self):
289 291
290 292 ncolspan = 1
291 293 colspan = 1
292 294 self.ncols = 2
293 295 self.nrows = self.dataOut.nPairs
294 296 self.width = 3.6*self.ncols
295 297 self.height = 3.2*self.nrows
296 298
297 299 self.ylabel = 'Range [Km]'
298 300 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
299 301
300 302 if self.figure is None:
301 303 self.figure = plt.figure(figsize=(self.width, self.height),
302 304 edgecolor='k',
303 305 facecolor='w')
304 306 else:
305 307 self.figure.clf()
306 308
307 309 for y in range(self.nrows):
308 310 for x in range(self.ncols):
309 311 ax = plt.subplot2grid((self.nrows, self.ncols), (y, x), 1, 1)
310 312 ax.firsttime = True
311 313 self.axes.append(ax)
312 314
313 315 def plot(self):
314 316
315 317 if self.xaxis == "frequency":
316 318 x = self.dataOut.getFreqRange(1)/1000.
317 319 xlabel = "Frequency (kHz)"
318 320 elif self.xaxis == "time":
319 321 x = self.dataOut.getAcfRange(1)
320 322 xlabel = "Time (ms)"
321 323 else:
322 324 x = self.dataOut.getVelRange(1)
323 325 xlabel = "Velocity (m/s)"
324 326
325 327 y = self.dataOut.getHeiRange()
326 328 z_coh = self.data['cspc_coh']
327 329 z_phase = self.data['cspc_phase']
328 330
329 331 for n in range(self.nrows):
330 332 ax = self.axes[2*n]
331 333 ax1 = self.axes[2*n+1]
332 334 if ax.firsttime:
333 335 self.xmax = self.xmax if self.xmax else np.nanmax(x)
334 336 self.xmin = self.xmin if self.xmin else -self.xmax
335 337 self.ymin = self.ymin if self.ymin else np.nanmin(y)
336 338 self.ymax = self.ymax if self.ymax else np.nanmax(y)
337 339 self.zmin_coh = self.zmin_coh if self.zmin_coh else 0.0
338 340 self.zmax_coh = self.zmax_coh if self.zmax_coh else 1.0
339 341 self.zmin_phase = self.zmin_phase if self.zmin_phase else -180
340 342 self.zmax_phase = self.zmax_phase if self.zmax_phase else 180
341 343
342 344 ax.plot = ax.pcolormesh(x, y, z_coh[n].T,
343 345 vmin=self.zmin_coh,
344 346 vmax=self.zmax_coh,
345 347 cmap=plt.get_cmap(self.colormap_coh)
346 348 )
347 349 divider = make_axes_locatable(ax)
348 350 cax = divider.new_horizontal(size='3%', pad=0.05)
349 351 self.figure.add_axes(cax)
350 352 plt.colorbar(ax.plot, cax)
351 353
352 354 ax.set_xlim(self.xmin, self.xmax)
353 355 ax.set_ylim(self.ymin, self.ymax)
354 356
355 357 ax.set_ylabel(self.ylabel)
356 358 ax.set_xlabel(xlabel)
357 359 ax.firsttime = False
358 360
359 361 ax1.plot = ax1.pcolormesh(x, y, z_phase[n].T,
360 362 vmin=self.zmin_phase,
361 363 vmax=self.zmax_phase,
362 364 cmap=plt.get_cmap(self.colormap_phase)
363 365 )
364 366 divider = make_axes_locatable(ax1)
365 367 cax = divider.new_horizontal(size='3%', pad=0.05)
366 368 self.figure.add_axes(cax)
367 369 plt.colorbar(ax1.plot, cax)
368 370
369 371 ax1.set_xlim(self.xmin, self.xmax)
370 372 ax1.set_ylim(self.ymin, self.ymax)
371 373
372 374 ax1.set_ylabel(self.ylabel)
373 375 ax1.set_xlabel(xlabel)
374 376 ax1.firsttime = False
375 377 else:
376 378 ax.plot.set_array(z_coh[n].T.ravel())
377 379 ax1.plot.set_array(z_phase[n].T.ravel())
378 380
379 381 ax.set_title('Coherence Ch{} * Ch{}'.format(self.dataOut.pairsList[n][0], self.dataOut.pairsList[n][1]), size=8)
380 382 ax1.set_title('Phase Ch{} * Ch{}'.format(self.dataOut.pairsList[n][0], self.dataOut.pairsList[n][1]), size=8)
381 383 self.saveTime = self.max_time
382 384
383 385
384 386 class PlotSpectraMeanData(PlotSpectraData):
385 387
386 388 CODE = 'spc_mean'
387 389 colormap = 'jet'
388 390
389 391 def plot(self):
390 392
391 393 if self.xaxis == "frequency":
392 394 x = self.dataOut.getFreqRange(1)/1000.
393 395 xlabel = "Frequency (kHz)"
394 396 elif self.xaxis == "time":
395 397 x = self.dataOut.getAcfRange(1)
396 398 xlabel = "Time (ms)"
397 399 else:
398 400 x = self.dataOut.getVelRange(1)
399 401 xlabel = "Velocity (m/s)"
400 402
401 403 y = self.dataOut.getHeiRange()
402 404 z = self.data['spc']
403 405 mean = self.data['mean'][self.max_time]
404 406
405 407 for n, ax in enumerate(self.axes):
406 408
407 409 if ax.firsttime:
408 410 self.xmax = self.xmax if self.xmax else np.nanmax(x)
409 411 self.xmin = self.xmin if self.xmin else -self.xmax
410 412 self.ymin = self.ymin if self.ymin else np.nanmin(y)
411 413 self.ymax = self.ymax if self.ymax else np.nanmax(y)
412 414 self.zmin = self.zmin if self.zmin else np.nanmin(z)
413 415 self.zmax = self.zmax if self.zmax else np.nanmax(z)
414 416 ax.plt = ax.pcolormesh(x, y, z[n].T,
415 417 vmin=self.zmin,
416 418 vmax=self.zmax,
417 419 cmap=plt.get_cmap(self.colormap)
418 420 )
419 421 ax.plt_dop = ax.plot(mean[n], y,
420 422 color='k')[0]
421 423
422 424 divider = make_axes_locatable(ax)
423 425 cax = divider.new_horizontal(size='3%', pad=0.05)
424 426 self.figure.add_axes(cax)
425 427 plt.colorbar(ax.plt, cax)
426 428
427 429 ax.set_xlim(self.xmin, self.xmax)
428 430 ax.set_ylim(self.ymin, self.ymax)
429 431
430 432 ax.set_ylabel(self.ylabel)
431 433 ax.set_xlabel(xlabel)
432 434
433 435 ax.firsttime = False
434 436
435 437 if self.showprofile:
436 438 ax.plt_profile= ax.ax_profile.plot(self.data['rti'][self.max_time][n], y)[0]
437 439 ax.ax_profile.set_xlim(self.zmin, self.zmax)
438 440 ax.ax_profile.set_ylim(self.ymin, self.ymax)
439 441 ax.ax_profile.set_xlabel('dB')
440 442 ax.ax_profile.grid(b=True, axis='x')
441 443 ax.plt_noise = ax.ax_profile.plot(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y,
442 444 color="k", linestyle="dashed", lw=2)[0]
443 445 [tick.set_visible(False) for tick in ax.ax_profile.get_yticklabels()]
444 446 else:
445 447 ax.plt.set_array(z[n].T.ravel())
446 448 ax.plt_dop.set_data(mean[n], y)
447 449 if self.showprofile:
448 450 ax.plt_profile.set_data(self.data['rti'][self.max_time][n], y)
449 451 ax.plt_noise.set_data(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y)
450 452
451 453 ax.set_title('{} - Noise: {:.2f} dB'.format(self.titles[n], self.data['noise'][self.max_time][n]),
452 454 size=8)
453 455 self.saveTime = self.max_time
454 456
455 457
456 458 class PlotRTIData(PlotData):
457 459
458 460 CODE = 'rti'
459 461 colormap = 'jro'
460 462
461 463 def setup(self):
462 464 self.ncols = 1
463 465 self.nrows = self.dataOut.nChannels
464 466 self.width = 10
465 467 self.height = 2.2*self.nrows if self.nrows<6 else 12
466 468 if self.nrows==1:
467 469 self.height += 1
468 470 self.ylabel = 'Range [Km]'
469 471 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
470 472
471 473 if self.figure is None:
472 474 self.figure = plt.figure(figsize=(self.width, self.height),
473 475 edgecolor='k',
474 476 facecolor='w')
475 477 else:
476 478 self.figure.clf()
477 479 self.axes = []
478 480
479 481 for n in range(self.nrows):
480 482 ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
481 483 ax.firsttime = True
482 484 self.axes.append(ax)
483 485
484 486 def plot(self):
485 487
486 488 self.x = np.array(self.times)
487 489 self.y = self.dataOut.getHeiRange()
488 490 self.z = []
489 491
490 492 for ch in range(self.nrows):
491 493 self.z.append([self.data[self.CODE][t][ch] for t in self.times])
492 494
493 495 self.z = np.array(self.z)
494 496 for n, ax in enumerate(self.axes):
495 497 x, y, z = self.fill_gaps(*self.decimate())
496 xmin = self.min_time
497 xmax = xmin+self.xrange*60*60
498 if self.xmin is None:
499 xmin = self.min_time
500 else:
501 xmin = fromtimestamp(int(self.xmin), self.min_time)
502 if self.xmax is None:
503 xmax = xmin + self.xrange*60*60
504 else:
505 xmax = xmin + (self.xmax - self.xmin) * 60 * 60
498 506 self.zmin = self.zmin if self.zmin else np.min(self.z)
499 507 self.zmax = self.zmax if self.zmax else np.max(self.z)
500 508 if ax.firsttime:
501 509 self.ymin = self.ymin if self.ymin else np.nanmin(self.y)
502 510 self.ymax = self.ymax if self.ymax else np.nanmax(self.y)
503 511 plot = ax.pcolormesh(x, y, z[n].T,
504 512 vmin=self.zmin,
505 513 vmax=self.zmax,
506 514 cmap=plt.get_cmap(self.colormap)
507 515 )
508 516 divider = make_axes_locatable(ax)
509 517 cax = divider.new_horizontal(size='2%', pad=0.05)
510 518 self.figure.add_axes(cax)
511 519 plt.colorbar(plot, cax)
512 520 ax.set_ylim(self.ymin, self.ymax)
513 521
514 522 ax.xaxis.set_major_formatter(FuncFormatter(func))
515 523 ax.xaxis.set_major_locator(LinearLocator(6))
516 524
517 525 ax.set_ylabel(self.ylabel)
518 526
519 527 # if self.xmin is None:
520 528 # xmin = self.min_time
521 529 # else:
522 530 # xmin = (datetime.datetime.combine(self.dataOut.datatime.date(),
523 531 # datetime.time(self.xmin, 0, 0))-d1970).total_seconds()
524 532
525 533 ax.set_xlim(xmin, xmax)
526 534 ax.firsttime = False
527 535 else:
528 536 ax.collections.remove(ax.collections[0])
529 537 ax.set_xlim(xmin, xmax)
530 538 plot = ax.pcolormesh(x, y, z[n].T,
531 539 vmin=self.zmin,
532 540 vmax=self.zmax,
533 541 cmap=plt.get_cmap(self.colormap)
534 542 )
535 543 ax.set_title('{} {}'.format(self.titles[n],
536 544 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')),
537 545 size=8)
538 546
539 547 self.saveTime = self.min_time
540 548
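# Hedged sketch (not part of this module) of how the new xmin/xmax handling above
# can map a user-supplied hour of the day to the absolute timestamps passed to
# ax.set_xlim; `first_timestamp` stands in for self.min_time and the helper name
# `hour_to_timestamp` is illustrative only, not the module's own `fromtimestamp`.
#
# import time, datetime
#
# def hour_to_timestamp(hour, first_timestamp):
#     day = datetime.datetime.fromtimestamp(first_timestamp).date()
#     dt = datetime.datetime.combine(day, datetime.time(int(hour), 0, 0))
#     return time.mktime(dt.timetuple())
#
# xmin_ts = hour_to_timestamp(7, first_timestamp)    # 07:00 of the plotted day
# xmax_ts = xmin_ts + (9 - 7) * 60 * 60              # two-hour window (xmin=7, xmax=9)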
541 549
542 550 class PlotCOHData(PlotRTIData):
543 551
544 552 CODE = 'coh'
545 553
546 554 def setup(self):
547 555
548 556 self.ncols = 1
549 557 self.nrows = self.dataOut.nPairs
550 558 self.width = 10
551 559 self.height = 2.2*self.nrows if self.nrows<6 else 12
552 560 if self.nrows==1:
553 561 self.height += 1
554 562 self.ylabel = 'Range [Km]'
555 563 self.titles = ['{} Ch{} * Ch{}'.format(self.CODE.upper(), x[0], x[1]) for x in self.dataOut.pairsList]
556 564
557 565 if self.figure is None:
558 566 self.figure = plt.figure(figsize=(self.width, self.height),
559 567 edgecolor='k',
560 568 facecolor='w')
561 569 else:
562 570 self.figure.clf()
563 571 self.axes = []
564 572
565 573 for n in range(self.nrows):
566 574 ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
567 575 ax.firsttime = True
568 576 self.axes.append(ax)
569 577
570 578
571 579 class PlotNoiseData(PlotData):
572 580 CODE = 'noise'
573 581
574 582 def setup(self):
575 583
576 584 self.ncols = 1
577 585 self.nrows = 1
578 586 self.width = 10
579 587 self.height = 3.2
580 588 self.ylabel = 'Intensity [dB]'
581 589 self.titles = ['Noise']
582 590
583 591 if self.figure is None:
584 592 self.figure = plt.figure(figsize=(self.width, self.height),
585 593 edgecolor='k',
586 594 facecolor='w')
587 595 else:
588 596 self.figure.clf()
589 597 self.axes = []
590 598
591 599 self.ax = self.figure.add_subplot(self.nrows, self.ncols, 1)
592 600 self.ax.firsttime = True
593 601
594 602 def plot(self):
595 603
596 604 x = self.times
597 605 xmin = self.min_time
598 606 xmax = xmin+self.xrange*60*60
599 607 if self.ax.firsttime:
600 608 for ch in self.dataOut.channelList:
601 609 y = [self.data[self.CODE][t][ch] for t in self.times]
602 610 self.ax.plot(x, y, lw=1, label='Ch{}'.format(ch))
603 611 self.ax.firsttime = False
604 612 self.ax.xaxis.set_major_formatter(FuncFormatter(func))
605 613 self.ax.xaxis.set_major_locator(LinearLocator(6))
606 614 self.ax.set_ylabel(self.ylabel)
607 615 plt.legend()
608 616 else:
609 617 for ch in self.dataOut.channelList:
610 618 y = [self.data[self.CODE][t][ch] for t in self.times]
611 619 self.ax.lines[ch].set_data(x, y)
612 620
613 621 self.ax.set_xlim(xmin, xmax)
614 622 self.ax.set_ylim(min(y)-5, max(y)+5)
615 623 self.saveTime = self.min_time
616 624
617 625
618 626 class PlotWindProfilerData(PlotRTIData):
619 627
620 628 CODE = 'wind'
621 629 colormap = 'seismic'
622 630
623 631 def setup(self):
624 632 self.ncols = 1
625 633 self.nrows = self.dataOut.data_output.shape[0]
626 634 self.width = 10
627 635 self.height = 2.2*self.nrows
628 636 self.ylabel = 'Height [Km]'
629 637 self.titles = ['Zonal Wind' ,'Meridional Wind', 'Vertical Wind']
630 638 self.clabels = ['Velocity (m/s)','Velocity (m/s)','Velocity (cm/s)']
631 639 self.windFactor = [1, 1, 100]
632 640
633 641 if self.figure is None:
634 642 self.figure = plt.figure(figsize=(self.width, self.height),
635 643 edgecolor='k',
636 644 facecolor='w')
637 645 else:
638 646 self.figure.clf()
639 647 self.axes = []
640 648
641 649 for n in range(self.nrows):
642 650 ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
643 651 ax.firsttime = True
644 652 self.axes.append(ax)
645 653
646 654 def plot(self):
647 655
648 656 self.x = np.array(self.times)
649 657 self.y = self.dataOut.heightList
650 658 self.z = []
651 659
652 660 for ch in range(self.nrows):
653 661 self.z.append([self.data['output'][t][ch] for t in self.times])
654 662
655 663 self.z = np.array(self.z)
656 664 self.z = numpy.ma.masked_invalid(self.z)
657 665
658 666 cmap=plt.get_cmap(self.colormap)
659 667 cmap.set_bad('black', 1.)
660 668
661 669 for n, ax in enumerate(self.axes):
662 670 x, y, z = self.fill_gaps(*self.decimate())
663 671 xmin = self.min_time
664 672 xmax = xmin+self.xrange*60*60
665 673 if ax.firsttime:
666 674 self.ymin = self.ymin if self.ymin else np.nanmin(self.y)
667 675 self.ymax = self.ymax if self.ymax else np.nanmax(self.y)
668 676 self.zmax = self.zmax if self.zmax else numpy.nanmax(abs(self.z[:-1, :]))
669 677 self.zmin = self.zmin if self.zmin else -self.zmax
670 678
671 679 plot = ax.pcolormesh(x, y, z[n].T*self.windFactor[n],
672 680 vmin=self.zmin,
673 681 vmax=self.zmax,
674 682 cmap=cmap
675 683 )
676 684 divider = make_axes_locatable(ax)
677 685 cax = divider.new_horizontal(size='2%', pad=0.05)
678 686 self.figure.add_axes(cax)
679 687 cb = plt.colorbar(plot, cax)
680 688 cb.set_label(self.clabels[n])
681 689 ax.set_ylim(self.ymin, self.ymax)
682 690
683 691 ax.xaxis.set_major_formatter(FuncFormatter(func))
684 692 ax.xaxis.set_major_locator(LinearLocator(6))
685 693
686 694 ax.set_ylabel(self.ylabel)
687 695
688 696 ax.set_xlim(xmin, xmax)
689 697 ax.firsttime = False
690 698 else:
691 699 ax.collections.remove(ax.collections[0])
692 700 ax.set_xlim(xmin, xmax)
693 701 plot = ax.pcolormesh(x, y, z[n].T*self.windFactor[n],
694 702 vmin=self.zmin,
695 703 vmax=self.zmax,
696 704 cmap=plt.get_cmap(self.colormap)
697 705 )
698 706 ax.set_title('{} {}'.format(self.titles[n],
699 707 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')),
700 708 size=8)
701 709
702 710 self.saveTime = self.min_time
703 711
704 712
705 713 class PlotSNRData(PlotRTIData):
706 714 CODE = 'snr'
707 715 colormap = 'jet'
708 716
709 717 class PlotDOPData(PlotRTIData):
710 718 CODE = 'dop'
711 719 colormap = 'jet'
712 720
713 721
714 722 class PlotPHASEData(PlotCOHData):
715 723 CODE = 'phase'
716 724 colormap = 'seismic'
717 725
718 726
719 727 class PlotSkyMapData(PlotData):
720 728
721 729 CODE = 'met'
722 730
723 731 def setup(self):
724 732
725 733 self.ncols = 1
726 734 self.nrows = 1
727 735 self.width = 7.2
728 736 self.height = 7.2
729 737
730 738 self.xlabel = 'Zonal Zenith Angle (deg)'
731 739 self.ylabel = 'Meridional Zenith Angle (deg)'
732 740
733 741 if self.figure is None:
734 742 self.figure = plt.figure(figsize=(self.width, self.height),
735 743 edgecolor='k',
736 744 facecolor='w')
737 745 else:
738 746 self.figure.clf()
739 747
740 748 self.ax = plt.subplot2grid((self.nrows, self.ncols), (0, 0), 1, 1, polar=True)
741 749 self.ax.firsttime = True
742 750
743 751
744 752 def plot(self):
745 753
746 754 arrayParameters = np.concatenate([self.data['param'][t] for t in self.times])
747 755 error = arrayParameters[:,-1]
748 756 indValid = numpy.where(error == 0)[0]
749 757 finalMeteor = arrayParameters[indValid,:]
750 758 finalAzimuth = finalMeteor[:,3]
751 759 finalZenith = finalMeteor[:,4]
752 760
753 761 x = finalAzimuth*numpy.pi/180
754 762 y = finalZenith
755 763
756 764 if self.ax.firsttime:
757 765 self.ax.plot = self.ax.plot(x, y, 'bo', markersize=5)[0]
758 766 self.ax.set_ylim(0,90)
759 767 self.ax.set_yticks(numpy.arange(0,90,20))
760 768 self.ax.set_xlabel(self.xlabel)
761 769 self.ax.set_ylabel(self.ylabel)
762 770 self.ax.yaxis.labelpad = 40
763 771 self.ax.firsttime = False
764 772 else:
765 773 self.ax.plot.set_data(x, y)
766 774
767 775
768 776 dt1 = datetime.datetime.fromtimestamp(self.min_time).strftime('%y/%m/%d %H:%M:%S')
769 777 dt2 = datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')
770 778 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
771 779 dt2,
772 780 len(x))
773 781 self.ax.set_title(title, size=8)
774 782
775 783 self.saveTime = self.max_time
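# Minimal, self-contained sketch (assumed sample values, not part of this module)
# of the polar mapping used in PlotSkyMapData.plot: azimuth in degrees becomes the
# angular coordinate in radians and the zenith angle is used directly as the radius.
#
# import numpy
# import matplotlib.pyplot as plt
#
# azimuth = numpy.array([0., 90., 180., 270.])   # degrees
# zenith = numpy.array([10., 30., 50., 70.])     # degrees
#
# ax = plt.subplot(111, polar=True)
# ax.plot(azimuth*numpy.pi/180, zenith, 'bo', markersize=5)
# ax.set_ylim(0, 90)
# plt.show()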
@@ -1,1816 +1,1813
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
27 27 def isNumber(cad):
28 28 """
29 29 Checks whether the characters that make up a string can be converted to a number.
30 30
31 31 Exceptions:
32 32 If a given string cannot be converted to a number
33 33 Input:
34 34 str, string to be analyzed to determine whether or not it can be converted to a number
35 35
36 36 Return:
37 37 True : if the string is numeric
38 38 False : if the string is not numeric
39 39 """
40 40 try:
41 41 float( cad )
42 42 return True
43 43 except:
44 44 return False
45 45
46 46 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
47 47 """
48 48 This function determines whether or not a data file falls within the specified date range.
49 49
50 50 Inputs:
51 51 filename : full name of the data file in Jicamarca format (.r)
52 52
53 53 startUTSeconds : start date of the selected range, given in seconds
54 54 counted from 01/01/1970.
55 55 endUTSeconds : end date of the selected range, given in seconds
56 56 counted from 01/01/1970.
57 57
58 58 Return:
59 59 Boolean : Returns True if the data file contains data within the specified
60 60 date range, otherwise returns False.
61 61
62 62 Exceptions:
63 63 If the file does not exist or cannot be opened
64 64 If the header cannot be read.
65 65
66 66 """
67 67 basicHeaderObj = BasicHeader(LOCALTIME)
68 68
69 69 try:
70 70 fp = open(filename,'rb')
71 71 except IOError:
72 72 print "The file %s can't be opened" %(filename)
73 73 return 0
74 74
75 75 sts = basicHeaderObj.read(fp)
76 76 fp.close()
77 77
78 78 if not(sts):
79 79 print "Skipping the file %s because it does not have a valid header" %(filename)
80 80 return 0
81 81
82 82 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
83 83 return 0
84 84
85 85 return 1
86 86
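# Hedged usage sketch (hypothetical path and dates): build the epoch-second bounds
# that isFileInEpoch expects from calendar dates, then test a file against them.
# time and datetime are already imported by this module.
#
# startUTSeconds = time.mktime(datetime.datetime(2015, 11, 1).timetuple())
# endUTSeconds = time.mktime(datetime.datetime(2015, 11, 2).timetuple())
# print isFileInEpoch('/data/d2015305001.r', startUTSeconds, endUTSeconds)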
87 87 def isTimeInRange(thisTime, startTime, endTime):
88 88
89 89 if endTime >= startTime:
90 90 if (thisTime < startTime) or (thisTime > endTime):
91 91 return 0
92 92
93 93 return 1
94 94 else:
95 95 if (thisTime < startTime) and (thisTime > endTime):
96 96 return 0
97 97
98 98 return 1
99 99
100 100 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
101 101 """
102 102 Returns 1 if the data file falls within the specified time range.
103 103
104 104 Inputs:
105 105 filename : full name of the data file in Jicamarca format (.r)
106 106
107 107 startDate : start date of the selected range, as a datetime.date
108 108
109 109 endDate : end date of the selected range, as a datetime.date
110 110
111 111 startTime : start time of the selected range, as a datetime.time
112 112
113 113 endTime : end time of the selected range, as a datetime.time
114 114
115 115 Return:
116 116 Boolean : Returns True if the data file contains data within the specified
117 117 date range, otherwise returns False.
118 118
119 119 Exceptions:
120 120 If the file does not exist or cannot be opened
121 121 If the header cannot be read.
122 122
123 123 """
124 124
125 125
126 126 try:
127 127 fp = open(filename,'rb')
128 128 except IOError:
129 129 print "The file %s can't be opened" %(filename)
130 130 return None
131 131
132 132 firstBasicHeaderObj = BasicHeader(LOCALTIME)
133 133 systemHeaderObj = SystemHeader()
134 134 radarControllerHeaderObj = RadarControllerHeader()
135 135 processingHeaderObj = ProcessingHeader()
136 136
137 137 lastBasicHeaderObj = BasicHeader(LOCALTIME)
138 138
139 139 sts = firstBasicHeaderObj.read(fp)
140 140
141 141 if not(sts):
142 142 print "[Reading] Skipping the file %s because it does not have a valid header" %(filename)
143 143 return None
144 144
145 145 if not systemHeaderObj.read(fp):
146 146 return None
147 147
148 148 if not radarControllerHeaderObj.read(fp):
149 149 return None
150 150
151 151 if not processingHeaderObj.read(fp):
152 152 return None
153 153
154 154 filesize = os.path.getsize(filename)
155 155
156 156 offset = processingHeaderObj.blockSize + 24 #header size
157 157
158 158 if filesize <= offset:
159 159 print "[Reading] %s: This file does not have enough data" %filename
160 160 return None
161 161
162 162 fp.seek(-offset, 2)
163 163
164 164 sts = lastBasicHeaderObj.read(fp)
165 165
166 166 fp.close()
167 167
168 168 thisDatetime = lastBasicHeaderObj.datatime
169 169 thisTime_last_block = thisDatetime.time()
170 170
171 171 thisDatetime = firstBasicHeaderObj.datatime
172 172 thisDate = thisDatetime.date()
173 173 thisTime_first_block = thisDatetime.time()
174 174
175 175 #General case
176 176 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
177 177 #-----------o----------------------------o-----------
178 178 # startTime endTime
179 179
180 180 if endTime >= startTime:
181 181 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
182 182 return None
183 183
184 184 return thisDatetime
185 185
186 186 #If endTime < startTime then endTime belongs to the next day
187 187
188 188
189 189 #<<<<<<<<<<<o o>>>>>>>>>>>
190 190 #-----------o----------------------------o-----------
191 191 # endTime startTime
192 192
193 193 if (thisDate == startDate) and (thisTime_last_block < startTime):
194 194 return None
195 195
196 196 if (thisDate == endDate) and (thisTime_first_block > endTime):
197 197 return None
198 198
199 199 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
200 200 return None
201 201
202 202 return thisDatetime
203 203
204 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 205 """
206 206 Returns 1 if the data folder falls within the specified date range.
207 207
208 208 Inputs:
209 209 folder : full name of the directory.
210 210 Its format should be "/path_root/?YYYYDDD"
211 211
212 212 where:
213 213 YYYY : year (e.g. 2015)
214 214 DDD : day of the year (e.g. 305)
215 215
216 216 startDate : start date of the selected range, as a datetime.date
217 217
218 218 endDate : end date of the selected range, as a datetime.date
219 219
220 220 Return:
221 221 Boolean : Returns True if the folder contains data within the specified
222 222 date range, otherwise returns False.
223 223 Exceptions:
224 224 If the directory does not have the expected format
225 225 """
226 226
227 227 basename = os.path.basename(folder)
228 228
229 229 if not isRadarFolder(basename):
230 230 print "The folder %s does not have the right format" %folder
231 231 return 0
232 232
233 233 if startDate and endDate:
234 234 thisDate = getDateFromRadarFolder(basename)
235 235
236 236 if thisDate < startDate:
237 237 return 0
238 238
239 239 if thisDate > endDate:
240 240 return 0
241 241
242 242 return 1
243 243
244 244 def isFileInDateRange(filename, startDate=None, endDate=None):
245 245 """
246 246 Returns 1 if the data file falls within the specified date range.
247 247
248 248 Inputs:
249 249 filename : full name of the data file in Jicamarca format (.r)
250 250
251 251 Its format should be "?YYYYDDDsss"
252 252
253 253 where:
254 254 YYYY : year (e.g. 2015)
255 255 DDD : day of the year (e.g. 305)
256 256 sss : set
257 257
258 258 startDate : start date of the selected range, as a datetime.date
259 259
260 260 endDate : end date of the selected range, as a datetime.date
261 261
262 262 Return:
263 263 Boolean : Returns True if the data file contains data within the specified
264 264 date range, otherwise returns False.
265 265 Exceptions:
266 266 If the file does not have the expected format
267 267 """
268 268
269 269 basename = os.path.basename(filename)
270 270
271 271 if not isRadarFile(basename):
272 272 print "The filename %s does not have the right format" %filename
273 273 return 0
274 274
275 275 if startDate and endDate:
276 276 thisDate = getDateFromRadarFile(basename)
277 277
278 278 if thisDate < startDate:
279 279 return 0
280 280
281 281 if thisDate > endDate:
282 282 return 0
283 283
284 284 return 1
285 285
286 286 def getFileFromSet(path, ext, set):
287 287 validFilelist = []
288 288 fileList = os.listdir(path)
289 289
290 290 # 0 1234 567 89A BCDE
291 291 # H YYYY DDD SSS .ext
292 292
293 293 for thisFile in fileList:
294 294 try:
295 295 year = int(thisFile[1:5])
296 296 doy = int(thisFile[5:8])
297 297 except:
298 298 continue
299 299
300 300 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
301 301 continue
302 302
303 303 validFilelist.append(thisFile)
304 304
305 305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
306 306
307 307 if len(myfile)!= 0:
308 308 return myfile[0]
309 309 else:
310 310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
311 311 print 'the filename %s does not exist'%filename
312 312 print '...going to the last file: '
313 313
314 314 if validFilelist:
315 315 validFilelist = sorted( validFilelist, key=str.lower )
316 316 return validFilelist[-1]
317 317
318 318 return None
319 319
320 320 def getlastFileFromPath(path, ext):
321 321 """
322 322 Filters the fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
323 323 and returns the last file of the remaining list.
324 324
325 325 Input:
326 326 fileList : list containing all files (without path) that make up a given folder
327 327 ext : extension of the files contained in the folder
328 328
329 329 Return:
330 330 The last file of the given folder, without the path.
331 331 """
332 332 validFilelist = []
333 333 fileList = os.listdir(path)
334 334
335 335 # 0 1234 567 89A BCDE
336 336 # H YYYY DDD SSS .ext
337 337
338 338 for thisFile in fileList:
339 339
340 340 year = thisFile[1:5]
341 341 if not isNumber(year):
342 342 continue
343 343
344 344 doy = thisFile[5:8]
345 345 if not isNumber(doy):
346 346 continue
347 347
348 348 year = int(year)
349 349 doy = int(doy)
350 350
351 351 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
352 352 continue
353 353
354 354 validFilelist.append(thisFile)
355 355
356 356 if validFilelist:
357 357 validFilelist = sorted( validFilelist, key=str.lower )
358 358 return validFilelist[-1]
359 359
360 360 return None
361 361
362 362 def checkForRealPath(path, foldercounter, year, doy, set, ext):
363 363 """
364 364 Since Linux is case sensitive, checkForRealPath finds the correct name of a path
365 365 by trying several upper/lower-case combinations of names to determine
366 366 the exact path of a given file.
367 367
368 368 Example :
369 369 the correct file name is .../.../D2009307/P2009307367.ext
370 370
371 371 Then the function tries the following combinations
372 372 .../.../y2009307367.ext
373 373 .../.../Y2009307367.ext
374 374 .../.../x2009307/y2009307367.ext
375 375 .../.../x2009307/Y2009307367.ext
376 376 .../.../X2009307/y2009307367.ext
377 377 .../.../X2009307/Y2009307367.ext
378 378 in this case the last combination of letters is identical to the file being searched for
379 379
380 380 Return:
381 381 If the right combination is found, returns the full path and the file name;
382 382 otherwise returns None as the path and the last upper-case combination of the
383 383 name as the filename
384 384 """
385 385 fullfilename = None
386 386 find_flag = False
387 387 filename = None
388 388
389 389 prefixDirList = [None,'d','D']
390 390 if ext.lower() == ".r": #voltage
391 391 prefixFileList = ['d','D']
392 392 elif ext.lower() == ".pdata": #spectra
393 393 prefixFileList = ['p','P']
394 394 else:
395 395 return None, filename
396 396
397 397 #sweep over the possible combinations
398 398 for prefixDir in prefixDirList:
399 399 thispath = path
400 400 if prefixDir != None:
401 401 #build the directory name xYYYYDDD (x=d or x=D)
402 402 if foldercounter == 0:
403 403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
404 404 else:
405 405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
406 406 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
407 407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
408 408 fullfilename = os.path.join( thispath, filename ) #build the full path
409 409
410 410 if os.path.exists( fullfilename ): #check that it exists
411 411 find_flag = True
412 412 break
413 413 if find_flag:
414 414 break
415 415
416 416 if not(find_flag):
417 417 return None, filename
418 418
419 419 return fullfilename, filename
420 420
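# Hedged usage sketch (hypothetical base path and date): look for set 367 of DOY 307
# of 2009 under /data, letting checkForRealPath try the d/D and p/P upper/lower-case
# combinations described in the docstring above.
#
# fullfilename, filename = checkForRealPath('/data', 0, 2009, 307, 367, '.r')
# if fullfilename is None:
#     print "file %s was not found" % filename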
421 421 def isRadarFolder(folder):
422 422 try:
423 423 year = int(folder[1:5])
424 424 doy = int(folder[5:8])
425 425 except:
426 426 return 0
427 427
428 428 return 1
429 429
430 430 def isRadarFile(file):
431 431 try:
432 432 year = int(file[1:5])
433 433 doy = int(file[5:8])
434 434 set = int(file[8:11])
435 435 except:
436 436 return 0
437 437
438 438 return 1
439 439
440 440 def getDateFromRadarFile(file):
441 441 try:
442 442 year = int(file[1:5])
443 443 doy = int(file[5:8])
444 444 set = int(file[8:11])
445 445 except:
446 446 return None
447 447
448 448 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
449 449 return thisDate
450 450
451 451 def getDateFromRadarFolder(folder):
452 452 try:
453 453 year = int(folder[1:5])
454 454 doy = int(folder[5:8])
455 455 except:
456 456 return None
457 457
458 458 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
459 459 return thisDate
460 460
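# Worked example (assumed filename) of the year/day-of-year decoding used by the two
# helpers above: 'd2015305001.r' carries year 2015, DOY 305 and set 001, and DOY 305
# of 2015 is November 1st.
#
# print getDateFromRadarFile('d2015305001.r')   # -> 2015-11-01
# print getDateFromRadarFolder('d2015305')      # -> 2015-11-01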
461 461 class JRODataIO:
462 462
463 463 c = 3E8
464 464
465 465 isConfig = False
466 466
467 467 basicHeaderObj = None
468 468
469 469 systemHeaderObj = None
470 470
471 471 radarControllerHeaderObj = None
472 472
473 473 processingHeaderObj = None
474 474
475 475 dtype = None
476 476
477 477 pathList = []
478 478
479 479 filenameList = []
480 480
481 481 filename = None
482 482
483 483 ext = None
484 484
485 485 flagIsNewFile = 1
486 486
487 487 flagDiscontinuousBlock = 0
488 488
489 489 flagIsNewBlock = 0
490 490
491 491 fp = None
492 492
493 493 firstHeaderSize = 0
494 494
495 495 basicHeaderSize = 24
496 496
497 497 versionFile = 1103
498 498
499 499 fileSize = None
500 500
501 501 # ippSeconds = None
502 502
503 503 fileSizeByHeader = None
504 504
505 505 fileIndex = None
506 506
507 507 profileIndex = None
508 508
509 509 blockIndex = None
510 510
511 511 nTotalBlocks = None
512 512
513 513 maxTimeStep = 30
514 514
515 515 lastUTTime = None
516 516
517 517 datablock = None
518 518
519 519 dataOut = None
520 520
521 521 blocksize = None
522 522
523 523 getByBlock = False
524 524
525 525 def __init__(self):
526 526
527 527 raise NotImplementedError
528 528
529 529 def run(self):
530 530
531 531 raise NotImplementedError
532 532
533 533 def getDtypeWidth(self):
534 534
535 535 dtype_index = get_dtype_index(self.dtype)
536 536 dtype_width = get_dtype_width(dtype_index)
537 537
538 538 return dtype_width
539 539
540 540 def getAllowedArgs(self):
541 541 return inspect.getargspec(self.run).args
542 542
543 543 class JRODataReader(JRODataIO):
544 544
545 545
546 546 online = 0
547 547
548 548 realtime = 0
549 549
550 550 nReadBlocks = 0
551 551
552 552 delay = 10 #number of seconds waiting a new file
553 553
554 554 nTries = 3 #quantity tries
555 555
556 556 nFiles = 3 #number of files for searching
557 557
558 558 path = None
559 559
560 560 foldercounter = 0
561 561
562 562 flagNoMoreFiles = 0
563 563
564 564 datetimeList = []
565 565
566 566 __isFirstTimeOnline = 1
567 567
568 568 __printInfo = True
569 569
570 570 profileIndex = None
571 571
572 572 nTxs = 1
573 573
574 574 txIndex = None
575 575
576 576 #Added--------------------
577 577
578 578 selBlocksize = None
579 579
580 580 selBlocktime = None
581 581
582 582
583 583 def __init__(self):
584 584
585 585 """
586 586 This class is used to find data files
587 587
588 588 Example:
589 589 reader = JRODataReader()
590 590 fileList = reader.findDataFiles()
591 591
592 592 """
593 593 pass
594 594
595 595
596 596 def createObjByDefault(self):
597 597 """
598 598
599 599 """
600 600 raise NotImplementedError
601 601
602 602 def getBlockDimension(self):
603 603
604 604 raise NotImplementedError
605 605
606 606 def __searchFilesOffLine(self,
607 607 path,
608 608 startDate=None,
609 609 endDate=None,
610 610 startTime=datetime.time(0,0,0),
611 611 endTime=datetime.time(23,59,59),
612 612 set=None,
613 613 expLabel='',
614 614 ext='.r',
615 615 queue=None,
616 616 cursor=None,
617 617 skip=None,
618 618 walk=True):
619 619
620 620 self.filenameList = []
621 621 self.datetimeList = []
622 622
623 623 pathList = []
624 624
625 625 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
626 626
627 627 if dateList == []:
628 628 # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
629 629 return None, None
630 630
631 631 if len(dateList) > 1:
632 632 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
633 633 else:
634 634 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
635 635
636 636 filenameList = []
637 637 datetimeList = []
638 638
639 639 for thisPath in pathList:
640 640 # thisPath = pathList[pathDict[file]]
641 641
642 642 fileList = glob.glob1(thisPath, "*%s" %ext)
643 643 fileList.sort()
644 644
645 645 skippedFileList = []
646 646
647 647 if cursor is not None and skip is not None:
648 648 # if cursor*skip > len(fileList):
649 649 if skip == 0:
650 650 if queue is not None:
651 651 queue.put(len(fileList))
652 652 skippedFileList = []
653 653 else:
654 654 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
655 655
656 656 else:
657 657 skippedFileList = fileList
658 658
659 659 for file in skippedFileList:
660 660
661 661 filename = os.path.join(thisPath,file)
662 662
663 663 if not isFileInDateRange(filename, startDate, endDate):
664 664 continue
665 665
666 666 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
667 667
668 668 if not(thisDatetime):
669 669 continue
670 670
671 671 filenameList.append(filename)
672 672 datetimeList.append(thisDatetime)
673 673
674 674 if not(filenameList):
675 675 print "[Reading] Selected time range is invalid [%s - %s]: no *%s files in %s" %(startTime, endTime, ext, path)
676 676 return None, None
677 677
678 678 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
679 679 print
680 680
681 681 for i in range(len(filenameList)):
682 682 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
683 683
684 684 self.filenameList = filenameList
685 685 self.datetimeList = datetimeList
686 686
687 687 return pathList, filenameList
688 688
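# Sketch (assumed numbers) of how the cursor/skip pair above partitions the file list:
# with 100 files and skip=25, cursor 0 reads files [0:25], cursor 1 reads [25:50], and
# so on, so each reader process handles a disjoint block of files.
#
# fileList = ['file%03d.r' % i for i in range(100)]
# cursor, skip = 1, 25
# print fileList[cursor*skip: cursor*skip + skip]   # ['file025.r', ..., 'file049.r']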
689 689 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
690 690
691 691 """
692 692 Looks for the last file of the last folder (whether or not it is determined by startDateTime) and
693 693 returns the file found along with other data.
694 694
695 695 Input:
696 696 path : folder that contains the data files
697 697
698 698 expLabel : name of the sub-experiment (subfolder)
699 699
700 700 ext : extension of the files
701 701
702 702 walk : if enabled, the search is performed inside the doypath subdirectories (xYYYYDDD)
703 703
704 704 Return:
705 705 directory : the directory where the file was found
706 706 filename : the last file of the given folder
707 707 year : the year
708 708 doy : the day of the year
709 709 set : the set number of the file
710 710
711 711
712 712 """
713 713 if not os.path.isdir(path):
714 714 return None, None, None, None, None, None
715 715
716 716 dirList = []
717 717
718 718 if not walk:
719 719 fullpath = path
720 720 foldercounter = 0
721 721 else:
722 722 #Keep only the directories
723 723 for thisPath in os.listdir(path):
724 724 if not os.path.isdir(os.path.join(path,thisPath)):
725 725 continue
726 726 if not isRadarFolder(thisPath):
727 727 continue
728 728
729 729 dirList.append(thisPath)
730 730
731 731 if not(dirList):
732 732 return None, None, None, None, None, None
733 733
734 734 dirList = sorted( dirList, key=str.lower )
735 735
736 736 doypath = dirList[-1]
737 737 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
738 738 fullpath = os.path.join(path, doypath, expLabel)
739 739
740 740
741 741 print "[Reading] %s folder was found: " %(fullpath )
742 742
743 743 if set == None:
744 744 filename = getlastFileFromPath(fullpath, ext)
745 745 else:
746 746 filename = getFileFromSet(fullpath, ext, set)
747 747
748 748 if not(filename):
749 749 return None, None, None, None, None, None
750 750
751 751 print "[Reading] %s file was found" %(filename)
752 752
753 753 if not(self.__verifyFile(os.path.join(fullpath, filename))):
754 754 return None, None, None, None, None, None
755 755
756 756 year = int( filename[1:5] )
757 757 doy = int( filename[5:8] )
758 758 set = int( filename[8:11] )
759 759
760 760 return fullpath, foldercounter, filename, year, doy, set
761 761
762 762 def __setNextFileOffline(self):
763 763
764 764 idFile = self.fileIndex
765 765
766 766 while (True):
767 767 idFile += 1
768 768 if not(idFile < len(self.filenameList)):
769 769 self.flagNoMoreFiles = 1
770 770 # print "[Reading] No more Files"
771 771 return 0
772 772
773 773 filename = self.filenameList[idFile]
774 774
775 775 if not(self.__verifyFile(filename)):
776 776 continue
777 777
778 778 fileSize = os.path.getsize(filename)
779 779 fp = open(filename,'rb')
780 780 break
781 781
782 782 self.flagIsNewFile = 1
783 783 self.fileIndex = idFile
784 784 self.filename = filename
785 785 self.fileSize = fileSize
786 786 self.fp = fp
787 787
788 788 # print "[Reading] Setting the file: %s"%self.filename
789 789
790 790 return 1
791 791
792 792 def __setNextFileOnline(self):
793 793 """
794 794 Looks for the next file with enough data to be read within a specific folder; if
795 795 no valid file is found it waits for a given time and then searches among the next
796 796 n possible files.
797 797
798 798 Affected:
799 799 self.flagIsNewFile
800 800 self.filename
801 801 self.fileSize
802 802 self.fp
803 803 self.set
804 804 self.flagNoMoreFiles
805 805
806 806 Return:
807 807 0 : if after searching for the next valid file it could not be found
808 808 1 : if the file was opened successfully and is ready to be read
809 809
810 810 Exceptions:
811 811 If a given file cannot be opened
812 812 """
813 813 nFiles = 0
814 814 fileOk_flag = False
815 815 firstTime_flag = True
816 816
817 817 self.set += 1
818 818
819 819 if self.set > 999:
820 820 self.set = 0
821 821 self.foldercounter += 1
822 822
823 823 #look for the first available file
824 824 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
825 825 if fullfilename:
826 826 if self.__verifyFile(fullfilename, False):
827 827 fileOk_flag = True
828 828
829 829 #if no file is found, wait and search again
830 830 if not(fileOk_flag):
831 831 for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files
832 832
833 833 if firstTime_flag: #on the first pass, run the loop self.nTries times
834 834 tries = self.nTries
835 835 else:
836 836 tries = 1 #after the first pass, try only once
837 837
838 838 for nTries in range( tries ):
839 839 if firstTime_flag:
840 840 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
841 841 sleep( self.delay )
842 842 else:
843 843 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
844 844
845 845 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
846 846 if fullfilename:
847 847 if self.__verifyFile(fullfilename):
848 848 fileOk_flag = True
849 849 break
850 850
851 851 if fileOk_flag:
852 852 break
853 853
854 854 firstTime_flag = False
855 855
856 856 print "\t[Reading] Skipping the file \"%s\" because it does not exist" % filename
857 857 self.set += 1
858 858
859 859 if nFiles == (self.nFiles-1): #if the requested file is not found, switch folders and search in the next one
860 860 self.set = 0
861 861 self.doy += 1
862 862 self.foldercounter = 0
863 863
864 864 if fileOk_flag:
865 865 self.fileSize = os.path.getsize( fullfilename )
866 866 self.filename = fullfilename
867 867 self.flagIsNewFile = 1
868 868 if self.fp != None: self.fp.close()
869 869 self.fp = open(fullfilename, 'rb')
870 870 self.flagNoMoreFiles = 0
871 871 # print '[Reading] Setting the file: %s' % fullfilename
872 872 else:
873 873 self.fileSize = 0
874 874 self.filename = None
875 875 self.flagIsNewFile = 0
876 876 self.fp = None
877 877 self.flagNoMoreFiles = 1
878 878 # print '[Reading] No more files to read'
879 879
880 880 return fileOk_flag
881 881
882 882 def setNextFile(self):
883 883 if self.fp != None:
884 884 self.fp.close()
885 885
886 886 if self.online:
887 887 newFile = self.__setNextFileOnline()
888 888 else:
889 889 newFile = self.__setNextFileOffline()
890 890
891 891 if not(newFile):
892 892 print '[Reading] No more files to read'
893 893 return 0
894 894
895 895 if self.verbose:
896 896 print '[Reading] Setting the file: %s' % self.filename
897 897
898 898 self.__readFirstHeader()
899 899 self.nReadBlocks = 0
900 900 return 1
901 901
902 902 def __waitNewBlock(self):
903 903 """
904 904 Returns 1 if a new data block was found, 0 otherwise.
905 905
906 906 If the reading mode is offline it always returns 0
907 907 """
908 908 if not self.online:
909 909 return 0
910 910
911 911 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
912 912 return 0
913 913
914 914 currentPointer = self.fp.tell()
915 915
916 916 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
917 917
918 918 for nTries in range( self.nTries ):
919 919
920 920 self.fp.close()
921 921 self.fp = open( self.filename, 'rb' )
922 922 self.fp.seek( currentPointer )
923 923
924 924 self.fileSize = os.path.getsize( self.filename )
925 925 currentSize = self.fileSize - currentPointer
926 926
927 927 if ( currentSize >= neededSize ):
928 928 self.basicHeaderObj.read(self.fp)
929 929 return 1
930 930
931 931 if self.fileSize == self.fileSizeByHeader:
932 932 # self.flagEoF = True
933 933 return 0
934 934
935 935 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
936 936 sleep( self.delay )
937 937
938 938
939 939 return 0
940 940
941 941 def waitDataBlock(self,pointer_location):
942 942
943 943 currentPointer = pointer_location
944 944
945 945 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
946 946
947 947 for nTries in range( self.nTries ):
948 948 self.fp.close()
949 949 self.fp = open( self.filename, 'rb' )
950 950 self.fp.seek( currentPointer )
951 951
952 952 self.fileSize = os.path.getsize( self.filename )
953 953 currentSize = self.fileSize - currentPointer
954 954
955 955 if ( currentSize >= neededSize ):
956 956 return 1
957 957
958 958 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
959 959 sleep( self.delay )
960 960
961 961 return 0
962 962
963 963 def __jumpToLastBlock(self):
964 964
965 965 if not(self.__isFirstTimeOnline):
966 966 return
967 967
968 968 csize = self.fileSize - self.fp.tell()
969 969 blocksize = self.processingHeaderObj.blockSize
970 970
971 971 #skip the first data block
972 972 if csize > self.processingHeaderObj.blockSize:
973 973 self.fp.seek(self.fp.tell() + blocksize)
974 974 else:
975 975 return
976 976
977 977 csize = self.fileSize - self.fp.tell()
978 978 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
979 979 while True:
980 980
981 981 if self.fp.tell()<self.fileSize:
982 982 self.fp.seek(self.fp.tell() + neededsize)
983 983 else:
984 984 self.fp.seek(self.fp.tell() - neededsize)
985 985 break
986 986
987 987 # csize = self.fileSize - self.fp.tell()
988 988 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
989 989 # factor = int(csize/neededsize)
990 990 # if factor > 0:
991 991 # self.fp.seek(self.fp.tell() + factor*neededsize)
992 992
993 993 self.flagIsNewFile = 0
994 994 self.__isFirstTimeOnline = 0
995 995
996 996 def __setNewBlock(self):
997 997 #if self.server is None:
998 998 if self.fp == None:
999 999 return 0
1000 1000
1001 1001 # if self.online:
1002 # self.__jumpToLastBlock()
1003 print 'xxxx'
1002 # self.__jumpToLastBlock()
1004 1003
1005 1004 if self.flagIsNewFile:
1006 1005 self.lastUTTime = self.basicHeaderObj.utc
1007 1006 return 1
1008 1007
1009 1008 if self.realtime:
1010 1009 self.flagDiscontinuousBlock = 1
1011 1010 if not(self.setNextFile()):
1012 1011 return 0
1013 1012 else:
1014 return 1
1015 print 'xxxx'
1013 return 1
1016 1014 #if self.server is None:
1017 1015 currentSize = self.fileSize - self.fp.tell()
1018 1016 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1019 1017 if (currentSize >= neededSize):
1020 1018 self.basicHeaderObj.read(self.fp)
1021 1019 self.lastUTTime = self.basicHeaderObj.utc
1022 1020 return 1
1023 1021 # else:
1024 1022 # self.basicHeaderObj.read(self.zHeader)
1025 1023 # self.lastUTTime = self.basicHeaderObj.utc
1026 1024 # return 1
1027 1025 if self.__waitNewBlock():
1028 1026 self.lastUTTime = self.basicHeaderObj.utc
1029 1027 return 1
1030 1028 #if self.server is None:
1031 1029 if not(self.setNextFile()):
1032 1030 return 0
1033 1031
1034 1032 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1035 1033 self.lastUTTime = self.basicHeaderObj.utc
1036 1034
1037 1035 self.flagDiscontinuousBlock = 0
1038 1036
1039 1037 if deltaTime > self.maxTimeStep:
1040 1038 self.flagDiscontinuousBlock = 1
1041 1039
1042 1040 return 1
1043 1041
1044 1042 def readNextBlock(self):
1045 1043
1046 1044 #Skip block out of startTime and endTime
1047 while True:
1048 print 'cxxxx'
1045 while True:
1049 1046 if not(self.__setNewBlock()):
1050 1047 print 'returning'
1051 1048 return 0
1052 print 'dxxx'
1049
1053 1050 if not(self.readBlock()):
1054 1051 return 0
1055 1052
1056 1053 self.getBasicHeader()
1057 1054
1058 1055 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1059 1056
1060 1057 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1061 1058 self.processingHeaderObj.dataBlocksPerFile,
1062 1059 self.dataOut.datatime.ctime())
1063 1060 continue
1064 1061
1065 1062 break
1066 1063
1067 1064 if self.verbose:
1068 1065 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1069 1066 self.processingHeaderObj.dataBlocksPerFile,
1070 1067 self.dataOut.datatime.ctime())
1071 1068 return 1
1072 1069
1073 1070 def __readFirstHeader(self):
1074 1071
1075 1072 self.basicHeaderObj.read(self.fp)
1076 1073 self.systemHeaderObj.read(self.fp)
1077 1074 self.radarControllerHeaderObj.read(self.fp)
1078 1075 self.processingHeaderObj.read(self.fp)
1079 1076
1080 1077 self.firstHeaderSize = self.basicHeaderObj.size
1081 1078
1082 1079 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1083 1080 if datatype == 0:
1084 1081 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1085 1082 elif datatype == 1:
1086 1083 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1087 1084 elif datatype == 2:
1088 1085 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1089 1086 elif datatype == 3:
1090 1087 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1091 1088 elif datatype == 4:
1092 1089 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1093 1090 elif datatype == 5:
1094 1091 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1095 1092 else:
1096 1093 raise ValueError, 'Data type was not defined'
1097 1094
1098 1095 self.dtype = datatype_str
1099 1096 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1100 1097 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1101 1098 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1102 1099 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1103 1100 self.getBlockDimension()
1104 1101
1105 1102 def __verifyFile(self, filename, msgFlag=True):
1106 1103
1107 1104 msg = None
1108 1105
1109 1106 try:
1110 1107 fp = open(filename, 'rb')
1111 1108 except IOError:
1112 1109
1113 1110 if msgFlag:
1114 1111 print "[Reading] File %s can't be opened" % (filename)
1115 1112
1116 1113 return False
1117 1114
1118 1115 currentPosition = fp.tell()
1119 1116 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1120 1117
1121 1118 if neededSize == 0:
1122 1119 basicHeaderObj = BasicHeader(LOCALTIME)
1123 1120 systemHeaderObj = SystemHeader()
1124 1121 radarControllerHeaderObj = RadarControllerHeader()
1125 1122 processingHeaderObj = ProcessingHeader()
1126 1123
1127 1124 if not( basicHeaderObj.read(fp) ):
1128 1125 fp.close()
1129 1126 return False
1130 1127
1131 1128 if not( systemHeaderObj.read(fp) ):
1132 1129 fp.close()
1133 1130 return False
1134 1131
1135 1132 if not( radarControllerHeaderObj.read(fp) ):
1136 1133 fp.close()
1137 1134 return False
1138 1135
1139 1136 if not( processingHeaderObj.read(fp) ):
1140 1137 fp.close()
1141 1138 return False
1142 1139
1143 1140 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1144 1141 else:
1145 1142 msg = "[Reading] Skipping the file %s because it does not have enough data" %filename
1146 1143
1147 1144 fp.close()
1148 1145
1149 1146 fileSize = os.path.getsize(filename)
1150 1147 currentSize = fileSize - currentPosition
1151 1148
1152 1149 if currentSize < neededSize:
1153 1150 if msgFlag and (msg != None):
1154 1151 print msg
1155 1152 return False
1156 1153
1157 1154 return True
1158 1155
1159 1156 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1160 1157
1161 1158 path_empty = True
1162 1159
1163 1160 dateList = []
1164 1161 pathList = []
1165 1162
1166 1163 multi_path = path.split(',')
1167 1164
1168 1165 if not walk:
1169 1166
1170 1167 for single_path in multi_path:
1171 1168
1172 1169 if not os.path.isdir(single_path):
1173 1170 continue
1174 1171
1175 1172 fileList = glob.glob1(single_path, "*"+ext)
1176 1173
1177 1174 if not fileList:
1178 1175 continue
1179 1176
1180 1177 path_empty = False
1181 1178
1182 1179 fileList.sort()
1183 1180
1184 1181 for thisFile in fileList:
1185 1182
1186 1183 if not os.path.isfile(os.path.join(single_path, thisFile)):
1187 1184 continue
1188 1185
1189 1186 if not isRadarFile(thisFile):
1190 1187 continue
1191 1188
1192 1189 if not isFileInDateRange(thisFile, startDate, endDate):
1193 1190 continue
1194 1191
1195 1192 thisDate = getDateFromRadarFile(thisFile)
1196 1193
1197 1194 if thisDate in dateList:
1198 1195 continue
1199 1196
1200 1197 dateList.append(thisDate)
1201 1198 pathList.append(single_path)
1202 1199
1203 1200 else:
1204 1201 for single_path in multi_path:
1205 1202
1206 1203 if not os.path.isdir(single_path):
1207 1204 continue
1208 1205
1209 1206 dirList = []
1210 1207
1211 1208 for thisPath in os.listdir(single_path):
1212 1209
1213 1210 if not os.path.isdir(os.path.join(single_path,thisPath)):
1214 1211 continue
1215 1212
1216 1213 if not isRadarFolder(thisPath):
1217 1214 continue
1218 1215
1219 1216 if not isFolderInDateRange(thisPath, startDate, endDate):
1220 1217 continue
1221 1218
1222 1219 dirList.append(thisPath)
1223 1220
1224 1221 if not dirList:
1225 1222 continue
1226 1223
1227 1224 dirList.sort()
1228 1225
1229 1226 for thisDir in dirList:
1230 1227
1231 1228 datapath = os.path.join(single_path, thisDir, expLabel)
1232 1229 fileList = glob.glob1(datapath, "*"+ext)
1233 1230
1234 1231 if not fileList:
1235 1232 continue
1236 1233
1237 1234 path_empty = False
1238 1235
1239 1236 thisDate = getDateFromRadarFolder(thisDir)
1240 1237
1241 1238 pathList.append(datapath)
1242 1239 dateList.append(thisDate)
1243 1240
1244 1241 dateList.sort()
1245 1242
1246 1243 if walk:
1247 1244 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1248 1245 else:
1249 1246 pattern_path = multi_path[0]
1250 1247
1251 1248 if path_empty:
1252 1249 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1253 1250 else:
1254 1251 if not dateList:
1255 1252 print "[Reading] Selected date range is invalid [%s - %s]: no *%s files in %s" %(startDate, endDate, ext, path)
1256 1253
1257 1254 if include_path:
1258 1255 return dateList, pathList
1259 1256
1260 1257 return dateList
1261 1258
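# Hedged usage sketch (hypothetical reader class, path and dates): list the dates
# that have Jicamarca voltage files under /data/jro, walking the dYYYYDDD subfolders.
#
# reader = SomeReader()   # any concrete JRODataReader subclass; the name is illustrative
# dateList = reader.findDatafiles('/data/jro',
#                                 startDate=datetime.date(2015, 11, 1),
#                                 endDate=datetime.date(2015, 11, 3),
#                                 ext='.r',
#                                 walk=True)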
1262 1259 def setup(self,
1263 1260 path=None,
1264 1261 startDate=None,
1265 1262 endDate=None,
1266 1263 startTime=datetime.time(0,0,0),
1267 1264 endTime=datetime.time(23,59,59),
1268 1265 set=None,
1269 1266 expLabel = "",
1270 1267 ext = None,
1271 1268 online = False,
1272 1269 delay = 60,
1273 1270 walk = True,
1274 1271 getblock = False,
1275 1272 nTxs = 1,
1276 1273 realtime=False,
1277 1274 blocksize=None,
1278 1275 blocktime=None,
1279 1276 queue=None,
1280 1277 skip=None,
1281 1278 cursor=None,
1282 1279 warnings=True,
1283 1280 verbose=True,
1284 1281 server=None):
1285 1282 if server is not None:
1286 1283 if 'tcp://' in server:
1287 1284 address = server
1288 1285 else:
1289 1286 address = 'ipc:///tmp/%s' % server
1290 1287 self.server = address
1291 1288 self.context = zmq.Context()
1292 1289 self.receiver = self.context.socket(zmq.PULL)
1293 1290 self.receiver.connect(self.server)
1294 1291 time.sleep(0.5)
1295 1292 print '[Starting] ReceiverData from {}'.format(self.server)
1296 1293 else:
1297 1294 self.server = None
1298 1295 if path == None:
1299 1296 raise ValueError, "[Reading] The path is not valid"
1300 1297
1301 1298 if ext == None:
1302 1299 ext = self.ext
1303 1300
1304 1301 if online:
1305 1302 print "[Reading] Searching files in online mode..."
1306 1303
1307 1304 for nTries in range( self.nTries ):
1308 1305 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1309 1306
1310 1307 if fullpath:
1311 1308 break
1312 1309
1313 1310 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1314 1311 sleep( self.delay )
1315 1312
1316 1313 if not(fullpath):
1317 1314 print "[Reading] There isn't any valid file in %s" % path
1318 1315 return
1319 1316
1320 1317 self.year = year
1321 1318 self.doy = doy
1322 1319 self.set = set - 1
1323 1320 self.path = path
1324 1321 self.foldercounter = foldercounter
1325 1322 last_set = None
1326 1323 else:
1327 1324 print "[Reading] Searching files in offline mode ..."
1328 1325 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1329 1326 startTime=startTime, endTime=endTime,
1330 1327 set=set, expLabel=expLabel, ext=ext,
1331 1328 walk=walk, cursor=cursor,
1332 1329 skip=skip, queue=queue)
1333 1330
1334 1331 if not(pathList):
1335 1332 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1336 1333 # datetime.datetime.combine(startDate,startTime).ctime(),
1337 1334 # datetime.datetime.combine(endDate,endTime).ctime())
1338 1335
1339 1336 # sys.exit(-1)
1340 1337
1341 1338 self.fileIndex = -1
1342 1339 self.pathList = []
1343 1340 self.filenameList = []
1344 1341 return
1345 1342
1346 1343 self.fileIndex = -1
1347 1344 self.pathList = pathList
1348 1345 self.filenameList = filenameList
1349 1346 file_name = os.path.basename(filenameList[-1])
1350 1347 basename, ext = os.path.splitext(file_name)
1351 1348 last_set = int(basename[-3:])
1352 1349
1353 1350 self.online = online
1354 1351 self.realtime = realtime
1355 1352 self.delay = delay
1356 1353 ext = ext.lower()
1357 1354 self.ext = ext
1358 1355 self.getByBlock = getblock
1359 1356 self.nTxs = nTxs
1360 1357 self.startTime = startTime
1361 1358 self.endTime = endTime
1362 1359
1363 1360 #Added-----------------
1364 1361 self.selBlocksize = blocksize
1365 1362 self.selBlocktime = blocktime
1366 1363
1367 1364 # Verbose-----------
1368 1365 self.verbose = verbose
1369 1366 self.warnings = warnings
1370 1367
1371 1368 if not(self.setNextFile()):
1372 1369 if (startDate!=None) and (endDate!=None):
1373 1370 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1374 1371 elif startDate != None:
1375 1372 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1376 1373 else:
1377 1374 print "[Reading] No files"
1378 1375
1379 1376 self.fileIndex = -1
1380 1377 self.pathList = []
1381 1378 self.filenameList = []
1382 1379 return
1383 1380
1384 1381 # self.getBasicHeader()
1385 1382
1386 1383 if last_set != None:
1387 1384 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1388 1385 return
1389 1386
1390 1387 def getBasicHeader(self):
1391 1388
1392 1389 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1393 1390
1394 1391 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1395 1392
1396 1393 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1397 1394
1398 1395 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1399 1396
1400 1397 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1401 1398
1402 1399 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1403 1400
1404 1401 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1405 1402
1406 1403 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1407 1404
1408 1405
1409 1406 def getFirstHeader(self):
1410 1407
1411 1408 raise NotImplementedError
1412 1409
1413 1410 def getData(self):
1414 1411
1415 1412 raise NotImplementedError
1416 1413
1417 1414 def hasNotDataInBuffer(self):
1418 1415
1419 1416 raise NotImplementedError
1420 1417
1421 1418 def readBlock(self):
1422 1419
1423 1420 raise NotImplementedError
1424 1421
1425 1422 def isEndProcess(self):
1426 1423
1427 1424 return self.flagNoMoreFiles
1428 1425
1429 1426 def printReadBlocks(self):
1430 1427
1431 1428 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1432 1429
1433 1430 def printTotalBlocks(self):
1434 1431
1435 1432 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1436 1433
1437 1434 def printNumberOfBlock(self):
1438 1435
1439 1436 if self.flagIsNewBlock:
1440 1437 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1441 1438 self.processingHeaderObj.dataBlocksPerFile,
1442 1439 self.dataOut.datatime.ctime())
1443 1440
1444 1441 def printInfo(self):
1445 1442
1446 1443 if self.__printInfo == False:
1447 1444 return
1448 1445
1449 1446 self.basicHeaderObj.printInfo()
1450 1447 self.systemHeaderObj.printInfo()
1451 1448 self.radarControllerHeaderObj.printInfo()
1452 1449 self.processingHeaderObj.printInfo()
1453 1450
1454 1451 self.__printInfo = False
1455 1452
1456 1453
1457 1454 def run(self,
1458 1455 path=None,
1459 1456 startDate=None,
1460 1457 endDate=None,
1461 1458 startTime=datetime.time(0,0,0),
1462 1459 endTime=datetime.time(23,59,59),
1463 1460 set=None,
1464 1461 expLabel = "",
1465 1462 ext = None,
1466 1463 online = False,
1467 1464 delay = 60,
1468 1465 walk = True,
1469 1466 getblock = False,
1470 1467 nTxs = 1,
1471 1468 realtime=False,
1472 1469 blocksize=None,
1473 1470 blocktime=None,
1474 1471 queue=None,
1475 1472 skip=None,
1476 1473 cursor=None,
1477 1474 warnings=True,
1478 1475 server=None,
1479 1476 verbose=True, **kwargs):
1480 1477
1481 1478 if not(self.isConfig):
1482 1479 # self.dataOut = dataOut
1483 1480 self.setup( path=path,
1484 1481 startDate=startDate,
1485 1482 endDate=endDate,
1486 1483 startTime=startTime,
1487 1484 endTime=endTime,
1488 1485 set=set,
1489 1486 expLabel=expLabel,
1490 1487 ext=ext,
1491 1488 online=online,
1492 1489 delay=delay,
1493 1490 walk=walk,
1494 1491 getblock=getblock,
1495 1492 nTxs=nTxs,
1496 1493 realtime=realtime,
1497 1494 blocksize=blocksize,
1498 1495 blocktime=blocktime,
1499 1496 queue=queue,
1500 1497 skip=skip,
1501 1498 cursor=cursor,
1502 1499 warnings=warnings,
1503 1500 server=server,
1504 1501 verbose=verbose)
1505 1502 self.isConfig = True
1506 1503 if server is None:
1507 1504 self.getData()
1508 1505 else:
1509 1506 self.getFromServer()
1510 1507
1511 1508 class JRODataWriter(JRODataIO):
1512 1509
1513 1510 """
1514 1511 This class allows writing data to processed files (.r or .pdata). Data is
1515 1512 always written in blocks.
1516 1513 """
1517 1514
1518 1515 blockIndex = 0
1519 1516
1520 1517 path = None
1521 1518
1522 1519 setFile = None
1523 1520
1524 1521 profilesPerBlock = None
1525 1522
1526 1523 blocksPerFile = None
1527 1524
1528 1525 nWriteBlocks = 0
1529 1526
1530 1527 fileDate = None
1531 1528
1532 1529 def __init__(self, dataOut=None):
1533 1530 raise NotImplementedError
1534 1531
1535 1532
1536 1533 def hasAllDataInBuffer(self):
1537 1534 raise NotImplementedError
1538 1535
1539 1536
1540 1537 def setBlockDimension(self):
1541 1538 raise NotImplementedError
1542 1539
1543 1540
1544 1541 def writeBlock(self):
1545 1542 raise NotImplementedError
1546 1543
1547 1544
1548 1545 def putData(self):
1549 1546 raise NotImplementedError
1550 1547
1551 1548
1552 1549 def getProcessFlags(self):
1553 1550
1554 1551 processFlags = 0
1555 1552
1556 1553 dtype_index = get_dtype_index(self.dtype)
1557 1554 procflag_dtype = get_procflag_dtype(dtype_index)
1558 1555
1559 1556 processFlags += procflag_dtype
1560 1557
1561 1558 if self.dataOut.flagDecodeData:
1562 1559 processFlags += PROCFLAG.DECODE_DATA
1563 1560
1564 1561 if self.dataOut.flagDeflipData:
1565 1562 processFlags += PROCFLAG.DEFLIP_DATA
1566 1563
1567 1564 if self.dataOut.code is not None:
1568 1565 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1569 1566
1570 1567 if self.dataOut.nCohInt > 1:
1571 1568 processFlags += PROCFLAG.COHERENT_INTEGRATION
1572 1569
1573 1570 if self.dataOut.type == "Spectra":
1574 1571 if self.dataOut.nIncohInt > 1:
1575 1572 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1576 1573
1577 1574 if self.dataOut.data_dc is not None:
1578 1575 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1579 1576
1580 1577 if self.dataOut.flagShiftFFT:
1581 1578 processFlags += PROCFLAG.SHIFT_FFT_DATA
1582 1579
1583 1580 return processFlags
1584 1581
1585 1582 def setBasicHeader(self):
1586 1583
1587 1584 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1588 1585 self.basicHeaderObj.version = self.versionFile
1589 1586 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1590 1587
1591 1588 utc = numpy.floor(self.dataOut.utctime)
1592 1589 milisecond = (self.dataOut.utctime - utc)* 1000.0
1593 1590
1594 1591 self.basicHeaderObj.utc = utc
1595 1592 self.basicHeaderObj.miliSecond = milisecond
1596 1593 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1597 1594 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1598 1595 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1599 1596
1600 1597 def setFirstHeader(self):
1601 1598 """
1602 1599 Obtiene una copia del First Header
1603 1600
1604 1601 Affected:
1605 1602
1606 1603 self.basicHeaderObj
1607 1604 self.systemHeaderObj
1608 1605 self.radarControllerHeaderObj
1609 1606         self.processingHeaderObj
1610 1607
1611 1608 Return:
1612 1609 None
1613 1610 """
1614 1611
1615 1612 raise NotImplementedError
1616 1613
1617 1614 def __writeFirstHeader(self):
1618 1615 """
1619 1616         Writes the first header of the file, that is, the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1620 1617
1621 1618 Affected:
1622 1619 __dataType
1623 1620
1624 1621 Return:
1625 1622 None
1626 1623 """
1627 1624
1628 1625         # COMPUTE PARAMETERS
1629 1626
1630 1627 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1631 1628 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1632 1629
1633 1630 self.basicHeaderObj.write(self.fp)
1634 1631 self.systemHeaderObj.write(self.fp)
1635 1632 self.radarControllerHeaderObj.write(self.fp)
1636 1633 self.processingHeaderObj.write(self.fp)
1637 1634
1638 1635 def __setNewBlock(self):
1639 1636 """
1640 1637         If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1641 1638
1642 1639         Return:
1643 1640             0 : if nothing could be written
1644 1641             1 : if the Basic or the First Header was written
1645 1642 """
1646 1643 if self.fp == None:
1647 1644 self.setNextFile()
1648 1645
1649 1646 if self.flagIsNewFile:
1650 1647 return 1
1651 1648
1652 1649 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1653 1650 self.basicHeaderObj.write(self.fp)
1654 1651 return 1
1655 1652
1656 1653 if not( self.setNextFile() ):
1657 1654 return 0
1658 1655
1659 1656 return 1
1660 1657
1661 1658
1662 1659 def writeNextBlock(self):
1663 1660 """
1664 1661         Selects the next block of data and writes it to a file
1665 1662
1666 1663         Return:
1667 1664             0 : if the data block could not be written
1668 1665             1 : if the data block was written
1669 1666 """
1670 1667 if not( self.__setNewBlock() ):
1671 1668 return 0
1672 1669
1673 1670 self.writeBlock()
1674 1671
1675 1672 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1676 1673 self.processingHeaderObj.dataBlocksPerFile)
1677 1674
1678 1675 return 1
1679 1676
1680 1677 def setNextFile(self):
1681 1678 """
1682 1679         Determines the next file to be written
1683 1680
1684 1681 Affected:
1685 1682 self.filename
1686 1683 self.subfolder
1687 1684 self.fp
1688 1685 self.setFile
1689 1686 self.flagIsNewFile
1690 1687
1691 1688 Return:
1692 1689             0 : if the file cannot be written
1693 1690             1 : if the file is ready to be written
1694 1691 """
1695 1692 ext = self.ext
1696 1693 path = self.path
1697 1694
1698 1695 if self.fp != None:
1699 1696 self.fp.close()
1700 1697
1701 1698 timeTuple = time.localtime( self.dataOut.utctime)
1702 1699 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1703 1700
1704 1701 fullpath = os.path.join( path, subfolder )
1705 1702 setFile = self.setFile
1706 1703
1707 1704 if not( os.path.exists(fullpath) ):
1708 1705 os.mkdir(fullpath)
1709 1706             setFile = -1 #initialize the file-set counter
1710 1707 else:
1711 1708 filesList = os.listdir( fullpath )
1712 1709 if len( filesList ) > 0:
1713 1710 filesList = sorted( filesList, key=str.lower )
1714 1711 filen = filesList[-1]
1715 1712                 # the filename must have the following format
1716 1713 # 0 1234 567 89A BCDE (hex)
1717 1714 # x YYYY DDD SSS .ext
1718 1715 if isNumber( filen[8:11] ):
1719 1716                     setFile = int( filen[8:11] ) #initialize the set counter with the set number of the last file
1720 1717 else:
1721 1718 setFile = -1
1722 1719 else:
1723 1720                 setFile = -1 #initialize the file-set counter
1724 1721
1725 1722 setFile += 1
1726 1723
1727 1724 #If this is a new day it resets some values
1728 1725 if self.dataOut.datatime.date() > self.fileDate:
1729 1726 setFile = 0
1730 1727 self.nTotalBlocks = 0
1731 1728
1732 1729 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1733 1730
1734 1731 filename = os.path.join( path, subfolder, filen )
1735 1732
1736 1733 fp = open( filename,'wb' )
1737 1734
1738 1735 self.blockIndex = 0
1739 1736
1740 1737 #guardando atributos
1741 1738 self.filename = filename
1742 1739 self.subfolder = subfolder
1743 1740 self.fp = fp
1744 1741 self.setFile = setFile
1745 1742 self.flagIsNewFile = 1
1746 1743 self.fileDate = self.dataOut.datatime.date()
1747 1744
1748 1745 self.setFirstHeader()
1749 1746
1750 1747 print '[Writing] Opening file: %s'%self.filename
1751 1748
1752 1749 self.__writeFirstHeader()
1753 1750
1754 1751 return 1
1755 1752
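# Illustrative sketch (not from the original sources): setNextFile() above builds the
# output path following the "x YYYY DDD SSS .ext" layout noted in its comments. The
# optchar, set number, extension and base path are example values.
import os, time

optchar, ext, setFile = 'D', '.r', 3
timeTuple = time.localtime(1263945600)                                  # any utctime

subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)      # e.g. 'd2010020'
filen = '%s%4.4d%3.3d%3.3d%s' % (optchar, timeTuple.tm_year,
                                 timeTuple.tm_yday, setFile, ext)       # e.g. 'D2010020003.r'
print(os.path.join('/path/to/output', subfolder, filen))

# Recovering the set number when resuming on an existing folder:
print(int(filen[8:11]))                                                 # -> 3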
1756 1753 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1757 1754 """
1758 1755         Sets the format in which the data will be saved and writes the First Header
1759 1756
1760 1757 Inputs:
1761 1758 path : directory where data will be saved
1762 1759 profilesPerBlock : number of profiles per block
1763 1760 set : initial file set
1764 1761 datatype : An integer number that defines data type:
1765 1762 0 : int8 (1 byte)
1766 1763 1 : int16 (2 bytes)
1767 1764 2 : int32 (4 bytes)
1768 1765 3 : int64 (8 bytes)
1769 1766 4 : float32 (4 bytes)
1770 1767 5 : double64 (8 bytes)
1771 1768
1772 1769 Return:
1773 1770             0 : if the setup failed
1774 1771             1 : if the setup succeeded
1775 1772 """
1776 1773
1777 1774 if ext == None:
1778 1775 ext = self.ext
1779 1776
1780 1777 self.ext = ext.lower()
1781 1778
1782 1779 self.path = path
1783 1780
1784 1781 if set is None:
1785 1782 self.setFile = -1
1786 1783 else:
1787 1784 self.setFile = set - 1
1788 1785
1789 1786 self.blocksPerFile = blocksPerFile
1790 1787
1791 1788 self.profilesPerBlock = profilesPerBlock
1792 1789
1793 1790 self.dataOut = dataOut
1794 1791 self.fileDate = self.dataOut.datatime.date()
1795 1792 #By default
1796 1793 self.dtype = self.dataOut.dtype
1797 1794
1798 1795 if datatype is not None:
1799 1796 self.dtype = get_numpy_dtype(datatype)
1800 1797
1801 1798 if not(self.setNextFile()):
1802 1799 print "[Writing] There isn't a next file"
1803 1800 return 0
1804 1801
1805 1802 self.setBlockDimension()
1806 1803
1807 1804 return 1
1808 1805
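# Illustrative sketch (not from the original sources): the datatype codes documented in
# setup() above map to structured (real, imag) numpy dtypes; the real mapping lives in
# get_numpy_dtype(), this table only mirrors the convention shown by the reader classes.
import numpy

DATATYPE_TABLE = {
    0: numpy.dtype([('real', '<i1'), ('imag', '<i1')]),   # int8
    1: numpy.dtype([('real', '<i2'), ('imag', '<i2')]),   # int16
    2: numpy.dtype([('real', '<i4'), ('imag', '<i4')]),   # int32
    3: numpy.dtype([('real', '<i8'), ('imag', '<i8')]),   # int64
    4: numpy.dtype([('real', '<f4'), ('imag', '<f4')]),   # float32
    5: numpy.dtype([('real', '<f8'), ('imag', '<f8')]),   # float64 / double
}

print(DATATYPE_TABLE[4])   # dtype used with the default datatype=4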
1809 1806 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1810 1807
1811 1808 if not(self.isConfig):
1812 1809
1813 1810 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1814 1811 self.isConfig = True
1815 1812
1816 1813 self.putData()
@@ -1,739 +1,737
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13 import zmq
14 14 import tempfile
15 15 from StringIO import StringIO
16 16 # from _sha import blocksize
17 17
18 18 class VoltageReader(JRODataReader, ProcessingUnit):
19 19 """
20 20     This class reads voltage data from files in rawdata format (.r). The data is always
21 21     read in blocks. The data read (a 3-dimensional array:
22 22     profiles*heights*channels) is stored in the "buffer" variable.
23 23
24 24                                profiles * heights * channels
25 25
26 26     This class holds instances (objects) of the BasicHeader, SystemHeader,
27 27     RadarControllerHeader and Voltage classes. The first three are used to store the data
28 28     header information (metadata), and the fourth (Voltage) to obtain and store one data
29 29     profile from the "buffer" each time the "getData" method is called.
30 30
31 31 Example:
32 32
33 33 dpath = "/home/myuser/data"
34 34
35 35 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
36 36
37 37 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
38 38
39 39 readerObj = VoltageReader()
40 40
41 41 readerObj.setup(dpath, startTime, endTime)
42 42
43 43 while(True):
44 44
45 45 #to get one profile
46 46 profile = readerObj.getData()
47 47
48 48 #print the profile
49 49 print profile
50 50
51 51 #If you want to see all datablock
52 52 print readerObj.datablock
53 53
54 54 if readerObj.flagNoMoreFiles:
55 55 break
56 56
57 57 """
58 58
59 59 ext = ".r"
60 60
61 61 optchar = "D"
62 62 dataOut = None
63 63
64 64 def __init__(self, **kwargs):
65 65 """
66 66         Initializer of the VoltageReader class for reading voltage data.
67 67
68 68         Input:
69 69             dataOut : Object of the Voltage class. This object will be used to
70 70                   store one data profile every time a request is made
71 71                   (getData). The profile will be obtained from the data buffer;
72 72                   if the buffer is empty a new data block will be read
73 73                   from file.
74 74                   If this parameter is not passed, one will be created internally.
75 75
76 76         Affected variables:
77 77 self.dataOut
78 78
79 79 Return:
80 80 None
81 81 """
82 82
83 83 ProcessingUnit.__init__(self, **kwargs)
84 84
85 85 self.isConfig = False
86 86
87 87 self.datablock = None
88 88
89 89 self.utc = 0
90 90
91 91 self.ext = ".r"
92 92
93 93 self.optchar = "D"
94 94
95 95 self.basicHeaderObj = BasicHeader(LOCALTIME)
96 96
97 97 self.systemHeaderObj = SystemHeader()
98 98
99 99 self.radarControllerHeaderObj = RadarControllerHeader()
100 100
101 101 self.processingHeaderObj = ProcessingHeader()
102 102
103 103 self.online = 0
104 104
105 105 self.fp = None
106 106
107 107 self.idFile = None
108 108
109 109 self.dtype = None
110 110
111 111 self.fileSizeByHeader = None
112 112
113 113 self.filenameList = []
114 114
115 115 self.filename = None
116 116
117 117 self.fileSize = None
118 118
119 119 self.firstHeaderSize = 0
120 120
121 121 self.basicHeaderSize = 24
122 122
123 123 self.pathList = []
124 124
125 125 self.filenameList = []
126 126
127 127 self.lastUTTime = 0
128 128
129 129 self.maxTimeStep = 30
130 130
131 131 self.flagNoMoreFiles = 0
132 132
133 133 self.set = 0
134 134
135 135 self.path = None
136 136
137 137 self.profileIndex = 2**32-1
138 138
139 139 self.delay = 3 #seconds
140 140
141 141         self.nTries = 3 #number of tries
142 142
143 143 self.nFiles = 3 #number of files for searching
144 144
145 145 self.nReadBlocks = 0
146 146
147 147 self.flagIsNewFile = 1
148 148
149 149 self.__isFirstTimeOnline = 1
150 150
151 151 # self.ippSeconds = 0
152 152
153 153 self.flagDiscontinuousBlock = 0
154 154
155 155 self.flagIsNewBlock = 0
156 156
157 157 self.nTotalBlocks = 0
158 158
159 159 self.blocksize = 0
160 160
161 161 self.dataOut = self.createObjByDefault()
162 162
163 163 self.nTxs = 1
164 164
165 165 self.txIndex = 0
166 166
167 167 def createObjByDefault(self):
168 168
169 169 dataObj = Voltage()
170 170
171 171 return dataObj
172 172
173 173 def __hasNotDataInBuffer(self):
174 174
175 175 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock*self.nTxs:
176 176 return 1
177 177
178 178 return 0
179 179
180 180
181 181 def getBlockDimension(self):
182 182 """
183 183         Gets the number of points to read per data block
184 184
185 185 Affected:
186 186 self.blocksize
187 187
188 188 Return:
189 189 None
190 190 """
191 191 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
192 192 self.blocksize = pts2read
193 193
194 194
195 195
196 196 def readBlock(self):
197 197 """
198 198         readBlock reads the data block from the current position of the file pointer
199 199         (self.fp) and updates all the parameters related to the data block
200 200         (metadata + data). The data read is stored in the buffer and the buffer counter
201 201         is set to 0
202 202
203 203 Inputs:
204 204 None
205 205
206 206 Return:
207 207 None
208 208
209 209 Affected:
210 210 self.profileIndex
211 211 self.datablock
212 212 self.flagIsNewFile
213 213 self.flagIsNewBlock
214 214 self.nTotalBlocks
215 215
216 216 Exceptions:
217 217 Si un bloque leido no es un bloque valido
218 218 """
219 219
220 print 'READ BLOCK'
221 220 # if self.server is not None:
222 221 # self.zBlock = self.receiver.recv()
223 222 # self.zHeader = self.zBlock[:24]
224 223 # self.zDataBlock = self.zBlock[24:]
225 224 # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
226 225 # self.processingHeaderObj.profilesPerBlock = 240
227 226 # self.processingHeaderObj.nHeights = 248
228 227 # self.systemHeaderObj.nChannels
229 228 # else:
230 229 current_pointer_location = self.fp.tell()
231 230 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
232 231
233 232 try:
234 233 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
235 print'junked'
236 234 except:
237 235 #print "The read block (%3d) has not enough data" %self.nReadBlocks
238 236
239 237 if self.waitDataBlock(pointer_location=current_pointer_location):
240 238 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
241 239 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
242 240 # return 0
243 241
244 242 #Dimensions : nChannels, nProfiles, nSamples
245 243
246 244 junk = numpy.transpose(junk, (2,0,1))
247 245 self.datablock = junk['real'] + junk['imag']*1j
248 246
249 247 self.profileIndex = 0
250 248
251 249 self.flagIsNewFile = 0
252 250 self.flagIsNewBlock = 1
253 251
254 252 self.nTotalBlocks += 1
255 253 self.nReadBlocks += 1
256 254
257 255 return 1
258 256
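# Illustrative sketch (not from the original sources): readBlock() above reads interleaved
# (real, imag) samples ordered profile-by-profile, height-by-height, channel-by-channel,
# then rearranges them into a complex block of shape (channels, profiles, heights).
# Sizes here are tiny example values.
import numpy

profiles, heights, channels = 4, 5, 2
dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

raw = numpy.zeros(profiles * heights * channels, dtype=dtype)   # stands in for numpy.fromfile(fp, ...)
junk = raw.reshape(profiles, heights, channels)

junk = numpy.transpose(junk, (2, 0, 1))                         # -> (channels, profiles, heights)
datablock = junk['real'] + junk['imag'] * 1j
print(datablock.shape)                                          # (2, 4, 5)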
259 257 def getFirstHeader(self):
260 258
261 259 self.getBasicHeader()
262 260
263 261 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
264 262
265 263 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
266 264
267 265 if self.nTxs > 1:
268 266 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
269 267
270 268         #Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.
271 269
272 270 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
273 271 #
274 272 # if self.radarControllerHeaderObj.code is not None:
275 273 #
276 274 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
277 275 #
278 276 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
279 277 #
280 278 # self.dataOut.code = self.radarControllerHeaderObj.code
281 279
282 280 self.dataOut.dtype = self.dtype
283 281
284 282 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
285 283
286 284 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
287 285
288 286 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
289 287
290 288 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
291 289
292 290         self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #assume the data has not been decoded
293 291
294 292         self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #assume the data has not been deflipped
295 293
296 294 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
297 295
298 296 def reshapeData(self):
299 297
300 298 if self.nTxs < 0:
301 299 return
302 300
303 301 if self.nTxs == 1:
304 302 return
305 303
306 304 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0:
307 305             raise ValueError, "1./nTxs (=%f) should divide nProfiles (=%d) evenly" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock)
308 306
309 307 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
310 308             raise ValueError, "nTxs (=%d) should divide nHeights (=%d) evenly" %(self.nTxs, self.processingHeaderObj.nHeights)
311 309
312 310 self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs))
313 311
314 312 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
315 313 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
316 314 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
317 315
318 316 return
319 317
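# Illustrative sketch (not from the original sources): reshapeData() above handles the
# nTxs > 1 case by splitting each stored profile into nTxs profiles along the height
# axis, so profiles are multiplied and heights divided by nTxs. Example sizes only.
import numpy

nChannels, profilesPerBlock, nHeights, nTxs = 2, 4, 6, 2
datablock = numpy.zeros((nChannels, profilesPerBlock, nHeights), dtype=complex)

datablock = datablock.reshape(nChannels, profilesPerBlock * nTxs, nHeights // nTxs)
print(datablock.shape)   # (2, 8, 3)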
320 318 def readFirstHeaderFromServer(self):
321 319
322 320 self.getFirstHeader()
323 321
324 322 self.firstHeaderSize = self.basicHeaderObj.size
325 323
326 324 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
327 325 if datatype == 0:
328 326 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
329 327 elif datatype == 1:
330 328 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
331 329 elif datatype == 2:
332 330 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
333 331 elif datatype == 3:
334 332 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
335 333 elif datatype == 4:
336 334 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
337 335 elif datatype == 5:
338 336 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
339 337 else:
340 338 raise ValueError, 'Data type was not defined'
341 339
342 340 self.dtype = datatype_str
343 341 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
344 342 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
345 343 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
346 344 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
347 345 self.getBlockDimension()
348 346
349 347
350 348 def getFromServer(self):
351 349 self.flagDiscontinuousBlock = 0
352 350 self.profileIndex = 0
353 351 self.flagIsNewBlock = 1
354 352 self.dataOut.flagNoData = False
355 353 self.nTotalBlocks += 1
356 354 self.nReadBlocks += 1
357 355 self.blockPointer = 0
358 356
359 357 block = self.receiver.recv()
360 358
361 359 self.basicHeaderObj.read(block[self.blockPointer:])
362 360 self.blockPointer += self.basicHeaderObj.length
363 361 self.systemHeaderObj.read(block[self.blockPointer:])
364 362 self.blockPointer += self.systemHeaderObj.length
365 363 self.radarControllerHeaderObj.read(block[self.blockPointer:])
366 364 self.blockPointer += self.radarControllerHeaderObj.length
367 365 self.processingHeaderObj.read(block[self.blockPointer:])
368 366 self.blockPointer += self.processingHeaderObj.length
369 367 self.readFirstHeaderFromServer()
370 368
371 369 timestamp = self.basicHeaderObj.get_datatime()
372 370 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
373 371 current_pointer_location = self.blockPointer
374 372 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
375 373
376 374 try:
377 375 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
378 376 except:
379 377 #print "The read block (%3d) has not enough data" %self.nReadBlocks
380 378 if self.waitDataBlock(pointer_location=current_pointer_location):
381 379 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
382 380 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
383 381 # return 0
384 382
385 383 #Dimensions : nChannels, nProfiles, nSamples
386 384
387 385 junk = numpy.transpose(junk, (2,0,1))
388 386 self.datablock = junk['real'] + junk['imag'] * 1j
389 387 self.profileIndex = 0
390 388 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
391 389 if self.selBlocktime != None:
392 390 if self.dataOut.nCohInt is not None:
393 391 nCohInt = self.dataOut.nCohInt
394 392 else:
395 393 nCohInt = 1
396 394 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
397 395 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
398 396 datasize = self.dataOut.data.shape[1]
399 397 if datasize < self.selBlocksize:
400 398 buffer = numpy.zeros((self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype = 'complex')
401 399 buffer[:,:datasize,:] = self.dataOut.data
402 400 self.dataOut.data = buffer
403 401 self.profileIndex = blockIndex
404 402
405 403 self.dataOut.flagDataAsBlock = True
406 404 self.flagIsNewBlock = 1
407 405 self.dataOut.realtime = self.online
408 406
409 407 return self.dataOut.data
410 408
411 409 def getData(self):
412 410 """
413 411         getData obtains one unit of data from the read buffer, a profile, and copies it to the self.dataOut
414 412         object of type "Voltage" with all its associated parameters (metadata). When there is no data
415 413         left in the read buffer, a new read of the data blocks is done using
416 414         "readNextBlock"
417 415
418 416         It also increments the buffer counter "self.profileIndex" by 1.
419 417
420 418 Return:
421 419
422 420             If the self.getByBlock flag has been set, the whole block is copied to self.dataOut and self.profileIndex
423 421             equals the total number of profiles read from the file.
424 422
425 423             If self.getByBlock == False:
426 424
427 425 self.dataOut.data = buffer[:, thisProfile, :]
428 426
429 427 shape = [nChannels, nHeis]
430 428
431 429             If self.getByBlock == True:
432 430
433 431 self.dataOut.data = buffer[:, :, :]
434 432
435 433 shape = [nChannels, nProfiles, nHeis]
436 434
437 435 Variables afectadas:
438 436         Affected variables:
439 437 self.profileIndex
440 438
441 439 Affected:
442 440 self.dataOut
443 441 self.profileIndex
444 442 self.flagDiscontinuousBlock
445 443 self.flagIsNewBlock
446 444 """
447 445 if self.flagNoMoreFiles:
448 446 self.dataOut.flagNoData = True
449 447 print 'Process finished'
450 448 return 0
451 449 self.flagDiscontinuousBlock = 0
452 450 self.flagIsNewBlock = 0
453 451 if self.__hasNotDataInBuffer():
454 452 if not( self.readNextBlock() ):
455 453 return 0
456 454
457 455 self.getFirstHeader()
458 456
459 457 self.reshapeData()
460 458 if self.datablock is None:
461 459 self.dataOut.flagNoData = True
462 460 return 0
463 461
464 462 if not self.getByBlock:
465 463
466 464 """
467 465 Return profile by profile
468 466
469 467 If nTxs > 1 then one profile is divided by nTxs and number of total
470 468             If nTxs > 1 then each profile is split into nTxs profiles and the total
471 469             number of profiles is increased by a factor of nTxs (nProfiles *= nTxs)
472 470 self.dataOut.flagDataAsBlock = False
473 471 self.dataOut.data = self.datablock[:,self.profileIndex,:]
474 472 self.dataOut.profileIndex = self.profileIndex
475 473
476 474 self.profileIndex += 1
477 475
478 476 # elif self.selBlocksize==None or self.selBlocksize==self.dataOut.nProfiles:
479 477 # """
480 478 # Return all block
481 479 # """
482 480 # self.dataOut.flagDataAsBlock = True
483 481 # self.dataOut.data = self.datablock
484 482 # self.dataOut.profileIndex = self.dataOut.nProfiles - 1
485 483 #
486 484 # self.profileIndex = self.dataOut.nProfiles
487 485
488 486 else:
489 487 """
490 488 Return a block
491 489 """
492 490 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
493 491 if self.selBlocktime != None:
494 492 if self.dataOut.nCohInt is not None:
495 493 nCohInt = self.dataOut.nCohInt
496 494 else:
497 495 nCohInt = 1
498 496 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
499 497
500 498 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
501 499 self.profileIndex += self.selBlocksize
502 500 datasize = self.dataOut.data.shape[1]
503 501
504 502 if datasize < self.selBlocksize:
505 503 buffer = numpy.zeros((self.dataOut.data.shape[0],self.selBlocksize,self.dataOut.data.shape[2]), dtype = 'complex')
506 504 buffer[:,:datasize,:] = self.dataOut.data
507 505
508 506 while datasize < self.selBlocksize: #Not enough profiles to fill the block
509 507 if not( self.readNextBlock() ):
510 508 return 0
511 509 self.getFirstHeader()
512 510 self.reshapeData()
513 511 if self.datablock is None:
514 512 self.dataOut.flagNoData = True
515 513 return 0
516 514 #stack data
517 515 blockIndex = self.selBlocksize - datasize
518 516 datablock1 = self.datablock[:,:blockIndex,:]
519 517
520 518 buffer[:,datasize:datasize+datablock1.shape[1],:] = datablock1
521 519 datasize += datablock1.shape[1]
522 520
523 521 self.dataOut.data = buffer
524 522 self.profileIndex = blockIndex
525 523
526 524 self.dataOut.flagDataAsBlock = True
527 525 self.dataOut.nProfiles = self.dataOut.data.shape[1]
528 526
529 527 self.dataOut.flagNoData = False
530 528
531 529 self.getBasicHeader()
532 530
533 531 self.dataOut.realtime = self.online
534 532
535 533 return self.dataOut.data
536 534
537 535 class VoltageWriter(JRODataWriter, Operation):
538 536 """
539 537     This class writes voltage data to processed files (.r). The data is always
540 538     written in blocks.
541 539 """
542 540
543 541 ext = ".r"
544 542
545 543 optchar = "D"
546 544
547 545 shapeBuffer = None
548 546
549 547
550 548 def __init__(self, **kwargs):
551 549 """
552 550         Initializer of the VoltageWriter class for writing voltage data.
553 551
554 552 Affected:
555 553 self.dataOut
556 554
557 555 Return: None
558 556 """
559 557 Operation.__init__(self, **kwargs)
560 558
561 559 self.nTotalBlocks = 0
562 560
563 561 self.profileIndex = 0
564 562
565 563 self.isConfig = False
566 564
567 565 self.fp = None
568 566
569 567 self.flagIsNewFile = 1
570 568
571 569 self.blockIndex = 0
572 570
573 571 self.flagIsNewBlock = 0
574 572
575 573 self.setFile = None
576 574
577 575 self.dtype = None
578 576
579 577 self.path = None
580 578
581 579 self.filename = None
582 580
583 581 self.basicHeaderObj = BasicHeader(LOCALTIME)
584 582
585 583 self.systemHeaderObj = SystemHeader()
586 584
587 585 self.radarControllerHeaderObj = RadarControllerHeader()
588 586
589 587 self.processingHeaderObj = ProcessingHeader()
590 588
591 589 def hasAllDataInBuffer(self):
592 590 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
593 591 return 1
594 592 return 0
595 593
596 594
597 595 def setBlockDimension(self):
598 596 """
599 597         Gets the dimensional shapes of the data sub-blocks that make up a block
600 598
601 599 Affected:
602 600 self.shape_spc_Buffer
603 601 self.shape_cspc_Buffer
604 602 self.shape_dc_Buffer
605 603
606 604 Return: None
607 605 """
608 606 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
609 607 self.processingHeaderObj.nHeights,
610 608 self.systemHeaderObj.nChannels)
611 609
612 610 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
613 611 self.processingHeaderObj.profilesPerBlock,
614 612 self.processingHeaderObj.nHeights),
615 613 dtype=numpy.dtype('complex64'))
616 614
617 615 def writeBlock(self):
618 616 """
619 617         Writes the buffer to the designated file
620 618
621 619 Affected:
622 620 self.profileIndex
623 621 self.flagIsNewFile
624 622 self.flagIsNewBlock
625 623 self.nTotalBlocks
626 624 self.blockIndex
627 625
628 626 Return: None
629 627 """
630 628 data = numpy.zeros( self.shapeBuffer, self.dtype )
631 629
632 630 junk = numpy.transpose(self.datablock, (1,2,0))
633 631
634 632 data['real'] = junk.real
635 633 data['imag'] = junk.imag
636 634
637 635 data = data.reshape( (-1) )
638 636
639 637 data.tofile( self.fp )
640 638
641 639 self.datablock.fill(0)
642 640
643 641 self.profileIndex = 0
644 642 self.flagIsNewFile = 0
645 643 self.flagIsNewBlock = 1
646 644
647 645 self.blockIndex += 1
648 646 self.nTotalBlocks += 1
649 647
650 648 # print "[Writing] Block = %04d" %self.blockIndex
651 649
652 650 def putData(self):
653 651 """
654 652         Assembles a data block and then writes it to a file
655 653
656 654 Affected:
657 655 self.flagIsNewBlock
658 656 self.profileIndex
659 657
660 658 Return:
661 659             0 : if there is no data or no more files can be written
662 660             1 : if the data of one block was written to a file
663 661 """
664 662 if self.dataOut.flagNoData:
665 663 return 0
666 664
667 665 self.flagIsNewBlock = 0
668 666
669 667 if self.dataOut.flagDiscontinuousBlock:
670 668 self.datablock.fill(0)
671 669 self.profileIndex = 0
672 670 self.setNextFile()
673 671
674 672 if self.profileIndex == 0:
675 673 self.setBasicHeader()
676 674
677 675 self.datablock[:,self.profileIndex,:] = self.dataOut.data
678 676
679 677 self.profileIndex += 1
680 678
681 679 if self.hasAllDataInBuffer():
682 680 #if self.flagIsNewFile:
683 681 self.writeNextBlock()
684 682 # self.setFirstHeader()
685 683
686 684 return 1
687 685
688 686 def __getBlockSize(self):
689 687 '''
690 688         This method determines the number of bytes for a data block of type Voltage
691 689 '''
692 690
693 691 dtype_width = self.getDtypeWidth()
694 692
695 693 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)
696 694
697 695 return blocksize
698 696
699 697 def setFirstHeader(self):
700 698
701 699 """
702 700         Gets a copy of the First Header
703 701
704 702 Affected:
705 703 self.systemHeaderObj
706 704 self.radarControllerHeaderObj
707 705 self.dtype
708 706
709 707 Return:
710 708 None
711 709 """
712 710
713 711 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
714 712 self.systemHeaderObj.nChannels = self.dataOut.nChannels
715 713 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
716 714
717 715 self.processingHeaderObj.dtype = 0 # Voltage
718 716 self.processingHeaderObj.blockSize = self.__getBlockSize()
719 717 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
720 718 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
721 719         self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
722 720         self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
723 721         self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
724 722         self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
725 723
726 724 if self.dataOut.code is not None:
727 725 self.processingHeaderObj.code = self.dataOut.code
728 726 self.processingHeaderObj.nCode = self.dataOut.nCode
729 727 self.processingHeaderObj.nBaud = self.dataOut.nBaud
730 728
731 729 if self.processingHeaderObj.nWindows != 0:
732 730 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
733 731 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
734 732 self.processingHeaderObj.nHeights = self.dataOut.nHeights
735 733 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
736 734
737 735 self.processingHeaderObj.processFlags = self.getProcessFlags()
738 736
739 737 self.setBasicHeader()
@@ -1,58 +1,139
1 #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
2 #define NUM_CPY_THREADS 8
1 3 #include <Python.h>
2 4 #include <numpy/arrayobject.h>
3 5 #include <math.h>
6 #include <complex.h>
7 #include <time.h>
4 8
9 // void printArr(int *array);
5 10 static PyObject *hildebrand_sekhon(PyObject *self, PyObject *args);
11 static PyObject *correlateByBlock(PyObject *self, PyObject *args);
12 #ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
13 #define PyMODINIT_FUNC void
14 #endif
6 15
7 16 static PyMethodDef extensionsMethods[] = {
8 { "hildebrand_sekhon", (PyCFunction)hildebrand_sekhon, METH_VARARGS, "get noise with" },
9 { NULL, NULL, 0, NULL }
17 { "correlateByBlock", (PyCFunction)correlateByBlock, METH_VARARGS, "get correlation by block" },
18 { "hildebrand_sekhon", (PyCFunction)hildebrand_sekhon, METH_VARARGS, "get noise with hildebrand_sekhon" },
19 { NULL, NULL, 0, NULL }
10 20 };
11 21
12 22 PyMODINIT_FUNC initcSchain() {
13 23 Py_InitModule("cSchain", extensionsMethods);
14 24 import_array();
15 25 }
16 26
27 static PyObject *correlateByBlock(PyObject *self, PyObject *args) {
28
29 // int *x = (int*) malloc(4000000 * 216 * sizeof(int));;
30 // int a = 5;
31 // x = &a;
32 // int b = 6;
33 // x = &b;
34 // printf("Antes de imprimir x \n");
35 // printf("%d \n", x[0]);
36
37 PyObject *data_obj1, *data_obj2;
38 PyArrayObject *data_array1, *data_array2, *correlateRow, *out, *dataRow, *codeRow; //, ,
39 int mode;
40
41 if (!PyArg_ParseTuple(args, "OOi", &data_obj1, &data_obj2, &mode)) return NULL;
42
43 data_array1 = (PyArrayObject *) PyArray_FROM_OTF(data_obj1, NPY_COMPLEX128, NPY_ARRAY_DEFAULT);
44 data_array2 = (PyArrayObject *) PyArray_FROM_OTF(data_obj2, NPY_FLOAT64, NPY_ARRAY_DEFAULT);
45
46 npy_intp dims[1];
47 dims[0] = 200;
48 npy_intp dims_code[1];
49 dims_code[0] = 16;
50
51 double complex * dataRaw;
52 double * codeRaw;
53 dataRaw = (double complex*) PyArray_DATA(data_array1);
54 codeRaw = (double *) PyArray_DATA(data_array2);
55 double complex ** outC = malloc(40000*200*sizeof(double complex));
56 int i;
57
58 clock_t start = clock();
59 for(i=0; i<40000; i++){
60 // codeRow = PyArray_SimpleNewFromData(1, dims_code, NPY_FLOAT64, codeRaw + 16 * i);
61 // dataRow = PyArray_SimpleNewFromData(1, dims, NPY_COMPLEX128, dataRaw + 200 * i);
62 // Py_INCREF(codeRow);
63 // Py_INCREF(dataRow);
64 // PyArray_ENABLEFLAGS(codeRow, NPY_ARRAY_OWNDATA);
65 // PyArray_ENABLEFLAGS(dataRow, NPY_ARRAY_OWNDATA);
66 correlateRow = (PyArrayObject *) PyArray_Correlate2(PyArray_SimpleNewFromData(1, dims_code, NPY_FLOAT64, codeRaw + 16 * i), PyArray_SimpleNewFromData(1, dims, NPY_COMPLEX128, dataRaw + 200 * i), (npy_intp) 2);
67 //Py_INCREF(correlateRow);
68 // PyArray_ENABLEFLAGS(correlateRow, NPY_ARRAY_OWNDATA);
69 memcpy(outC + 200*i, (double complex*) PyArray_DATA(correlateRow), 200 * sizeof(double complex));
70
71 Py_DECREF(correlateRow);
72 // Py_DECREF(codeRow);
73 // Py_DECREF(dataRow);
74 }
75 clock_t end = clock();
76 float seconds = (float)(end - start) / CLOCKS_PER_SEC;
77 printf("%f", seconds);
78 //
79 npy_intp dimsret[2];
80 dimsret[0] = 40000;
81 dimsret[1] = 200;
82 out = PyArray_SimpleNewFromData(2, dimsret, NPY_COMPLEX128, outC);
83 PyArray_ENABLEFLAGS(out, NPY_ARRAY_OWNDATA);
84 //Py_INCREF(out);
85 Py_DECREF(data_array1);
86 Py_DECREF(data_array2);
87 // PyArray_DebugPrint(out);
88 // Py_DECREF(data_obj2);
89 // Py_DECREF(data_obj1);
90 // Py_DECREF(codeRow);
91 // Py_DECREF(dataRow);
92 // free(dataRaw);
93 // free(codeRaw);
94
95 return PyArray_Return(out);
96 }
97
17 98 static PyObject *hildebrand_sekhon(PyObject *self, PyObject *args) {
18 /* Do your stuff here. */
19 99 double navg;
20 100 PyObject *data_obj, *data_array;
21 101
22 102 if (!PyArg_ParseTuple(args, "Od", &data_obj, &navg)) return NULL;
23 data_array = PyArray_FROM_OTF(data_obj, NPY_FLOAT64, NPY_IN_ARRAY);
103 data_array = PyArray_FROM_OTF(data_obj, NPY_FLOAT64, NPY_ARRAY_DEFAULT);
24 104 if (data_array == NULL) {
25 105 Py_XDECREF(data_array);
26 106 Py_XDECREF(data_obj);
27 107 return NULL;
28 108 }
29 109 double *sortdata = (double*)PyArray_DATA(data_array);
30 110 int lenOfData = (int)PyArray_SIZE(data_array) ;
31 111 double nums_min = lenOfData*0.2;
32 112 if (nums_min <= 5) nums_min = 5;
33 113 double sump = 0;
34 114 double sumq = 0;
35 115 int j = 0;
36 116 int cont = 1;
37 117 double rtest = 0;
38 118 while ((cont == 1) && (j < lenOfData)) {
39 119 sump = sump + sortdata[j];
40 120 sumq = sumq + pow(sortdata[j], 2);
41 121 if (j > nums_min) {
42 122 rtest = (double)j/(j-1) + 1/navg;
43 123 if ((sumq*j) > (rtest*pow(sump, 2))) {
44 124 j = j - 1;
45 125 sump = sump - sortdata[j];
46 126 sumq = sumq - pow(sortdata[j],2);
47 127 cont = 0;
48 128 }
49 129 }
50 130 j = j + 1;
51 131 }
52 132
53 133 double lnoise = sump / j;
54 134
55 135 Py_DECREF(data_array);
56 136
57 137 return Py_BuildValue("d", lnoise);
58 138 }
139
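A rough Python rendering of the hildebrand_sekhon routine above, for reference; it mirrors
the same loop and assumes sortdata is already sorted in ascending order (a sketch, not the
actual C binding).

import numpy

def hildebrand_sekhon_py(sortdata, navg):
    """Noise level by the Hildebrand-Sekhon criterion (sortdata sorted ascending)."""
    lenOfData = len(sortdata)
    nums_min = lenOfData * 0.2
    if nums_min <= 5:
        nums_min = 5
    sump = 0.0
    sumq = 0.0
    j = 0
    cont = 1
    while cont == 1 and j < lenOfData:
        sump += sortdata[j]
        sumq += sortdata[j] ** 2
        if j > nums_min:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if sumq * j > rtest * sump ** 2:
                j -= 1
                sump -= sortdata[j]
                sumq -= sortdata[j] ** 2
                cont = 0
        j += 1
    return sump / j

noise = hildebrand_sekhon_py(numpy.sort(numpy.random.rand(1000)), navg=1.0)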
@@ -1,1285 +1,1312
1 1 import sys
2 2 import numpy
3 from profilehooks import profile
3 4 from scipy import interpolate
4
5 from schainpy import cSchain
5 6 from jroproc_base import ProcessingUnit, Operation
6 7 from schainpy.model.data.jrodata import Voltage
8 from time import time
7 9
8 10 class VoltageProc(ProcessingUnit):
9 11
10 12
11 13 def __init__(self, **kwargs):
12 14
13 15 ProcessingUnit.__init__(self, **kwargs)
14 16
15 17 # self.objectDict = {}
16 18 self.dataOut = Voltage()
17 19 self.flip = 1
18 20
19 21 def run(self):
20 22 if self.dataIn.type == 'AMISR':
21 23 self.__updateObjFromAmisrInput()
22 24
23 25 if self.dataIn.type == 'Voltage':
24 26 self.dataOut.copy(self.dataIn)
25 27
26 28 # self.dataOut.copy(self.dataIn)
27 29
28 30 def __updateObjFromAmisrInput(self):
29 31
30 32 self.dataOut.timeZone = self.dataIn.timeZone
31 33 self.dataOut.dstFlag = self.dataIn.dstFlag
32 34 self.dataOut.errorCount = self.dataIn.errorCount
33 35 self.dataOut.useLocalTime = self.dataIn.useLocalTime
34 36
35 37 self.dataOut.flagNoData = self.dataIn.flagNoData
36 38 self.dataOut.data = self.dataIn.data
37 39 self.dataOut.utctime = self.dataIn.utctime
38 40 self.dataOut.channelList = self.dataIn.channelList
39 41 # self.dataOut.timeInterval = self.dataIn.timeInterval
40 42 self.dataOut.heightList = self.dataIn.heightList
41 43 self.dataOut.nProfiles = self.dataIn.nProfiles
42 44
43 45 self.dataOut.nCohInt = self.dataIn.nCohInt
44 46 self.dataOut.ippSeconds = self.dataIn.ippSeconds
45 47 self.dataOut.frequency = self.dataIn.frequency
46 48
47 49 self.dataOut.azimuth = self.dataIn.azimuth
48 50 self.dataOut.zenith = self.dataIn.zenith
49 51
50 52 self.dataOut.beam.codeList = self.dataIn.beam.codeList
51 53 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
52 54 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
53 55 #
54 56 # pass#
55 57 #
56 58 # def init(self):
57 59 #
58 60 #
59 61 # if self.dataIn.type == 'AMISR':
60 62 # self.__updateObjFromAmisrInput()
61 63 #
62 64 # if self.dataIn.type == 'Voltage':
63 65 # self.dataOut.copy(self.dataIn)
64 66 # # No necesita copiar en cada init() los atributos de dataIn
65 67 # # la copia deberia hacerse por cada nuevo bloque de datos
66 68
67 69 def selectChannels(self, channelList):
68 70
69 71 channelIndexList = []
70 72
71 73 for channel in channelList:
72 74 if channel not in self.dataOut.channelList:
73 75 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
74 76
75 77 index = self.dataOut.channelList.index(channel)
76 78 channelIndexList.append(index)
77 79
78 80 self.selectChannelsByIndex(channelIndexList)
79 81
80 82 def selectChannelsByIndex(self, channelIndexList):
81 83 """
82 84         Selects a block of data based on channels according to channelIndexList
83 85
84 86         Input:
85 87             channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
86 88
87 89 Affected:
88 90 self.dataOut.data
89 91 self.dataOut.channelIndexList
90 92 self.dataOut.nChannels
91 93 self.dataOut.m_ProcessingHeader.totalSpectra
92 94 self.dataOut.systemHeaderObj.numChannels
93 95 self.dataOut.m_ProcessingHeader.blockSize
94 96
95 97 Return:
96 98 None
97 99 """
98 100
99 101 for channelIndex in channelIndexList:
100 102 if channelIndex not in self.dataOut.channelIndexList:
101 103 print channelIndexList
102 104 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
103 105
104 106 if self.dataOut.flagDataAsBlock:
105 107 """
106 108 Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
107 109             If the data is obtained in blocks, dimension = [nChannels, nProfiles, nHeis]
108 110 data = self.dataOut.data[channelIndexList,:,:]
109 111 else:
110 112 data = self.dataOut.data[channelIndexList,:]
111 113
112 114 self.dataOut.data = data
113 115 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
114 116 # self.dataOut.nChannels = nChannels
115 117
116 118 return 1
117 119
118 120 def selectHeights(self, minHei=None, maxHei=None):
119 121 """
120 122         Selects a block of data based on a group of height values within the range
121 123         minHei <= height <= maxHei
122 124
123 125         Input:
124 126             minHei : minimum height value to consider
125 127             maxHei : maximum height value to consider
126 128
127 129         Affected:
128 130             Several values are changed indirectly through the selectHeightsByIndex method
129 131
130 132         Return:
131 133             1 if the method ran successfully, otherwise it returns 0
132 134 """
133 135
134 136 if minHei == None:
135 137 minHei = self.dataOut.heightList[0]
136 138
137 139 if maxHei == None:
138 140 maxHei = self.dataOut.heightList[-1]
139 141
140 142 if (minHei < self.dataOut.heightList[0]):
141 143 minHei = self.dataOut.heightList[0]
142 144
143 145 if (maxHei > self.dataOut.heightList[-1]):
144 146 maxHei = self.dataOut.heightList[-1]
145 147
146 148 minIndex = 0
147 149 maxIndex = 0
148 150 heights = self.dataOut.heightList
149 151
150 152 inda = numpy.where(heights >= minHei)
151 153 indb = numpy.where(heights <= maxHei)
152 154
153 155 try:
154 156 minIndex = inda[0][0]
155 157 except:
156 158 minIndex = 0
157 159
158 160 try:
159 161 maxIndex = indb[0][-1]
160 162 except:
161 163 maxIndex = len(heights)
162 164
163 165 self.selectHeightsByIndex(minIndex, maxIndex)
164 166
165 167 return 1
166 168
167 169
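# Illustrative sketch (not from the original sources): selectHeights() above converts a
# height range into index bounds on dataOut.heightList and then delegates to
# selectHeightsByIndex() below. Example values only.
import numpy

heights = numpy.arange(80.0, 200.0, 15.0)        # stands in for dataOut.heightList
minHei, maxHei = 100.0, 170.0

minIndex = numpy.where(heights >= minHei)[0][0]
maxIndex = numpy.where(heights <= maxHei)[0][-1]
print(heights[minIndex:maxIndex])                # heights kept after the selection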
168 170 def selectHeightsByIndex(self, minIndex, maxIndex):
169 171 """
170 172         Selects a block of data based on a group of height indexes within the range
171 173         minIndex <= index <= maxIndex
172 174
173 175         Input:
174 176             minIndex : minimum height index value to consider
175 177             maxIndex : maximum height index value to consider
176 178
177 179         Affected:
178 180             self.dataOut.data
179 181             self.dataOut.heightList
180 182
181 183         Return:
182 184             1 if the method ran successfully, otherwise it returns 0
183 185 """
184 186
185 187 if (minIndex < 0) or (minIndex > maxIndex):
186 188 raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)
187 189
188 190 if (maxIndex >= self.dataOut.nHeights):
189 191 maxIndex = self.dataOut.nHeights
190 192
191 193 #voltage
192 194 if self.dataOut.flagDataAsBlock:
193 195 """
194 196 Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
195 197             If the data is obtained in blocks, dimension = [nChannels, nProfiles, nHeis]
196 198 data = self.dataOut.data[:,:, minIndex:maxIndex]
197 199 else:
198 200 data = self.dataOut.data[:, minIndex:maxIndex]
199 201
200 202 # firstHeight = self.dataOut.heightList[minIndex]
201 203
202 204 self.dataOut.data = data
203 205 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
204 206
205 207 if self.dataOut.nHeights <= 1:
206 208 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
207 209
208 210 return 1
209 211
210 212
211 213 def filterByHeights(self, window):
212 214
213 215 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
214 216
215 217 if window == None:
216 218 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
217 219
218 220 newdelta = deltaHeight * window
219 221 r = self.dataOut.nHeights % window
220 222 newheights = (self.dataOut.nHeights-r)/window
221 223
222 224 if newheights <= 1:
223 225 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
224 226
225 227 if self.dataOut.flagDataAsBlock:
226 228 """
227 229             If the data is obtained in blocks, dimension = [nChannels, nProfiles, nHeis]
228 230 """
229 231 buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
230 232 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
231 233 buffer = numpy.sum(buffer,3)
232 234
233 235 else:
234 236 buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
235 237 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
236 238 buffer = numpy.sum(buffer,2)
237 239
238 240 self.dataOut.data = buffer
239 241 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
240 242 self.dataOut.windowOfFilter = window
241 243
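# Illustrative sketch (not from the original sources): filterByHeights() above groups the
# height axis in windows of `window` samples and sums each group, so the effective height
# resolution becomes deltaHeight * window and leftover heights are dropped. Example sizes.
import numpy

nChannels, nHeights, window = 2, 10, 3
data = numpy.arange(nChannels * nHeights, dtype=float).reshape(nChannels, nHeights)

r = nHeights % window                                    # heights that do not fill a window
buffer = data[:, 0:nHeights - r]
buffer = buffer.reshape(nChannels, (nHeights - r) // window, window)
buffer = numpy.sum(buffer, 2)
print(buffer.shape)                                      # (2, 3)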
242 244 def setH0(self, h0, deltaHeight = None):
243 245
244 246 if not deltaHeight:
245 247 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
246 248
247 249 nHeights = self.dataOut.nHeights
248 250
249 251 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
250 252
251 253 self.dataOut.heightList = newHeiRange
252 254
253 255 def deFlip(self, channelList = []):
254 256
255 257 data = self.dataOut.data.copy()
256 258
257 259 if self.dataOut.flagDataAsBlock:
258 260 flip = self.flip
259 261 profileList = range(self.dataOut.nProfiles)
260 262
261 263 if not channelList:
262 264 for thisProfile in profileList:
263 265 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
264 266 flip *= -1.0
265 267 else:
266 268 for thisChannel in channelList:
267 269 if thisChannel not in self.dataOut.channelList:
268 270 continue
269 271
270 272 for thisProfile in profileList:
271 273 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
272 274 flip *= -1.0
273 275
274 276 self.flip = flip
275 277
276 278 else:
277 279 if not channelList:
278 280 data[:,:] = data[:,:]*self.flip
279 281 else:
280 282 for thisChannel in channelList:
281 283 if thisChannel not in self.dataOut.channelList:
282 284 continue
283 285
284 286 data[thisChannel,:] = data[thisChannel,:]*self.flip
285 287
286 288 self.flip *= -1.
287 289
288 290 self.dataOut.data = data
289 291
290 292 def setRadarFrequency(self, frequency=None):
291 293
292 294 if frequency != None:
293 295 self.dataOut.frequency = frequency
294 296
295 297 return 1
296 298
297 299 def interpolateHeights(self, topLim, botLim):
298 300         #69 to 72 for julia
299 301         #82-84 for meteors
300 302 if len(numpy.shape(self.dataOut.data))==2:
301 303 sampInterp = (self.dataOut.data[:,botLim-1] + self.dataOut.data[:,topLim+1])/2
302 304 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
303 305 #self.dataOut.data[:,botLim:limSup+1] = sampInterp
304 306 self.dataOut.data[:,botLim:topLim+1] = sampInterp
305 307 else:
306 308 nHeights = self.dataOut.data.shape[2]
307 309 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
308 310 y = self.dataOut.data[:,:,range(botLim)+range(topLim+1,nHeights)]
309 311 f = interpolate.interp1d(x, y, axis = 2)
310 312 xnew = numpy.arange(botLim,topLim+1)
311 313 ynew = f(xnew)
312 314
313 315 self.dataOut.data[:,:,botLim:topLim+1] = ynew
314 316
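# Illustrative sketch (not from the original sources): interpolateHeights() above replaces
# the samples between botLim and topLim by interpolating across the gap along the height
# axis (the block-data branch is shown here). Example sizes only.
import numpy
from scipy import interpolate

nChannels, nProfiles, nHeights = 1, 2, 10
botLim, topLim = 3, 5
data = numpy.random.rand(nChannels, nProfiles, nHeights)

x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
y = data[:, :, list(range(botLim)) + list(range(topLim + 1, nHeights))]
f = interpolate.interp1d(x, y, axis=2)
data[:, :, botLim:topLim + 1] = f(numpy.arange(botLim, topLim + 1))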
315 317 # import collections
316 318
317 319 class CohInt(Operation):
318 320
319 321 isConfig = False
320 322
321 323 __profIndex = 0
322 324 __withOverapping = False
323 325
324 326 __byTime = False
325 327 __initime = None
326 328 __lastdatatime = None
327 329 __integrationtime = None
328 330
329 331 __buffer = None
330 332
331 333 __dataReady = False
332 334
333 335 n = None
334 336
335 337
336 338 def __init__(self, **kwargs):
337 339
338 340 Operation.__init__(self, **kwargs)
339 341
340 342 # self.isConfig = False
341 343
342 344 def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
343 345 """
344 346 Set the parameters of the integration class.
345 347
346 348 Inputs:
347 349
348 350 n : Number of coherent integrations
349 351             timeInterval : Time of integration. If the parameter "n" is given, this one is ignored
350 352 overlapping :
351 353
352 354 """
353 355
354 356 self.__initime = None
355 357 self.__lastdatatime = 0
356 358 self.__buffer = None
357 359 self.__dataReady = False
358 360 self.byblock = byblock
359 361
360 362 if n == None and timeInterval == None:
361 363 raise ValueError, "n or timeInterval should be specified ..."
362 364
363 365 if n != None:
364 366 self.n = n
365 367 self.__byTime = False
366 368 else:
367 369 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
368 370 self.n = 9999
369 371 self.__byTime = True
370 372
371 373 if overlapping:
372 374 self.__withOverapping = True
373 375 self.__buffer = None
374 376 else:
375 377 self.__withOverapping = False
376 378 self.__buffer = 0
377 379
378 380 self.__profIndex = 0
379 381
380 382 def putData(self, data):
381 383
382 384 """
383 385         Add a profile to the __buffer and increase the __profileIndex by one
384 386
385 387 """
386 388
387 389 if not self.__withOverapping:
388 390 self.__buffer += data.copy()
389 391 self.__profIndex += 1
390 392 return
391 393
392 394 #Overlapping data
393 395 nChannels, nHeis = data.shape
394 396 data = numpy.reshape(data, (1, nChannels, nHeis))
395 397
396 398 #If the buffer is empty then it takes the data value
397 399 if self.__buffer is None:
398 400 self.__buffer = data
399 401 self.__profIndex += 1
400 402 return
401 403
402 404         #If the buffer length is lower than n then stack the data value
403 405 if self.__profIndex < self.n:
404 406 self.__buffer = numpy.vstack((self.__buffer, data))
405 407 self.__profIndex += 1
406 408 return
407 409
408 410         #If the buffer length is equal to n then replace the last buffer value with the data value
409 411 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
410 412 self.__buffer[self.n-1] = data
411 413 self.__profIndex = self.n
412 414 return
413 415
414 416
415 417 def pushData(self):
416 418 """
417 419 Return the sum of the last profiles and the profiles used in the sum.
418 420
419 421 Affected:
420 422
421 423 self.__profileIndex
422 424
423 425 """
424 426
425 427 if not self.__withOverapping:
426 428 data = self.__buffer
427 429 n = self.__profIndex
428 430
429 431 self.__buffer = 0
430 432 self.__profIndex = 0
431 433
432 434 return data, n
433 435
434 436 #Integration with Overlapping
435 437 data = numpy.sum(self.__buffer, axis=0)
436 438 n = self.__profIndex
437 439
438 440 return data, n
439 441
440 442 def byProfiles(self, data):
441 443
442 444 self.__dataReady = False
443 445 avgdata = None
444 446 # n = None
445 447
446 448 self.putData(data)
447 449
448 450 if self.__profIndex == self.n:
449 451
450 452 avgdata, n = self.pushData()
451 453 self.__dataReady = True
452 454
453 455 return avgdata
454 456
455 457 def byTime(self, data, datatime):
456 458
457 459 self.__dataReady = False
458 460 avgdata = None
459 461 n = None
460 462
461 463 self.putData(data)
462 464
463 465 if (datatime - self.__initime) >= self.__integrationtime:
464 466 avgdata, n = self.pushData()
465 467 self.n = n
466 468 self.__dataReady = True
467 469
468 470 return avgdata
469 471
470 472 def integrate(self, data, datatime=None):
471 473
472 474 if self.__initime == None:
473 475 self.__initime = datatime
474 476
475 477 if self.__byTime:
476 478 avgdata = self.byTime(data, datatime)
477 479 else:
478 480 avgdata = self.byProfiles(data)
479 481
480 482
481 483 self.__lastdatatime = datatime
482 484
483 485 if avgdata is None:
484 486 return None, None
485 487
486 488 avgdatatime = self.__initime
487 489
488 490 deltatime = datatime -self.__lastdatatime
489 491
490 492 if not self.__withOverapping:
491 493 self.__initime = datatime
492 494 else:
493 495 self.__initime += deltatime
494 496
495 497 return avgdata, avgdatatime
496 498
497 499 def integrateByBlock(self, dataOut):
498 500
499 501 times = int(dataOut.data.shape[1]/self.n)
500 502 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
501 503
502 504 id_min = 0
503 505 id_max = self.n
504 506
505 507 for i in range(times):
506 508 junk = dataOut.data[:,id_min:id_max,:]
507 509 avgdata[:,i,:] = junk.sum(axis=1)
508 510 id_min += self.n
509 511 id_max += self.n
510 512
511 513 timeInterval = dataOut.ippSeconds*self.n
512 514 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
513 515 self.__dataReady = True
514 516 return avgdata, avgdatatime
515 517
516 518
517 519 def run(self, dataOut, n=None, timeInterval=None, overlapping=False, byblock=False, **kwargs):
518 520 if not self.isConfig:
519 521 self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
520 522 self.isConfig = True
521 523
522 524 if dataOut.flagDataAsBlock:
523 525 """
524 526         If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
525 527 """
526 528 avgdata, avgdatatime = self.integrateByBlock(dataOut)
527 529 dataOut.nProfiles /= self.n
528 530 else:
529 531 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
530 532
531 533 # dataOut.timeInterval *= n
532 534 dataOut.flagNoData = True
533 535
534 536 if self.__dataReady:
535 537 dataOut.data = avgdata
536 538 dataOut.nCohInt *= self.n
537 539 dataOut.utctime = avgdatatime
538 540 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
539 541 dataOut.flagNoData = False
540 542
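# Illustrative sketch (not from the original sources): coherent integration as performed by
# CohInt above sums n consecutive complex profiles, so the coherent signal grows by n while
# dataOut.nCohInt is multiplied by n. Example arrays only.
import numpy

n = 4
profiles = [numpy.ones((2, 8), dtype=complex) for _ in range(n)]   # nChannels x nHeights each

integrated = sum(profiles)      # what pushData() returns after n calls to putData()
print(integrated[0, 0])         # (4+0j)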
541 543 class Decoder(Operation):
542 544
543 545 isConfig = False
544 546 __profIndex = 0
545 547
546 548 code = None
547 549
548 550 nCode = None
549 551 nBaud = None
550 552
551 553
552 554 def __init__(self, **kwargs):
553 555
554 556 Operation.__init__(self, **kwargs)
555 557
556 558 self.times = None
557 559 self.osamp = None
558 # self.__setValues = False
560 # self.__setValues = False
559 561 self.isConfig = False
560 562
561 563 def setup(self, code, osamp, dataOut):
562 564
563 565 self.__profIndex = 0
564 566
565 567 self.code = code
566 568
567 569 self.nCode = len(code)
568 570 self.nBaud = len(code[0])
569 571
570 572 if (osamp != None) and (osamp >1):
571 573 self.osamp = osamp
572 574 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
573 575 self.nBaud = self.nBaud*self.osamp
574 576
575 577 self.__nChannels = dataOut.nChannels
576 578 self.__nProfiles = dataOut.nProfiles
577 579 self.__nHeis = dataOut.nHeights
578 580
579 581 if self.__nHeis < self.nBaud:
580 582 raise ValueError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
581 583
582 584 #Frequency
583 585 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
584 586
585 587 __codeBuffer[:,0:self.nBaud] = self.code
586 588
587 589 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
588 590
589 591 if dataOut.flagDataAsBlock:
590 592
591 593 self.ndatadec = self.__nHeis #- self.nBaud + 1
592 594
593 595 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
594 596
595 597 else:
596 598
597 599 #Time
598 600 self.ndatadec = self.__nHeis #- self.nBaud + 1
599 601
600 602 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
601 603
602 604 def __convolutionInFreq(self, data):
603 605
604 606 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
605 607
606 608 fft_data = numpy.fft.fft(data, axis=1)
607 609
608 610 conv = fft_data*fft_code
609 611
610 612 data = numpy.fft.ifft(conv,axis=1)
611 613
612 614 return data
613 615
614 616 def __convolutionInFreqOpt(self, data):
615 617
616 618 raise NotImplementedError
617 619
618 620 def __convolutionInTime(self, data):
619 621
620 622 code = self.code[self.__profIndex]
621 623
622 624 for i in range(self.__nChannels):
623 625 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
624 626
625 627 return self.datadecTime
626 628
629 #@profile
630 def oldCorrelate(self, i, data, code_block):
631 profilesList = xrange(self.__nProfiles)
632 for j in profilesList:
633 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
634
635 @profile
627 636 def __convolutionByBlockInTime(self, data):
628 637
629 638 repetitions = self.__nProfiles / self.nCode
630
639
631 640 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
632 641 junk = junk.flatten()
633 642 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
634 643
635 for i in range(self.__nChannels):
636 for j in range(self.__nProfiles):
637 print self.datadecTime[i,j,:].shape
638 print numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:].shape
639 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
640
644
645 # Decode each channel as a block: correlate every profile against the
646 # repeated code using the cSchain C extension.
647 for i in range(self.__nChannels):
648 self.datadecTime[i,:,:] = cSchain.correlateByBlock(data[i,:,:], code_block, 2)
641 664 return self.datadecTime
665
642 666
643 667 def __convolutionByBlockInFreq(self, data):
644 668
645 669 raise NotImplementedError, "Decoder by frequency for blocks is not implemented"
646 670
647 671
648 672 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
649 673
650 674 fft_data = numpy.fft.fft(data, axis=2)
651 675
652 676 conv = fft_data*fft_code
653 677
654 678 data = numpy.fft.ifft(conv,axis=2)
655 679
656 680 return data
657 681
682
658 683 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
659 684
660 685 if dataOut.flagDecodeData:
661 686 print "This data is already decoded, decoding it again ..."
662 687
663 688 if not self.isConfig:
664 689
665 690 if code is None:
666 691 if dataOut.code is None:
667 692 raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type
668 693
669 694 code = dataOut.code
670 695 else:
671 696 code = numpy.array(code).reshape(nCode,nBaud)
672 697
673 698 self.setup(code, osamp, dataOut)
674 699
675 700 self.isConfig = True
676 701
677 702 if mode == 3:
678 703 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
679 704
680 705 if times != None:
681 706 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
682 707
683 708 if self.code is None:
684 709 print "Fail decoding: Code is not defined."
685 710 return
686 711
712 self.__nProfiles = dataOut.nProfiles
687 713 datadec = None
714
688 715 if mode == 3:
689 716 mode = 0
690 717
691 718 if dataOut.flagDataAsBlock:
692 719 """
693 720 Decoding when data has been read as a block
694 721 """
695 722
696 723 if mode == 0:
697 724 datadec = self.__convolutionByBlockInTime(dataOut.data)
698 725 if mode == 1:
699 726 datadec = self.__convolutionByBlockInFreq(dataOut.data)
700 727 else:
701 728 """
702 729 Decoding when data has been read profile by profile
703 730 """
704 731 if mode == 0:
705 732 datadec = self.__convolutionInTime(dataOut.data)
706 733
707 734 if mode == 1:
708 735 datadec = self.__convolutionInFreq(dataOut.data)
709 736
710 737 if mode == 2:
711 738 datadec = self.__convolutionInFreqOpt(dataOut.data)
712 739
713 740 if datadec is None:
714 741 raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode
715 742
716 743 dataOut.code = self.code
717 744 dataOut.nCode = self.nCode
718 745 dataOut.nBaud = self.nBaud
719 746
720 747 dataOut.data = datadec
721 748
722 749 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
723 750
724 751 dataOut.flagDecodeData = True # assume the data is now decoded
725 752
726 753 if self.__profIndex == self.nCode-1:
727 754 self.__profIndex = 0
728 755 return 1
729 756
730 757 self.__profIndex += 1
731 758
732 759 return 1
733 760 # dataOut.flagDeflipData = True # assume the data has not been deflipped
734 761
735 762
736 763 class ProfileConcat(Operation):
737 764
738 765 isConfig = False
739 766 buffer = None
740 767
741 768 def __init__(self, **kwargs):
742 769
743 770 Operation.__init__(self, **kwargs)
744 771 self.profileIndex = 0
745 772
746 773 def reset(self):
747 774 self.buffer = numpy.zeros_like(self.buffer)
748 775 self.start_index = 0
749 776 self.times = 1
750 777
751 778 def setup(self, data, m, n=1):
752 779 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
753 780 self.nHeights = data.shape[1]#.nHeights
754 781 self.start_index = 0
755 782 self.times = 1
756 783
757 784 def concat(self, data):
758 785
759 786 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
760 787 self.start_index = self.start_index + self.nHeights
761 788
762 789 def run(self, dataOut, m):
763 790
764 791 dataOut.flagNoData = True
765 792
766 793 if not self.isConfig:
767 794 self.setup(dataOut.data, m, 1)
768 795 self.isConfig = True
769 796
770 797 if dataOut.flagDataAsBlock:
771 798 raise ValueError, "ProfileConcat can only be used when voltage data has been read profile by profile (getblock=False)"
772 799
773 800 else:
774 801 self.concat(dataOut.data)
775 802 self.times += 1
776 803 if self.times > m:
777 804 dataOut.data = self.buffer
778 805 self.reset()
779 806 dataOut.flagNoData = False
780 807 # more header and dataOut attributes should be updated here, for example the heights
781 808 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
782 809 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
783 810 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
784 811 dataOut.ippSeconds *= m
785 812
786 813 class ProfileSelector(Operation):
787 814
788 815 profileIndex = None
789 816 # Total number of profiles
790 817 nProfiles = None
791 818
792 819 def __init__(self, **kwargs):
793 820
794 821 Operation.__init__(self, **kwargs)
795 822 self.profileIndex = 0
796 823
797 824 def incProfileIndex(self):
798 825
799 826 self.profileIndex += 1
800 827
801 828 if self.profileIndex >= self.nProfiles:
802 829 self.profileIndex = 0
803 830
804 831 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
805 832
806 833 if profileIndex < minIndex:
807 834 return False
808 835
809 836 if profileIndex > maxIndex:
810 837 return False
811 838
812 839 return True
813 840
814 841 def isThisProfileInList(self, profileIndex, profileList):
815 842
816 843 if profileIndex not in profileList:
817 844 return False
818 845
819 846 return True
820 847
821 848 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
822 849
823 850 """
824 851 ProfileSelector:
825 852
826 853 Inputs:
827 854 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
828 855
829 856 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
830 857
831 858 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
832 859
833 860 """
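# Usage sketch (hypothetical controller wiring; the operation name is this class,
# the parameter names are the ones documented above):
#   opObj = procUnitConfObj0.addOperation(name='ProfileSelector', optype='other')
#   opObj.addParameter(name='profileRangeList', value='4,30', format='intlist')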
834 861
835 862 if rangeList is not None:
836 863 if type(rangeList[0]) not in (tuple, list):
837 864 rangeList = [rangeList]
838 865
839 866 dataOut.flagNoData = True
840 867
841 868 if dataOut.flagDataAsBlock:
842 869 """
843 870 data dimension = [nChannels, nProfiles, nHeis]
844 871 """
845 872 if profileList != None:
846 873 dataOut.data = dataOut.data[:,profileList,:]
847 874
848 875 if profileRangeList != None:
849 876 minIndex = profileRangeList[0]
850 877 maxIndex = profileRangeList[1]
851 878 profileList = range(minIndex, maxIndex+1)
852 879
853 880 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
854 881
855 882 if rangeList != None:
856 883
857 884 profileList = []
858 885
859 886 for thisRange in rangeList:
860 887 minIndex = thisRange[0]
861 888 maxIndex = thisRange[1]
862 889
863 890 profileList.extend(range(minIndex, maxIndex+1))
864 891
865 892 dataOut.data = dataOut.data[:,profileList,:]
866 893
867 894 dataOut.nProfiles = len(profileList)
868 895 dataOut.profileIndex = dataOut.nProfiles - 1
869 896 dataOut.flagNoData = False
870 897
871 898 return True
872 899
873 900 """
874 901 data dimension = [nChannels, nHeis]
875 902 """
876 903
877 904 if profileList != None:
878 905
879 906 if self.isThisProfileInList(dataOut.profileIndex, profileList):
880 907
881 908 self.nProfiles = len(profileList)
882 909 dataOut.nProfiles = self.nProfiles
883 910 dataOut.profileIndex = self.profileIndex
884 911 dataOut.flagNoData = False
885 912
886 913 self.incProfileIndex()
887 914 return True
888 915
889 916 if profileRangeList != None:
890 917
891 918 minIndex = profileRangeList[0]
892 919 maxIndex = profileRangeList[1]
893 920
894 921 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
895 922
896 923 self.nProfiles = maxIndex - minIndex + 1
897 924 dataOut.nProfiles = self.nProfiles
898 925 dataOut.profileIndex = self.profileIndex
899 926 dataOut.flagNoData = False
900 927
901 928 self.incProfileIndex()
902 929 return True
903 930
904 931 if rangeList != None:
905 932
906 933 nProfiles = 0
907 934
908 935 for thisRange in rangeList:
909 936 minIndex = thisRange[0]
910 937 maxIndex = thisRange[1]
911 938
912 939 nProfiles += maxIndex - minIndex + 1
913 940
914 941 for thisRange in rangeList:
915 942
916 943 minIndex = thisRange[0]
917 944 maxIndex = thisRange[1]
918 945
919 946 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
920 947
921 948 self.nProfiles = nProfiles
922 949 dataOut.nProfiles = self.nProfiles
923 950 dataOut.profileIndex = self.profileIndex
924 951 dataOut.flagNoData = False
925 952
926 953 self.incProfileIndex()
927 954
928 955 break
929 956
930 957 return True
931 958
932 959
933 960 if beam != None: #beam is only for AMISR data
934 961 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
935 962 dataOut.flagNoData = False
936 963 dataOut.profileIndex = self.profileIndex
937 964
938 965 self.incProfileIndex()
939 966
940 967 return True
941 968
942 969 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
943 970
944 971 return False
945 972
946 973 class Reshaper(Operation):
947 974
948 975 def __init__(self, **kwargs):
949 976
950 977 Operation.__init__(self, **kwargs)
951 978
952 979 self.__buffer = None
953 980 self.__nitems = 0
954 981
955 982 def __appendProfile(self, dataOut, nTxs):
956 983
957 984 if self.__buffer is None:
958 985 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
959 986 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
960 987
961 988 ini = dataOut.nHeights * self.__nitems
962 989 end = ini + dataOut.nHeights
963 990
964 991 self.__buffer[:, ini:end] = dataOut.data
965 992
966 993 self.__nitems += 1
967 994
968 995 return int(self.__nitems*nTxs)
969 996
970 997 def __getBuffer(self):
971 998
972 999 if self.__nitems == int(1./self.__nTxs):
973 1000
974 1001 self.__nitems = 0
975 1002
976 1003 return self.__buffer.copy()
977 1004
978 1005 return None
979 1006
980 1007 def __checkInputs(self, dataOut, shape, nTxs):
981 1008
982 1009 if shape is None and nTxs is None:
983 1010 raise ValueError, "Reshaper: either shape or the nTxs factor should be defined"
984 1011
985 1012 if nTxs:
986 1013 if nTxs < 0:
987 1014 raise ValueError, "nTxs should be greater than 0"
988 1015
989 1016 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
990 1017 raise ValueError, "nProfiles = %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs))
991 1018
992 1019 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
993 1020
994 1021 return shape, nTxs
995 1022
996 1023 if len(shape) != 2 and len(shape) != 3:
997 1024 raise ValueError, "shape dimension should be equal to 2 or 3: shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Current data shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights)
998 1025
999 1026 if len(shape) == 2:
1000 1027 shape_tuple = [dataOut.nChannels]
1001 1028 shape_tuple.extend(shape)
1002 1029 else:
1003 1030 shape_tuple = list(shape)
1004 1031
1005 1032 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1006 1033
1007 1034 return shape_tuple, nTxs
1008 1035
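# Accepted arguments (values are illustrative only):
#   nTxs=2  -> a block of shape (nChannels, nProfiles, nHeights) is reshaped to
#              (nChannels, nProfiles*2, nHeights/2)
#   shape=(nProfiles*2, nHeights/2) or a full (nChannels, nProfiles, nHeights)
#   tuple may be given instead of nTxs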
1009 1036 def run(self, dataOut, shape=None, nTxs=None):
1010 1037
1011 1038 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1012 1039
1013 1040 dataOut.flagNoData = True
1014 1041 profileIndex = None
1015 1042
1016 1043 if dataOut.flagDataAsBlock:
1017 1044
1018 1045 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1019 1046 dataOut.flagNoData = False
1020 1047
1021 1048 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1022 1049
1023 1050 else:
1024 1051
1025 1052 if self.__nTxs < 1:
1026 1053
1027 1054 self.__appendProfile(dataOut, self.__nTxs)
1028 1055 new_data = self.__getBuffer()
1029 1056
1030 1057 if new_data is not None:
1031 1058 dataOut.data = new_data
1032 1059 dataOut.flagNoData = False
1033 1060
1034 1061 profileIndex = dataOut.profileIndex*self.__nTxs
1035 1062
1036 1063 else:
1037 1064 raise ValueError, "nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)"
1038 1065
1039 1066 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1040 1067
1041 1068 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1042 1069
1043 1070 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1044 1071
1045 1072 dataOut.profileIndex = profileIndex
1046 1073
1047 1074 dataOut.ippSeconds /= self.__nTxs
1048 1075
1049 1076 class SplitProfiles(Operation):
1050 1077
1051 1078 def __init__(self, **kwargs):
1052 1079
1053 1080 Operation.__init__(self, **kwargs)
1054 1081
1055 1082 def run(self, dataOut, n):
1056 1083
1057 1084 dataOut.flagNoData = True
1058 1085 profileIndex = None
1059 1086
1060 1087 if dataOut.flagDataAsBlock:
1061 1088
1062 1089 #nchannels, nprofiles, nsamples
1063 1090 shape = dataOut.data.shape
1064 1091
1065 1092 if shape[2] % n != 0:
1066 1093 raise ValueError, "Could not split the data: the number of heights (%d) has to be a multiple of n=%d" %(shape[2], n)
1067 1094
1068 1095 new_shape = shape[0], shape[1]*n, shape[2]/n
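# e.g. n=2 turns a (4, 100, 200) block into (4, 200, 100): twice the profiles,
# half the heights per profile (ippSeconds is halved below accordingly)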
1069 1096
1070 1097 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1071 1098 dataOut.flagNoData = False
1072 1099
1073 1100 profileIndex = int(dataOut.nProfiles/n) - 1
1074 1101
1075 1102 else:
1076 1103
1077 1104 raise ValueError, "Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)"
1078 1105
1079 1106 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1080 1107
1081 1108 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1082 1109
1083 1110 dataOut.nProfiles = int(dataOut.nProfiles*n)
1084 1111
1085 1112 dataOut.profileIndex = profileIndex
1086 1113
1087 1114 dataOut.ippSeconds /= n
1088 1115
1089 1116 class CombineProfiles(Operation):
1090 1117
1091 1118 def __init__(self, **kwargs):
1092 1119
1093 1120 Operation.__init__(self, **kwargs)
1094 1121
1095 1122 self.__remData = None
1096 1123 self.__profileIndex = 0
1097 1124
1098 1125 def run(self, dataOut, n):
1099 1126
1100 1127 dataOut.flagNoData = True
1101 1128 profileIndex = None
1102 1129
1103 1130 if dataOut.flagDataAsBlock:
1104 1131
1105 1132 #nchannels, nprofiles, nsamples
1106 1133 shape = dataOut.data.shape
1107 1134 new_shape = shape[0], shape[1]/n, shape[2]*n
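# e.g. n=2 turns a (4, 100, 200) block into (4, 50, 400): half the profiles,
# twice the heights per profile (ippSeconds is doubled below accordingly)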
1108 1135
1109 1136 if shape[1] % n != 0:
1110 1137 raise ValueError, "Could not combine the data: the number of profiles (%d) has to be a multiple of n=%d" %(shape[1], n)
1111 1138
1112 1139 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1113 1140 dataOut.flagNoData = False
1114 1141
1115 1142 profileIndex = int(dataOut.nProfiles*n) - 1
1116 1143
1117 1144 else:
1118 1145
1119 1146 #nchannels, nsamples
1120 1147 if self.__remData is None:
1121 1148 newData = dataOut.data
1122 1149 else:
1123 1150 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1124 1151
1125 1152 self.__profileIndex += 1
1126 1153
1127 1154 if self.__profileIndex < n:
1128 1155 self.__remData = newData
1129 1156 #continue
1130 1157 return
1131 1158
1132 1159 self.__profileIndex = 0
1133 1160 self.__remData = None
1134 1161
1135 1162 dataOut.data = newData
1136 1163 dataOut.flagNoData = False
1137 1164
1138 1165 profileIndex = dataOut.profileIndex/n
1139 1166
1140 1167
1141 1168 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1142 1169
1143 1170 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1144 1171
1145 1172 dataOut.nProfiles = int(dataOut.nProfiles/n)
1146 1173
1147 1174 dataOut.profileIndex = profileIndex
1148 1175
1149 1176 dataOut.ippSeconds *= n
1150 1177
1151 1178 # import collections
1152 1179 # from scipy.stats import mode
1153 1180 #
1154 1181 # class Synchronize(Operation):
1155 1182 #
1156 1183 # isConfig = False
1157 1184 # __profIndex = 0
1158 1185 #
1159 1186 # def __init__(self, **kwargs):
1160 1187 #
1161 1188 # Operation.__init__(self, **kwargs)
1162 1189 # # self.isConfig = False
1163 1190 # self.__powBuffer = None
1164 1191 # self.__startIndex = 0
1165 1192 # self.__pulseFound = False
1166 1193 #
1167 1194 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1168 1195 #
1169 1196 # #Read data
1170 1197 #
1171 1198 # powerdB = dataOut.getPower(channel = channel)
1172 1199 # noisedB = dataOut.getNoise(channel = channel)[0]
1173 1200 #
1174 1201 # self.__powBuffer.extend(powerdB.flatten())
1175 1202 #
1176 1203 # dataArray = numpy.array(self.__powBuffer)
1177 1204 #
1178 1205 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1179 1206 #
1180 1207 # maxValue = numpy.nanmax(filteredPower)
1181 1208 #
1182 1209 # if maxValue < noisedB + 10:
1183 1210 # # No transmission pulse was found
1184 1211 # return None
1185 1212 #
1186 1213 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1187 1214 #
1188 1215 # if len(maxValuesIndex) < 2:
1189 1216 # # Only a single one-baud transmission pulse was found; waiting for the next TX
1190 1217 # return None
1191 1218 #
1192 1219 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1193 1220 #
1194 1221 # # Keep only values spaced nSamples apart
1195 1222 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1196 1223 #
1197 1224 # if len(pulseIndex) < 2:
1198 1225 # # Only one transmission pulse wider than 1 was found
1199 1226 # return None
1200 1227 #
1201 1228 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1202 1229 #
1203 1230 # # Remove signals spaced fewer than 10 units or samples apart
1204 1231 # # (no IPP shorter than 10 units should exist)
1205 1232 #
1206 1233 # realIndex = numpy.where(spacing > 10 )[0]
1207 1234 #
1208 1235 # if len(realIndex) < 2:
1209 1236 # # Only one transmission pulse wider than 1 was found
1210 1237 # return None
1211 1238 #
1212 1239 # # Drop wide pulses (keep only the spacing between IPPs)
1213 1240 # realPulseIndex = pulseIndex[realIndex]
1214 1241 #
1215 1242 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1216 1243 #
1217 1244 # print "IPP = %d samples" %period
1218 1245 #
1219 1246 # self.__newNSamples = dataOut.nHeights #int(period)
1220 1247 # self.__startIndex = int(realPulseIndex[0])
1221 1248 #
1222 1249 # return 1
1223 1250 #
1224 1251 #
1225 1252 # def setup(self, nSamples, nChannels, buffer_size = 4):
1226 1253 #
1227 1254 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1228 1255 # maxlen = buffer_size*nSamples)
1229 1256 #
1230 1257 # bufferList = []
1231 1258 #
1232 1259 # for i in range(nChannels):
1233 1260 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1234 1261 # maxlen = buffer_size*nSamples)
1235 1262 #
1236 1263 # bufferList.append(bufferByChannel)
1237 1264 #
1238 1265 # self.__nSamples = nSamples
1239 1266 # self.__nChannels = nChannels
1240 1267 # self.__bufferList = bufferList
1241 1268 #
1242 1269 # def run(self, dataOut, channel = 0):
1243 1270 #
1244 1271 # if not self.isConfig:
1245 1272 # nSamples = dataOut.nHeights
1246 1273 # nChannels = dataOut.nChannels
1247 1274 # self.setup(nSamples, nChannels)
1248 1275 # self.isConfig = True
1249 1276 #
1250 1277 # #Append new data to internal buffer
1251 1278 # for thisChannel in range(self.__nChannels):
1252 1279 # bufferByChannel = self.__bufferList[thisChannel]
1253 1280 # bufferByChannel.extend(dataOut.data[thisChannel])
1254 1281 #
1255 1282 # if self.__pulseFound:
1256 1283 # self.__startIndex -= self.__nSamples
1257 1284 #
1258 1285 # #Finding Tx Pulse
1259 1286 # if not self.__pulseFound:
1260 1287 # indexFound = self.__findTxPulse(dataOut, channel)
1261 1288 #
1262 1289 # if indexFound == None:
1263 1290 # dataOut.flagNoData = True
1264 1291 # return
1265 1292 #
1266 1293 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1267 1294 # self.__pulseFound = True
1268 1295 # self.__startIndex = indexFound
1269 1296 #
1270 1297 # #If pulse was found ...
1271 1298 # for thisChannel in range(self.__nChannels):
1272 1299 # bufferByChannel = self.__bufferList[thisChannel]
1273 1300 # #print self.__startIndex
1274 1301 # x = numpy.array(bufferByChannel)
1275 1302 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1276 1303 #
1277 1304 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1278 1305 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1279 1306 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1280 1307 #
1281 1308 # dataOut.data = self.__arrayBuffer
1282 1309 #
1283 1310 # self.__startIndex += self.__newNSamples
1284 1311 #
1285 1312 # return
@@ -1,85 +1,85
1 1
2 2 import os, sys
3 3
4 4 from schainpy.controller import Project
5 5
6 6 controllerObj = Project()
7 7 controllerObj.setup(id = '002', name='script02', description="JASMET Meteor Detection")
8 8
9 9 #-------------------------------------- Setup -----------------------------------------
10 10 # Check these variables before running
11 11
12 12 # Path to the data files
13 13 # path = '/mnt/jars/2016_08/NOCHE'
14 14 # path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/DIA'
15 15 # path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/NOCHE'
16 path = '/media/nanosat/NewVolumen/JASMET/2016_08/DIA'
16 path = '/home/nanosat/data/jasmet'
17 17
18 18 # Path for the plots
19 19 pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/graphics')
20 20
21 21 # Path for the meteor HDF5 files
22 22 pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/meteor')
23 23
24 24 # Date range for the file search
25 startDate = '2016/08/29'
26 endDate = '2016/09/11'
25 startDate = '2010/08/29'
26 endDate = '2017/09/11'
27 27 # Time range for the file search
28 28 startTime = '00:00:00'
29 29 endTime = '23:59:59'
30 30
31 31
32 32 #------------------------------ Voltage Reading Unit ----------------------------------
33 33
34 34 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
35 35 path=path,
36 36 startDate=startDate,
37 37 endDate=endDate,
38 38 startTime=startTime,
39 39 endTime=endTime,
40 40 online=0,
41 41 delay=30,
42 42 walk=1,
43 43 getblock=1,
44 44 blocktime=100)
45 45
46 46 opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
47 47
48 48 #-------------------------- Voltage Processing Unit ------------------------------------
49 49
50 50 procUnitConfObj0 = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
51 51
52 52 opObj00 = procUnitConfObj0.addOperation(name='selectChannels')
53 53 opObj00.addParameter(name='channelList', value='0,1,2,3,4', format='intlist')
54 54
55 55 opObj01 = procUnitConfObj0.addOperation(name='setRadarFrequency')
56 56 opObj01.addParameter(name='frequency', value='30.e6', format='float')
57 57
58 58 opObj01 = procUnitConfObj0.addOperation(name='interpolateHeights')
59 59 opObj01.addParameter(name='topLim', value='73', format='int')
60 60 opObj01.addParameter(name='botLim', value='71', format='int')
61 61
62 62 opObj02 = procUnitConfObj0.addOperation(name='Decoder', optype='other')
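# With no parameters the Decoder reads the code from the data itself; optional
# parameters (illustrative values) can be added the same way as above, e.g.:
# opObj02.addParameter(name='mode', value='0', format='int')
# opObj02.addParameter(name='osamp', value='1', format='int')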
63 63
64 64 opObj03 = procUnitConfObj0.addOperation(name='CohInt', optype='other')
65 65 opObj03.addParameter(name='n', value='2', format='int')
66 66
67 67 #--------------------------- Parameters Processing Unit ------------------------------------
68 68
69 69 procUnitConfObj1 = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObj0.getId())
70 70 #
71 71 opObj10 = procUnitConfObj1.addOperation(name='SMDetection', optype='other')
72 72 opObj10.addParameter(name='azimuth', value='45', format='float')
73 73 opObj10.addParameter(name='hmin', value='60', format='float')
74 74 opObj10.addParameter(name='hmax', value='120', format='float')
75 75
76 76 opObj12 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
77 77 opObj12.addParameter(name='path', value=pathfile)
78 78 opObj12.addParameter(name='blocksPerFile', value='1000', format='int')
79 79 opObj12.addParameter(name='metadataList',value='type,heightList,paramInterval,timeZone',format='list')
80 80 opObj12.addParameter(name='dataList',value='data_param,utctime',format='list')
81 81 opObj12.addParameter(name='mode',value='2',format='int')
82 82
83 83 #--------------------------------------------------------------------------------------------------
84 84
85 85 controllerObj.start()