Replace multiSchain with MPProject
Juan C. Espinoza -
r1052:3fb7c359028f

The requested changes are too big and content was truncated.

@@ -1,1324 +1,1297
1 1 '''
2 2 Created on September , 2012
3 3 @author:
4 4 '''
5 5
6 6 import sys
7 7 import ast
8 8 import datetime
9 9 import traceback
10 10 import math
11 11 import time
12 from multiprocessing import Process, Queue, cpu_count
13
14 import schainpy
15 import schainpy.admin
16 from schainpy.utils.log import logToFile
12 from multiprocessing import Process, cpu_count
17 13
18 14 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
19 15 from xml.dom import minidom
20 16
17 import schainpy
18 import schainpy.admin
21 19 from schainpy.model import *
22 from time import sleep
20 from schainpy.utils import log
23 21
22 DTYPES = {
23 'Voltage': '.r',
24 'Spectra': '.pdata'
25 }
24 26
27 def MPProject(project, n=cpu_count()):
28 '''
29 Project wrapper to run schain in n processes
30 '''
25 31
26 def prettify(elem):
27 """Return a pretty-printed XML string for the Element.
28 """
29 rough_string = tostring(elem, 'utf-8')
30 reparsed = minidom.parseString(rough_string)
31 return reparsed.toprettyxml(indent=" ")
32
33 def multiSchain(child, nProcess=cpu_count(), startDate=None, endDate=None, by_day=False):
34 skip = 0
35 cursor = 0
36 nFiles = None
37 processes = []
38 dt1 = datetime.datetime.strptime(startDate, '%Y/%m/%d')
39 dt2 = datetime.datetime.strptime(endDate, '%Y/%m/%d')
32 rconf = project.getReadUnitObj()
33 op = rconf.getOperationObj('run')
34 dt1 = op.getParameterValue('startDate')
35 dt2 = op.getParameterValue('endDate')
40 36 days = (dt2 - dt1).days
41 37
42 38 for day in range(days+1):
43 39 skip = 0
44 40 cursor = 0
45 q = Queue()
46 41 processes = []
47 dt = (dt1 + datetime.timedelta(day)).strftime('%Y/%m/%d')
48 firstProcess = Process(target=child, args=(cursor, skip, q, dt))
49 firstProcess.start()
50 if by_day:
51 continue
52 nFiles = q.get()
42 dt = dt1 + datetime.timedelta(day)
43 dt_str = dt.strftime('%Y/%m/%d')
44 reader = JRODataReader()
45 paths, files = reader.searchFilesOffLine(path=rconf.path,
46 startDate=dt,
47 endDate=dt,
48 ext=DTYPES[rconf.datatype])
49 nFiles = len(files)
53 50 if nFiles==0:
54 51 continue
55 firstProcess.terminate()
56 skip = int(math.ceil(nFiles/nProcess))
57 while True:
58 processes.append(Process(target=child, args=(cursor, skip, q, dt)))
59 processes[cursor].start()
60 if nFiles < cursor*skip:
61 break
52 skip = int(math.ceil(nFiles/n))
53 while nFiles > cursor*skip:
54 rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor,
55 skip=skip)
56 p = project.clone()
57 p.start()
58 processes.append(p)
62 59 cursor += 1
63 60
64 61 def beforeExit(exctype, value, trace):
65 62 for process in processes:
66 63 process.terminate()
67 64 process.join()
68 65 print traceback.print_tb(trace)
69 66
70 67 sys.excepthook = beforeExit
71 68
72 69 for process in processes:
73 70 process.join()
74 71 process.terminate()
75 72
76 73 time.sleep(3)
77 74
78
79 75 class ParameterConf():
80 76
81 77 id = None
82 78 name = None
83 79 value = None
84 80 format = None
85 81
86 82 __formated_value = None
87 83
88 84 ELEMENTNAME = 'Parameter'
89 85
90 86 def __init__(self):
91 87
92 88 self.format = 'str'
93 89
94 90 def getElementName(self):
95 91
96 92 return self.ELEMENTNAME
97 93
98 94 def getValue(self):
99 95
100 96 value = self.value
101 97 format = self.format
102 98
103 99 if self.__formated_value != None:
104 100
105 101 return self.__formated_value
106 102
107 103 if format == 'obj':
108 104 return value
109 105
110 106 if format == 'str':
111 107 self.__formated_value = str(value)
112 108 return self.__formated_value
113 109
114 110 if value == '':
115 raise ValueError, "%s: This parameter value is empty" %self.name
111 raise ValueError, '%s: This parameter value is empty' %self.name
116 112
117 113 if format == 'list':
118 114 strList = value.split(',')
119 115
120 116 self.__formated_value = strList
121 117
122 118 return self.__formated_value
123 119
124 120 if format == 'intlist':
125 """
121 '''
126 122 Example:
127 123 value = (0,1,2)
128 """
124 '''
129 125
130 126 new_value = ast.literal_eval(value)
131 127
132 128 if type(new_value) not in (tuple, list):
133 129 new_value = [int(new_value)]
134 130
135 131 self.__formated_value = new_value
136 132
137 133 return self.__formated_value
138 134
139 135 if format == 'floatlist':
140 """
136 '''
141 137 Example:
142 138 value = (0.5, 1.4, 2.7)
143 """
139 '''
144 140
145 141 new_value = ast.literal_eval(value)
146 142
147 143 if type(new_value) not in (tuple, list):
148 144 new_value = [float(new_value)]
149 145
150 146 self.__formated_value = new_value
151 147
152 148 return self.__formated_value
153 149
154 150 if format == 'date':
155 151 strList = value.split('/')
156 152 intList = [int(x) for x in strList]
157 153 date = datetime.date(intList[0], intList[1], intList[2])
158 154
159 155 self.__formated_value = date
160 156
161 157 return self.__formated_value
162 158
163 159 if format == 'time':
164 160 strList = value.split(':')
165 161 intList = [int(x) for x in strList]
166 162 time = datetime.time(intList[0], intList[1], intList[2])
167 163
168 164 self.__formated_value = time
169 165
170 166 return self.__formated_value
171 167
172 168 if format == 'pairslist':
173 """
169 '''
174 170 Example:
175 171 value = (0,1),(1,2)
176 """
172 '''
177 173
178 174 new_value = ast.literal_eval(value)
179 175
180 176 if type(new_value) not in (tuple, list):
181 raise ValueError, "%s has to be a tuple or list of pairs" %value
177 raise ValueError, '%s has to be a tuple or list of pairs' %value
182 178
183 179 if type(new_value[0]) not in (tuple, list):
184 180 if len(new_value) != 2:
185 raise ValueError, "%s has to be a tuple or list of pairs" %value
181 raise ValueError, '%s has to be a tuple or list of pairs' %value
186 182 new_value = [new_value]
187 183
188 184 for thisPair in new_value:
189 185 if len(thisPair) != 2:
190 raise ValueError, "%s has to be a tuple or list of pairs" %value
186 raise ValueError, '%s has to be a tuple or list of pairs' %value
191 187
192 188 self.__formated_value = new_value
193 189
194 190 return self.__formated_value
195 191
196 192 if format == 'multilist':
197 """
193 '''
198 194 Example:
199 195 value = (0,1,2),(3,4,5)
200 """
196 '''
201 197 multiList = ast.literal_eval(value)
202 198
203 199 if type(multiList[0]) == int:
204 multiList = ast.literal_eval("(" + value + ")")
200 multiList = ast.literal_eval('(' + value + ')')
205 201
206 202 self.__formated_value = multiList
207 203
208 204 return self.__formated_value
209 205
210 206 if format == 'bool':
211 207 value = int(value)
212 208
213 209 if format == 'int':
214 210 value = float(value)
215 211
216 212 format_func = eval(format)
217 213
218 214 self.__formated_value = format_func(value)
219 215
220 216 return self.__formated_value
221 217
222 218 def updateId(self, new_id):
223 219
224 220 self.id = str(new_id)
225 221
226 222 def setup(self, id, name, value, format='str'):
227 223 self.id = str(id)
228 224 self.name = name
229 225 if format == 'obj':
230 226 self.value = value
231 227 else:
232 228 self.value = str(value)
233 229 self.format = str.lower(format)
234 230
235 231 self.getValue()
236 232
237 233 return 1
238 234
239 235 def update(self, name, value, format='str'):
240 236
241 237 self.name = name
242 238 self.value = str(value)
243 239 self.format = format
244 240
245 241 def makeXml(self, opElement):
246 242 if self.name not in ('queue',):
247 243 parmElement = SubElement(opElement, self.ELEMENTNAME)
248 244 parmElement.set('id', str(self.id))
249 245 parmElement.set('name', self.name)
250 246 parmElement.set('value', self.value)
251 247 parmElement.set('format', self.format)
252 248
253 249 def readXml(self, parmElement):
254 250
255 251 self.id = parmElement.get('id')
256 252 self.name = parmElement.get('name')
257 253 self.value = parmElement.get('value')
258 254 self.format = str.lower(parmElement.get('format'))
259 255
260 256 #Compatible with old signal chain version
261 257 if self.format == 'int' and self.name == 'idfigure':
262 258 self.name = 'id'
263 259
264 260 def printattr(self):
265 261
266 print "Parameter[%s]: name = %s, value = %s, format = %s" %(self.id, self.name, self.value, self.format)
262 print 'Parameter[%s]: name = %s, value = %s, format = %s' %(self.id, self.name, self.value, self.format)
267 263
268 264 class OperationConf():
269 265
270 266 id = None
271 267 name = None
272 268 priority = None
273 269 type = None
274 270
275 271 parmConfObjList = []
276 272
277 273 ELEMENTNAME = 'Operation'
278 274
279 275 def __init__(self):
280 276
281 277 self.id = '0'
282 278 self.name = None
283 279 self.priority = None
284 280 self.type = 'self'
285 281
286 282
287 283 def __getNewId(self):
288 284
289 285 return int(self.id)*10 + len(self.parmConfObjList) + 1
290 286
291 287 def updateId(self, new_id):
292 288
293 289 self.id = str(new_id)
294 290
295 291 n = 1
296 292 for parmObj in self.parmConfObjList:
297 293
298 294 idParm = str(int(new_id)*10 + n)
299 295 parmObj.updateId(idParm)
300 296
301 297 n += 1
302 298
303 299 def getElementName(self):
304 300
305 301 return self.ELEMENTNAME
306 302
307 303 def getParameterObjList(self):
308 304
309 305 return self.parmConfObjList
310 306
311 307 def getParameterObj(self, parameterName):
312 308
313 309 for parmConfObj in self.parmConfObjList:
314 310
315 311 if parmConfObj.name != parameterName:
316 312 continue
317 313
318 314 return parmConfObj
319 315
320 316 return None
321 317
322 318 def getParameterObjfromValue(self, parameterValue):
323 319
324 320 for parmConfObj in self.parmConfObjList:
325 321
326 322 if parmConfObj.getValue() != parameterValue:
327 323 continue
328 324
329 325 return parmConfObj.getValue()
330 326
331 327 return None
332 328
333 329 def getParameterValue(self, parameterName):
334 330
335 331 parameterObj = self.getParameterObj(parameterName)
336 332
337 333 # if not parameterObj:
338 334 # return None
339 335
340 336 value = parameterObj.getValue()
341 337
342 338 return value
343 339
344 340
345 341 def getKwargs(self):
346 342
347 343 kwargs = {}
348 344
349 345 for parmConfObj in self.parmConfObjList:
350 346 if self.name == 'run' and parmConfObj.name == 'datatype':
351 347 continue
352 348
353 349 kwargs[parmConfObj.name] = parmConfObj.getValue()
354 350
355 351 return kwargs
356 352
357 353 def setup(self, id, name, priority, type):
358 354
359 355 self.id = str(id)
360 356 self.name = name
361 357 self.type = type
362 358 self.priority = priority
363 359
364 360 self.parmConfObjList = []
365 361
366 362 def removeParameters(self):
367 363
368 364 for obj in self.parmConfObjList:
369 365 del obj
370 366
371 367 self.parmConfObjList = []
372 368
373 369 def addParameter(self, name, value, format='str'):
374 370
371 if value is None:
372 return None
375 373 id = self.__getNewId()
376 374
377 375 parmConfObj = ParameterConf()
378 376 if not parmConfObj.setup(id, name, value, format):
379 377 return None
380 378
381 379 self.parmConfObjList.append(parmConfObj)
382 380
383 381 return parmConfObj
384 382
385 383 def changeParameter(self, name, value, format='str'):
386 384
387 385 parmConfObj = self.getParameterObj(name)
388 386 parmConfObj.update(name, value, format)
389 387
390 388 return parmConfObj
391 389
392 390 def makeXml(self, procUnitElement):
393 391
394 392 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
395 393 opElement.set('id', str(self.id))
396 394 opElement.set('name', self.name)
397 395 opElement.set('type', self.type)
398 396 opElement.set('priority', str(self.priority))
399 397
400 398 for parmConfObj in self.parmConfObjList:
401 399 parmConfObj.makeXml(opElement)
402 400
403 401 def readXml(self, opElement):
404 402
405 403 self.id = opElement.get('id')
406 404 self.name = opElement.get('name')
407 405 self.type = opElement.get('type')
408 406 self.priority = opElement.get('priority')
409 407
410 408 #Compatible with old signal chain version
411 409 #Use of 'run' method instead 'init'
412 410 if self.type == 'self' and self.name == 'init':
413 411 self.name = 'run'
414 412
415 413 self.parmConfObjList = []
416 414
417 415 parmElementList = opElement.iter(ParameterConf().getElementName())
418 416
419 417 for parmElement in parmElementList:
420 418 parmConfObj = ParameterConf()
421 419 parmConfObj.readXml(parmElement)
422 420
423 421 #Compatible with old signal chain version
424 422 #If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER
425 423 if self.type != 'self' and self.name == 'Plot':
426 424 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
427 425 self.name = parmConfObj.value
428 426 continue
429 427
430 428 self.parmConfObjList.append(parmConfObj)
431 429
432 430 def printattr(self):
433 431
434 print "%s[%s]: name = %s, type = %s, priority = %s" %(self.ELEMENTNAME,
432 print '%s[%s]: name = %s, type = %s, priority = %s' %(self.ELEMENTNAME,
435 433 self.id,
436 434 self.name,
437 435 self.type,
438 436 self.priority)
439 437
440 438 for parmConfObj in self.parmConfObjList:
441 439 parmConfObj.printattr()
442 440
443 441 def createObject(self, plotter_queue=None):
444 442
445 443
446 444 if self.type == 'self':
447 raise ValueError, "This operation type cannot be created"
445 raise ValueError, 'This operation type cannot be created'
448 446
449 447 if self.type == 'plotter':
450 #Plotter(plotter_name)
451 448 if not plotter_queue:
452 raise ValueError, "plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)"
449 raise ValueError, 'plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)'
453 450
454 451 opObj = Plotter(self.name, plotter_queue)
455 452
456 453 if self.type == 'external' or self.type == 'other':
457 454
458 455 className = eval(self.name)
459 456 kwargs = self.getKwargs()
460 457
461 458 opObj = className(**kwargs)
462 459
463 460 return opObj
464 461
465 462
466 463 class ProcUnitConf():
467 464
468 465 id = None
469 466 name = None
470 467 datatype = None
471 468 inputId = None
472 469 parentId = None
473 470
474 471 opConfObjList = []
475 472
476 473 procUnitObj = None
477 474 opObjList = []
478 475
479 476 ELEMENTNAME = 'ProcUnit'
480 477
481 478 def __init__(self):
482 479
483 480 self.id = None
484 481 self.datatype = None
485 482 self.name = None
486 483 self.inputId = None
487 484
488 485 self.opConfObjList = []
489 486
490 487 self.procUnitObj = None
491 488 self.opObjDict = {}
492 489
493 490 def __getPriority(self):
494 491
495 492 return len(self.opConfObjList)+1
496 493
497 494 def __getNewId(self):
498 495
499 496 return int(self.id)*10 + len(self.opConfObjList) + 1
500 497
501 498 def getElementName(self):
502 499
503 500 return self.ELEMENTNAME
504 501
505 502 def getId(self):
506 503
507 504 return self.id
508 505
509 506 def updateId(self, new_id, parentId=parentId):
510 507
511 508
512 509 new_id = int(parentId)*10 + (int(self.id) % 10)
513 510 new_inputId = int(parentId)*10 + (int(self.inputId) % 10)
514 511
515 512 #If this proc unit has not inputs
516 513 if self.inputId == '0':
517 514 new_inputId = 0
518 515
519 516 n = 1
520 517 for opConfObj in self.opConfObjList:
521 518
522 519 idOp = str(int(new_id)*10 + n)
523 520 opConfObj.updateId(idOp)
524 521
525 522 n += 1
526 523
527 524 self.parentId = str(parentId)
528 525 self.id = str(new_id)
529 526 self.inputId = str(new_inputId)
530 527
531 528
532 529 def getInputId(self):
533 530
534 531 return self.inputId
535 532
536 533 def getOperationObjList(self):
537 534
538 535 return self.opConfObjList
539 536
540 537 def getOperationObj(self, name=None):
541 538
542 539 for opConfObj in self.opConfObjList:
543 540
544 541 if opConfObj.name != name:
545 542 continue
546 543
547 544 return opConfObj
548 545
549 546 return None
550 547
551 548 def getOpObjfromParamValue(self, value=None):
552 549
553 550 for opConfObj in self.opConfObjList:
554 551 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
555 552 continue
556 553 return opConfObj
557 554 return None
558 555
559 556 def getProcUnitObj(self):
560 557
561 558 return self.procUnitObj
562 559
563 560 def setup(self, id, name, datatype, inputId, parentId=None):
564 561
565 562 #Compatible with old signal chain version
566 563 if datatype==None and name==None:
567 raise ValueError, "datatype or name should be defined"
564 raise ValueError, 'datatype or name should be defined'
568 565
569 566 if name==None:
570 567 if 'Proc' in datatype:
571 568 name = datatype
572 569 else:
573 570 name = '%sProc' %(datatype)
574 571
575 572 if datatype==None:
576 573 datatype = name.replace('Proc','')
577 574
578 575 self.id = str(id)
579 576 self.name = name
580 577 self.datatype = datatype
581 578 self.inputId = inputId
582 579 self.parentId = parentId
583 580
584 581 self.opConfObjList = []
585 582
586 583 self.addOperation(name='run', optype='self')
587 584
588 585 def removeOperations(self):
589 586
590 587 for obj in self.opConfObjList:
591 588 del obj
592 589
593 590 self.opConfObjList = []
594 591 self.addOperation(name='run')
595 592
596 593 def addParameter(self, **kwargs):
597 594 '''
598 Add parameters to "run" operation
595 Add parameters to 'run' operation
599 596 '''
600 597 opObj = self.opConfObjList[0]
601 598
602 599 opObj.addParameter(**kwargs)
603 600
604 601 return opObj
605 602
606 603 def addOperation(self, name, optype='self'):
607 604
608 605 id = self.__getNewId()
609 606 priority = self.__getPriority()
610 607
611 608 opConfObj = OperationConf()
612 609 opConfObj.setup(id, name=name, priority=priority, type=optype)
613 610
614 611 self.opConfObjList.append(opConfObj)
615 612
616 613 return opConfObj
617 614
618 615 def makeXml(self, projectElement):
619 616
620 617 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
621 618 procUnitElement.set('id', str(self.id))
622 619 procUnitElement.set('name', self.name)
623 620 procUnitElement.set('datatype', self.datatype)
624 621 procUnitElement.set('inputId', str(self.inputId))
625 622
626 623 for opConfObj in self.opConfObjList:
627 624 opConfObj.makeXml(procUnitElement)
628 625
629 626 def readXml(self, upElement):
630 627
631 628 self.id = upElement.get('id')
632 629 self.name = upElement.get('name')
633 630 self.datatype = upElement.get('datatype')
634 631 self.inputId = upElement.get('inputId')
635 632
636 if self.ELEMENTNAME == "ReadUnit":
637 self.datatype = self.datatype.replace("Reader", "")
633 if self.ELEMENTNAME == 'ReadUnit':
634 self.datatype = self.datatype.replace('Reader', '')
638 635
639 if self.ELEMENTNAME == "ProcUnit":
640 self.datatype = self.datatype.replace("Proc", "")
636 if self.ELEMENTNAME == 'ProcUnit':
637 self.datatype = self.datatype.replace('Proc', '')
641 638
642 639 if self.inputId == 'None':
643 640 self.inputId = '0'
644 641
645 642 self.opConfObjList = []
646 643
647 644 opElementList = upElement.iter(OperationConf().getElementName())
648 645
649 646 for opElement in opElementList:
650 647 opConfObj = OperationConf()
651 648 opConfObj.readXml(opElement)
652 649 self.opConfObjList.append(opConfObj)
653 650
654 651 def printattr(self):
655 652
656 print "%s[%s]: name = %s, datatype = %s, inputId = %s" %(self.ELEMENTNAME,
653 print '%s[%s]: name = %s, datatype = %s, inputId = %s' %(self.ELEMENTNAME,
657 654 self.id,
658 655 self.name,
659 656 self.datatype,
660 657 self.inputId)
661 658
662 659 for opConfObj in self.opConfObjList:
663 660 opConfObj.printattr()
664 661
665 662
666 663 def getKwargs(self):
667 664
668 665 opObj = self.opConfObjList[0]
669 666 kwargs = opObj.getKwargs()
670 667
671 668 return kwargs
672 669
673 670 def createObjects(self, plotter_queue=None):
674 671
675 672 className = eval(self.name)
676 673 kwargs = self.getKwargs()
677 674 procUnitObj = className(**kwargs)
678 675
679 676 for opConfObj in self.opConfObjList:
680 677
681 678 if opConfObj.type=='self' and self.name=='run':
682 679 continue
683 680 elif opConfObj.type=='self':
684 681 procUnitObj.addOperationKwargs(opConfObj.id, **opConfObj.getKwargs())
685 682 continue
686 683
687 684 opObj = opConfObj.createObject(plotter_queue)
688 685
689 686 self.opObjDict[opConfObj.id] = opObj
690 687
691 688 procUnitObj.addOperation(opObj, opConfObj.id)
692 689
693 690 self.procUnitObj = procUnitObj
694 691
695 692 return procUnitObj
696 693
697 694 def run(self):
698 695
699 696 is_ok = False
700 697
701 698 for opConfObj in self.opConfObjList:
702 699
703 700 kwargs = {}
704 701 for parmConfObj in opConfObj.getParameterObjList():
705 702 if opConfObj.name == 'run' and parmConfObj.name == 'datatype':
706 703 continue
707 704
708 705 kwargs[parmConfObj.name] = parmConfObj.getValue()
709 706
710 #ini = time.time()
711
712 #print "\tRunning the '%s' operation with %s" %(opConfObj.name, opConfObj.id)
713 707 sts = self.procUnitObj.call(opType = opConfObj.type,
714 708 opName = opConfObj.name,
715 709 opId = opConfObj.id)
716 710
717 # total_time = time.time() - ini
718 #
719 # if total_time > 0.002:
720 # print "%s::%s took %f seconds" %(self.name, opConfObj.name, total_time)
721
722 711 is_ok = is_ok or sts
723 712
724 713 return is_ok
725 714
726 715 def close(self):
727 716
728 717 for opConfObj in self.opConfObjList:
729 718 if opConfObj.type == 'self':
730 719 continue
731 720
732 721 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
733 722 opObj.close()
734 723
735 724 self.procUnitObj.close()
736 725
737 726 return
738 727
739 728 class ReadUnitConf(ProcUnitConf):
740 729
741 730 path = None
742 731 startDate = None
743 732 endDate = None
744 733 startTime = None
745 734 endTime = None
746 735
747 736 ELEMENTNAME = 'ReadUnit'
748 737
749 738 def __init__(self):
750 739
751 740 self.id = None
752 741 self.datatype = None
753 742 self.name = None
754 743 self.inputId = None
755 744
756 745 self.parentId = None
757 746
758 747 self.opConfObjList = []
759 748 self.opObjList = []
760 749
761 750 def getElementName(self):
762 751
763 752 return self.ELEMENTNAME
764 753
765 def setup(self, id, name, datatype, path='', startDate="", endDate="", startTime="",
766 endTime="", parentId=None, queue=None, server=None, **kwargs):
754 def setup(self, id, name, datatype, path='', startDate='', endDate='',
755 startTime='', endTime='', parentId=None, server=None, **kwargs):
756
767 757 #Compatible with old signal chain version
768 758 if datatype==None and name==None:
769 raise ValueError, "datatype or name should be defined"
759 raise ValueError, 'datatype or name should be defined'
770 760
771 761 if name==None:
772 762 if 'Reader' in datatype:
773 763 name = datatype
774 764 else:
775 765 name = '%sReader' %(datatype)
776 766 if datatype==None:
777 767 datatype = name.replace('Reader','')
778 768
779 769 self.id = id
780 770 self.name = name
781 771 self.datatype = datatype
782 772 if path != '':
783 773 self.path = os.path.abspath(path)
784 774 self.startDate = startDate
785 775 self.endDate = endDate
786 776 self.startTime = startTime
787 777 self.endTime = endTime
788
789 778 self.inputId = '0'
790 779 self.parentId = parentId
791 self.queue = queue
792 780 self.server = server
793 781 self.addRunOperation(**kwargs)
794 782
795 def update(self, datatype, path, startDate, endDate, startTime, endTime, parentId=None, name=None, **kwargs):
796
797 #Compatible with old signal chain version
798 if datatype==None and name==None:
799 raise ValueError, "datatype or name should be defined"
783 def update(self, **kwargs):
800 784
801 if name==None:
785 if 'datatype' in kwargs:
786 datatype = kwargs.pop('datatype')
802 787 if 'Reader' in datatype:
803 name = datatype
788 self.name = datatype
804 789 else:
805 name = '%sReader' %(datatype)
790 self.name = '%sReader' %(datatype)
791 self.datatype = self.name.replace('Reader', '')
806 792
807 if datatype==None:
808 datatype = name.replace('Reader','')
793 attrs = ('path', 'startDate', 'endDate', 'startTime', 'endTime', 'parentId')
809 794
810 self.datatype = datatype
811 self.name = name
812 self.path = path
813 self.startDate = startDate
814 self.endDate = endDate
815 self.startTime = startTime
816 self.endTime = endTime
795 for attr in attrs:
796 if attr in kwargs:
797 setattr(self, attr, kwargs.pop(attr))
817 798
818 799 self.inputId = '0'
819 self.parentId = parentId
820
821 800 self.updateRunOperation(**kwargs)
822 801
823 802 def removeOperations(self):
824 803
825 804 for obj in self.opConfObjList:
826 805 del obj
827 806
828 807 self.opConfObjList = []
829 808
830 809 def addRunOperation(self, **kwargs):
831 810
832 811 opObj = self.addOperation(name = 'run', optype = 'self')
833 812
834 813 if self.server is None:
835 814 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
836 815 opObj.addParameter(name='path' , value=self.path, format='str')
837 816 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
838 817 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
839 818 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
840 819 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
841 opObj.addParameter(name='queue' , value=self.queue, format='obj')
820
842 821 for key, value in kwargs.items():
843 822 opObj.addParameter(name=key, value=value, format=type(value).__name__)
844 823 else:
845 824 opObj.addParameter(name='server' , value=self.server, format='str')
846 825
847 826
848 827 return opObj
849 828
850 829 def updateRunOperation(self, **kwargs):
851 830
852 831 opObj = self.getOperationObj(name = 'run')
853 832 opObj.removeParameters()
854 833
855 834 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
856 835 opObj.addParameter(name='path' , value=self.path, format='str')
857 836 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
858 837 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
859 838 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
860 839 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
861 840
862 841 for key, value in kwargs.items():
863 842 opObj.addParameter(name=key, value=value, format=type(value).__name__)
864 843
865 844 return opObj
866 845
867 # def makeXml(self, projectElement):
868 #
869 # procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
870 # procUnitElement.set('id', str(self.id))
871 # procUnitElement.set('name', self.name)
872 # procUnitElement.set('datatype', self.datatype)
873 # procUnitElement.set('inputId', str(self.inputId))
874 #
875 # for opConfObj in self.opConfObjList:
876 # opConfObj.makeXml(procUnitElement)
877
878 846 def readXml(self, upElement):
879 847
880 848 self.id = upElement.get('id')
881 849 self.name = upElement.get('name')
882 850 self.datatype = upElement.get('datatype')
883 851 self.inputId = upElement.get('inputId')
884 852
885 if self.ELEMENTNAME == "ReadUnit":
886 self.datatype = self.datatype.replace("Reader", "")
853 if self.ELEMENTNAME == 'ReadUnit':
854 self.datatype = self.datatype.replace('Reader', '')
887 855
888 856 if self.inputId == 'None':
889 857 self.inputId = '0'
890 858
891 859 self.opConfObjList = []
892 860
893 861 opElementList = upElement.iter(OperationConf().getElementName())
894 862
895 863 for opElement in opElementList:
896 864 opConfObj = OperationConf()
897 865 opConfObj.readXml(opElement)
898 866 self.opConfObjList.append(opConfObj)
899 867
900 868 if opConfObj.name == 'run':
901 869 self.path = opConfObj.getParameterValue('path')
902 870 self.startDate = opConfObj.getParameterValue('startDate')
903 871 self.endDate = opConfObj.getParameterValue('endDate')
904 872 self.startTime = opConfObj.getParameterValue('startTime')
905 873 self.endTime = opConfObj.getParameterValue('endTime')
906 874
907 875 class Project(Process):
876
908 877 id = None
909 name = None
878 # name = None
910 879 description = None
911 880 filename = None
912 881
913 882 procUnitConfObjDict = None
914 883
915 884 ELEMENTNAME = 'Project'
916 885
917 886 plotterQueue = None
918 887
919 def __init__(self, plotter_queue=None, logfile=None):
888 def __init__(self, plotter_queue=None):
889
920 890 Process.__init__(self)
921 891 self.id = None
922 self.name = None
892 # self.name = None
923 893 self.description = None
924 if logfile is not None:
925 logToFile(logfile)
894
926 895 self.plotterQueue = plotter_queue
927 896
928 897 self.procUnitConfObjDict = {}
929 898
930 899 def __getNewId(self):
931 900
932 901 idList = self.procUnitConfObjDict.keys()
933 902
934 903 id = int(self.id)*10
935 904
936 905 while True:
937 906 id += 1
938 907
939 908 if str(id) in idList:
940 909 continue
941 910
942 911 break
943 912
944 913 return str(id)
945 914
946 915 def getElementName(self):
947 916
948 917 return self.ELEMENTNAME
949 918
950 919 def getId(self):
951 920
952 921 return self.id
953 922
954 923 def updateId(self, new_id):
955 924
956 925 self.id = str(new_id)
957 926
958 927 keyList = self.procUnitConfObjDict.keys()
959 928 keyList.sort()
960 929
961 930 n = 1
962 931 newProcUnitConfObjDict = {}
963 932
964 933 for procKey in keyList:
965 934
966 935 procUnitConfObj = self.procUnitConfObjDict[procKey]
967 936 idProcUnit = str(int(self.id)*10 + n)
968 937 procUnitConfObj.updateId(idProcUnit, parentId = self.id)
969 938
970 939 newProcUnitConfObjDict[idProcUnit] = procUnitConfObj
971 940 n += 1
972 941
973 942 self.procUnitConfObjDict = newProcUnitConfObjDict
974 943
975 def setup(self, id, name, description):
944 def setup(self, id, name='', description=''):
976 945
946 print
947 print '*'*60
948 print ' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__
949 print '*'*60
950 print
977 951 self.id = str(id)
978 self.name = name
979 952 self.description = description
980 953
981 954 def update(self, name, description):
982 955
983 self.name = name
984 956 self.description = description
985 957
958 def clone(self):
959
960 p = Project()
961 p.procUnitConfObjDict = self.procUnitConfObjDict
962 return p
963
986 964 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
965
987 966 if id is None:
988 967 idReadUnit = self.__getNewId()
989 968 else:
990 969 idReadUnit = str(id)
991 970
992 971 readUnitConfObj = ReadUnitConf()
993 972 readUnitConfObj.setup(idReadUnit, name, datatype, parentId=self.id, **kwargs)
994 973
995 974 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
996 975
997 976 return readUnitConfObj
998 977
999 978 def addProcUnit(self, inputId='0', datatype=None, name=None):
1000 979
1001 980 idProcUnit = self.__getNewId()
1002 981
1003 982 procUnitConfObj = ProcUnitConf()
1004 983 procUnitConfObj.setup(idProcUnit, name, datatype, inputId, parentId=self.id)
1005 984
1006 985 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1007 986
1008 987 return procUnitConfObj
1009 988
1010 989 def removeProcUnit(self, id):
1011 990
1012 991 if id in self.procUnitConfObjDict.keys():
1013 992 self.procUnitConfObjDict.pop(id)
1014 993
1015 994 def getReadUnitId(self):
1016 995
1017 996 readUnitConfObj = self.getReadUnitObj()
1018 997
1019 998 return readUnitConfObj.id
1020 999
1021 1000 def getReadUnitObj(self):
1022 1001
1023 1002 for obj in self.procUnitConfObjDict.values():
1024 if obj.getElementName() == "ReadUnit":
1003 if obj.getElementName() == 'ReadUnit':
1025 1004 return obj
1026 1005
1027 1006 return None
1028 1007
1029 1008 def getProcUnitObj(self, id=None, name=None):
1030 1009
1031 1010 if id != None:
1032 1011 return self.procUnitConfObjDict[id]
1033 1012
1034 1013 if name != None:
1035 1014 return self.getProcUnitObjByName(name)
1036 1015
1037 1016 return None
1038 1017
1039 1018 def getProcUnitObjByName(self, name):
1040 1019
1041 1020 for obj in self.procUnitConfObjDict.values():
1042 1021 if obj.name == name:
1043 1022 return obj
1044 1023
1045 1024 return None
1046 1025
1047 1026 def procUnitItems(self):
1048 1027
1049 1028 return self.procUnitConfObjDict.items()
1050 1029
1051 1030 def makeXml(self):
1052 1031
1053 1032 projectElement = Element('Project')
1054 1033 projectElement.set('id', str(self.id))
1055 1034 projectElement.set('name', self.name)
1056 1035 projectElement.set('description', self.description)
1057 1036
1058 1037 for procUnitConfObj in self.procUnitConfObjDict.values():
1059 1038 procUnitConfObj.makeXml(projectElement)
1060 1039
1061 1040 self.projectElement = projectElement
1062 1041
1063 1042 def writeXml(self, filename=None):
1064 1043
1065 1044 if filename == None:
1066 1045 if self.filename:
1067 1046 filename = self.filename
1068 1047 else:
1069 filename = "schain.xml"
1048 filename = 'schain.xml'
1070 1049
1071 1050 if not filename:
1072 print "filename has not been defined. Use setFilename(filename) for do it."
1051 print 'filename has not been defined. Use setFilename(filename) for do it.'
1073 1052 return 0
1074 1053
1075 1054 abs_file = os.path.abspath(filename)
1076 1055
1077 1056 if not os.access(os.path.dirname(abs_file), os.W_OK):
1078 print "No write permission on %s" %os.path.dirname(abs_file)
1057 print 'No write permission on %s' %os.path.dirname(abs_file)
1079 1058 return 0
1080 1059
1081 1060 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1082 print "File %s already exists and it could not be overwriten" %abs_file
1061 print 'File %s already exists and it could not be overwriten' %abs_file
1083 1062 return 0
1084 1063
1085 1064 self.makeXml()
1086 1065
1087 1066 ElementTree(self.projectElement).write(abs_file, method='xml')
1088 1067
1089 1068 self.filename = abs_file
1090 1069
1091 1070 return 1
1092 1071
1093 1072 def readXml(self, filename = None):
1094 1073
1095 1074 if not filename:
1096 print "filename is not defined"
1075 print 'filename is not defined'
1097 1076 return 0
1098 1077
1099 1078 abs_file = os.path.abspath(filename)
1100 1079
1101 1080 if not os.path.isfile(abs_file):
1102 print "%s file does not exist" %abs_file
1081 print '%s file does not exist' %abs_file
1103 1082 return 0
1104 1083
1105 1084 self.projectElement = None
1106 1085 self.procUnitConfObjDict = {}
1107 1086
1108 1087 try:
1109 1088 self.projectElement = ElementTree().parse(abs_file)
1110 1089 except:
1111 print "Error reading %s, verify file format" %filename
1090 print 'Error reading %s, verify file format' %filename
1112 1091 return 0
1113 1092
1114 1093 self.project = self.projectElement.tag
1115 1094
1116 1095 self.id = self.projectElement.get('id')
1117 1096 self.name = self.projectElement.get('name')
1118 1097 self.description = self.projectElement.get('description')
1119 1098
1120 1099 readUnitElementList = self.projectElement.iter(ReadUnitConf().getElementName())
1121 1100
1122 1101 for readUnitElement in readUnitElementList:
1123 1102 readUnitConfObj = ReadUnitConf()
1124 1103 readUnitConfObj.readXml(readUnitElement)
1125 1104
1126 1105 if readUnitConfObj.parentId == None:
1127 1106 readUnitConfObj.parentId = self.id
1128 1107
1129 1108 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
1130 1109
1131 1110 procUnitElementList = self.projectElement.iter(ProcUnitConf().getElementName())
1132 1111
1133 1112 for procUnitElement in procUnitElementList:
1134 1113 procUnitConfObj = ProcUnitConf()
1135 1114 procUnitConfObj.readXml(procUnitElement)
1136 1115
1137 1116 if procUnitConfObj.parentId == None:
1138 1117 procUnitConfObj.parentId = self.id
1139 1118
1140 1119 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1141 1120
1142 1121 self.filename = abs_file
1143 1122
1144 1123 return 1
1145 1124
1146 1125 def printattr(self):
1147 1126
1148 print "Project[%s]: name = %s, description = %s" %(self.id,
1127 print 'Project[%s]: name = %s, description = %s' %(self.id,
1149 1128 self.name,
1150 1129 self.description)
1151 1130
1152 1131 for procUnitConfObj in self.procUnitConfObjDict.values():
1153 1132 procUnitConfObj.printattr()
1154 1133
1155 1134 def createObjects(self):
1156 1135
1157 1136 for procUnitConfObj in self.procUnitConfObjDict.values():
1158 1137 procUnitConfObj.createObjects(self.plotterQueue)
1159 1138
1160 1139 def __connect(self, objIN, thisObj):
1161 1140
1162 1141 thisObj.setInput(objIN.getOutputObj())
1163 1142
1164 1143 def connectObjects(self):
1165 1144
1166 1145 for thisPUConfObj in self.procUnitConfObjDict.values():
1167 1146
1168 1147 inputId = thisPUConfObj.getInputId()
1169 1148
1170 1149 if int(inputId) == 0:
1171 1150 continue
1172 1151
1173 1152 #Get input object
1174 1153 puConfINObj = self.procUnitConfObjDict[inputId]
1175 1154 puObjIN = puConfINObj.getProcUnitObj()
1176 1155
1177 1156 #Get current object
1178 1157 thisPUObj = thisPUConfObj.getProcUnitObj()
1179 1158
1180 1159 self.__connect(puObjIN, thisPUObj)
1181 1160
1182 def __handleError(self, procUnitConfObj, send_email=True):
1161 def __handleError(self, procUnitConfObj, send_email=False):
1183 1162
1184 1163 import socket
1185 1164
1186 1165 err = traceback.format_exception(sys.exc_info()[0],
1187 1166 sys.exc_info()[1],
1188 1167 sys.exc_info()[2])
1189 1168
1190 print "***** Error occurred in %s *****" %(procUnitConfObj.name)
1191 print "***** %s" %err[-1]
1169 print '***** Error occurred in %s *****' %(procUnitConfObj.name)
1170 print '***** %s' %err[-1]
1192 1171
1193 message = "".join(err)
1172 message = ''.join(err)
1194 1173
1195 1174 sys.stderr.write(message)
1196 1175
1197 1176 if not send_email:
1198 1177 return
1199 1178
1200 subject = "SChain v%s: Error running %s\n" %(schainpy.__version__, procUnitConfObj.name)
1179 subject = 'SChain v%s: Error running %s\n' %(schainpy.__version__, procUnitConfObj.name)
1201 1180
1202 subtitle = "%s: %s\n" %(procUnitConfObj.getElementName() ,procUnitConfObj.name)
1203 subtitle += "Hostname: %s\n" %socket.gethostbyname(socket.gethostname())
1204 subtitle += "Working directory: %s\n" %os.path.abspath("./")
1205 subtitle += "Configuration file: %s\n" %self.filename
1206 subtitle += "Time: %s\n" %str(datetime.datetime.now())
1181 subtitle = '%s: %s\n' %(procUnitConfObj.getElementName() ,procUnitConfObj.name)
1182 subtitle += 'Hostname: %s\n' %socket.gethostbyname(socket.gethostname())
1183 subtitle += 'Working directory: %s\n' %os.path.abspath('./')
1184 subtitle += 'Configuration file: %s\n' %self.filename
1185 subtitle += 'Time: %s\n' %str(datetime.datetime.now())
1207 1186
1208 1187 readUnitConfObj = self.getReadUnitObj()
1209 1188 if readUnitConfObj:
1210 subtitle += "\nInput parameters:\n"
1211 subtitle += "[Data path = %s]\n" %readUnitConfObj.path
1212 subtitle += "[Data type = %s]\n" %readUnitConfObj.datatype
1213 subtitle += "[Start date = %s]\n" %readUnitConfObj.startDate
1214 subtitle += "[End date = %s]\n" %readUnitConfObj.endDate
1215 subtitle += "[Start time = %s]\n" %readUnitConfObj.startTime
1216 subtitle += "[End time = %s]\n" %readUnitConfObj.endTime
1189 subtitle += '\nInput parameters:\n'
1190 subtitle += '[Data path = %s]\n' %readUnitConfObj.path
1191 subtitle += '[Data type = %s]\n' %readUnitConfObj.datatype
1192 subtitle += '[Start date = %s]\n' %readUnitConfObj.startDate
1193 subtitle += '[End date = %s]\n' %readUnitConfObj.endDate
1194 subtitle += '[Start time = %s]\n' %readUnitConfObj.startTime
1195 subtitle += '[End time = %s]\n' %readUnitConfObj.endTime
1217 1196
1218 1197 adminObj = schainpy.admin.SchainNotify()
1219 1198 adminObj.sendAlert(message=message,
1220 1199 subject=subject,
1221 1200 subtitle=subtitle,
1222 1201 filename=self.filename)
1223 1202
1224 1203 def isPaused(self):
1225 1204 return 0
1226 1205
1227 1206 def isStopped(self):
1228 1207 return 0
1229 1208
1230 1209 def runController(self):
1231 """
1210 '''
1232 1211 returns 0 when this process has been stopped, 1 otherwise
1233 """
1212 '''
1234 1213
1235 1214 if self.isPaused():
1236 print "Process suspended"
1215 print 'Process suspended'
1237 1216
1238 1217 while True:
1239 sleep(0.1)
1218 time.sleep(0.1)
1240 1219
1241 1220 if not self.isPaused():
1242 1221 break
1243 1222
1244 1223 if self.isStopped():
1245 1224 break
1246 1225
1247 print "Process reinitialized"
1226 print 'Process reinitialized'
1248 1227
1249 1228 if self.isStopped():
1250 print "Process stopped"
1229 print 'Process stopped'
1251 1230 return 0
1252 1231
1253 1232 return 1
1254 1233
1255 1234 def setFilename(self, filename):
1256 1235
1257 1236 self.filename = filename
1258 1237
1259 1238 def setPlotterQueue(self, plotter_queue):
1260 1239
1261 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1240 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1262 1241
1263 1242 def getPlotterQueue(self):
1264 1243
1265 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1244 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1266 1245
1267 1246 def useExternalPlotter(self):
1268 1247
1269 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead Project class"
1248 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1270 1249
1250 def run(self):
1271 1251
1272 def run(self, filename=None):
1252 log.success('Starting {}'.format(self.name))
1273 1253
1274 # self.writeXml(filename)
1275 1254 self.createObjects()
1276 1255 self.connectObjects()
1277 1256
1278 print
1279 print "*"*60
1280 print " Starting SIGNAL CHAIN PROCESSING v%s " %schainpy.__version__
1281 print "*"*60
1282 print
1283
1284 1257 keyList = self.procUnitConfObjDict.keys()
1285 1258 keyList.sort()
1286 1259
1287 1260 while(True):
1288 1261
1289 1262 is_ok = False
1290 1263
1291 1264 for procKey in keyList:
1292 # print "Running the '%s' process with %s" %(procUnitConfObj.name, procUnitConfObj.id)
1293 1265
1294 1266 procUnitConfObj = self.procUnitConfObjDict[procKey]
1295 1267
1296 1268 try:
1297 1269 sts = procUnitConfObj.run()
1298 1270 is_ok = is_ok or sts
1299 1271 except KeyboardInterrupt:
1300 1272 is_ok = False
1301 1273 break
1302 1274 except ValueError, e:
1303 sleep(0.5)
1275 time.sleep(0.5)
1304 1276 self.__handleError(procUnitConfObj, send_email=True)
1305 1277 is_ok = False
1306 1278 break
1307 1279 except:
1308 sleep(0.5)
1280 time.sleep(0.5)
1309 1281 self.__handleError(procUnitConfObj)
1310 1282 is_ok = False
1311 1283 break
1312 1284
1313 1285 #If every process unit finished so end process
1314 1286 if not(is_ok):
1315 # print "Every process unit have finished"
1316 1287 break
1317 1288
1318 1289 if not self.runController():
1319 1290 break
1320 1291
1321 1292 #Closing every process
1322 1293 for procKey in keyList:
1323 1294 procUnitConfObj = self.procUnitConfObjDict[procKey]
1324 1295 procUnitConfObj.close()
1296
1297 log.success('{} finished'.format(self.name))
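
Taken together, the controller changes above replace the old multiSchain helper with the MPProject wrapper: instead of spawning raw worker functions through a Queue, it reads startDate/endDate back from the project's read unit, builds the per-day file list with JRODataReader.searchFilesOffLine and the DTYPES extension map, and starts one cloned Project per cursor/skip slice. A minimal usage sketch follows, assuming the diff applies to schainpy/controller.py; the project id, data path, dates and the Spectra datatype are illustrative, not taken from this commit.

    # Hypothetical usage sketch of the new MPProject wrapper; ids, paths and
    # dates are placeholders. Assumes the diffed module is schainpy/controller.py.
    from multiprocessing import cpu_count
    from schainpy.controller import Project, MPProject

    project = Project()
    project.setup(id='191', description='Parallel spectra processing')

    # The read unit supplies path/startDate/endDate, which MPProject reads back
    # through getReadUnitObj(); DTYPES maps 'Spectra' to the '.pdata' extension.
    read_unit = project.addReadUnit(datatype='Spectra',
                                    path='/data/experiment',
                                    startDate='2017/01/01',
                                    endDate='2017/01/31',
                                    startTime='00:00:00',
                                    endTime='23:59:59')

    proc_unit = project.addProcUnit(datatype='Spectra', inputId=read_unit.getId())

    # One cloned Project per slice of each day's files, spread over up to
    # cpu_count() processes; every clone gets its read unit updated with
    # cursor/skip before start() is called.
    MPProject(project, n=cpu_count())

Each clone shares the same procUnitConfObjDict, so per-slice state lives in the read unit parameters (cursor and skip) that rconf.update() sets before the corresponding clone is started.
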
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,692 +1,692
1 1 '''
2 2 @author: Daniel Suarez
3 3 '''
4 4
5 5 import os
6 6 import sys
7 7 import glob
8 8 import fnmatch
9 9 import datetime
10 10 import time
11 11 import re
12 12 import h5py
13 13 import numpy
14 14
15 15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
16 16 from schainpy.model.data.jroamisr import AMISR
17 17
18 18 try:
19 19 from gevent import sleep
20 20 except:
21 21 from time import sleep
22 22
23 23 class RadacHeader():
24 24 def __init__(self, fp):
25 25 header = 'Raw11/Data/RadacHeader'
26 26 self.beamCodeByPulse = fp.get(header+'/BeamCode')
27 27 self.beamCode = fp.get('Raw11/Data/Beamcodes')
28 28 self.code = fp.get(header+'/Code')
29 29 self.frameCount = fp.get(header+'/FrameCount')
30 30 self.modeGroup = fp.get(header+'/ModeGroup')
31 31 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
32 32 self.pulseCount = fp.get(header+'/PulseCount')
33 33 self.radacTime = fp.get(header+'/RadacTime')
34 34 self.timeCount = fp.get(header+'/TimeCount')
35 35 self.timeStatus = fp.get(header+'/TimeStatus')
36 36
37 37 self.nrecords = self.pulseCount.shape[0] #nblocks
38 38 self.npulses = self.pulseCount.shape[1] #nprofile
39 39 self.nsamples = self.nsamplesPulse[0,0] #ngates
40 40 self.nbeams = self.beamCode.shape[1]
41 41
42 42
43 43 def getIndexRangeToPulse(self, idrecord=0):
44 44 #indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
45 45 #startPulseCountId = indexToZero[0][0]
46 46 #endPulseCountId = startPulseCountId - 1
47 47 #range1 = numpy.arange(startPulseCountId,self.npulses,1)
48 48 #range2 = numpy.arange(0,startPulseCountId,1)
49 49 #return range1, range2
50 50 zero = 0
51 51 npulse = max(self.pulseCount[0,:]+1)-1
52 52 looking_index = numpy.where(self.pulseCount.value[idrecord,:]==npulse)[0]
53 53 getLastIndex = looking_index[-1]
54 54 index_data = numpy.arange(0,getLastIndex+1,1)
55 55 index_buffer = numpy.arange(getLastIndex+1,self.npulses,1)
56 56 return index_data, index_buffer
57 57
58 58 class AMISRReader(ProcessingUnit):
59 59
60 60 path = None
61 61 startDate = None
62 62 endDate = None
63 63 startTime = None
64 64 endTime = None
65 65 walk = None
66 66 isConfig = False
67 67
68 68 def __init__(self):
69 69 self.set = None
70 70 self.subset = None
71 71 self.extension_file = '.h5'
72 72 self.dtc_str = 'dtc'
73 73 self.dtc_id = 0
74 74 self.status = True
75 75 self.isConfig = False
76 76 self.dirnameList = []
77 77 self.filenameList = []
78 78 self.fileIndex = None
79 79 self.flagNoMoreFiles = False
80 80 self.flagIsNewFile = 0
81 81 self.filename = ''
82 82 self.amisrFilePointer = None
83 83 self.radacHeaderObj = None
84 84 self.dataOut = self.__createObjByDefault()
85 85 self.datablock = None
86 86 self.rest_datablock = None
87 87 self.range = None
88 88 self.idrecord_count = 0
89 89 self.profileIndex = 0
90 90 self.index_amisr_sample = None
91 91 self.index_amisr_buffer = None
92 92 self.beamCodeByFrame = None
93 93 self.radacTimeByFrame = None
94 94 #atributos originales tal y como esta en el archivo de datos
95 95 self.beamCodesFromFile = None
96 96 self.radacTimeFromFile = None
97 97 self.rangeFromFile = None
98 98 self.dataByFrame = None
99 99 self.dataset = None
100 100
101 101 self.beamCodeDict = {}
102 102 self.beamRangeDict = {}
103 103
104 104 #experiment cgf file
105 105 self.npulsesint_fromfile = None
106 106 self.recordsperfile_fromfile = None
107 107 self.nbeamcodes_fromfile = None
108 108 self.ngates_fromfile = None
109 109 self.ippSeconds_fromfile = None
110 110 self.frequency_h5file = None
111 111
112 112
113 113 self.__firstFile = True
114 114 self.buffer_radactime = None
115 115
116 116 self.index4_schain_datablock = None
117 117 self.index4_buffer = None
118 118 self.schain_datablock = None
119 119 self.buffer = None
120 120 self.linear_pulseCount = None
121 121 self.npulseByFrame = None
122 122 self.profileIndex_offset = None
123 123 self.timezone = 'ut'
124 124
125 125 self.__waitForNewFile = 20
126 126 self.__filename_online = None
127 127
128 128 def __createObjByDefault(self):
129 129
130 130 dataObj = AMISR()
131 131
132 132 return dataObj
133 133
134 134 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
135 135 self.path = path
136 136 self.startDate = startDate
137 137 self.endDate = endDate
138 138 self.startTime = startTime
139 139 self.endTime = endTime
140 140 self.walk = walk
141 141
142 142 def __checkPath(self):
143 143 if os.path.exists(self.path):
144 144 self.status = 1
145 145 else:
146 146 self.status = 0
147 147 print 'Path:%s does not exists'%self.path
148 148
149 149 return
150 150
151 151 def __selDates(self, amisr_dirname_format):
152 152 try:
153 153 year = int(amisr_dirname_format[0:4])
154 154 month = int(amisr_dirname_format[4:6])
155 155 dom = int(amisr_dirname_format[6:8])
156 156 thisDate = datetime.date(year,month,dom)
157 157
158 158 if (thisDate>=self.startDate and thisDate <= self.endDate):
159 159 return amisr_dirname_format
160 160 except:
161 161 return None
162 162
163 163 def __findDataForDates(self,online=False):
164 164
165 165
166 166
167 167 if not(self.status):
168 168 return None
169 169
170 170 pat = '\d+.\d+'
171 171 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
172 172 dirnameList = filter(lambda x:x!=None,dirnameList)
173 173 dirnameList = [x.string for x in dirnameList]
174 174 if not(online):
175 175 dirnameList = [self.__selDates(x) for x in dirnameList]
176 176 dirnameList = filter(lambda x:x!=None,dirnameList)
177 177 if len(dirnameList)>0:
178 178 self.status = 1
179 179 self.dirnameList = dirnameList
180 180 self.dirnameList.sort()
181 181 else:
182 182 self.status = 0
183 183 return None
184 184
185 185 def __getTimeFromData(self):
186 186 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
187 187 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
188 188
189 189 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
190 190 print '........................................'
191 191 filter_filenameList = []
192 192 self.filenameList.sort()
193 193 for i in range(len(self.filenameList)-1):
194 194 filename = self.filenameList[i]
195 195 fp = h5py.File(filename,'r')
196 196 time_str = fp.get('Time/RadacTimeString')
197 197
198 198 startDateTimeStr_File = time_str[0][0].split('.')[0]
199 199 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
200 200 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
201 201
202 202 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
203 203 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
204 204 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
205 205
206 206 fp.close()
207 207
208 208 if self.timezone == 'lt':
209 209 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
210 210 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
211 211
212 212 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
213 213 #self.filenameList.remove(filename)
214 214 filter_filenameList.append(filename)
215 215
216 216 filter_filenameList.sort()
217 217 self.filenameList = filter_filenameList
218 218 return 1
219 219
220 220 def __filterByGlob1(self, dirName):
221 221 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
222 222 filterDict = {}
223 223 filterDict.setdefault(dirName)
224 224 filterDict[dirName] = filter_files
225 225 return filterDict
226 226
227 227 def __getFilenameList(self, fileListInKeys, dirList):
228 228 for value in fileListInKeys:
229 229 dirName = value.keys()[0]
230 230 for file in value[dirName]:
231 231 filename = os.path.join(dirName, file)
232 232 self.filenameList.append(filename)
233 233
234 234
235 235 def __selectDataForTimes(self, online=False):
236 236 #aun no esta implementado el filtro for tiempo
237 237 if not(self.status):
238 238 return None
239 239
240 240 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
241 241
242 242 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
243 243
244 244 self.__getFilenameList(fileListInKeys, dirList)
245 245 if not(online):
246 246 #filtro por tiempo
247 247 if not(self.all):
248 248 self.__getTimeFromData()
249 249
250 250 if len(self.filenameList)>0:
251 251 self.status = 1
252 252 self.filenameList.sort()
253 253 else:
254 254 self.status = 0
255 255 return None
256 256
257 257 else:
258 258 #get the last file - 1
259 259 self.filenameList = [self.filenameList[-2]]
260 260
261 261 new_dirnameList = []
262 262 for dirname in self.dirnameList:
263 263 junk = numpy.array([dirname in x for x in self.filenameList])
264 264 junk_sum = junk.sum()
265 265 if junk_sum > 0:
266 266 new_dirnameList.append(dirname)
267 267 self.dirnameList = new_dirnameList
268 268 return 1
269 269
270 def __searchFilesOnline(self,
270 def searchFilesOnLine(self,
271 271 path,
272 272 walk=True):
273 273
274 274 startDate = datetime.datetime.utcnow().date()
275 275 endDate = datetime.datetime.utcnow().date()
276 276
277 277 self.__setParameters(path=path, startDate=startDate, endDate=endDate, walk=walk)
278 278
279 279 self.__checkPath()
280 280
281 281 self.__findDataForDates(online=True)
282 282
283 283 self.dirnameList = [self.dirnameList[-1]]
284 284
285 285 self.__selectDataForTimes(online=True)
286 286
287 287 return
288 288
289 289
290 def __searchFilesOffline(self,
290 def searchFilesOffLine(self,
291 291 path,
292 292 startDate,
293 293 endDate,
294 294 startTime=datetime.time(0,0,0),
295 295 endTime=datetime.time(23,59,59),
296 296 walk=True):
297 297
298 298 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
299 299
300 300 self.__checkPath()
301 301
302 302 self.__findDataForDates()
303 303
304 304 self.__selectDataForTimes()
305 305
306 306 for i in range(len(self.filenameList)):
307 307 print "%s" %(self.filenameList[i])
308 308
309 309 return
310 310
311 311 def __setNextFileOffline(self):
312 312 idFile = self.fileIndex
313 313
314 314 while (True):
315 315 idFile += 1
316 316 if not(idFile < len(self.filenameList)):
317 317 self.flagNoMoreFiles = 1
318 318 print "No more Files"
319 319 return 0
320 320
321 321 filename = self.filenameList[idFile]
322 322
323 323 amisrFilePointer = h5py.File(filename,'r')
324 324
325 325 break
326 326
327 327 self.flagIsNewFile = 1
328 328 self.fileIndex = idFile
329 329 self.filename = filename
330 330
331 331 self.amisrFilePointer = amisrFilePointer
332 332
333 333 print "Setting the file: %s"%self.filename
334 334
335 335 return 1
336 336
337 337
338 338 def __setNextFileOnline(self):
339 339 filename = self.filenameList[0]
340 340 if self.__filename_online != None:
341 341 self.__selectDataForTimes(online=True)
342 342 filename = self.filenameList[0]
343 343 while self.__filename_online == filename:
344 344 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
345 345 sleep(self.__waitForNewFile)
346 346 self.__selectDataForTimes(online=True)
347 347 filename = self.filenameList[0]
348 348
349 349 self.__filename_online = filename
350 350
351 351 self.amisrFilePointer = h5py.File(filename,'r')
352 352 self.flagIsNewFile = 1
353 353 self.filename = filename
354 354 print "Setting the file: %s"%self.filename
355 355 return 1
356 356
357 357
358 358 def __readHeader(self):
359 359 self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
360 360
361 361 #update values from experiment cfg file
362 362 if self.radacHeaderObj.nrecords == self.recordsperfile_fromfile:
363 363 self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
364 364 self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
365 365 self.radacHeaderObj.npulses = self.npulsesint_fromfile
366 366 self.radacHeaderObj.nsamples = self.ngates_fromfile
367 367
368 368 #looking index list for data
369 369 start_index = self.radacHeaderObj.pulseCount[0,:][0]
370 370 end_index = self.radacHeaderObj.npulses
371 371 range4data = range(start_index, end_index)
372 372 self.index4_schain_datablock = numpy.array(range4data)
373 373
374 374 buffer_start_index = 0
375 375 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
376 376 range4buffer = range(buffer_start_index, buffer_end_index)
377 377 self.index4_buffer = numpy.array(range4buffer)
378 378
379 379 self.linear_pulseCount = numpy.array(range4data + range4buffer)
380 380 self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0,:]+1)
381 381
382 382 #get tuning frequency
383 383 frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
384 384 self.frequency_h5file = frequency_h5file_dataset[0,0]
385 385
386 386 self.flagIsNewFile = 1
387 387
388 388 def __getBeamCode(self):
389 389 self.beamCodeDict = {}
390 390 self.beamRangeDict = {}
391 391
392 392 beamCodeMap = self.amisrFilePointer.get('Setup/BeamcodeMap')
393 393
394 394 for i in range(len(self.radacHeaderObj.beamCode[0,:])):
395 395 self.beamCodeDict.setdefault(i)
396 396 self.beamRangeDict.setdefault(i)
397 397 beamcodeValue = self.radacHeaderObj.beamCode[0,i]
398 398 beamcodeIndex = numpy.where(beamCodeMap[:,0] == beamcodeValue)[0][0]
399 399 x = beamCodeMap[beamcodeIndex][1]
400 400 y = beamCodeMap[beamcodeIndex][2]
401 401 z = beamCodeMap[beamcodeIndex][3]
402 402 self.beamCodeDict[i] = [beamcodeValue, x, y, z]
403 403
404 404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
405 405
406 406 for i in range(len(self.beamCodeDict.values())):
407 407 xx = numpy.where(just4record0==self.beamCodeDict.values()[i][0])
408 408 indexPulseByBeam = self.linear_pulseCount[xx[0]]
409 409 self.beamRangeDict[i] = indexPulseByBeam
410 410
411 411 def __getExpParameters(self):
412 412 if not(self.status):
413 413 return None
414 414
415 415 experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')
416 416
417 417 expFinder = glob.glob1(experimentCfgPath,'*.exp')
418 418 if len(expFinder)== 0:
419 419 self.status = 0
420 420 return None
421 421
422 422 experimentFilename = os.path.join(experimentCfgPath,expFinder[0])
423 423
424 424 f = open(experimentFilename)
425 425 lines = f.readlines()
426 426 f.close()
427 427
428 428 parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
429 429 filterList = [fnmatch.filter(lines, x) for x in parmsList]
430 430
431 431
432 432 values = [re.sub(r'\D',"",x[0]) for x in filterList]
433 433
434 434 self.npulsesint_fromfile = int(values[0])
435 435 self.recordsperfile_fromfile = int(values[1])
436 436 self.nbeamcodes_fromfile = int(values[2])
437 437 self.ngates_fromfile = int(values[3])
438 438
439 439 tufileFinder = fnmatch.filter(lines, 'tufile=*')
440 440 tufile = tufileFinder[0].split('=')[1].split('\n')[0]
441 441 tufile = tufile.split('\r')[0]
442 442 tufilename = os.path.join(experimentCfgPath,tufile)
443 443
444 444 f = open(tufilename)
445 445 lines = f.readlines()
446 446 f.close()
447 447 self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6
448 448
449 449
450 450 self.status = 1
451 451
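# Hedged illustration of the extraction above (the sample line is hypothetical):
# with an .exp line such as 'npulsesint=128\n',
# fnmatch.filter(lines, 'npulsesint*') keeps that line and
# re.sub(r'\D', "", 'npulsesint=128\n') strips every non-digit, leaving '128',
# which is then cast to int.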
452 452 def __setIdsAndArrays(self):
453 453 self.dataByFrame = self.__setDataByFrame()
454 454 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[0, :]
455 455 self.readRanges()
456 456 self.index_amisr_sample, self.index_amisr_buffer = self.radacHeaderObj.getIndexRangeToPulse(0)
457 457 self.radacTimeByFrame = numpy.zeros(self.radacHeaderObj.npulses)
458 458 if len(self.index_amisr_buffer) > 0:
459 459 self.buffer_radactime = numpy.zeros_like(self.radacTimeByFrame)
460 460
461 461
462 462 def __setNextFile(self,online=False):
463 463
464 464 if not(online):
465 465 newFile = self.__setNextFileOffline()
466 466 else:
467 467 newFile = self.__setNextFileOnline()
468 468
469 469 if not(newFile):
470 470 return 0
471 471
472 472 self.__readHeader()
473 473
474 474 if self.__firstFile:
475 475 self.__setIdsAndArrays()
476 476 self.__firstFile = False
477 477
478 478 self.__getBeamCode()
479 479 self.readDataBlock()
480 480
481 481
482 482 def setup(self,path=None,
483 483 startDate=None,
484 484 endDate=None,
485 485 startTime=datetime.time(0,0,0),
486 486 endTime=datetime.time(23,59,59),
487 487 walk=True,
488 488 timezone='ut',
489 489 all=0,
490 490 online=False):
491 491
492 492 self.timezone = timezone
493 493 self.all = all
494 494 self.online = online
495 495 if not(online):
496 496 #Offline file search
497 self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)
497 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
498 498 else:
499 self.__searchFilesOnline(path, walk)
499 self.searchFilesOnLine(path, walk)
500 500
501 501 if not(self.filenameList):
502 502 print "There are no files in the folder: %s"%(path)
503 503
504 504 sys.exit(-1)
505 505
506 506 self.__getExpParameters()
507 507
508 508 self.fileIndex = -1
509 509
510 510 self.__setNextFile(online)
511 511
512 512 # first_beamcode = self.radacHeaderObj.beamCodeByPulse[0,0]
513 513 # index = numpy.where(self.radacHeaderObj.beamCodeByPulse[0,:]!=first_beamcode)[0][0]
514 514 self.profileIndex_offset = self.radacHeaderObj.pulseCount[0,:][0]
515 515 self.profileIndex = self.profileIndex_offset
516 516
517 517 def readRanges(self):
518 518 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
519 519
520 520 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
521 521 return self.rangeFromFile
522 522
523 523
524 524 def readRadacTime(self,idrecord, range1, range2):
525 525 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
526 526
527 527 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
528 528 #radacTimeByFrame = dataset[idrecord - 1,range1]
529 529 #radacTimeByFrame = dataset[idrecord,range2]
530 530
531 531 return radacTimeByFrame
532 532
533 533 def readBeamCode(self, idrecord, range1, range2):
534 534 dataset = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode')
535 535 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
536 536 self.beamCodesFromFile = dataset.value
537 537
538 538 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
539 539 #beamcodeByFrame[range2] = dataset[idrecord, range2]
540 540 beamcodeByFrame[range1] = dataset[idrecord, range1]
541 541 beamcodeByFrame[range2] = dataset[idrecord, range2]
542 542
543 543 return beamcodeByFrame
544 544
545 545
546 546 def __setDataByFrame(self):
547 547 ndata = 2 # two components because the samples are complex
548 548 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
549 549 return dataByFrame
550 550
551 551 def __readDataSet(self):
552 552 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
553 553 return dataset
554 554
555 555 def __setDataBlock(self,):
556 556 real = self.dataByFrame[:,:,0] #assume index 0 holds the real part
557 557 imag = self.dataByFrame[:,:,1] #assume index 1 holds the imaginary part
558 558 datablock = real + imag*1j #build the complex array
559 559 return datablock
560 560
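# Hedged sketch of what __setDataBlock does, using a tiny made-up array of
# shape (pulses, samples, 2) where the last axis holds [real, imag]:
#
#   dataByFrame = numpy.array([[[1., 2.], [3., 4.]]])
#   real, imag = dataByFrame[:, :, 0], dataByFrame[:, :, 1]
#   real + imag*1j  # -> array([[ 1.+2.j,  3.+4.j]])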
561 561 def readSamples_version1(self,idrecord):
562 562 #these first lines should run only once
563 563 if self.flagIsNewFile:
564 564 #reading dataset
565 565 self.dataset = self.__readDataSet()
566 566 self.flagIsNewFile = 0
567 567
568 568 if idrecord == 0:
569 569 self.dataByFrame[self.index4_schain_datablock, : ,:] = self.dataset[0, self.index_amisr_sample,:,:]
570 570 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[0, self.index_amisr_sample]
571 571 datablock = self.__setDataBlock()
572 572 if len(self.index_amisr_buffer) > 0:
573 573 self.buffer = self.dataset[0, self.index_amisr_buffer,:,:]
574 574 self.buffer_radactime = self.radacHeaderObj.radacTime[0, self.index_amisr_buffer]
575 575
576 576 return datablock
577 577 if len(self.index_amisr_buffer) > 0:
578 578 self.dataByFrame[self.index4_buffer,:,:] = self.buffer.copy()
579 579 self.radacTimeByFrame[self.index4_buffer] = self.buffer_radactime.copy()
580 580 self.dataByFrame[self.index4_schain_datablock,:,:] = self.dataset[idrecord, self.index_amisr_sample,:,:]
581 581 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_sample]
582 582 datablock = self.__setDataBlock()
583 583 if len(self.index_amisr_buffer) > 0:
584 584 self.buffer = self.dataset[idrecord, self.index_amisr_buffer, :, :]
585 585 self.buffer_radactime = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_buffer]
586 586
587 587 return datablock
588 588
589 589
590 590 def readSamples(self,idrecord):
591 591 if self.flagIsNewFile:
592 592 self.dataByFrame = self.__setDataByFrame()
593 593 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
594 594
595 595 #reading ranges
596 596 self.readRanges()
597 597 #reading dataset
598 598 self.dataset = self.__readDataSet()
599 599
600 600 self.flagIsNewFile = 0
601 601 self.radacTimeByFrame = self.radacHeaderObj.radacTime.value[idrecord, :]
602 602 self.dataByFrame = self.dataset[idrecord, :, :, :]
603 603 datablock = self.__setDataBlock()
604 604 return datablock
605 605
606 606
607 607 def readDataBlock(self):
608 608
609 609 self.datablock = self.readSamples_version1(self.idrecord_count)
610 610 #self.datablock = self.readSamples(self.idrecord_count)
611 611 #print 'record:', self.idrecord_count
612 612
613 613 self.idrecord_count += 1
614 614 self.profileIndex = 0
615 615
616 616 if self.idrecord_count >= self.radacHeaderObj.nrecords:
617 617 self.idrecord_count = 0
618 618 self.flagIsNewFile = 1
619 619
620 620 def readNextBlock(self):
621 621
622 622 self.readDataBlock()
623 623
624 624 if self.flagIsNewFile:
625 625 self.__setNextFile(self.online)
626 626 pass
627 627
628 628 def __hasNotDataInBuffer(self):
629 629 #self.radacHeaderObj.npulses should be a separate variable to account for the number of pulses taken in the first and last records
630 630 if self.profileIndex >= self.radacHeaderObj.npulses:
631 631 return 1
632 632 return 0
633 633
634 634 def printUTC(self):
635 635 print self.dataOut.utctime
636 636 print ''
637 637
638 638 def setObjProperties(self):
639 639
640 640 self.dataOut.heightList = self.rangeFromFile/1000.0 #km
641 641 self.dataOut.nProfiles = self.radacHeaderObj.npulses
642 642 self.dataOut.nRecords = self.radacHeaderObj.nrecords
643 643 self.dataOut.nBeams = self.radacHeaderObj.nbeams
644 644 self.dataOut.ippSeconds = self.ippSeconds_fromfile
645 645 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
646 646 self.dataOut.frequency = self.frequency_h5file
647 647 self.dataOut.npulseByFrame = self.npulseByFrame
648 648 self.dataOut.nBaud = None
649 649 self.dataOut.nCode = None
650 650 self.dataOut.code = None
651 651
652 652 self.dataOut.beamCodeDict = self.beamCodeDict
653 653 self.dataOut.beamRangeDict = self.beamRangeDict
654 654
655 655 if self.timezone == 'lt':
656 656 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
657 657 else:
658 658 self.dataOut.timeZone = 0 #by default time is UTC
659 659
660 660 def getData(self):
661 661
662 662 if self.flagNoMoreFiles:
663 663 self.dataOut.flagNoData = True
664 664 print 'Process finished'
665 665 return 0
666 666
667 667 if self.__hasNotDataInBuffer():
668 668 self.readNextBlock()
669 669
670 670
671 671 if self.datablock is None: # set this condition when there is no more data to read
672 672 self.dataOut.flagNoData = True
673 673 return 0
674 674
675 675 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
676 676
677 677 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
678 678 self.dataOut.profileIndex = self.profileIndex
679 679 self.dataOut.flagNoData = False
680 680
681 681 self.profileIndex += 1
682 682
683 683 return self.dataOut.data
684 684
685 685
686 686 def run(self, **kwargs):
687 687 if not(self.isConfig):
688 688 self.setup(**kwargs)
689 689 self.setObjProperties()
690 690 self.isConfig = True
691 691
692 692 self.getData()
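# Hedged usage sketch for this reader (the path and dates are hypothetical); the
# framework normally calls run() once per iteration, which runs setup() on the
# first pass and getData() on every call:
#
#   reader.run(path='/data/amisr',                   # hypothetical path
#              startDate=datetime.date(2015, 11, 1), # hypothetical dates
#              endDate=datetime.date(2015, 11, 1),
#              timezone='ut', online=False)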
@@ -1,1855 +1,1794
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
27 27 def isNumber(cad):
28 28 """
29 29 Checks whether the characters that make up a string can be converted to a number.
30 30
31 31 Exceptions:
32 32 If the given string cannot be converted to a number
33 33 Input:
34 34 str, the string to analyze to determine whether it can be converted to a number
35 35
36 36 Return:
37 37 True : if the string is numeric
38 38 False : if the string is not numeric
39 39 """
40 40 try:
41 41 float( cad )
42 42 return True
43 43 except:
44 44 return False
45 45
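# A minimal, hedged usage sketch for isNumber(); the sample strings are
# illustrative only and do not come from real data files.
def _isNumber_examples():
    assert isNumber("3.14")
    assert isNumber("-5e3")
    assert not isNumber("d2015305012.r")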
46 46 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
47 47 """
48 48 This function determines whether a data file falls within the specified date range.
49 49
50 50 Inputs:
51 51 filename : full name of the data file in Jicamarca format (.r)
52 52
53 53 startUTSeconds : start of the selected range, given in
54 54 seconds counted from 01/01/1970.
55 55 endUTSeconds : end of the selected range, given in
56 56 seconds counted from 01/01/1970.
57 57
58 58 Return:
59 59 Boolean : returns True if the data file contains data within the
60 60 specified range, otherwise returns False.
61 61
62 62 Exceptions:
63 63 If the file does not exist or cannot be opened
64 64 If the header cannot be read.
65 65
66 66 """
67 67 basicHeaderObj = BasicHeader(LOCALTIME)
68 68
69 69 try:
70 70 fp = open(filename,'rb')
71 71 except IOError:
72 72 print "The file %s can't be opened" %(filename)
73 73 return 0
74 74
75 75 sts = basicHeaderObj.read(fp)
76 76 fp.close()
77 77
78 78 if not(sts):
79 79 print "Skipping the file %s because it does not have a valid header" %(filename)
80 80 return 0
81 81
82 82 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
83 83 return 0
84 84
85 85 return 1
86 86
87 87 def isTimeInRange(thisTime, startTime, endTime):
88 88
89 89 if endTime >= startTime:
90 90 if (thisTime < startTime) or (thisTime > endTime):
91 91 return 0
92 92
93 93 return 1
94 94 else:
95 95 if (thisTime < startTime) and (thisTime > endTime):
96 96 return 0
97 97
98 98 return 1
99 99
100 100 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
101 101 """
102 102 Returns 1 if the data file falls within the specified time range.
103 103
104 104 Inputs:
105 105 filename : full name of the data file in Jicamarca format (.r)
106 106
107 107 startDate : start date of the selected range, as a datetime.date
108 108
109 109 endDate : end date of the selected range, as a datetime.date
110 110
111 111 startTime : start time of the selected range, as a datetime.time
112 112
113 113 endTime : end time of the selected range, as a datetime.time
114 114
115 115 Return:
116 116 Boolean : returns True if the data file contains data within the
117 117 specified range, otherwise returns False.
118 118
119 119 Exceptions:
120 120 If the file does not exist or cannot be opened
121 121 If the header cannot be read.
122 122
123 123 """
124 124
125 125
126 126 try:
127 127 fp = open(filename,'rb')
128 128 except IOError:
129 129 print "The file %s can't be opened" %(filename)
130 130 return None
131 131
132 132 firstBasicHeaderObj = BasicHeader(LOCALTIME)
133 133 systemHeaderObj = SystemHeader()
134 134 radarControllerHeaderObj = RadarControllerHeader()
135 135 processingHeaderObj = ProcessingHeader()
136 136
137 137 lastBasicHeaderObj = BasicHeader(LOCALTIME)
138 138
139 139 sts = firstBasicHeaderObj.read(fp)
140 140
141 141 if not(sts):
142 142 print "[Reading] Skipping the file %s because it does not have a valid header" %(filename)
143 143 return None
144 144
145 145 if not systemHeaderObj.read(fp):
146 146 return None
147 147
148 148 if not radarControllerHeaderObj.read(fp):
149 149 return None
150 150
151 151 if not processingHeaderObj.read(fp):
152 152 return None
153 153
154 154 filesize = os.path.getsize(filename)
155 155
156 156 offset = processingHeaderObj.blockSize + 24 #header size
157 157
158 158 if filesize <= offset:
159 159 print "[Reading] %s: This file does not have enough data" %filename
160 160 return None
161 161
162 162 fp.seek(-offset, 2)
163 163
164 164 sts = lastBasicHeaderObj.read(fp)
165 165
166 166 fp.close()
167 167
168 168 thisDatetime = lastBasicHeaderObj.datatime
169 169 thisTime_last_block = thisDatetime.time()
170 170
171 171 thisDatetime = firstBasicHeaderObj.datatime
172 172 thisDate = thisDatetime.date()
173 173 thisTime_first_block = thisDatetime.time()
174 174
175 175 #General case
176 176 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
177 177 #-----------o----------------------------o-----------
178 178 # startTime endTime
179 179
180 180 if endTime >= startTime:
181 181 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
182 182 return None
183 183
184 184 return thisDatetime
185 185
186 186 #If endTime < startTime then endTime belongs to the next day
187 187
188 188
189 189 #<<<<<<<<<<<o o>>>>>>>>>>>
190 190 #-----------o----------------------------o-----------
191 191 # endTime startTime
192 192
193 193 if (thisDate == startDate) and (thisTime_last_block < startTime):
194 194 return None
195 195
196 196 if (thisDate == endDate) and (thisTime_first_block > endTime):
197 197 return None
198 198
199 199 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
200 200 return None
201 201
202 202 return thisDatetime
203 203
204 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 205 """
206 206 Returns 1 if the data folder falls within the specified date range.
207 207
208 208 Inputs:
209 209 folder : full name of the directory.
210 210 Its format should be "/path_root/?YYYYDDD"
211 211
212 212 where:
213 213 YYYY : year (e.g. 2015)
214 214 DDD : day of the year (e.g. 305)
215 215
216 216 startDate : start date of the selected range, as a datetime.date
217 217
218 218 endDate : end date of the selected range, as a datetime.date
219 219
220 220 Return:
221 221 Boolean : returns True if the folder contains data within the
222 222 specified date range, otherwise returns False.
223 223 Exceptions:
224 224 If the directory does not have the expected format
225 225 """
226 226
227 227 basename = os.path.basename(folder)
228 228
229 229 if not isRadarFolder(basename):
230 230 print "The folder %s does not have the right format" %folder
231 231 return 0
232 232
233 233 if startDate and endDate:
234 234 thisDate = getDateFromRadarFolder(basename)
235 235
236 236 if thisDate < startDate:
237 237 return 0
238 238
239 239 if thisDate > endDate:
240 240 return 0
241 241
242 242 return 1
243 243
244 244 def isFileInDateRange(filename, startDate=None, endDate=None):
245 245 """
246 246 Returns 1 if the data file falls within the specified date range.
247 247
248 248 Inputs:
249 249 filename : full name of the data file in Jicamarca format (.r)
250 250
251 251 Its format should be "?YYYYDDDsss"
252 252
253 253 where:
254 254 YYYY : year (e.g. 2015)
255 255 DDD : day of the year (e.g. 305)
256 256 sss : set
257 257
258 258 startDate : start date of the selected range, as a datetime.date
259 259
260 260 endDate : end date of the selected range, as a datetime.date
261 261
262 262 Return:
263 263 Boolean : returns True if the data file contains data within the
264 264 specified date range, otherwise returns False.
265 265 Exceptions:
266 266 If the file does not have the expected format
267 267 """
268 268
269 269 basename = os.path.basename(filename)
270 270
271 271 if not isRadarFile(basename):
272 272 print "The filename %s does not have the right format" %filename
273 273 return 0
274 274
275 275 if startDate and endDate:
276 276 thisDate = getDateFromRadarFile(basename)
277 277
278 278 if thisDate < startDate:
279 279 return 0
280 280
281 281 if thisDate > endDate:
282 282 return 0
283 283
284 284 return 1
285 285
286 286 def getFileFromSet(path, ext, set):
287 287 validFilelist = []
288 288 fileList = os.listdir(path)
289 289
290 290 # 0 1234 567 89A BCDE
291 291 # H YYYY DDD SSS .ext
292 292
293 293 for thisFile in fileList:
294 294 try:
295 295 year = int(thisFile[1:5])
296 296 doy = int(thisFile[5:8])
297 297 except:
298 298 continue
299 299
300 300 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
301 301 continue
302 302
303 303 validFilelist.append(thisFile)
304 304
305 305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
306 306
307 307 if len(myfile)!= 0:
308 308 return myfile[0]
309 309 else:
310 310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
311 311 print 'the filename %s does not exist'%filename
312 312 print '...going to the last file: '
313 313
314 314 if validFilelist:
315 315 validFilelist = sorted( validFilelist, key=str.lower )
316 316 return validFilelist[-1]
317 317
318 318 return None
319 319
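# Hedged sketch of the "xYYYYDDDSSS.ext" naming convention that getFileFromSet()
# and getlastFileFromPath() rely on; the filename below is hypothetical.
def _filename_layout_example():
    thisFile = 'd2015305012.r'   # hypothetical voltage file
    year = int(thisFile[1:5])    # -> 2015
    doy = int(thisFile[5:8])     # -> 305 (day of year)
    set = int(thisFile[8:11])    # -> 12  (set number)
    return year, doy, set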
320 320 def getlastFileFromPath(path, ext):
321 321 """
322 322 Filters fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
323 323 and returns the last file of the list that remains after the filtering.
324 324
325 325 Input:
326 326 fileList : list containing all the files (without path) of a given folder
327 327 ext : extension of the files contained in the folder
328 328
329 329 Return:
330 330 The last file of the given folder, without the path.
331 331 """
332 332 validFilelist = []
333 333 fileList = os.listdir(path)
334 334
335 335 # 0 1234 567 89A BCDE
336 336 # H YYYY DDD SSS .ext
337 337
338 338 for thisFile in fileList:
339 339
340 340 year = thisFile[1:5]
341 341 if not isNumber(year):
342 342 continue
343 343
344 344 doy = thisFile[5:8]
345 345 if not isNumber(doy):
346 346 continue
347 347
348 348 year = int(year)
349 349 doy = int(doy)
350 350
351 351 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
352 352 continue
353 353
354 354 validFilelist.append(thisFile)
355 355
356 356 if validFilelist:
357 357 validFilelist = sorted( validFilelist, key=str.lower )
358 358 return validFilelist[-1]
359 359
360 360 return None
361 361
362 362 def checkForRealPath(path, foldercounter, year, doy, set, ext):
363 363 """
364 364 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
365 365 It tries several upper/lower-case name combinations to determine
366 366 the exact path of a given file.
367 367
368 368 Example :
369 369 the correct file name is .../.../D2009307/P2009307367.ext
370 370
371 371 The function then tries the following combinations
372 372 .../.../y2009307367.ext
373 373 .../.../Y2009307367.ext
374 374 .../.../x2009307/y2009307367.ext
375 375 .../.../x2009307/Y2009307367.ext
376 376 .../.../X2009307/y2009307367.ext
377 377 .../.../X2009307/Y2009307367.ext
378 378 in this case the last letter combination is identical to the file being searched for
379 379
380 380 Return:
381 381 If a suitable combination is found, it returns the full path and the file name;
382 382 otherwise it returns None as the path and the last upper-case name combination
383 383 as the filename
384 384 """
385 385 fullfilename = None
386 386 find_flag = False
387 387 filename = None
388 388
389 389 prefixDirList = [None,'d','D']
390 390 if ext.lower() == ".r": #voltage
391 391 prefixFileList = ['d','D']
392 392 elif ext.lower() == ".pdata": #spectra
393 393 prefixFileList = ['p','P']
394 394 else:
395 395 return None, filename
396 396
397 397 #sweep through the possible combinations
398 398 for prefixDir in prefixDirList:
399 399 thispath = path
400 400 if prefixDir != None:
401 401 #build the directory name xYYYYDDD (x=d or x=D)
402 402 if foldercounter == 0:
403 403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
404 404 else:
405 405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
406 406 for prefixFile in prefixFileList: #sweep through the two possible prefix letters
407 407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
408 408 fullfilename = os.path.join( thispath, filename ) #build the full path
409 409
410 410 if os.path.exists( fullfilename ): #check that it exists
411 411 find_flag = True
412 412 break
413 413 if find_flag:
414 414 break
415 415
416 416 if not(find_flag):
417 417 return None, filename
418 418
419 419 return fullfilename, filename
420 420
421 421 def isRadarFolder(folder):
422 422 try:
423 423 year = int(folder[1:5])
424 424 doy = int(folder[5:8])
425 425 except:
426 426 return 0
427 427
428 428 return 1
429 429
430 430 def isRadarFile(file):
431 431 try:
432 432 year = int(file[1:5])
433 433 doy = int(file[5:8])
434 434 set = int(file[8:11])
435 435 except:
436 436 return 0
437 437
438 438 return 1
439 439
440 440 def getDateFromRadarFile(file):
441 441 try:
442 442 year = int(file[1:5])
443 443 doy = int(file[5:8])
444 444 set = int(file[8:11])
445 445 except:
446 446 return None
447 447
448 448 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
449 449 return thisDate
450 450
451 451 def getDateFromRadarFolder(folder):
452 452 try:
453 453 year = int(folder[1:5])
454 454 doy = int(folder[5:8])
455 455 except:
456 456 return None
457 457
458 458 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
459 459 return thisDate
460 460
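# Hedged worked example of the day-of-year conversion used by
# getDateFromRadarFile() and getDateFromRadarFolder(); the values are hypothetical.
def _doy_conversion_example():
    year, doy = 2015, 305   # e.g. parsed from a folder named 'd2015305'
    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
    # thisDate -> datetime.date(2015, 11, 1)
    return thisDate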
461 461 class JRODataIO:
462 462
463 463 c = 3E8
464 464
465 465 isConfig = False
466 466
467 467 basicHeaderObj = None
468 468
469 469 systemHeaderObj = None
470 470
471 471 radarControllerHeaderObj = None
472 472
473 473 processingHeaderObj = None
474 474
475 475 dtype = None
476 476
477 477 pathList = []
478 478
479 479 filenameList = []
480 480
481 481 filename = None
482 482
483 483 ext = None
484 484
485 485 flagIsNewFile = 1
486 486
487 487 flagDiscontinuousBlock = 0
488 488
489 489 flagIsNewBlock = 0
490 490
491 491 fp = None
492 492
493 493 firstHeaderSize = 0
494 494
495 495 basicHeaderSize = 24
496 496
497 497 versionFile = 1103
498 498
499 499 fileSize = None
500 500
501 501 # ippSeconds = None
502 502
503 503 fileSizeByHeader = None
504 504
505 505 fileIndex = None
506 506
507 507 profileIndex = None
508 508
509 509 blockIndex = None
510 510
511 511 nTotalBlocks = None
512 512
513 513 maxTimeStep = 30
514 514
515 515 lastUTTime = None
516 516
517 517 datablock = None
518 518
519 519 dataOut = None
520 520
521 521 blocksize = None
522 522
523 523 getByBlock = False
524 524
525 525 def __init__(self):
526 526
527 527 raise NotImplementedError
528 528
529 529 def run(self):
530 530
531 531 raise NotImplementedError
532 532
533 533 def getDtypeWidth(self):
534 534
535 535 dtype_index = get_dtype_index(self.dtype)
536 536 dtype_width = get_dtype_width(dtype_index)
537 537
538 538 return dtype_width
539 539
540 540 def getAllowedArgs(self):
541 541 return inspect.getargspec(self.run).args
542 542
543 543 class JRODataReader(JRODataIO):
544 544
545 firstTime = True
546 545 online = 0
547 546
548 547 realtime = 0
549 548
550 549 nReadBlocks = 0
551 550
552 551 delay = 10 #number of seconds waiting a new file
553 552
554 553 nTries = 3 #quantity tries
555 554
556 555 nFiles = 3 #number of files for searching
557 556
558 557 path = None
559 558
560 559 foldercounter = 0
561 560
562 561 flagNoMoreFiles = 0
563 562
564 563 datetimeList = []
565 564
566 565 __isFirstTimeOnline = 1
567 566
568 567 __printInfo = True
569 568
570 569 profileIndex = None
571 570
572 571 nTxs = 1
573 572
574 573 txIndex = None
575 574
576 575 #Added--------------------
577 576
578 577 selBlocksize = None
579 578
580 579 selBlocktime = None
581 580
582 onlineWithDate = False
583 581 def __init__(self):
584 582
585 583 """
586 584 This class is used to find data files
587 585
588 586 Example:
589 587 reader = JRODataReader()
590 588 fileList = reader.findDataFiles()
591 589
592 590 """
593 591 pass
594 592
595 593
596 594 def createObjByDefault(self):
597 595 """
598 596
599 597 """
600 598 raise NotImplementedError
601 599
602 600 def getBlockDimension(self):
603 601
604 602 raise NotImplementedError
605 603
606 def __searchFilesOffLine(self,
604 def searchFilesOffLine(self,
607 605 path,
608 606 startDate=None,
609 607 endDate=None,
610 608 startTime=datetime.time(0,0,0),
611 609 endTime=datetime.time(23,59,59),
612 610 set=None,
613 611 expLabel='',
614 612 ext='.r',
615 queue=None,
616 613 cursor=None,
617 614 skip=None,
618 615 walk=True):
616
619 617 self.filenameList = []
620 618 self.datetimeList = []
621 619
622 620 pathList = []
623 621
624 622 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
625 623
626 624 if dateList == []:
627 # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
628 return None, None
625 return [], []
629 626
630 627 if len(dateList) > 1:
631 628 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
632 629 else:
633 630 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
634 631
635 632 filenameList = []
636 633 datetimeList = []
637 634
638 635 for thisPath in pathList:
639 # thisPath = pathList[pathDict[file]]
640 636
641 637 fileList = glob.glob1(thisPath, "*%s" %ext)
642 638 fileList.sort()
643 639
644 640 skippedFileList = []
645 641
646 642 if cursor is not None and skip is not None:
647 # if cursor*skip > len(fileList):
643
648 644 if skip == 0:
649 if queue is not None:
650 queue.put(len(fileList))
651 645 skippedFileList = []
652 646 else:
653 647 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
654 648
655 649 else:
656 650 skippedFileList = fileList
657 651
658 652 for file in skippedFileList:
659 653
660 654 filename = os.path.join(thisPath,file)
661 655
662 656 if not isFileInDateRange(filename, startDate, endDate):
663 657 continue
664 658
665 659 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
666 660
667 661 if not(thisDatetime):
668 662 continue
669 663
670 664 filenameList.append(filename)
671 665 datetimeList.append(thisDatetime)
672 666
673 667 if not(filenameList):
674 668 print "[Reading] Invalid time range selected [%s - %s]: no *%s files in %s" %(startTime, endTime, ext, path)
675 return None, None
669 return [], []
676 670
677 671 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
678 672 print
679 673
680 for i in range(len(filenameList)):
681 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
674 # for i in range(len(filenameList)):
675 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
682 676
683 677 self.filenameList = filenameList
684 678 self.datetimeList = datetimeList
679
685 680 return pathList, filenameList
686 681
687 def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True, set=None, startDate=None, startTime=None):
682 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
688 683
689 684 """
690 685 Searches for the last file of the last folder (whether or not determined by startDateTime) and
691 686 returns the file found along with other data.
692 687
693 688 Input:
694 689 path : folder that contains the data files
695 690
696 691 expLabel : name of the sub-experiment (subfolder)
697 692
698 693 ext : extension of the files
699 694
700 walk : if enabled, the search descends into the day subdirectories (doypath)
695 walk : if enabled, the search descends into the day subdirectories (doypath)
701 696
702 697 Return:
703 698 directory : the directory where the file was found
704 699 filename : the last file of the given folder
705 700 year : the year
706 701 doy : the day of the year
707 702 set : the set number of the file
708 703
709 704
710 705 """
711 pathList = None
712 filenameList = None
713 706 if not os.path.isdir(path):
714 707 return None, None, None, None, None, None
715 708
716 709 dirList = []
717 710
718 711 if not walk:
719 712 fullpath = path
720 713 foldercounter = 0
721 714 else:
722 715 # Keep only the directories
723 716 for thisPath in os.listdir(path):
724 717 if not os.path.isdir(os.path.join(path,thisPath)):
725 718 continue
726 719 if not isRadarFolder(thisPath):
727 720 continue
728 721
729 722 dirList.append(thisPath)
730 723
731 724 if not(dirList):
732 725 return None, None, None, None, None, None
733 726
734 727 dirList = sorted( dirList, key=str.lower )
735 728
736 729 doypath = dirList[-1]
737 730 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
738 731 fullpath = os.path.join(path, doypath, expLabel)
739 732
740 733
741 734 print "[Reading] %s folder was found: " %(fullpath )
742 735
743 736 if set == None:
744 737 filename = getlastFileFromPath(fullpath, ext)
745 738 else:
746 739 filename = getFileFromSet(fullpath, ext, set)
747 740
748 741 if not(filename):
749 742 return None, None, None, None, None, None
750 743
751 744 print "[Reading] %s file was found" %(filename)
752 745
753 746 if not(self.__verifyFile(os.path.join(fullpath, filename))):
754 747 return None, None, None, None, None, None
755 748
756 749 year = int( filename[1:5] )
757 750 doy = int( filename[5:8] )
758 751 set = int( filename[8:11] )
759 752
760 753 return fullpath, foldercounter, filename, year, doy, set
761 754
762 755 def __setNextFileOffline(self):
763 756
764 757 idFile = self.fileIndex
765 758
766 759 while (True):
767 760 idFile += 1
768 761 if not(idFile < len(self.filenameList)):
769 762 self.flagNoMoreFiles = 1
770 763 # print "[Reading] No more Files"
771 764 return 0
772 765
773 766 filename = self.filenameList[idFile]
774 767
775 768 if not(self.__verifyFile(filename)):
776 769 continue
777 770
778 771 fileSize = os.path.getsize(filename)
779 772 fp = open(filename,'rb')
780 773 break
781 774
782 775 self.flagIsNewFile = 1
783 776 self.fileIndex = idFile
784 777 self.filename = filename
785 778 self.fileSize = fileSize
786 779 self.fp = fp
787 780
788 781 #print "[Reading] Setting the file: %s"%self.filename
789 782
790 783 return 1
791 784
792 785 def __setNextFileOnline(self):
793 786 """
794 787 Searches for the next file with enough data to be read, within a specific folder; if
795 788 no valid file is found it waits a given time and then searches among the next n possible
796 789 files.
797 790
798 791 Affected:
799 792 self.flagIsNewFile
800 793 self.filename
801 794 self.fileSize
802 795 self.fp
803 796 self.set
804 797 self.flagNoMoreFiles
805 798
806 799 Return:
807 800 0 : if, after searching for the next valid file, none could be found
808 801 1 : if the file was opened successfully and is ready to be read
809 802
810 803 Exceptions:
811 804 If a given file cannot be opened
812 805 """
813
814 806 nFiles = 0
815 807 fileOk_flag = False
816 808 firstTime_flag = True
817 809
818 810 self.set += 1
819 811
820 812 if self.set > 999:
821 813 self.set = 0
822 814 self.foldercounter += 1
823 815
824 816 #look for the first available file
825 817 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
826 818 if fullfilename:
827 819 if self.__verifyFile(fullfilename, False):
828 820 fileOk_flag = True
829 821
830 822 #if no file is found, wait and search again
831 823 if not(fileOk_flag):
832 824 for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files
833 825
834 826 if firstTime_flag: #on the first pass, loop self.nTries times
835 827 tries = self.nTries
836 828 else:
837 829 tries = 1 #otherwise, try only once
838 830
839 831 for nTries in range( tries ):
840 832 if firstTime_flag:
841 833 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
842 834 sleep( self.delay )
843 835 else:
844 836 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
845 837
846 838 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
847 839 if fullfilename:
848 840 if self.__verifyFile(fullfilename):
849 841 fileOk_flag = True
850 842 break
851 843
852 844 if fileOk_flag:
853 845 break
854 846
855 847 firstTime_flag = False
856 848
857 849 print "\t[Reading] Skipping the file \"%s\" because it does not exist" % filename
858 850 self.set += 1
859 851
860 852 if nFiles == (self.nFiles-1): #if the file is not found, move on and search in the next folder
861 853 self.set = 0
862 854 self.doy += 1
863 855 self.foldercounter = 0
864 856
865 857 if fileOk_flag:
866 858 self.fileSize = os.path.getsize( fullfilename )
867 859 self.filename = fullfilename
868 860 self.flagIsNewFile = 1
869 861 if self.fp != None: self.fp.close()
870 862 self.fp = open(fullfilename, 'rb')
871 863 self.flagNoMoreFiles = 0
872 864 # print '[Reading] Setting the file: %s' % fullfilename
873 865 else:
874 866 self.fileSize = 0
875 867 self.filename = None
876 868 self.flagIsNewFile = 0
877 869 self.fp = None
878 870 self.flagNoMoreFiles = 1
879 871 # print '[Reading] No more files to read'
880 872
881 873 return fileOk_flag
882 874
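# Hedged summary of the polling strategy above: for a missing file the reader
# retries up to self.nTries times on the first pass, sleeping self.delay seconds
# between tries; after self.nFiles missing sets it resets set to 0 and moves on
# to the next day folder (doy += 1). Schematically:
#
#   for nTries in range(self.nTries):
#       sleep(self.delay)
#       # re-check whether the expected xYYYYDDDSSS.ext file now exists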
883 875 def setNextFile(self):
884 876 if self.fp != None:
885 877 self.fp.close()
878
886 879 if self.online:
887 880 newFile = self.__setNextFileOnline()
888 881 else:
889 882 newFile = self.__setNextFileOffline()
883
890 884 if not(newFile):
891 if self.onlineWithDate is True:
892 self.onlineWithDate=False
893 self.online = True
894 self.firstTime = False
895 self.setup(
896 path=self.path,
897 startDate=self.startDate,
898 endDate=self.endDate,
899 startTime=self.startTime ,
900 endTime=self.endTime,
901 set=self.set,
902 expLabel=self.expLabel,
903 ext=self.ext,
904 online=self.online,
905 delay=self.delay,
906 walk=self.walk,
907 getblock=self.getblock,
908 nTxs=self.nTxs,
909 realtime=self.realtime,
910 blocksize=self.blocksize,
911 blocktime=self.blocktime
912 )
913 return 1
914 885 print '[Reading] No more files to read'
915 886 return 0
916 887
917 888 if self.verbose:
918 889 print '[Reading] Setting the file: %s' % self.filename
919 890
920 891 self.__readFirstHeader()
921 892 self.nReadBlocks = 0
922 893 return 1
923 894
924 895 def __waitNewBlock(self):
925 896 """
926 897 Return 1 if a new data block was found, 0 otherwise.
927 898
928 899 If the reading mode is offline it always returns 0
929 900 """
930 901 if not self.online:
931 902 return 0
932 903
933 904 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
934 905 return 0
935 906
936 907 currentPointer = self.fp.tell()
937 908
938 909 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
939 910
940 911 for nTries in range( self.nTries ):
941 912
942 913 self.fp.close()
943 914 self.fp = open( self.filename, 'rb' )
944 915 self.fp.seek( currentPointer )
945 916
946 917 self.fileSize = os.path.getsize( self.filename )
947 918 currentSize = self.fileSize - currentPointer
948 919
949 920 if ( currentSize >= neededSize ):
950 921 self.basicHeaderObj.read(self.fp)
951 922 return 1
952 923
953 924 if self.fileSize == self.fileSizeByHeader:
954 925 # self.flagEoF = True
955 926 return 0
956 927
957 928 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
958 929 sleep( self.delay )
959 930
960 931
961 932 return 0
962 933
963 934 def waitDataBlock(self,pointer_location):
964 935
965 936 currentPointer = pointer_location
966 937
967 938 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
968 939
969 940 for nTries in range( self.nTries ):
970 941 self.fp.close()
971 942 self.fp = open( self.filename, 'rb' )
972 943 self.fp.seek( currentPointer )
973 944
974 945 self.fileSize = os.path.getsize( self.filename )
975 946 currentSize = self.fileSize - currentPointer
976 947
977 948 if ( currentSize >= neededSize ):
978 949 return 1
979 950
980 951 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
981 952 sleep( self.delay )
982 953
983 954 return 0
984 955
985 956 def __jumpToLastBlock(self):
986 957
987 958 if not(self.__isFirstTimeOnline):
988 959 return
989 960
990 961 csize = self.fileSize - self.fp.tell()
991 962 blocksize = self.processingHeaderObj.blockSize
992 963
993 964 #skip the first data block
994 965 if csize > self.processingHeaderObj.blockSize:
995 966 self.fp.seek(self.fp.tell() + blocksize)
996 967 else:
997 968 return
998 969
999 970 csize = self.fileSize - self.fp.tell()
1000 971 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1001 972 while True:
1002 973
1003 974 if self.fp.tell()<self.fileSize:
1004 975 self.fp.seek(self.fp.tell() + neededsize)
1005 976 else:
1006 977 self.fp.seek(self.fp.tell() - neededsize)
1007 978 break
1008 979
1009 980 # csize = self.fileSize - self.fp.tell()
1010 981 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1011 982 # factor = int(csize/neededsize)
1012 983 # if factor > 0:
1013 984 # self.fp.seek(self.fp.tell() + factor*neededsize)
1014 985
1015 986 self.flagIsNewFile = 0
1016 987 self.__isFirstTimeOnline = 0
1017 988
1018 989 def __setNewBlock(self):
1019 990 #if self.server is None:
1020 991 if self.fp == None:
1021 992 return 0
1022 993
1023 994 # if self.online:
1024 995 # self.__jumpToLastBlock()
1025 996
1026 997 if self.flagIsNewFile:
1027 998 self.lastUTTime = self.basicHeaderObj.utc
1028 999 return 1
1029 1000
1030 1001 if self.realtime:
1031 1002 self.flagDiscontinuousBlock = 1
1032 1003 if not(self.setNextFile()):
1033 1004 return 0
1034 1005 else:
1035 1006 return 1
1036 1007 #if self.server is None:
1037 1008 currentSize = self.fileSize - self.fp.tell()
1038 1009 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1039 1010 if (currentSize >= neededSize):
1040 1011 self.basicHeaderObj.read(self.fp)
1041 1012 self.lastUTTime = self.basicHeaderObj.utc
1042 1013 return 1
1043 1014 # else:
1044 1015 # self.basicHeaderObj.read(self.zHeader)
1045 1016 # self.lastUTTime = self.basicHeaderObj.utc
1046 1017 # return 1
1047 1018 if self.__waitNewBlock():
1048 1019 self.lastUTTime = self.basicHeaderObj.utc
1049 1020 return 1
1050 1021 #if self.server is None:
1051 1022 if not(self.setNextFile()):
1052 1023 return 0
1053 1024
1054 1025 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1055 1026 self.lastUTTime = self.basicHeaderObj.utc
1056 1027
1057 1028 self.flagDiscontinuousBlock = 0
1058 1029
1059 1030 if deltaTime > self.maxTimeStep:
1060 1031 self.flagDiscontinuousBlock = 1
1061 1032
1062 1033 return 1
1063 1034
1064 1035 def readNextBlock(self):
1065 1036
1066 1037 #Skip block out of startTime and endTime
1067 1038 while True:
1068 1039 if not(self.__setNewBlock()):
1069 print 'returning'
1070 1040 return 0
1041
1071 1042 if not(self.readBlock()):
1072 1043 return 0
1044
1073 1045 self.getBasicHeader()
1046
1074 1047 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1075 1048
1076 1049 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1077 1050 self.processingHeaderObj.dataBlocksPerFile,
1078 1051 self.dataOut.datatime.ctime())
1079 1052 continue
1080 1053
1081 1054 break
1082 1055
1083 1056 if self.verbose:
1084 1057 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1085 1058 self.processingHeaderObj.dataBlocksPerFile,
1086 1059 self.dataOut.datatime.ctime())
1087 1060 return 1
1088 1061
1089 1062 def __readFirstHeader(self):
1090 1063
1091 1064 self.basicHeaderObj.read(self.fp)
1092 1065 self.systemHeaderObj.read(self.fp)
1093 1066 self.radarControllerHeaderObj.read(self.fp)
1094 1067 self.processingHeaderObj.read(self.fp)
1095 1068
1096 1069 self.firstHeaderSize = self.basicHeaderObj.size
1097 1070
1098 1071 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1099 1072 if datatype == 0:
1100 1073 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1101 1074 elif datatype == 1:
1102 1075 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1103 1076 elif datatype == 2:
1104 1077 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1105 1078 elif datatype == 3:
1106 1079 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1107 1080 elif datatype == 4:
1108 1081 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1109 1082 elif datatype == 5:
1110 1083 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1111 1084 else:
1112 1085 raise ValueError, 'Data type was not defined'
1113 1086
1114 1087 self.dtype = datatype_str
1115 1088 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1116 1089 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1117 1090 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1118 1091 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1119 1092 self.getBlockDimension()
1120 1093
1121 1094 def __verifyFile(self, filename, msgFlag=True):
1122 1095
1123 1096 msg = None
1124 1097
1125 1098 try:
1126 1099 fp = open(filename, 'rb')
1127 1100 except IOError:
1128 1101
1129 1102 if msgFlag:
1130 1103 print "[Reading] File %s can't be opened" % (filename)
1131 1104
1132 1105 return False
1133 1106
1134 1107 currentPosition = fp.tell()
1135 1108 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1136 1109
1137 1110 if neededSize == 0:
1138 1111 basicHeaderObj = BasicHeader(LOCALTIME)
1139 1112 systemHeaderObj = SystemHeader()
1140 1113 radarControllerHeaderObj = RadarControllerHeader()
1141 1114 processingHeaderObj = ProcessingHeader()
1142 1115
1143 1116 if not( basicHeaderObj.read(fp) ):
1144 1117 fp.close()
1145 1118 return False
1146 1119
1147 1120 if not( systemHeaderObj.read(fp) ):
1148 1121 fp.close()
1149 1122 return False
1150 1123
1151 1124 if not( radarControllerHeaderObj.read(fp) ):
1152 1125 fp.close()
1153 1126 return False
1154 1127
1155 1128 if not( processingHeaderObj.read(fp) ):
1156 1129 fp.close()
1157 1130 return False
1158 1131
1159 1132 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1160 1133 else:
1161 1134 msg = "[Reading] Skipping the file %s because it does not have enough data" %filename
1162 1135
1163 1136 fp.close()
1164 1137
1165 1138 fileSize = os.path.getsize(filename)
1166 1139 currentSize = fileSize - currentPosition
1167 1140
1168 1141 if currentSize < neededSize:
1169 1142 if msgFlag and (msg != None):
1170 1143 print msg
1171 1144 return False
1172 1145
1173 1146 return True
1174 1147
1175 1148 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1176 1149
1177 1150 path_empty = True
1178 1151
1179 1152 dateList = []
1180 1153 pathList = []
1181 1154
1182 1155 multi_path = path.split(',')
1183 1156
1184 1157 if not walk:
1185 1158
1186 1159 for single_path in multi_path:
1187 1160
1188 1161 if not os.path.isdir(single_path):
1189 1162 continue
1190 1163
1191 1164 fileList = glob.glob1(single_path, "*"+ext)
1192 1165
1193 1166 if not fileList:
1194 1167 continue
1195 1168
1196 1169 path_empty = False
1197 1170
1198 1171 fileList.sort()
1199 1172
1200 1173 for thisFile in fileList:
1201 1174
1202 1175 if not os.path.isfile(os.path.join(single_path, thisFile)):
1203 1176 continue
1204 1177
1205 1178 if not isRadarFile(thisFile):
1206 1179 continue
1207 1180
1208 1181 if not isFileInDateRange(thisFile, startDate, endDate):
1209 1182 continue
1210 1183
1211 1184 thisDate = getDateFromRadarFile(thisFile)
1212 1185
1213 1186 if thisDate in dateList:
1214 1187 continue
1215 1188
1216 1189 dateList.append(thisDate)
1217 1190 pathList.append(single_path)
1218 1191
1219 1192 else:
1220 1193 for single_path in multi_path:
1221 1194
1222 1195 if not os.path.isdir(single_path):
1223 1196 continue
1224 1197
1225 1198 dirList = []
1226 1199
1227 1200 for thisPath in os.listdir(single_path):
1228 1201
1229 1202 if not os.path.isdir(os.path.join(single_path,thisPath)):
1230 1203 continue
1231 1204
1232 1205 if not isRadarFolder(thisPath):
1233 1206 continue
1234 1207
1235 1208 if not isFolderInDateRange(thisPath, startDate, endDate):
1236 1209 continue
1237 1210
1238 1211 dirList.append(thisPath)
1239 1212
1240 1213 if not dirList:
1241 1214 continue
1242 1215
1243 1216 dirList.sort()
1244 1217
1245 1218 for thisDir in dirList:
1246 1219
1247 1220 datapath = os.path.join(single_path, thisDir, expLabel)
1248 1221 fileList = glob.glob1(datapath, "*"+ext)
1249 1222
1250 1223 if not fileList:
1251 1224 continue
1252 1225
1253 1226 path_empty = False
1254 1227
1255 1228 thisDate = getDateFromRadarFolder(thisDir)
1256 1229
1257 1230 pathList.append(datapath)
1258 1231 dateList.append(thisDate)
1259 1232
1260 1233 dateList.sort()
1261 1234
1262 1235 if walk:
1263 1236 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1264 1237 else:
1265 1238 pattern_path = multi_path[0]
1266 1239
1267 1240 if path_empty:
1268 1241 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1269 1242 else:
1270 1243 if not dateList:
1271 1244 print "[Reading] Invalid date range selected [%s - %s]: no *%s files in %s" %(startDate, endDate, ext, path)
1272 1245
1273 1246 if include_path:
1274 1247 return dateList, pathList
1275 1248
1276 1249 return dateList
1277 1250
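# Hedged usage sketch for findDatafiles(); the path and dates are hypothetical:
#
#   dateList = reader.findDatafiles('/data/jro',
#                                   startDate=datetime.date(2015, 11, 1),
#                                   endDate=datetime.date(2015, 11, 3),
#                                   ext='.r', walk=True)
#   # -> one datetime.date entry per day that actually has *.r files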
1278 1251 def setup(self,
1279 1252 path=None,
1280 1253 startDate=None,
1281 1254 endDate=None,
1282 1255 startTime=datetime.time(0,0,0),
1283 1256 endTime=datetime.time(23,59,59),
1284 1257 set=None,
1285 1258 expLabel = "",
1286 1259 ext = None,
1287 1260 online = False,
1288 1261 delay = 60,
1289 1262 walk = True,
1290 1263 getblock = False,
1291 1264 nTxs = 1,
1292 1265 realtime=False,
1293 1266 blocksize=None,
1294 1267 blocktime=None,
1268 skip=None,
1269 cursor=None,
1270 warnings=True,
1295 1271 verbose=True,
1296 **kwargs):
1297
1272 server=None):
1273 if server is not None:
1274 if 'tcp://' in server:
1275 address = server
1276 else:
1277 address = 'ipc:///tmp/%s' % server
1278 self.server = address
1279 self.context = zmq.Context()
1280 self.receiver = self.context.socket(zmq.PULL)
1281 self.receiver.connect(self.server)
1282 time.sleep(0.5)
1283 print '[Starting] ReceiverData from {}'.format(self.server)
1284 else:
1285 self.server = None
1298 1286 if path == None:
1299 1287 raise ValueError, "[Reading] The path is not valid"
1300 1288
1301
1302 1289 if ext == None:
1303 1290 ext = self.ext
1304 1291
1305 self.verbose=verbose
1306 self.path = path
1307 self.startDate = startDate
1308 self.endDate = endDate
1309 self.startTime = startTime
1310 self.endTime = endTime
1311 self.set = set
1312 self.expLabel = expLabel
1313 self.ext = ext
1314 self.online = online
1315 self.delay = delay
1316 self.walk = walk
1317 self.getblock = getblock
1318 self.nTxs = nTxs
1319 self.realtime = realtime
1320 self.blocksize = blocksize
1321 self.blocktime = blocktime
1322
1323
1324 if self.firstTime is True:
1325 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1326 startTime=startTime, endTime=endTime,
1327 set=set, expLabel=expLabel, ext=ext,
1328 walk=walk)
1329 if filenameList is not None: filenameList = filenameList[:-1]
1330
1331 if pathList is not None and filenameList is not None and online:
1332 self.onlineWithDate = True
1333 online = False
1334 self.fileIndex = -1
1335 self.pathList = pathList
1336 self.filenameList = filenameList
1337 file_name = os.path.basename(filenameList[-1])
1338 basename, ext = os.path.splitext(file_name)
1339 last_set = int(basename[-3:])
1340
1341 1292 if online:
1342 1293 print "[Reading] Searching files in online mode..."
1343 1294
1344 1295 for nTries in range(self.nTries):
1345 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path,
1346 expLabel=expLabel,
1347 ext=ext,
1348 walk=walk,
1349 startDate=startDate,
1350 startTime=startTime,
1351 set=set)
1296 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1352 1297
1353 1298 if fullpath:
1354 1299 break
1300
1355 1301 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1356 1302 sleep( self.delay )
1357 1303
1358 1304 if not(fullpath):
1359 1305 print "[Reading] There isn't any valid file in %s" % path
1360 1306 return
1361 1307
1362 1308 self.year = year
1363 1309 self.doy = doy
1364 1310 self.set = set - 1
1365 1311 self.path = path
1366 1312 self.foldercounter = foldercounter
1367 1313 last_set = None
1368 1314 else:
1369 1315 print "[Reading] Searching files in offline mode ..."
1370 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1316 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1371 1317 startTime=startTime, endTime=endTime,
1372 1318 set=set, expLabel=expLabel, ext=ext,
1373 walk=walk)
1319 walk=walk, cursor=cursor,
1320 skip=skip)
1374 1321
1375 1322 if not(pathList):
1376 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1377 # datetime.datetime.combine(startDate,startTime).ctime(),
1378 # datetime.datetime.combine(endDate,endTime).ctime())
1379
1380 # sys.exit(-1)
1381
1382 1323 self.fileIndex = -1
1383 1324 self.pathList = []
1384 1325 self.filenameList = []
1385 1326 return
1386 1327
1387 1328 self.fileIndex = -1
1388 1329 self.pathList = pathList
1389 1330 self.filenameList = filenameList
1390 1331 file_name = os.path.basename(filenameList[-1])
1391 1332 basename, ext = os.path.splitext(file_name)
1392 1333 last_set = int(basename[-3:])
1393 1334
1394
1395 1335 self.online = online
1396 1336 self.realtime = realtime
1397 1337 self.delay = delay
1398 1338 ext = ext.lower()
1399 1339 self.ext = ext
1400 1340 self.getByBlock = getblock
1401 1341 self.nTxs = nTxs
1402 1342 self.startTime = startTime
1403 1343 self.endTime = endTime
1404 1344
1405
1406 1345 #Added-----------------
1407 1346 self.selBlocksize = blocksize
1408 1347 self.selBlocktime = blocktime
1409 1348
1349 # Verbose-----------
1350 self.verbose = verbose
1351 self.warnings = warnings
1410 1352
1411 1353 if not(self.setNextFile()):
1412 1354 if (startDate!=None) and (endDate!=None):
1413 1355 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1414 1356 elif startDate != None:
1415 1357 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1416 1358 else:
1417 1359 print "[Reading] No files"
1418 1360
1419 1361 self.fileIndex = -1
1420 1362 self.pathList = []
1421 1363 self.filenameList = []
1422 1364 return
1423 1365
1424 1366 # self.getBasicHeader()
1425 1367
1426 1368 if last_set != None:
1427 1369 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1428 1370 return
1429 1371
1430 1372 def getBasicHeader(self):
1431 1373
1432 1374 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1433 1375
1434 1376 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1435 1377
1436 1378 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1437 1379
1438 1380 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1439 1381
1440 1382 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1441 1383
1442 1384 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1443 1385
1444 1386 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1445 1387
1446 1388 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1447 1389
1448 1390
1449 1391 def getFirstHeader(self):
1450 1392
1451 1393 raise NotImplementedError
1452 1394
1453 1395 def getData(self):
1454 1396
1455 1397 raise NotImplementedError
1456 1398
1457 1399 def hasNotDataInBuffer(self):
1458 1400
1459 1401 raise NotImplementedError
1460 1402
1461 1403 def readBlock(self):
1462 1404
1463 1405 raise NotImplementedError
1464 1406
1465 1407 def isEndProcess(self):
1466 1408
1467 1409 return self.flagNoMoreFiles
1468 1410
1469 1411 def printReadBlocks(self):
1470 1412
1471 1413 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1472 1414
1473 1415 def printTotalBlocks(self):
1474 1416
1475 1417 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1476 1418
1477 1419 def printNumberOfBlock(self):
1478 1420
1479 1421 if self.flagIsNewBlock:
1480 1422 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1481 1423 self.processingHeaderObj.dataBlocksPerFile,
1482 1424 self.dataOut.datatime.ctime())
1483 1425
1484 1426 def printInfo(self):
1485 1427
1486 1428 if self.__printInfo == False:
1487 1429 return
1488 1430
1489 1431 self.basicHeaderObj.printInfo()
1490 1432 self.systemHeaderObj.printInfo()
1491 1433 self.radarControllerHeaderObj.printInfo()
1492 1434 self.processingHeaderObj.printInfo()
1493 1435
1494 1436 self.__printInfo = False
1495 1437
1496 1438 def run(self,
1497 1439 path=None,
1498 1440 startDate=None,
1499 1441 endDate=None,
1500 1442 startTime=datetime.time(0,0,0),
1501 1443 endTime=datetime.time(23,59,59),
1502 1444 set=None,
1503 1445 expLabel = "",
1504 1446 ext = None,
1505 1447 online = False,
1506 1448 delay = 60,
1507 1449 walk = True,
1508 1450 getblock = False,
1509 1451 nTxs = 1,
1510 1452 realtime=False,
1511 1453 blocksize=None,
1512 1454 blocktime=None,
1513 queue=None,
1514 1455 skip=None,
1515 1456 cursor=None,
1516 1457 warnings=True,
1517 1458 server=None,
1518 1459 verbose=True, **kwargs):
1519 1460
1520 1461 if not(self.isConfig):
1521 # self.dataOut = dataOut
1522 1462 self.setup( path=path,
1523 1463 startDate=startDate,
1524 1464 endDate=endDate,
1525 1465 startTime=startTime,
1526 1466 endTime=endTime,
1527 1467 set=set,
1528 1468 expLabel=expLabel,
1529 1469 ext=ext,
1530 1470 online=online,
1531 1471 delay=delay,
1532 1472 walk=walk,
1533 1473 getblock=getblock,
1534 1474 nTxs=nTxs,
1535 1475 realtime=realtime,
1536 1476 blocksize=blocksize,
1537 1477 blocktime=blocktime,
1538 queue=queue,
1539 1478 skip=skip,
1540 1479 cursor=cursor,
1541 1480 warnings=warnings,
1542 1481 server=server,
1543 verbose=verbose, **kwargs)
1482 verbose=verbose)
1544 1483 self.isConfig = True
1545 1484 if server is None:
1546 1485 self.getData()
1547 1486 else:
1548 1487 self.getFromServer()
1549 1488
1550 1489 class JRODataWriter(JRODataIO):
1551 1490
1552 1491 """
1553 1492 This class allows writing processed data files (.r or .pdata). Data is
1554 1493 always written block by block.
1555 1494 """
1556 1495
1557 1496 blockIndex = 0
1558 1497
1559 1498 path = None
1560 1499
1561 1500 setFile = None
1562 1501
1563 1502 profilesPerBlock = None
1564 1503
1565 1504 blocksPerFile = None
1566 1505
1567 1506 nWriteBlocks = 0
1568 1507
1569 1508 fileDate = None
1570 1509
1571 1510 def __init__(self, dataOut=None):
1572 1511 raise NotImplementedError
1573 1512
1574 1513
1575 1514 def hasAllDataInBuffer(self):
1576 1515 raise NotImplementedError
1577 1516
1578 1517
1579 1518 def setBlockDimension(self):
1580 1519 raise NotImplementedError
1581 1520
1582 1521
1583 1522 def writeBlock(self):
1584 1523 raise NotImplementedError
1585 1524
1586 1525
1587 1526 def putData(self):
1588 1527 raise NotImplementedError
1589 1528
1590 1529
1591 1530 def getProcessFlags(self):
1592 1531
1593 1532 processFlags = 0
1594 1533
1595 1534 dtype_index = get_dtype_index(self.dtype)
1596 1535 procflag_dtype = get_procflag_dtype(dtype_index)
1597 1536
1598 1537 processFlags += procflag_dtype
1599 1538
1600 1539 if self.dataOut.flagDecodeData:
1601 1540 processFlags += PROCFLAG.DECODE_DATA
1602 1541
1603 1542 if self.dataOut.flagDeflipData:
1604 1543 processFlags += PROCFLAG.DEFLIP_DATA
1605 1544
1606 1545 if self.dataOut.code is not None:
1607 1546 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1608 1547
1609 1548 if self.dataOut.nCohInt > 1:
1610 1549 processFlags += PROCFLAG.COHERENT_INTEGRATION
1611 1550
1612 1551 if self.dataOut.type == "Spectra":
1613 1552 if self.dataOut.nIncohInt > 1:
1614 1553 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1615 1554
1616 1555 if self.dataOut.data_dc is not None:
1617 1556 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1618 1557
1619 1558 if self.dataOut.flagShiftFFT:
1620 1559 processFlags += PROCFLAG.SHIFT_FFT_DATA
1621 1560
1622 1561 return processFlags
1623 1562
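# Example (a minimal sketch): how the bitmask returned by getProcessFlags() could be
# inspected, assuming the PROCFLAG constants used above are distinct bit flags (the
# additive accumulation suggests they are, e.g. powers of two):
#
#     flags = writer.getProcessFlags()
#     if flags & PROCFLAG.DECODE_DATA:
#         print "decoding was applied"
#     if flags & PROCFLAG.COHERENT_INTEGRATION:
#         print "coherent integration was applied"
#     if flags & PROCFLAG.INCOHERENT_INTEGRATION:
#         print "incoherent integration was applied (Spectra only)"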
1624 1563 def setBasicHeader(self):
1625 1564
1626 1565 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1627 1566 self.basicHeaderObj.version = self.versionFile
1628 1567 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1629 1568
1630 1569 utc = numpy.floor(self.dataOut.utctime)
1631 1570 milisecond = (self.dataOut.utctime - utc)* 1000.0
1632 1571
1633 1572 self.basicHeaderObj.utc = utc
1634 1573 self.basicHeaderObj.miliSecond = milisecond
1635 1574 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1636 1575 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1637 1576 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1638 1577
1639 1578 def setFirstHeader(self):
1640 1579 """
1641 1580 Gets a copy of the First Header
1642 1581
1643 1582 Affected:
1644 1583
1645 1584 self.basicHeaderObj
1646 1585 self.systemHeaderObj
1647 1586 self.radarControllerHeaderObj
1648 1587 self.processingHeaderObj
1649 1588
1650 1589 Return:
1651 1590 None
1652 1591 """
1653 1592
1654 1593 raise NotImplementedError
1655 1594
1656 1595 def __writeFirstHeader(self):
1657 1596 """
1658 1597 Writes the first header of the file, i.e. the Basic Header plus the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1659 1598
1660 1599 Affected:
1661 1600 __dataType
1662 1601
1663 1602 Return:
1664 1603 None
1665 1604 """
1666 1605
1667 1606 # compute parameters
1668 1607
1669 1608 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1670 1609 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1671 1610
1672 1611 self.basicHeaderObj.write(self.fp)
1673 1612 self.systemHeaderObj.write(self.fp)
1674 1613 self.radarControllerHeaderObj.write(self.fp)
1675 1614 self.processingHeaderObj.write(self.fp)
1676 1615
1677 1616 def __setNewBlock(self):
1678 1617 """
1679 1618 If this is a new file it writes the First Header, otherwise only the Basic Header
1680 1619 
1681 1620 Return:
1682 1621 0 : if nothing could be written
1683 1622 1 : if the Basic or the First Header was written
1684 1623 """
1685 1624 if self.fp == None:
1686 1625 self.setNextFile()
1687 1626
1688 1627 if self.flagIsNewFile:
1689 1628 return 1
1690 1629
1691 1630 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1692 1631 self.basicHeaderObj.write(self.fp)
1693 1632 return 1
1694 1633
1695 1634 if not( self.setNextFile() ):
1696 1635 return 0
1697 1636
1698 1637 return 1
1699 1638
1700 1639
1701 1640 def writeNextBlock(self):
1702 1641 """
1703 1642 Selects the next data block and writes it to a file
1704 1643 
1705 1644 Return:
1706 1645 0 : if the data block could not be written
1707 1646 1 : if the data block was written
1708 1647 """
1709 1648 if not( self.__setNewBlock() ):
1710 1649 return 0
1711 1650
1712 1651 self.writeBlock()
1713 1652
1714 1653 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1715 1654 self.processingHeaderObj.dataBlocksPerFile)
1716 1655
1717 1656 return 1
1718 1657
1719 1658 def setNextFile(self):
1720 1659 """
1721 1660 Determines the next file to be written
1722 1661
1723 1662 Affected:
1724 1663 self.filename
1725 1664 self.subfolder
1726 1665 self.fp
1727 1666 self.setFile
1728 1667 self.flagIsNewFile
1729 1668
1730 1669 Return:
1731 1670 0 : if the file cannot be written
1732 1671 1 : if the file is ready to be written
1733 1672 """
1734 1673 ext = self.ext
1735 1674 path = self.path
1736 1675
1737 1676 if self.fp != None:
1738 1677 self.fp.close()
1739 1678
1740 1679 timeTuple = time.localtime( self.dataOut.utctime)
1741 1680 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1742 1681
1743 1682 fullpath = os.path.join( path, subfolder )
1744 1683 setFile = self.setFile
1745 1684
1746 1685 if not( os.path.exists(fullpath) ):
1747 1686 os.mkdir(fullpath)
1748 1687 setFile = -1 # initialize the set counter
1749 1688 else:
1750 1689 filesList = os.listdir( fullpath )
1751 1690 if len( filesList ) > 0:
1752 1691 filesList = sorted( filesList, key=str.lower )
1753 1692 filen = filesList[-1]
1754 1693 # the filename must have the following format
1755 1694 # 0 1234 567 89A BCDE (hex)
1756 1695 # x YYYY DDD SSS .ext
1757 1696 if isNumber( filen[8:11] ):
1758 1697 setFile = int( filen[8:11] ) # initialize the set counter from the set of the last file
1759 1698 else:
1760 1699 setFile = -1
1761 1700 else:
1762 1701 setFile = -1 # initialize the set counter
1763 1702
1764 1703 setFile += 1
1765 1704
1766 1705 # If this is a new day, reset some values
1767 1706 if self.dataOut.datatime.date() > self.fileDate:
1768 1707 setFile = 0
1769 1708 self.nTotalBlocks = 0
1770 1709
1771 1710 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1772 1711
1773 1712 filename = os.path.join( path, subfolder, filen )
1774 1713
1775 1714 fp = open( filename,'wb' )
1776 1715
1777 1716 self.blockIndex = 0
1778 1717
1779 1718 # save attributes
1780 1719 self.filename = filename
1781 1720 self.subfolder = subfolder
1782 1721 self.fp = fp
1783 1722 self.setFile = setFile
1784 1723 self.flagIsNewFile = 1
1785 1724 self.fileDate = self.dataOut.datatime.date()
1786 1725
1787 1726 self.setFirstHeader()
1788 1727
1789 1728 print '[Writing] Opening file: %s'%self.filename
1790 1729
1791 1730 self.__writeFirstHeader()
1792 1731
1793 1732 return 1
1794 1733
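# Example (a minimal sketch): the output filename built by setNextFile() follows the
# "x YYYY DDD SSS .ext" convention noted above; assuming optchar 'D' and a raw file
# written on 2014, day-of-year 185, set 3, the result would be:
#
#     filen = '%s%4.4d%3.3d%3.3d%s' % ('D', 2014, 185, 3, '.r')
#     # -> 'D2014185003.r', stored under the subfolder 'd2014185'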
1795 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, verbose=True):
1734 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1796 1735 """
1797 1736 Sets the format in which the data will be saved and writes the First Header
1798 1737
1799 1738 Inputs:
1800 1739 path : directory where data will be saved
1801 1740 profilesPerBlock : number of profiles per block
1802 1741 set : initial file set
1803 1742 datatype : An integer number that defines data type:
1804 1743 0 : int8 (1 byte)
1805 1744 1 : int16 (2 bytes)
1806 1745 2 : int32 (4 bytes)
1807 1746 3 : int64 (8 bytes)
1808 1747 4 : float32 (4 bytes)
1809 1748 5 : double64 (8 bytes)
1810 1749
1811 1750 Return:
1812 1751 0 : if the setup failed
1813 1752 1 : if the setup succeeded
1814 1753 """
1815 1754
1816 1755 if ext == None:
1817 1756 ext = self.ext
1818 1757
1819 1758 self.ext = ext.lower()
1820 1759
1821 1760 self.path = path
1822 1761
1823 1762 if set is None:
1824 1763 self.setFile = -1
1825 1764 else:
1826 1765 self.setFile = set - 1
1827 1766
1828 1767 self.blocksPerFile = blocksPerFile
1829 1768
1830 1769 self.profilesPerBlock = profilesPerBlock
1831 1770
1832 1771 self.dataOut = dataOut
1833 1772 self.fileDate = self.dataOut.datatime.date()
1834 1773 #By default
1835 1774 self.dtype = self.dataOut.dtype
1836 1775
1837 1776 if datatype is not None:
1838 1777 self.dtype = get_numpy_dtype(datatype)
1839 1778
1840 1779 if not(self.setNextFile()):
1841 1780 print "[Writing] There isn't a next file"
1842 1781 return 0
1843 1782
1844 1783 self.setBlockDimension()
1845 1784
1846 1785 return 1
1847 1786
1848 1787 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1849 1788
1850 1789 if not(self.isConfig):
1851 1790
1852 1791 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1853 1792 self.isConfig = True
1854 1793
1855 1794 self.putData()
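# Example (a minimal sketch): how a concrete subclass of JRODataWriter might be driven
# block by block; 'SomeWriter' is a placeholder for any subclass implementing
# writeBlock/putData, and the path and values are illustrative only:
#
#     writer = SomeWriter()
#     writer.run(dataOut, path='/output/rawdata', blocksPerFile=100,
#                profilesPerBlock=64, datatype=4)   # datatype 4 -> float32 (see setup)
#     # run() performs setup() once (isConfig) and then putData() on every call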
@@ -1,848 +1,848
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import os, sys
8 8 import time, datetime
9 9 import numpy
10 10 import fnmatch
11 11 import glob
12 12 from time import sleep
13 13
14 14 try:
15 15 import pyfits
16 16 except ImportError, e:
17 17 print "Fits data cannot be used. Install pyfits module"
18 18
19 19 from xml.etree.ElementTree import ElementTree
20 20
21 21 from jroIO_base import isRadarFolder, isNumber
22 22 from schainpy.model.data.jrodata import Fits
23 23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
24 24
25 25 class PyFits(object):
26 26 name=None
27 27 format=None
28 28 array =None
29 29 data =None
30 30 thdulist=None
31 31 prihdr=None
32 32 hdu=None
33 33
34 34 def __init__(self):
35 35
36 36 pass
37 37
38 38 def setColF(self,name,format,array):
39 39 self.name=name
40 40 self.format=format
41 41 self.array=array
42 42 a1=numpy.array([self.array],dtype=numpy.float32)
43 43 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
44 44 return self.col1
45 45
46 46 # def setColP(self,name,format,data):
47 47 # self.name=name
48 48 # self.format=format
49 49 # self.data=data
50 50 # a2=numpy.array([self.data],dtype=numpy.float32)
51 51 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
52 52 # return self.col2
53 53
54 54
55 55 def writeData(self,name,format,data):
56 56 self.name=name
57 57 self.format=format
58 58 self.data=data
59 59 a2=numpy.array([self.data],dtype=numpy.float32)
60 60 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
61 61 return self.col2
62 62
63 63 def cFImage(self,idblock,year,month,day,hour,minute,second):
64 64 self.hdu= pyfits.PrimaryHDU(idblock)
65 65 self.hdu.header.set("Year",year)
66 66 self.hdu.header.set("Month",month)
67 67 self.hdu.header.set("Day",day)
68 68 self.hdu.header.set("Hour",hour)
69 69 self.hdu.header.set("Minute",minute)
70 70 self.hdu.header.set("Second",second)
71 71 return self.hdu
72 72
73 73
74 74 def Ctable(self,colList):
75 75 self.cols=pyfits.ColDefs(colList)
76 76 self.tbhdu = pyfits.new_table(self.cols)
77 77 return self.tbhdu
78 78
79 79
80 80 def CFile(self,hdu,tbhdu):
81 81 self.thdulist=pyfits.HDUList([hdu,tbhdu])
82 82
83 83 def wFile(self,filename):
84 84 if os.path.isfile(filename):
85 85 os.remove(filename)
86 86 self.thdulist.writeto(filename)
87 87
88 88
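# Example (a minimal sketch): the intended build sequence for a FITS file with this
# helper, mirroring how SpectraHeisWriter.putData() uses it below; the column name,
# format and data values are illustrative only:
#
#     wrObj = PyFits()
#     header = wrObj.cFImage(idblock=numpy.array([0], dtype="int64"),
#                            year=2014, month=7, day=3, hour=0, minute=0, second=0)
#     col = wrObj.setColF(name="freq", format='100E', array=numpy.zeros(100))
#     table = wrObj.Ctable(colList=[col])
#     wrObj.CFile(header, table)
#     wrObj.wFile("example.fits")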
89 89 class ParameterConf:
90 90 ELEMENTNAME = 'Parameter'
91 91 def __init__(self):
92 92 self.name = ''
93 93 self.value = ''
94 94
95 95 def readXml(self, parmElement):
96 96 self.name = parmElement.get('name')
97 97 self.value = parmElement.get('value')
98 98
99 99 def getElementName(self):
100 100 return self.ELEMENTNAME
101 101
102 102 class Metadata(object):
103 103
104 104 def __init__(self, filename):
105 105 self.parmConfObjList = []
106 106 self.readXml(filename)
107 107
108 108 def readXml(self, filename):
109 109 self.projectElement = None
110 110 self.procUnitConfObjDict = {}
111 111 self.projectElement = ElementTree().parse(filename)
112 112 self.project = self.projectElement.tag
113 113
114 114 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
115 115
116 116 for parmElement in parmElementList:
117 117 parmConfObj = ParameterConf()
118 118 parmConfObj.readXml(parmElement)
119 119 self.parmConfObjList.append(parmConfObj)
120 120
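# Example (a minimal sketch): the kind of metadata XML that Metadata/ParameterConf
# expect, i.e. any elements tagged 'Parameter' carrying 'name' and 'value' attributes;
# the root tag and the parameter names shown here are illustrative only:
#
#     <Metadata>
#         <Parameter name="OBSERVER" value="JRO" />
#         <Parameter name="ANTENNA" value="main array" />
#     </Metadata>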
121 121 class FitsWriter(Operation):
122 122 def __init__(self, **kwargs):
123 123 Operation.__init__(self, **kwargs)
124 124 self.isConfig = False
125 125 self.dataBlocksPerFile = None
126 126 self.blockIndex = 0
127 127 self.flagIsNewFile = 1
128 128 self.fitsObj = None
129 129 self.optchar = 'P'
130 130 self.ext = '.fits'
131 131 self.setFile = 0
132 132
133 133 def setFitsHeader(self, dataOut, metadatafile=None):
134 134
135 135 header_data = pyfits.PrimaryHDU()
136 136
137 137 header_data.header['EXPNAME'] = "RADAR DATA"
138 138 header_data.header['DATATYPE'] = "SPECTRA"
139 139 header_data.header['COMMENT'] = ""
140 140
141 141 if metadatafile:
142 142
143 143 metadata4fits = Metadata(metadatafile)
144 144
145 145 for parameter in metadata4fits.parmConfObjList:
146 146 parm_name = parameter.name
147 147 parm_value = parameter.value
148 148
149 149 header_data.header[parm_name] = parm_value
150 150
151 151 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
152 152 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
153 153 header_data.header['NCHANNELS'] = dataOut.nChannels
154 154 #header_data.header['HEIGHTS'] = dataOut.heightList
155 155 header_data.header['NHEIGHTS'] = dataOut.nHeights
156 156
157 157 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
158 158 header_data.header['NCOHINT'] = dataOut.nCohInt
159 159 header_data.header['NINCOHINT'] = dataOut.nIncohInt
160 160 header_data.header['TIMEZONE'] = dataOut.timeZone
161 161 header_data.header['NBLOCK'] = self.blockIndex
162 162
163 163 header_data.writeto(self.filename)
164 164
165 165 self.addExtension(dataOut.heightList,'HEIGHTLIST')
166 166
167 167
168 168 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
169 169
170 170 self.path = path
171 171 self.dataOut = dataOut
172 172 self.metadatafile = metadatafile
173 173 self.dataBlocksPerFile = dataBlocksPerFile
174 174
175 175 def open(self):
176 176 self.fitsObj = pyfits.open(self.filename, mode='update')
177 177
178 178
179 179 def addExtension(self, data, tagname):
180 180 self.open()
181 181 extension = pyfits.ImageHDU(data=data, name=tagname)
182 182 #extension.header['TAG'] = tagname
183 183 self.fitsObj.append(extension)
184 184 self.write()
185 185
186 186 def addData(self, data):
187 187 self.open()
188 188 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
189 189 extension.header['UTCTIME'] = self.dataOut.utctime
190 190 self.fitsObj.append(extension)
191 191 self.blockIndex += 1
192 192 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
193 193
194 194 self.write()
195 195
196 196 def write(self):
197 197
198 198 self.fitsObj.flush(verbose=True)
199 199 self.fitsObj.close()
200 200
201 201
202 202 def setNextFile(self):
203 203
204 204 ext = self.ext
205 205 path = self.path
206 206
207 207 timeTuple = time.localtime( self.dataOut.utctime)
208 208 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
209 209
210 210 fullpath = os.path.join( path, subfolder )
211 211 if not( os.path.exists(fullpath) ):
212 212 os.mkdir(fullpath)
213 213 self.setFile = -1 # initialize the set counter
214 214 else:
215 215 filesList = os.listdir( fullpath )
216 216 if len( filesList ) > 0:
217 217 filesList = sorted( filesList, key=str.lower )
218 218 filen = filesList[-1]
219 219
220 220 if isNumber( filen[8:11] ):
221 221 self.setFile = int( filen[8:11] ) # initialize the set counter from the set of the last file
222 222 else:
223 223 self.setFile = -1
224 224 else:
225 225 self.setFile = -1 # initialize the set counter
226 226
227 227 setFile = self.setFile
228 228 setFile += 1
229 229
230 230 thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
231 231 timeTuple.tm_year,
232 232 timeTuple.tm_yday,
233 233 setFile,
234 234 ext )
235 235
236 236 filename = os.path.join( path, subfolder, thisFile )
237 237
238 238 self.blockIndex = 0
239 239 self.filename = filename
240 240 self.setFile = setFile
241 241 self.flagIsNewFile = 1
242 242
243 243 print 'Writing the file: %s'%self.filename
244 244
245 245 self.setFitsHeader(self.dataOut, self.metadatafile)
246 246
247 247 return 1
248 248
249 249 def writeBlock(self):
250 250 self.addData(self.dataOut.data_spc)
251 251 self.flagIsNewFile = 0
252 252
253 253
254 254 def __setNewBlock(self):
255 255
256 256 if self.flagIsNewFile:
257 257 return 1
258 258
259 259 if self.blockIndex < self.dataBlocksPerFile:
260 260 return 1
261 261
262 262 if not( self.setNextFile() ):
263 263 return 0
264 264
265 265 return 1
266 266
267 267 def writeNextBlock(self):
268 268 if not( self.__setNewBlock() ):
269 269 return 0
270 270 self.writeBlock()
271 271 return 1
272 272
273 273 def putData(self):
274 274 if self.flagIsNewFile:
275 275 self.setNextFile()
276 276 self.writeNextBlock()
277 277
278 278 def run(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None, **kwargs):
279 279 if not(self.isConfig):
280 280 self.setup(dataOut, path, dataBlocksPerFile=dataBlocksPerFile, metadatafile=metadatafile, **kwargs)
281 281 self.isConfig = True
282 282 self.putData()
283 283
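# Example (a minimal sketch): driving FitsWriter directly; here 'dataOut' stands for a
# Spectra-like object exposing the attributes read in setFitsHeader() (datatime,
# channelList, heightList, ippSeconds, ...), and the paths are illustrative only:
#
#     writer = FitsWriter()
#     writer.run(dataOut, path='/output/fits', dataBlocksPerFile=100,
#                metadatafile='/config/metadata.xml')
#     # each call appends one data block; a new file is started every
#     # dataBlocksPerFile blocks (see __setNewBlock / setNextFile)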
284 284
285 285 class FitsReader(ProcessingUnit):
286 286
287 287 # __TIMEZONE = time.timezone
288 288
289 289 expName = None
290 290 datetimestr = None
291 291 utc = None
292 292 nChannels = None
293 293 nSamples = None
294 294 dataBlocksPerFile = None
295 295 comments = None
296 296 lastUTTime = None
297 297 header_dict = None
298 298 data = None
299 299 data_header_dict = None
300 300
301 301 def __init__(self, **kwargs):
302 302 ProcessingUnit.__init__(self, **kwargs)
303 303 self.isConfig = False
304 304 self.ext = '.fits'
305 305 self.setFile = 0
306 306 self.flagNoMoreFiles = 0
307 307 self.flagIsNewFile = 1
308 308 self.flagDiscontinuousBlock = None
309 309 self.fileIndex = None
310 310 self.filename = None
311 311 self.fileSize = None
312 312 self.fitsObj = None
313 313 self.timeZone = None
314 314 self.nReadBlocks = 0
315 315 self.nTotalBlocks = 0
316 316 self.dataOut = self.createObjByDefault()
317 317 self.maxTimeStep = 10 # should be defined by the user via the setup() method
318 318 self.blockIndex = 1
319 319
320 320 def createObjByDefault(self):
321 321
322 322 dataObj = Fits()
323 323
324 324 return dataObj
325 325
326 326 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
327 327 try:
328 328 fitsObj = pyfits.open(filename,'readonly')
329 329 except:
330 330 print "File %s can't be opened" %(filename)
331 331 return None
332 332
333 333 header = fitsObj[0].header
334 334 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
335 335 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
336 336
337 337 ltc = utc
338 338 if useLocalTime:
339 339 ltc -= time.timezone
340 340 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
341 341 thisTime = thisDatetime.time()
342 342
343 343 if not ((startTime <= thisTime) and (endTime > thisTime)):
344 344 return None
345 345
346 346 return thisDatetime
347 347
348 348 def __setNextFileOnline(self):
349 349 raise NotImplementedError
350 350
351 351 def __setNextFileOffline(self):
352 352 idFile = self.fileIndex
353 353
354 354 while (True):
355 355 idFile += 1
356 356 if not(idFile < len(self.filenameList)):
357 357 self.flagNoMoreFiles = 1
358 358 print "No more Files"
359 359 return 0
360 360
361 361 filename = self.filenameList[idFile]
362 362
363 363 # if not(self.__verifyFile(filename)):
364 364 # continue
365 365
366 366 fileSize = os.path.getsize(filename)
367 367 fitsObj = pyfits.open(filename,'readonly')
368 368 break
369 369
370 370 self.flagIsNewFile = 1
371 371 self.fileIndex = idFile
372 372 self.filename = filename
373 373 self.fileSize = fileSize
374 374 self.fitsObj = fitsObj
375 375 self.blockIndex = 0
376 376 print "Setting the file: %s"%self.filename
377 377
378 378 return 1
379 379
380 380 def __setValuesFromHeader(self):
381 381
382 382 self.dataOut.header = self.header_dict
383 383 self.dataOut.expName = self.expName
384 384
385 385 self.dataOut.timeZone = self.timeZone
386 386 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
387 387 self.dataOut.comments = self.comments
388 388 # self.dataOut.timeInterval = self.timeInterval
389 389 self.dataOut.channelList = self.channelList
390 390 self.dataOut.heightList = self.heightList
391 391
392 392 self.dataOut.nCohInt = self.nCohInt
393 393 self.dataOut.nIncohInt = self.nIncohInt
394 394
395 395 self.dataOut.ippSeconds = self.ippSeconds
396 396
397 397 def readHeader(self):
398 398 headerObj = self.fitsObj[0]
399 399
400 400 self.header_dict = headerObj.header
401 401 if 'EXPNAME' in headerObj.header.keys():
402 402 self.expName = headerObj.header['EXPNAME']
403 403
404 404 if 'DATATYPE' in headerObj.header.keys():
405 405 self.dataType = headerObj.header['DATATYPE']
406 406
407 407 self.datetimestr = headerObj.header['DATETIME']
408 408 channelList = headerObj.header['CHANNELLIST']
409 409 channelList = channelList.split('[')
410 410 channelList = channelList[1].split(']')
411 411 channelList = channelList[0].split(',')
412 412 channelList = [int(ch) for ch in channelList]
413 413 self.channelList = channelList
414 414 self.nChannels = headerObj.header['NCHANNELS']
415 415 self.nHeights = headerObj.header['NHEIGHTS']
416 416 self.ippSeconds = headerObj.header['IPPSECONDS']
417 417 self.nCohInt = headerObj.header['NCOHINT']
418 418 self.nIncohInt = headerObj.header['NINCOHINT']
419 419 self.dataBlocksPerFile = headerObj.header['NBLOCK']
420 420 self.timeZone = headerObj.header['TIMEZONE']
421 421
422 422 # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
423 423
424 424 if 'COMMENT' in headerObj.header.keys():
425 425 self.comments = headerObj.header['COMMENT']
426 426
427 427 self.readHeightList()
428 428
429 429 def readHeightList(self):
430 430 self.blockIndex = self.blockIndex + 1
431 431 obj = self.fitsObj[self.blockIndex]
432 432 self.heightList = obj.data
433 433 self.blockIndex = self.blockIndex + 1
434 434
435 435 def readExtension(self):
436 436 obj = self.fitsObj[self.blockIndex]
437 437 self.heightList = obj.data
438 438 self.blockIndex = self.blockIndex + 1
439 439
440 440 def setNextFile(self):
441 441
442 442 if self.online:
443 443 newFile = self.__setNextFileOnline()
444 444 else:
445 445 newFile = self.__setNextFileOffline()
446 446
447 447 if not(newFile):
448 448 return 0
449 449
450 450 self.readHeader()
451 451 self.__setValuesFromHeader()
452 452 self.nReadBlocks = 0
453 453 # self.blockIndex = 1
454 454 return 1
455 455
456 def __searchFilesOffLine(self,
456 def searchFilesOffLine(self,
457 457 path,
458 458 startDate,
459 459 endDate,
460 460 startTime=datetime.time(0,0,0),
461 461 endTime=datetime.time(23,59,59),
462 462 set=None,
463 463 expLabel='',
464 464 ext='.fits',
465 465 walk=True):
466 466
467 467 pathList = []
468 468
469 469 if not walk:
470 470 pathList.append(path)
471 471
472 472 else:
473 473 dirList = []
474 474 for thisPath in os.listdir(path):
475 475 if not os.path.isdir(os.path.join(path,thisPath)):
476 476 continue
477 477 if not isRadarFolder(thisPath):
478 478 continue
479 479
480 480 dirList.append(thisPath)
481 481
482 482 if not(dirList):
483 483 return None, None
484 484
485 485 thisDate = startDate
486 486
487 487 while(thisDate <= endDate):
488 488 year = thisDate.timetuple().tm_year
489 489 doy = thisDate.timetuple().tm_yday
490 490
491 491 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
492 492 if len(matchlist) == 0:
493 493 thisDate += datetime.timedelta(1)
494 494 continue
495 495 for match in matchlist:
496 496 pathList.append(os.path.join(path,match,expLabel))
497 497
498 498 thisDate += datetime.timedelta(1)
499 499
500 500 if pathList == []:
501 501 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
502 502 return None, None
503 503
504 504 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
505 505
506 506 filenameList = []
507 507 datetimeList = []
508 508
509 509 for i in range(len(pathList)):
510 510
511 511 thisPath = pathList[i]
512 512
513 513 fileList = glob.glob1(thisPath, "*%s" %ext)
514 514 fileList.sort()
515 515
516 516 for thisFile in fileList:
517 517
518 518 filename = os.path.join(thisPath,thisFile)
519 519 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
520 520
521 521 if not(thisDatetime):
522 522 continue
523 523
524 524 filenameList.append(filename)
525 525 datetimeList.append(thisDatetime)
526 526
527 527 if not(filenameList):
528 528 print "Any file was found for the time range %s - %s" %(startTime, endTime)
529 529 return None, None
530 530
531 531 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
532 532 print
533 533
534 534 for i in range(len(filenameList)):
535 535 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
536 536
537 537 self.filenameList = filenameList
538 538 self.datetimeList = datetimeList
539 539
540 540 return pathList, filenameList
541 541
542 542 def setup(self, path=None,
543 543 startDate=None,
544 544 endDate=None,
545 545 startTime=datetime.time(0,0,0),
546 546 endTime=datetime.time(23,59,59),
547 547 set=0,
548 548 expLabel = "",
549 549 ext = None,
550 550 online = False,
551 551 delay = 60,
552 552 walk = True):
553 553
554 554 if path == None:
555 555 raise ValueError, "The path is not valid"
556 556
557 557 if ext == None:
558 558 ext = self.ext
559 559
560 560 if not(online):
561 561 print "Searching files in offline mode ..."
562 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
562 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
563 563 startTime=startTime, endTime=endTime,
564 564 set=set, expLabel=expLabel, ext=ext,
565 565 walk=walk)
566 566
567 567 if not(pathList):
568 568 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
569 569 datetime.datetime.combine(startDate,startTime).ctime(),
570 570 datetime.datetime.combine(endDate,endTime).ctime())
571 571
572 572 sys.exit(-1)
573 573
574 574 self.fileIndex = -1
575 575 self.pathList = pathList
576 576 self.filenameList = filenameList
577 577
578 578 self.online = online
579 579 self.delay = delay
580 580 ext = ext.lower()
581 581 self.ext = ext
582 582
583 583 if not(self.setNextFile()):
584 584 if (startDate!=None) and (endDate!=None):
585 585 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
586 586 elif startDate != None:
587 587 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
588 588 else:
589 589 print "No files"
590 590
591 591 sys.exit(-1)
592 592
593 593
594 594
595 595 def readBlock(self):
596 596 dataObj = self.fitsObj[self.blockIndex]
597 597
598 598 self.data = dataObj.data
599 599 self.data_header_dict = dataObj.header
600 600 self.utc = self.data_header_dict['UTCTIME']
601 601
602 602 self.flagIsNewFile = 0
603 603 self.blockIndex += 1
604 604 self.nTotalBlocks += 1
605 605 self.nReadBlocks += 1
606 606
607 607 return 1
608 608
609 609 def __jumpToLastBlock(self):
610 610 raise NotImplementedError
611 611
612 612 def __waitNewBlock(self):
613 613 """
614 614 Return 1 if a new data block was found, 0 otherwise.
615 615 
616 616 In offline reading mode it always returns 0.
617 617 """
618 618 if not self.online:
619 619 return 0
620 620
621 621 if (self.nReadBlocks >= self.dataBlocksPerFile):
622 622 return 0
623 623
624 624 currentPointer = self.fp.tell()
625 625
626 626 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
627 627
628 628 for nTries in range( self.nTries ):
629 629
630 630 self.fp.close()
631 631 self.fp = open( self.filename, 'rb' )
632 632 self.fp.seek( currentPointer )
633 633
634 634 self.fileSize = os.path.getsize( self.filename )
635 635 currentSize = self.fileSize - currentPointer
636 636
637 637 if ( currentSize >= neededSize ):
638 638 self.__rdBasicHeader()
639 639 return 1
640 640
641 641 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
642 642 sleep( self.delay )
643 643
644 644
645 645 return 0
646 646
647 647 def __setNewBlock(self):
648 648
649 649 if self.online:
650 650 self.__jumpToLastBlock()
651 651
652 652 if self.flagIsNewFile:
653 653 return 1
654 654
655 655 self.lastUTTime = self.utc
656 656
657 657 if self.online:
658 658 if self.__waitNewBlock():
659 659 return 1
660 660
661 661 if self.nReadBlocks < self.dataBlocksPerFile:
662 662 return 1
663 663
664 664 if not(self.setNextFile()):
665 665 return 0
666 666
667 667 deltaTime = self.utc - self.lastUTTime
668 668
669 669 self.flagDiscontinuousBlock = 0
670 670
671 671 if deltaTime > self.maxTimeStep:
672 672 self.flagDiscontinuousBlock = 1
673 673
674 674 return 1
675 675
676 676
677 677 def readNextBlock(self):
678 678 if not(self.__setNewBlock()):
679 679 return 0
680 680
681 681 if not(self.readBlock()):
682 682 return 0
683 683
684 684 return 1
685 685
686 686 def printInfo(self):
687 687
688 688 pass
689 689
690 690 def getData(self):
691 691
692 692 if self.flagNoMoreFiles:
693 693 self.dataOut.flagNoData = True
694 694 print 'Process finished'
695 695 return 0
696 696
697 697 self.flagDiscontinuousBlock = 0
698 698 self.flagIsNewBlock = 0
699 699
700 700 if not(self.readNextBlock()):
701 701 return 0
702 702
703 703 if self.data is None:
704 704 self.dataOut.flagNoData = True
705 705 return 0
706 706
707 707 self.dataOut.data = self.data
708 708 self.dataOut.data_header = self.data_header_dict
709 709 self.dataOut.utctime = self.utc
710 710
711 711 # self.dataOut.header = self.header_dict
712 712 # self.dataOut.expName = self.expName
713 713 # self.dataOut.nChannels = self.nChannels
714 714 # self.dataOut.timeZone = self.timeZone
715 715 # self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
716 716 # self.dataOut.comments = self.comments
717 717 # # self.dataOut.timeInterval = self.timeInterval
718 718 # self.dataOut.channelList = self.channelList
719 719 # self.dataOut.heightList = self.heightList
720 720 self.dataOut.flagNoData = False
721 721
722 722 return self.dataOut.data
723 723
724 724 def run(self, **kwargs):
725 725
726 726 if not(self.isConfig):
727 727 self.setup(**kwargs)
728 728 self.isConfig = True
729 729
730 730 self.getData()
731 731
732 732 class SpectraHeisWriter(Operation):
733 733 # set = None
734 734 setFile = None
735 735 idblock = None
736 736 doypath = None
737 737 subfolder = None
738 738
739 739 def __init__(self, **kwargs):
740 740 Operation.__init__(self, **kwargs)
741 741 self.wrObj = PyFits()
742 742 # self.dataOut = dataOut
743 743 self.nTotalBlocks=0
744 744 # self.set = None
745 745 self.setFile = None
746 746 self.idblock = 0
747 747 self.wrpath = None
748 748 self.doypath = None
749 749 self.subfolder = None
750 750 self.isConfig = False
751 751
752 752 def isNumber(str):
753 753 """
754 754 Checks whether the characters of a string can be converted to a number.
755 755 
756 756 Exceptions:
757 757 raised internally when the string cannot be converted to a number
758 758 Input:
759 759 str, string to analyze to determine whether it is convertible to a number
760 760 
761 761 Return:
762 762 True : if the string is numeric
763 763 False : if the string is not numeric
764 764 """
765 765 try:
766 766 float( str )
767 767 return True
768 768 except:
769 769 return False
770 770
771 771 def setup(self, dataOut, wrpath):
772 772
773 773 if not(os.path.exists(wrpath)):
774 774 os.mkdir(wrpath)
775 775
776 776 self.wrpath = wrpath
777 777 # self.setFile = 0
778 778 self.dataOut = dataOut
779 779
780 780 def putData(self):
781 781 name= time.localtime( self.dataOut.utctime)
782 782 ext=".fits"
783 783
784 784 if self.doypath == None:
785 785 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
786 786 self.doypath = os.path.join( self.wrpath, self.subfolder )
787 787 os.mkdir(self.doypath)
788 788
789 789 if self.setFile == None:
790 790 # self.set = self.dataOut.set
791 791 self.setFile = 0
792 792 # if self.set != self.dataOut.set:
793 793 ## self.set = self.dataOut.set
794 794 # self.setFile = 0
795 795
796 796 #make the filename
797 797 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
798 798
799 799 filename = os.path.join(self.wrpath,self.subfolder, thisFile)
800 800
801 801 idblock = numpy.array([self.idblock],dtype="int64")
802 802 header=self.wrObj.cFImage(idblock=idblock,
803 803 year=time.gmtime(self.dataOut.utctime).tm_year,
804 804 month=time.gmtime(self.dataOut.utctime).tm_mon,
805 805 day=time.gmtime(self.dataOut.utctime).tm_mday,
806 806 hour=time.gmtime(self.dataOut.utctime).tm_hour,
807 807 minute=time.gmtime(self.dataOut.utctime).tm_min,
808 808 second=time.gmtime(self.dataOut.utctime).tm_sec)
809 809
810 810 c=3E8
811 811 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
812 812 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
813 813
814 814 colList = []
815 815
816 816 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
817 817
818 818 colList.append(colFreq)
819 819
820 820 nchannel=self.dataOut.nChannels
821 821
822 822 for i in range(nchannel):
823 823 col = self.wrObj.writeData(name="PCh"+str(i+1),
824 824 format=str(self.dataOut.nFFTPoints)+'E',
825 825 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
826 826
827 827 colList.append(col)
828 828
829 829 data=self.wrObj.Ctable(colList=colList)
830 830
831 831 self.wrObj.CFile(header,data)
832 832
833 833 self.wrObj.wFile(filename)
834 834
835 835 #update the setFile
836 836 self.setFile += 1
837 837 self.idblock += 1
838 838
839 839 return 1
840 840
841 841 def run(self, dataOut, **kwargs):
842 842
843 843 if not(self.isConfig):
844 844
845 845 self.setup(dataOut, **kwargs)
846 846 self.isConfig = True
847 847
848 848 self.putData()
@@ -1,863 +1,863
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-com0419
5 5 '''
6 6
7 7 import os,sys
8 8 import time,datetime
9 9 import h5py
10 10 import numpy
11 11 import fnmatch
12 12 import re
13 13
14 14 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
15 15 from schainpy.model.data.jrodata import Voltage
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
17 17
18 18
19 19 def isNumber(str):
20 20 """
21 21 Checks whether the characters of a string can be converted to a number.
22 22 
23 23 Exceptions:
24 24 raised internally when the string cannot be converted to a number
25 25 Input:
26 26 str, string to analyze to determine whether it is convertible to a number
27 27 
28 28 Return:
29 29 True : if the string is numeric
30 30 False : if the string is not numeric
31 31 """
32 32 try:
33 33 float( str )
34 34 return True
35 35 except:
36 36 return False
37 37
38 38 def getFileFromSet(path, ext, set=None):
39 39 validFilelist = []
40 40 fileList = os.listdir(path)
41 41
42 42
43 43 if len(fileList) < 1:
44 44 return None
45 45
46 46 # 0 1234 567 89A BCDE
47 47 # H YYYY DDD SSS .ext
48 48
49 49 for thisFile in fileList:
50 50 try:
51 51 number= int(thisFile[6:16])
52 52
53 53 # year = int(thisFile[1:5])
54 54 # doy = int(thisFile[5:8])
55 55 except:
56 56 continue
57 57
58 58 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
59 59 continue
60 60
61 61 validFilelist.append(thisFile)
62 62
63 63 if len(validFilelist) < 1:
64 64 return None
65 65
66 66 validFilelist = sorted( validFilelist, key=str.lower )
67 67
68 68 if set == None:
69 69 return validFilelist[-1]
70 70
71 71 print "set =" ,set
72 72 for thisFile in validFilelist:
73 73 if set <= int(thisFile[6:16]):
74 74 print thisFile,int(thisFile[6:16])
75 75 return thisFile
76 76
77 77 return validFilelist[-1]
78 78
79 79 myfile = fnmatch.filter(validFilelist,'*%10d*'%(set))
80 80 #myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
81 81
82 82 if len(myfile)!= 0:
83 83 return myfile[0]
84 84 else:
85 85 filename = '*%10.10d%s'%(set,ext.lower())
86 86 print 'the filename %s does not exist'%filename
87 87 print '...going to the last file: '
88 88
89 89 if validFilelist:
90 90 validFilelist = sorted( validFilelist, key=str.lower )
91 91 return validFilelist[-1]
92 92
93 93 return None
94 94
95 95 def getlastFileFromPath(path, ext):
96 96 """
97 97 Filters fileList, keeping only the entries that match the "res-xxxxxx.ext" format,
98 98 and returns the last file of the resulting list.
99 99 
100 100 Input:
101 101 fileList : list containing every file (without path) of a given folder
102 102 ext : extension of the files contained in the folder
103 103 
104 104 Return:
105 105 The last file of the given folder, without its path.
106 106 """
107 107 validFilelist = []
108 108 fileList = os.listdir(path)
109 109
110 110 # 0 1234 567 89A BCDE
111 111 # H YYYY DDD SSS .ext
112 112
113 113 for thisFile in fileList:
114 114
115 115 try:
116 116 number= int(thisFile[6:16])
117 117 except:
118 118 print "There is a file or folder with a different format"
119 119 continue
121 121
122 122 # year = thisFile[1:5]
123 123 # if not isNumber(year):
124 124 # continue
125 125
126 126 # doy = thisFile[5:8]
127 127 # if not isNumber(doy):
128 128 # continue
129 129
130 130 number= int(number)
131 131 # year = int(year)
132 132 # doy = int(doy)
133 133
134 134 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
135 135 continue
136 136
137 137
138 138 validFilelist.append(thisFile)
139 139
140 140
141 141 if validFilelist:
142 142 validFilelist = sorted( validFilelist, key=str.lower )
143 143 return validFilelist[-1]
144 144
145 145 return None
146 146
147 147
148 148
149 149 class HFReader(ProcessingUnit):
150 150 '''
151 151 classdocs
152 152 '''
153 153 path = None
154 154 startDate= None
155 155 endDate = None
156 156 startTime= None
157 157 endTime = None
158 158 walk = None
159 159 isConfig = False
160 160 dataOut=None
161 161 nTries = 3
162 162 ext = ".hdf5"
163 163
164 164 def __init__(self, **kwargs):
165 165 '''
166 166 Constructor
167 167 '''
168 168 ProcessingUnit.__init__(self, **kwargs)
169 169
170 170 self.isConfig =False
171 171
172 172 self.datablock = None
173 173
174 174 self.filename_current=None
175 175
176 176 self.utc = 0
177 177
178 178 self.ext='.hdf5'
179 179
180 180 self.flagIsNewFile = 1
181 181
182 182 #-------------------------------------------------
183 183 self.fileIndex=None
184 184
185 185 self.profileIndex_offset=None
186 186
187 187 self.filenameList=[]
188 188
189 189 self.hfFilePointer= None
190 190
191 191 self.filename_online = None
192 192
193 193 self.status=True
194 194
195 195 self.flagNoMoreFiles= False
196 196
197 197 self.__waitForNewFile = 20
198 198
199 199
200 200 #--------------------------------------------------
201 201
202 202 self.dataOut = self.createObjByDefault()
203 203
204 204
205 205 def createObjByDefault(self):
206 206
207 207 dataObj = Voltage()
208 208
209 209 return dataObj
210 210
211 211 def setObjProperties(self):
212 212
213 213 pass
214 214
215 215 def getBlockDimension(self):
216 216 """
217 217 Computes the number of points to read per data block
218 218
219 219 Affected:
220 220 self.blocksize
221 221
222 222 Return:
223 223 None
224 224 """
225 225 pts2read =self.nChannels*self.nHeights*self.nProfiles
226 226 self.blocksize = pts2read
227 227
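# Example (a minimal sketch): with the values hardcoded in __readHeader() below
# (nChannels=2, nHeights=1000, nProfiles=100) the block size would be
#
#     pts2read = 2 * 1000 * 100   # = 200000 points (complex voltage samples) per block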
228 228 def __readHeader(self):
229 229
230 230 self.nProfiles = 100
231 231 self.nHeights = 1000
232 232 self.nChannels = 2
233 233 self.__firstHeigth=0
234 234 self.__nSamples=1000
235 235 self.__deltaHeigth=1.5
236 236 self.__sample_rate=1e5
237 237 #self.__frequency=2.72e6
238 238 #self.__frequency=3.64e6
239 239 self.__frequency=None
240 240 self.__online = False
241 241 self.filename_next_set=None
242 242
243 243 #print "Frequency of Operation:", self.__frequency
244 244
245 245
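# Example (a minimal sketch): values derived later in __setHeaderDO() from the
# hardcoded header above, shown here only to make the units explicit (assuming the
# height step is in km):
#
#     ippSeconds = 1.0 * 1000 / 1e5               # __nSamples / __sample_rate = 0.01 s
#     heights_km = 0 + numpy.arange(1000) * 1.5   # __firstHeigth + n * __deltaHeigth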
246 246 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
247 247 self.path = path
248 248 self.startDate = startDate
249 249 self.endDate = endDate
250 250 self.startTime = startTime
251 251 self.endTime = endTime
252 252 self.walk = walk
253 253
254 254 def __checkPath(self):
255 255 if os.path.exists(self.path):
256 256 self.status=1
257 257 else:
258 258 self.status=0
259 259 print 'Path %s does not exist'%self.path
260 260 return
261 261 return
262 262
263 263 def __selDates(self, hf_dirname_format):
264 264 try:
265 265 dir_hf_filename= self.path+"/"+hf_dirname_format
266 266 fp= h5py.File(dir_hf_filename,'r')
267 267 hipoc=fp['t'].value
268 268 fp.close()
269 269 date_time=datetime.datetime.utcfromtimestamp(hipoc)
270 270 # datetime objects cannot be sliced like strings; use date() instead
271 271 thisDate = date_time.date()
274 274 if (thisDate>=self.startDate and thisDate <= self.endDate):
275 275 return hf_dirname_format
276 276 except:
277 277 return None
278 278
279 279 def __findDataForDates(self,online=False):
280 280 if not(self.status):
281 281 return None
282 282
283 283 pat = '\d+.\d+'
284 284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
285 285 dirnameList = filter(lambda x:x!=None,dirnameList)
286 286 dirnameList = [x.string for x in dirnameList]
287 287 if not(online):
288 288
289 289 dirnameList = [self.__selDates(x) for x in dirnameList]
290 290 dirnameList = filter(lambda x:x!=None,dirnameList)
291 291
292 292 if len(dirnameList)>0:
293 293 self.status = 1
294 294 self.dirnameList = dirnameList
295 295 self.dirnameList.sort()
296 296
297 297 else:
298 298 self.status = 0
299 299 return None
300 300
301 301 def __getTimeFromData(self):
302 302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
303 303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
304 304 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
305 305 print '........................................'
306 306 filter_filenameList=[]
307 307 self.filenameList.sort()
308 308 for i in range(len(self.filenameList)-1):
309 309 filename=self.filenameList[i]
310 310 dir_hf_filename= filename
311 311 fp= h5py.File(dir_hf_filename,'r')
312 312 hipoc=fp['t'].value
313 313 hipoc=hipoc+self.timezone
314 314 date_time=datetime.datetime.utcfromtimestamp(hipoc)
315 315 fp.close()
316 316 # datetime objects cannot be sliced like strings; compare the datetime directly
317 317 this_time = date_time
323 323 if (this_time>=startDateTime_Reader and this_time <= endDateTime_Reader):
324 324 filter_filenameList.append(filename)
325 325 filter_filenameList.sort()
326 326 self.filenameList = filter_filenameList
327 327 return 1
328 328
329 329 def __getFilenameList(self):
330 330 #print "hola"
331 331 #print self.dirnameList
332 332 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
333 333 self.filenameList= dirList
334 334 #print self.filenameList
335 335 #print "pase",len(self.filenameList)
336 336
337 337 def __selectDataForTimes(self, online=False):
338 338
339 339 if not(self.status):
340 340 return None
341 341 #----------------
342 342 self.__getFilenameList()
343 343 #----------------
344 344 if not(online):
345 345 if not(self.all):
346 346 self.__getTimeFromData()
347 347 if len(self.filenameList)>0:
348 348 self.status=1
349 349 self.filenameList.sort()
350 350 else:
351 351 self.status=0
352 352 return None
353 353 else:
354 354 if self.set != None:
355 355
356 356 filename=getFileFromSet(self.path,self.ext,self.set)
357 357
358 358 if self.flag_nextfile==True:
359 359 self.dirnameList=[filename]
360 360 fullfilename=self.path+"/"+filename
361 361 self.filenameList=[fullfilename]
362 362 self.filename_next_set=int(filename[6:16])+10
363 363
364 364 self.flag_nextfile=False
365 365 else:
366 366 print filename
367 367 print "PRIMERA CONDICION"
368 368 #if self.filename_next_set== int(filename[6:16]):
369 369 print "TODO BIEN"
370 370
371 371 if filename == None:
372 372 raise ValueError, "filename is None (to be corrected)"
373 373
374 374 self.dirnameList=[filename]
375 375 fullfilename=self.path+"/"+filename
376 376 self.filenameList=[fullfilename]
377 377 self.filename_next_set=int(filename[6:16])+10
378 378 print "Setting next file",self.filename_next_set
379 379 self.set=int(filename[6:16])
380 380 if True:
381 381 pass
382 382 else:
383 383 print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO"
384 384
385 385 else:
386 386 filename =getlastFileFromPath(self.path,self.ext)
387 387
388 388 if self.flag_nextfile==True:
389 389 self.dirnameList=[filename]
390 390 fullfilename=self.path+"/"+filename
391 391 self.filenameList=[self.filenameList[-1]]
392 392 self.filename_next_set=int(filename[6:16])+10
393 393
394 394 self.flag_nextfile=False
395 395 else:
396 396 filename=getFileFromSet(self.path,self.ext,self.set)
397 397 print filename
398 398 print "PRIMERA CONDICION"
399 399 #if self.filename_next_set== int(filename[6:16]):
400 400 print "TODO BIEN"
401 401
402 402 if filename == None:
403 403 raise ValueError, "filename is None (to be corrected)"
404 404
405 405 self.dirnameList=[filename]
406 406 fullfilename=self.path+"/"+filename
407 407 self.filenameList=[fullfilename]
408 408 self.filename_next_set=int(filename[6:16])+10
409 409 print "Setting next file",self.filename_next_set
410 410 self.set=int(filename[6:16])
411 411 if True:
412 412 pass
413 413 else:
414 414 print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO"
415 415
416 416
417 417
418 def __searchFilesOffline(self,
418 def searchFilesOffLine(self,
419 419 path,
420 420 startDate,
421 421 endDate,
422 422 ext,
423 423 startTime=datetime.time(0,0,0),
424 424 endTime=datetime.time(23,59,59),
425 425 walk=True):
426 426
427 427 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
428 428
429 429 self.__checkPath()
430 430
431 431 self.__findDataForDates()
432 432 #print self.dirnameList
433 433
434 434 self.__selectDataForTimes()
435 435
436 436 for i in range(len(self.filenameList)):
437 437 print "%s"% (self.filenameList[i])
438 438
439 439 return
440 440
441 def __searchFilesOnline(self,
441 def searchFilesOnLine(self,
442 442 path,
443 443 expLabel= "",
444 444 ext=None,
445 445 startDate=None,
446 446 endDate=None,
447 447 walk=True,
448 448 set=None):
449 449
450 450
451 451 startDate = datetime.datetime.utcnow().date()
452 452 endDate = datetime.datetime.utcnow().date()
453 453
454 454 self.__setParameters(path=path,startDate=startDate,endDate=endDate,walk=walk)
455 455
456 456 self.__checkPath()
457 457
458 458 fullpath=path
459 459 print "%s folder was found: " %(fullpath )
460 460
461 461 if set == None:
462 462 self.set=None
463 463 filename =getlastFileFromPath(fullpath,ext)
464 464 startDate= datetime.datetime.utcnow().date
465 465 endDate= datetime.datetime.utcnow().date()
466 466 #
467 467 else:
468 468 filename= getFileFromSet(fullpath,ext,set)
469 469 startDate=None
470 470 endDate=None
471 471 #
472 472 if not (filename):
473 473 return None,None,None,None,None
474 474 #print "%s file was found" %(filename)
475 475
476 476 #
477 477 # dir_hf_filename= self.path+"/"+filename
478 478 # fp= h5py.File(dir_hf_filename,'r')
479 479 # hipoc=fp['t'].value
480 480 # fp.close()
481 481 # date_time=datetime.datetime.utcfromtimestamp(hipoc)
482 482 #
483 483 # year =int(date_time[0:4])
484 484 # month=int(date_time[5:7])
485 485 # dom =int(date_time[8:10])
486 486 # set= int(filename[4:10])
487 487 # self.set=set-1
488 488 #self.dirnameList=[filename]
489 489 filenameList= fullpath+"/"+filename
490 490 self.dirnameList=[filename]
491 491 self.filenameList=[filenameList]
492 492 self.flag_nextfile=True
493 493
494 494 #self.__findDataForDates(online=True)
495 495 #self.dirnameList=[self.dirnameList[-1]]
496 496 #print self.dirnameList
497 497 #self.__selectDataForTimes(online=True)
498 498 #return fullpath,filename,year,month,dom,set
499 499 return
500 500
501 501 def __setNextFile(self,online=False):
502 502 """
503 503 """
504 504 if not(online):
505 505 newFile = self.__setNextFileOffline()
506 506 else:
507 507 newFile = self.__setNextFileOnline()
508 508
509 509 if not(newFile):
510 510 return 0
511 511 return 1
512 512
513 513 def __setNextFileOffline(self):
514 514 """
515 515 """
516 516 idFile= self.fileIndex
517 517 while(True):
518 518 idFile += 1
519 519 if not (idFile < len(self.filenameList)):
520 520 self.flagNoMoreFiles = 1
521 521 print "No more Files"
522 522 return 0
523 523 filename = self.filenameList[idFile]
524 524 hfFilePointer =h5py.File(filename,'r')
525 525
526 526 epoc=hfFilePointer['t'].value
527 527 #this_time=datetime.datetime(year,month,dom,hour,min,sec)
528 528 break
529 529
530 530 self.flagIsNewFile = 1
531 531 self.fileIndex = idFile
532 532 self.filename = filename
533 533
534 534 self.hfFilePointer = hfFilePointer
535 535 hfFilePointer.close()
536 536 self.__t0=epoc
537 537 print "Setting the file: %s"%self.filename
538 538
539 539 return 1
540 540
541 541 def __setNextFileOnline(self):
542 542 """
543 543 """
544 544 print "SOY NONE",self.set
545 545 if self.set==None:
546 546 pass
547 547 else:
548 548 self.set +=10
549 549
550 550 filename = self.filenameList[0]#fullfilename
551 551 if self.filename_online != None:
552 552 self.__selectDataForTimes(online=True)
553 553 filename = self.filenameList[0]
554 554 while self.filename_online == filename:
555 555 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
556 556 time.sleep(self.__waitForNewFile)
557 557 #self.__findDataForDates(online=True)
558 558 self.set=self.filename_next_set
559 559 self.__selectDataForTimes(online=True)
560 560 filename = self.filenameList[0]
561 561 sizeoffile=os.path.getsize(filename)
562 562
563 563 #print filename
564 564 sizeoffile=os.path.getsize(filename)
565 565 if sizeoffile<1670240:
566 566 print "%s is not the rigth size"%filename
567 567 delay=50
568 568 print 'waiting %d seconds for delay...'%(delay)
569 569 time.sleep(delay)
570 570 sizeoffile=os.path.getsize(filename)
571 571 if sizeoffile<1670240:
572 572 delay=50
573 573 print 'waiting %d more seconds for delay...'%(delay)
574 574 time.sleep(delay)
575 575
576 576 sizeoffile=os.path.getsize(filename)
577 577 if sizeoffile<1670240:
578 578 delay=50
579 579 print 'waiting %d more seconds for delay...'%(delay)
580 580 time.sleep(delay)
581 581
582 582 try:
583 583 hfFilePointer=h5py.File(filename,'r')
584 584
585 585 except:
586 586 print "Error reading file %s"%filename
587 587
588 588 self.filename_online=filename
589 589 epoc=hfFilePointer['t'].value
590 590
591 591 self.hfFilePointer=hfFilePointer
592 592 hfFilePointer.close()
593 593 self.__t0=epoc
594 594
595 595
596 596 self.flagIsNewFile = 1
597 597 self.filename = filename
598 598
599 599 print "Setting the file: %s"%self.filename
600 600 return 1
601 601
602 602 def __getExpParameters(self):
603 603 if not(self.status):
604 604 return None
605 605
606 606 def setup(self,
607 607 path = None,
608 608 startDate = None,
609 609 endDate = None,
610 610 startTime = datetime.time(0,0,0),
611 611 endTime = datetime.time(23,59,59),
612 612 set = None,
613 613 expLabel = "",
614 614 ext = None,
615 615 all=0,
616 616 timezone=0,
617 617 online = False,
618 618 delay = 60,
619 619 walk = True):
620 620 '''
621 621 In this method we should set all initial parameters.
622 622
623 623 '''
624 624 if path==None:
625 625 raise ValueError,"The path is not valid"
626 626
627 627 if ext==None:
628 628 ext = self.ext
629 629
630 630 self.timezone= timezone
631 631 self.online= online
632 632 self.all=all
633 633 #if set==None:
634 634
635 635 #print set
636 636 if not(online):
637 637 print "Searching files in offline mode..."
638 638
639 self.__searchFilesOffline(path, startDate, endDate, ext, startTime, endTime, walk)
639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
640 640 else:
641 641 print "Searching files in online mode..."
642 self.__searchFilesOnline(path, walk,ext,set=set)
642 self.searchFilesOnLine(path, ext=ext, walk=walk, set=set)
643 643 if set==None:
644 644 pass
645 645 else:
646 646 self.set=set-10
647 647
648 648 # for nTries in range(self.nTries):
649 649 #
650 # fullpath,file,year,month,day,set = self.__searchFilesOnline(path=path,expLabel=expLabel,ext=ext, walk=walk,set=set)
650 # fullpath,file,year,month,day,set = self.searchFilesOnLine(path=path,expLabel=expLabel,ext=ext, walk=walk,set=set)
651 651 #
652 652 # if fullpath:
653 653 # break
654 654 # print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
655 655 # time.sleep(self.delay)
656 656 # if not(fullpath):
657 657 # print "There ins't valid files in %s" % path
658 658 # return None
659 659
660 660
661 661 if not(self.filenameList):
662 662 print "There is no files into the folder: %s"%(path)
663 663 sys.exit(-1)
664 664
665 665 self.__getExpParameters()
666 666
667 667
668 668 self.fileIndex = -1
669 669
670 670 self.__setNextFile(online)
671 671
672 672 self.__readMetadata()
673 673
674 674 self.__setLocalVariables()
675 675
676 676 self.__setHeaderDO()
677 677 #self.profileIndex_offset= 0
678 678
679 679 #self.profileIndex = self.profileIndex_offset
680 680
681 681 self.isConfig = True
682 682
683 683 def __readMetadata(self):
684 684 self.__readHeader()
685 685
686 686
687 687 def __setLocalVariables(self):
688 688
689 689 self.datablock = numpy.zeros((self.nChannels, self.nHeights,self.nProfiles), dtype = numpy.complex)
690 690 #
691 691
692 692
693 693
694 694 self.profileIndex = 9999
695 695
696 696
697 697 def __setHeaderDO(self):
698 698
699 699
700 700 self.dataOut.radarControllerHeaderObj = RadarControllerHeader()
701 701
702 702 self.dataOut.systemHeaderObj = SystemHeader()
703 703
704 704
705 705 #---------------------------------------------------------
706 706 self.dataOut.systemHeaderObj.nProfiles=100
707 707 self.dataOut.systemHeaderObj.nSamples=1000
708 708
709 709
710 710 SAMPLING_STRUCTURE=[('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 711 self.dataOut.radarControllerHeaderObj.samplingWindow=numpy.zeros((1,),SAMPLING_STRUCTURE)
712 712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0']=0
713 713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh']=1.5
714 714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']=1000
715 715 self.dataOut.radarControllerHeaderObj.nHeights=int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
716 716 self.dataOut.radarControllerHeaderObj.firstHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['h0']
717 717 self.dataOut.radarControllerHeaderObj.deltaHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['dh']
718 718 self.dataOut.radarControllerHeaderObj.samplesWin = self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']
719 719
720 720 self.dataOut.radarControllerHeaderObj.nWindows=1
721 721 self.dataOut.radarControllerHeaderObj.codetype=0
722 722 self.dataOut.radarControllerHeaderObj.numTaus=0
723 723 #self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
724 724
725 725
726 726 #self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 727 #self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 728 #self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
729 729
730 730 self.dataOut.radarControllerHeaderObj.code_size=0
731 731 self.dataOut.nBaud=0
732 732 self.dataOut.nCode=0
733 733 self.dataOut.nPairs=0
734 734
735 735
736 736 #---------------------------------------------------------
737 737
738 738 self.dataOut.type = "Voltage"
739 739
740 740 self.dataOut.data = None
741 741
742 742 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
743 743
744 744 self.dataOut.nProfiles = 1
745 745
746 746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
747 747
748 748 self.dataOut.channelList = range(self.nChannels)
749 749
750 750 #self.dataOut.channelIndexList = None
751 751
752 752 self.dataOut.flagNoData = True
753 753
754 754 #Set to TRUE if the data is discontinuous
755 755 self.dataOut.flagDiscontinuousBlock = False
756 756
757 757 self.dataOut.utctime = None
758 758
759 759 self.dataOut.timeZone = self.timezone
760 760
761 761 self.dataOut.dstFlag = 0
762 762
763 763 self.dataOut.errorCount = 0
764 764
765 765 self.dataOut.nCohInt = 1
766 766
767 767 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
768 768
769 769 self.dataOut.flagDecodeData = False #assumed: the data is already decoded
770 770
771 771 self.dataOut.flagDeflipData = False #assumed: the data is not flipped
772 772
773 773 self.dataOut.flagShiftFFT = False
774 774
775 775 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
776 776
777 777 #Time interval between profiles
778 778 #self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
779 779
780 780
781 781 self.dataOut.frequency = self.__frequency
782 782
783 783 self.dataOut.realtime = self.__online
784 784
785 785 def __hasNotDataInBuffer(self):
786 786
787 787 if self.profileIndex >= self.nProfiles:
788 788 return 1
789 789
790 790 return 0
791 791
792 792 def readNextBlock(self):
793 793 if not(self.__setNewBlock()):
794 794 return 0
795 795
796 796 if not(self.readBlock()):
797 797 return 0
798 798
799 799 return 1
800 800
801 801 def __setNewBlock(self):
802 802
803 803 if self.hfFilePointer==None:
804 804 return 0
805 805
806 806 if self.flagIsNewFile:
807 807 return 1
808 808
809 809 if self.profileIndex < self.nProfiles:
810 810 return 1
811 811
812 812 self.__setNextFile(self.online)
813 813
814 814 return 1
815 815
816 816
817 817
818 818 def readBlock(self):
819 819 fp=h5py.File(self.filename,'r')
820 820 #File pointer to the HDF5 file
821 821 ch0=(fp['ch0']).value #First channel (100,1000)--(profiles,heights)
822 822 ch1=(fp['ch1']).value #Second channel (100,1000)--(profiles,heights)
823 823 fp.close()
824 824 ch0= ch0.swapaxes(0,1) #First channel swapped to (1000,100)--(heights,profiles)
825 825 ch1= ch1.swapaxes(0,1) #Second channel swapped to (1000,100)--(heights,profiles)
826 826 self.datablock = numpy.array([ch0,ch1])
827 827 self.flagIsNewFile=0
828 828
829 829 self.profileIndex=0
830 830
831 831 return 1
832 832
833 833 def getData(self):
834 834 if self.flagNoMoreFiles:
835 835 self.dataOut.flagNoData = True
836 836 print 'Process finished'
837 837 return 0
838 838
839 839 if self.__hasNotDataInBuffer():
840 840 if not(self.readNextBlock()):
841 841 self.dataOut.flagNoData=True
842 842 return 0
843 843
844 844 ##############################
845 845 ##############################
846 846 self.dataOut.data = self.datablock[:,:,self.profileIndex]
847 847 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds*self.profileIndex
848 848 self.dataOut.profileIndex= self.profileIndex
849 849 self.dataOut.flagNoData=False
850 850 self.profileIndex +=1
851 851
852 852 return self.dataOut.data
853 853
854 854
855 855 def run(self, **kwargs):
856 856 '''
857 857 This method will be called many times so here you should put all your code
858 858 '''
859 859
860 860 if not self.isConfig:
861 861 self.setup(**kwargs)
862 862 self.isConfig = True
863 863 self.getData()
@@ -1,675 +1,675
1 1 '''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import glob
10 10 import fnmatch
11 11 import datetime
12 12 import time
13 13 import re
14 14 import h5py
15 15 import numpy
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
23 23 from schainpy.model.data.jrodata import Voltage
24 24 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
25 25 from numpy import imag
26 26
27 27 class AMISRReader(ProcessingUnit):
28 28 '''
29 29 classdocs
30 30 '''
31 31
32 32 def __init__(self):
33 33 '''
34 34 Constructor
35 35 '''
36 36
37 37 ProcessingUnit.__init__(self)
38 38
39 39 self.set = None
40 40 self.subset = None
41 41 self.extension_file = '.h5'
42 42 self.dtc_str = 'dtc'
43 43 self.dtc_id = 0
44 44 self.status = True
45 45 self.isConfig = False
46 46 self.dirnameList = []
47 47 self.filenameList = []
48 48 self.fileIndex = None
49 49 self.flagNoMoreFiles = False
50 50 self.flagIsNewFile = 0
51 51 self.filename = ''
52 52 self.amisrFilePointer = None
53 53
54 54
55 55 self.dataset = None
56 56
57 57
58 58
59 59
60 60 self.profileIndex = 0
61 61
62 62
63 63 self.beamCodeByFrame = None
64 64 self.radacTimeByFrame = None
65 65
66 66 self.dataset = None
67 67
68 68
69 69
70 70
71 71 self.__firstFile = True
72 72
73 73 self.buffer = None
74 74
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is it really necessary to create the output object in the initializer?
81 81 self.dataOut = Voltage()
82 82
83 83 def setup(self,path=None,
84 84 startDate=None,
85 85 endDate=None,
86 86 startTime=None,
87 87 endTime=None,
88 88 walk=True,
89 89 timezone='ut',
90 90 all=0,
91 91 code = None,
92 92 nCode = 0,
93 93 nBaud = 0,
94 94 online=False):
95 95
96 96 self.timezone = timezone
97 97 self.all = all
98 98 self.online = online
99 99
100 100 self.code = code
101 101 self.nCode = int(nCode)
102 102 self.nBaud = int(nBaud)
103 103
104 104
105 105
106 106 #self.findFiles()
107 107 if not(online):
108 108 #Offline file search
109 self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)
109 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
110 110 else:
111 self.__searchFilesOnline(path, startDate, endDate, startTime,endTime,walk)
111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
112 112
113 113 if not(self.filenameList):
114 114 print "There are no files in the folder: %s"%(path)
115 115
116 116 sys.exit(-1)
117 117
118 118 self.fileIndex = -1
119 119
120 120 self.readNextFile(online)
121 121
122 122 '''
123 123 Add code
124 124 '''
125 125 self.isConfig = True
126 126
127 127 pass
128 128
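# --- Hedged usage sketch (added note, not part of the original code) ---
# A minimal illustration of how this reader could be driven on its own, based only
# on the setup() keywords above; inside schainpy it is normally wired through a
# Project/controller. The path and dates are hypothetical placeholders.
#
# reader = AMISRReader()
# reader.run(path='/data/amisr', # hypothetical folder containing the dated experiment dirs
#            startDate=datetime.date(2015, 9, 1),
#            endDate=datetime.date(2015, 9, 1),
#            startTime=datetime.time(0, 0, 0),
#            endTime=datetime.time(23, 59, 59),
#            walk=True, timezone='ut', online=False)
# voltage = reader.dataOut # Voltage object filled by getData() on each call to run()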
129 129
130 130 def readAMISRHeader(self,fp):
131 131 header = 'Raw11/Data/RadacHeader'
132 132 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
134 134 #self.code = fp.get(header+'/Code') # NOT USED FOR THIS
135 135 self.frameCount = fp.get(header+'/FrameCount')# NOT USED FOR THIS
136 136 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USED FOR THIS
137 137 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA, OR USE THE DATA FOR THAT
138 138 self.pulseCount = fp.get(header+'/PulseCount')# NOT USED FOR THIS
139 139 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME IN FILE, AND CALCULATE THE REST WITH IPP*nindexprofile
140 140 self.timeCount = fp.get(header+'/TimeCount')# NOT USED FOR THIS
141 141 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USED FOR THIS
142 142 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
143 143 self.frequency = fp.get('Rx/Frequency')
144 144 txAus = fp.get('Raw11/Data/Pulsewidth')
145 145
146 146
147 147 self.nblocks = self.pulseCount.shape[0] #nblocks
148 148
149 149 self.nprofiles = self.pulseCount.shape[1] #nprofile
150 150 self.nsa = self.nsamplesPulse[0,0] #ngates
151 151 self.nchannels = self.beamCode.shape[1]
152 152 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
153 153 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
155 155
156 156 #filling radar controller header parameters
157 157 self.__ippKm = self.ippSeconds *.15*1e6 # in km
158 158 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
159 159 self.__txB = 0
160 160 nWindows=1
161 161 self.__nSamples = self.nsa
162 162 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
163 163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
164 164
165 165 #for now, until we understand why the saved code is different (code included even though there is no code in the tuf file)
166 166 #self.__codeType = 0
167 167 # self.__nCode = None
168 168 # self.__nBaud = None
169 169 self.__code = self.code
170 170 self.__codeType = 0
171 171 if self.code != None:
172 172 self.__codeType = 1
173 173 self.__nCode = self.nCode
174 174 self.__nBaud = self.nBaud
175 175 #self.__code = 0
176 176
177 177 #filling system header parameters
178 178 self.__nSamples = self.nsa
179 179 self.newProfiles = self.nprofiles/self.nchannels
180 180 self.__channelList = range(self.nchannels)
181 181
182 182 self.__frequency = self.frequency[0][0]
183 183
184 184
185 185
186 186 def createBuffers(self):
187 187
188 188 pass
189 189
190 190 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
191 191 self.path = path
192 192 self.startDate = startDate
193 193 self.endDate = endDate
194 194 self.startTime = startTime
195 195 self.endTime = endTime
196 196 self.walk = walk
197 197
198 198 def __checkPath(self):
199 199 if os.path.exists(self.path):
200 200 self.status = 1
201 201 else:
202 202 self.status = 0
203 203 print 'Path:%s does not exists'%self.path
204 204
205 205 return
206 206
207 207
208 208 def __selDates(self, amisr_dirname_format):
209 209 try:
210 210 year = int(amisr_dirname_format[0:4])
211 211 month = int(amisr_dirname_format[4:6])
212 212 dom = int(amisr_dirname_format[6:8])
213 213 thisDate = datetime.date(year,month,dom)
214 214
215 215 if (thisDate>=self.startDate and thisDate <= self.endDate):
216 216 return amisr_dirname_format
217 217 except:
218 218 return None
219 219
220 220
221 221 def __findDataForDates(self,online=False):
222 222
223 223 if not(self.status):
224 224 return None
225 225
226 226 pat = '\d+.\d+'
227 227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 228 dirnameList = filter(lambda x:x!=None,dirnameList)
229 229 dirnameList = [x.string for x in dirnameList]
230 230 if not(online):
231 231 dirnameList = [self.__selDates(x) for x in dirnameList]
232 232 dirnameList = filter(lambda x:x!=None,dirnameList)
233 233 if len(dirnameList)>0:
234 234 self.status = 1
235 235 self.dirnameList = dirnameList
236 236 self.dirnameList.sort()
237 237 else:
238 238 self.status = 0
239 239 return None
240 240
241 241 def __getTimeFromData(self):
242 242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
244 244
245 245 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
246 246 print '........................................'
247 247 filter_filenameList = []
248 248 self.filenameList.sort()
249 249 #for i in range(len(self.filenameList)-1):
250 250 for i in range(len(self.filenameList)):
251 251 filename = self.filenameList[i]
252 252 fp = h5py.File(filename,'r')
253 253 time_str = fp.get('Time/RadacTimeString')
254 254
255 255 startDateTimeStr_File = time_str[0][0].split('.')[0]
256 256 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
257 257 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
258 258
259 259 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
260 260 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
261 261 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
262 262
263 263 fp.close()
264 264
265 265 if self.timezone == 'lt':
266 266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
267 267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
268 268
269 269 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
270 270 #self.filenameList.remove(filename)
271 271 filter_filenameList.append(filename)
272 272
273 273 if (endDateTime_File>=endDateTime_Reader):
274 274 break
275 275
276 276
277 277 filter_filenameList.sort()
278 278 self.filenameList = filter_filenameList
279 279 return 1
280 280
281 281 def __filterByGlob1(self, dirName):
282 282 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
283 283 filter_files.sort()
284 284 filterDict = {}
285 285 filterDict.setdefault(dirName)
286 286 filterDict[dirName] = filter_files
287 287 return filterDict
288 288
289 289 def __getFilenameList(self, fileListInKeys, dirList):
290 290 for value in fileListInKeys:
291 291 dirName = value.keys()[0]
292 292 for file in value[dirName]:
293 293 filename = os.path.join(dirName, file)
294 294 self.filenameList.append(filename)
295 295
296 296
297 297 def __selectDataForTimes(self, online=False):
298 298 #the time filter is not implemented yet
299 299 if not(self.status):
300 300 return None
301 301
302 302 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
303 303
304 304 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
305 305
306 306 self.__getFilenameList(fileListInKeys, dirList)
307 307 if not(online):
308 308 #filter by time
309 309 if not(self.all):
310 310 self.__getTimeFromData()
311 311
312 312 if len(self.filenameList)>0:
313 313 self.status = 1
314 314 self.filenameList.sort()
315 315 else:
316 316 self.status = 0
317 317 return None
318 318
319 319 else:
320 320 #get the last file - 1
321 321 self.filenameList = [self.filenameList[-2]]
322 322
323 323 new_dirnameList = []
324 324 for dirname in self.dirnameList:
325 325 junk = numpy.array([dirname in x for x in self.filenameList])
326 326 junk_sum = junk.sum()
327 327 if junk_sum > 0:
328 328 new_dirnameList.append(dirname)
329 329 self.dirnameList = new_dirnameList
330 330 return 1
331 331
332 def __searchFilesOnline(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
333 333 endTime=datetime.time(23,59,59),walk=True):
334 334
335 335 if endDate ==None:
336 336 startDate = datetime.datetime.utcnow().date()
337 337 endDate = datetime.datetime.utcnow().date()
338 338
339 339 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
340 340
341 341 self.__checkPath()
342 342
343 343 self.__findDataForDates(online=True)
344 344
345 345 self.dirnameList = [self.dirnameList[-1]]
346 346
347 347 self.__selectDataForTimes(online=True)
348 348
349 349 return
350 350
351 351
352 def __searchFilesOffline(self,
352 def searchFilesOffLine(self,
353 353 path,
354 354 startDate,
355 355 endDate,
356 356 startTime=datetime.time(0,0,0),
357 357 endTime=datetime.time(23,59,59),
358 358 walk=True):
359 359
360 360 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
361 361
362 362 self.__checkPath()
363 363
364 364 self.__findDataForDates()
365 365
366 366 self.__selectDataForTimes()
367 367
368 368 for i in range(len(self.filenameList)):
369 369 print "%s" %(self.filenameList[i])
370 370
371 371 return
372 372
373 373 def __setNextFileOffline(self):
374 374 idFile = self.fileIndex
375 375
376 376 while (True):
377 377 idFile += 1
378 378 if not(idFile < len(self.filenameList)):
379 379 self.flagNoMoreFiles = 1
380 380 print "No more Files"
381 381 return 0
382 382
383 383 filename = self.filenameList[idFile]
384 384
385 385 amisrFilePointer = h5py.File(filename,'r')
386 386
387 387 break
388 388
389 389 self.flagIsNewFile = 1
390 390 self.fileIndex = idFile
391 391 self.filename = filename
392 392
393 393 self.amisrFilePointer = amisrFilePointer
394 394
395 395 print "Setting the file: %s"%self.filename
396 396
397 397 return 1
398 398
399 399
400 400 def __setNextFileOnline(self):
401 401 filename = self.filenameList[0]
402 402 if self.__filename_online != None:
403 403 self.__selectDataForTimes(online=True)
404 404 filename = self.filenameList[0]
405 405 wait = 0
406 406 while self.__filename_online == filename:
407 407 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
408 408 if wait == 5:
409 409 return 0
410 410 sleep(self.__waitForNewFile)
411 411 self.__selectDataForTimes(online=True)
412 412 filename = self.filenameList[0]
413 413 wait += 1
414 414
415 415 self.__filename_online = filename
416 416
417 417 self.amisrFilePointer = h5py.File(filename,'r')
418 418 self.flagIsNewFile = 1
419 419 self.filename = filename
420 420 print "Setting the file: %s"%self.filename
421 421 return 1
422 422
423 423
424 424 def readData(self):
425 425 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
426 426 re = buffer[:,:,:,0]
427 427 im = buffer[:,:,:,1]
428 428 dataset = re + im*1j
429 429 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
430 430 timeset = self.radacTime[:,0]
431 431 return dataset,timeset
432 432
433 433 def reshapeData(self):
434 434 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 435 channels = self.beamCodeByPulse[0,:]
436 436 nchan = self.nchannels
437 437 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
438 438 nblocks = self.nblocks
439 439 nsamples = self.nsa
440 440
441 441 #Dimensions : nChannels, nProfiles, nSamples
442 442 new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
443 443 ############################################
444 444
445 445 for thisChannel in range(nchan):
446 446 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
447 447
448 448
449 449 new_block = numpy.transpose(new_block, (1,0,2,3))
450 450 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
451 451
452 452 return new_block
453 453
454 454 def updateIndexes(self):
455 455
456 456 pass
457 457
458 458 def fillJROHeader(self):
459 459
460 460 #fill radar controller header
461 461 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
462 462 txA=self.__txA,
463 463 txB=0,
464 464 nWindows=1,
465 465 nHeights=self.__nSamples,
466 466 firstHeight=self.__firstHeight,
467 467 deltaHeight=self.__deltaHeight,
468 468 codeType=self.__codeType,
469 469 nCode=self.__nCode, nBaud=self.__nBaud,
470 470 code = self.__code,
471 471 fClock=1)
472 472
473 473
474 474
475 475 #fill system header
476 476 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
477 477 nProfiles=self.newProfiles,
478 478 nChannels=len(self.__channelList),
479 479 adcResolution=14,
480 480 pciDioBusWith=32)
481 481
482 482 self.dataOut.type = "Voltage"
483 483
484 484 self.dataOut.data = None
485 485
486 486 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
487 487
488 488 # self.dataOut.nChannels = 0
489 489
490 490 # self.dataOut.nHeights = 0
491 491
492 492 self.dataOut.nProfiles = self.newProfiles*self.nblocks
493 493
494 494 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 495 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
496 496 self.dataOut.heightList = ranges/1000.0 #km
497 497
498 498
499 499 self.dataOut.channelList = self.__channelList
500 500
501 501 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
502 502
503 503 # self.dataOut.channelIndexList = None
504 504
505 505 self.dataOut.flagNoData = True
506 506
507 507 #Set to TRUE if the data is discontinuous
508 508 self.dataOut.flagDiscontinuousBlock = False
509 509
510 510 self.dataOut.utctime = None
511 511
512 512 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
513 513 if self.timezone == 'lt':
514 514 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
515 515 else:
516 516 self.dataOut.timeZone = 0 #by default time is UTC
517 517
518 518 self.dataOut.dstFlag = 0
519 519
520 520 self.dataOut.errorCount = 0
521 521
522 522 self.dataOut.nCohInt = 1
523 523
524 524 self.dataOut.flagDecodeData = False #assumed: the data is already decoded
525 525
526 526 self.dataOut.flagDeflipData = False #assumed: the data is not flipped
527 527
528 528 self.dataOut.flagShiftFFT = False
529 529
530 530 self.dataOut.ippSeconds = self.ippSeconds
531 531
532 532 #Time interval between profiles
533 533 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
534 534
535 535 self.dataOut.frequency = self.__frequency
536 536
537 537 self.dataOut.realtime = self.online
538 538 pass
539 539
540 540 def readNextFile(self,online=False):
541 541
542 542 if not(online):
543 543 newFile = self.__setNextFileOffline()
544 544 else:
545 545 newFile = self.__setNextFileOnline()
546 546
547 547 if not(newFile):
548 548 return 0
549 549
550 550 #if self.__firstFile:
551 551 self.readAMISRHeader(self.amisrFilePointer)
552 552 self.createBuffers()
553 553 self.fillJROHeader()
554 554 #self.__firstFile = False
555 555
556 556
557 557
558 558 self.dataset,self.timeset = self.readData()
559 559
560 560 if self.endDate!=None:
561 561 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
562 562 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
563 563 startDateTimeStr_File = time_str[0][0].split('.')[0]
564 564 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
565 565 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
566 566 if self.timezone == 'lt':
567 567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
568 568 if (startDateTime_File>endDateTime_Reader):
569 569 return 0
570 570
571 571 self.jrodataset = self.reshapeData()
572 572 #----self.updateIndexes()
573 573 self.profileIndex = 0
574 574
575 575 return 1
576 576
577 577
578 578 def __hasNotDataInBuffer(self):
579 579 if self.profileIndex >= (self.newProfiles*self.nblocks):
580 580 return 1
581 581 return 0
582 582
583 583
584 584 def getData(self):
585 585
586 586 if self.flagNoMoreFiles:
587 587 self.dataOut.flagNoData = True
588 588 print 'Process finished'
589 589 return 0
590 590
591 591 if self.__hasNotDataInBuffer():
592 592 if not (self.readNextFile(self.online)):
593 593 return 0
594 594
595 595
596 596 if self.dataset is None: # set this condition when there is no data left to read
597 597 self.dataOut.flagNoData = True
598 598 return 0
599 599
600 600 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
601 601
602 602 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
603 603
604 604 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
605 605 #check the JRO data basic header and see whether it is compatible with this value
606 606 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
607 607 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
608 608 indexblock = self.profileIndex/self.newProfiles
609 609 #print indexblock, indexprof
610 610 self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
611 611 self.dataOut.profileIndex = self.profileIndex
612 612 self.dataOut.flagNoData = False
613 613 # if indexprof == 0:
614 614 # print self.dataOut.utctime
615 615
616 616 self.profileIndex += 1
617 617
618 618 return self.dataOut.data
619 619
620 620
621 621 def run(self, **kwargs):
622 622 '''
623 623 This method will be called many times so here you should put all your code
624 624 '''
625 625
626 626 if not self.isConfig:
627 627 self.setup(**kwargs)
628 628 self.isConfig = True
629 629
630 630 self.getData()
631 631
632 632 class Writer(Operation):
633 633 '''
634 634 classdocs
635 635 '''
636 636
637 637 def __init__(self):
638 638 '''
639 639 Constructor
640 640 '''
641 641 self.dataOut = None
642 642
643 643 self.isConfig = False
644 644
645 645 def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
646 646 '''
647 647 In this method we should set all initial parameters.
648 648
649 649 Input:
650 650 dataIn : Input data will also be the output data
651 651
652 652 '''
653 653 self.dataOut = dataIn
654 654
655 655
656 656
657 657
658 658
659 659 self.isConfig = True
660 660
661 661 return
662 662
663 663 def run(self, dataIn, **kwargs):
664 664 '''
665 665 This method will be called many times so here you should put all your code
666 666
667 667 Inputs:
668 668
669 669 dataIn : object with the data
670 670
671 671 '''
672 672
673 673 if not self.isConfig:
674 674 self.setup(dataIn, **kwargs)
675 675 No newline at end of file
@@ -1,1095 +1,1095
1 1 import numpy
2 2 import time
3 3 import os
4 4 import h5py
5 5 import re
6 6 import datetime
7 7
8 8 from schainpy.model.data.jrodata import *
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 10 # from jroIO_base import *
11 11 from schainpy.model.io.jroIO_base import *
12 12 import schainpy
13 13
14 14
15 15 class ParamReader(ProcessingUnit):
16 16 '''
17 17 Reads HDF5 format files
18 18
19 19 path
20 20
21 21 startDate
22 22
23 23 endDate
24 24
25 25 startTime
26 26
27 27 endTime
28 28 '''
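# --- Hedged usage sketch (added note, not part of the original code) ---
# ParamReader.run() forwards its keyword arguments to setup(), which expects at
# least path, startDate, endDate, startTime, endTime and walk ('ext' and
# 'timezone' are optional). The path and dates below are hypothetical placeholders.
#
# reader = ParamReader()
# reader.run(path='/data/param_hdf5', # hypothetical folder with d<YYYY><DOY> subfolders
#            startDate=datetime.date(2016, 1, 1),
#            endDate=datetime.date(2016, 1, 2),
#            startTime=datetime.time(0, 0, 0),
#            endTime=datetime.time(23, 59, 59),
#            walk=True, timezone='lt')
# params = reader.dataOut # Parameters object populated block by block on each run()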
29 29
30 30 ext = ".hdf5"
31 31
32 32 optchar = "D"
33 33
34 34 timezone = None
35 35
36 36 startTime = None
37 37
38 38 endTime = None
39 39
40 40 fileIndex = None
41 41
42 42 utcList = None #To select data in the utctime list
43 43
44 44 blockList = None #List of blocks to be read from the file
45 45
46 46 blocksPerFile = None #Number of blocks to be read
47 47
48 48 blockIndex = None
49 49
50 50 path = None
51 51
52 52 #List of Files
53 53
54 54 filenameList = None
55 55
56 56 datetimeList = None
57 57
58 58 #Hdf5 File
59 59
60 60 listMetaname = None
61 61
62 62 listMeta = None
63 63
64 64 listDataname = None
65 65
66 66 listData = None
67 67
68 68 listShapes = None
69 69
70 70 fp = None
71 71
72 72 #dataOut reconstruction
73 73
74 74 dataOut = None
75 75
76 76
77 77 def __init__(self, **kwargs):
78 78 ProcessingUnit.__init__(self, **kwargs)
79 79 self.dataOut = Parameters()
80 80 return
81 81
82 82 def setup(self, **kwargs):
83 83
84 84 path = kwargs['path']
85 85 startDate = kwargs['startDate']
86 86 endDate = kwargs['endDate']
87 87 startTime = kwargs['startTime']
88 88 endTime = kwargs['endTime']
89 89 walk = kwargs['walk']
90 90 if kwargs.has_key('ext'):
91 91 ext = kwargs['ext']
92 92 else:
93 93 ext = '.hdf5'
94 94 if kwargs.has_key('timezone'):
95 95 self.timezone = kwargs['timezone']
96 96 else:
97 97 self.timezone = 'lt'
98 98
99 99 print "[Reading] Searching files in offline mode ..."
100 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
100 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
101 101 startTime=startTime, endTime=endTime,
102 102 ext=ext, walk=walk)
103 103
104 104 if not(filenameList):
105 105 print "There are no files in the folder: %s"%(path)
106 106 sys.exit(-1)
107 107
108 108 self.fileIndex = -1
109 109 self.startTime = startTime
110 110 self.endTime = endTime
111 111
112 112 self.__readMetadata()
113 113
114 114 self.__setNextFileOffline()
115 115
116 116 return
117 117
118 def __searchFilesOffLine(self,
118 def searchFilesOffLine(self,
119 119 path,
120 120 startDate=None,
121 121 endDate=None,
122 122 startTime=datetime.time(0,0,0),
123 123 endTime=datetime.time(23,59,59),
124 124 ext='.hdf5',
125 125 walk=True):
126 126
127 127 expLabel = ''
128 128 self.filenameList = []
129 129 self.datetimeList = []
130 130
131 131 pathList = []
132 132
133 133 JRODataObj = JRODataReader()
134 134 dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
135 135
136 136 if dateList == []:
137 137 print "[Reading] No *%s files in %s from %s to %s"%(ext, path,
138 138 datetime.datetime.combine(startDate,startTime).ctime(),
139 139 datetime.datetime.combine(endDate,endTime).ctime())
140 140
141 141 return None, None
142 142
143 143 if len(dateList) > 1:
144 144 print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)
145 145 else:
146 146 print "[Reading] data was found for the date %s" %(dateList[0])
147 147
148 148 filenameList = []
149 149 datetimeList = []
150 150
151 151 #----------------------------------------------------------------------------------
152 152
153 153 for thisPath in pathList:
154 154 # thisPath = pathList[pathDict[file]]
155 155
156 156 fileList = glob.glob1(thisPath, "*%s" %ext)
157 157 fileList.sort()
158 158
159 159 for file in fileList:
160 160
161 161 filename = os.path.join(thisPath,file)
162 162
163 163 if not isFileInDateRange(filename, startDate, endDate):
164 164 continue
165 165
166 166 thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
167 167
168 168 if not(thisDatetime):
169 169 continue
170 170
171 171 filenameList.append(filename)
172 172 datetimeList.append(thisDatetime)
173 173
174 174 if not(filenameList):
175 175 print "[Reading] No file was found in time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
176 176 return None, None
177 177
178 178 print "[Reading] %d file(s) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
179 179 print
180 180
181 181 for i in range(len(filenameList)):
182 182 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
183 183
184 184 self.filenameList = filenameList
185 185 self.datetimeList = datetimeList
186 186
187 187 return pathList, filenameList
188 188
189 189 def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime):
190 190
191 191 """
192 192 Returns the file datetime if the data file falls within the specified time range.
193 193
194 194 Inputs:
195 195 filename : full name of the data file, in Jicamarca format (.r)
196 196
197 197 startDate : start date of the selected range, as datetime.date
198 198
199 199 endDate : end date of the selected range, as datetime.date
200 200
201 201 startTime : start time of the selected range, as datetime.time
202 202
203 203 endTime : end time of the selected range, as datetime.time
204 204
205 205 Return:
206 206 datetime.datetime : the file's start datetime if the data file contains data
207 207 within the specified range, otherwise None.
208 208
209 209 Exceptions:
210 210 If the file does not exist or cannot be opened
211 211 If the header cannot be read.
212 212
213 213 """
214 214
215 215 try:
216 216 fp = h5py.File(filename,'r')
217 217 grp1 = fp['Data']
218 218
219 219 except IOError:
220 220 traceback.print_exc()
221 221 raise IOError, "The file %s can't be opened" %(filename)
222 222 #chino rata
223 223 #In case has utctime attribute
224 224 grp2 = grp1['utctime']
225 225 # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time
226 226 thisUtcTime = grp2.value[0]
227 227
228 228 fp.close()
229 229
230 230 if self.timezone == 'lt':
231 231 thisUtcTime -= 5*3600
232 232
233 233 thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
234 234 # thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0])
235 235 thisDate = thisDatetime.date()
236 236 thisTime = thisDatetime.time()
237 237
238 238 startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds()
239 239 endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds()
240 240
241 241 #General case
242 242 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
243 243 #-----------o----------------------------o-----------
244 244 # startTime endTime
245 245
246 246 if endTime >= startTime:
247 247 thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
248 248 if numpy.any(thisUtcLog): #If there is one block between the hours mentioned
249 249 return thisDatetime
250 250 return None
251 251
252 252 #If endTime < startTime then endTime belongs to the next day
253 253 #<<<<<<<<<<<o o>>>>>>>>>>>
254 254 #-----------o----------------------------o-----------
255 255 # endTime startTime
256 256
257 257 if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
258 258 return None
259 259
260 260 if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
261 261 return None
262 262
263 263 if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
264 264 return None
265 265
266 266 return thisDatetime
267 267
268 268 def __setNextFileOffline(self):
269 269
270 270 self.fileIndex += 1
271 271 idFile = self.fileIndex
272 272
273 273 if not(idFile < len(self.filenameList)):
274 274 print "No more Files"
275 275 return 0
276 276
277 277 filename = self.filenameList[idFile]
278 278
279 279 filePointer = h5py.File(filename,'r')
280 280
281 281 self.filename = filename
282 282
283 283 self.fp = filePointer
284 284
285 285 print "Setting the file: %s"%self.filename
286 286
287 287 # self.__readMetadata()
288 288 self.__setBlockList()
289 289 self.__readData()
290 290 # self.nRecords = self.fp['Data'].attrs['blocksPerFile']
291 291 # self.nRecords = self.fp['Data'].attrs['nRecords']
292 292 self.blockIndex = 0
293 293 return 1
294 294
295 295 def __setBlockList(self):
296 296 '''
297 297 Selects the data within the times defined
298 298
299 299 self.fp
300 300 self.startTime
301 301 self.endTime
302 302
303 303 self.blockList
304 304 self.blocksPerFile
305 305
306 306 '''
307 307 fp = self.fp
308 308 startTime = self.startTime
309 309 endTime = self.endTime
310 310
311 311 grp = fp['Data']
312 312 thisUtcTime = grp['utctime'].value.astype(numpy.float)[0]
313 313
314 314 #ERROOOOR
315 315 if self.timezone == 'lt':
316 316 thisUtcTime -= 5*3600
317 317
318 318 thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
319 319
320 320 thisDate = thisDatetime.date()
321 321 thisTime = thisDatetime.time()
322 322
323 323 startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
324 324 endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
325 325
326 326 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
327 327
328 328 self.blockList = ind
329 329 self.blocksPerFile = len(ind)
330 330
331 331 return
332 332
333 333 def __readMetadata(self):
334 334 '''
335 335 Reads Metadata
336 336
337 337 self.pathMeta
338 338
339 339 self.listShapes
340 340 self.listMetaname
341 341 self.listMeta
342 342
343 343 '''
344 344
345 345 # grp = self.fp['Data']
346 346 # pathMeta = os.path.join(self.path, grp.attrs['metadata'])
347 347 #
348 348 # if pathMeta == self.pathMeta:
349 349 # return
350 350 # else:
351 351 # self.pathMeta = pathMeta
352 352 #
353 353 # filePointer = h5py.File(self.pathMeta,'r')
354 354 # groupPointer = filePointer['Metadata']
355 355
356 356 filename = self.filenameList[0]
357 357
358 358 fp = h5py.File(filename,'r')
359 359
360 360 gp = fp['Metadata']
361 361
362 362 listMetaname = []
363 363 listMetadata = []
364 364 for item in gp.items():
365 365 name = item[0]
366 366
367 367 if name=='array dimensions':
368 368 table = gp[name][:]
369 369 listShapes = {}
370 370 for shapes in table:
371 371 listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4],shapes[5]])
372 372 else:
373 373 data = gp[name].value
374 374 listMetaname.append(name)
375 375 listMetadata.append(data)
376 376
377 377 # if name=='type':
378 378 # self.__initDataOut(data)
379 379
380 380 self.listShapes = listShapes
381 381 self.listMetaname = listMetaname
382 382 self.listMeta = listMetadata
383 383
384 384 fp.close()
385 385 return
386 386
387 387 def __readData(self):
388 388 grp = self.fp['Data']
389 389 listdataname = []
390 390 listdata = []
391 391
392 392 for item in grp.items():
393 393 name = item[0]
394 394 listdataname.append(name)
395 395
396 396 array = self.__setDataArray(grp[name],self.listShapes[name])
397 397 listdata.append(array)
398 398
399 399 self.listDataname = listdataname
400 400 self.listData = listdata
401 401 return
402 402
403 403 def __setDataArray(self, dataset, shapes):
404 404
405 405 nDims = shapes[0]
406 406
407 407 nDim2 = shapes[1] #Dimension 0
408 408
409 409 nDim1 = shapes[2] #Dimension 1, number of Points or Parameters
410 410
411 411 nDim0 = shapes[3] #Dimension 2, number of samples or ranges
412 412
413 413 mode = shapes[4] #Mode of storing
414 414
415 415 blockList = self.blockList
416 416
417 417 blocksPerFile = self.blocksPerFile
418 418
419 419 #Depending on what mode the data was stored
420 420 if mode == 0: #Divided in channels
421 421 arrayData = dataset.value.astype(numpy.float)[0][blockList]
422 422 if mode == 1: #Divided in parameter
423 423 strds = 'table'
424 424 nDatas = nDim1
425 425 newShapes = (blocksPerFile,nDim2,nDim0)
426 426 elif mode==2: #Concatenated in a table
427 427 strds = 'table0'
428 428 arrayData = dataset[strds].value
429 429 #Selecting part of the dataset
430 430 utctime = arrayData[:,0]
431 431 u, indices = numpy.unique(utctime, return_index=True)
432 432
433 433 if blockList.size != indices.size:
434 434 indMin = indices[blockList[0]]
435 435 if blockList[1] + 1 >= indices.size:
436 436 arrayData = arrayData[indMin:,:]
437 437 else:
438 438 indMax = indices[blockList[1] + 1]
439 439 arrayData = arrayData[indMin:indMax,:]
440 440 return arrayData
441 441
442 442 # One dimension
443 443 if nDims == 0:
444 444 arrayData = dataset.value.astype(numpy.float)[0][blockList]
445 445
446 446 # Two dimensions
447 447 elif nDims == 2:
448 448 arrayData = numpy.zeros((blocksPerFile,nDim1,nDim0))
449 449 newShapes = (blocksPerFile,nDim0)
450 450 nDatas = nDim1
451 451
452 452 for i in range(nDatas):
453 453 data = dataset[strds + str(i)].value
454 454 arrayData[:,i,:] = data[blockList,:]
455 455
456 456 # Three dimensions
457 457 else:
458 458 arrayData = numpy.zeros((blocksPerFile,nDim2,nDim1,nDim0))
459 459 for i in range(nDatas):
460 460
461 461 data = dataset[strds + str(i)].value
462 462
463 463 for b in range(blockList.size):
464 464 arrayData[b,:,i,:] = data[:,:,blockList[b]]
465 465
466 466 return arrayData
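# Layout assumed by __setDataArray, inferred from the access pattern above and added
# here only as a documentation aid: mode 0 reads the dataset value directly and indexes
# it by block; mode 1 expects one sub-dataset per parameter named 'table0', 'table1', ...
# with blocks along the first axis (2D) or the last axis (3D); mode 2 expects a single
# 'table0' whose first column is utctime, which is used to split the rows into blocks.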
467 467
468 468 def __setDataOut(self):
469 469 listMeta = self.listMeta
470 470 listMetaname = self.listMetaname
471 471 listDataname = self.listDataname
472 472 listData = self.listData
473 473 listShapes = self.listShapes
474 474
475 475 blockIndex = self.blockIndex
476 476 # blockList = self.blockList
477 477
478 478 for i in range(len(listMeta)):
479 479 setattr(self.dataOut,listMetaname[i],listMeta[i])
480 480
481 481 for j in range(len(listData)):
482 482 nShapes = listShapes[listDataname[j]][0]
483 483 mode = listShapes[listDataname[j]][4]
484 484 if nShapes == 1:
485 485 setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
486 486 elif nShapes > 1:
487 487 setattr(self.dataOut,listDataname[j],listData[j][blockIndex,:])
488 488 elif mode==0:
489 489 setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
490 490 #Mode Meteors
491 491 elif mode ==2:
492 492 selectedData = self.__selectDataMode2(listData[j], blockIndex)
493 493 setattr(self.dataOut, listDataname[j], selectedData)
494 494 return
495 495
496 496 def __selectDataMode2(self, data, blockIndex):
497 497 utctime = data[:,0]
498 498 aux, indices = numpy.unique(utctime, return_inverse=True)
499 499 selInd = numpy.where(indices == blockIndex)[0]
500 500 selData = data[selInd,:]
501 501
502 502 return selData
503 503
504 504 def getData(self):
505 505
506 506 # if self.flagNoMoreFiles:
507 507 # self.dataOut.flagNoData = True
508 508 # print 'Process finished'
509 509 # return 0
510 510 #
511 511 if self.blockIndex==self.blocksPerFile:
512 512 if not( self.__setNextFileOffline() ):
513 513 self.dataOut.flagNoData = True
514 514 return 0
515 515
516 516 # if self.datablock == None: # set this condition when there is no data left to read
517 517 # self.dataOut.flagNoData = True
518 518 # return 0
519 519 # self.__readData()
520 520 self.__setDataOut()
521 521 self.dataOut.flagNoData = False
522 522
523 523 self.blockIndex += 1
524 524
525 525 return
526 526
527 527 def run(self, **kwargs):
528 528
529 529 if not(self.isConfig):
530 530 self.setup(**kwargs)
531 531 # self.setObjProperties()
532 532 self.isConfig = True
533 533
534 534 self.getData()
535 535
536 536 return
537 537
538 538 class ParamWriter(Operation):
539 539 '''
540 540 HDF5 Writer, stores parameters data in HDF5 format files
541 541
542 542 path: path where the files will be stored
543 543
544 544 blocksPerFile: number of blocks that will be saved in per HDF5 format file
545 545
546 546 mode: selects the data stacking mode: '0' channels, '1' parameters, '2' table (for meteors)
547 547
548 548 metadataList: list of attributes that will be stored as metadata
549 549
550 550 dataList: list of attributes that will be stored as data
551 551
552 552 '''
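# --- Hedged usage sketch (added note, not part of the original code) ---
# ParamWriter is an Operation: run() receives the dataOut of the upstream unit plus
# the options documented above. The output path and the attribute names in
# metadataList/dataList below are hypothetical placeholders.
#
# writer = ParamWriter()
# for each processed block:
#     writer.run(dataOut, # Parameters object produced upstream
#                path='/out/hdf5', # hypothetical output directory
#                blocksPerFile=10,
#                metadataList=['heightList', 'timeZone'],
#                dataList=['data_param', 'utctime'],
#                mode=1)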
553 553
554 554
555 555 ext = ".hdf5"
556 556
557 557 optchar = "D"
558 558
559 559 metaoptchar = "M"
560 560
561 561 metaFile = None
562 562
563 563 filename = None
564 564
565 565 path = None
566 566
567 567 setFile = None
568 568
569 569 fp = None
570 570
571 571 grp = None
572 572
573 573 ds = None
574 574
575 575 firsttime = True
576 576
577 577 #Configurations
578 578
579 579 blocksPerFile = None
580 580
581 581 blockIndex = None
582 582
583 583 dataOut = None
584 584
585 585 #Data Arrays
586 586
587 587 dataList = None
588 588
589 589 metadataList = None
590 590
591 591 # arrayDim = None
592 592
593 593 dsList = None #List of dictionaries with dataset properties
594 594
595 595 tableDim = None
596 596
597 597 # dtype = [('arrayName', 'S20'),('nChannels', 'i'), ('nPoints', 'i'), ('nSamples', 'i'),('mode', 'b')]
598 598
599 599 dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
600 600
601 601 currentDay = None
602 602
603 603 lastTime = None
604 604
605 605 def __init__(self, **kwargs):
606 606 Operation.__init__(self, **kwargs)
607 607 self.isConfig = False
608 608 return
609 609
610 610 def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, **kwargs):
611 611 self.path = path
612 612 self.blocksPerFile = blocksPerFile
613 613 self.metadataList = metadataList
614 614 self.dataList = dataList
615 615 self.dataOut = dataOut
616 616 self.mode = mode
617 617
618 618 if self.mode is not None:
619 619 self.mode = numpy.zeros(len(self.dataList)) + mode
620 620 else:
621 621 self.mode = numpy.ones(len(self.dataList))
622 622
623 623 arrayDim = numpy.zeros((len(self.dataList),5))
624 624
625 625 #Table dimensions
626 626 dtype0 = self.dtype
627 627 tableList = []
628 628
629 629 #Dictionary and list of tables
630 630 dsList = []
631 631
632 632 for i in range(len(self.dataList)):
633 633 dsDict = {}
634 634 dataAux = getattr(self.dataOut, self.dataList[i])
635 635 dsDict['variable'] = self.dataList[i]
636 636 #--------------------- Conditionals ------------------------
637 637 #There is no data
638 638 if dataAux is None:
639 639 return 0
640 640
641 641 #Not array, just a number
642 642 #Mode 0
643 643 if type(dataAux)==float or type(dataAux)==int:
644 644 dsDict['mode'] = 0
645 645 dsDict['nDim'] = 0
646 646 arrayDim[i,0] = 0
647 647 dsList.append(dsDict)
648 648
649 649 #Mode 2: meteors
650 650 elif mode[i] == 2:
651 651 # dsDict['nDim'] = 0
652 652 dsDict['dsName'] = 'table0'
653 653 dsDict['mode'] = 2 # Mode meteors
654 654 dsDict['shape'] = dataAux.shape[-1]
655 655 dsDict['nDim'] = 0
656 656 dsDict['dsNumber'] = 1
657 657
658 658 arrayDim[i,3] = dataAux.shape[-1]
659 659 arrayDim[i,4] = mode[i] #Mode the data was stored
660 660
661 661 dsList.append(dsDict)
662 662
663 663 #Mode 1
664 664 else:
665 665 arrayDim0 = dataAux.shape #Data dimensions
666 666 arrayDim[i,0] = len(arrayDim0) #Number of array dimensions
667 667 arrayDim[i,4] = mode[i] #Mode the data was stored
668 668
669 669 strtable = 'table'
670 670 dsDict['mode'] = 1 # Mode parameters
671 671
672 672 # Three-dimension arrays
673 673 if len(arrayDim0) == 3:
674 674 arrayDim[i,1:-1] = numpy.array(arrayDim0)
675 675 nTables = int(arrayDim[i,2])
676 676 dsDict['dsNumber'] = nTables
677 677 dsDict['shape'] = arrayDim[i,2:4]
678 678 dsDict['nDim'] = 3
679 679
680 680 for j in range(nTables):
681 681 dsDict = dsDict.copy()
682 682 dsDict['dsName'] = strtable + str(j)
683 683 dsList.append(dsDict)
684 684
685 685 # Two-dimension arrays
686 686 elif len(arrayDim0) == 2:
687 687 arrayDim[i,2:-1] = numpy.array(arrayDim0)
688 688 nTables = int(arrayDim[i,2])
689 689 dsDict['dsNumber'] = nTables
690 690 dsDict['shape'] = arrayDim[i,3]
691 691 dsDict['nDim'] = 2
692 692
693 693 for j in range(nTables):
694 694 dsDict = dsDict.copy()
695 695 dsDict['dsName'] = strtable + str(j)
696 696 dsList.append(dsDict)
697 697
698 698 # One-dimension arrays
699 699 elif len(arrayDim0) == 1:
700 700 arrayDim[i,3] = arrayDim0[0]
701 701 dsDict['shape'] = arrayDim0[0]
702 702 dsDict['dsNumber'] = 1
703 703 dsDict['dsName'] = strtable + str(0)
704 704 dsDict['nDim'] = 1
705 705 dsList.append(dsDict)
706 706
707 707 table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0)
708 708 tableList.append(table)
709 709
710 710 # self.arrayDim = arrayDim
711 711 self.dsList = dsList
712 712 self.tableDim = numpy.array(tableList, dtype = dtype0)
713 713 self.blockIndex = 0
714 714
715 715 timeTuple = time.localtime(dataOut.utctime)
716 716 self.currentDay = timeTuple.tm_yday
717 717 return 1
718 718
719 719 def putMetadata(self):
720 720
721 721 fp = self.createMetadataFile()
722 722 self.writeMetadata(fp)
723 723 fp.close()
724 724 return
725 725
726 726 def createMetadataFile(self):
727 727 ext = self.ext
728 728 path = self.path
729 729 setFile = self.setFile
730 730
731 731 timeTuple = time.localtime(self.dataOut.utctime)
732 732
733 733 subfolder = ''
734 734 fullpath = os.path.join( path, subfolder )
735 735
736 736 if not( os.path.exists(fullpath) ):
737 737 os.mkdir(fullpath)
738 738 setFile = -1 #initialize the set counter
739 739
740 740 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
741 741 fullpath = os.path.join( path, subfolder )
742 742
743 743 if not( os.path.exists(fullpath) ):
744 744 os.mkdir(fullpath)
745 745 setFile = -1 #initialize the set counter
746 746
747 747 else:
748 748 filesList = os.listdir( fullpath )
749 749 filesList = sorted( filesList, key=str.lower )
750 750 if len( filesList ) > 0:
751 751 filesList = [k for k in filesList if 'M' in k]
752 752 filen = filesList[-1]
753 753 # the filename should have the following format
754 754 # 0 1234 567 89A BCDE (hex)
755 755 # x YYYY DDD SSS .ext
756 756 if isNumber( filen[8:11] ):
757 757 setFile = int( filen[8:11] ) #initialize the set counter with the set number of the last file
758 758 else:
759 759 setFile = -1
760 760 else:
761 761 setFile = -1 #initialize the set counter
762 762
763 763 if self.setType is None:
764 764 setFile += 1
765 765 file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
766 766 timeTuple.tm_year,
767 767 timeTuple.tm_yday,
768 768 setFile,
769 769 ext )
770 770 else:
771 771 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
772 772 file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
773 773 timeTuple.tm_year,
774 774 timeTuple.tm_yday,
775 775 setFile,
776 776 ext )
777 777
778 778 filename = os.path.join( path, subfolder, file )
779 779 self.metaFile = file
780 780 #Setting HDF5 File
781 781 fp = h5py.File(filename,'w')
782 782
783 783 return fp
784 784
785 785 def writeMetadata(self, fp):
786 786
787 787 grp = fp.create_group("Metadata")
788 788 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
789 789
790 790 for i in range(len(self.metadataList)):
791 791 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
792 792 return
793 793
794 794 def timeFlag(self):
795 795 currentTime = self.dataOut.utctime
796 796
797 797 if self.lastTime is None:
798 798 self.lastTime = currentTime
799 799
800 800 #Day
801 801 timeTuple = time.localtime(currentTime)
802 802 dataDay = timeTuple.tm_yday
803 803
804 804 #Time
805 805 timeDiff = currentTime - self.lastTime
806 806
807 807 #If the day changed or the time gap between one sample and the next exceeds the limit
808 808 if dataDay != self.currentDay:
809 809 self.currentDay = dataDay
810 810 return True
811 811 elif timeDiff > 3*60*60:
812 812 self.lastTime = currentTime
813 813 return True
814 814 else:
815 815 self.lastTime = currentTime
816 816 return False
817 817
818 818 def setNextFile(self):
819 819
820 820 ext = self.ext
821 821 path = self.path
822 822 setFile = self.setFile
823 823 mode = self.mode
824 824
825 825 timeTuple = time.localtime(self.dataOut.utctime)
826 826 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
827 827
828 828 fullpath = os.path.join( path, subfolder )
829 829
830 830 if os.path.exists(fullpath):
831 831 filesList = os.listdir( fullpath )
832 832 filesList = [k for k in filesList if 'D' in k]
833 833 if len( filesList ) > 0:
834 834 filesList = sorted( filesList, key=str.lower )
835 835 filen = filesList[-1]
836 836 # the filename should have the following format
837 837 # 0 1234 567 89A BCDE (hex)
838 838 # x YYYY DDD SSS .ext
839 839 if isNumber( filen[8:11] ):
840 840 setFile = int( filen[8:11] ) #initialize the set counter with the set number of the last file
841 841 else:
842 842 setFile = -1
843 843 else:
844 844 setFile = -1 #initialize the set counter
845 845 else:
846 846 os.makedirs(fullpath)
847 847 setFile = -1 #initialize the set counter
848 848
849 849 if self.setType is None:
850 850 setFile += 1
851 851 file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
852 852 timeTuple.tm_year,
853 853 timeTuple.tm_yday,
854 854 setFile,
855 855 ext )
856 856 else:
857 857 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
858 858 file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
859 859 timeTuple.tm_year,
860 860 timeTuple.tm_yday,
861 861 setFile,
862 862 ext )
863 863
864 864 filename = os.path.join( path, subfolder, file )
865 865
866 866 #Setting HDF5 File
867 867 fp = h5py.File(filename,'w')
868 868 #write metadata
869 869 self.writeMetadata(fp)
870 870 #Write data
871 871 grp = fp.create_group("Data")
872 872 # grp.attrs['metadata'] = self.metaFile
873 873
874 874 # grp.attrs['blocksPerFile'] = 0
875 875 ds = []
876 876 data = []
877 877 dsList = self.dsList
878 878 i = 0
879 879 while i < len(dsList):
880 880 dsInfo = dsList[i]
881 881 #One-dimension data
882 882 if dsInfo['mode'] == 0:
883 883 # ds0 = grp.create_dataset(self.dataList[i], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype='S20')
884 884 ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64)
885 885 ds.append(ds0)
886 886 data.append([])
887 887 i += 1
888 888 continue
889 889 # nDimsForDs.append(nDims[i])
890 890
891 891 elif dsInfo['mode'] == 2:
892 892 grp0 = grp.create_group(dsInfo['variable'])
893 893 ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data = numpy.zeros((1,dsInfo['shape'])) , maxshape=(None,dsInfo['shape']), chunks=True)
894 894 ds.append(ds0)
895 895 data.append([])
896 896 i += 1
897 897 continue
898 898
899 899 elif dsInfo['mode'] == 1:
900 900 grp0 = grp.create_group(dsInfo['variable'])
901 901
902 902 for j in range(dsInfo['dsNumber']):
903 903 dsInfo = dsList[i]
904 904 tableName = dsInfo['dsName']
905 905 shape = int(dsInfo['shape'])
906 906
907 907 if dsInfo['nDim'] == 3:
908 908 ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1) , data = numpy.zeros((shape[0],shape[1],1)), maxshape = (None,shape[1],None), chunks=True)
909 909 else:
910 910 ds0 = grp0.create_dataset(tableName, (1,shape), data = numpy.zeros((1,shape)) , maxshape=(None,shape), chunks=True)
911 911
912 912 ds.append(ds0)
913 913 data.append([])
914 914 i += 1
915 915 # nDimsForDs.append(nDims[i])
916 916
917 917 fp.flush()
918 918 fp.close()
919 919
920 920 # self.nDatas = nDatas
921 921 # self.nDims = nDims
922 922 # self.nDimsForDs = nDimsForDs
923 923 #Saving variables
924 924 print 'Writing the file: %s'%filename
925 925 self.filename = filename
926 926 # self.fp = fp
927 927 # self.grp = grp
928 928 # self.grp.attrs.modify('nRecords', 1)
929 929 self.ds = ds
930 930 self.data = data
931 931 # self.setFile = setFile
932 932 self.firsttime = True
933 933 self.blockIndex = 0
934 934 return
935 935
936 936 def putData(self):
937 937
938 938 if self.blockIndex == self.blocksPerFile or self.timeFlag():
939 939 self.setNextFile()
940 940
941 941 # if not self.firsttime:
942 942 self.readBlock()
943 943 self.setBlock() #Prepare data to be written
944 944 self.writeBlock() #Write data
945 945
946 946 return
947 947
948 948 def readBlock(self):
949 949
950 950 '''
951 951 Re-opens the current HDF5 file in read/write mode and refreshes
952 952 the dataset handles stored in
953 953
954 954 self.ds
955 955 '''
956 956 dsList = self.dsList
957 957 ds = self.ds
958 958 #Setting HDF5 File
959 959 fp = h5py.File(self.filename,'r+')
960 960 grp = fp["Data"]
961 961 ind = 0
962 962
963 963 # grp.attrs['blocksPerFile'] = 0
964 964 while ind < len(dsList):
965 965 dsInfo = dsList[ind]
966 966
967 967 if dsInfo['mode'] == 0:
968 968 ds0 = grp[dsInfo['variable']]
969 969 ds[ind] = ds0
970 970 ind += 1
971 971 else:
972 972
973 973 grp0 = grp[dsInfo['variable']]
974 974
975 975 for j in range(dsInfo['dsNumber']):
976 976 dsInfo = dsList[ind]
977 977 ds0 = grp0[dsInfo['dsName']]
978 978 ds[ind] = ds0
979 979 ind += 1
980 980
981 981 self.fp = fp
982 982 self.grp = grp
983 983 self.ds = ds
984 984
985 985 return
986 986
987 987 def setBlock(self):
988 988 '''
989 989 Arranges the attributes of the current dataOut into the list that
990 990 will be written to file
991 991
992 992 self.data
993 993 '''
994 994 #Creating Arrays
995 995 dsList = self.dsList
996 996 data = self.data
997 997 ind = 0
998 998
999 999 while ind < len(dsList):
1000 1000 dsInfo = dsList[ind]
1001 1001 dataAux = getattr(self.dataOut, dsInfo['variable'])
1002 1002
1003 1003 mode = dsInfo['mode']
1004 1004 nDim = dsInfo['nDim']
1005 1005
1006 1006 if mode == 0 or mode == 2 or nDim == 1:
1007 1007 data[ind] = dataAux
1008 1008 ind += 1
1009 1009 # elif nDim == 1:
1010 1010 # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
1011 1011 # ind += 1
1012 1012 elif nDim == 2:
1013 1013 for j in range(dsInfo['dsNumber']):
1014 1014 data[ind] = dataAux[j,:]
1015 1015 ind += 1
1016 1016 elif nDim == 3:
1017 1017 for j in range(dsInfo['dsNumber']):
1018 1018 data[ind] = dataAux[:,j,:]
1019 1019 ind += 1
1020 1020
1021 1021 self.data = data
1022 1022 return
1023 1023
1024 1024 def writeBlock(self):
1025 1025 '''
1026 1026 Saves the block in the HDF5 file
1027 1027 '''
1028 1028 dsList = self.dsList
1029 1029
1030 1030 for i in range(len(self.ds)):
1031 1031 dsInfo = dsList[i]
1032 1032 nDim = dsInfo['nDim']
1033 1033 mode = dsInfo['mode']
1034 1034
1035 1035 # First time
1036 1036 if self.firsttime:
1037 1037 # self.ds[i].resize(self.data[i].shape)
1038 1038 # self.ds[i][self.blockIndex,:] = self.data[i]
1039 1039 if type(self.data[i]) == numpy.ndarray:
1040 1040
1041 1041 if nDim == 3:
1042 1042 self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1))
1043 1043 self.ds[i].resize(self.data[i].shape)
1044 1044 if mode == 2:
1045 1045 self.ds[i].resize(self.data[i].shape)
1046 1046 self.ds[i][:] = self.data[i]
1047 1047 else:
1048 1048
1049 1049 # From second time
1050 1050 # Meteors!
1051 1051 if mode == 2:
1052 1052 dataShape = self.data[i].shape
1053 1053 dsShape = self.ds[i].shape
1054 1054 self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1]))
1055 1055 self.ds[i][dsShape[0]:,:] = self.data[i]
1056 1056 # No dimension
1057 1057 elif mode == 0:
1058 1058 self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
1059 1059 self.ds[i][0,-1] = self.data[i]
1060 1060 # One dimension
1061 1061 elif nDim == 1:
1062 1062 self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
1063 1063 self.ds[i][-1,:] = self.data[i]
1064 1064 # Two dimension
1065 1065 elif nDim == 2:
1066 1066 self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1]))
1067 1067 self.ds[i][self.blockIndex,:] = self.data[i]
1068 1068 # Three dimensions
1069 1069 elif nDim == 3:
1070 1070 self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1))
1071 1071 self.ds[i][:,:,-1] = self.data[i]
1072 1072
1073 1073 self.firsttime = False
1074 1074 self.blockIndex += 1
1075 1075
1076 1076 #Close to save changes
1077 1077 self.fp.flush()
1078 1078 self.fp.close()
1079 1079 return
1080 1080
1081 1081 def run(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, **kwargs):
1082 1082
1083 1083 if not(self.isConfig):
1084 1084 flagdata = self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
1085 1085 metadataList=metadataList, dataList=dataList, mode=mode, **kwargs)
1086 1086
1087 1087 if not(flagdata):
1088 1088 return
1089 1089
1090 1090 self.isConfig = True
1091 1091 # self.putMetadata()
1092 1092 self.setNextFile()
1093 1093
1094 1094 self.putData()
1095 1095 return
1 NO CONTENT: file was removed