updated with changes in v3.0-devel
rflores -
r1737:23e53bebbd06

The requested changes are too big and content was truncated.

@@ -1,669 +1,677
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """API to create signal chain projects
6 6
7 7 The API is provided through the class Project
8 8 """
9 9
10 10 import re
11 11 import sys
12 12 import ast
13 13 import datetime
14 14 import traceback
15 15 import time
16 16 import multiprocessing
17 17 from multiprocessing import Process, Queue
18 18 from threading import Thread
19 19 from xml.etree.ElementTree import ElementTree, Element, SubElement
20 20
21 21 from schainpy.admin import Alarm, SchainWarning
22 22 from schainpy.model import *
23 23 from schainpy.utils import log
24 24
25 25 if 'darwin' in sys.platform and sys.version_info[0] == 3 and sys.version_info[1] > 7:
26 26 multiprocessing.set_start_method('fork')
27 27
28 28 class ConfBase():
29 29
30 30 def __init__(self):
31 31
32 32 self.id = '0'
33 33 self.name = None
34 34 self.priority = None
35 35 self.parameters = {}
36 36 self.object = None
37 37 self.operations = []
38 38
39 39 def getId(self):
40 40
41 41 return self.id
42 42
43 43 def getNewId(self):
44 44
45 45 return int(self.id) * 10 + len(self.operations) + 1
46 46
47 47 def updateId(self, new_id):
48 48
49 49 self.id = str(new_id)
50 50
51 51 n = 1
52 52 for conf in self.operations:
53 53 conf_id = str(int(new_id) * 10 + n)
54 54 conf.updateId(conf_id)
55 55 n += 1
56 56
57 57 def getKwargs(self):
58 58
59 59 params = {}
60 60
61 61 for key, value in self.parameters.items():
62 62 if value not in (None, '', ' '):
63 63 params[key] = value
64 64
65 65 return params
66 66
67 67 def update(self, **kwargs):
68 68
69 69 for key, value in kwargs.items():
70 70 self.addParameter(name=key, value=value)
71 71
72 72 def addParameter(self, name, value, format=None):
73 73 '''
74 74 '''
75 75
76 if isinstance(value, str) and re.search(r'(\d+/\d+/\d+)', value):
76 if format is not None:
77 self.parameters[name] = eval(format)(value)
78 elif isinstance(value, str) and re.search(r'(\d+/\d+/\d+)', value):
77 79 self.parameters[name] = datetime.date(*[int(x) for x in value.split('/')])
78 80 elif isinstance(value, str) and re.search(r'(\d+:\d+:\d+)', value):
79 81 self.parameters[name] = datetime.time(*[int(x) for x in value.split(':')])
80 82 else:
81 83 try:
82 84 self.parameters[name] = ast.literal_eval(value)
83 85 except:
84 86 if isinstance(value, str) and ',' in value:
85 87 self.parameters[name] = value.split(',')
86 88 else:
87 89 self.parameters[name] = value
88 90
89 91 def getParameters(self):
90 92
91 93 params = {}
92 94 for key, value in self.parameters.items():
93 95 s = type(value).__name__
94 96 if s == 'date':
95 97 params[key] = value.strftime('%Y/%m/%d')
96 98 elif s == 'time':
97 99 params[key] = value.strftime('%H:%M:%S')
98 100 else:
99 101 params[key] = str(value)
100 102
101 103 return params
102 104
103 105 def makeXml(self, element):
104 106
105 107 xml = SubElement(element, self.ELEMENTNAME)
106 108 for label in self.xml_labels:
107 109 xml.set(label, str(getattr(self, label)))
108 110
109 111 for key, value in self.getParameters().items():
110 112 xml_param = SubElement(xml, 'Parameter')
111 113 xml_param.set('name', key)
112 114 xml_param.set('value', value)
113 115
114 116 for conf in self.operations:
115 117 conf.makeXml(xml)
116 118
117 119 def __str__(self):
118 120
119 121 if self.ELEMENTNAME == 'Operation':
120 122 s = ' {}[id={}]\n'.format(self.name, self.id)
121 123 else:
122 124 s = '{}[id={}, inputId={}]\n'.format(self.name, self.id, self.inputId)
123 125
124 126 for key, value in self.parameters.items():
125 127 if self.ELEMENTNAME == 'Operation':
126 128 s += ' {}: {}\n'.format(key, value)
127 129 else:
128 130 s += ' {}: {}\n'.format(key, value)
129 131
130 132 for conf in self.operations:
131 133 s += str(conf)
132 134
133 135 return s
134 136
135 137 class OperationConf(ConfBase):
136 138
137 139 ELEMENTNAME = 'Operation'
138 140 xml_labels = ['id', 'name']
139 141
140 142 def setup(self, id, name, priority, project_id, err_queue):
141 143
142 144 self.id = str(id)
143 145 self.project_id = project_id
144 146 self.name = name
145 147 self.type = 'other'
146 148 self.err_queue = err_queue
147 149
148 150 def readXml(self, element, project_id, err_queue):
149 151
150 152 self.id = element.get('id')
151 153 self.name = element.get('name')
152 154 self.type = 'other'
153 155 self.project_id = str(project_id)
154 156 self.err_queue = err_queue
155 157
156 158 for elm in element.iter('Parameter'):
157 159 self.addParameter(elm.get('name'), elm.get('value'))
158 160
159 161 def createObject(self):
160 162
161 163 className = eval(self.name)
162 164
163 165 if 'Plot' in self.name or 'Writer' in self.name or 'Send' in self.name or 'print' in self.name:
164 166 kwargs = self.getKwargs()
165 167 opObj = className(self.id, self.id, self.project_id, self.err_queue, **kwargs)
166 168 opObj.start()
167 169 self.type = 'external'
168 170 else:
169 171 opObj = className()
170 172
171 173 self.object = opObj
172 174 return opObj
173 175
174 176 class ProcUnitConf(ConfBase):
175 177
176 178 ELEMENTNAME = 'ProcUnit'
177 179 xml_labels = ['id', 'inputId', 'name']
178 180
179 181 def setup(self, project_id, id, name, datatype, inputId, err_queue):
180 182 '''
181 183 '''
182 184
183 185 if datatype == None and name == None:
184 186 raise ValueError('datatype or name should be defined')
185 187
186 188 if name == None:
187 189 if 'Proc' in datatype:
188 190 name = datatype
189 191 else:
190 192 name = '%sProc' % (datatype)
191 193
192 194 if datatype == None:
193 195 datatype = name.replace('Proc', '')
194 196
195 197 self.id = str(id)
196 198 self.project_id = project_id
197 199 self.name = name
198 200 self.datatype = datatype
199 201 self.inputId = inputId
200 202 self.err_queue = err_queue
201 203 self.operations = []
202 204 self.parameters = {}
203 205
204 206 def removeOperation(self, id):
205 207
206 208 i = [1 if x.id == id else 0 for x in self.operations]
207 209 self.operations.pop(i.index(1))
208 210
209 211 def getOperation(self, id):
210 212
211 213 for conf in self.operations:
212 214 if conf.id == id:
213 215 return conf
214 216
215 217 def addOperation(self, name, optype='self'):
216 218 '''
217 219 '''
218 220
219 221 id = self.getNewId()
220 222 conf = OperationConf()
221 223 conf.setup(id, name=name, priority='0', project_id=self.project_id, err_queue=self.err_queue)
222 224 self.operations.append(conf)
223 225
224 226 return conf
225 227
226 228 def readXml(self, element, project_id, err_queue):
227 229
228 230 self.id = element.get('id')
229 231 self.name = element.get('name')
230 232 self.inputId = None if element.get('inputId') == 'None' else element.get('inputId')
231 233 self.datatype = element.get('datatype', self.name.replace(self.ELEMENTNAME.replace('Unit', ''), ''))
232 234 self.project_id = str(project_id)
233 235 self.err_queue = err_queue
234 236 self.operations = []
235 237 self.parameters = {}
236 238
237 239 for elm in element:
238 240 if elm.tag == 'Parameter':
239 241 self.addParameter(elm.get('name'), elm.get('value'))
240 242 elif elm.tag == 'Operation':
241 243 conf = OperationConf()
242 244 conf.readXml(elm, project_id, err_queue)
243 245 self.operations.append(conf)
244 246
245 247 def createObjects(self):
246 248 '''
247 249 Instantiates the processing units.
248 250 '''
249 251
250 252 className = eval(self.name)
251 253 kwargs = self.getKwargs()
252 254 procUnitObj = className()
253 255 procUnitObj.name = self.name
254 256 log.success('creating process...', self.name)
255 257
256 258 for conf in self.operations:
257 259
258 260 opObj = conf.createObject()
259 261
260 262 log.success('adding operation: {}, type:{}'.format(
261 263 conf.name,
262 264 conf.type), self.name)
263 265
264 266 procUnitObj.addOperation(conf, opObj)
265 267
266 268 self.object = procUnitObj
267 269
268 270 def run(self):
269 271 '''
270 272 '''
271 273 #self.object.call(**self.getKwargs())
272 274
273 275 return self.object.call(**self.getKwargs())
274 276
275 277
276 278 class ReadUnitConf(ProcUnitConf):
277 279
278 280 ELEMENTNAME = 'ReadUnit'
279 281
280 282 def __init__(self):
281 283
282 284 self.id = None
283 285 self.datatype = None
284 286 self.name = None
285 287 self.inputId = None
286 288 self.operations = []
287 289 self.parameters = {}
288 290
289 291 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
290 startTime='', endTime='', server=None, **kwargs):
292 startTime='', endTime='', server=None, topic='', **kwargs):
291 293
292 294 if datatype == None and name == None:
293 295 raise ValueError('datatype or name should be defined')
294 296 if name == None:
295 297 if 'Reader' in datatype:
296 298 name = datatype
297 299 datatype = name.replace('Reader', '')
298 300 else:
299 301 name = '{}Reader'.format(datatype)
300 302 if datatype == None:
301 303 if 'Reader' in name:
302 304 datatype = name.replace('Reader', '')
303 305 else:
304 306 datatype = name
305 307 name = '{}Reader'.format(name)
306 308
307 309 self.id = id
308 310 self.project_id = project_id
309 311 self.name = name
310 312 self.datatype = datatype
311 313 self.err_queue = err_queue
312 314
313 315 self.addParameter(name='path', value=path)
314 316 self.addParameter(name='startDate', value=startDate)
315 317 self.addParameter(name='endDate', value=endDate)
316 318 self.addParameter(name='startTime', value=startTime)
317 319 self.addParameter(name='endTime', value=endTime)
320 self.addParameter(name='server', value=server)
321 self.addParameter(name='topic', value=topic)
318 322
319 323 for key, value in kwargs.items():
320 324 self.addParameter(name=key, value=value)
321 325
322 326
323 327 class Project(Process):
324 328 """API to create signal chain projects"""
325 329
326 330 ELEMENTNAME = 'Project'
327 331
328 332 def __init__(self, name=''):
329 333
330 334 Process.__init__(self)
331 335 self.id = '1'
332 336 if name:
333 337 self.name = '{} ({})'.format(Process.__name__, name)
334 338 self.filename = None
335 339 self.description = None
336 340 self.email = None
337 341 self.alarm = []
338 342 self.configurations = {}
339 343 # self.err_queue = Queue()
340 344 self.err_queue = None
341 345 self.started = False
342 346
343 347 def getNewId(self):
344 348
345 349 idList = list(self.configurations.keys())
346 350 id = int(self.id) * 10
347 351
348 352 while True:
349 353 id += 1
350 354
351 355 if str(id) in idList:
352 356 continue
353 357
354 358 break
355 359
356 360 return str(id)
357 361
358 362 def updateId(self, new_id):
359 363
360 364 self.id = str(new_id)
361 365
362 366 keyList = list(self.configurations.keys())
363 367 keyList.sort()
364 368
365 369 n = 1
366 370 new_confs = {}
367 371
368 372 for procKey in keyList:
369 373
370 374 conf = self.configurations[procKey]
371 375 idProcUnit = str(int(self.id) * 10 + n)
372 376 conf.updateId(idProcUnit)
373 377 new_confs[idProcUnit] = conf
374 378 n += 1
375 379
376 380 self.configurations = new_confs
377 381
378 382 def setup(self, id=1, name='', description='', email=None, alarm=[]):
379 383
380 384 self.id = str(id)
381 385 self.description = description
382 386 self.email = email
383 387 self.alarm = alarm
384 388 if name:
385 389 self.name = '{} ({})'.format(Process.__name__, name)
386 390
387 391 def update(self, **kwargs):
388 392
389 393 for key, value in kwargs.items():
390 394 setattr(self, key, value)
391 395
392 396 def clone(self):
393 397
394 398 p = Project()
395 399 p.id = self.id
396 400 p.name = self.name
397 401 p.description = self.description
398 402 p.configurations = self.configurations.copy()
399 403
400 404 return p
401 405
402 406 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
403 407
404 408 '''
405 409 '''
406 410
407 411 if id is None:
408 412 idReadUnit = self.getNewId()
409 413 else:
410 414 idReadUnit = str(id)
411 415
412 416 conf = ReadUnitConf()
413 417 conf.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
414 418 self.configurations[conf.id] = conf
415 419
416 420 return conf
417 421
418 422 def addProcUnit(self, id=None, inputId='0', datatype=None, name=None):
419 423
420 424 '''
421 425 '''
422 426
423 427 if id is None:
424 428 idProcUnit = self.getNewId()
425 429 else:
426 430 idProcUnit = id
427 431
428 432 conf = ProcUnitConf()
429 433 conf.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue)
430 434 self.configurations[conf.id] = conf
431 435
432 436 return conf
433 437
434 438 def removeProcUnit(self, id):
435 439
436 440 if id in self.configurations:
437 441 self.configurations.pop(id)
438 442
439 443 def getReadUnit(self):
440 444
441 445 for obj in list(self.configurations.values()):
442 446 if obj.ELEMENTNAME == 'ReadUnit':
443 447 return obj
444 448
445 449 return None
446 450
447 451 def getProcUnit(self, id):
448 452
449 453 return self.configurations[id]
450 454
451 455 def getUnits(self):
452 456
453 457 keys = list(self.configurations)
454 458 keys.sort()
455 459
456 460 for key in keys:
457 461 yield self.configurations[key]
458 462
459 463 def updateUnit(self, id, **kwargs):
460 464
461 465 conf = self.configurations[id].update(**kwargs)
462 466
463 467 def makeXml(self):
464 468
465 469 xml = Element('Project')
466 470 xml.set('id', str(self.id))
467 471 xml.set('name', self.name)
468 472 xml.set('description', self.description)
469 473
470 474 for conf in self.configurations.values():
471 475 conf.makeXml(xml)
472 476
473 477 self.xml = xml
474 478
475 479 def writeXml(self, filename=None):
476 480
477 481 if filename == None:
478 482 if self.filename:
479 483 filename = self.filename
480 484 else:
481 485 filename = 'schain.xml'
482 486
483 487 if not filename:
484 488 print('filename has not been defined. Use setFilename(filename) to set it.')
485 489 return 0
486 490
487 491 abs_file = os.path.abspath(filename)
488 492
489 493 if not os.access(os.path.dirname(abs_file), os.W_OK):
490 494 print('No write permission on %s' % os.path.dirname(abs_file))
491 495 return 0
492 496
493 497 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
494 498 print('File %s already exists and could not be overwritten' % abs_file)
495 499 return 0
496 500
497 501 self.makeXml()
498 502
499 503 ElementTree(self.xml).write(abs_file, method='xml')
500 504
501 505 self.filename = abs_file
502 506
503 507 return 1
504 508
505 509 def readXml(self, filename):
506 510
507 511 abs_file = os.path.abspath(filename)
508 512
509 513 self.configurations = {}
510 514
511 515 try:
512 516 self.xml = ElementTree().parse(abs_file)
513 517 except:
514 518 log.error('Error reading %s, verify file format' % filename)
515 519 return 0
516 520
517 521 self.id = self.xml.get('id')
518 522 self.name = self.xml.get('name')
519 523 self.description = self.xml.get('description')
520 524
521 525 for element in self.xml:
522 526 if element.tag == 'ReadUnit':
523 527 conf = ReadUnitConf()
524 528 conf.readXml(element, self.id, self.err_queue)
525 529 self.configurations[conf.id] = conf
526 530 elif element.tag == 'ProcUnit':
527 531 conf = ProcUnitConf()
528 532 input_proc = self.configurations[element.get('inputId')]
529 533 conf.readXml(element, self.id, self.err_queue)
530 534 self.configurations[conf.id] = conf
531 535
532 536 self.filename = abs_file
533 537
534 538 return 1
535 539
536 540 def __str__(self):
537 541
538 542 text = '\nProject[id=%s, name=%s, description=%s]\n\n' % (
539 543 self.id,
540 544 self.name,
541 545 self.description,
542 546 )
543 547
544 548 for conf in self.configurations.values():
545 549 text += '{}'.format(conf)
546 550
547 551 return text
548 552
549 553 def createObjects(self):
550 554
551 555 keys = list(self.configurations.keys())
552 556 keys.sort()
553 557 for key in keys:
554 558 conf = self.configurations[key]
555 559 conf.createObjects()
560 if 'Reader' in str(conf):
561 reader = conf.object
562 else:
563 conf.object.reader = reader
556 564 if conf.inputId is not None:
557 565 if isinstance(conf.inputId, list):
558 566 conf.object.setInput([self.configurations[x].object for x in conf.inputId])
559 567 else:
560 568 conf.object.setInput([self.configurations[conf.inputId].object])
561 569
562 570 def monitor(self):
563 571
564 572 t = Thread(target=self._monitor, args=(self.err_queue, self.ctx))
565 573 t.start()
566 574
567 575 def _monitor(self, queue, ctx):
568 576
569 577 import socket
570 578
571 579 procs = 0
572 580 err_msg = ''
573 581
574 582 while True:
575 583 msg = queue.get()
576 584 if '#_start_#' in msg:
577 585 procs += 1
578 586 elif '#_end_#' in msg:
579 587 procs -= 1
580 588 else:
581 589 err_msg = msg
582 590
583 591 if procs == 0 or 'Traceback' in err_msg:
584 592 break
585 593 time.sleep(0.1)
586 594
587 595 if '|' in err_msg:
588 596 name, err = err_msg.split('|')
589 597 if 'SchainWarning' in err:
590 598 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
591 599 elif 'SchainError' in err:
592 600 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
593 601 else:
594 602 log.error(err, name)
595 603 else:
596 604 name, err = self.name, err_msg
597 605
598 606 time.sleep(1)
599 607
600 608 ctx.term()
601 609
602 610 message = ''.join(err)
603 611
604 612 if err_msg:
605 613 subject = 'SChain v%s: Error running %s\n' % (
606 614 schainpy.__version__, self.name)
607 615
608 616 subtitle = 'Hostname: %s\n' % socket.gethostbyname(
609 617 socket.gethostname())
610 618 subtitle += 'Working directory: %s\n' % os.path.abspath('./')
611 619 subtitle += 'Configuration file: %s\n' % self.filename
612 620 subtitle += 'Time: %s\n' % str(datetime.datetime.now())
613 621
614 622 readUnitConfObj = self.getReadUnit()
615 623 if readUnitConfObj:
616 624 subtitle += '\nInput parameters:\n'
617 625 subtitle += '[Data path = %s]\n' % readUnitConfObj.parameters['path']
618 626 subtitle += '[Start date = %s]\n' % readUnitConfObj.parameters['startDate']
619 627 subtitle += '[End date = %s]\n' % readUnitConfObj.parameters['endDate']
620 628 subtitle += '[Start time = %s]\n' % readUnitConfObj.parameters['startTime']
621 629 subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
622 630
623 631 a = Alarm(
624 632 modes=self.alarm,
625 633 email=self.email,
626 634 message=message,
627 635 subject=subject,
628 636 subtitle=subtitle,
629 637 filename=self.filename
630 638 )
631 639
632 640 a.start()
633 641
634 642 def setFilename(self, filename):
635 643
636 644 self.filename = filename
637 645
638 646 def runProcs(self):
639 647
640 648 err = False
641 649 n = len(self.configurations)
642 650 #print(n)
643 651
644 652 while not err:
645 653 #print(self.getUnits())
646 654 for conf in self.getUnits():
647 655 #print(conf)
648 656 ok = conf.run()
649 657 #print("ok", ok)
650 658 if ok == 'Error':
651 659 n -= 1
652 660 continue
653 661 elif not ok:
654 662 break
655 663 #print("****************************************************end")
656 664 #exit(1)
657 665 if n == 0:
658 666 err = True
659 667
660 668 def run(self):
661 669
662 670 log.success('\nStarting Project {} [id={}]'.format(self.name, self.id), tag='')
663 671 self.started = True
664 672 self.start_time = time.time()
665 673 self.createObjects()
666 674 self.runProcs()
667 675 log.success('{} Done (Time: {:4.2f}s)'.format(
668 676 self.name,
669 677 time.time() - self.start_time), '')
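
The hunk above defines the signal-chain configuration API: ConfBase, OperationConf, ProcUnitConf, ReadUnitConf and the Project process. The snippet below is a minimal usage sketch assembled only from the methods visible in this diff; it is a sketch, not the project's documented example. The module path schainpy.controller, the 'Voltage' datatype and the 'Scope' operation name are assumptions, and the path and date/time values are placeholders.

# Minimal sketch of the Project API shown above (module path and names assumed).
from schainpy.controller import Project

prj = Project()
prj.setup(id='10', name='test_chain', description='read -> process -> plot')

# ReadUnitConf.setup() derives the reader name ('VoltageReader') from datatype and
# stores path/startDate/endDate/startTime/endTime as parameters.
read_unit = prj.addReadUnit(
    datatype='Voltage',                 # illustrative datatype
    path='/data/experiment',            # placeholder path
    startDate='2020/01/01', endDate='2020/01/01',
    startTime='00:00:00', endTime='23:59:59')

# ProcUnitConf.setup() derives 'VoltageProc' from datatype; inputId links it to the reader.
proc_unit = prj.addProcUnit(datatype='Voltage', inputId=read_unit.getId())

# Operations attach to a processing unit; extra parameters become kwargs of the operation.
op = proc_unit.addOperation(name='Scope', optype='external')   # 'Scope' is illustrative
op.addParameter(name='id', value='1')

prj.start()   # Project subclasses multiprocessing.Process; run() builds the objects and loops
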
@@ -1,1082 +1,1093
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of different Data objects for different types of data
6 6
7 7 Here you will find the different data objects for the different types
8 8 of data; these data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
24 24 def getNumpyDtype(dataTypeCode):
25 25
26 26 if dataTypeCode == 0:
27 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
28 28 elif dataTypeCode == 1:
29 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
30 30 elif dataTypeCode == 2:
31 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
32 32 elif dataTypeCode == 3:
33 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
34 34 elif dataTypeCode == 4:
35 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36 elif dataTypeCode == 5:
37 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
38 38 else:
39 39 raise ValueError('dataTypeCode was not defined')
40 40
41 41 return numpyDtype
42 42
43 43
44 44 def getDataTypeCode(numpyDtype):
45 45
46 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
47 47 datatype = 0
48 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
49 49 datatype = 1
50 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
51 51 datatype = 2
52 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
53 53 datatype = 3
54 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
55 55 datatype = 4
56 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
57 57 datatype = 5
58 58 else:
59 59 datatype = None
60 60
61 61 return datatype
62 62
63 63
64 64 def hildebrand_sekhon(data, navg):
65 65 """
66 66 This method is for the objective determination of the noise level in Doppler spectra. This
67 67 implementation technique is based on the fact that the standard deviation of the spectral
68 68 densities is equal to the mean spectral density for white Gaussian noise
69 69
70 70 Inputs:
71 71 data : array of spectral densities
72 72 navg : number of averages
73 73
74 74 Return:
75 75 mean : noise level
76 76 """
77 77
78 78 sortdata = numpy.sort(data, axis=None)
79 79 #print(numpy.shape(data))
80 80 #exit()
81 81 '''
82 82 lenOfData = len(sortdata)
83 83 nums_min = lenOfData*0.2
84 84
85 85 if nums_min <= 5:
86 86
87 87 nums_min = 5
88 88
89 89 sump = 0.
90 90 sumq = 0.
91 91
92 92 j = 0
93 93 cont = 1
94 94
95 95 while((cont == 1)and(j < lenOfData)):
96 96
97 97 sump += sortdata[j]
98 98 sumq += sortdata[j]**2
99 99
100 100 if j > nums_min:
101 101 rtest = float(j)/(j-1) + 1.0/navg
102 102 if ((sumq*j) > (rtest*sump**2)):
103 103 j = j - 1
104 104 sump = sump - sortdata[j]
105 105 sumq = sumq - sortdata[j]**2
106 106 cont = 0
107 107
108 108 j += 1
109 109
110 110 lnoise = sump / j
111 111
112 112 return lnoise
113 113 '''
114 114 return _noise.hildebrand_sekhon(sortdata, navg)
115 115
116 116
117 117 class Beam:
118 118
119 119 def __init__(self):
120 120 self.codeList = []
121 121 self.azimuthList = []
122 122 self.zenithList = []
123 123
124 124
125 125 class GenericData(object):
126 126
127 127 flagNoData = True
128 blockReader = False
128 129
129 130 def copy(self, inputObj=None):
130 131
131 132 if inputObj == None:
132 133 return copy.deepcopy(self)
133 134
134 135 for key in list(inputObj.__dict__.keys()):
135 136
136 137 attribute = inputObj.__dict__[key]
137 138
138 139 # If this attribute is a tuple or list
139 140 if type(inputObj.__dict__[key]) in (tuple, list):
140 141 self.__dict__[key] = attribute[:]
141 142 continue
142 143
143 144 # If this attribute is another object or instance
144 145 if hasattr(attribute, '__dict__'):
145 146 self.__dict__[key] = attribute.copy()
146 147 continue
147 148
148 149 self.__dict__[key] = inputObj.__dict__[key]
149 150
150 151 def deepcopy(self):
151 152
152 153 return copy.deepcopy(self)
153 154
154 155 def isEmpty(self):
155 156
156 157 return self.flagNoData
157 158
158 159 def isReady(self):
159 160
160 161 return not self.flagNoData
161 162
162 163
163 164 class JROData(GenericData):
164 165
165 166 systemHeaderObj = SystemHeader()
166 167 radarControllerHeaderObj = RadarControllerHeader()
167 168 type = None
168 169 datatype = None # dtype but in string
169 170 nProfiles = None
170 171 heightList = None
171 172 channelList = None
172 173 flagDiscontinuousBlock = False
173 174 useLocalTime = False
174 175 utctime = None
175 176 timeZone = None
176 177 dstFlag = None
177 178 errorCount = None
178 179 blocksize = None
179 180 flagDecodeData = False # assume the data is not decoded
180 181 flagDeflipData = False # assume the data has not been deflipped
181 182 flagShiftFFT = False
182 183 nCohInt = None
183 184 windowOfFilter = 1
184 185 C = 3e8
185 186 frequency = 49.92e6
186 187 realtime = False
187 188 beacon_heiIndexList = None
188 189 last_block = None
189 190 blocknow = None
190 191 azimuth = None
191 192 zenith = None
192 193 beam = Beam()
193 194 profileIndex = None
194 195 error = None
195 196 data = None
196 197 nmodes = None
197 198 metadata_list = ['heightList', 'timeZone', 'type']
198 199
199 200 def __str__(self):
200 201
201 return '{} - {}'.format(self.type, self.datatime())
202 try:
203 dt = self.datatime
204 except:
205 dt = 'None'
206 return '{} - {}'.format(self.type, dt)
202 207
203 208 def getNoise(self):
204 209
205 210 raise NotImplementedError
206 211
207 212 @property
208 213 def nChannels(self):
209 214
210 215 return len(self.channelList)
211 216
212 217 @property
213 218 def channelIndexList(self):
214 219
215 220 return list(range(self.nChannels))
216 221
217 222 @property
218 223 def nHeights(self):
219 224
220 225 return len(self.heightList)
221 226
222 227 def getDeltaH(self):
223 228
224 229 return self.heightList[1] - self.heightList[0]
225 230
226 231 @property
227 232 def ltctime(self):
228 233
229 234 if self.useLocalTime:
230 235 return self.utctime - self.timeZone * 60
231 236
232 237 return self.utctime
233 238
234 239 @property
235 240 def datatime(self):
236 241
237 242 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
238 243 return datatimeValue
239 244
240 245 def getTimeRange(self):
241 246
242 247 datatime = []
243 248
244 249 datatime.append(self.ltctime)
245 250 datatime.append(self.ltctime + self.timeInterval + 1)
246 251
247 252 datatime = numpy.array(datatime)
248 253
249 254 return datatime
250 255
251 256 def getFmaxTimeResponse(self):
252 257
253 258 period = (10**-6) * self.getDeltaH() / (0.15)
254 259
255 260 PRF = 1. / (period * self.nCohInt)
256 261
257 262 fmax = PRF
258 263
259 264 return fmax
260 265
261 266 def getFmax(self):
262 267 PRF = 1. / (self.ippSeconds * self.nCohInt)
263 268 #print("ippsec",self.ippSeconds)
264 269 fmax = PRF
265 270 return fmax
266 271
267 272 def getVmax(self):
268 273
269 274 _lambda = self.C / self.frequency
270 275
271 276 vmax = self.getFmax() * _lambda / 2
272 277
273 278 return vmax
274 279
275 280 @property
276 281 def ippSeconds(self):
277 282 '''
278 283 '''
279 284 return self.radarControllerHeaderObj.ippSeconds
280 285
281 286 @ippSeconds.setter
282 287 def ippSeconds(self, ippSeconds):
283 288 '''
284 289 '''
285 290 self.radarControllerHeaderObj.ippSeconds = ippSeconds
286 291
287 292 @property
288 293 def code(self):
289 294 '''
290 295 '''
291 296 return self.radarControllerHeaderObj.code
292 297
293 298 @code.setter
294 299 def code(self, code):
295 300 '''
296 301 '''
297 302 self.radarControllerHeaderObj.code = code
298 303
299 304 @property
300 305 def nCode(self):
301 306 '''
302 307 '''
303 308 return self.radarControllerHeaderObj.nCode
304 309
305 310 @nCode.setter
306 311 def nCode(self, ncode):
307 312 '''
308 313 '''
309 314 self.radarControllerHeaderObj.nCode = ncode
310 315
311 316 @property
312 317 def nBaud(self):
313 318 '''
314 319 '''
315 320 return self.radarControllerHeaderObj.nBaud
316 321
317 322 @nBaud.setter
318 323 def nBaud(self, nbaud):
319 324 '''
320 325 '''
321 326 self.radarControllerHeaderObj.nBaud = nbaud
322 327
323 328 @property
324 329 def ipp(self):
325 330 '''
326 331 '''
327 332 return self.radarControllerHeaderObj.ipp
328 333
329 334 @ipp.setter
330 335 def ipp(self, ipp):
331 336 '''
332 337 '''
333 338 self.radarControllerHeaderObj.ipp = ipp
334 339
335 340 @property
336 341 def metadata(self):
337 342 '''
338 343 '''
339 344
340 345 return {attr: getattr(self, attr) for attr in self.metadata_list}
341 346
342 347
343 348 class Voltage(JROData):
344 349
345 350 dataPP_POW = None
346 351 dataPP_DOP = None
347 352 dataPP_WIDTH = None
348 353 dataPP_SNR = None
349 354
350 355 def __init__(self):
351 356 '''
352 357 Constructor
353 358 '''
354 359
355 360 self.useLocalTime = True
356 361 self.radarControllerHeaderObj = RadarControllerHeader()
357 362 self.systemHeaderObj = SystemHeader()
358 363 self.type = "Voltage"
359 364 self.data = None
360 365 self.nProfiles = None
361 366 self.heightList = None
362 367 self.channelList = None
363 368 self.flagNoData = True
364 369 self.flagDiscontinuousBlock = False
365 370 self.utctime = None
366 371 self.timeZone = 0
367 372 self.dstFlag = None
368 373 self.errorCount = None
369 374 self.nCohInt = None
370 375 self.blocksize = None
371 376 self.flagCohInt = False
372 377 self.flagDecodeData = False # assume the data is not decoded
373 378 self.flagDeflipData = False # assume the data has not been deflipped
374 379 self.flagShiftFFT = False
375 380 self.flagDataAsBlock = False # assume the data is read profile by profile
376 381 self.profileIndex = 0
377 382 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
378 383 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
379 384
380 385 def getNoisebyHildebrand(self, channel=None, Profmin_index=None, Profmax_index=None):
381 386 """
382 387 Determine the noise level using the Hildebrand-Sekhon method
383 388
384 389 Return:
385 390 noiselevel
386 391 """
387 392
388 393 if channel != None:
389 394 data = self.data[channel]
390 395 nChannels = 1
391 396 else:
392 397 data = self.data
393 398 nChannels = self.nChannels
394 399
395 400 noise = numpy.zeros(nChannels)
396 401 power = data * numpy.conjugate(data)
397 402
398 403 for thisChannel in range(nChannels):
399 404 if nChannels == 1:
400 405 daux = power[:].real
401 406 else:
402 407 #print(power.shape)
403 408 daux = power[thisChannel, Profmin_index:Profmax_index, :].real
404 409 #print(daux.shape)
405 410 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
406 411
407 412 return noise
408 413
409 414 def getNoise(self, type=1, channel=None, Profmin_index=None, Profmax_index=None):
410 415
411 416 if type == 1:
412 417 noise = self.getNoisebyHildebrand(channel, Profmin_index, Profmax_index)
413 418
414 419 return noise
415 420
416 421 def getPower(self, channel=None):
417 422
418 423 if channel != None:
419 424 data = self.data[channel]
420 425 else:
421 426 data = self.data
422 427
423 428 power = data * numpy.conjugate(data)
424 429 powerdB = 10 * numpy.log10(power.real)
425 430 powerdB = numpy.squeeze(powerdB)
426 431
427 432 return powerdB
428 433
429 434 @property
430 435 def timeInterval(self):
431 436
432 437 return self.ippSeconds * self.nCohInt
433 438
434 439 noise = property(getNoise, "I'm the 'noise' property.")
435 440
436 441
437 442 class Spectra(JROData):
438 443
439 444 def __init__(self):
440 445 '''
441 446 Constructor
442 447 '''
443 448
444 449 self.data_dc = None
445 450 self.data_spc = None
446 451 self.data_cspc = None
447 452 self.useLocalTime = True
448 453 self.radarControllerHeaderObj = RadarControllerHeader()
449 454 self.systemHeaderObj = SystemHeader()
450 455 self.type = "Spectra"
451 456 self.timeZone = 0
452 457 self.nProfiles = None
453 458 self.heightList = None
454 459 self.channelList = None
455 460 self.pairsList = None
456 461 self.flagNoData = True
457 462 self.flagDiscontinuousBlock = False
458 463 self.utctime = None
459 464 self.nCohInt = None
460 465 self.nIncohInt = None
461 466 self.blocksize = None
462 467 self.nFFTPoints = None
463 468 self.wavelength = None
464 469 self.flagDecodeData = False # assume the data is not decoded
465 470 self.flagDeflipData = False # assume the data has not been deflipped
466 471 self.flagShiftFFT = False
467 472 self.ippFactor = 1
468 473 self.beacon_heiIndexList = []
469 474 self.noise_estimation = None
475 self.spc_noise = None
470 476 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
471 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
477 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp', 'nIncohInt', 'nFFTPoints', 'nProfiles', 'flagDecodeData']
472 478
473 479 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
474 480 """
475 481 Determine the noise level using the Hildebrand-Sekhon method
476 482
477 483 Return:
478 484 noiselevel
479 485 """
480 486
481 487 noise = numpy.zeros(self.nChannels)
482 488
483 489 for channel in range(self.nChannels):
484 490 #print(self.data_spc[0])
485 491 #exit(1)
486 492 daux = self.data_spc[channel,
487 493 xmin_index:xmax_index, ymin_index:ymax_index]
488 494 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
489 495
490 496 return noise
491 497
492 498 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
493 499
494 if self.noise_estimation is not None:
500 if self.spc_noise is not None:
501 # this was estimated by getNoise Operation defined in jroproc_parameters.py
502 return self.spc_noise
503 elif self.noise_estimation is not None:
495 504 # this was estimated by getNoise Operation defined in jroproc_spectra.py
496 505 return self.noise_estimation
497 506 else:
498 507
499 508 noise = self.getNoisebyHildebrand(
500 509 xmin_index, xmax_index, ymin_index, ymax_index)
501 510 return noise
502 511
503 512 def getFreqRangeTimeResponse(self, extrapoints=0):
504 513
505 514 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
506 515 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
507 516
508 517 return freqrange
509 518
510 519 def getAcfRange(self, extrapoints=0):
511 520
512 521 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
513 522 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
514 523
515 524 return freqrange
516 525
517 526 def getFreqRange(self, extrapoints=0):
518 527
519 528 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
520 529 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
521 530
522 531 return freqrange
523 532
524 533 def getVelRange(self, extrapoints=0):
525 534
526 535 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
527 536 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
528 537
529 538 if self.nmodes:
530 539 return velrange/self.nmodes
531 540 else:
532 541 return velrange
533 542
534 543 @property
535 544 def nPairs(self):
536 545
537 546 return len(self.pairsList)
538 547
539 548 @property
540 549 def pairsIndexList(self):
541 550
542 551 return list(range(self.nPairs))
543 552
544 553 @property
545 554 def normFactor(self):
546 555
547 556 pwcode = 1
548 557
549 558 if self.flagDecodeData:
550 559 pwcode = numpy.sum(self.code[0]**2)
551 560 #pwcode = 64
552 561 #print("pwcode: ", pwcode)
553 562 #exit(1)
554 563 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
555 564 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
556 565
557 566 return normFactor
558 567
559 568 @property
560 569 def flag_cspc(self):
561 570
562 571 if self.data_cspc is None:
563 572 return True
564 573
565 574 return False
566 575
567 576 @property
568 577 def flag_dc(self):
569 578
570 579 if self.data_dc is None:
571 580 return True
572 581
573 582 return False
574 583
575 584 @property
576 585 def timeInterval(self):
577 586
578 587 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
579 588 if self.nmodes:
580 589 return self.nmodes*timeInterval
581 590 else:
582 591 return timeInterval
583 592
584 593 def getPower(self):
585 594
586 595 factor = self.normFactor
587 596 z = self.data_spc / factor
588 597 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
589 598 avg = numpy.average(z, axis=1)
590 599
591 600 return 10 * numpy.log10(avg)
592 601
593 602 def getCoherence(self, pairsList=None, phase=False):
594 603
595 604 z = []
596 605 if pairsList is None:
597 606 pairsIndexList = self.pairsIndexList
598 607 else:
599 608 pairsIndexList = []
600 609 for pair in pairsList:
601 610 if pair not in self.pairsList:
602 611 raise ValueError("Pair %s is not in dataOut.pairsList" % (
603 612 pair))
604 613 pairsIndexList.append(self.pairsList.index(pair))
605 614 for i in range(len(pairsIndexList)):
606 615 pair = self.pairsList[pairsIndexList[i]]
607 616 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
608 617 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
609 618 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
610 619 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
611 620 if phase:
612 621 data = numpy.arctan2(avgcoherenceComplex.imag,
613 622 avgcoherenceComplex.real) * 180 / numpy.pi
614 623 else:
615 624 data = numpy.abs(avgcoherenceComplex)
616 625
617 626 z.append(data)
618 627
619 628 return numpy.array(z)
620 629
621 630 def setValue(self, value):
622 631
623 632 print("This property should not be initialized")
624 633
625 634 return
626 635
627 636 noise = property(getNoise, setValue, "I'm the 'noise' property.")
628 637
629 638
630 639 class SpectraHeis(Spectra):
631 640
632 641 def __init__(self):
633 642
634 643 self.radarControllerHeaderObj = RadarControllerHeader()
635 644 self.systemHeaderObj = SystemHeader()
636 645 self.type = "SpectraHeis"
637 646 self.nProfiles = None
638 647 self.heightList = None
639 648 self.channelList = None
640 649 self.flagNoData = True
641 650 self.flagDiscontinuousBlock = False
642 651 self.utctime = None
643 652 self.blocksize = None
644 653 self.profileIndex = 0
645 654 self.nCohInt = 1
646 655 self.nIncohInt = 1
647 656
648 657 @property
649 658 def normFactor(self):
650 659 pwcode = 1
651 660 if self.flagDecodeData:
652 661 pwcode = numpy.sum(self.code[0]**2)
653 662
654 663 normFactor = self.nIncohInt * self.nCohInt * pwcode
655 664
656 665 return normFactor
657 666
658 667 @property
659 668 def timeInterval(self):
660 669
661 670 return self.ippSeconds * self.nCohInt * self.nIncohInt
662 671
663 672
664 673 class Fits(JROData):
665 674
666 675 def __init__(self):
667 676
668 677 self.type = "Fits"
669 678 self.nProfiles = None
670 679 self.heightList = None
671 680 self.channelList = None
672 681 self.flagNoData = True
673 682 self.utctime = None
674 683 self.nCohInt = 1
675 684 self.nIncohInt = 1
676 685 self.useLocalTime = True
677 686 self.profileIndex = 0
678 687 self.timeZone = 0
679 688
680 689 def getTimeRange(self):
681 690
682 691 datatime = []
683 692
684 693 datatime.append(self.ltctime)
685 694 datatime.append(self.ltctime + self.timeInterval)
686 695
687 696 datatime = numpy.array(datatime)
688 697
689 698 return datatime
690 699
691 700 def getChannelIndexList(self):
692 701
693 702 return list(range(self.nChannels))
694 703
695 704 def getNoise(self, type=1):
696 705
697 706
698 707 if type == 1:
699 708 noise = self.getNoisebyHildebrand()
700 709
701 710 if type == 2:
702 711 noise = self.getNoisebySort()
703 712
704 713 if type == 3:
705 714 noise = self.getNoisebyWindow()
706 715
707 716 return noise
708 717
709 718 @property
710 719 def timeInterval(self):
711 720
712 721 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
713 722
714 723 return timeInterval
715 724
716 725 @property
717 726 def ippSeconds(self):
718 727 '''
719 728 '''
720 729 return self.ipp_sec
721 730
722 731 noise = property(getNoise, "I'm the 'noise' property.")
723 732
724 733
725 734 class Correlation(JROData):
726 735
727 736 def __init__(self):
728 737 '''
729 738 Constructor
730 739 '''
731 740 self.radarControllerHeaderObj = RadarControllerHeader()
732 741 self.systemHeaderObj = SystemHeader()
733 742 self.type = "Correlation"
734 743 self.data = None
735 744 self.dtype = None
736 745 self.nProfiles = None
737 746 self.heightList = None
738 747 self.channelList = None
739 748 self.flagNoData = True
740 749 self.flagDiscontinuousBlock = False
741 750 self.utctime = None
742 751 self.timeZone = 0
743 752 self.dstFlag = None
744 753 self.errorCount = None
745 754 self.blocksize = None
746 755 self.flagDecodeData = False # assume the data is not decoded
747 756 self.flagDeflipData = False # assume the data has not been deflipped
748 757 self.pairsList = None
749 758 self.nPoints = None
750 759
751 760 def getPairsList(self):
752 761
753 762 return self.pairsList
754 763
755 764 def getNoise(self, mode=2):
756 765
757 766 indR = numpy.where(self.lagR == 0)[0][0]
758 767 indT = numpy.where(self.lagT == 0)[0][0]
759 768
760 769 jspectra0 = self.data_corr[:, :, indR, :]
761 770 jspectra = copy.copy(jspectra0)
762 771
763 772 num_chan = jspectra.shape[0]
764 773 num_hei = jspectra.shape[2]
765 774
766 775 freq_dc = jspectra.shape[1] / 2
767 776 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
768 777
769 778 if ind_vel[0] < 0:
770 779 ind_vel[list(range(0, 1))] = ind_vel[list(
771 780 range(0, 1))] + self.num_prof
772 781
773 782 if mode == 1:
774 783 jspectra[:, freq_dc, :] = (
775 784 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
776 785
777 786 if mode == 2:
778 787
779 788 vel = numpy.array([-2, -1, 1, 2])
780 789 xx = numpy.zeros([4, 4])
781 790
782 791 for fil in range(4):
783 792 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
784 793
785 794 xx_inv = numpy.linalg.inv(xx)
786 795 xx_aux = xx_inv[0, :]
787 796
788 797 for ich in range(num_chan):
789 798 yy = jspectra[ich, ind_vel, :]
790 799 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
791 800
792 801 junkid = jspectra[ich, freq_dc, :] <= 0
793 802 cjunkid = sum(junkid)
794 803
795 804 if cjunkid.any():
796 805 jspectra[ich, freq_dc, junkid.nonzero()] = (
797 806 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
798 807
799 808 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
800 809
801 810 return noise
802 811
803 812 @property
804 813 def timeInterval(self):
805 814
806 815 return self.ippSeconds * self.nCohInt * self.nProfiles
807 816
808 817 def splitFunctions(self):
809 818
810 819 pairsList = self.pairsList
811 820 ccf_pairs = []
812 821 acf_pairs = []
813 822 ccf_ind = []
814 823 acf_ind = []
815 824 for l in range(len(pairsList)):
816 825 chan0 = pairsList[l][0]
817 826 chan1 = pairsList[l][1]
818 827
819 828 # Getting the autocorrelation pairs
820 829 if chan0 == chan1:
821 830 acf_pairs.append(chan0)
822 831 acf_ind.append(l)
823 832 else:
824 833 ccf_pairs.append(pairsList[l])
825 834 ccf_ind.append(l)
826 835
827 836 data_acf = self.data_cf[acf_ind]
828 837 data_ccf = self.data_cf[ccf_ind]
829 838
830 839 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
831 840
832 841 @property
833 842 def normFactor(self):
834 843 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
835 844 acf_pairs = numpy.array(acf_pairs)
836 845 normFactor = numpy.zeros((self.nPairs, self.nHeights))
837 846
838 847 for p in range(self.nPairs):
839 848 pair = self.pairsList[p]
840 849
841 850 ch0 = pair[0]
842 851 ch1 = pair[1]
843 852
844 853 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
845 854 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
846 855 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
847 856
848 857 return normFactor
849 858
850 859
851 860 class Parameters(Spectra):
852 861
853 862 groupList = None # List of Pairs, Groups, etc
854 863 data_param = None # Parameters obtained
855 864 data_pre = None # Data Pre Parametrization
856 865 data_SNR = None # Signal to Noise Ratio
857 866 abscissaList = None # Abscissa, can be velocities, lags or time
858 867 utctimeInit = None # Initial UTC time
859 868 paramInterval = None # Time interval to calculate Parameters in seconds
860 869 useLocalTime = True
861 870 # Fitting
862 871 data_error = None # Error of the estimation
863 872 constants = None
864 873 library = None
865 874 # Output signal
866 875 outputInterval = None # Time interval to calculate output signal in seconds
867 876 data_output = None # Out signal
868 877 nAvg = None
869 878 noise_estimation = None
870 879 GauSPC = None # Fit gaussian SPC
880 spc_noise = None
871 881
872 882 def __init__(self):
873 883 '''
874 884 Constructor
875 885 '''
876 886 self.radarControllerHeaderObj = RadarControllerHeader()
877 887 self.systemHeaderObj = SystemHeader()
878 888 self.type = "Parameters"
879 889 self.timeZone = 0
890 self.ippFactor = 1
880 891
881 892 def getTimeRange1(self, interval):
882 893
883 894 datatime = []
884 895
885 896 if self.useLocalTime:
886 897 time1 = self.utctimeInit - self.timeZone * 60
887 898 else:
888 899 time1 = self.utctimeInit
889 900
890 901 datatime.append(time1)
891 902 datatime.append(time1 + interval)
892 903 datatime = numpy.array(datatime)
893 904
894 905 return datatime
895 906
896 907 @property
897 908 def timeInterval(self):
898 909
899 910 if hasattr(self, 'timeInterval1'):
900 911 return self.timeInterval1
901 912 else:
902 913 return self.paramInterval
903 914
904 915
905 916 def setValue(self, value):
906 917
907 918 print("This property should not be initialized")
908 919
909 920 return
910 921
911 922 def getNoise(self):
912 923
913 924 return self.spc_noise
914 925
915 926 noise = property(getNoise, setValue, "I'm the 'Noise' property.")
916 927
917 928
918 929 class PlotterData(object):
919 930 '''
920 931 Object to hold data to be plotted
921 932 '''
922 933
923 934 MAXNUMX = 200
924 935 MAXNUMY = 200
925 936
926 937 def __init__(self, code, exp_code, localtime=True):
927 938
928 939 self.key = code
929 940 self.exp_code = exp_code
930 941 self.ready = False
931 942 self.flagNoData = False
932 943 self.localtime = localtime
933 944 self.data = {}
934 945 self.meta = {}
935 946 self.__heights = []
936 947
937 948 def __str__(self):
938 949 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
939 950 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
940 951
941 952 def __len__(self):
942 953 return len(self.data)
943 954
944 955 def __getitem__(self, key):
945 956 if isinstance(key, int):
946 957 return self.data[self.times[key]]
947 958 elif isinstance(key, str):
948 959 ret = numpy.array([self.data[x][key] for x in self.times])
949 960 if ret.ndim > 1:
950 961 ret = numpy.swapaxes(ret, 0, 1)
951 962 return ret
952 963
953 964 def __contains__(self, key):
954 965 return key in self.data[self.min_time]
955 966
956 967 def setup(self):
957 968 '''
958 969 Configure object
959 970 '''
960 971 self.type = ''
961 972 self.ready = False
962 973 del self.data
963 974 self.data = {}
964 975 self.__heights = []
965 976 self.__all_heights = set()
966 977
967 978 def shape(self, key):
968 979 '''
969 980 Get the shape of the data at a single time for the given key
970 981 '''
971 982
972 983 if len(self.data[self.min_time][key]):
973 984 return self.data[self.min_time][key].shape
974 985 return (0,)
975 986
976 987 def update(self, data, tm, meta={}):
977 988 '''
978 989 Update data object with new dataOut
979 990 '''
980 991
981 992 self.data[tm] = data
982 993
983 994 for key, value in meta.items():
984 995 setattr(self, key, value)
985 996
986 997 def normalize_heights(self):
987 998 '''
988 999 Ensure the same data dimensions across different heightList values
989 1000 '''
990 1001
991 1002 H = numpy.array(list(self.__all_heights))
992 1003 H.sort()
993 1004 for key in self.data:
994 1005 shape = self.shape(key)[:-1] + H.shape
995 1006 for tm, obj in list(self.data[key].items()):
996 1007 h = self.__heights[self.times.tolist().index(tm)]
997 1008 if H.size == h.size:
998 1009 continue
999 1010 index = numpy.where(numpy.in1d(H, h))[0]
1000 1011 dummy = numpy.zeros(shape) + numpy.nan
1001 1012 if len(shape) == 2:
1002 1013 dummy[:, index] = obj
1003 1014 else:
1004 1015 dummy[index] = obj
1005 1016 self.data[key][tm] = dummy
1006 1017
1007 1018 self.__heights = [H for tm in self.times]
1008 1019
1009 1020 def jsonify(self, tm, plot_name, plot_type, decimate=False):
1010 1021 '''
1011 1022 Convert data to json
1012 1023 '''
1013 1024
1014 1025 meta = {}
1015 1026 meta['xrange'] = []
1016 1027 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1017 1028 tmp = self.data[tm][self.key]
1018 1029 shape = tmp.shape
1019 1030 if len(shape) == 2:
1020 1031 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1021 1032 elif len(shape) == 3:
1022 1033 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1023 1034 data = self.roundFloats(
1024 1035 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1025 1036 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1026 1037 else:
1027 1038 data = self.roundFloats(self.data[tm][self.key].tolist())
1028 1039
1029 1040 ret = {
1030 1041 'plot': plot_name,
1031 1042 'code': self.exp_code,
1032 1043 'time': float(tm),
1033 1044 'data': data,
1034 1045 }
1035 1046 meta['type'] = plot_type
1036 1047 meta['interval'] = float(self.interval)
1037 1048 meta['localtime'] = self.localtime
1038 1049 meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
1039 1050 meta.update(self.meta)
1040 1051 ret['metadata'] = meta
1041 1052 return json.dumps(ret)
1042 1053
1043 1054 @property
1044 1055 def times(self):
1045 1056 '''
1046 1057 Return the list of times of the current data
1047 1058 '''
1048 1059
1049 1060 ret = [t for t in self.data]
1050 1061 ret.sort()
1051 1062 return numpy.array(ret)
1052 1063
1053 1064 @property
1054 1065 def min_time(self):
1055 1066 '''
1056 1067 Return the minimum time value
1057 1068 '''
1058 1069
1059 1070 return self.times[0]
1060 1071
1061 1072 @property
1062 1073 def max_time(self):
1063 1074 '''
1064 1075 Return the maximum time value
1065 1076 '''
1066 1077
1067 1078 return self.times[-1]
1068 1079
1069 1080 # @property
1070 1081 # def heights(self):
1071 1082 # '''
1072 1083 # Return the list of heights of the current data
1073 1084 # '''
1074 1085
1075 1086 # return numpy.array(self.__heights[-1])
1076 1087
1077 1088 @staticmethod
1078 1089 def roundFloats(obj):
1079 1090 if isinstance(obj, list):
1080 1091 return list(map(PlotterData.roundFloats, obj))
1081 1092 elif isinstance(obj, float):
1082 1093 return round(obj, 2)
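
The hunk above belongs to the data-object module (imported elsewhere in this changeset as schainpy.model.data.jrodata) and includes the dtype helpers getNumpyDtype() and getDataTypeCode(). The snippet below is a small round-trip sketch of those helpers under that assumed import path; the sample values are placeholders.

# Round-trip sketch for the dtype helpers defined above (codes 0-5 map to
# int8/int16/int32/int64/float32/float64 real-imag sample pairs).
import numpy
from schainpy.model.data.jrodata import getNumpyDtype, getDataTypeCode  # import path assumed

dtype = getNumpyDtype(4)               # structured dtype: ('real', '<f4'), ('imag', '<f4')
assert getDataTypeCode(dtype) == 4     # the inverse mapping recovers the original code

samples = numpy.zeros(8, dtype=dtype)  # raw complex samples as stored in JRO files
samples['real'] += 1.0
power = samples['real']**2 + samples['imag']**2   # power from the real/imag pair
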
@@ -1,704 +1,705
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute, but
103 103 will sleep until it can be called:
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new operation.
154 154 Child classes must be defined as follows:
155 155
156 156 ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 205 self.localtime = kwargs.pop('localtime', True)
206 206 self.show = kwargs.get('show', True)
207 207 self.save = kwargs.get('save', False)
208 208 self.save_period = kwargs.get('save_period', 0)
209 209 self.colormap = kwargs.get('colormap', self.colormap)
210 210 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
211 211 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
212 212 self.colormaps = kwargs.get('colormaps', None)
213 213 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
214 214 self.showprofile = kwargs.get('showprofile', False)
215 215 self.title = kwargs.get('wintitle', self.CODE.upper())
216 216 self.cb_label = kwargs.get('cb_label', None)
217 217 self.cb_labels = kwargs.get('cb_labels', None)
218 218 self.labels = kwargs.get('labels', None)
219 219 self.xaxis = kwargs.get('xaxis', 'frequency')
220 220 self.zmin = kwargs.get('zmin', None)
221 221 self.zmax = kwargs.get('zmax', None)
222 222 self.zlimits = kwargs.get('zlimits', None)
223 223 self.xlimits = kwargs.get('xlimits', None)
224 224 self.xstep_given = kwargs.get('xstep_given', None)
225 225 self.ystep_given = kwargs.get('ystep_given', None)
226 226 self.autoxticks = kwargs.get('autoxticks', True)
227 227 self.xmin = kwargs.get('xmin', None)
228 228 self.xmax = kwargs.get('xmax', None)
229 229 self.xrange = kwargs.get('xrange', 12)
230 230 self.xscale = kwargs.get('xscale', None)
231 231 self.ymin = kwargs.get('ymin', None)
232 232 self.ymax = kwargs.get('ymax', None)
233 233 self.yscale = kwargs.get('yscale', None)
234 234 self.xlabel = kwargs.get('xlabel', None)
235 235 self.attr_time = kwargs.get('attr_time', 'utctime')
236 236 self.attr_data = kwargs.get('attr_data', 'data_param')
237 237 self.decimation = kwargs.get('decimation', None)
238 238 self.oneFigure = kwargs.get('oneFigure', True)
239 239 self.width = kwargs.get('width', None)
240 240 self.height = kwargs.get('height', None)
241 241 self.colorbar = kwargs.get('colorbar', True)
242 242 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
243 243 self.channels = kwargs.get('channels', None)
244 244 self.titles = kwargs.get('titles', [])
245 245 self.polar = False
246 246 self.type = kwargs.get('type', 'iq')
247 247 self.grid = kwargs.get('grid', False)
248 248 self.pause = kwargs.get('pause', False)
249 249 self.save_code = kwargs.get('save_code', self.CODE)
250 250 self.throttle = kwargs.get('throttle', 0)
251 251 self.exp_code = kwargs.get('exp_code', None)
252 252 self.server = kwargs.get('server', False)
253 253 self.sender_period = kwargs.get('sender_period', 60)
254 254 self.tag = kwargs.get('tag', '')
255 255 self.height_index = kwargs.get('height_index', None)
256 256 self.__throttle_plot = apply_throttle(self.throttle)
257 257 code = self.attr_data if self.attr_data else self.CODE
258 258 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
259 259 #self.EEJtype = kwargs.get('EEJtype', 2)
260 260
261 261 if self.server:
262 262 if not self.server.startswith('tcp://'):
263 263 self.server = 'tcp://{}'.format(self.server)
264 264 log.success(
265 265 'Sending to server: {}'.format(self.server),
266 266 self.name
267 267 )
268 268
269 269 if isinstance(self.attr_data, str):
270 270 self.attr_data = [self.attr_data]
271 271
272 272 def __setup_plot(self):
273 273 '''
274 274 Common setup for all figures, here figures and axes are created
275 275 '''
276 276
277 277 self.setup()
278 278
279 279 self.time_label = 'LT' if self.localtime else 'UTC'
280 280
281 281 if self.width is None:
282 282 self.width = 8
283 283
284 284 self.figures = []
285 285 self.axes = []
286 286 self.cb_axes = []
287 287 self.pf_axes = []
288 288 self.cmaps = []
289 289
290 290 size = '15%' if self.ncols == 1 else '30%'
291 291 pad = '4%' if self.ncols == 1 else '8%'
292 292
293 293 if self.oneFigure:
294 294 if self.height is None:
295 295 self.height = 1.4 * self.nrows + 1
296 296 fig = plt.figure(figsize=(self.width, self.height),
297 297 edgecolor='k',
298 298 facecolor='w')
299 299 self.figures.append(fig)
300 300 for n in range(self.nplots):
301 301 ax = fig.add_subplot(self.nrows, self.ncols,
302 302 n + 1, polar=self.polar)
303 303 ax.tick_params(labelsize=8)
304 304 ax.firsttime = True
305 305 ax.index = 0
306 306 ax.press = None
307 ax.cbar = None
307 308 self.axes.append(ax)
308 309 if self.showprofile:
309 310 cax = self.__add_axes(ax, size=size, pad=pad)
310 311 cax.tick_params(labelsize=8)
311 312 self.pf_axes.append(cax)
312 313 else:
313 314 if self.height is None:
314 315 self.height = 3
315 316 for n in range(self.nplots):
316 317 fig = plt.figure(figsize=(self.width, self.height),
317 318 edgecolor='k',
318 319 facecolor='w')
319 320 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
320 321 ax.tick_params(labelsize=8)
321 322 ax.firsttime = True
322 323 ax.index = 0
323 324 ax.press = None
324 325 self.figures.append(fig)
325 326 self.axes.append(ax)
326 327 if self.showprofile:
327 328 cax = self.__add_axes(ax, size=size, pad=pad)
328 329 cax.tick_params(labelsize=8)
329 330 self.pf_axes.append(cax)
330 331
331 332 for n in range(self.nrows):
332 333 if self.colormaps is not None:
333 334 cmap = plt.get_cmap(self.colormaps[n])
334 335 else:
335 336 cmap = plt.get_cmap(self.colormap)
336 337 cmap.set_bad(self.bgcolor, 1.)
337 338 self.cmaps.append(cmap)
338 339
339 340 def __add_axes(self, ax, size='30%', pad='8%'):
340 341 '''
341 342 Add new axes to the given figure
342 343 '''
343 344 divider = make_axes_locatable(ax)
344 345 nax = divider.new_horizontal(size=size, pad=pad)
345 346 ax.figure.add_axes(nax)
346 347 return nax
347 348
348 349 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
349 350 '''
350 351 Create a masked array for missing data
351 352 '''
352 353 if x_buffer.shape[0] < 2:
353 354 return x_buffer, y_buffer, z_buffer
354 355
355 356 deltas = x_buffer[1:] - x_buffer[0:-1]
356 357 x_median = numpy.median(deltas)
357 358
358 359 index = numpy.where(deltas > 5 * x_median)
359 360
360 361 if len(index[0]) != 0:
361 362 z_buffer[::, index[0], ::] = self.__missing
362 363 z_buffer = numpy.ma.masked_inside(z_buffer,
363 364 0.99 * self.__missing,
364 365 1.01 * self.__missing)
365 366
366 367 return x_buffer, y_buffer, z_buffer
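
Restating the masking idea of fill_gaps as a standalone numpy sketch (the sample time axis and array sizes are illustrative; the 1e30 sentinel mirrors the class attribute used above): a step larger than five times the median time spacing is treated as a gap and masked out so it is not drawn as continuous data.

import numpy

MISSING = 1e30
x = numpy.array([0, 60, 120, 600, 660])      # seconds; one ~8-minute gap
z = numpy.ones((2, len(x), 4))               # (channels, times, heights)

deltas = x[1:] - x[:-1]
index = numpy.where(deltas > 5 * numpy.median(deltas))
if len(index[0]) != 0:
    z[:, index[0], :] = MISSING
z = numpy.ma.masked_inside(z, 0.99 * MISSING, 1.01 * MISSING)
print(z.mask[0, :, 0])                       # the column before the gap (index 2) is masked
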
367 368
368 369 def decimate(self):
369 370
370 371 # dx = int(len(self.x)/self.__MAXNUMX) + 1
371 372 dy = int(len(self.y) / self.decimation) + 1
372 373
373 374 # x = self.x[::dx]
374 375 x = self.x
375 376 y = self.y[::dy]
376 377 z = self.z[::, ::, ::dy]
377 378
378 379 return x, y, z
379 380
380 381 def format(self):
381 382 '''
382 383 Set min and max values, labels, ticks and titles
383 384 '''
384 385
385 386 for n, ax in enumerate(self.axes):
386 387 if ax.firsttime:
387 388 if self.xaxis != 'time':
388 389 xmin = self.xmin
389 390 xmax = self.xmax
390 391 else:
391 392 xmin = self.tmin
392 393 xmax = self.tmin + self.xrange*60*60
393 394 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
394 395 ax.xaxis.set_major_locator(LinearLocator(9))
395 396 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
396 397 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
397 398 ax.set_facecolor(self.bgcolor)
398 399 if self.xscale:
399 400 ax.xaxis.set_major_formatter(FuncFormatter(
400 401 lambda x, pos: '{0:g}'.format(x*self.xscale)))
401 402 if self.yscale:
402 403 ax.yaxis.set_major_formatter(FuncFormatter(
403 404 lambda x, pos: '{0:g}'.format(x*self.yscale)))
404 405 if self.xlabel is not None:
405 406 ax.set_xlabel(self.xlabel)
406 407 if self.ylabel is not None:
407 408 ax.set_ylabel(self.ylabel)
408 409 if self.showprofile:
409 410 if self.zlimits is not None:
410 411 self.zmin, self.zmax = self.zlimits[n]
411 412 self.pf_axes[n].set_ylim(ymin, ymax)
412 413 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
413 414 self.pf_axes[n].set_xlabel('dB')
414 415 self.pf_axes[n].grid(True, axis='x')
415 416 [tick.set_visible(False)
416 417 for tick in self.pf_axes[n].get_yticklabels()]
417 if self.colorbar:
418 if self.colorbar and ax.cbar is None:
418 419 ax.cbar = plt.colorbar(
419 420 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
420 421 ax.cbar.ax.tick_params(labelsize=8)
421 422 ax.cbar.ax.press = None
422 423 if self.cb_label:
423 424 ax.cbar.set_label(self.cb_label, size=8)
424 425 elif self.cb_labels:
425 426 ax.cbar.set_label(self.cb_labels[n], size=8)
426 427 else:
427 428 ax.cbar = None
428 429 ax.set_xlim(xmin, xmax)
429 430 ax.set_ylim(ymin, ymax)
430 431 ax.firsttime = False
431 432 if self.grid:
432 433 ax.grid(True)
433 434 if not self.polar:
434 435 ax.set_title('{} {} {}'.format(
435 436 self.titles[n],
436 437 self.getDateTime(self.data.max_time).strftime(
437 438 '%Y-%m-%d %H:%M:%S'),
438 439 self.time_label),
439 440 size=8)
440 441 else:
441 442 ax.set_title('{}'.format(self.titles[n]), size=8)
442 443 ax.set_ylim(0, 90)
443 444 ax.set_yticks(numpy.arange(0, 90, 20))
444 445 ax.yaxis.labelpad = 40
445 446
446 447 if self.firsttime:
447 448 for n, fig in enumerate(self.figures):
448 449 fig.subplots_adjust(**self.plots_adjust)
449 450 self.firsttime = False
450 451
451 452 def clear_figures(self):
452 453 '''
453 454 Reset axes for redraw plots
454 455 '''
455 456
456 457 for ax in self.axes+self.pf_axes+self.cb_axes:
457 458 ax.clear()
458 459 ax.firsttime = True
459 460 if hasattr(ax, 'cbar') and ax.cbar:
460 461 ax.cbar.remove()
461 462
462 463 def __plot(self):
463 464 '''
464 465 Main function to plot, format and save figures
465 466 '''
466 467
467 468 self.plot()
468 469 self.format()
469 470
470 471 for n, fig in enumerate(self.figures):
471 472 if self.nrows == 0 or self.nplots == 0:
472 473 log.warning('No data', self.name)
473 474 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
474 475 fig.canvas.manager.set_window_title(self.CODE)
475 476 continue
476 477
477 478 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
478 479 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
479 480 fig.canvas.draw()
480 481 if self.show:
481 482 fig.show()
482 483 figpause(0.01)
483 484
484 485 if self.save:
485 486 self.save_figure(n)
486 487
487 488 if self.server:
488 489 self.send_to_server()
489 490
490 491 def __update(self, dataOut, timestamp):
491 492 '''
492 493 '''
493 494
494 495 metadata = {
495 496 'yrange': dataOut.heightList,
496 497 'interval': dataOut.timeInterval,
497 498 'channels': dataOut.channelList
498 499 }
499 500
500 501 data, meta = self.update(dataOut)
501 502 metadata.update(meta)
502 503 self.data.update(data, timestamp, metadata)
503 504
504 505 def save_figure(self, n):
505 506 '''
506 507 '''
507 508
508 509 if (self.data.max_time - self.save_time) <= self.save_period:
509 510 return
510 511
511 512 self.save_time = self.data.max_time
512 513
513 514 fig = self.figures[n]
514 515
515 516 if self.throttle == 0:
516 517 figname = os.path.join(
517 518 self.save,
518 519 self.save_code,
519 520 '{}_{}.png'.format(
520 521 self.save_code,
521 522 self.getDateTime(self.data.max_time).strftime(
522 523 '%Y%m%d_%H%M%S'
523 524 ),
524 525 )
525 526 )
526 527 log.log('Saving figure: {}'.format(figname), self.name)
527 528 if not os.path.isdir(os.path.dirname(figname)):
528 529 os.makedirs(os.path.dirname(figname))
529 530 fig.savefig(figname)
530 531
531 532 figname = os.path.join(
532 533 self.save,
533 534 #self.save_code,
534 535 '{}_{}.png'.format(
535 536 self.save_code,
536 537 self.getDateTime(self.data.min_time).strftime(
537 538 '%Y%m%d'
538 539 ),
539 540 )
540 541 )
541 542 log.log('Saving figure: {}'.format(figname), self.name)
542 543 if not os.path.isdir(os.path.dirname(figname)):
543 544 os.makedirs(os.path.dirname(figname))
544 545 fig.savefig(figname)
545 546
546 547 def send_to_server(self):
547 548 '''
548 549 '''
549 550
550 551 if self.exp_code is None:
551 552 log.warning('Missing `exp_code`, skipping sending to server...')
552 553
553 554 last_time = self.data.max_time
554 555 interval = last_time - self.sender_time
555 556 if interval < self.sender_period:
556 557 return
557 558
558 559 self.sender_time = last_time
559 560
560 561 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax', 'zlimits']
561 562 for attr in attrs:
562 563 value = getattr(self, attr)
563 564 if value:
564 565 if isinstance(value, (numpy.float32, numpy.float64)):
565 566 value = round(float(value), 2)
566 567 self.data.meta[attr] = value
567 568 if self.colormap == 'jet':
568 569 self.data.meta['colormap'] = 'Jet'
569 570 elif 'RdBu' in self.colormap:
570 571 self.data.meta['colormap'] = 'RdBu'
571 572 else:
572 573 self.data.meta['colormap'] = 'Viridis'
573 574 self.data.meta['interval'] = int(interval)
574 575 #print(last_time)
575 576 #print(time.time())
576 577 #exit(1)
577 578 self.sender_queue.append(last_time)
578 579
579 580 while True:
580 581 try:
581 582 tm = self.sender_queue.popleft()
582 583 except IndexError:
583 584 break
584 585 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
585 586 self.socket.send_string(msg)
586 587 socks = dict(self.poll.poll(2000))
587 588 if socks.get(self.socket) == zmq.POLLIN:
588 589 reply = self.socket.recv_string()
589 590 if reply == 'ok':
590 591 log.log("Response from server ok", self.name)
591 592 time.sleep(0.1)
592 593 continue
593 594 else:
594 595 log.warning(
595 596 "Malformed reply from server: {}".format(reply), self.name)
596 597 else:
597 598 log.warning(
598 599 "No response from server, retrying...", self.name)
599 600 self.sender_queue.appendleft(tm)
600 601 self.socket.setsockopt(zmq.LINGER, 0)
601 602 self.socket.close()
602 603 self.poll.unregister(self.socket)
603 604 self.socket = self.context.socket(zmq.REQ)
604 605 self.socket.connect(self.server)
605 606 self.poll.register(self.socket, zmq.POLLIN)
606 607 break
607 608
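
For context, a minimal sketch of the receiving side that send_to_server assumes: a ZMQ REP socket that reads each JSON message and answers with the literal 'ok' the loop above checks for. The endpoint and the handling step are assumptions for illustration, not part of this module.

import zmq

context = zmq.Context()
socket = context.socket(zmq.REP)
socket.bind('tcp://*:4444')        # hypothetical port; must match the `server` kwarg

while True:
    msg = socket.recv_string()     # JSON string produced by self.data.jsonify(...)
    # ...store or forward the plot data here...
    socket.send_string('ok')       # anything else is treated as a malformed reply
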
608 609 def setup(self):
609 610 '''
610 611 This method should be implemented in the child class; the following
611 612 attributes must be set:
612 613
613 614 self.nrows: number of rows
614 615 self.ncols: number of cols
615 616 self.nplots: number of plots (channels or pairs)
616 617 self.ylabel: label for Y axes
617 618 self.titles: list of axes title
618 619
619 620 '''
620 621 raise NotImplementedError
621 622
622 623 def plot(self):
623 624 '''
624 625 Must be defined in the child class; this is the actual plotting method
625 626 '''
626 627 raise NotImplementedError
627 628
628 629 def update(self, dataOut):
629 630 '''
630 631 Must be defined in the child class; it returns the new data used to update self.data
631 632 '''
632 633
633 634 data = {
634 635 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
635 636 }
636 637 meta = {}
637 638
638 639 return data, meta
639 640
640 641 def run(self, dataOut, **kwargs):
641 642 '''
642 643 Main plotting routine
643 644 '''
644 645
645 646 if self.isConfig is False:
646 647 self.__setup(**kwargs)
647 648
648 649 if self.localtime:
649 650 self.getDateTime = datetime.datetime.fromtimestamp
650 651 else:
651 652 self.getDateTime = datetime.datetime.utcfromtimestamp
652 653
653 654 self.data.setup()
654 655 self.isConfig = True
655 656 if self.server:
656 657 self.context = zmq.Context()
657 658 self.socket = self.context.socket(zmq.REQ)
658 659 self.socket.connect(self.server)
659 660 self.poll = zmq.Poller()
660 661 self.poll.register(self.socket, zmq.POLLIN)
661 662
662 663 tm = getattr(dataOut, self.attr_time)
663 664 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
664 665 self.save_time = tm
665 666 self.__plot()
666 667 #self.tmin += self.xrange*60*60 #Modified by R. Flores
667 668 self.tmin += 24*60*60 #Modified by R. Flores
668 669
669 670 self.data.setup()
670 671 self.clear_figures()
671 672
672 673 self.__update(dataOut, tm)
673 674
674 675 if self.isPlotConfig is False:
675 676 self.__setup_plot()
676 677 self.isPlotConfig = True
677 678 if self.xaxis == 'time':
678 679 dt = self.getDateTime(tm)
679 680
680 681 if self.xmin is None:
681 682 self.tmin = tm
682 683 self.xmin = dt.hour
683 684 minutes = (self.xmin-int(self.xmin)) * 60
684 685 seconds = (minutes - int(minutes)) * 60
685 686 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
686 687 datetime.datetime(1970, 1, 1)).total_seconds()
687 688 if self.localtime:
688 689 self.tmin += time.timezone
689 690
690 691 if self.xmin is not None and self.xmax is not None:
691 692 self.xrange = self.xmax - self.xmin
692 693
693 694 if self.throttle == 0:
694 695 self.__plot()
695 696 else:
696 697 self.__throttle_plot(self.__plot)#, coerce=coerce)
697 698
698 699 def close(self):
699 700
700 701 if self.data and not self.data.flagNoData:
701 702 self.save_time = 0
702 703 self.__plot()
703 704 if self.data and not self.data.flagNoData and self.pause:
704 705 figpause(10)
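
For orientation, a minimal hypothetical subclass that follows the setup()/update()/plot() contract documented above. The class name NoisePlot, the dataOut.data_noise attribute and the assumed (channels, times) buffer layout are illustrative only, not part of schainpy:

import numpy

class NoisePlot(Plot):
    '''Hypothetical example: noise level per channel versus time.'''

    CODE = 'noise'
    plot_type = 'scatterbuffer'

    def setup(self):
        # Attributes the base class expects (see the setup() docstring above)
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.ylabel = 'Noise [dB]'
        self.titles = ['Noise']
        self.colorbar = False            # line plot, nothing to map to a colorbar

    def update(self, dataOut):
        # `data_noise` is an assumed attribute of the incoming data object
        return {'noise': 10 * numpy.log10(dataOut.data_noise)}, {}

    def plot(self):
        x = self.data.times
        self.y = self.data['noise'][0]   # assumed buffer layout: (channels, times)
        ax = self.axes[0]
        if ax.firsttime:
            ax.plt = ax.plot(x, self.y, '.')[0]
        else:
            ax.plt.set_data(x, self.y)
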
@@ -1,494 +1,494
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
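
A quick usage sketch for the two helpers above (assuming only the functions just defined; the coordinates are illustrative, roughly Jicamarca and a point half a degree further east): ll2xy returns the east/north displacement in km, and km2deg converts a surface distance back to an angle.

lat1, lon1 = -11.95, -76.87
lat2, lon2 = -11.95, -76.37

east_km, north_km = ll2xy(lat1, lon1, lat2, lon2)
print(east_km, north_km)        # ~54 km east, ~0 km north
print(km2deg(east_km))          # ~0.49 degrees of arc at the surface
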
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 # colormap = 'jet'
39 39 # plot_type = 'pcolor'
40 40
41 41 class DobleGaussianPlot(SpectraPlot):
42 42 '''
43 43 Plot for Double Gaussian fit
44 44 '''
45 45 CODE = 'gaussian_fit'
46 46 # colormap = 'jet'
47 47 # plot_type = 'pcolor'
48 48
49 49
50 50 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
51 51 '''
52 52 Plot SpectraCut with Double Gaussian Fit
53 53 '''
54 54 CODE = 'cut_gaussian_fit'
55 55
56 56
57 57 class SpectralFitObliquePlot(SpectraPlot):
58 58 '''
59 59 Plot for Spectral Oblique
60 60 '''
61 61 CODE = 'spc_moments'
62 62 colormap = 'jet'
63 63 plot_type = 'pcolor'
64 64
65 65
66 66
67 67 class SnrPlot(RTIPlot):
68 68 '''
69 69 Plot for SNR Data
70 70 '''
71 71
72 72 CODE = 'snr'
73 73 colormap = 'jet'
74 74
75 75 def update(self, dataOut):
76 76
77 77 data = {
78 78 'snr': 10*numpy.log10(dataOut.data_snr)
79 79 }
80 80
81 81 return data, {}
82 82
83 83 class DopplerPlot(RTIPlot):
84 84 '''
85 85 Plot for DOPPLER Data (1st moment)
86 86 '''
87 87
88 88 CODE = 'dop'
89 89 colormap = 'jet'
90 90
91 91 def update(self, dataOut):
92 92
93 93 data = {
94 94 'dop': 10*numpy.log10(dataOut.data_dop)
95 95 }
96 96
97 97 return data, {}
98 98
99 99 class DopplerEEJPlot_V0(RTIPlot):
100 100 '''
101 101 Written by R. Flores
102 102 '''
103 103 '''
104 104 Plot for EEJ
105 105 '''
106 106
107 107 CODE = 'dop'
108 108 #colormap = 'RdBu_r'
109 109 colormap = 'jet'
110 110
111 111 def setup(self):
112 112
113 113 self.xaxis = 'time'
114 114 self.ncols = 1
115 115 self.nrows = len(self.data.channels)
116 116 self.nplots = len(self.data.channels)
117 117 self.ylabel = 'Range [km]'
118 118 self.xlabel = 'Time'
119 119 self.cb_label = '(m/s)'
120 120 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
121 121 self.titles = ['{} Channel {}'.format(
122 122 self.CODE.upper(), x) for x in range(self.nrows)]
123 123
124 124 def update(self, dataOut):
125 125 #print(self.EEJtype)
126 126
127 127 if self.EEJtype == 1:
128 128 data = {
129 129 'dop': dataOut.Oblique_params[:,-2,:]
130 130 }
131 131 elif self.EEJtype == 2:
132 132 data = {
133 133 'dop': dataOut.Oblique_params[:,-1,:]
134 134 }
135 135
136 136 return data, {}
137 137
138 138 class DopplerEEJPlot(RTIPlot):
139 139 '''
140 140 Written by R. Flores
141 141 '''
142 142 '''
143 143 Plot for Doppler Shift EEJ
144 144 '''
145 145
146 146 CODE = 'dop'
147 147 colormap = 'RdBu_r'
148 148 #colormap = 'jet'
149 149
150 150 def setup(self):
151 151
152 152 self.xaxis = 'time'
153 153 self.ncols = 1
154 154 self.nrows = 2
155 155 self.nplots = 2
156 156 self.ylabel = 'Range [km]'
157 157 self.xlabel = 'Time'
158 158 self.cb_label = '(m/s)'
159 159 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
160 160 self.titles = ['{} EEJ Type {} /'.format(
161 161 self.CODE.upper(), x) for x in range(1,1+self.nrows)]
162 162
163 163 def update(self, dataOut):
164 164
165 165 if dataOut.mode == 11: #Double Gaussian
166 166 doppler = numpy.append(dataOut.Oblique_params[:,1,:],dataOut.Oblique_params[:,4,:],axis=0)
167 167 elif dataOut.mode == 9: #Double Skew Gaussian
168 168 doppler = numpy.append(dataOut.Oblique_params[:,-2,:],dataOut.Oblique_params[:,-1,:],axis=0)
169 169 data = {
170 170 'dop': doppler
171 171 }
172 172
173 173 return data, {}
174 174
175 175 class SpcWidthEEJPlot(RTIPlot):
176 176 '''
177 177 Written by R. Flores
178 178 '''
179 179 '''
180 180 Plot for EEJ Spectral Width
181 181 '''
182 182
183 183 CODE = 'width'
184 184 #colormap = 'RdBu_r'
185 185 colormap = 'jet'
186 186
187 187 def setup(self):
188 188
189 189 self.xaxis = 'time'
190 190 self.ncols = 1
191 191 self.nrows = 2
192 192 self.nplots = 2
193 193 self.ylabel = 'Range [km]'
194 194 self.xlabel = 'Time'
195 195 self.cb_label = '(m/s)'
196 196 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
197 197 self.titles = ['{} EEJ Type {} /'.format(
198 198 self.CODE.upper(), x) for x in range(1,1+self.nrows)]
199 199
200 200 def update(self, dataOut):
201 201
202 202 if dataOut.mode == 11: #Double Gaussian
203 203 width = numpy.append(dataOut.Oblique_params[:,2,:],dataOut.Oblique_params[:,5,:],axis=0)
204 204 elif dataOut.mode == 9: #Double Skew Gaussian
205 205 width = numpy.append(dataOut.Oblique_params[:,2,:],dataOut.Oblique_params[:,6,:],axis=0)
206 206 data = {
207 207 'width': width
208 208 }
209 209
210 210 return data, {}
211 211
212 212 class PowerPlot(RTIPlot):
213 213 '''
214 214 Plot for Power Data (0 moment)
215 215 '''
216 216
217 217 CODE = 'pow'
218 218 colormap = 'jet'
219 219
220 220 def update(self, dataOut):
221 221
222 222 data = {
223 223 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
224 224 }
225 225
226 226 return data, {}
227 227
228 228 class SpectralWidthPlot(RTIPlot):
229 229 '''
230 230 Plot for Spectral Width Data (2nd moment)
231 231 '''
232 232
233 233 CODE = 'width'
234 234 colormap = 'jet'
235 235
236 236 def update(self, dataOut):
237 237
238 238 data = {
239 239 'width': dataOut.data_width
240 240 }
241 241
242 242 return data, {}
243 243
244 244 class SkyMapPlot(Plot):
245 245 '''
246 246 Plot for meteors detection data
247 247 '''
248 248
249 249 CODE = 'param'
250 250
251 251 def setup(self):
252 252
253 253 self.ncols = 1
254 254 self.nrows = 1
255 255 self.width = 7.2
256 256 self.height = 7.2
257 257 self.nplots = 1
258 258 self.xlabel = 'Zonal Zenith Angle (deg)'
259 259 self.ylabel = 'Meridional Zenith Angle (deg)'
260 260 self.polar = True
261 261 self.ymin = -180
262 262 self.ymax = 180
263 263 self.colorbar = False
264 264
265 265 def plot(self):
266 266
267 267 arrayParameters = numpy.concatenate(self.data['param'])
268 268 error = arrayParameters[:, -1]
269 269 indValid = numpy.where(error == 0)[0]
270 270 finalMeteor = arrayParameters[indValid, :]
271 271 finalAzimuth = finalMeteor[:, 3]
272 272 finalZenith = finalMeteor[:, 4]
273 273
274 274 x = finalAzimuth * numpy.pi / 180
275 275 y = finalZenith
276 276
277 277 ax = self.axes[0]
278 278
279 279 if ax.firsttime:
280 280 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
281 281 else:
282 282 ax.plot.set_data(x, y)
283 283
284 284 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
285 285 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
286 286 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
287 287 dt2,
288 288 len(x))
289 289 self.titles[0] = title
290 290
291 291
292 292 class GenericRTIPlot(Plot):
293 293 '''
294 294 Plot for data_xxxx object
295 295 '''
296 296
297 297 CODE = 'param'
298 298 colormap = 'viridis'
299 299 plot_type = 'pcolorbuffer'
300 300
301 301 def setup(self):
302 302 self.xaxis = 'time'
303 303 self.ncols = 1
304 304 self.nrows = self.data.shape('param')[0]
305 305 self.nplots = self.nrows
306 306 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
307 307
308 308 if not self.xlabel:
309 309 self.xlabel = 'Time'
310 310
311 311 self.ylabel = 'Range [km]'
312 312 if not self.titles:
313 313 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
314 314
315 315 def update(self, dataOut):
316 316
317 317 data = {
318 318 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
319 319 }
320 320
321 321 meta = {}
322 322
323 323 return data, meta
324 324
325 325 def plot(self):
326 326 # self.data.normalize_heights()
327 327 self.x = self.data.times
328 328 self.y = self.data.yrange
329 329 self.z = self.data['param']
330 330
331 331 self.z = numpy.ma.masked_invalid(self.z)
332 332
333 333 if self.decimation is None:
334 334 x, y, z = self.fill_gaps(self.x, self.y, self.z)
335 335 else:
336 336 x, y, z = self.fill_gaps(*self.decimate())
337 337
338 338 for n, ax in enumerate(self.axes):
339 339
340 340 self.zmax = self.zmax if self.zmax is not None else numpy.max(
341 341 self.z[n])
342 342 self.zmin = self.zmin if self.zmin is not None else numpy.min(
343 343 self.z[n])
344 344
345 345 if ax.firsttime:
346 346 if self.zlimits is not None:
347 347 self.zmin, self.zmax = self.zlimits[n]
348 348
349 349 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
350 350 vmin=self.zmin,
351 351 vmax=self.zmax,
352 352 cmap=self.cmaps[n]
353 353 )
354 354 else:
355 355 if self.zlimits is not None:
356 356 self.zmin, self.zmax = self.zlimits[n]
357 ax.collections.remove(ax.collections[0])
357 ax.plt.remove()
358 358 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
359 359 vmin=self.zmin,
360 360 vmax=self.zmax,
361 361 cmap=self.cmaps[n]
362 362 )
363 363
364 364
365 365 class PolarMapPlot(Plot):
366 366 '''
367 367 Plot for weather radar
368 368 '''
369 369
370 370 CODE = 'param'
371 371 colormap = 'seismic'
372 372
373 373 def setup(self):
374 374 self.ncols = 1
375 375 self.nrows = 1
376 376 self.width = 9
377 377 self.height = 8
378 378 self.mode = self.data.meta['mode']
379 379 if self.channels is not None:
380 380 self.nplots = len(self.channels)
381 381 self.nrows = len(self.channels)
382 382 else:
383 383 self.nplots = self.data.shape(self.CODE)[0]
384 384 self.nrows = self.nplots
385 385 self.channels = list(range(self.nplots))
386 386 if self.mode == 'E':
387 387 self.xlabel = 'Longitude'
388 388 self.ylabel = 'Latitude'
389 389 else:
390 390 self.xlabel = 'Range (km)'
391 391 self.ylabel = 'Height (km)'
392 392 self.bgcolor = 'white'
393 393 self.cb_labels = self.data.meta['units']
394 394 self.lat = self.data.meta['latitude']
395 395 self.lon = self.data.meta['longitude']
396 396 self.xmin, self.xmax = float(
397 397 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
398 398 self.ymin, self.ymax = float(
399 399 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
400 400 # self.polar = True
401 401
402 402 def plot(self):
403 403
404 404 for n, ax in enumerate(self.axes):
405 405 data = self.data['param'][self.channels[n]]
406 406
407 407 zeniths = numpy.linspace(
408 408 0, self.data.meta['max_range'], data.shape[1])
409 409 if self.mode == 'E':
410 410 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
411 411 r, theta = numpy.meshgrid(zeniths, azimuths)
412 412 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
413 413 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
414 414 x = km2deg(x) + self.lon
415 415 y = km2deg(y) + self.lat
416 416 else:
417 417 azimuths = numpy.radians(self.data.yrange)
418 418 r, theta = numpy.meshgrid(zeniths, azimuths)
419 419 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
420 420 self.y = zeniths
421 421
422 422 if ax.firsttime:
423 423 if self.zlimits is not None:
424 424 self.zmin, self.zmax = self.zlimits[n]
425 425 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
426 426 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
427 427 vmin=self.zmin,
428 428 vmax=self.zmax,
429 429 cmap=self.cmaps[n])
430 430 else:
431 431 if self.zlimits is not None:
432 432 self.zmin, self.zmax = self.zlimits[n]
433 ax.collections.remove(ax.collections[0])
433 ax.plt.remove()
434 434 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
435 435 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
436 436 vmin=self.zmin,
437 437 vmax=self.zmax,
438 438 cmap=self.cmaps[n])
439 439
440 440 if self.mode == 'A':
441 441 continue
442 442
443 443 # plot district names
444 444 f = open('/data/workspace/schain_scripts/distrito.csv')
445 445 for line in f:
446 446 label, lon, lat = [s.strip() for s in line.split(',') if s]
447 447 lat = float(lat)
448 448 lon = float(lon)
449 449 # ax.plot(lon, lat, '.b', ms=2)
450 450 ax.text(lon, lat, label, ha='center',
451 451 va='bottom', size='8', color='black')
452 452
453 453 # plot limites
454 454 limites = []
455 455 tmp = []
456 456 for line in open('/data/workspace/schain_scripts/lima.csv'):
457 457 if '#' in line:
458 458 if tmp:
459 459 limites.append(tmp)
460 460 tmp = []
461 461 continue
462 462 values = line.strip().split(',')
463 463 tmp.append((float(values[0]), float(values[1])))
464 464 for points in limites:
465 465 ax.add_patch(
466 466 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
467 467
468 468 # plot Cuencas
469 469 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
470 470 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
471 471 values = [line.strip().split(',') for line in f]
472 472 points = [(float(s[0]), float(s[1])) for s in values]
473 473 ax.add_patch(Polygon(points, ec='b', fc='none'))
474 474
475 475 # plot grid
476 476 for r in (15, 30, 45, 60):
477 477 ax.add_artist(plt.Circle((self.lon, self.lat),
478 478 km2deg(r), color='0.6', fill=False, lw=0.2))
479 479 ax.text(
480 480 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
481 481 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
482 482 '{}km'.format(r),
483 483 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
484 484
485 485 if self.mode == 'E':
486 486 title = r'El={}$^\circ$'.format(self.data.meta['elevation'])
487 487 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
488 488 else:
489 489 title = r'Az={}$^\circ$'.format(self.data.meta['azimuth'])
490 490 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
491 491
492 492 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
493 493 self.titles = ['{} {}'.format(
494 494 self.data.parameters[x], title) for x in self.channels]
@@ -1,1349 +1,1349
1 1 # Copyright (c) 2012-2021 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11 #import collections.abc
12 12
13 13 from schainpy.model.graphics.jroplot_base import Plot, plt, log
14 14
15 15 class SpectraPlot(Plot):
16 16 '''
17 17 Plot for Spectra data
18 18 '''
19 19
20 20 CODE = 'spc'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
24 24
25 25 def setup(self):
26 26
27 27 self.nplots = len(self.data.channels)
28 28 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
29 29 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
30 30 self.height = 2.6 * self.nrows
31 31 self.cb_label = 'dB'
32 32 if self.showprofile:
33 33 self.width = 4 * self.ncols
34 34 else:
35 35 self.width = 3.5 * self.ncols
36 36 self.plots_adjust.update({'wspace': 0.8, 'hspace':0.2, 'left': 0.2, 'right': 0.9, 'bottom': 0.18})
37 37 self.ylabel = 'Range [km]'
38 38
39 39 def update(self, dataOut):
40 40
41 41 data = {}
42 42 meta = {}
43 43
44 44 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
45 45 #print("dataOut.normFactor: ", dataOut.normFactor)
46 46 #print("spc: ", dataOut.data_spc[0,0,0])
47 47 #spc = 10*numpy.log10(dataOut.data_spc)
48 48 #print("Spc: ",spc[0])
49 49 #exit(1)
50 50 data['spc'] = spc
51 51 data['rti'] = dataOut.getPower()
52 52 #print(data['rti'][0])
53 53 #exit(1)
54 54 #print("NormFactor: ",dataOut.normFactor)
55 55 #data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
56 56 if hasattr(dataOut, 'LagPlot'): #Double Pulse
57 57 max_hei_id = dataOut.nHeights - 2*dataOut.LagPlot
58 58 #data['noise'] = 10*numpy.log10(dataOut.getNoise(ymin_index=46,ymax_index=max_hei_id)/dataOut.normFactor)
59 59 #data['noise'] = 10*numpy.log10(dataOut.getNoise(ymin_index=40,ymax_index=max_hei_id)/dataOut.normFactor)
60 60 data['noise'] = 10*numpy.log10(dataOut.getNoise(ymin_index=53,ymax_index=max_hei_id)/dataOut.normFactor)
61 61 data['noise'][0] = 10*numpy.log10(dataOut.getNoise(ymin_index=53)[0]/dataOut.normFactor)
62 62 #data['noise'][1] = 22.035507
63 63 else:
64 64 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
65 65 #data['noise'] = 10*numpy.log10(dataOut.getNoise(ymin_index=26,ymax_index=44)/dataOut.normFactor)
66 66 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
67 67
68 68 if self.CODE == 'spc_moments':
69 69 data['moments'] = dataOut.moments
70 70 if self.CODE == 'gaussian_fit':
71 71 data['gaussfit'] = dataOut.DGauFitParams
72 72
73 73 return data, meta
74 74
75 75 def plot(self):
76 76
77 77 if self.xaxis == "frequency":
78 78 x = self.data.xrange[0]
79 79 self.xlabel = "Frequency (kHz)"
80 80 elif self.xaxis == "time":
81 81 x = self.data.xrange[1]
82 82 self.xlabel = "Time (ms)"
83 83 else:
84 84 x = self.data.xrange[2]
85 85 self.xlabel = "Velocity (m/s)"
86 86
87 87 if (self.CODE == 'spc_moments') | (self.CODE == 'gaussian_fit'):
88 88 x = self.data.xrange[2]
89 89 self.xlabel = "Velocity (m/s)"
90 90
91 91 self.titles = []
92 92
93 93 y = self.data.yrange
94 94 self.y = y
95 95
96 96 data = self.data[-1]
97 97 z = data['spc']
98 98
99 99 self.CODE2 = 'spc_oblique'
100 100
101 101 for n, ax in enumerate(self.axes):
102 102 noise = data['noise'][n]
103 103 if self.CODE == 'spc_moments':
104 104 mean = data['moments'][n, 1]
105 105 if self.CODE == 'gaussian_fit':
106 106 gau0 = data['gaussfit'][n][2,:,0]
107 107 gau1 = data['gaussfit'][n][2,:,1]
108 108 if ax.firsttime:
109 109 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
110 110 self.xmin = self.xmin if self.xmin else numpy.nanmin(x)#-self.xmax
111 111 #self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
112 112 #self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
113 113 if self.zlimits is not None:
114 114 self.zmin, self.zmax = self.zlimits[n]
115 115
116 116 ax.plt = ax.pcolormesh(x, y, z[n].T,
117 117 vmin=self.zmin,
118 118 vmax=self.zmax,
119 119 cmap=plt.get_cmap(self.colormap),
120 120 )
121 121
122 122 if self.showprofile:
123 123 ax.plt_profile = self.pf_axes[n].plot(
124 124 data['rti'][n], y)[0]
125 125 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
126 126 color="k", linestyle="dashed", lw=1)[0]
127 127 if self.CODE == 'spc_moments':
128 128 ax.plt_mean = ax.plot(mean, y, color='k', lw=1)[0]
129 129 if self.CODE == 'gaussian_fit':
130 130 ax.plt_gau0 = ax.plot(gau0, y, color='r', lw=1)[0]
131 131 ax.plt_gau1 = ax.plot(gau1, y, color='y', lw=1)[0]
132 132 else:
133 133 if self.zlimits is not None:
134 134 self.zmin, self.zmax = self.zlimits[n]
135 135 ax.plt.set_array(z[n].T.ravel())
136 136 if self.showprofile:
137 137 ax.plt_profile.set_data(data['rti'][n], y)
138 138 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
139 139 if self.CODE == 'spc_moments':
140 140 ax.plt_mean.set_data(mean, y)
141 141 if self.CODE == 'gaussian_fit':
142 142 ax.plt_gau0.set_data(gau0, y)
143 143 ax.plt_gau1.set_data(gau1, y)
144 144 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
145 145
146 146 class SpectraObliquePlot(Plot):
147 147 '''
148 148 Plot for Spectra data
149 149 '''
150 150
151 151 CODE = 'spc_oblique'
152 152 colormap = 'jet'
153 153 plot_type = 'pcolor'
154 154
155 155 def setup(self):
156 156 self.xaxis = "oblique"
157 157 self.nplots = len(self.data.channels)
158 158 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
159 159 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
160 160 self.height = 2.6 * self.nrows
161 161 self.cb_label = 'dB'
162 162 if self.showprofile:
163 163 self.width = 4 * self.ncols
164 164 else:
165 165 self.width = 3.5 * self.ncols
166 166 self.plots_adjust.update({'wspace': 0.8, 'hspace':0.2, 'left': 0.2, 'right': 0.9, 'bottom': 0.18})
167 167 self.ylabel = 'Range [km]'
168 168
169 169 def update(self, dataOut):
170 170
171 171 data = {}
172 172 meta = {}
173 173
174 174 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
175 175 data['spc'] = spc
176 176 data['rti'] = dataOut.getPower()
177 177 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
178 178 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
179 179 '''
180 180 data['shift1'] = dataOut.Oblique_params[0,-2,:]
181 181 data['shift2'] = dataOut.Oblique_params[0,-1,:]
182 182 data['shift1_error'] = dataOut.Oblique_param_errors[0,-2,:]
183 183 data['shift2_error'] = dataOut.Oblique_param_errors[0,-1,:]
184 184 '''
185 185 '''
186 186 data['shift1'] = dataOut.Oblique_params[0,1,:]
187 187 data['shift2'] = dataOut.Oblique_params[0,4,:]
188 188 data['shift1_error'] = dataOut.Oblique_param_errors[0,1,:]
189 189 data['shift2_error'] = dataOut.Oblique_param_errors[0,4,:]
190 190 '''
191 191 data['shift1'] = dataOut.Dop_EEJ_T1[0]
192 192 data['shift2'] = dataOut.Dop_EEJ_T2[0]
193 193 data['max_val_2'] = dataOut.Oblique_params[0,-1,:]
194 194 data['shift1_error'] = dataOut.Err_Dop_EEJ_T1[0]
195 195 data['shift2_error'] = dataOut.Err_Dop_EEJ_T2[0]
196 196
197 197 return data, meta
198 198
199 199 def plot(self):
200 200
201 201 if self.xaxis == "frequency":
202 202 x = self.data.xrange[0]
203 203 self.xlabel = "Frequency (kHz)"
204 204 elif self.xaxis == "time":
205 205 x = self.data.xrange[1]
206 206 self.xlabel = "Time (ms)"
207 207 else:
208 208 x = self.data.xrange[2]
209 209 self.xlabel = "Velocity (m/s)"
210 210
211 211 self.titles = []
212 212
213 213 y = self.data.yrange
214 214 self.y = y
215 215
216 216 data = self.data[-1]
217 217 z = data['spc']
218 218
219 219 for n, ax in enumerate(self.axes):
220 220 noise = self.data['noise'][n][-1]
221 221 shift1 = data['shift1']
222 222 #print(shift1)
223 223 shift2 = data['shift2']
224 224 max_val_2 = data['max_val_2']
225 225 err1 = data['shift1_error']
226 226 err2 = data['shift2_error']
227 227 if ax.firsttime:
228 228
229 229 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
230 230 self.xmin = self.xmin if self.xmin else -self.xmax
231 231 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
232 232 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
233 233 ax.plt = ax.pcolormesh(x, y, z[n].T,
234 234 vmin=self.zmin,
235 235 vmax=self.zmax,
236 236 cmap=plt.get_cmap(self.colormap)
237 237 )
238 238
239 239 if self.showprofile:
240 240 ax.plt_profile = self.pf_axes[n].plot(
241 241 self.data['rti'][n][-1], y)[0]
242 242 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
243 243 color="k", linestyle="dashed", lw=1)[0]
244 244
245 245 self.ploterr1 = ax.errorbar(shift1, y, xerr=err1, fmt='k^', elinewidth=2.2, marker='o', linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
246 246 self.ploterr2 = ax.errorbar(shift2, y, xerr=err2, fmt='m^',elinewidth=2.2,marker='o',linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
247 247 self.ploterr3 = ax.errorbar(max_val_2, y, xerr=0, fmt='g^',elinewidth=2.2,marker='o',linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
248 248
249 249 #print("plotter1: ", self.ploterr1,shift1)
250 250
251 251 else:
252 252 #print("else plotter1: ", self.ploterr1,shift1)
253 253 self.ploterr1.remove()
254 254 self.ploterr2.remove()
255 255 self.ploterr3.remove()
256 256 ax.plt.set_array(z[n].T.ravel())
257 257 if self.showprofile:
258 258 ax.plt_profile.set_data(self.data['rti'][n][-1], y)
259 259 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
260 260 self.ploterr1 = ax.errorbar(shift1, y, xerr=err1, fmt='k^', elinewidth=2.2, marker='o', linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
261 261 self.ploterr2 = ax.errorbar(shift2, y, xerr=err2, fmt='m^',elinewidth=2.2,marker='o',linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
262 262 self.ploterr3 = ax.errorbar(max_val_2, y, xerr=0, fmt='g^',elinewidth=2.2,marker='o',linestyle='None',markersize=2.5,capsize=0.3,markeredgewidth=0.2)
263 263
264 264 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
265 265
266 266
267 267 class CrossSpectraPlot(Plot):
268 268
269 269 CODE = 'cspc'
270 270 colormap = 'jet'
271 271 plot_type = 'pcolor'
272 272 zmin_coh = None
273 273 zmax_coh = None
274 274 zmin_phase = None
275 275 zmax_phase = None
276 276
277 277 def setup(self):
278 278
279 279 self.ncols = 4
280 280 self.nplots = len(self.data.pairs) * 2
281 281 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
282 282 self.width = 3.1 * self.ncols
283 283 self.height = 5 * self.nrows
284 284 self.ylabel = 'Range [km]'
285 285 self.showprofile = False
286 286 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
287 287
288 288 def update(self, dataOut):
289 289
290 290 data = {}
291 291 meta = {}
292 292
293 293 spc = dataOut.data_spc
294 294 cspc = dataOut.data_cspc
295 295 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
296 296 meta['pairs'] = dataOut.pairsList
297 297
298 298 tmp = []
299 299
300 300 for n, pair in enumerate(meta['pairs']):
301 301 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
302 302 coh = numpy.abs(out)
303 303 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
304 304 tmp.append(coh)
305 305 tmp.append(phase)
306 306
307 307 data['cspc'] = numpy.array(tmp)
308 308
309 309 return data, meta
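
The normalization step in update() above, isolated as a small synthetic check (the arrays are illustrative): dividing the complex cross-spectrum by the geometric mean of the two auto-spectra gives a magnitude equal to the coherence (0 to 1) and an argument equal to the phase in degrees.

import numpy

s11 = numpy.full((5, 3), 4.0)            # auto-spectra, shape (nFFT, nHeights)
s22 = numpy.full((5, 3), 9.0)
s12 = 3.0 * numpy.exp(1j * numpy.pi / 4) * numpy.ones((5, 3))   # cross-spectrum

out = s12 / numpy.sqrt(s11 * s22)
coh = numpy.abs(out)                                         # 0.5 everywhere
phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi   # 45 degrees
print(coh[0, 0], phase[0, 0])
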
310 310
311 311 def plot(self):
312 312
313 313 if self.xaxis == "frequency":
314 314 x = self.data.xrange[0]
315 315 self.xlabel = "Frequency (kHz)"
316 316 elif self.xaxis == "time":
317 317 x = self.data.xrange[1]
318 318 self.xlabel = "Time (ms)"
319 319 else:
320 320 x = self.data.xrange[2]
321 321 self.xlabel = "Velocity (m/s)"
322 322
323 323 self.titles = []
324 324
325 325 y = self.data.yrange
326 326 self.y = y
327 327
328 328 data = self.data[-1]
329 329 cspc = data['cspc']
330 330
331 331 for n in range(len(self.data.pairs)):
332 332 pair = self.data.pairs[n]
333 333 coh = cspc[n*2]
334 334 phase = cspc[n*2+1]
335 335 ax = self.axes[2 * n]
336 336 if ax.firsttime:
337 337 ax.plt = ax.pcolormesh(x, y, coh.T,
338 338 vmin=0,
339 339 vmax=1,
340 340 cmap=plt.get_cmap(self.colormap_coh)
341 341 )
342 342 else:
343 343 ax.plt.set_array(coh.T.ravel())
344 344 self.titles.append(
345 345 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
346 346
347 347 ax = self.axes[2 * n + 1]
348 348 if ax.firsttime:
349 349 ax.plt = ax.pcolormesh(x, y, phase.T,
350 350 vmin=-180,
351 351 vmax=180,
352 352 cmap=plt.get_cmap(self.colormap_phase)
353 353 )
354 354 else:
355 355 ax.plt.set_array(phase.T.ravel())
356 356 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
357 357
358 358
359 359 class CrossSpectra4Plot(Plot):
360 360
361 361 CODE = 'cspc'
362 362 colormap = 'jet'
363 363 plot_type = 'pcolor'
364 364 zmin_coh = None
365 365 zmax_coh = None
366 366 zmin_phase = None
367 367 zmax_phase = None
368 368
369 369 def setup(self):
370 370
371 371 self.ncols = 4
372 372 self.nrows = len(self.data.pairs)
373 373 self.nplots = self.nrows * 4
374 374 self.width = 3.1 * self.ncols
375 375 self.height = 5 * self.nrows
376 376 self.ylabel = 'Range [km]'
377 377 self.showprofile = False
378 378 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
379 379
380 380 def plot(self):
381 381
382 382 if self.xaxis == "frequency":
383 383 x = self.data.xrange[0]
384 384 self.xlabel = "Frequency (kHz)"
385 385 elif self.xaxis == "time":
386 386 x = self.data.xrange[1]
387 387 self.xlabel = "Time (ms)"
388 388 else:
389 389 x = self.data.xrange[2]
390 390 self.xlabel = "Velocity (m/s)"
391 391
392 392 self.titles = []
393 393
394 394
395 395 y = self.data.heights
396 396 self.y = y
397 397 nspc = self.data['spc']
398 398 #print(numpy.shape(self.data['spc']))
399 399 spc = self.data['cspc'][0]
400 400 #print(numpy.shape(nspc))
401 401 #exit()
402 402 #nspc[1,:,:] = numpy.flip(nspc[1,:,:],axis=0)
403 403 #print(numpy.shape(spc))
404 404 #exit()
405 405 cspc = self.data['cspc'][1]
406 406
407 407 #xflip=numpy.flip(x)
408 408 #print(numpy.shape(cspc))
409 409 #exit()
410 410
411 411 for n in range(self.nrows):
412 412 noise = self.data['noise'][:,-1]
413 413 pair = self.data.pairs[n]
414 414 #print(pair)
415 415 #exit()
416 416 ax = self.axes[4 * n]
417 417 if ax.firsttime:
418 418 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
419 419 self.xmin = self.xmin if self.xmin else -self.xmax
420 420 self.zmin = self.zmin if self.zmin else numpy.nanmin(nspc)
421 421 self.zmax = self.zmax if self.zmax else numpy.nanmax(nspc)
422 422 ax.plt = ax.pcolormesh(x , y , nspc[pair[0]].T,
423 423 vmin=self.zmin,
424 424 vmax=self.zmax,
425 425 cmap=plt.get_cmap(self.colormap)
426 426 )
427 427 else:
428 428 #print(numpy.shape(nspc[pair[0]].T))
429 429 #exit()
430 430 ax.plt.set_array(nspc[pair[0]].T.ravel())
431 431 self.titles.append('CH {}: {:3.2f}dB'.format(pair[0], noise[pair[0]]))
432 432
433 433 ax = self.axes[4 * n + 1]
434 434
435 435 if ax.firsttime:
436 436 ax.plt = ax.pcolormesh(x , y, numpy.flip(nspc[pair[1]],axis=0).T,
437 437 vmin=self.zmin,
438 438 vmax=self.zmax,
439 439 cmap=plt.get_cmap(self.colormap)
440 440 )
441 441 else:
442 442
443 443 ax.plt.set_array(numpy.flip(nspc[pair[1]],axis=0).T.ravel())
444 444 self.titles.append('CH {}: {:3.2f}dB'.format(pair[1], noise[pair[1]]))
445 445
446 446 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
447 447 coh = numpy.abs(out)
448 448 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
449 449
450 450 ax = self.axes[4 * n + 2]
451 451 if ax.firsttime:
452 452 ax.plt = ax.pcolormesh(x, y, numpy.flip(coh,axis=0).T,
453 453 vmin=0,
454 454 vmax=1,
455 455 cmap=plt.get_cmap(self.colormap_coh)
456 456 )
457 457 else:
458 458 ax.plt.set_array(numpy.flip(coh,axis=0).T.ravel())
459 459 self.titles.append(
460 460 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
461 461
462 462 ax = self.axes[4 * n + 3]
463 463 if ax.firsttime:
464 464 ax.plt = ax.pcolormesh(x, y, numpy.flip(phase,axis=0).T,
465 465 vmin=-180,
466 466 vmax=180,
467 467 cmap=plt.get_cmap(self.colormap_phase)
468 468 )
469 469 else:
470 470 ax.plt.set_array(numpy.flip(phase,axis=0).T.ravel())
471 471 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
472 472
473 473
474 474 class CrossSpectra2Plot(Plot):
475 475
476 476 CODE = 'cspc'
477 477 colormap = 'jet'
478 478 plot_type = 'pcolor'
479 479 zmin_coh = None
480 480 zmax_coh = None
481 481 zmin_phase = None
482 482 zmax_phase = None
483 483
484 484 def setup(self):
485 485
486 486 self.ncols = 1
487 487 self.nrows = len(self.data.pairs)
488 488 self.nplots = self.nrows * 1
489 489 self.width = 3.1 * self.ncols
490 490 self.height = 5 * self.nrows
491 491 self.ylabel = 'Range [km]'
492 492 self.showprofile = False
493 493 self.plots_adjust.update({'left': 0.22, 'right': .90, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
494 494
495 495 def plot(self):
496 496
497 497 if self.xaxis == "frequency":
498 498 x = self.data.xrange[0]
499 499 self.xlabel = "Frequency (kHz)"
500 500 elif self.xaxis == "time":
501 501 x = self.data.xrange[1]
502 502 self.xlabel = "Time (ms)"
503 503 else:
504 504 x = self.data.xrange[2]
505 505 self.xlabel = "Velocity (m/s)"
506 506
507 507 self.titles = []
508 508
509 509
510 510 y = self.data.heights
511 511 self.y = y
512 512 #nspc = self.data['spc']
513 513 #print(numpy.shape(self.data['spc']))
514 514 #spc = self.data['cspc'][0]
515 515 #print(numpy.shape(spc))
516 516 #exit()
517 517 cspc = self.data['cspc'][1]
518 518 #print(numpy.shape(cspc))
519 519 #exit()
520 520
521 521 for n in range(self.nrows):
522 522 noise = self.data['noise'][:,-1]
523 523 pair = self.data.pairs[n]
524 524 #print(pair) #exit()
525 525
526 526
527 527
528 528 out = cspc[n]# / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
529 529
530 530 #print(out[:,53])
531 531 #exit()
532 532 cross = numpy.abs(out)
533 533 z = cross/self.data.nFactor
534 534 #print("here")
535 535 #print(dataOut.data_spc[0,0,0])
536 536 #exit()
537 537
538 538 cross = 10*numpy.log10(z)
539 539 #print(numpy.shape(cross))
540 540 #print(cross[0,:])
541 541 #print(self.data.nFactor)
542 542 #exit()
543 543 #phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
544 544
545 545 ax = self.axes[1 * n]
546 546 if ax.firsttime:
547 547 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
548 548 self.xmin = self.xmin if self.xmin else -self.xmax
549 549 self.zmin = self.zmin if self.zmin else numpy.nanmin(cross)
550 550 self.zmax = self.zmax if self.zmax else numpy.nanmax(cross)
551 551 ax.plt = ax.pcolormesh(x, y, cross.T,
552 552 vmin=self.zmin,
553 553 vmax=self.zmax,
554 554 cmap=plt.get_cmap(self.colormap)
555 555 )
556 556 else:
557 557 ax.plt.set_array(cross.T.ravel())
558 558 self.titles.append(
559 559 'Cross Spectra Power Ch{} * Ch{}'.format(pair[0], pair[1]))
560 560
561 561
562 562 class CrossSpectra3Plot(Plot):
563 563
564 564 CODE = 'cspc'
565 565 colormap = 'jet'
566 566 plot_type = 'pcolor'
567 567 zmin_coh = None
568 568 zmax_coh = None
569 569 zmin_phase = None
570 570 zmax_phase = None
571 571
572 572 def setup(self):
573 573
574 574 self.ncols = 3
575 575 self.nrows = len(self.data.pairs)
576 576 self.nplots = self.nrows * 3
577 577 self.width = 3.1 * self.ncols
578 578 self.height = 5 * self.nrows
579 579 self.ylabel = 'Range [km]'
580 580 self.showprofile = False
581 581 self.plots_adjust.update({'left': 0.22, 'right': .90, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
582 582
583 583 def plot(self):
584 584
585 585 if self.xaxis == "frequency":
586 586 x = self.data.xrange[0]
587 587 self.xlabel = "Frequency (kHz)"
588 588 elif self.xaxis == "time":
589 589 x = self.data.xrange[1]
590 590 self.xlabel = "Time (ms)"
591 591 else:
592 592 x = self.data.xrange[2]
593 593 self.xlabel = "Velocity (m/s)"
594 594
595 595 self.titles = []
596 596
597 597
598 598 y = self.data.heights
599 599 self.y = y
600 600 #nspc = self.data['spc']
601 601 #print(numpy.shape(self.data['spc']))
602 602 #spc = self.data['cspc'][0]
603 603 #print(numpy.shape(spc))
604 604 #exit()
605 605 cspc = self.data['cspc'][1]
606 606 #print(numpy.shape(cspc))
607 607 #exit()
608 608
609 609 for n in range(self.nrows):
610 610 noise = self.data['noise'][:,-1]
611 611 pair = self.data.pairs[n]
612 612 #print(pair) #exit()
613 613
614 614
615 615
616 616 out = cspc[n]# / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
617 617
618 618 #print(out[:,53])
619 619 #exit()
620 620 cross = numpy.abs(out)
621 621 z = cross/self.data.nFactor
622 622 cross = 10*numpy.log10(z)
623 623
624 624 out_r= out.real/self.data.nFactor
625 625 #out_r = 10*numpy.log10(out_r)
626 626
627 627 out_i= out.imag/self.data.nFactor
628 628 #out_i = 10*numpy.log10(out_i)
629 629 #print(numpy.shape(cross))
630 630 #print(cross[0,:])
631 631 #print(self.data.nFactor)
632 632 #exit()
633 633 #phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
634 634
635 635 ax = self.axes[3 * n]
636 636 if ax.firsttime:
637 637 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
638 638 self.xmin = self.xmin if self.xmin else -self.xmax
639 639 self.zmin = self.zmin if self.zmin else numpy.nanmin(cross)
640 640 self.zmax = self.zmax if self.zmax else numpy.nanmax(cross)
641 641 ax.plt = ax.pcolormesh(x, y, cross.T,
642 642 vmin=self.zmin,
643 643 vmax=self.zmax,
644 644 cmap=plt.get_cmap(self.colormap)
645 645 )
646 646 else:
647 647 ax.plt.set_array(cross.T.ravel())
648 648 self.titles.append(
649 649 'Cross Spectra Power Ch{} * Ch{}'.format(pair[0], pair[1]))
650 650
651 651 ax = self.axes[3 * n + 1]
652 652 if ax.firsttime:
653 653 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
654 654 self.xmin = self.xmin if self.xmin else -self.xmax
655 655 self.zmin = self.zmin if self.zmin else numpy.nanmin(cross)
656 656 self.zmax = self.zmax if self.zmax else numpy.nanmax(cross)
657 657 ax.plt = ax.pcolormesh(x, y, out_r.T,
658 658 vmin=-1.e6,
659 659 vmax=0,
660 660 cmap=plt.get_cmap(self.colormap)
661 661 )
662 662 else:
663 663 ax.plt.set_array(out_r.T.ravel())
664 664 self.titles.append(
665 665 'Cross Spectra Real Ch{} * Ch{}'.format(pair[0], pair[1]))
666 666
667 667 ax = self.axes[3 * n + 2]
668 668
669 669
670 670 if ax.firsttime:
671 671 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
672 672 self.xmin = self.xmin if self.xmin else -self.xmax
673 673 self.zmin = self.zmin if self.zmin else numpy.nanmin(cross)
674 674 self.zmax = self.zmax if self.zmax else numpy.nanmax(cross)
675 675 ax.plt = ax.pcolormesh(x, y, out_i.T,
676 676 vmin=-1.e6,
677 677 vmax=1.e6,
678 678 cmap=plt.get_cmap(self.colormap)
679 679 )
680 680 else:
681 681 ax.plt.set_array(out_i.T.ravel())
682 682 self.titles.append(
683 683 'Cross Spectra Imag Ch{} * Ch{}'.format(pair[0], pair[1]))
684 684
685 685 class RTIPlot(Plot):
686 686 '''
687 687 Plot for RTI data
688 688 '''
689 689
690 690 CODE = 'rti'
691 691 colormap = 'jet'
692 692 plot_type = 'pcolorbuffer'
693 693
694 694 def setup(self):
695 695 self.xaxis = 'time'
696 696 self.ncols = 1
697 697 self.nrows = len(self.data.channels)
698 698 self.nplots = len(self.data.channels)
699 699 self.ylabel = 'Range [km]'
700 700 self.xlabel = 'Time'
701 701 self.cb_label = 'dB'
702 702 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
703 703 self.titles = ['{} Channel {}'.format(
704 704 self.CODE.upper(), x) for x in range(self.nrows)]
705 705
706 706 def update(self, dataOut):
707 707
708 708 data = {}
709 709 meta = {}
710 710 data['rti'] = dataOut.getPower()
711 711 #print(numpy.shape(data['rti']))
712 712
713 713 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
714 714
715 715 return data, meta
716 716
717 717 def plot(self):
718 718
719 719 self.x = self.data.times
720 720 self.y = self.data.yrange
721 721 self.z = self.data[self.CODE]
722 722 #print("Inside RTI: ", self.z)
723 723 self.z = numpy.ma.masked_invalid(self.z)
724 724
725 725 if self.decimation is None:
726 726 x, y, z = self.fill_gaps(self.x, self.y, self.z)
727 727 else:
728 728 x, y, z = self.fill_gaps(*self.decimate())
729 729 #print("self.z: ", self.z)
730 730 #exit(1)
731 731 '''
732 732 if not isinstance(self.zmin, collections.abc.Sequence):
733 733 if not self.zmin:
734 734 self.zmin = [numpy.min(self.z)]*len(self.axes)
735 735 else:
736 736 self.zmin = [self.zmin]*len(self.axes)
737 737
738 738 if not isinstance(self.zmax, collections.abc.Sequence):
739 739 if not self.zmax:
740 740 self.zmax = [numpy.max(self.z)]*len(self.axes)
741 741 else:
742 742 self.zmax = [self.zmax]*len(self.axes)
743 743 '''
744 744 for n, ax in enumerate(self.axes):
745 745
746 746 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
747 747 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
748 748
749 749 if ax.firsttime:
750 750 if self.zlimits is not None:
751 751 self.zmin, self.zmax = self.zlimits[n]
752 752 ax.plt = ax.pcolormesh(x, y, z[n].T,
753 753 vmin=self.zmin,
754 754 vmax=self.zmax,
755 755 cmap=plt.get_cmap(self.colormap)
756 756 )
757 757 if self.showprofile:
758 758 ax.plot_profile = self.pf_axes[n].plot(
759 759 self.data['rti'][n][-1], self.y)[0]
760 760 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y,
761 761 color="k", linestyle="dashed", lw=1)[0]
762 762 else:
763 763 if self.zlimits is not None:
764 764 self.zmin, self.zmax = self.zlimits[n]
765 ax.collections.remove(ax.collections[0])
765 ax.plt.remove()
766 766 ax.plt = ax.pcolormesh(x, y, z[n].T,
767 767 vmin=self.zmin,
768 768 vmax=self.zmax,
769 769 cmap=plt.get_cmap(self.colormap)
770 770 )
771 771 if self.showprofile:
772 772 ax.plot_profile.set_data(self.data['rti'][n][-1], self.y)
773 773 ax.plot_noise.set_data(numpy.repeat(
774 774 self.data['noise'][n][-1], len(self.y)), self.y)
775 775
776 776
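The swap of ax.collections.remove(ax.collections[0]) for ax.plt.remove() in the plot() method above (and in the RTI-style classes later in this changeset) follows the pattern sketched below: newer Matplotlib releases (3.5 onward) expose Axes.collections as a read-only view, so a drawn QuadMesh has to be removed through its own .remove() method before the mesh is redrawn. The sketch is illustrative only (random data, hypothetical variable names), not part of the changeset.

import numpy
import matplotlib.pyplot as plt

x = numpy.arange(10)
y = numpy.arange(20)
z = numpy.random.rand(19, 9)

fig, ax = plt.subplots()
mesh = ax.pcolormesh(x, y, z, cmap='jet')      # first draw: keep the handle
mesh.remove()                                  # later draws: remove via the artist
mesh = ax.pcolormesh(x, y, 2 * z, cmap='jet')  # ...then draw the refreshed data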
777 777 class SpectrogramPlot(Plot):
778 778 '''
779 779 Plot for Spectrogram data
780 780 '''
781 781
782 782 CODE = 'Spectrogram_Profile'
783 783 colormap = 'binary'
784 784 plot_type = 'pcolorbuffer'
785 785
786 786 def setup(self):
787 787 self.xaxis = 'time'
788 788 self.ncols = 1
789 789 self.nrows = len(self.data.channels)
790 790 self.nplots = len(self.data.channels)
791 791 self.xlabel = 'Time'
792 792 #self.cb_label = 'dB'
793 793 self.plots_adjust.update({'hspace':1.2, 'left': 0.1, 'bottom': 0.12, 'right':0.95})
794 794 self.titles = []
795 795
796 796 #self.titles = ['{} Channel {} \n H = {} km ({} - {})'.format(
797 797 #self.CODE.upper(), x, self.data.heightList[self.data.hei], self.data.heightList[self.data.hei],self.data.heightList[self.data.hei]+(self.data.DH*self.data.nProfiles)) for x in range(self.nrows)]
798 798
799 799 self.titles = ['{} Channel {}'.format(
800 800 self.CODE.upper(), x) for x in range(self.nrows)]
801 801
802 802
803 803 def update(self, dataOut):
804 804 data = {}
805 805 meta = {}
806 806
807 807 #maxHei = 1620#+12000 # unused: superseded by the next line
808 808 maxHei = 1180
809 809 indb = numpy.where(dataOut.heightList <= maxHei)
810 810 hei = indb[0][-1]
811 811 #print(dataOut.heightList)
812 812
813 813 factor = dataOut.nIncohInt
814 814 z = dataOut.data_spc[:,:,hei] / factor
815 815 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
816 816 #buffer = 10 * numpy.log10(z)
817 817
818 818 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
819 819
820 820
821 821 #self.hei = hei
822 822 #self.heightList = dataOut.heightList
823 823 #self.DH = (dataOut.heightList[1] - dataOut.heightList[0])/dataOut.step
824 824 #self.nProfiles = dataOut.nProfiles
825 825
826 826 data['Spectrogram_Profile'] = 10 * numpy.log10(z)
827 827
828 828 data['hei'] = hei
829 829 data['DH'] = (dataOut.heightList[1] - dataOut.heightList[0])/dataOut.step
830 830 data['nProfiles'] = dataOut.nProfiles
831 831 #meta['yrange'] = dataOut.heightList[0:dataOut.NSHTS]
832 832 '''
833 833 import matplotlib.pyplot as plt
834 834 plt.plot(10 * numpy.log10(z[0,:]))
835 835 plt.show()
836 836
837 837 from time import sleep
838 838 sleep(10)
839 839 '''
840 840 return data, meta
841 841
842 842 def plot(self):
843 843
844 844 self.x = self.data.times
845 845 self.z = self.data[self.CODE]
846 846 self.y = self.data.xrange[0]
847 847
848 848 hei = self.data['hei'][-1]
849 849 DH = self.data['DH'][-1]
850 850 nProfiles = self.data['nProfiles'][-1]
851 851
852 852 self.ylabel = "Frequency (kHz)"
853 853
854 854 self.z = numpy.ma.masked_invalid(self.z)
855 855
856 856 if self.decimation is None:
857 857 x, y, z = self.fill_gaps(self.x, self.y, self.z)
858 858 else:
859 859 x, y, z = self.fill_gaps(*self.decimate())
860 860
861 861 for n, ax in enumerate(self.axes):
862 862 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
863 863 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
864 864 data = self.data[-1]
865 865 if ax.firsttime:
866 866 ax.plt = ax.pcolormesh(x, y, z[n].T,
867 867 vmin=self.zmin,
868 868 vmax=self.zmax,
869 869 cmap=plt.get_cmap(self.colormap)
870 870 )
871 871 else:
872 ax.collections.remove(ax.collections[0])
872 ax.plt.remove()
873 873 ax.plt = ax.pcolormesh(x, y, z[n].T,
874 874 vmin=self.zmin,
875 875 vmax=self.zmax,
876 876 cmap=plt.get_cmap(self.colormap)
877 877 )
878 878
879 879 #self.titles.append('Spectrogram')
880 880
881 881 #self.titles.append('{} Channel {} \n H = {} km ({} - {})'.format(
882 882 #self.CODE.upper(), x, y[hei], y[hei],y[hei]+(DH*nProfiles)))
883 883
884 884
885 885
886 886
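For reference, the profile height used by SpectrogramPlot.update is simply the last range gate at or below maxHei: numpy.where returns the indices of all gates that satisfy the condition and indb[0][-1] keeps the highest one. A minimal worked example with illustrative heights:

import numpy

heightList = numpy.array([900.0, 1000.0, 1100.0, 1200.0])  # km, illustrative
maxHei = 1180
indb = numpy.where(heightList <= maxHei)
hei = indb[0][-1]   # -> 2, i.e. the 1100 km gate is the one whose spectrum is kept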
887 887 class CoherencePlot(RTIPlot):
888 888 '''
889 889 Plot for Coherence data
890 890 '''
891 891
892 892 CODE = 'coh'
893 893
894 894 def setup(self):
895 895 self.xaxis = 'time'
896 896 self.ncols = 1
897 897 self.nrows = len(self.data.pairs)
898 898 self.nplots = len(self.data.pairs)
899 899 self.ylabel = 'Range [km]'
900 900 self.xlabel = 'Time'
901 901 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
902 902 if self.CODE == 'coh':
903 903 self.cb_label = ''
904 904 self.titles = [
905 905 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
906 906 else:
907 907 self.cb_label = 'Degrees'
908 908 self.titles = [
909 909 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
910 910
911 911 def update(self, dataOut):
912 912
913 913 data = {}
914 914 meta = {}
915 915 data['coh'] = dataOut.getCoherence()
916 916 meta['pairs'] = dataOut.pairsList
917 917
918 918 return data, meta
919 919
920 920 class PhasePlot(CoherencePlot):
921 921 '''
922 922 Plot for Phase map data
923 923 '''
924 924
925 925 CODE = 'phase'
926 926 colormap = 'seismic'
927 927
928 928 def update(self, dataOut):
929 929
930 930 data = {}
931 931 meta = {}
932 932 data['phase'] = dataOut.getCoherence(phase=True)
933 933 meta['pairs'] = dataOut.pairsList
934 934
935 935 return data, meta
936 936
937 937 class NoisePlot(Plot):
938 938 '''
939 939 Plot for noise
940 940 '''
941 941
942 942 CODE = 'noise'
943 943 plot_type = 'scatterbuffer'
944 944
945 945 def setup(self):
946 946 self.xaxis = 'time'
947 947 self.ncols = 1
948 948 self.nrows = 1
949 949 self.nplots = 1
950 950 self.ylabel = 'Intensity [dB]'
951 951 self.xlabel = 'Time'
952 952 self.titles = ['Noise']
953 953 self.colorbar = False
954 954 self.plots_adjust.update({'right': 0.85 })
955 955
956 956 def update(self, dataOut):
957 957
958 958 data = {}
959 959 meta = {}
960 960 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
961 961 meta['yrange'] = numpy.array([])
962 962
963 963 return data, meta
964 964
965 965 def plot(self):
966 966
967 967 x = self.data.times
968 968 xmin = self.data.min_time
969 969 xmax = xmin + self.xrange * 60 * 60
970 970 Y = self.data['noise']
971 971
972 972 if self.axes[0].firsttime:
973 973 self.ymin = numpy.nanmin(Y) - 5
974 974 self.ymax = numpy.nanmax(Y) + 5
975 975 for ch in self.data.channels:
976 976 y = Y[ch]
977 977 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
978 978 plt.legend(bbox_to_anchor=(1.18, 1.0))
979 979 else:
980 980 for ch in self.data.channels:
981 981 y = Y[ch]
982 982 self.axes[0].lines[ch].set_data(x, y)
983 983
984 984 self.ymin = numpy.nanmin(Y) - 5
985 985 self.ymax = numpy.nanmax(Y) + 10
986 986
987 987
988 988 class PowerProfilePlot(Plot):
989 989
990 990 CODE = 'pow_profile'
991 991 plot_type = 'scatter'
992 992
993 993 def setup(self):
994 994
995 995 self.ncols = 1
996 996 self.nrows = 1
997 997 self.nplots = 1
998 998 self.height = 4
999 999 self.width = 3
1000 1000 self.ylabel = 'Range [km]'
1001 1001 self.xlabel = 'Intensity [dB]'
1002 1002 self.titles = ['Power Profile']
1003 1003 self.colorbar = False
1004 1004
1005 1005 def update(self, dataOut):
1006 1006
1007 1007 data = {}
1008 1008 meta = {}
1009 1009 data[self.CODE] = dataOut.getPower()
1010 1010
1011 1011 return data, meta
1012 1012
1013 1013 def plot(self):
1014 1014
1015 1015 y = self.data.yrange
1016 1016 self.y = y
1017 1017
1018 1018 x = self.data[-1][self.CODE]
1019 1019
1020 1020 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
1021 1021 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
1022 1022
1023 1023 if self.axes[0].firsttime:
1024 1024 for ch in self.data.channels:
1025 1025 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
1026 1026 plt.legend()
1027 1027 else:
1028 1028 for ch in self.data.channels:
1029 1029 self.axes[0].lines[ch].set_data(x[ch], y)
1030 1030
1031 1031
1032 1032 class SpectraCutPlot(Plot):
1033 1033
1034 1034 CODE = 'spc_cut'
1035 1035 plot_type = 'scatter'
1036 1036 buffering = False
1037 1037
1038 1038 def setup(self):
1039 1039
1040 1040 self.nplots = len(self.data.channels)
1041 1041 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
1042 1042 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
1043 1043 self.width = 3.4 * self.ncols + 1.5
1044 1044 self.height = 3 * self.nrows
1045 1045 self.ylabel = 'Power [dB]'
1046 1046 self.colorbar = False
1047 1047 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
1048 1048
1049 1049 def update(self, dataOut):
1050 1050
1051 1051 data = {}
1052 1052 meta = {}
1053 1053 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
1054 1054 data['spc'] = spc
1055 1055 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
1056 1056 if self.CODE == 'cut_gaussian_fit':
1057 1057 data['gauss_fit0'] = 10*numpy.log10(dataOut.GaussFit0/dataOut.normFactor)
1058 1058 data['gauss_fit1'] = 10*numpy.log10(dataOut.GaussFit1/dataOut.normFactor)
1059 1059 return data, meta
1060 1060
1061 1061 def plot(self):
1062 1062 if self.xaxis == "frequency":
1063 1063 x = self.data.xrange[0][1:]
1064 1064 self.xlabel = "Frequency (kHz)"
1065 1065 elif self.xaxis == "time":
1066 1066 x = self.data.xrange[1]
1067 1067 self.xlabel = "Time (ms)"
1068 1068 else:
1069 1069 x = self.data.xrange[2][:-1]
1070 1070 self.xlabel = "Velocity (m/s)"
1071 1071
1072 1072 if self.CODE == 'cut_gaussian_fit':
1073 1073 x = self.data.xrange[2][:-1]
1074 1074 self.xlabel = "Velocity (m/s)"
1075 1075
1076 1076 self.titles = []
1077 1077
1078 1078 y = self.data.yrange
1079 1079 data = self.data[-1]
1080 1080 z = data['spc']
1081 1081
1082 1082 if self.height_index:
1083 1083 index = numpy.array(self.height_index)
1084 1084 else:
1085 1085 index = numpy.arange(0, len(y), int((len(y))/9))
1086 1086
1087 1087 for n, ax in enumerate(self.axes):
1088 1088 if self.CODE == 'cut_gaussian_fit':
1089 1089 gau0 = data['gauss_fit0']
1090 1090 gau1 = data['gauss_fit1']
1091 1091 if ax.firsttime:
1092 1092 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
1093 1093 self.xmin = self.xmin if self.xmin else -self.xmax
1094 1094 self.ymin = self.ymin if self.ymin else numpy.nanmin(z[:,:,index])
1095 1095 self.ymax = self.ymax if self.ymax else numpy.nanmax(z[:,:,index])
1096 1096 #print(self.ymax)
1097 1097 #print(z[n, :, index])
1098 1098 ax.plt = ax.plot(x, z[n, :, index].T, lw=0.25)
1099 1099 if self.CODE == 'cut_gaussian_fit':
1100 1100 ax.plt_gau0 = ax.plot(x, gau0[n, :, index].T, lw=1, linestyle='-.')
1101 1101 for i, line in enumerate(ax.plt_gau0):
1102 1102 line.set_color(ax.plt[i].get_color())
1103 1103 ax.plt_gau1 = ax.plot(x, gau1[n, :, index].T, lw=1, linestyle='--')
1104 1104 for i, line in enumerate(ax.plt_gau1):
1105 1105 line.set_color(ax.plt[i].get_color())
1106 1106 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
1107 1107 self.figures[0].legend(ax.plt, labels, loc='center right')
1108 1108 else:
1109 1109 for i, line in enumerate(ax.plt):
1110 1110 line.set_data(x, z[n, :, index[i]].T)
1111 1111 for i, line in enumerate(ax.plt_gau0):
1112 1112 line.set_data(x, gau0[n, :, index[i]].T)
1113 1113 line.set_color(ax.plt[i].get_color())
1114 1114 for i, line in enumerate(ax.plt_gau1):
1115 1115 line.set_data(x, gau1[n, :, index[i]].T)
1116 1116 line.set_color(ax.plt[i].get_color())
1117 1117 self.titles.append('CH {}'.format(n))
1118 1118
1119 1119
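When height_index is not given, SpectraCutPlot falls back to roughly nine evenly spaced range gates via numpy.arange(0, len(y), int(len(y)/9)); note that with fewer than nine heights the integer step becomes zero and the call fails. A minimal worked example with an illustrative range axis:

import numpy

y = numpy.linspace(80.0, 980.0, 90)               # illustrative range axis, km
index = numpy.arange(0, len(y), int(len(y) / 9))  # -> 0, 10, 20, ..., 80
print(y[index])                                   # the nine cuts that get plotted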
1120 1120 class BeaconPhase(Plot):
1121 1121
1122 1122 __isConfig = None
1123 1123 __nsubplots = None
1124 1124
1125 1125 PREFIX = 'beacon_phase'
1126 1126
1127 1127 def __init__(self):
1128 1128 Plot.__init__(self)
1129 1129 self.timerange = 24*60*60
1130 1130 self.isConfig = False
1131 1131 self.__nsubplots = 1
1132 1132 self.counter_imagwr = 0
1133 1133 self.WIDTH = 800
1134 1134 self.HEIGHT = 400
1135 1135 self.WIDTHPROF = 120
1136 1136 self.HEIGHTPROF = 0
1137 1137 self.xdata = None
1138 1138 self.ydata = None
1139 1139
1140 1140 self.PLOT_CODE = BEACON_CODE
1141 1141
1142 1142 self.FTP_WEI = None
1143 1143 self.EXP_CODE = None
1144 1144 self.SUB_EXP_CODE = None
1145 1145 self.PLOT_POS = None
1146 1146
1147 1147 self.filename_phase = None
1148 1148
1149 1149 self.figfile = None
1150 1150
1151 1151 self.xmin = None
1152 1152 self.xmax = None
1153 1153
1154 1154 def getSubplots(self):
1155 1155
1156 1156 ncol = 1
1157 1157 nrow = 1
1158 1158
1159 1159 return nrow, ncol
1160 1160
1161 1161 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
1162 1162
1163 1163 self.__showprofile = showprofile
1164 1164 self.nplots = nplots
1165 1165
1166 1166 ncolspan = 7
1167 1167 colspan = 6
1168 1168 self.__nsubplots = 2
1169 1169
1170 1170 self.createFigure(id = id,
1171 1171 wintitle = wintitle,
1172 1172 widthplot = self.WIDTH+self.WIDTHPROF,
1173 1173 heightplot = self.HEIGHT+self.HEIGHTPROF,
1174 1174 show=show)
1175 1175
1176 1176 nrow, ncol = self.getSubplots()
1177 1177
1178 1178 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
1179 1179
1180 1180 def save_phase(self, filename_phase):
1181 1181 f = open(filename_phase,'w+')
1182 1182 f.write('\n\n')
1183 1183 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
1184 1184 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
1185 1185 f.close()
1186 1186
1187 1187 def save_data(self, filename_phase, data, data_datetime):
1188 1188 f=open(filename_phase,'a')
1189 1189 timetuple_data = data_datetime.timetuple()
1190 1190 day = str(timetuple_data.tm_mday)
1191 1191 month = str(timetuple_data.tm_mon)
1192 1192 year = str(timetuple_data.tm_year)
1193 1193 hour = str(timetuple_data.tm_hour)
1194 1194 minute = str(timetuple_data.tm_min)
1195 1195 second = str(timetuple_data.tm_sec)
1196 1196 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
1197 1197 f.close()
1198 1198
1199 1199 def plot(self):
1200 1200 log.warning('TODO: Not yet implemented...')
1201 1201
1202 1202 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
1203 1203 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
1204 1204 timerange=None,
1205 1205 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
1206 1206 server=None, folder=None, username=None, password=None,
1207 1207 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
1208 1208
1209 1209 if dataOut.flagNoData:
1210 1210 return dataOut
1211 1211
1212 1212 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
1213 1213 return
1214 1214
1215 1215 if pairsList is None:
1216 1216 pairsIndexList = dataOut.pairsIndexList[:10]
1217 1217 else:
1218 1218 pairsIndexList = []
1219 1219 for pair in pairsList:
1220 1220 if pair not in dataOut.pairsList:
1221 1221 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
1222 1222 pairsIndexList.append(dataOut.pairsList.index(pair))
1223 1223
1224 1224 if pairsIndexList == []:
1225 1225 return
1226 1226
1227 1227 # if len(pairsIndexList) > 4:
1228 1228 # pairsIndexList = pairsIndexList[0:4]
1229 1229
1230 1230 hmin_index = None
1231 1231 hmax_index = None
1232 1232
1233 1233 if hmin is not None and hmax is not None:
1234 1234 indexes = numpy.arange(dataOut.nHeights)
1235 1235 hmin_list = indexes[dataOut.heightList >= hmin]
1236 1236 hmax_list = indexes[dataOut.heightList <= hmax]
1237 1237
1238 1238 if hmin_list.any():
1239 1239 hmin_index = hmin_list[0]
1240 1240
1241 1241 if hmax_list.any():
1242 1242 hmax_index = hmax_list[-1]+1
1243 1243
1244 1244 x = dataOut.getTimeRange()
1245 1245
1246 1246 thisDatetime = dataOut.datatime
1247 1247
1248 1248 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
1249 1249 xlabel = "Local Time"
1250 1250 ylabel = "Phase (degrees)"
1251 1251
1252 1252 update_figfile = False
1253 1253
1254 1254 nplots = len(pairsIndexList)
1255 1255 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
1256 1256 phase_beacon = numpy.zeros(len(pairsIndexList))
1257 1257 for i in range(nplots):
1258 1258 pair = dataOut.pairsList[pairsIndexList[i]]
1259 1259 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
1260 1260 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
1261 1261 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
1262 1262 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
1263 1263 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
1264 1264
1265 1265 if dataOut.beacon_heiIndexList:
1266 1266 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
1267 1267 else:
1268 1268 phase_beacon[i] = numpy.average(phase)
1269 1269
1270 1270 if not self.isConfig:
1271 1271
1272 1272 nplots = len(pairsIndexList)
1273 1273
1274 1274 self.setup(id=id,
1275 1275 nplots=nplots,
1276 1276 wintitle=wintitle,
1277 1277 showprofile=showprofile,
1278 1278 show=show)
1279 1279
1280 1280 if timerange is not None:
1281 1281 self.timerange = timerange
1282 1282
1283 1283 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
1284 1284
1285 1285 if ymin is None: ymin = 0
1286 1286 if ymax is None: ymax = 360
1287 1287
1288 1288 self.FTP_WEI = ftp_wei
1289 1289 self.EXP_CODE = exp_code
1290 1290 self.SUB_EXP_CODE = sub_exp_code
1291 1291 self.PLOT_POS = plot_pos
1292 1292
1293 1293 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
1294 1294 self.isConfig = True
1295 1295 self.figfile = figfile
1296 1296 self.xdata = numpy.array([])
1297 1297 self.ydata = numpy.array([])
1298 1298
1299 1299 update_figfile = True
1300 1300
1301 1301 #open file beacon phase
1302 1302 path = '%s%03d' %(self.PREFIX, self.id)
1303 1303 beacon_file = os.path.join(path,'%s.txt'%self.name)
1304 1304 self.filename_phase = os.path.join(figpath,beacon_file)
1305 1305 #self.save_phase(self.filename_phase)
1306 1306
1307 1307
1308 1308 #store data beacon phase
1309 1309 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
1310 1310
1311 1311 self.setWinTitle(title)
1312 1312
1313 1313
1314 1314 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
1315 1315
1316 1316 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
1317 1317
1318 1318 axes = self.axesList[0]
1319 1319
1320 1320 self.xdata = numpy.hstack((self.xdata, x[0:1]))
1321 1321
1322 1322 if len(self.ydata)==0:
1323 1323 self.ydata = phase_beacon.reshape(-1,1)
1324 1324 else:
1325 1325 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
1326 1326
1327 1327
1328 1328 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
1329 1329 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
1330 1330 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
1331 1331 XAxisAsTime=True, grid='both'
1332 1332 )
1333 1333
1334 1334 self.draw()
1335 1335
1336 1336 if dataOut.ltctime >= self.xmax:
1337 1337 self.counter_imagwr = wr_period
1338 1338 self.isConfig = False
1339 1339 update_figfile = True
1340 1340
1341 1341 self.save(figpath=figpath,
1342 1342 figfile=figfile,
1343 1343 save=save,
1344 1344 ftp=ftp,
1345 1345 wr_period=wr_period,
1346 1346 thisDatetime=thisDatetime,
1347 1347 update_figfile=update_figfile)
1348 1348
1349 1349 return dataOut
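All of the plot classes in this file share the same contract: a CODE class attribute naming the buffered data key, setup() declaring the figure layout, update(dataOut) returning a (data, meta) pair extracted from the processing output, and plot() drawing from the buffered self.data. The sketch below, modeled on PowerProfilePlot above, gathers that contract in one place; the class name and the 'example_pow' key are illustrative and not part of the library.

import numpy
from schainpy.model.graphics.jroplot_base import Plot, plt

class ExamplePowerPlot(Plot):        # illustrative name, not shipped with schainpy
    '''
    Minimal sketch of the setup/update/plot contract used by the classes above
    '''

    CODE = 'example_pow'             # key under which update() output is buffered
    plot_type = 'scatter'

    def setup(self):
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.ylabel = 'Range [km]'
        self.xlabel = 'Intensity [dB]'
        self.titles = ['Example Power']
        self.colorbar = False

    def update(self, dataOut):
        # extract only what plot() needs from the processing-unit output
        data = {self.CODE: dataOut.getPower()}
        meta = {}
        return data, meta

    def plot(self):
        y = self.data.yrange
        x = self.data[-1][self.CODE]         # most recent buffered entry
        ax = self.axes[0]
        if ax.firsttime:
            for ch in self.data.channels:
                ax.plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
            plt.legend()
        else:
            for ch in self.data.channels:
                ax.lines[ch].set_data(x[ch], y)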
@@ -1,1428 +1,1428
1 1
2 2 import os
3 3 import time
4 4 import math
5 5 import datetime
6 6 import numpy
7 7
8 8 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator #YONG
9 9
10 10 from .jroplot_spectra import RTIPlot, NoisePlot
11 11
12 12 from schainpy.utils import log
13 13 from .plotting_codes import *
14 14
15 15 from schainpy.model.graphics.jroplot_base import Plot, plt
16 16
17 17 import matplotlib.pyplot as plt
18 18 import matplotlib.colors as colors
19 19 from matplotlib.ticker import MultipleLocator, LogLocator, NullFormatter
20 20
21 21 class RTIDPPlot(RTIPlot):
22 22 '''
23 23 Written by R. Flores
24 24 '''
25 25 '''Plot for RTI Double Pulse Experiment Using Cross Products Analysis
26 26 '''
27 27
28 28 CODE = 'RTIDP'
29 29 colormap = 'jet'
30 30 plot_name = 'RTI'
31 31 plot_type = 'pcolorbuffer'
32 32
33 33 def setup(self):
34 34 self.xaxis = 'time'
35 35 self.ncols = 1
36 36 self.nrows = 3
37 37 self.nplots = self.nrows
38 38
39 39 self.ylabel = 'Range [km]'
40 40 self.xlabel = 'Time (LT)'
41 41
42 42 self.cb_label = 'Intensity (dB)'
43 43
44 44 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
45 45
46 46 self.titles = ['{} Channel {}'.format(
47 47 self.plot_name.upper(), '0x1'),'{} Channel {}'.format(
48 48 self.plot_name.upper(), '0'),'{} Channel {}'.format(
49 49 self.plot_name.upper(), '1')]
50 50
51 51 def update(self, dataOut):
52 52
53 53 data = {}
54 54 meta = {}
55 55 data['rti'] = dataOut.data_for_RTI_DP
56 56 data['NDP'] = dataOut.NDP
57 57
58 58 return data, meta
59 59
60 60 def plot(self):
61 61
62 62 NDP = self.data['NDP'][-1]
63 63 self.x = self.data.times
64 64 self.y = self.data.yrange[0:NDP]
65 65 self.z = self.data['rti']
66 66 self.z = numpy.ma.masked_invalid(self.z)
67 67
68 68 if self.decimation is None:
69 69 x, y, z = self.fill_gaps(self.x, self.y, self.z)
70 70 else:
71 71 x, y, z = self.fill_gaps(*self.decimate())
72 72
73 73 for n, ax in enumerate(self.axes):
74 74
75 75 self.zmax = self.zmax if self.zmax is not None else numpy.max(
76 76 self.z[1][0,12:40])
77 77 self.zmin = self.zmin if self.zmin is not None else numpy.min(
78 78 self.z[1][0,12:40])
79 79
80 80 if ax.firsttime:
81 81
82 82 if self.zlimits is not None:
83 83 self.zmin, self.zmax = self.zlimits[n]
84 84
85 85 ax.plt = ax.pcolormesh(x, y, z[n].T,
86 86 vmin=self.zmin,
87 87 vmax=self.zmax,
88 88 cmap=plt.get_cmap(self.colormap)
89 89 )
90 90 else:
91 91 #if self.zlimits is not None:
92 92 #self.zmin, self.zmax = self.zlimits[n]
93 ax.collections.remove(ax.collections[0])
93 ax.plt.remove()
94 94 ax.plt = ax.pcolormesh(x, y, z[n].T,
95 95 vmin=self.zmin,
96 96 vmax=self.zmax,
97 97 cmap=plt.get_cmap(self.colormap)
98 98 )
99 99
100 100
101 101 class RTILPPlot(RTIPlot):
102 102 '''
103 103 Written by R. Flores
104 104 '''
105 105 '''
106 106 Plot for RTI Long Pulse Using Cross Products Analysis
107 107 '''
108 108
109 109 CODE = 'RTILP'
110 110 colormap = 'jet'
111 111 plot_name = 'RTI LP'
112 112 plot_type = 'pcolorbuffer'
113 113
114 114 def setup(self):
115 115 self.xaxis = 'time'
116 116 self.ncols = 1
117 117 self.nrows = 2
118 118 self.nplots = self.nrows
119 119
120 120 self.ylabel = 'Range [km]'
121 121 self.xlabel = 'Time (LT)'
122 122
123 123 self.cb_label = 'Intensity (dB)'
124 124
125 125 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
126 126
127 127
128 128 self.titles = ['{} Channel {}'.format(
129 129 self.plot_name.upper(), '0'),'{} Channel {}'.format(
130 130 self.plot_name.upper(), '1'),'{} Channel {}'.format(
131 131 self.plot_name.upper(), '2'),'{} Channel {}'.format(
132 132 self.plot_name.upper(), '3')]
133 133
134 134
135 135 def update(self, dataOut):
136 136
137 137 data = {}
138 138 meta = {}
139 139 data['rti'] = dataOut.data_for_RTI_LP
140 140 data['NRANGE'] = dataOut.NRANGE
141 141
142 142 return data, meta
143 143
144 144 def plot(self):
145 145
146 146 NRANGE = self.data['NRANGE'][-1]
147 147 self.x = self.data.times
148 148 self.y = self.data.yrange[0:NRANGE]
149 149
150 150 self.z = self.data['rti']
151 151
152 152 self.z = numpy.ma.masked_invalid(self.z)
153 153
154 154 if self.decimation is None:
155 155 x, y, z = self.fill_gaps(self.x, self.y, self.z)
156 156 else:
157 157 x, y, z = self.fill_gaps(*self.decimate())
158 158
159 159 for n, ax in enumerate(self.axes):
160 160
161 161 self.zmax = self.zmax if self.zmax is not None else numpy.max(
162 162 self.z[1][0,12:40])
163 163 self.zmin = self.zmin if self.zmin is not None else numpy.min(
164 164 self.z[1][0,12:40])
165 165
166 166 if ax.firsttime:
167 167
168 168 if self.zlimits is not None:
169 169 self.zmin, self.zmax = self.zlimits[n]
170 170
171 171
172 172 ax.plt = ax.pcolormesh(x, y, z[n].T,
173 173 vmin=self.zmin,
174 174 vmax=self.zmax,
175 175 cmap=plt.get_cmap(self.colormap)
176 176 )
177 177
178 178 else:
179 179 if self.zlimits is not None:
180 180 self.zmin, self.zmax = self.zlimits[n]
181 ax.collections.remove(ax.collections[0])
181 ax.plt.remove()
182 182 ax.plt = ax.pcolormesh(x, y, z[n].T,
183 183 vmin=self.zmin,
184 184 vmax=self.zmax,
185 185 cmap=plt.get_cmap(self.colormap)
186 186 )
187 187
188 188
189 189 class DenRTIPlot(RTIPlot):
190 190 '''
191 191 Written by R. Flores
192 192 '''
193 193 '''
194 Plot for Den
194 RTI Plot for Electron Densities
195 195 '''
196 196
197 197 CODE = 'denrti'
198 198 colormap = 'jet'
199 199
200 200 def setup(self):
201 201 self.xaxis = 'time'
202 202 self.ncols = 1
203 203 self.nrows = self.data.shape(self.CODE)[0]
204 204 self.nplots = self.nrows
205 205
206 206 self.ylabel = 'Range [km]'
207 207 self.xlabel = 'Time (LT)'
208 208
209 209 self.plots_adjust.update({'wspace': 0.8, 'hspace':0.2, 'left': 0.2, 'right': 0.9, 'bottom': 0.18})
210 210
211 211 if self.CODE == 'denrti':
212 212 self.cb_label = r'$\mathrm{N_e}$ Electron Density ($\mathrm{1/cm^3}$)'
213 213
214 214 self.titles = ['Electron Density RTI']
215 215
216 216 def update(self, dataOut):
217 217
218 218 data = {}
219 219 meta = {}
220 220
221 221 data['denrti'] = dataOut.DensityFinal*1.e-6 #To Plot in cm^-3
222 222
223 223 return data, meta
224 224
225 225 def plot(self):
226 226
227 227 self.x = self.data.times
228 228 self.y = self.data.yrange
229 229
230 230 self.z = self.data[self.CODE]
231 231
232 232 self.z = numpy.ma.masked_invalid(self.z)
233 233
234 234 if self.decimation is None:
235 235 x, y, z = self.fill_gaps(self.x, self.y, self.z)
236 236 else:
237 237 x, y, z = self.fill_gaps(*self.decimate())
238 238
239 239 for n, ax in enumerate(self.axes):
240 240
241 241 self.zmax = self.zmax if self.zmax is not None else numpy.max(
242 242 self.z[n])
243 243 self.zmin = self.zmin if self.zmin is not None else numpy.min(
244 244 self.z[n])
245 245
246 246 if ax.firsttime:
247 247
248 248 if self.zlimits is not None:
249 249 self.zmin, self.zmax = self.zlimits[n]
250 250 if numpy.log10(self.zmin)<0:
251 251 self.zmin=1
252 252 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
253 vmin=self.zmin,
254 vmax=self.zmax,
253 #vmin=self.zmin,
254 #vmax=self.zmax,
255 255 cmap=self.cmaps[n],
256 norm=colors.LogNorm()
256 norm=colors.LogNorm(vmin=self.zmin,vmax=self.zmax)
257 257 )
258 258
259 259 else:
260 260 if self.zlimits is not None:
261 261 self.zmin, self.zmax = self.zlimits[n]
262 ax.collections.remove(ax.collections[0])
262 ax.plt.remove()
263 263 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
264 vmin=self.zmin,
265 vmax=self.zmax,
264 #vmin=self.zmin,
265 #vmax=self.zmax,
266 266 cmap=self.cmaps[n],
267 norm=colors.LogNorm()
267 norm=colors.LogNorm(vmin=self.zmin,vmax=self.zmax)
268 268 )
269 269
270 270
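The vmin/vmax arguments are commented out above because recent Matplotlib releases reject passing them alongside a norm; the limits are handed to LogNorm itself instead. A minimal, self-contained sketch of the same pattern (random positive data, illustrative limits), not part of the changeset:

import numpy
import matplotlib.pyplot as plt
import matplotlib.colors as colors

z = numpy.random.rand(19, 9) * 1e6 + 1.0   # strictly positive data for a log scale
fig, ax = plt.subplots()
mesh = ax.pcolormesh(z, cmap='jet',
                     norm=colors.LogNorm(vmin=1.0, vmax=1e6))
fig.colorbar(mesh, ax=ax)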
271 271 class ETempRTIPlot(RTIPlot):
272 272 '''
273 273 Written by R. Flores
274 274 '''
275 275 '''
276 276 Plot for Electron Temperature
277 277 '''
278 278
279 279 CODE = 'ETemp'
280 280 colormap = 'jet'
281 281
282 282 def setup(self):
283 283 self.xaxis = 'time'
284 284 self.ncols = 1
285 285 self.nrows = self.data.shape(self.CODE)[0]
286 286 self.nplots = self.nrows
287 287
288 288 self.ylabel = 'Range [km]'
289 289 self.xlabel = 'Time (LT)'
290 290 self.plots_adjust.update({'wspace': 0.8, 'hspace':0.2, 'left': 0.2, 'right': 0.9, 'bottom': 0.18})
291 291 if self.CODE == 'ETemp':
292 292 self.cb_label = 'Electron Temperature (K)'
293 293 self.titles = ['Electron Temperature RTI']
294 294 if self.CODE == 'ITemp':
295 295 self.cb_label = 'Ion Temperature (K)'
296 296 self.titles = ['Ion Temperature RTI']
297 297 if self.CODE == 'HeFracLP':
298 298 self.cb_label ='He+ Fraction'
299 299 self.titles = ['He+ Fraction RTI']
300 300 self.zmax=0.16
301 301 if self.CODE == 'HFracLP':
302 302 self.cb_label ='H+ Fraction'
303 303 self.titles = ['H+ Fraction RTI']
304 304
305 305 def update(self, dataOut):
306 306
307 307 data = {}
308 308 meta = {}
309 309
310 310 data['ETemp'] = dataOut.ElecTempFinal
311 311
312 312 return data, meta
313 313
314 314 def plot(self):
315 315
316 316 self.x = self.data.times
317 317 self.y = self.data.yrange
318 318 self.z = self.data[self.CODE]
319 319
320 320 self.z = numpy.ma.masked_invalid(self.z)
321 321
322 322 if self.decimation is None:
323 323 x, y, z = self.fill_gaps(self.x, self.y, self.z)
324 324 else:
325 325 x, y, z = self.fill_gaps(*self.decimate())
326 326
327 327 for n, ax in enumerate(self.axes):
328 328
329 329 self.zmax = self.zmax if self.zmax is not None else numpy.max(
330 330 self.z[n])
331 331 self.zmin = self.zmin if self.zmin is not None else numpy.min(
332 332 self.z[n])
333 333
334 334 if ax.firsttime:
335 335
336 336 if self.zlimits is not None:
337 337 self.zmin, self.zmax = self.zlimits[n]
338 338
339 339 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
340 340 vmin=self.zmin,
341 341 vmax=self.zmax,
342 342 cmap=self.cmaps[n]
343 343 )
344 344 #plt.tight_layout()
345 345
346 346 else:
347 347 if self.zlimits is not None:
348 348 self.zmin, self.zmax = self.zlimits[n]
349 ax.collections.remove(ax.collections[0])
349 ax.plt.remove()
350 350 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
351 351 vmin=self.zmin,
352 352 vmax=self.zmax,
353 353 cmap=self.cmaps[n]
354 354 )
355 355
356 356
357 357 class ITempRTIPlot(ETempRTIPlot):
358 358 '''
359 359 Written by R. Flores
360 360 '''
361 361 '''
362 362 Plot for Ion Temperature
363 363 '''
364 364
365 365 CODE = 'ITemp'
366 366 colormap = 'jet'
367 367 plot_name = 'Ion Temperature'
368 368
369 369 def update(self, dataOut):
370 370
371 371 data = {}
372 372 meta = {}
373 373
374 374 data['ITemp'] = dataOut.IonTempFinal
375 375
376 376 return data, meta
377 377
378 378
379 379 class HFracRTIPlot(ETempRTIPlot):
380 380 '''
381 381 Written by R. Flores
382 382 '''
383 383 '''
384 384 Plot for H+ LP
385 385 '''
386 386
387 387 CODE = 'HFracLP'
388 388 colormap = 'jet'
389 389 plot_name = 'H+ Frac'
390 390
391 391 def update(self, dataOut):
392 392
393 393 data = {}
394 394 meta = {}
395 395 data['HFracLP'] = dataOut.PhyFinal
396 396
397 397 return data, meta
398 398
399 399
400 400 class HeFracRTIPlot(ETempRTIPlot):
401 401 '''
402 402 Written by R. Flores
403 403 '''
404 404 '''
405 405 Plot for He+ LP
406 406 '''
407 407
408 408 CODE = 'HeFracLP'
409 409 colormap = 'jet'
410 410 plot_name = 'He+ Frac'
411 411
412 412 def update(self, dataOut):
413 413
414 414 data = {}
415 415 meta = {}
416 416 data['HeFracLP'] = dataOut.PheFinal
417 417
418 418 return data, meta
419 419
420 420
421 421 class TempsDPPlot(Plot):
422 422 '''
423 423 Written by R. Flores
424 424 '''
425 425 '''
426 426 Plot for Electron - Ion Temperatures
427 427 '''
428 428
429 429 CODE = 'tempsDP'
430 430 #plot_name = 'Temperatures'
431 431 plot_type = 'scatterbuffer'
432 432
433 433 def setup(self):
434 434
435 435 self.ncols = 1
436 436 self.nrows = 1
437 437 self.nplots = 1
438 438 self.ylabel = 'Range [km]'
439 439 self.xlabel = 'Temperature (K)'
440 440 self.titles = ['Electron/Ion Temperatures']
441 441 self.width = 3.5
442 442 self.height = 5.5
443 443 self.colorbar = False
444 444 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
445 445
446 446 def update(self, dataOut):
447 447 data = {}
448 448 meta = {}
449 449
450 450 data['Te'] = dataOut.te2
451 451 data['Ti'] = dataOut.ti2
452 452 data['Te_error'] = dataOut.ete2
453 453 data['Ti_error'] = dataOut.eti2
454 454
455 455 meta['yrange'] = dataOut.heightList[0:dataOut.NSHTS]
456 456
457 457 return data, meta
458 458
459 459 def plot(self):
460 460
461 461 y = self.data.yrange
462 462
463 463 self.xmin = -100
464 464 self.xmax = 5000
465 465
466 466 ax = self.axes[0]
467 467
468 468 data = self.data[-1]
469 469
470 470 Te = data['Te']
471 471 Ti = data['Ti']
472 472 errTe = data['Te_error']
473 473 errTi = data['Ti_error']
474 474
475 475 if ax.firsttime:
476 ax.errorbar(Te, y, xerr=errTe, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='Te')
477 ax.errorbar(Ti, y, fmt='k^', xerr=errTi,elinewidth=1.0,color='b',linewidth=2.0, label='Ti')
476 ax.errorbar(Te, y, xerr=errTe, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='Te')
477 ax.errorbar(Ti, y, fmt='k^', xerr=errTi,elinewidth=1.0,color='k',linewidth=2.0, label='Ti')
478 478 plt.legend(loc='lower right')
479 479 self.ystep_given = 50
480 480 ax.yaxis.set_minor_locator(MultipleLocator(15))
481 481 ax.grid(which='minor')
482 482
483 483 else:
484 484 self.clear_figures()
485 ax.errorbar(Te, y, xerr=errTe, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='Te')
486 ax.errorbar(Ti, y, fmt='k^', xerr=errTi,elinewidth=1.0,color='b',linewidth=2.0, label='Ti')
485 ax.errorbar(Te, y, xerr=errTe, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='Te')
486 ax.errorbar(Ti, y, fmt='k^', xerr=errTi,elinewidth=1.0,color='k',linewidth=2.0, label='Ti')
487 487 plt.legend(loc='lower right')
488 488 ax.yaxis.set_minor_locator(MultipleLocator(15))
489 489
490 490
491 491 class TempsHPPlot(Plot):
492 492 '''
493 493 Written by R. Flores
494 494 '''
495 495 '''
496 496 Plot for Temperatures Hybrid Experiment
497 497 '''
498 498
499 499 CODE = 'temps_LP'
500 500 #plot_name = 'Temperatures'
501 501 plot_type = 'scatterbuffer'
502 502
503 503
504 504 def setup(self):
505 505
506 506 self.ncols = 1
507 507 self.nrows = 1
508 508 self.nplots = 1
509 509 self.ylabel = 'Range [km]'
510 510 self.xlabel = 'Temperature (K)'
511 511 self.titles = ['Electron/Ion Temperatures']
512 512 self.width = 3.5
513 513 self.height = 6.5
514 514 self.colorbar = False
515 515 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
516 516
517 517 def update(self, dataOut):
518 518 data = {}
519 519 meta = {}
520 520
521 521
522 522 data['Te'] = numpy.concatenate((dataOut.te2[:dataOut.cut],dataOut.te[dataOut.cut:]))
523 523 data['Ti'] = numpy.concatenate((dataOut.ti2[:dataOut.cut],dataOut.ti[dataOut.cut:]))
524 524 data['Te_error'] = numpy.concatenate((dataOut.ete2[:dataOut.cut],dataOut.ete[dataOut.cut:]))
525 525 data['Ti_error'] = numpy.concatenate((dataOut.eti2[:dataOut.cut],dataOut.eti[dataOut.cut:]))
526 526
527 527 meta['yrange'] = dataOut.heightList[0:dataOut.NACF]
528 528
529 529 return data, meta
530 530
531 531 def plot(self):
532 532
533 533
534 534 self.y = self.data.yrange
535 535 self.xmin = -100
536 536 self.xmax = 4500
537 537 ax = self.axes[0]
538 538
539 539 data = self.data[-1]
540 540
541 541 Te = data['Te']
542 542 Ti = data['Ti']
543 543 errTe = data['Te_error']
544 544 errTi = data['Ti_error']
545 545
546 546 if ax.firsttime:
547 547
548 ax.errorbar(Te, self.y, xerr=errTe, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='Te')
549 ax.errorbar(Ti, self.y, fmt='k^', xerr=errTi,elinewidth=1.0,color='b',linewidth=2.0, label='Ti')
548 ax.errorbar(Te, self.y, xerr=errTe, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='Te')
549 ax.errorbar(Ti, self.y, fmt='k^', xerr=errTi,elinewidth=1.0,color='k',linewidth=2.0, label='Ti')
550 550 plt.legend(loc='lower right')
551 551 self.ystep_given = 200
552 552 ax.yaxis.set_minor_locator(MultipleLocator(15))
553 553 ax.grid(which='minor')
554 554
555 555 else:
556 556 self.clear_figures()
557 ax.errorbar(Te, self.y, xerr=errTe, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='Te')
558 ax.errorbar(Ti, self.y, fmt='k^', xerr=errTi,elinewidth=1.0,color='b',linewidth=2.0, label='Ti')
557 ax.errorbar(Te, self.y, xerr=errTe, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='Te')
558 ax.errorbar(Ti, self.y, fmt='k^', xerr=errTi,elinewidth=1.0,color='k',linewidth=2.0, label='Ti')
559 559 plt.legend(loc='lower right')
560 560 ax.yaxis.set_minor_locator(MultipleLocator(15))
561 561 ax.grid(which='minor')
562 562
563 563
564 564 class FracsHPPlot(Plot):
565 565 '''
566 566 Written by R. Flores
567 567 '''
568 568 '''
569 569 Plot for Composition LP
570 570 '''
571 571
572 572 CODE = 'fracs_LP'
573 573 plot_type = 'scatterbuffer'
574 574
575 575
576 576 def setup(self):
577 577
578 578 self.ncols = 1
579 579 self.nrows = 1
580 580 self.nplots = 1
581 581 self.ylabel = 'Range [km]'
582 582 self.xlabel = 'Frac'
583 583 self.titles = ['Composition']
584 584 self.width = 3.5
585 585 self.height = 6.5
586 586 self.colorbar = False
587 587 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
588 588
589 589 def update(self, dataOut):
590 590 data = {}
591 591 meta = {}
592 592
593 593 #aux_nan=numpy.zeros(dataOut.cut,'float32')
594 594 #aux_nan[:]=numpy.nan
595 595 #data['ph'] = numpy.concatenate((aux_nan,dataOut.ph[dataOut.cut:]))
596 596 #data['eph'] = numpy.concatenate((aux_nan,dataOut.eph[dataOut.cut:]))
597 597
598 598 data['ph'] = dataOut.ph[dataOut.cut:]
599 599 data['eph'] = dataOut.eph[dataOut.cut:]
600 600 data['phe'] = dataOut.phe[dataOut.cut:]
601 601 data['ephe'] = dataOut.ephe[dataOut.cut:]
602 602
603 603 data['cut'] = dataOut.cut
604 604
605 605 meta['yrange'] = dataOut.heightList[0:dataOut.NACF]
606 606
607 607
608 608 return data, meta
609 609
610 610 def plot(self):
611 611
612 612 data = self.data[-1]
613 613
614 614 ph = data['ph']
615 615 eph = data['eph']
616 616 phe = data['phe']
617 617 ephe = data['ephe']
618 618 cut = data['cut']
619 619 self.y = self.data.yrange
620 620
621 621 self.xmin = 0
622 622 self.xmax = 1
623 623 ax = self.axes[0]
624 624
625 625 if ax.firsttime:
626 626
627 ax.errorbar(ph, self.y[cut:], xerr=eph, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='H+')
628 ax.errorbar(phe, self.y[cut:], fmt='k^', xerr=ephe,elinewidth=1.0,color='b',linewidth=2.0, label='He+')
627 ax.errorbar(ph, self.y[cut:], xerr=eph, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='H+')
628 ax.errorbar(phe, self.y[cut:], fmt='k^', xerr=ephe,elinewidth=1.0,color='k',linewidth=2.0, label='He+')
629 629 plt.legend(loc='lower right')
630 630 self.xstep_given = 0.2
631 631 self.ystep_given = 200
632 632 ax.yaxis.set_minor_locator(MultipleLocator(15))
633 633 ax.grid(which='minor')
634 634
635 635 else:
636 636 self.clear_figures()
637 ax.errorbar(ph, self.y[cut:], xerr=eph, fmt='r^',elinewidth=1.0,color='b',linewidth=2.0, label='H+')
638 ax.errorbar(phe, self.y[cut:], fmt='k^', xerr=ephe,elinewidth=1.0,color='b',linewidth=2.0, label='He+')
637 ax.errorbar(ph, self.y[cut:], xerr=eph, fmt='r^',elinewidth=1.0,color='r',linewidth=2.0, label='H+')
638 ax.errorbar(phe, self.y[cut:], fmt='k^', xerr=ephe,elinewidth=1.0,color='k',linewidth=2.0, label='He+')
639 639 plt.legend(loc='lower right')
640 640 ax.yaxis.set_minor_locator(MultipleLocator(15))
641 641 ax.grid(which='minor')
642 642
643 643 class EDensityPlot(Plot):
644 644 '''
645 645 Written by R. Flores
646 646 '''
647 647 '''
648 648 Plot for electron density
649 649 '''
650 650
651 651 CODE = 'den'
652 652 #plot_name = 'Electron Density'
653 653 plot_type = 'scatterbuffer'
654 654
655 655 def setup(self):
656 656
657 657 self.ncols = 1
658 658 self.nrows = 1
659 659 self.nplots = 1
660 660 self.ylabel = 'Range [km]'
661 661 self.xlabel = r'$\mathrm{N_e}$ Electron Density ($\mathrm{1/cm^3}$)'
662 662 self.titles = ['Electron Density']
663 663 self.width = 3.5
664 664 self.height = 5.5
665 665 self.colorbar = False
666 666 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
667 667
668 668 def update(self, dataOut):
669 669 data = {}
670 670 meta = {}
671 671
672 672 data['den_power'] = dataOut.ph2[:dataOut.NSHTS]
673 673 data['den_Faraday'] = dataOut.dphi[:dataOut.NSHTS]
674 674 data['den_error'] = dataOut.sdp2[:dataOut.NSHTS]
675 675 #data['err_Faraday'] = dataOut.sdn1[:dataOut.NSHTS]
676 676 #print(numpy.shape(data['den_power']))
677 677 #print(numpy.shape(data['den_Faraday']))
678 678 #print(numpy.shape(data['den_error']))
679 679
680 680 data['NSHTS'] = dataOut.NSHTS
681 681
682 682 meta['yrange'] = dataOut.heightList[0:dataOut.NSHTS]
683 683
684 684 return data, meta
685 685
686 686 def plot(self):
687 687
688 688 y = self.data.yrange
689 689
690 690 #self.xmin = 1e3
691 691 #self.xmax = 1e7
692 692
693 693 ax = self.axes[0]
694 694
695 695 data = self.data[-1]
696 696
697 697 DenPow = data['den_power']
698 698 DenFar = data['den_Faraday']
699 699 errDenPow = data['den_error']
700 700 #errFaraday = data['err_Faraday']
701 701
702 702 NSHTS = data['NSHTS']
703 703
704 704 if self.CODE == 'denLP':
705 705 DenPowLP = data['den_LP']
706 706 errDenPowLP = data['den_LP_error']
707 707 cut = data['cut']
708 708
709 709 if ax.firsttime:
710 710 self.autoxticks=False
711 711 #ax.errorbar(DenFar, y[:NSHTS], xerr=1, fmt='h-',elinewidth=1.0,color='g',linewidth=1.0, label='Faraday Profile',markersize=2)
712 712 ax.errorbar(DenFar, y[:NSHTS], xerr=1, fmt='h-',elinewidth=1.0,color='g',linewidth=1.0, label='Faraday',markersize=2,linestyle='-')
713 713 #ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power Profile',markersize=2)
714 ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power',markersize=2,linestyle='-')
714 ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='k',linewidth=1.0, label='Power',markersize=2,linestyle='-')
715 715
716 716 if self.CODE=='denLP':
717 717 ax.errorbar(DenPowLP[cut:], y[cut:], xerr=errDenPowLP[cut:], fmt='r^-',elinewidth=1.0,color='r',linewidth=1.0, label='LP Profile',markersize=2)
718 718
719 719 plt.legend(loc='upper left',fontsize=8.5)
720 720 #plt.legend(loc='lower left',fontsize=8.5)
721 ax.set_xscale("log", nonposx='clip')
721 ax.set_xscale("log")#, nonposx='clip')
722 722 grid_y_ticks=numpy.arange(numpy.nanmin(y),numpy.nanmax(y),50)
723 723 self.ystep_given=100
724 724 if self.CODE=='denLP':
725 725 self.ystep_given=200
726 726 ax.set_yticks(grid_y_ticks,minor=True)
727 727 locmaj = LogLocator(base=10,numticks=12)
728 728 ax.xaxis.set_major_locator(locmaj)
729 729 locmin = LogLocator(base=10.0,subs=(0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9),numticks=12)
730 730 ax.xaxis.set_minor_locator(locmin)
731 731 ax.xaxis.set_minor_formatter(NullFormatter())
732 732 ax.grid(which='minor')
733 733
734 734 else:
735 735 dataBefore = self.data[-2]
736 736 DenPowBefore = dataBefore['den_power']
737 737 self.clear_figures()
738 738 #ax.errorbar(DenFar, y[:NSHTS], xerr=1, fmt='h-',elinewidth=1.0,color='g',linewidth=1.0, label='Faraday Profile',markersize=2)
739 739 ax.errorbar(DenFar, y[:NSHTS], xerr=1, fmt='h-',elinewidth=1.0,color='g',linewidth=1.0, label='Faraday',markersize=2,linestyle='-')
740 740 #ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power Profile',markersize=2)
741 ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power',markersize=2,linestyle='-')
741 ax.errorbar(DenPow, y[:NSHTS], fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='k',linewidth=1.0, label='Power',markersize=2,linestyle='-')
742 742 ax.errorbar(DenPowBefore, y[:NSHTS], elinewidth=1.0,color='r',linewidth=0.5,linestyle="dashed")
743 743
744 744 if self.CODE=='denLP':
745 745 ax.errorbar(DenPowLP[cut:], y[cut:], fmt='r^-', xerr=errDenPowLP[cut:],elinewidth=1.0,color='r',linewidth=1.0, label='LP Profile',markersize=2)
746 746
747 ax.set_xscale("log", nonposx='clip')
747 ax.set_xscale("log")#, nonposx='clip')
748 748 grid_y_ticks=numpy.arange(numpy.nanmin(y),numpy.nanmax(y),50)
749 749 ax.set_yticks(grid_y_ticks,minor=True)
750 750 locmaj = LogLocator(base=10,numticks=12)
751 751 ax.xaxis.set_major_locator(locmaj)
752 752 locmin = LogLocator(base=10.0,subs=(0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9),numticks=12)
753 753 ax.xaxis.set_minor_locator(locmin)
754 754 ax.xaxis.set_minor_formatter(NullFormatter())
755 755 ax.grid(which='minor')
756 756 plt.legend(loc='upper left',fontsize=8.5)
757 757 #plt.legend(loc='lower left',fontsize=8.5)
758 758
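The nonposx='clip' argument is commented out above because that keyword was removed from newer Matplotlib; if clipping of non-positive values is still wanted, the log scale now takes it as nonpositive='clip'. A minimal sketch with illustrative data, not part of the changeset:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([1e2, 1e4, 1e6], [100, 300, 500])
ax.set_xscale("log", nonpositive='clip')   # replacement for the old nonposx='clip'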
759 759 class RelativeDenPlot(Plot):
760 760 '''
761 761 Written by R. Flores
762 762 '''
763 763 '''
764 764 Plot for relative electron density
765 765 '''
766 766
767 767 CODE = 'den'
768 768 #plot_name = 'Electron Density'
769 769 plot_type = 'scatterbuffer'
770 770
771 771 def setup(self):
772 772
773 773 self.ncols = 1
774 774 self.nrows = 1
775 775 self.nplots = 1
776 776 self.ylabel = 'Range [km]'
777 777 self.xlabel = r'$\mathrm{N_e}$ Relative Electron Density ($\mathrm{1/cm^3}$)'
778 778 self.titles = ['Electron Density']
779 779 self.width = 3.5
780 780 self.height = 5.5
781 781 self.colorbar = False
782 782 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
783 783
784 784 def update(self, dataOut):
785 785 data = {}
786 786 meta = {}
787 787
788 788 data['den_power'] = dataOut.ph2
789 789 data['den_error'] = dataOut.sdp2
790 790
791 791 meta['yrange'] = dataOut.heightList
792 792
793 793 return data, meta
794 794
795 795 def plot(self):
796 796
797 797 y = self.data.yrange
798 798
799 799 ax = self.axes[0]
800 800
801 801 data = self.data[-1]
802 802
803 803 DenPow = data['den_power']
804 804 errDenPow = data['den_error']
805 805
806 806 if ax.firsttime:
807 807 self.autoxticks=False
808 ax.errorbar(DenPow, y, fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power',markersize=2,linestyle='-')
808 ax.errorbar(DenPow, y, fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='k',linewidth=1.0, label='Power',markersize=2,linestyle='-')
809 809
810 810 plt.legend(loc='upper left',fontsize=8.5)
811 811 #plt.legend(loc='lower left',fontsize=8.5)
812 ax.set_xscale("log", nonposx='clip')
812 ax.set_xscale("log")#, nonposx='clip')
813 813 grid_y_ticks=numpy.arange(numpy.nanmin(y),numpy.nanmax(y),50)
814 814 self.ystep_given=100
815 815 ax.set_yticks(grid_y_ticks,minor=True)
816 816 locmaj = LogLocator(base=10,numticks=12)
817 817 ax.xaxis.set_major_locator(locmaj)
818 818 locmin = LogLocator(base=10.0,subs=(0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9),numticks=12)
819 819 ax.xaxis.set_minor_locator(locmin)
820 820 ax.xaxis.set_minor_formatter(NullFormatter())
821 821 ax.grid(which='minor')
822 822
823 823 else:
824 824 dataBefore = self.data[-2]
825 825 DenPowBefore = dataBefore['den_power']
826 826 self.clear_figures()
827 ax.errorbar(DenPow, y, fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='b',linewidth=1.0, label='Power',markersize=2,linestyle='-')
827 ax.errorbar(DenPow, y, fmt='k^-', xerr=errDenPow,elinewidth=1.0,color='k',linewidth=1.0, label='Power',markersize=2,linestyle='-')
828 828 ax.errorbar(DenPowBefore, y, elinewidth=1.0,color='r',linewidth=0.5,linestyle="dashed")
829 829
830 ax.set_xscale("log", nonposx='clip')
830 ax.set_xscale("log")#, nonposx='clip')
831 831 grid_y_ticks=numpy.arange(numpy.nanmin(y),numpy.nanmax(y),50)
832 832 ax.set_yticks(grid_y_ticks,minor=True)
833 833 locmaj = LogLocator(base=10,numticks=12)
834 834 ax.xaxis.set_major_locator(locmaj)
835 835 locmin = LogLocator(base=10.0,subs=(0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9),numticks=12)
836 836 ax.xaxis.set_minor_locator(locmin)
837 837 ax.xaxis.set_minor_formatter(NullFormatter())
838 838 ax.grid(which='minor')
839 839 plt.legend(loc='upper left',fontsize=8.5)
840 840 #plt.legend(loc='lower left',fontsize=8.5)
841 841
842 842 class FaradayAnglePlot(Plot):
843 843 '''
844 844 Written by R. Flores
845 845 '''
846 846 '''
847 847 Plot for Faraday rotation angle
848 848 '''
849 849
850 850 CODE = 'angle'
851 851 plot_name = 'Faraday Angle'
852 852 plot_type = 'scatterbuffer'
853 853
854 854 def setup(self):
855 855
856 856 self.ncols = 1
857 857 self.nrows = 1
858 858 self.nplots = 1
859 859 self.ylabel = 'Range [km]'
860 860 self.xlabel = 'Faraday Angle (º)'
861 861 self.titles = ['Faraday Angle']
862 862 self.width = 3.5
863 863 self.height = 5.5
864 864 self.colorbar = False
865 865 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
866 866
867 867 def update(self, dataOut):
868 868 data = {}
869 869 meta = {}
870 870
871 871 data['angle'] = numpy.degrees(dataOut.phi)
872 872 #'''
873 873 #print(dataOut.phi_uwrp)
874 874 #print(data['angle'])
875 875 #exit(1)
876 876 #'''
877 877 data['dphi'] = dataOut.dphi_uc*10
878 878 #print(dataOut.dphi)
879 879
880 880 #data['NSHTS'] = dataOut.NSHTS
881 881
882 882 #meta['yrange'] = dataOut.heightList[0:dataOut.NSHTS]
883 883
884 884 return data, meta
885 885
886 886 def plot(self):
887 887
888 888 data = self.data[-1]
889 889 self.x = data[self.CODE]
890 890 dphi = data['dphi']
891 891 self.y = self.data.yrange
892 892 self.xmin = -360#-180
893 893 self.xmax = 360#180
894 894 ax = self.axes[0]
895 895
896 896 if ax.firsttime:
897 897 self.autoxticks=False
898 898 #if self.CODE=='den':
899 899 ax.plot(self.x, self.y,marker='o',color='g',linewidth=1.0,markersize=2)
900 900 ax.plot(dphi, self.y,marker='o',color='blue',linewidth=1.0,markersize=2)
901 901
902 902 grid_y_ticks=numpy.arange(numpy.nanmin(self.y),numpy.nanmax(self.y),50)
903 903 self.ystep_given=100
904 904 if self.CODE=='denLP':
905 905 self.ystep_given=200
906 906 ax.set_yticks(grid_y_ticks,minor=True)
907 907 ax.grid(which='minor')
908 908 #plt.tight_layout()
909 909 else:
910 910
911 911 self.clear_figures()
912 912 #if self.CODE=='den':
913 913 #print(numpy.shape(self.x))
914 914 ax.plot(self.x, self.y, marker='o',color='g',linewidth=1.0, markersize=2)
915 915 ax.plot(dphi, self.y,marker='o',color='blue',linewidth=1.0,markersize=2)
916 916
917 917 grid_y_ticks=numpy.arange(numpy.nanmin(self.y),numpy.nanmax(self.y),50)
918 918 ax.set_yticks(grid_y_ticks,minor=True)
919 919 ax.grid(which='minor')
920 920
921 921 class EDensityHPPlot(EDensityPlot):
922 922 '''
923 923 Written by R. Flores
924 924 '''
925 925 '''
926 926 Plot for Electron Density Hybrid Experiment
927 927 '''
928 928
929 929 CODE = 'denLP'
930 930 plot_name = 'Electron Density'
931 931 plot_type = 'scatterbuffer'
932 932
933 933 def update(self, dataOut):
934 934 data = {}
935 935 meta = {}
936 936
937 937 data['den_power'] = dataOut.ph2[:dataOut.NSHTS]
938 938 data['den_Faraday']=dataOut.dphi[:dataOut.NSHTS]
939 939 data['den_error']=dataOut.sdp2[:dataOut.NSHTS]
940 940 data['den_LP']=dataOut.ne[:dataOut.NACF]
941 941 data['den_LP_error']=dataOut.ene[:dataOut.NACF]*dataOut.ne[:dataOut.NACF]*0.434
942 942 #self.ene=10**dataOut.ene[:dataOut.NACF]
943 943 data['NSHTS']=dataOut.NSHTS
944 944 data['cut']=dataOut.cut
945 945
946 946 return data, meta
947 947
948 948
949 949 class ACFsPlot(Plot):
950 950 '''
951 951 Written by R. Flores
952 952 '''
953 953 '''
954 954 Plot for ACFs Double Pulse Experiment
955 955 '''
956 956
957 957 CODE = 'acfs'
958 958 #plot_name = 'ACF'
959 959 plot_type = 'scatterbuffer'
960 960
961 961
962 962 def setup(self):
963 963 self.ncols = 1
964 964 self.nrows = 1
965 965 self.nplots = 1
966 966 self.ylabel = 'Range [km]'
967 967 self.xlabel = 'Lag (ms)'
968 968 self.titles = ['ACFs']
969 969 self.width = 3.5
970 970 self.height = 5.5
971 971 self.colorbar = False
972 972 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
973 973
974 974 def update(self, dataOut):
975 975 data = {}
976 976 meta = {}
977 977
978 978 data['ACFs'] = dataOut.acfs_to_plot
979 979 data['ACFs_error'] = dataOut.acfs_error_to_plot
980 980 data['lags'] = dataOut.lags_to_plot
981 981 data['Lag_contaminated_1'] = dataOut.x_igcej_to_plot
982 982 data['Lag_contaminated_2'] = dataOut.x_ibad_to_plot
983 983 data['Height_contaminated_1'] = dataOut.y_igcej_to_plot
984 984 data['Height_contaminated_2'] = dataOut.y_ibad_to_plot
985 985
986 986 meta['yrange'] = numpy.array([])
987 987 #meta['NSHTS'] = dataOut.NSHTS
988 988 #meta['DPL'] = dataOut.DPL
989 989 data['NSHTS'] = dataOut.NSHTS #This is metadata
990 990 data['DPL'] = dataOut.DPL #This is metadata
991 991
992 992 return data, meta
993 993
994 994 def plot(self):
995 995
996 996 data = self.data[-1]
997 997 #NSHTS = self.meta['NSHTS']
998 998 #DPL = self.meta['DPL']
999 999 NSHTS = data['NSHTS'] #This is metadata
1000 1000 DPL = data['DPL'] #This is metadata
1001 1001
1002 1002 lags = data['lags']
1003 1003 ACFs = data['ACFs']
1004 1004 errACFs = data['ACFs_error']
1005 1005 BadLag1 = data['Lag_contaminated_1']
1006 1006 BadLag2 = data['Lag_contaminated_2']
1007 1007 BadHei1 = data['Height_contaminated_1']
1008 1008 BadHei2 = data['Height_contaminated_2']
1009 1009
1010 1010 self.xmin = 0.0
1011 1011 self.xmax = 2.0
1012 1012 self.y = ACFs
1013 1013
1014 1014 ax = self.axes[0]
1015 1015
1016 1016 if ax.firsttime:
1017 1017
1018 1018 for i in range(NSHTS):
1019 1019 x_aux = numpy.isfinite(lags[i,:])
1020 1020 y_aux = numpy.isfinite(ACFs[i,:])
1021 1021 yerr_aux = numpy.isfinite(errACFs[i,:])
1022 1022 x_igcej_aux = numpy.isfinite(BadLag1[i,:])
1023 1023 y_igcej_aux = numpy.isfinite(BadHei1[i,:])
1024 1024 x_ibad_aux = numpy.isfinite(BadLag2[i,:])
1025 1025 y_ibad_aux = numpy.isfinite(BadHei2[i,:])
1026 1026 if lags[i,:][~numpy.isnan(lags[i,:])].shape[0]>2:
1027 1027 ax.errorbar(lags[i,x_aux], ACFs[i,y_aux], yerr=errACFs[i,x_aux],color='b',marker='o',linewidth=1.0,markersize=2)
1028 1028 ax.plot(BadLag1[i,x_igcej_aux],BadHei1[i,y_igcej_aux],'x',color='red',markersize=2)
1029 1029 ax.plot(BadLag2[i,x_ibad_aux],BadHei2[i,y_ibad_aux],'X',color='red',markersize=2)
1030 1030
1031 1031 self.xstep_given = (self.xmax-self.xmin)/(DPL-1)
1032 1032 self.ystep_given = 50
1033 1033 ax.yaxis.set_minor_locator(MultipleLocator(15))
1034 1034 ax.grid(which='minor')
1035 1035
1036 1036 else:
1037 1037 self.clear_figures()
1038 1038 for i in range(NSHTS):
1039 1039 x_aux = numpy.isfinite(lags[i,:])
1040 1040 y_aux = numpy.isfinite(ACFs[i,:])
1041 1041 yerr_aux = numpy.isfinite(errACFs[i,:])
1042 1042 x_igcej_aux = numpy.isfinite(BadLag1[i,:])
1043 1043 y_igcej_aux = numpy.isfinite(BadHei1[i,:])
1044 1044 x_ibad_aux = numpy.isfinite(BadLag2[i,:])
1045 1045 y_ibad_aux = numpy.isfinite(BadHei2[i,:])
1046 1046 if lags[i,:][~numpy.isnan(lags[i,:])].shape[0]>2:
1047 1047 ax.errorbar(lags[i,x_aux], ACFs[i,y_aux], yerr=errACFs[i,x_aux],linewidth=1.0,markersize=2,color='b',marker='o')
1048 1048 ax.plot(BadLag1[i,x_igcej_aux],BadHei1[i,y_igcej_aux],'x',color='red',markersize=2)
1049 1049 ax.plot(BadLag2[i,x_ibad_aux],BadHei2[i,y_ibad_aux],'X',color='red',markersize=2)
1050 1050 ax.yaxis.set_minor_locator(MultipleLocator(15))
1051 1051
1052 1052 class ACFsLPPlot(Plot):
1053 1053 '''
1054 1054 Written by R. Flores
1055 1055 '''
1056 1056 '''
1057 1057 Plot for ACFs Long Pulse Experiment
1058 1058 '''
1059 1059
1060 1060 CODE = 'acfs_LP'
1061 1061 #plot_name = 'ACF'
1062 1062 plot_type = 'scatterbuffer'
1063 1063
1064 1064
1065 1065 def setup(self):
1066 1066 self.ncols = 1
1067 1067 self.nrows = 1
1068 1068 self.nplots = 1
1069 1069 self.ylabel = 'Range [km]'
1070 1070 self.xlabel = 'Lag (ms)'
1071 1071 self.titles = ['ACFs']
1072 1072 self.width = 3.5
1073 1073 self.height = 5.5
1074 1074 self.colorbar = False
1075 1075 self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
1076 1076
1077 1077 def update(self, dataOut):
1078 1078 data = {}
1079 1079 meta = {}
1080 1080
1081 1081 aux=numpy.zeros((dataOut.NACF,dataOut.IBITS),'float32')
1082 1082 errors=numpy.zeros((dataOut.NACF,dataOut.IBITS),'float32')
1083 1083 lags_LP_to_plot=numpy.zeros((dataOut.NACF,dataOut.IBITS),'float32')
1084 1084
1085 1085 for i in range(dataOut.NACF):
1086 1086 for j in range(dataOut.IBITS):
1087 1087 if numpy.abs(dataOut.errors[j,i]/dataOut.output_LP_integrated.real[0,i,0])<1.0:
1088 1088 aux[i,j]=dataOut.output_LP_integrated.real[j,i,0]/dataOut.output_LP_integrated.real[0,i,0]
1089 1089 aux[i,j]=max(min(aux[i,j],1.0),-1.0)*dataOut.DH+dataOut.heightList[i]
1090 1090 lags_LP_to_plot[i,j]=dataOut.lags_LP[j]
1091 1091 errors[i,j]=dataOut.errors[j,i]/dataOut.output_LP_integrated.real[0,i,0]*dataOut.DH
1092 1092 else:
1093 1093 aux[i,j]=numpy.nan
1094 1094 lags_LP_to_plot[i,j]=numpy.nan
1095 1095 errors[i,j]=numpy.nan
1096 1096
1097 1097 data['ACFs'] = aux
1098 1098 data['ACFs_error'] = errors
1099 1099 data['lags'] = lags_LP_to_plot
1100 1100
1101 1101 meta['yrange'] = numpy.array([])
1102 1102 #meta['NACF'] = dataOut.NACF
1103 1103 #meta['NLAG'] = dataOut.NLAG
1104 1104 data['NACF'] = dataOut.NACF #This is metadata
1105 1105 data['NLAG'] = dataOut.NLAG #This is metadata
1106 1106
1107 1107 return data, meta
1108 1108
1109 1109 def plot(self):
1110 1110
1111 1111 data = self.data[-1]
1112 1112 #NACF = self.meta['NACF']
1113 1113 #NLAG = self.meta['NLAG']
1114 1114 NACF = data['NACF'] #This is metadata
1115 1115 NLAG = data['NLAG'] #This is metadata
1116 1116
1117 1117 lags = data['lags']
1118 1118 ACFs = data['ACFs']
1119 1119 errACFs = data['ACFs_error']
1120 1120
1121 1121 self.xmin = 0.0
1122 1122 self.xmax = 1.5
1123 1123
1124 1124 self.y = ACFs
1125 1125
1126 1126 ax = self.axes[0]
1127 1127
1128 1128 if ax.firsttime:
1129 1129
1130 1130 for i in range(NACF):
1131 1131 x_aux = numpy.isfinite(lags[i,:])
1132 1132 y_aux = numpy.isfinite(ACFs[i,:])
1133 1133 yerr_aux = numpy.isfinite(errACFs[i,:])
1134 1134
1135 1135 if lags[i,:][~numpy.isnan(lags[i,:])].shape[0]>2:
1136 1136 ax.errorbar(lags[i,x_aux], ACFs[i,y_aux], yerr=errACFs[i,x_aux],color='b',linewidth=1.0,markersize=2,ecolor='r')
1137 1137
1138 1138 #self.xstep_given = (self.xmax-self.xmin)/(self.data.NLAG-1)
1139 1139 self.xstep_given=0.3
1140 1140 self.ystep_given = 200
1141 1141 ax.yaxis.set_minor_locator(MultipleLocator(15))
1142 1142 ax.grid(which='minor')
1143 1143
1144 1144 else:
1145 1145 self.clear_figures()
1146 1146
1147 1147 for i in range(NACF):
1148 1148 x_aux = numpy.isfinite(lags[i,:])
1149 1149 y_aux = numpy.isfinite(ACFs[i,:])
1150 1150 yerr_aux = numpy.isfinite(errACFs[i,:])
1151 1151
1152 1152 if lags[i,:][~numpy.isnan(lags[i,:])].shape[0]>2:
1153 1153 ax.errorbar(lags[i,x_aux], ACFs[i,y_aux], yerr=errACFs[i,x_aux],color='b',linewidth=1.0,markersize=2,ecolor='r')
1154 1154
1155 1155 ax.yaxis.set_minor_locator(MultipleLocator(15))
1156 1156
1157 1157
1158 1158 class CrossProductsPlot(Plot):
1159 1159 '''
1160 1160 Written by R. Flores
1161 1161 '''
1162 1162 '''
1163 1163 Plot for cross products
1164 1164 '''
1165 1165
1166 1166 CODE = 'crossprod'
1167 1167 plot_name = 'Cross Products'
1168 1168 plot_type = 'scatterbuffer'
1169 1169
1170 1170 def setup(self):
1171 1171
1172 1172 self.ncols = 3
1173 1173 self.nrows = 1
1174 1174 self.nplots = 3
1175 1175 self.ylabel = 'Range [km]'
1176 1176 self.titles = []
1177 1177 self.width = 3.5*self.nplots
1178 1178 self.height = 5.5
1179 1179 self.colorbar = False
1180 1180 self.plots_adjust.update({'wspace':.3, 'left': 0.12, 'right': 0.92, 'bottom': 0.1})
1181 1181
1182 1182
1183 1183 def update(self, dataOut):
1184 1184
1185 1185 data = {}
1186 1186 meta = {}
1187 1187
1188 1188 data['crossprod'] = dataOut.crossprods
1189 1189 data['NDP'] = dataOut.NDP
1190 1190
1191 1191 return data, meta
1192 1192
1193 1193 def plot(self):
1194 1194
1195 1195 NDP = self.data['NDP'][-1]
1196 1196 x = self.data['crossprod'][:,-1,:,:,:,:]
1197 1197 y = self.data.yrange[0:NDP]
1198 1198
1199 1199 for n, ax in enumerate(self.axes):
1200 1200
1201 1201 self.xmin=numpy.min(numpy.concatenate((x[n][0,20:30,0,0],x[n][1,20:30,0,0],x[n][2,20:30,0,0],x[n][3,20:30,0,0])))
1202 1202 self.xmax=numpy.max(numpy.concatenate((x[n][0,20:30,0,0],x[n][1,20:30,0,0],x[n][2,20:30,0,0],x[n][3,20:30,0,0])))
1203 1203
1204 1204 if ax.firsttime:
1205 1205
1206 1206 self.autoxticks=False
1207 1207 if n==0:
1208 1208 label1='kax'
1209 1209 label2='kay'
1210 1210 label3='kbx'
1211 1211 label4='kby'
1212 1212 self.xlimits=[(self.xmin,self.xmax)]
1213 1213 elif n==1:
1214 1214 label1='kax2'
1215 1215 label2='kay2'
1216 1216 label3='kbx2'
1217 1217 label4='kby2'
1218 1218 self.xlimits.append((self.xmin,self.xmax))
1219 1219 elif n==2:
1220 1220 label1='kaxay'
1221 1221 label2='kbxby'
1222 1222 label3='kaxbx'
1223 1223 label4='kaxby'
1224 1224 self.xlimits.append((self.xmin,self.xmax))
1225 1225
1226 1226 ax.plotline1 = ax.plot(x[n][0,:,0,0], y, color='r',linewidth=2.0, label=label1)
1227 1227 ax.plotline2 = ax.plot(x[n][1,:,0,0], y, color='k',linewidth=2.0, label=label2)
1228 1228 ax.plotline3 = ax.plot(x[n][2,:,0,0], y, color='b',linewidth=2.0, label=label3)
1229 1229 ax.plotline4 = ax.plot(x[n][3,:,0,0], y, color='m',linewidth=2.0, label=label4)
1230 1230 ax.legend(loc='upper right')
1231 1231 ax.set_xlim(self.xmin, self.xmax)
1232 1232 self.titles.append('{}'.format(self.plot_name.upper()))
1233 1233
1234 1234 else:
1235 1235
1236 1236 if n==0:
1237 1237 self.xlimits=[(self.xmin,self.xmax)]
1238 1238 else:
1239 1239 self.xlimits.append((self.xmin,self.xmax))
1240 1240
1241 1241 ax.set_xlim(self.xmin, self.xmax)
1242 1242
1243 1243 ax.plotline1[0].set_data(x[n][0,:,0,0],y)
1244 1244 ax.plotline2[0].set_data(x[n][1,:,0,0],y)
1245 1245 ax.plotline3[0].set_data(x[n][2,:,0,0],y)
1246 1246 ax.plotline4[0].set_data(x[n][3,:,0,0],y)
1247 1247 self.titles.append('{}'.format(self.plot_name.upper()))
1248 1248
1249 1249
1250 1250 class CrossProductsLPPlot(Plot):
1251 1251 '''
1252 1252 Written by R. Flores
1253 1253 '''
1254 1254 '''
1255 1255 Plot for cross products LP
1256 1256 '''
1257 1257
1258 1258 CODE = 'crossprodslp'
1259 1259 plot_name = 'Cross Products LP'
1260 1260 plot_type = 'scatterbuffer'
1261 1261
1262 1262
1263 1263 def setup(self):
1264 1264
1265 1265 self.ncols = 2
1266 1266 self.nrows = 1
1267 1267 self.nplots = 2
1268 1268 self.ylabel = 'Range [km]'
1269 1269 self.xlabel = 'dB'
1270 1270 self.width = 3.5*self.nplots
1271 1271 self.height = 5.5
1272 1272 self.colorbar = False
1273 1273 self.titles = []
1274 1274 self.plots_adjust.update({'wspace': .8 ,'left': 0.17, 'right': 0.88, 'bottom': 0.1})
1275 1275
1276 1276 def update(self, dataOut):
1277 1277 data = {}
1278 1278 meta = {}
1279 1279
1280 1280 data['crossprodslp'] = 10*numpy.log10(numpy.abs(dataOut.output_LP))
1281 1281
1282 1282 data['NRANGE'] = dataOut.NRANGE #This is metadata
1283 1283 data['NLAG'] = dataOut.NLAG #This is metadata
1284 1284
1285 1285 return data, meta
1286 1286
1287 1287 def plot(self):
1288 1288
1289 1289 NRANGE = self.data['NRANGE'][-1]
1290 1290 NLAG = self.data['NLAG'][-1]
1291 1291
1292 1292 x = self.data[self.CODE][:,-1,:,:]
1293 1293 self.y = self.data.yrange[0:NRANGE]
1294 1294
1295 1295 label_array=numpy.array(['lag '+ str(x) for x in range(NLAG)])
1296 1296 color_array=['r','k','g','b','c','m','y','orange','steelblue','purple','peru','darksalmon','grey','limegreen','olive','midnightblue']
1297 1297
1298 1298
1299 1299 for n, ax in enumerate(self.axes):
1300 1300
1301 1301 self.xmin=28#30
1302 1302 self.xmax=70#70
1303 1303 #self.xmin=numpy.min(numpy.concatenate((self.x[0,:,n],self.x[1,:,n])))
1304 1304 #self.xmax=numpy.max(numpy.concatenate((self.x[0,:,n],self.x[1,:,n])))
1305 1305
1306 1306 if ax.firsttime:
1307 1307
1308 1308 self.autoxticks=False
1309 1309 if n == 0:
1310 1310 self.plotline_array=numpy.zeros((2,NLAG),dtype=object)
1311 1311
1312 1312 for i in range(NLAG):
1313 1313 self.plotline_array[n,i], = ax.plot(x[i,:,n], self.y, color=color_array[i],linewidth=1.0, label=label_array[i])
1314 1314
1315 1315 ax.legend(loc='upper right')
1316 1316 ax.set_xlim(self.xmin, self.xmax)
1317 1317 if n==0:
1318 1318 self.titles.append('{} CH0'.format(self.plot_name.upper()))
1319 1319 if n==1:
1320 1320 self.titles.append('{} CH1'.format(self.plot_name.upper()))
1321 1321 else:
1322 1322 for i in range(NLAG):
1323 1323 self.plotline_array[n,i].set_data(x[i,:,n],self.y)
1324 1324
1325 1325 if n==0:
1326 1326 self.titles.append('{} CH0'.format(self.plot_name.upper()))
1327 1327 if n==1:
1328 1328 self.titles.append('{} CH1'.format(self.plot_name.upper()))
1329 1329
1330 1330
1331 1331 class NoiseDPPlot(NoisePlot):
1332 1332 '''
1333 1333 Written by R. Flores
1334 1334 '''
1335 1335 '''
1336 1336 Plot for noise Double Pulse
1337 1337 '''
1338 1338
1339 1339 CODE = 'noise'
1340 1340 #plot_name = 'Noise'
1341 1341 #plot_type = 'scatterbuffer'
1342 1342
1343 1343 def update(self, dataOut):
1344 1344
1345 1345 data = {}
1346 1346 meta = {}
1347 1347 data['noise'] = 10*numpy.log10(dataOut.noise_final)
1348 1348
1349 1349 return data, meta
1350 1350
1351 1351
1352 1352 class XmitWaveformPlot(Plot):
1353 1353 '''
1354 1354 Written by R. Flores
1355 1355 '''
1356 1356 '''
1357 1357 Plot for xmit waveform
1358 1358 '''
1359 1359
1360 1360 CODE = 'xmit'
1361 1361 plot_name = 'Xmit Waveform'
1362 1362 plot_type = 'scatterbuffer'
1363 1363
1364 1364
1365 1365 def setup(self):
1366 1366
1367 1367 self.ncols = 1
1368 1368 self.nrows = 1
1369 1369 self.nplots = 1
1370 1370 self.ylabel = ''
1371 1371 self.xlabel = 'Number of Lag'
1372 1372 self.width = 5.5
1373 1373 self.height = 3.5
1374 1374 self.colorbar = False
1375 1375 self.plots_adjust.update({'right': 0.85 })
1376 1376 self.titles = [self.plot_name]
1377 1377 #self.plots_adjust.update({'left': 0.17, 'right': 0.88, 'bottom': 0.1})
1378 1378
1379 1379 #if not self.titles:
1380 1380 #self.titles = self.data.parameters \
1381 1381 #if self.data.parameters else ['{}'.format(self.plot_name.upper())]
1382 1382
1383 1383 def update(self, dataOut):
1384 1384
1385 1385 data = {}
1386 1386 meta = {}
1387 1387
1388 1388 y_1=numpy.arctan2(dataOut.output_LP[:,0,2].imag,dataOut.output_LP[:,0,2].real)* 180 / (numpy.pi*10)
1389 1389 y_2=numpy.abs(dataOut.output_LP[:,0,2])
1390 1390 norm=numpy.max(y_2)
1391 1391 norm=max(norm,0.1)
1392 1392 y_2=y_2/norm
1393 1393
1394 1394 meta['yrange'] = numpy.array([])
1395 1395
1396 1396 data['xmit'] = numpy.vstack((y_1,y_2))
1397 1397 data['NLAG'] = dataOut.NLAG
1398 1398
1399 1399 return data, meta
1400 1400
1401 1401 def plot(self):
1402 1402
1403 1403 data = self.data[-1]
1404 1404 NLAG = data['NLAG']
1405 1405 x = numpy.arange(0,NLAG,1,'float32')
1406 1406 y = data['xmit']
1407 1407
1408 1408 self.xmin = 0
1409 1409 self.xmax = NLAG-1
1410 1410 self.ymin = -1.0
1411 1411 self.ymax = 1.0
1412 1412 ax = self.axes[0]
1413 1413
1414 1414 if ax.firsttime:
1415 1415 ax.plotline0=ax.plot(x,y[0,:],color='blue')
1416 1416 ax.plotline1=ax.plot(x,y[1,:],color='red')
1417 1417 secax=ax.secondary_xaxis(location=0.5)
1418 1418 secax.xaxis.tick_bottom()
1419 1419 secax.tick_params( labelleft=False, labeltop=False,
1420 1420 labelright=False, labelbottom=False)
1421 1421
1422 1422 self.xstep_given = 3
1423 1423 self.ystep_given = .25
1424 1424 secax.set_xticks(numpy.linspace(self.xmin, self.xmax, 6)) #only works on matplotlib.version>3.2
1425 1425
1426 1426 else:
1427 1427 ax.plotline0[0].set_data(x,y[0,:])
1428 1428 ax.plotline1[0].set_data(x,y[1,:])
@@ -1,355 +1,361
1 1 '''
2 2 Created on Nov 9, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 import schainpy.admin
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator
18 18 from schainpy.model.data.jrodata import Parameters
19 19 from schainpy.model.io.jroIO_base import Reader
20 20 from schainpy.utils import log
21 21
22 22 FILE_HEADER_STRUCTURE = numpy.dtype([
23 23 ('FMN', '<u4'),
24 24 ('nrec', '<u4'),
25 25 ('fr_offset', '<u4'),
26 26 ('id', '<u4'),
27 27 ('site', 'u1', (32,))
28 28 ])
29 29
30 30 REC_HEADER_STRUCTURE = numpy.dtype([
31 31 ('rmn', '<u4'),
32 32 ('rcounter', '<u4'),
33 33 ('nr_offset', '<u4'),
34 34 ('tr_offset', '<u4'),
35 35 ('time', '<u4'),
36 36 ('time_msec', '<u4'),
37 37 ('tag', 'u1', (32,)),
38 38 ('comments', 'u1', (32,)),
39 39 ('lat', '<f4'),
40 40 ('lon', '<f4'),
41 41 ('gps_status', '<u4'),
42 42 ('freq', '<u4'),
43 43 ('freq0', '<u4'),
44 44 ('nchan', '<u4'),
45 45 ('delta_r', '<u4'),
46 46 ('nranges', '<u4'),
47 47 ('r0', '<u4'),
48 48 ('prf', '<u4'),
49 49 ('ncoh', '<u4'),
50 50 ('npoints', '<u4'),
51 51 ('polarization', '<i4'),
52 52 ('rx_filter', '<u4'),
53 53 ('nmodes', '<u4'),
54 54 ('dmode_index', '<u4'),
55 55 ('dmode_rngcorr', '<u4'),
56 56 ('nrxs', '<u4'),
57 57 ('acf_length', '<u4'),
58 58 ('acf_lags', '<u4'),
59 59 ('sea_to_atmos', '<f4'),
60 60 ('sea_notch', '<u4'),
61 61 ('lh_sea', '<u4'),
62 62 ('hh_sea', '<u4'),
63 63 ('nbins_sea', '<u4'),
64 64 ('min_snr', '<f4'),
65 65 ('min_cc', '<f4'),
66 66 ('max_time_diff', '<f4')
67 67 ])
68 68
69 69 DATA_STRUCTURE = numpy.dtype([
70 70 ('range', '<u4'),
71 71 ('status', '<u4'),
72 72 ('zonal', '<f4'),
73 73 ('meridional', '<f4'),
74 74 ('vertical', '<f4'),
75 75 ('zonal_a', '<f4'),
76 76 ('meridional_a', '<f4'),
77 77 ('corrected_fading', '<f4'), # seconds
78 78 ('uncorrected_fading', '<f4'), # seconds
79 79 ('time_diff', '<f4'),
80 80 ('major_axis', '<f4'),
81 81 ('axial_ratio', '<f4'),
82 82 ('orientation', '<f4'),
83 83 ('sea_power', '<u4'),
84 84 ('sea_algorithm', '<u4')
85 85 ])
86 86
87 87
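# A minimal illustrative sketch of how the dtypes above map onto the .sswma layout:
# the 48-byte FILE_HEADER_STRUCTURE comes first, followed by data records that each
# start with a REC_HEADER_STRUCTURE (this is what readFirstHeader/readBlock below rely
# on). The helper name is hypothetical and not used anywhere else in the module.
def peek_sswma_header(filename):
    """Return (number of records, channel count of the first record) of an .sswma file."""
    with open(filename, 'rb') as fp:
        file_header = numpy.fromfile(fp, FILE_HEADER_STRUCTURE, 1)
        rec_header = numpy.fromfile(fp, REC_HEADER_STRUCTURE, 1)
        return file_header['nrec'][0], rec_header['nchan'][0]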
88 88 class BLTRParamReader(Reader, ProcessingUnit):
89 89 '''
90 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR
90 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR
91 91 from *.sswma files
92 92 '''
93 93
94 94 ext = '.sswma'
95 95
96 96 def __init__(self):
97 97
98 98 ProcessingUnit.__init__(self)
99 99
100 100 self.dataOut = Parameters()
101 101 self.dataOut.timezone = 300
102 102 self.counter_records = 0
103 103 self.flagNoMoreFiles = 0
104 104 self.isConfig = False
105 105 self.filename = None
106 106 self.status_value = 0
107 107 self.datatime = datetime.datetime(1900,1,1)
108 108 self.filefmt = "*********%Y%m%d******"
109 109
110 110 def setup(self, **kwargs):
111
111
112 112 self.set_kwargs(**kwargs)
113
113
114 114 if self.path is None:
115 115 raise ValueError("The path is not valid")
116 116
117 117 if self.online:
118 118 log.log("Searching files in online mode...", self.name)
119 119
120 120 for nTries in range(self.nTries):
121 121 fullpath = self.searchFilesOnLine(self.path, self.startDate,
122 self.endDate, self.expLabel, self.ext, self.walk,
122 self.endDate, self.expLabel, self.ext, self.walk,
123 123 self.filefmt, self.folderfmt)
124 124 try:
125 125 fullpath = next(fullpath)
126 126 except:
127 127 fullpath = None
128
128
129 129 if fullpath:
130 130 self.fileSize = os.path.getsize(fullpath)
131 131 self.filename = fullpath
132 132 self.flagIsNewFile = 1
133 133 if self.fp != None:
134 134 self.fp.close()
135 135 self.fp = self.open_file(fullpath, self.open_mode)
136 136 self.flagNoMoreFiles = 0
137 137 break
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 self.delay, self.path, nTries + 1),
141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
145 145 if not(fullpath):
146 146 raise schainpy.admin.SchainError(
147 'There isn\'t any valid file in {}'.format(self.path))
147 'There isn\'t any valid file in {}'.format(self.path))
148 148 self.readFirstHeader()
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153 self.setNextFile()
154 154
155 155 def checkForRealPath(self, nextFile, nextDay):
156 156 '''
157 157 '''
158 158
159 159 dt = self.datatime + datetime.timedelta(1)
160 160 filename = '{}.{}{}'.format(self.siteFile, dt.strftime('%Y%m%d'), self.ext)
161 161 fullfilename = os.path.join(self.path, filename)
162 162 if os.path.exists(fullfilename):
163 163 return fullfilename, filename
164 164 return None, filename
165
166
165
166
167 167 def readFirstHeader(self):
168 168 '''
169 169 '''
170 170
171 171 # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
172 172 self.siteFile = self.filename.split('/')[-1].split('.')[0]
173 173 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
174 174 self.nrecords = self.header_file['nrec'][0]
175 175 self.counter_records = 0
176 176 self.flagIsNewFile = 0
177 self.fileIndex += 1
177 self.fileIndex += 1
178 178
179 179 def readNextBlock(self):
180 180
181 181 while True:
182 182 if not self.online and self.counter_records == self.nrecords:
183 183 self.flagIsNewFile = 1
184 184 if not self.setNextFile():
185 185 return 0
186 186 try:
187 pointer = self.fp.tell()
187 if self.online and self.counter_records == 0:
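# First read in online mode: jump close to the live end of the growing file. The
# code treats each record as 38512 bytes (the same block size passed to
# waitDataBlock below) written after the 48-byte file header, hence the pointer
# arithmetic that follows.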
188 pos = int(self.fileSize / (38512))
189 self.counter_records = pos*2 - 2
190 pointer = 38512 * (pos-1) + 48
191 self.fp.seek(pointer)
192 else:
193 pointer = self.fp.tell()
188 194 self.readBlock()
189 195 except:
190 196 if self.online and self.waitDataBlock(pointer, 38512) == 1:
191 197 continue
192 198 else:
193 199 if not self.setNextFile():
194 200 return 0
195 201
196 202 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
197 203 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
198 204 log.warning(
199 205 'Reading Record No. {}/{} -> {} [Skipping]'.format(
200 206 self.counter_records,
201 207 self.nrecords,
202 208 self.datatime.ctime()),
203 209 'BLTRParamReader')
204 210 continue
205 211 break
206 212
207 213 log.log('Reading Record No. {} -> {}'.format(
208 214 self.counter_records,
209 215 self.datatime.ctime()), 'BLTRParamReader')
210 216
211 217 return 1
212 218
213 219 def readBlock(self):
214 220
215 221 pointer = self.fp.tell()
216 222 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
217 223 self.nchannels = int(header_rec['nchan'][0] / 2)
218 224 self.kchan = header_rec['nrxs'][0]
219 225 self.nmodes = header_rec['nmodes'][0]
220 226 self.nranges = header_rec['nranges'][0]
221 227 self.fp.seek(pointer)
222 228 self.height = numpy.empty((self.nmodes, self.nranges))
223 229 self.snr = numpy.empty((self.nmodes, int(self.nchannels), self.nranges))
224 230 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
225 231 self.flagDiscontinuousBlock = 0
226 232
227 233 for mode in range(self.nmodes):
228 234 self.readHeader()
229 235 data = self.readData()
230 236 self.height[mode] = (data[0] - self.correction) / 1000.
231 237 self.buffer[mode] = data[1]
232 238 self.snr[mode] = data[2]
233 239
234 240 self.counter_records = self.counter_records + self.nmodes
235 241
236 242 return
237 243
238 244 def readHeader(self):
239 245 '''
240 246 RecordHeader of BLTR rawdata file
241 247 '''
242 248
243 249 header_structure = numpy.dtype(
244 250 REC_HEADER_STRUCTURE.descr + [
245 251 ('antenna_coord', 'f4', (2, int(self.nchannels))),
246 252 ('rx_gains', 'u4', (int(self.nchannels),)),
247 253 ('rx_analysis', 'u4', (int(self.nchannels),))
248 254 ]
249 255 )
250 256
251 257 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
252 258 self.lat = self.header_rec['lat'][0]
253 259 self.lon = self.header_rec['lon'][0]
254 260 self.delta = self.header_rec['delta_r'][0]
255 261 self.correction = self.header_rec['dmode_rngcorr'][0]
256 262 self.imode = self.header_rec['dmode_index'][0]
257 263 self.antenna = self.header_rec['antenna_coord']
258 self.rx_gains = self.header_rec['rx_gains']
259 self.time = self.header_rec['time'][0]
264 self.rx_gains = self.header_rec['rx_gains']
265 self.time = self.header_rec['time'][0]
260 266 dt = datetime.datetime.utcfromtimestamp(self.time)
261 267 if dt.date()>self.datatime.date():
262 268 self.flagDiscontinuousBlock = 1
263 269 self.datatime = dt
264
270
265 271 def readData(self):
266 272 '''
267 Reading and filtering data block record of BLTR rawdata file,
273 Reading and filtering data block record of BLTR rawdata file,
268 274 filtering is according to status_value.
269 275
270 276 Input:
271 status_value - Array data is set to NAN for values that are not
277 status_value - Array data is set to NAN for values that are not
272 278 equal to status_value
273 279
274 280 '''
275 281 self.nchannels = int(self.nchannels)
276 282
277 283 data_structure = numpy.dtype(
278 284 DATA_STRUCTURE.descr + [
279 285 ('rx_saturation', 'u4', (self.nchannels,)),
280 286 ('chan_offset', 'u4', (2 * self.nchannels,)),
281 287 ('rx_amp', 'u4', (self.nchannels,)),
282 288 ('rx_snr', 'f4', (self.nchannels,)),
283 289 ('cross_snr', 'f4', (self.kchan,)),
284 290 ('sea_power_relative', 'f4', (self.kchan,))]
285 291 )
286 292
287 293 data = numpy.fromfile(self.fp, data_structure, self.nranges)
288 294
289 295 height = data['range']
290 296 winds = numpy.array(
291 297 (data['zonal'], data['meridional'], data['vertical']))
292 298 snr = data['rx_snr'].T
293 299
294 300 winds[numpy.where(winds == -9999.)] = numpy.nan
295 301 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
296 302 snr[numpy.where(snr == -9999.)] = numpy.nan
297 303 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
298 304 snr = numpy.power(10, snr / 10)
299 305
300 306 return height, winds, snr
301 307
302 308 def set_output(self):
303 309 '''
304 310 Storing data from databuffer to dataOut object
305 311 '''
306 312
307 313 self.dataOut.data_snr = self.snr
308 314 self.dataOut.height = self.height
309 315 self.dataOut.data = self.buffer
310 316 self.dataOut.utctimeInit = self.time
311 317 self.dataOut.utctime = self.dataOut.utctimeInit
312 318 self.dataOut.useLocalTime = False
313 319 self.dataOut.paramInterval = 157
314 320 self.dataOut.site = self.siteFile
315 321 self.dataOut.nrecords = self.nrecords / self.nmodes
316 322 self.dataOut.lat = self.lat
317 323 self.dataOut.lon = self.lon
318 324 self.dataOut.channelList = list(range(self.nchannels))
319 self.dataOut.kchan = self.kchan
325 self.dataOut.kchan = self.kchan
320 326 self.dataOut.delta = self.delta
321 327 self.dataOut.correction = self.correction
322 328 self.dataOut.nmodes = self.nmodes
323 329 self.dataOut.imode = self.imode
324 330 self.dataOut.antenna = self.antenna
325 331 self.dataOut.rx_gains = self.rx_gains
326 332 self.dataOut.flagNoData = False
327 333 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
328 334
329 335 def getData(self):
330 336 '''
331 337 Storing data from databuffer to dataOut object
332 338 '''
333 339 if self.flagNoMoreFiles:
334 340 self.dataOut.flagNoData = True
335 341 return 0
336 342
337 343 if not self.readNextBlock():
338 344 self.dataOut.flagNoData = True
339 345 return 0
340 346
341 347 self.set_output()
342 348
343 349 return 1
344
350
345 351 def run(self, **kwargs):
346 352 '''
347 353 '''
348 354
349 355 if not(self.isConfig):
350 356 self.setup(**kwargs)
351 357 self.isConfig = True
352 358
353 359 self.getData()
354 360
355 return No newline at end of file
361 return
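# A hedged standalone usage sketch for this reader: the path and dates are placeholders,
# and the keyword arguments simply populate the inherited Reader attributes (path,
# startDate, endDate, walk, online, ...) through set_kwargs().
if __name__ == '__main__':
    reader = BLTRParamReader()
    reader.run(path='/data/bltr',                  # hypothetical folder holding *.sswma files
               startDate=datetime.date(2020, 1, 1),
               endDate=datetime.date(2020, 1, 2),
               walk=False,
               online=False)
    if not reader.dataOut.flagNoData:
        print(reader.dataOut.site, reader.dataOut.data.shape)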
@@ -1,1614 +1,1618
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Checks whether the characters of a string can be converted to a number.
40 40
41 41 Exceptions:
42 42 If the given string cannot be converted to a number
43 43 Input:
44 44 str, the string to be analyzed to determine whether it is convertible to a number
45 45
46 46 Return:
47 47 True : if the string is numeric
48 48 False : if the string is not numeric
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59 This function determines whether or not a data file falls within the specified date range.
60 60
61 61 Inputs:
62 62 filename : full name of the data file in Jicamarca format (.r)
63 63
64 64 startUTSeconds : start date of the selected range, given in
65 65 seconds counted from 01/01/1970.
66 66 endUTSeconds : end date of the selected range, given in
67 67 seconds counted from 01/01/1970.
68 68
69 69 Return:
70 70 Boolean : returns True if the data file contains data within the specified
71 71 date range, otherwise returns False.
72 72
73 73 Exceptions:
74 74 If the file does not exist or cannot be opened
75 75 If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81
82 82 fp = open(filename, 'rb')
83 83 except IOError:
84 84 print("The file %s can't be opened" % (filename))
85 85 return 0
86 86
87 87 sts = basicHeaderObj.read(fp)
88 88 fp.close()
89 89
90 90 if not(sts):
91 91 print("Skipping the file %s because it does not have a valid header" % (filename))
92 92 return 0
93 93
94 94 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
95 95 return 0
96 96
97 97 return 1
98 98
99 99
100 100 def isTimeInRange(thisTime, startTime, endTime):
101 101 if endTime >= startTime:
102 102 if (thisTime < startTime) or (thisTime > endTime):
103 103 return 0
104 104 return 1
105 105 else:
106 106 if (thisTime < startTime) and (thisTime > endTime):
107 107 return 0
108 108 return 1
109 109
110 110
111 111 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
112 112 """
113 113 Returns 1 if the data file falls within the specified time range.
114 114
115 115 Inputs:
116 116 filename : full name of the data file in Jicamarca format (.r)
117 117
118 118 startDate : start date of the selected range as a datetime.date
119 119
120 120 endDate : end date of the selected range as a datetime.date
121 121
122 122 startTime : start time of the selected range as a datetime.time
123 123
124 124 endTime : end time of the selected range as a datetime.time
125 125
126 126 Return:
127 127 Boolean : returns True if the data file contains data within the specified
128 128 range, otherwise returns False.
129 129
130 130 Exceptions:
131 131 If the file does not exist or cannot be opened
132 132 If the header cannot be read.
133 133
134 134 """
135 135
136 136 try:
137 137 fp = open(filename, 'rb')
138 138 except IOError:
139 139 print("The file %s can't be opened" % (filename))
140 140 return None
141 141
142 142 firstBasicHeaderObj = BasicHeader(LOCALTIME)
143 143 systemHeaderObj = SystemHeader()
144 144
145 145 radarControllerHeaderObj = RadarControllerHeader()
146 146 processingHeaderObj = ProcessingHeader()
147 147
148 148 lastBasicHeaderObj = BasicHeader(LOCALTIME)
149 149
150 150 sts = firstBasicHeaderObj.read(fp)
151 151
152 152 if not(sts):
153 153 print("[Reading] Skipping the file %s because it does not have a valid header" % (filename))
154 154 return None
155 155
156 156 if not systemHeaderObj.read(fp):
157 157 return None
158 158
159 159 if not radarControllerHeaderObj.read(fp):
160 160 return None
161 161
162 162 if not processingHeaderObj.read(fp):
163 163 return None
164 164
165 165 filesize = os.path.getsize(filename)
166 166
167 167 offset = processingHeaderObj.blockSize + 24 # header size
168 168
169 169 if filesize <= offset:
170 170 print("[Reading] %s: This file does not have enough data" % filename)
171 171 return None
172 172
173 173 fp.seek(-offset, 2)
174 174
175 175 sts = lastBasicHeaderObj.read(fp)
176 176
177 177 fp.close()
178 178
179 179 thisDatetime = lastBasicHeaderObj.datatime
180 180 thisTime_last_block = thisDatetime.time()
181 181
182 182 thisDatetime = firstBasicHeaderObj.datatime
183 183 thisDate = thisDatetime.date()
184 184 thisTime_first_block = thisDatetime.time()
185 185
186 186 # General case
187 187 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
188 188 #-----------o----------------------------o-----------
189 189 # startTime endTime
190 190
191 191 if endTime >= startTime:
192 192 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
193 193 return None
194 194
195 195 return thisDatetime
196 196
197 197 # If endTime < startTime then endTime belongs to the next day
198 198
199 199 #<<<<<<<<<<<o o>>>>>>>>>>>
200 200 #-----------o----------------------------o-----------
201 201 # endTime startTime
202 202
203 203 if (thisDate == startDate) and (thisTime_last_block < startTime):
204 204 return None
205 205
206 206 if (thisDate == endDate) and (thisTime_first_block > endTime):
207 207 return None
208 208
209 209 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
210 210 return None
211 211
212 212 return thisDatetime
213 213
214 214
215 215 def isFolderInDateRange(folder, startDate=None, endDate=None):
216 216 """
217 217 Returns 1 if the data folder falls within the specified date range.
218 218
219 219 Inputs:
220 220 folder : full name of the directory.
221 221 Its format should be "/path_root/?YYYYDDD"
222 222
223 223 where:
224 224 YYYY : year (e.g. 2015)
225 225 DDD : day of the year (e.g. 305)
226 226
227 227 startDate : start date of the selected range as a datetime.date
228 228
229 229 endDate : end date of the selected range as a datetime.date
230 230
231 231 Return:
232 232 Boolean : returns True if the folder contains data within the specified
233 233 date range, otherwise returns False.
234 234 Exceptions:
235 235 If the directory does not have the expected format
236 236 """
237 237
238 238 basename = os.path.basename(folder)
239 239
240 240 if not isRadarFolder(basename):
241 241 print("The folder %s does not have the right format" % folder)
242 242 return 0
243 243
244 244 if startDate and endDate:
245 245 thisDate = getDateFromRadarFolder(basename)
246 246
247 247 if thisDate < startDate:
248 248 return 0
249 249
250 250 if thisDate > endDate:
251 251 return 0
252 252
253 253 return 1
254 254
255 255
256 256 def isFileInDateRange(filename, startDate=None, endDate=None):
257 257 """
258 258 Returns 1 if the data file falls within the specified date range.
259 259
260 260 Inputs:
261 261 filename : full name of the data file in Jicamarca format (.r)
262 262
263 263 Its format should be "?YYYYDDDsss"
264 264
265 265 where:
266 266 YYYY : year (e.g. 2015)
267 267 DDD : day of the year (e.g. 305)
268 268 sss : set
269 269
270 270 startDate : start date of the selected range as a datetime.date
271 271
272 272 endDate : end date of the selected range as a datetime.date
273 273
274 274 Return:
275 275 Boolean : returns True if the data file contains data within the specified
276 276 date range, otherwise returns False.
277 277 Exceptions:
278 278 If the file does not have the expected format
279 279 """
280 280
281 281 basename = os.path.basename(filename)
282 282
283 283 if not isRadarFile(basename):
284 284 print("The filename %s does not have the right format" % filename)
285 285 return 0
286 286
287 287 if startDate and endDate:
288 288 thisDate = getDateFromRadarFile(basename)
289 289
290 290 if thisDate < startDate:
291 291 return 0
292 292
293 293 if thisDate > endDate:
294 294 return 0
295 295
296 296 return 1
297 297
298 298
299 299 def getFileFromSet(path, ext, set):
300 300 validFilelist = []
301 301 fileList = os.listdir(path)
302 302
303 303 # 0 1234 567 89A BCDE
304 304 # H YYYY DDD SSS .ext
305 305
306 306 for thisFile in fileList:
307 307 try:
308 308 year = int(thisFile[1:5])
309 309 doy = int(thisFile[5:8])
310 310 except:
311 311 continue
312 312
313 313 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
314 314 continue
315 315
316 316 validFilelist.append(thisFile)
317 317
318 318 myfile = fnmatch.filter(
319 319 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
320 320
321 321 if len(myfile) != 0:
322 322 return myfile[0]
323 323 else:
324 324 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
325 325 print('the filename %s does not exist' % filename)
326 326 print('...going to the last file: ')
327 327
328 328 if validFilelist:
329 329 validFilelist = sorted(validFilelist, key=str.lower)
330 330 return validFilelist[-1]
331 331
332 332 return None
333 333
334 334
335 335 def getlastFileFromPath(path, ext):
336 336 """
337 337 Filters fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
338 338 and returns the last file of the remaining list.
339 339
340 340 Input:
341 341 fileList : list containing all the files (without path) of a given folder
342 342 ext : extension of the files contained in the folder
343 343
344 344 Return:
345 345 The last file of the given folder; the path is not included.
346 346 """
347 347 validFilelist = []
348 348 fileList = os.listdir(path)
349 349
350 350 # 0 1234 567 89A BCDE
351 351 # H YYYY DDD SSS .ext
352 352
353 353 for thisFile in fileList:
354 354
355 355 year = thisFile[1:5]
356 356 if not isNumber(year):
357 357 continue
358 358
359 359 doy = thisFile[5:8]
360 360 if not isNumber(doy):
361 361 continue
362 362
363 363 year = int(year)
364 364 doy = int(doy)
365 365
366 366 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
367 367 continue
368 368
369 369 validFilelist.append(thisFile)
370 370
371 371 if validFilelist:
372 372 validFilelist = sorted(validFilelist, key=str.lower)
373 373 return validFilelist[-1]
374 374
375 375 return None
376 376
377 377
378 378 def isRadarFolder(folder):
379 379 try:
380 380 year = int(folder[1:5])
381 381 doy = int(folder[5:8])
382 382 except:
383 383 return 0
384 384
385 385 return 1
386 386
387 387
388 388 def isRadarFile(file):
389 389 try:
390 390 year = int(file[1:5])
391 391 doy = int(file[5:8])
392 392 set = int(file[8:11])
393 393 except:
394 394 return 0
395 395
396 396 return 1
397 397
398 398
399 399 def getDateFromRadarFile(file):
400 400 try:
401 401 year = int(file[1:5])
402 402 doy = int(file[5:8])
403 403 set = int(file[8:11])
404 404 except:
405 405 return None
406 406
407 407 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
408 408 return thisDate
409 409
410 410
411 411 def getDateFromRadarFolder(folder):
412 412 try:
413 413 year = int(folder[1:5])
414 414 doy = int(folder[5:8])
415 415 except:
416 416 return None
417 417
418 418 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
419 419 return thisDate
420 420
421 421 def parse_format(s, fmt):
422 422
423 423 for i in range(fmt.count('%')):
424 424 x = fmt.index('%')
425 425 d = DT_DIRECTIVES[fmt[x:x+2]]
426 426 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
427 427 return fmt
428 428
429 429 class Reader(object):
430 430
431 431 c = 3E8
432 432 isConfig = False
433 433 dtype = None
434 434 pathList = []
435 435 filenameList = []
436 436 datetimeList = []
437 437 filename = None
438 438 ext = None
439 439 flagIsNewFile = 1
440 440 flagDiscontinuousBlock = 0
441 441 flagIsNewBlock = 0
442 442 flagNoMoreFiles = 0
443 443 fp = None
444 444 firstHeaderSize = 0
445 445 basicHeaderSize = 24
446 446 versionFile = 1103
447 447 fileSize = None
448 448 fileSizeByHeader = None
449 449 fileIndex = -1
450 450 profileIndex = None
451 451 blockIndex = 0
452 452 nTotalBlocks = 0
453 453 maxTimeStep = 30
454 454 lastUTTime = None
455 455 datablock = None
456 456 dataOut = None
457 457 getByBlock = False
458 458 path = None
459 459 startDate = None
460 460 endDate = None
461 461 startTime = datetime.time(0, 0, 0)
462 462 endTime = datetime.time(23, 59, 59)
463 463 set = None
464 464 expLabel = ""
465 465 online = False
466 466 delay = 60
467 467 nTries = 3 # number of tries
468 468 nFiles = 3 # number of files for searching
469 469 walk = True
470 470 getblock = False
471 471 nTxs = 1
472 472 realtime = False
473 473 blocksize = 0
474 474 blocktime = None
475 475 warnings = True
476 476 verbose = True
477 477 server = None
478 topic = None
478 479 format = None
479 480 oneDDict = None
480 481 twoDDict = None
481 482 independentParam = None
482 483 filefmt = None
483 484 folderfmt = None
484 485 open_file = open
485 486 open_mode = 'rb'
486 487
487 488 def run(self):
488 489
489 490 raise NotImplementedError
490 491
491 492 def getAllowedArgs(self):
492 493 if hasattr(self, '__attrs__'):
493 494 return self.__attrs__
494 495 else:
495 496 return inspect.getargspec(self.run).args
496 497
497 498 def set_kwargs(self, **kwargs):
498 499
499 500 for key, value in kwargs.items():
500 501 setattr(self, key, value)
501 502
502 503 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
503 504
504 505 folders = [x for f in path.split(',')
505 506 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
506 507 folders.sort()
507 508
508 509 if last:
509 510 folders = [folders[-1]]
510 511
511 512 for folder in folders:
512 513 try:
513 514 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
514 515 if dt >= startDate and dt <= endDate:
515 516 yield os.path.join(path, folder)
516 517 else:
517 518 log.log('Skipping folder {}'.format(folder), self.name)
518 519 except Exception as e:
519 520 log.log('Skipping folder {}'.format(folder), self.name)
520 521 continue
521 522 return
522 523
523 524 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
524 525 expLabel='', last=False):
525 526 for path in folders:
526 527 files = glob.glob1(path+'/'+expLabel, '*{}'.format(ext))
527 528 files.sort()
528 529 if last:
529 530 if files:
530 531 fo = files[-1]
531 532 try:
532 533 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
533 534 yield os.path.join(path, expLabel, fo)
534 535 except Exception as e:
535 536 pass
536 537 return
537 538 else:
538 539 return
539 540
540 541 for fo in files:
541 542 try:
542 543 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
543 544 #print(dt)
544 545 #print(startDate)
545 546 #print(endDate)
546 547 if dt >= startDate and dt <= endDate:
547 548
548 549 yield os.path.join(path, expLabel, fo)
549 550
550 551 else:
551 552
552 553 log.log('Skipping file {}'.format(fo), self.name)
553 554 except Exception as e:
554 555 log.log('Skipping file {}'.format(fo), self.name)
555 556 continue
556 557
557 558 def searchFilesOffLine(self, path, startDate, endDate,
558 559 expLabel, ext, walk,
559 560 filefmt, folderfmt):
560 561 """Search files in offline mode for the given arguments
561 562
562 563 Return:
563 564 Generator of files
564 565 """
565 566
566 567 if walk:
567 568 folders = self.find_folders(
568 569 path, startDate, endDate, folderfmt)
569 570 #print("folders: ", folders)
570 571 else:
571 572 folders = path.split(',')
572 573
573 574 return self.find_files(
574 575 folders, ext, filefmt, startDate, endDate, expLabel)
575 576
576 577 def searchFilesOnLine(self, path, startDate, endDate,
577 578 expLabel, ext, walk,
578 579 filefmt, folderfmt):
579 580 """Search for the last file of the last folder
580 581
581 582 Arguments:
582 583 path : folder that contains the data files
583 584 expLabel : name of the sub-experiment (subfolder)
584 585 ext : extension of the files
585 586 walk : if enabled, the search is done inside the day subdirectories (doypath)
586 587
587 588 Return:
588 589 generator with the full path of last filename
589 590 """
590 591
591 592 if walk:
592 593 folders = self.find_folders(
593 594 path, startDate, endDate, folderfmt, last=True)
594 595 else:
595 596 folders = path.split(',')
596 597
597 598 return self.find_files(
598 599 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
599 600
600 601 def setNextFile(self):
601 602 """Set the next file to be readed open it and parse de file header"""
602 603
603 604 #print("fp: ",self.fp)
604 605 while True:
605 606
606 607 #print(self.fp)
607 608 if self.fp != None:
608 609 self.fp.close()
609 610
610 611 #print("setNextFile")
611 612 #print("BEFORE OPENING",self.filename)
612 613 if self.online:
613 614 newFile = self.setNextFileOnline()
614 615
615 616 else:
616 617
617 618 newFile = self.setNextFileOffline()
618 619
619 620 #print("newFile: ",newFile)
620 621 if not(newFile):
621 622
622 623 if self.online:
623 624 raise schainpy.admin.SchainError('Time to wait for new files reached')
624 625 else:
625 626 if self.fileIndex == -1:
626 627 #print("OKK")
627 628 raise schainpy.admin.SchainWarning('No files found in the given path')
628 629 else:
629 630
630 631 raise schainpy.admin.SchainWarning('No more files to read')
631 632
632 633 if self.verifyFile(self.filename):
633 634
634 635 break
635 636
636 637 ##print("BEFORE OPENING",self.filename)
637 638
638 639 log.log('Opening file: %s' % self.filename, self.name)
639 640
640 641 self.readFirstHeader()
641 642 self.nReadBlocks = 0
642 643
643 644 def setNextFileOnline(self):
644 645 """Check for the next file to be readed in online mode.
645 646
646 647 Set:
647 648 self.filename
648 649 self.fp
649 650 self.filesize
650 651
651 652 Return:
652 653 boolean
653 654
654 655 """
655 656
656 657 nextFile = True
657 658 nextDay = False
658 659
659 660 for nFiles in range(self.nFiles+1):
660 661 for nTries in range(self.nTries):
661 662 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
662 663 if fullfilename is not None:
663 664 break
664 665 log.warning(
665 666 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
666 667 self.name)
667 668 time.sleep(self.delay)
668 669 nextFile = False
669 670 continue
670 671
671 672 if fullfilename is not None:
672 673 break
673 674
674 675 #self.nTries = 1
675 676 nextFile = True
676 677
677 678 if nFiles == (self.nFiles - 1):
678 679 log.log('Trying with next day...', self.name)
679 680 nextDay = True
680 681 self.nTries = 3
681 682
682 683 if fullfilename:
683 684 self.fileSize = os.path.getsize(fullfilename)
684 685 self.filename = fullfilename
685 686 self.flagIsNewFile = 1
686 687 if self.fp != None:
687 688 self.fp.close()
688 689 #print(fullfilename)
689 690 self.fp = self.open_file(fullfilename, self.open_mode)
690 691
691 692 self.flagNoMoreFiles = 0
692 693 self.fileIndex += 1
693 694 return 1
694 695 else:
695 696 return 0
696 697
697 698 def setNextFileOffline(self):
698 699 """Open the next file to be readed in offline mode"""
699 700
700 701 try:
701 702 filename = next(self.filenameList)
702 703 self.fileIndex +=1
703 704 except StopIteration:
704 705 self.flagNoMoreFiles = 1
705 706 return 0
706 707 #print(self.fileIndex)
707 708 #print(filename)
708 709 self.filename = filename
709 710 self.fileSize = os.path.getsize(filename)
710 711 self.fp = self.open_file(filename, self.open_mode)
711 712 self.flagIsNewFile = 1
712 713
713 714 return 1
714 715
715 716 @staticmethod
716 717 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
717 718 """Check if the given datetime is in range"""
718 719
719 720 if startDate <= dt.date() <= endDate:
720 721 if startTime <= dt.time() <= endTime:
721 722 return True
722 723 return False
723 724
724 725 def verifyFile(self, filename):
725 726 """Check for a valid file
726 727
727 728 Arguments:
728 729 filename -- full path filename
729 730
730 731 Return:
731 732 boolean
732 733 """
733 734
734 735 return True
735 736
736 737 def checkForRealPath(self, nextFile, nextDay):
737 738 """Check if the next file to be readed exists"""
738 739
739 740 raise NotImplementedError
740 741
741 742 def readFirstHeader(self):
742 743 """Parse the file header"""
743 744
744 745
745 746 pass
746 747
747 748 def waitDataBlock(self, pointer_location, blocksize=None):
748 749 """
749 750 """
750 751
751 752 currentPointer = pointer_location
752 753 if blocksize is None:
753 754 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
754 755 else:
755 756 neededSize = blocksize
756 757
757 758 for nTries in range(self.nTries):
758 759 self.fp.close()
759 760 self.fp = open(self.filename, 'rb')
760 761 self.fp.seek(currentPointer)
761 762
762 763 self.fileSize = os.path.getsize(self.filename)
763 764 currentSize = self.fileSize - currentPointer
764 765
765 766 if (currentSize >= neededSize):
766 767 return 1
767 768
768 769 log.warning(
769 770 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
770 771 self.name
771 772 )
772 773 time.sleep(self.delay)
773 774
774 775 return 0
775 776
776 777 class JRODataReader(Reader):
777 778
778 779 utc = 0
779 780 nReadBlocks = 0
780 781 foldercounter = 0
781 782 firstHeaderSize = 0
782 783 basicHeaderSize = 24
783 784 __isFirstTimeOnline = 1
785 topic = ''
784 786 filefmt = "*%Y%j***"
785 787 folderfmt = "*%Y%j"
786 788 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
787 789
788 790 def getDtypeWidth(self):
789 791
790 792 dtype_index = get_dtype_index(self.dtype)
791 793 dtype_width = get_dtype_width(dtype_index)
792 794
793 795 return dtype_width
794 796
795 797 def checkForRealPath(self, nextFile, nextDay):
796 798 """Check if the next file to be readed exists.
797 799
798 800 Example :
799 801 the correct file name is .../.../D2009307/P2009307367.ext
800 802
801 803 The function then tries the following combinations
802 804 .../.../y2009307367.ext
803 805 .../.../Y2009307367.ext
804 806 .../.../x2009307/y2009307367.ext
805 807 .../.../x2009307/Y2009307367.ext
806 808 .../.../X2009307/y2009307367.ext
807 809 .../.../X2009307/Y2009307367.ext
808 810 in this case the last letter combination is identical to the file being searched for
809 811
810 812 Return:
811 813 str -- fullpath of the file
812 814 """
813 815
814 816
815 817 if nextFile:
816 818 self.set += 1
817 819 if nextDay:
818 820 self.set = 0
819 821 self.doy += 1
820 822 foldercounter = 0
821 823 prefixDirList = [None, 'd', 'D']
822 824 if self.ext.lower() == ".r": # voltage
823 825 prefixFileList = ['d', 'D']
824 826 elif self.ext.lower() == ".pdata": # spectra
825 827 prefixFileList = ['p', 'P']
826 828
827 829 ##############DP##############
828 830
829 831 elif self.ext.lower() == ".dat": # dat
830 832 prefixFileList = ['z', 'Z']
831 833
832 834
833 835
834 836 ##############DP##############
835 837 # sweep over the possible combinations
836 838 for prefixDir in prefixDirList:
837 839 thispath = self.path
838 840 if prefixDir != None:
839 841 # build the directory name xYYYYDDD (x=d or x=D)
840 842 if foldercounter == 0:
841 843 thispath = os.path.join(self.path, "%s%04d%03d" %
842 844 (prefixDir, self.year, self.doy))
843 845 else:
844 846 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
845 847 prefixDir, self.year, self.doy, foldercounter))
846 848 for prefixFile in prefixFileList: # sweep over the two possible combinations of "D"
847 849 # build the file name xYYYYDDDSSS.ext
848 850 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
849 851 fullfilename = os.path.join(
850 852 thispath, filename)
851 853
852 854 if os.path.exists(fullfilename):
853 855 return fullfilename, filename
854 856
855 857 return None, filename
856 858
857 859 def __waitNewBlock(self):
858 860 """
859 861 Returns 1 if a new data block was found, 0 otherwise.
860 862
861 863 If the reading mode is offline it always returns 0
862 864 """
863 865 if not self.online:
864 866 return 0
865 867
866 868 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
867 869 return 0
868 870
869 871 currentPointer = self.fp.tell()
870 872
871 873 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
872 874
873 875 for nTries in range(self.nTries):
874 876
875 877 self.fp.close()
876 878 self.fp = open(self.filename, 'rb')
877 879 self.fp.seek(currentPointer)
878 880
879 881 self.fileSize = os.path.getsize(self.filename)
880 882 currentSize = self.fileSize - currentPointer
881 883
882 884 if (currentSize >= neededSize):
883 885 self.basicHeaderObj.read(self.fp)
884 886 return 1
885 887
886 888 if self.fileSize == self.fileSizeByHeader:
887 889 # self.flagEoF = True
888 890 return 0
889 891
890 892 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
891 893 #print(self.filename)
892 894 time.sleep(self.delay)
893 895
894 896 return 0
895 897
896 898 def __setNewBlock(self):
897 899
898 900 if self.fp == None:
899 901 return 0
900 902
901 903 if self.flagIsNewFile:
902 904 self.lastUTTime = self.basicHeaderObj.utc
903 905 return 1
904 906
905 907 if self.realtime:
906 908 self.flagDiscontinuousBlock = 1
907 909 if not(self.setNextFile()):
908 910 return 0
909 911 else:
910 912 return 1
911 913
912 914 currentSize = self.fileSize - self.fp.tell()
913 915 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
914 916
915 917 if (currentSize >= neededSize):
916 918 self.basicHeaderObj.read(self.fp)
917 919 self.lastUTTime = self.basicHeaderObj.utc
918 920 return 1
919 921
920 922 if self.__waitNewBlock():
921 923 self.lastUTTime = self.basicHeaderObj.utc
922 924 return 1
923 925
924 926 if not(self.setNextFile()):
925 927 return 0
926 928
927 929 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
928 930 self.lastUTTime = self.basicHeaderObj.utc
929 931
930 932 self.flagDiscontinuousBlock = 0
931 933 if deltaTime > self.maxTimeStep:
932 934 self.flagDiscontinuousBlock = 1
933 935
934 936 return 1
935 937
936 938 def readNextBlock(self):
937 939
938 940 while True:
939 941 if not(self.__setNewBlock()):
940 942 continue
941 943
942 944 if not(self.readBlock()):
943 945 return 0
944 946
945 947 self.getBasicHeader()
946 948
947 949 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
948 950 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
949 951 self.processingHeaderObj.dataBlocksPerFile,
950 952 self.dataOut.datatime.ctime()))
951 953 continue
952 954
953 955 break
954 956
955 957 if self.verbose:
956 958 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
957 959 self.processingHeaderObj.dataBlocksPerFile,
958 960 self.dataOut.datatime.ctime()))
959 961 #################DP#################
960 962 self.dataOut.TimeBlockDate=self.dataOut.datatime.ctime()
961 963 self.dataOut.TimeBlockSeconds=time.mktime(time.strptime(self.dataOut.datatime.ctime()))
962 964 #################DP#################
963 965 return 1
964 966
965 967 def readFirstHeader(self):
966 968
967 969 self.basicHeaderObj.read(self.fp)
968 970 self.systemHeaderObj.read(self.fp)
969 971 self.radarControllerHeaderObj.read(self.fp)
970 972 self.processingHeaderObj.read(self.fp)
971 973 self.firstHeaderSize = self.basicHeaderObj.size
972 974
973 975 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
974 976 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
975 977 if datatype == 0:
976 978 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
977 979 elif datatype == 1:
978 980 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
979 981 elif datatype == 2:
980 982 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
981 983 elif datatype == 3:
982 984 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
983 985 elif datatype == 4:
984 986 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
985 987 elif datatype == 5:
986 988 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
987 989 else:
988 990 raise ValueError('Data type was not defined')
989 991
990 992 self.dtype = datatype_str
991 993 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
992 994 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
993 995 self.firstHeaderSize + self.basicHeaderSize * \
994 996 (self.processingHeaderObj.dataBlocksPerFile - 1)
995 997 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
996 998 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
997 999 self.getBlockDimension()
998 1000
999 1001 def verifyFile(self, filename):
1000 1002
1001 1003 flag = True
1002 1004
1003 1005 try:
1004 1006 fp = open(filename, 'rb')
1005 1007 except IOError:
1006 1008 log.error("File {} can't be opened".format(filename), self.name)
1007 1009 return False
1008 1010
1009 1011 if self.online and self.waitDataBlock(0):
1010 1012 pass
1011 1013
1012 1014 basicHeaderObj = BasicHeader(LOCALTIME)
1013 1015 systemHeaderObj = SystemHeader()
1014 1016 radarControllerHeaderObj = RadarControllerHeader()
1015 1017 processingHeaderObj = ProcessingHeader()
1016 1018
1017 1019 if not(basicHeaderObj.read(fp)):
1018 1020 flag = False
1019 1021 if not(systemHeaderObj.read(fp)):
1020 1022 flag = False
1021 1023 if not(radarControllerHeaderObj.read(fp)):
1022 1024 flag = False
1023 1025 if not(processingHeaderObj.read(fp)):
1024 1026 flag = False
1025 1027 if not self.online:
1026 1028 dt1 = basicHeaderObj.datatime
1027 1029 pos = self.fileSize-processingHeaderObj.blockSize-24
1028 1030 if pos<0:
1029 1031 flag = False
1030 1032 log.error('Invalid size for file: {}'.format(self.filename), self.name)
1031 1033 else:
1032 1034 fp.seek(pos)
1033 1035 if not(basicHeaderObj.read(fp)):
1034 1036 flag = False
1035 1037 dt2 = basicHeaderObj.datatime
1036 1038 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
1037 1039 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
1038 1040 flag = False
1039 1041
1040 1042 fp.close()
1041 1043 return flag
1042 1044
1043 1045 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1044 1046
1045 1047 path_empty = True
1046 1048
1047 1049 dateList = []
1048 1050 pathList = []
1049 1051
1050 1052 multi_path = path.split(',')
1051 1053
1052 1054 if not walk:
1053 1055
1054 1056 for single_path in multi_path:
1055 1057
1056 1058 if not os.path.isdir(single_path):
1057 1059 continue
1058 1060
1059 1061 fileList = glob.glob1(single_path, "*" + ext)
1060 1062
1061 1063 if not fileList:
1062 1064 continue
1063 1065
1064 1066 path_empty = False
1065 1067
1066 1068 fileList.sort()
1067 1069
1068 1070 for thisFile in fileList:
1069 1071
1070 1072 if not os.path.isfile(os.path.join(single_path, thisFile)):
1071 1073 continue
1072 1074
1073 1075 if not isRadarFile(thisFile):
1074 1076 continue
1075 1077
1076 1078 if not isFileInDateRange(thisFile, startDate, endDate):
1077 1079 continue
1078 1080
1079 1081 thisDate = getDateFromRadarFile(thisFile)
1080 1082
1081 1083 if thisDate in dateList or single_path in pathList:
1082 1084 continue
1083 1085
1084 1086 dateList.append(thisDate)
1085 1087 pathList.append(single_path)
1086 1088
1087 1089 else:
1088 1090 for single_path in multi_path:
1089 1091
1090 1092 if not os.path.isdir(single_path):
1091 1093 continue
1092 1094
1093 1095 dirList = []
1094 1096
1095 1097 for thisPath in os.listdir(single_path):
1096 1098
1097 1099 if not os.path.isdir(os.path.join(single_path, thisPath)):
1098 1100 continue
1099 1101
1100 1102 if not isRadarFolder(thisPath):
1101 1103 continue
1102 1104
1103 1105 if not isFolderInDateRange(thisPath, startDate, endDate):
1104 1106 continue
1105 1107
1106 1108 dirList.append(thisPath)
1107 1109
1108 1110 if not dirList:
1109 1111 continue
1110 1112
1111 1113 dirList.sort()
1112 1114
1113 1115 for thisDir in dirList:
1114 1116
1115 1117 datapath = os.path.join(single_path, thisDir, expLabel)
1116 1118 fileList = glob.glob1(datapath, "*" + ext)
1117 1119
1118 1120 if not fileList:
1119 1121 continue
1120 1122
1121 1123 path_empty = False
1122 1124
1123 1125 thisDate = getDateFromRadarFolder(thisDir)
1124 1126
1125 1127 pathList.append(datapath)
1126 1128 dateList.append(thisDate)
1127 1129
1128 1130 dateList.sort()
1129 1131
1130 1132 if walk:
1131 1133 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1132 1134 else:
1133 1135 pattern_path = multi_path[0]
1134 1136
1135 1137 if path_empty:
1136 1138 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1137 1139 else:
1138 1140 if not dateList:
1139 1141 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1140 1142
1141 1143 if include_path:
1142 1144 return dateList, pathList
1143 1145
1144 1146 return dateList
1145 1147
1146 1148 def setup(self, **kwargs):
1147 1149
1148 1150 self.set_kwargs(**kwargs)
1149 1151 if not self.ext.startswith('.'):
1150 1152 self.ext = '.{}'.format(self.ext)
1151 1153
1152 1154 if self.server is not None:
1153 1155 if 'tcp://' in self.server:
1154 address = server
1156 address = self.server
1155 1157 else:
1156 1158 address = 'ipc:///tmp/%s' % self.server
1157 1159 self.server = address
1158 1160 self.context = zmq.Context()
1159 self.receiver = self.context.socket(zmq.PULL)
1161 self.receiver = self.context.socket(zmq.SUB)
1160 1162 self.receiver.connect(self.server)
1163 self.receiver.setsockopt(zmq.SUBSCRIBE, str.encode(str(self.topic)))
1161 1164 time.sleep(0.5)
1162 1165 print('[Starting] ReceiverData from {}'.format(self.server))
1163 1166 else:
1164 1167 self.server = None
1165 1168 if self.path == None:
1166 1169 raise ValueError("[Reading] The path is not valid")
1167 1170
1168 1171 if self.online:
1169 1172 log.log("[Reading] Searching files in online mode...", self.name)
1170 1173
1171 1174 for nTries in range(self.nTries):
1172 1175 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1173 1176 self.endDate, self.expLabel, self.ext, self.walk,
1174 1177 self.filefmt, self.folderfmt)
1175 1178
1176 1179 try:
1177 1180 fullpath = next(fullpath)
1178 1181 except:
1179 1182 fullpath = None
1180 1183
1181 1184 if fullpath:
1182 1185 break
1183 1186
1184 1187 log.warning(
1185 1188 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1186 1189 self.delay, self.path, nTries + 1),
1187 1190 self.name)
1188 1191 time.sleep(self.delay)
1189 1192
1190 1193 if not(fullpath):
1191 1194 raise schainpy.admin.SchainError(
1192 1195 'There isn\'t any valid file in {}'.format(self.path))
1193 1196
1194 1197 pathname, filename = os.path.split(fullpath)
1195 1198 self.year = int(filename[1:5])
1196 1199 self.doy = int(filename[5:8])
1197 1200 self.set = int(filename[8:11]) - 1
1198 1201 else:
1199 1202 log.log("Searching files in {}".format(self.path), self.name)
1200 1203 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1201 1204 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1202 1205
1203 1206 self.setNextFile()
1204 1207
1205 1208 return
1206 1209
1207 1210 def getBasicHeader(self):
1208 1211
1209 1212 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1210 1213 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1211 1214
1212 1215 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1213 1216
1214 1217 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1215 1218
1216 1219 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1217 1220
1218 1221 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1219 1222
1220 1223 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1221 1224
1222 1225 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1223 1226
1224 1227 def getFirstHeader(self):
1225 1228
1226 1229 raise NotImplementedError
1227 1230
1228 1231 def getData(self):
1229 1232
1230 1233 raise NotImplementedError
1231 1234
1232 1235 def hasNotDataInBuffer(self):
1233 1236
1234 1237 raise NotImplementedError
1235 1238
1236 1239 def readBlock(self):
1237 1240
1238 1241 raise NotImplementedError
1239 1242
1240 1243 def isEndProcess(self):
1241 1244
1242 1245 return self.flagNoMoreFiles
1243 1246
1244 1247 def printReadBlocks(self):
1245 1248
1246 1249 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1247 1250
1248 1251 def printTotalBlocks(self):
1249 1252
1250 1253 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1251 1254
1252 1255 def run(self, **kwargs):
1253 1256 """
1254 1257
1255 1258 Arguments:
1256 1259 path :
1257 1260 startDate :
1258 1261 endDate :
1259 1262 startTime :
1260 1263 endTime :
1261 1264 set :
1262 1265 expLabel :
1263 1266 ext :
1264 1267 online :
1265 1268 delay :
1266 1269 walk :
1267 1270 getblock :
1268 1271 nTxs :
1269 1272 realtime :
1270 1273 blocksize :
1271 1274 blocktime :
1272 1275 skip :
1273 1276 cursor :
1274 1277 warnings :
1275 1278 server :
1276 1279 verbose :
1277 1280 format :
1278 1281 oneDDict :
1279 1282 twoDDict :
1280 1283 independentParam :
1281 1284 """
1282 1285
1283 1286 if not(self.isConfig):
1284 1287 self.setup(**kwargs)
1285 1288 self.isConfig = True
1286 1289 if self.server is None:
1287 1290 self.getData()
1288 1291 else:
1289 self.getFromServer()
1292 try:
1293 self.getFromServer()
1294 except Exception as e:
1295 log.warning('Invalid block...')
1296 self.dataOut.flagNoData = True
1290 1297
1291 1298
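The run() argument list above is only an enumeration of keyword names; as a rough orientation, a reader derived from this class is normally wired through a Project read unit, in the same style as the HDFReader example later in this changeset. This is a hedged sketch: the paths, dates and the commented server address are placeholders and `project` stands for an existing Project instance, not values taken from this revision.

# Hypothetical read unit; kwargs mirror the run() argument list above
reader = project.addReadUnit(
    name='VoltageReader',            # any JRODataReader subclass
    path='/data/raw',                # placeholder data directory
    startDate='2020/01/01', endDate='2020/01/02',
    startTime='00:00:00', endTime='23:59:59',
    online=0, walk=1, delay=30,
    # server='tcp://localhost:5555', # would take the ZMQ SUB branch in setup()
    # topic='voltage',               # subscription topic used by the SUB socket
)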
1292 1299 class JRODataWriter(Reader):
1293 1300
1294 1301 """
1295 1302 This class allows writing processed data files (.r or .pdata). Data is always
1296 1303 written in blocks.
1297 1304 """
1298 1305
1299 1306 setFile = None
1300 1307 profilesPerBlock = None
1301 1308 blocksPerFile = None
1302 1309 nWriteBlocks = 0
1303 1310 fileDate = None
1304 1311
1305 1312 def __init__(self, dataOut=None):
1306 1313 raise NotImplementedError
1307 1314
1308 1315 def hasAllDataInBuffer(self):
1309 1316 raise NotImplementedError
1310 1317
1311 1318 def setBlockDimension(self):
1312 1319 raise NotImplementedError
1313 1320
1314 1321 def writeBlock(self):
1315 1322 raise NotImplementedError
1316 1323
1317 1324 def putData(self):
1318 1325 raise NotImplementedError
1319 1326
1320 1327 def getDtypeWidth(self):
1321 1328
1322 1329 dtype_index = get_dtype_index(self.dtype)
1323 1330 dtype_width = get_dtype_width(dtype_index)
1324 1331
1325 1332 return dtype_width
1326 1333
1327 1334 def getProcessFlags(self):
1328 1335
1329 1336 processFlags = 0
1330 1337
1331 1338 dtype_index = get_dtype_index(self.dtype)
1332 1339 procflag_dtype = get_procflag_dtype(dtype_index)
1333 1340
1334 1341 processFlags += procflag_dtype
1335 1342
1336 1343 if self.dataOut.flagDecodeData:
1337 1344 processFlags += PROCFLAG.DECODE_DATA
1338 1345
1339 1346 if self.dataOut.flagDeflipData:
1340 1347 processFlags += PROCFLAG.DEFLIP_DATA
1341 1348
1342 1349 if self.dataOut.code is not None:
1343 1350 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1344 1351
1345 1352 if self.dataOut.nCohInt > 1:
1346 1353 processFlags += PROCFLAG.COHERENT_INTEGRATION
1347 1354
1348 1355 if self.dataOut.type == "Spectra":
1349 1356 if self.dataOut.nIncohInt > 1:
1350 1357 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1351 1358
1352 1359 if self.dataOut.data_dc is not None:
1353 1360 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1354 1361
1355 1362 if self.dataOut.flagShiftFFT:
1356 1363 processFlags += PROCFLAG.SHIFT_FFT_DATA
1357 1364
1358 1365 return processFlags
1359 1366
1360 1367 def setBasicHeader(self):
1361 1368
1362 1369 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1363 1370 self.basicHeaderObj.version = self.versionFile
1364 1371 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1365 1372 utc = numpy.floor(self.dataOut.utctime)
1366 1373 milisecond = (self.dataOut.utctime - utc) * 1000.0
1367 1374 self.basicHeaderObj.utc = utc
1368 1375 self.basicHeaderObj.miliSecond = milisecond
1369 1376 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1370 1377 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1371 1378 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1372 1379
1373 1380 def setFirstHeader(self):
1374 1381 """
1375 1382 Gets a copy of the First Header
1376 1383
1377 1384 Affected:
1378 1385
1379 1386 self.basicHeaderObj
1380 1387 self.systemHeaderObj
1381 1388 self.radarControllerHeaderObj
1382 1389 self.processingHeaderObj
1383 1390
1384 1391 Return:
1385 1392 None
1386 1393 """
1387 1394
1388 1395 raise NotImplementedError
1389 1396
1390 1397 def __writeFirstHeader(self):
1391 1398 """
1392 1399 Writes the first header of the file, i.e. the Basic Header and the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1393 1400
1394 1401 Affected:
1395 1402 __dataType
1396 1403
1397 1404 Return:
1398 1405 None
1399 1406 """
1400 1407
1401 1408 # Compute parameters
1402 1409
1403 1410 sizeLongHeader = self.systemHeaderObj.size + \
1404 1411 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1405 1412 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1406 1413
1407 1414 self.basicHeaderObj.write(self.fp)
1408 1415 self.systemHeaderObj.write(self.fp)
1409 1416 self.radarControllerHeaderObj.write(self.fp)
1410 1417 self.processingHeaderObj.write(self.fp)
1411 1418
1412 1419 def __setNewBlock(self):
1413 1420 """
1414 1421 If it is a new file, writes the First Header; otherwise writes only the Basic Header
1415 1422
1416 1423 Return:
1417 1424 0 : if nothing could be written
1418 1425 1 : if the Basic Header or the First Header was written
1419 1426 """
1420 1427 if self.fp == None:
1421 1428 self.setNextFile()
1422 1429
1423 1430 if self.flagIsNewFile:
1424 1431 return 1
1425 1432
1426 1433 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1427 1434 self.basicHeaderObj.write(self.fp)
1428 1435 return 1
1429 1436
1430 1437 if not(self.setNextFile()):
1431 1438 return 0
1432 1439
1433 1440 return 1
1434 1441
1435 1442 def writeNextBlock(self):
1436 1443 """
1437 1444 Selects the next data block and writes it to a file
1438 1445
1439 1446 Return:
1440 1447 0 : if the data block could not be written
1441 1448 1 : if the data block was written
1442 1449 """
1443 1450 if not(self.__setNewBlock()):
1444 1451 return 0
1445 1452
1446 1453 self.writeBlock()
1447 1454
1448 1455 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1449 1456 self.processingHeaderObj.dataBlocksPerFile))
1450 1457
1451 1458 return 1
1452 1459
1453 1460 def setNextFile(self):
1454 1461 """Determina el siguiente file que sera escrito
1455 1462
1456 1463 Affected:
1457 1464 self.filename
1458 1465 self.subfolder
1459 1466 self.fp
1460 1467 self.setFile
1461 1468 self.flagIsNewFile
1462 1469
1463 1470 Return:
1464 1471 0 : if the file cannot be written
1465 1472 1 : if the file is ready to be written
1466 1473 """
1467 1474 ext = self.ext
1468 1475 path = self.path
1469 1476
1470 1477 if self.fp != None:
1471 1478 self.fp.close()
1472 1479
1473 if not os.path.exists(path):
1474 os.mkdir(path)
1475
1476 1480 timeTuple = time.localtime(self.dataOut.utctime)
1477 1481 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1478 1482
1479 1483 fullpath = os.path.join(path, subfolder)
1480 1484 setFile = self.setFile
1481 1485
1482 1486 if not(os.path.exists(fullpath)):
1483 os.mkdir(fullpath)
1487 os.makedirs(fullpath)
1484 1488 setFile = -1 # initialize the set counter
1485 1489 else:
1486 1490 filesList = os.listdir(fullpath)
1487 1491 if len(filesList) > 0:
1488 1492 filesList = sorted(filesList, key=str.lower)
1489 1493 filen = filesList[-1]
1490 1494 # the filename must have the following format
1491 1495 # 0 1234 567 89A BCDE (hex)
1492 1496 # x YYYY DDD SSS .ext
1493 1497 if isNumber(filen[8:11]):
1494 1498 # initialize the set counter to the set number of the last file
1495 1499 setFile = int(filen[8:11])
1496 1500 else:
1497 1501 setFile = -1
1498 1502 else:
1499 1503 setFile = -1 # initialize the set counter
1500 1504
1501 1505 setFile += 1
1502 1506
1503 1507 # If this is a new day it resets some values
1504 1508 if self.dataOut.datatime.date() > self.fileDate:
1505 1509 setFile = 0
1506 1510 self.nTotalBlocks = 0
1507 1511
1508 1512 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1509 1513 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1510 1514
1511 1515 filename = os.path.join(path, subfolder, filen)
1512 1516
1513 1517 fp = open(filename, 'wb')
1514 1518
1515 1519 self.blockIndex = 0
1516 1520 self.filename = filename
1517 1521 self.subfolder = subfolder
1518 1522 self.fp = fp
1519 1523 self.setFile = setFile
1520 1524 self.flagIsNewFile = 1
1521 1525 self.fileDate = self.dataOut.datatime.date()
1522 1526 self.setFirstHeader()
1523 1527
1524 1528 print('[Writing] Opening file: %s' % self.filename)
1525 1529
1526 1530 self.__writeFirstHeader()
1527 1531
1528 1532 return 1
1529 1533
1530 1534 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1531 1535 """
1532 1536 Sets the format in which the data will be saved and writes the First Header
1533 1537
1534 1538 Inputs:
1535 1539 path : directory where data will be saved
1536 1540 profilesPerBlock : number of profiles per block
1537 1541 set : initial file set
1538 1542 datatype : An integer number that defines data type:
1539 1543 0 : int8 (1 byte)
1540 1544 1 : int16 (2 bytes)
1541 1545 2 : int32 (4 bytes)
1542 1546 3 : int64 (8 bytes)
1543 1547 4 : float32 (4 bytes)
1544 1548 5 : double64 (8 bytes)
1545 1549
1546 1550 Return:
1547 1551 0 : if the setup was not successful
1548 1552 1 : if the setup was successful
1549 1553 """
1550 1554
1551 1555 if ext == None:
1552 1556 ext = self.ext
1553 1557
1554 1558 self.ext = ext.lower()
1555 1559
1556 1560 self.path = path
1557 1561
1558 1562 if set is None:
1559 1563 self.setFile = -1
1560 1564 else:
1561 1565 self.setFile = set - 1
1562 1566
1563 1567 self.blocksPerFile = blocksPerFile
1564 1568 self.profilesPerBlock = profilesPerBlock
1565 1569 self.dataOut = dataOut
1566 1570 self.fileDate = self.dataOut.datatime.date()
1567 1571 self.dtype = self.dataOut.dtype
1568 1572
1569 1573 if datatype is not None:
1570 1574 self.dtype = get_numpy_dtype(datatype)
1571 1575
1572 1576 if not(self.setNextFile()):
1573 1577 print("[Writing] There isn't a next file")
1574 1578 return 0
1575 1579
1576 1580 self.setBlockDimension()
1577 1581
1578 1582 return 1
1579 1583
1580 1584 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1581 1585
1582 1586 if not(self.isConfig):
1583 1587
1584 1588 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1585 1589 set=set, ext=ext, datatype=datatype, **kwargs)
1586 1590 self.isConfig = True
1587 1591
1588 1592 self.dataOut = dataOut
1589 1593 self.putData()
1590 1594 return self.dataOut
1591 1595
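As a quick reference for the datatype codes documented in setup() above, this is a hedged sketch of attaching a concrete writer; the class name, path and values are illustrative assumptions, and `proc_unit` stands for an existing processing unit.

# Hypothetical writer operation; datatype=4 selects float32 per the setup() table
writer = proc_unit.addOperation(name='VoltageWriter')
writer.addParameter(name='path', value='/path/to/output')
writer.addParameter(name='blocksPerFile', value='100')
writer.addParameter(name='profilesPerBlock', value='64')
writer.addParameter(name='datatype', value='4')  # 0:int8, 1:int16, 2:int32, 3:int64, 4:float32, 5:double64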
1592 1596 @MPDecorator
1593 1597 class printInfo(Operation):
1594 1598
1595 1599 def __init__(self):
1596 1600
1597 1601 Operation.__init__(self)
1598 1602 self.__printInfo = True
1599 1603
1600 1604 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1601 1605 if self.__printInfo == False:
1602 1606 return
1603 1607
1604 1608 for header in headers:
1605 1609 if hasattr(dataOut, header):
1606 1610 obj = getattr(dataOut, header)
1607 1611 if hasattr(obj, 'printInfo'):
1608 1612 obj.printInfo()
1609 1613 else:
1610 1614 print(obj)
1611 1615 else:
1612 1616 log.warning('Header {} not found in object'.format(header))
1613 1617
1614 1618 self.__printInfo = False
@@ -1,649 +1,650
1 1 '''
2 2 Created on Aug 1, 2017
3 3
4 4 @author: Juan C. Espinoza
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import time
10 10 import json
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15 import h5py
16 16
17 17 import schainpy.admin
18 18 from schainpy.model.io.jroIO_base import LOCALTIME, Reader
19 19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
20 20 from schainpy.model.data.jrodata import Parameters
21 21 from schainpy.utils import log
22 22
23 23 try:
24 24 import madrigal.cedar
25 25 except:
26 26 pass
27 27
28 28 try:
29 29 basestring
30 30 except:
31 31 basestring = str
32 32
33 33 DEF_CATALOG = {
34 34 'principleInvestigator': 'Marco Milla',
35 35 'expPurpose': '',
36 36 'cycleTime': '',
37 37 'correlativeExp': '',
38 38 'sciRemarks': '',
39 39 'instRemarks': ''
40 40 }
41 41
42 42 DEF_HEADER = {
43 43 'kindatDesc': '',
44 44 'analyst': 'Jicamarca User',
45 45 'comments': '',
46 46 'history': ''
47 47 }
48 48
49 49 MNEMONICS = {
50 50 10: 'jro',
51 12: 'jmp',
51 52 11: 'jbr',
52 53 14: 'jmp', #Added by R. Flores
53 54 840: 'jul',
54 55 13: 'jas',
55 56 1000: 'pbr',
56 57 1001: 'hbr',
57 58 1002: 'obr',
58 59 400: 'clr'
59 60
60 61 }
61 62
62 63 UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)
63 64
64 65 def load_json(obj):
65 66 '''
66 67 Parse json as string instead of unicode
67 68 '''
68 69
69 70 if isinstance(obj, str):
70 71 iterable = json.loads(obj)
71 72 else:
72 73 iterable = obj
73 74
74 75 if isinstance(iterable, dict):
75 76 return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, basestring) else v
76 77 for k, v in list(iterable.items())}
77 78 elif isinstance(iterable, (list, tuple)):
78 79 return [str(v) if isinstance(v, basestring) else v for v in iterable]
79 80
80 81 return iterable
81 82
82 83
83 84 class MADReader(Reader, ProcessingUnit):
84 85
85 86 def __init__(self):
86 87
87 88 ProcessingUnit.__init__(self)
88 89
89 90 self.dataOut = Parameters()
90 91 self.counter_records = 0
91 92 self.nrecords = None
92 93 self.flagNoMoreFiles = 0
93 94 self.filename = None
94 95 self.intervals = set()
95 96 self.datatime = datetime.datetime(1900,1,1)
96 97 self.format = None
97 98 self.filefmt = "***%Y%m%d*******"
98 99
99 100 def setup(self, **kwargs):
100 101
101 102 self.set_kwargs(**kwargs)
102 103 self.oneDDict = load_json(self.oneDDict)
103 104 self.twoDDict = load_json(self.twoDDict)
104 105 self.ind2DList = load_json(self.ind2DList)
105 106 self.independentParam = self.ind2DList[0]
106 107
107 108 if self.path is None:
108 109 raise ValueError('The path is not valid')
109 110
110 111 self.open_file = open
111 112 self.open_mode = 'rb'
112 113
113 114 if self.format is None:
114 115 raise ValueError('The format is not valid, choose simple, cedar or hdf5')
115 116 elif self.format.lower() in ('simple', 'txt'):
116 117 self.ext = '.txt'
117 118 elif self.format.lower() in ('cedar',):
118 119 self.ext = '.001'
119 120 else:
120 121 self.ext = '.hdf5'
121 122 self.open_file = h5py.File
122 123 self.open_mode = 'r'
123 124
124 125 if self.online:
125 126 log.log("Searching files in online mode...", self.name)
126 127
127 128 for nTries in range(self.nTries):
128 129 fullpath = self.searchFilesOnLine(self.path, self.startDate,
129 130 self.endDate, self.expLabel, self.ext, self.walk,
130 131 self.filefmt, self.folderfmt)
131 132
132 133 try:
133 134 fullpath = next(fullpath)
134 135 except:
135 136 fullpath = None
136 137
137 138 if fullpath:
138 139 break
139 140
140 141 log.warning(
141 142 'Waiting {} sec for a valid file in {}: try {} ...'.format(
142 143 self.delay, self.path, nTries + 1),
143 144 self.name)
144 145 time.sleep(self.delay)
145 146
146 147 if not(fullpath):
147 148 raise schainpy.admin.SchainError(
148 149 'There isn\'t any valid file in {}'.format(self.path))
149 150
150 151 else:
151 152 log.log("Searching files in {}".format(self.path), self.name)
152 153 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
153 154 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
154 155
155 156 self.setNextFile()
156 157
157 158 def readFirstHeader(self):
158 159 '''Read header and data'''
159 160
160 161 self.parseHeader()
161 162 self.parseData()
162 163 self.blockIndex = 0
163 164
164 165 return
165 166
166 167 def parseHeader(self):
167 168 '''
168 169 '''
169 170
170 171 self.output = {}
171 172 self.version = '2'
172 173 s_parameters = None
173 174 if self.ext == '.txt':
174 175 self.parameters = [s.strip().lower() for s in self.fp.readline().decode().strip().split(' ') if s]
175 176 elif self.ext == '.hdf5':
176 177 self.metadata = self.fp['Metadata']
177 178 if '_record_layout' in self.metadata:
178 179 s_parameters = [s[0].lower().decode() for s in self.metadata['Independent Spatial Parameters']]
179 180 self.version = '3'
180 181 self.parameters = [s[0].lower().decode() for s in self.metadata['Data Parameters']]
181 182
182 183 log.success('Parameters found: {}'.format(self.parameters),
183 184 'MADReader')
184 185 if s_parameters:
185 186 log.success('Spatial parameters found: {}'.format(s_parameters),
186 187 'MADReader')
187 188
188 189 for param in list(self.oneDDict.keys()):
189 190 if param.lower() not in self.parameters:
190 191 log.warning(
191 192 'Parameter {} not found, it will be ignored'.format(
192 193 param),
193 194 'MADReader')
194 195 self.oneDDict.pop(param, None)
195 196
196 197 for param, value in list(self.twoDDict.items()):
197 198 if param.lower() not in self.parameters:
198 199 log.warning(
199 200 'Parameter {} not found, it will be ignored'.format(
200 201 param),
201 202 'MADReader')
202 203 self.twoDDict.pop(param, None)
203 204 continue
204 205 if isinstance(value, list):
205 206 if value[0] not in self.output:
206 207 self.output[value[0]] = []
207 208 self.output[value[0]].append([])
208 209
209 210 def parseData(self):
210 211 '''
211 212 '''
212 213
213 214 if self.ext == '.txt':
214 215 self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
215 216 self.nrecords = self.data.shape[0]
216 217 self.ranges = numpy.unique(self.data[:,self.parameters.index(self.independentParam.lower())])
217 218 self.counter_records = 0
218 219 elif self.ext == '.hdf5':
219 220 self.data = self.fp['Data']
220 221 self.ranges = numpy.unique(self.data['Table Layout'][self.independentParam.lower()])
221 222 self.times = numpy.unique(self.data['Table Layout']['ut1_unix'])
222 223 self.counter_records = int(self.data['Table Layout']['recno'][0])
223 224 self.nrecords = int(self.data['Table Layout']['recno'][-1])
224 225
225 226 def readNextBlock(self):
226 227
227 228 while True:
228 229 self.flagDiscontinuousBlock = 0
229 230 if self.counter_records == self.nrecords:
230 231 self.setNextFile()
231 232
232 233 self.readBlock()
233 234
234 235 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
235 236 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
236 237 log.warning(
237 238 'Reading Record No. {}/{} -> {} [Skipping]'.format(
238 239 self.counter_records,
239 240 self.nrecords,
240 241 self.datatime.ctime()),
241 242 'MADReader')
242 243 continue
243 244 break
244 245
245 246 log.log(
246 247 'Reading Record No. {}/{} -> {}'.format(
247 248 self.counter_records,
248 249 self.nrecords,
249 250 self.datatime.ctime()),
250 251 'MADReader')
251 252
252 253 return 1
253 254
254 255 def readBlock(self):
255 256 '''
256 257 '''
257 258 dum = []
258 259 if self.ext == '.txt':
259 260 dt = self.data[self.counter_records][:6].astype(int)
260 261 if datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5]).date() > self.datatime.date():
261 262 self.flagDiscontinuousBlock = 1
262 263 self.datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
263 264 while True:
264 265 dt = self.data[self.counter_records][:6].astype(int)
265 266 datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
266 267 if datatime == self.datatime:
267 268 dum.append(self.data[self.counter_records])
268 269 self.counter_records += 1
269 270 if self.counter_records == self.nrecords:
270 271 break
271 272 continue
272 273 self.intervals.add((datatime-self.datatime).seconds)
273 274 break
274 275 elif self.ext == '.hdf5':
275 276 datatime = datetime.datetime.utcfromtimestamp(
276 277 self.times[self.counter_records])
277 278 dum = self.data['Table Layout'][self.data['Table Layout']['recno']==self.counter_records]
278 279 self.intervals.add((datatime-self.datatime).seconds)
279 280 if datatime.date()>self.datatime.date():
280 281 self.flagDiscontinuousBlock = 1
281 282 self.datatime = datatime
282 283 self.counter_records += 1
283 284
284 285 self.buffer = numpy.array(dum)
285 286 return
286 287
287 288 def set_output(self):
288 289 '''
289 290 Storing data from buffer to dataOut object
290 291 '''
291 292
292 293 parameters = [None for __ in self.parameters]
293 294
294 295 for param, attr in list(self.oneDDict.items()):
295 296 x = self.parameters.index(param.lower())
296 297 setattr(self.dataOut, attr, self.buffer[0][x])
297 298
298 299 for param, value in list(self.twoDDict.items()):
299 300 dummy = numpy.zeros(self.ranges.shape) + numpy.nan
300 301 if self.ext == '.txt':
301 302 x = self.parameters.index(param.lower())
302 303 y = self.parameters.index(self.independentParam.lower())
303 304 ranges = self.buffer[:,y]
304 305 #if self.ranges.size == ranges.size:
305 306 # continue
306 307 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
307 308 dummy[index] = self.buffer[:,x]
308 309 else:
309 310 ranges = self.buffer[self.independentParam.lower()]
310 311 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
311 312 dummy[index] = self.buffer[param.lower()]
312 313
313 314 if isinstance(value, str):
314 315 if value not in self.independentParam:
315 316 setattr(self.dataOut, value, dummy.reshape(1,-1))
316 317 elif isinstance(value, list):
317 318 self.output[value[0]][value[1]] = dummy
318 319 parameters[value[1]] = param
319 320 for key, value in list(self.output.items()):
320 321 setattr(self.dataOut, key, numpy.array(value))
321 322
322 323 self.dataOut.parameters = [s for s in parameters if s]
323 324 self.dataOut.heightList = self.ranges
324 325 self.dataOut.utctime = (self.datatime - datetime.datetime(1970, 1, 1)).total_seconds()
325 326 self.dataOut.utctimeInit = self.dataOut.utctime
326 327 self.dataOut.paramInterval = min(self.intervals)
327 328 self.dataOut.useLocalTime = False
328 329 self.dataOut.flagNoData = False
329 330 self.dataOut.nrecords = self.nrecords
330 331 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
331 332
332 333 def getData(self):
333 334 '''
334 335 Storing data from databuffer to dataOut object
335 336 '''
336 337
337 338 if not self.readNextBlock():
338 339 self.dataOut.flagNoData = True
339 340 return 0
340 341
341 342 self.set_output()
342 343
343 344 return 1
344 345
345 346 def run(self, **kwargs):
346 347
347 348 if not(self.isConfig):
348 349 self.setup(**kwargs)
349 350 self.isConfig = True
350 351
351 352 self.getData()
352 353
353 354 return
354 355
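MADReader.setup() above requires `format` (simple/txt, cedar or hdf5) plus the JSON dictionaries oneDDict, twoDDict and ind2DList; the following is a hedged configuration sketch. Madrigal codes and attribute names are illustrative, borrowed from the MADWriter docstring below, and `project` stands for an existing Project instance.

import json

# Hypothetical Madrigal read unit
reader = project.addReadUnit(
    name='MADReader',
    path='/path/to/madrigal',        # placeholder
    startDate='2020/01/01', endDate='2020/01/02',
    startTime='00:00:00', endTime='23:59:59',
    format='hdf5',
    oneDDict=json.dumps({'gdlatr': 'lat', 'gdlonr': 'lon'}),
    twoDDict=json.dumps({'gdalt': 'heightList', 'vn1p2': ['data_output', 0]}),
    ind2DList=json.dumps(['gdalt']),
)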
355 356 @MPDecorator
356 357 class MADWriter(Operation):
357 358 '''Writing module for Madrigal files
358 359
359 360 type: external
360 361
361 362 Inputs:
362 363 path path where files will be created
363 364 oneDDict json of one-dimensional parameters in record where keys
364 365 are Madrigal codes (integers or mnemonics) and values the corresponding
365 366 dataOut attribute e.g: {
366 367 'gdlatr': 'lat',
367 368 'gdlonr': 'lon',
368 369 'gdlat2':'lat',
369 370 'glon2':'lon'}
370 371 ind2DList list of independent spatial two-dimensional parameters e.g:
371 372 ['heightList']
372 373 twoDDict json of two-dimensional parameters in record where keys
373 374 are Madrigal codes (integers or mnemonics) and values the corresponding
374 375 dataOut attribute; if it is a multidimensional array, specify it as a tuple
375 376 ('attr', pos) e.g: {
376 377 'gdalt': 'heightList',
377 378 'vn1p2': ('data_output', 0),
378 379 'vn2p2': ('data_output', 1),
379 380 'vn3': ('data_output', 2),
380 381 'snl': ('data_SNR', 'db')
381 382 }
382 383 metadata json of madrigal metadata (kinst, kindat, catalog and header)
383 384 format hdf5, cedar
384 385 blocks number of blocks per file'''
385 386
386 387 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
387 388 missing = -32767
388 389 currentDay = None
389 390
390 391 def __init__(self):
391 392
392 393 Operation.__init__(self)
393 394 self.dataOut = Parameters()
394 395 self.counter = 0
395 396 self.path = None
396 397 self.fp = None
397 398
398 399 def run(self, dataOut, path, oneDDict, ind2DList='[]', twoDDict='{}',
399 400 metadata='{}', format='cedar', **kwargs):
400 401
401 402
402 403 #if dataOut.AUX==1: #Modified
403 404
404 405 if not self.isConfig:
405 406 self.setup(path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs)
406 407 self.isConfig = True
407 408
408 409 self.dataOut = dataOut
409 410 self.putData()
410 411
411 412 return 1
412 413
413 414 def setup(self, path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs):
414 415 '''
415 416 Configure Operation
416 417 '''
417 418
418 419 self.path = path
419 420 self.blocks = kwargs.get('blocks', None)
420 421 self.counter = 0
421 422 self.oneDDict = load_json(oneDDict)
422 423 self.twoDDict = load_json(twoDDict)
423 424 self.ind2DList = load_json(ind2DList)
424 425 meta = load_json(metadata)
425 426 self.kinst = meta.get('kinst')
426 427 self.kindat = meta.get('kindat')
427 428 self.catalog = meta.get('catalog', DEF_CATALOG)
428 429 self.header = meta.get('header', DEF_HEADER)
429 430 if format == 'cedar':
430 431 self.ext = '.dat'
431 432 self.extra_args = {}
432 433 elif format == 'hdf5':
433 434 self.ext = '.hdf5'
434 435 self.extra_args = {'ind2DList': self.ind2DList}
435 436
436 437 self.keys = [k.lower() for k in self.twoDDict]
437 438 if 'range' in self.keys:
438 439 self.keys.remove('range')
439 440 if 'gdalt' in self.keys:
440 441 self.keys.remove('gdalt')
441 442
442 443 def setFile(self):
443 444 '''
444 445 Create new cedar file object
445 446 '''
446 447
447 448 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
448 449 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
449 450 #if self.dataOut.input_dat_type:
450 451 #date=datetime.datetime.fromtimestamp(self.dataOut.TimeBlockSeconds_for_dp_power)
451 452 #print("date",date)
452 453
453 454 filename = '{}{}{}'.format(self.mnemonic,
454 455 date.strftime('%Y%m%d_%H%M%S'),
455 456 self.ext)
456 457
457 458 self.fullname = os.path.join(self.path, filename)
458 459
459 460 if os.path.isfile(self.fullname) :
460 461 log.warning(
461 462 'Destination file {} already exists, previous file deleted.'.format(
462 463 self.fullname),
463 464 'MADWriter')
464 465 os.remove(self.fullname)
465 466
466 467 try:
467 468 log.success(
468 469 'Creating file: {}'.format(self.fullname),
469 470 'MADWriter')
470 471 if not os.path.exists(self.path):
471 472 os.makedirs(self.path)
472 473 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
473 474
474 475
475 476 except ValueError as e:
476 477 log.error(
477 478 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
478 479 'MADWriter')
479 480 return
480 481
481 482 return 1
482 483
483 484 def writeBlock(self):
484 485 '''
485 486 Add data records to cedar file taking data from oneDDict and twoDDict
486 487 attributes.
487 488 Allowed parameters in: parcodes.tab
488 489 '''
489 490 #self.dataOut.paramInterval=2
490 491 startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
491 492
492 493 endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
493 494
494 495 #if self.dataOut.input_dat_type:
495 496 #if self.dataOut.experiment=="DP":
496 497 #startTime=datetime.datetime.fromtimestamp(self.dataOut.TimeBlockSeconds_for_dp_power)
497 498 #endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
498 499
499 500
500 501 #print("2: ",startTime)
501 502 #print(endTime)
502 503 heights = self.dataOut.heightList
503 504 #print(heights)
504 505 #exit(1)
505 506 #print(self.blocks)
506 507 #print(startTime)
507 508 #print(endTime)
508 509 #print(heights)
509 510 #input()
510 511 if self.ext == '.dat':
511 512 for key, value in list(self.twoDDict.items()):
512 513 if isinstance(value, str):
513 514 data = getattr(self.dataOut, value)
514 515 invalid = numpy.isnan(data)
515 516 data[invalid] = self.missing
516 517 elif isinstance(value, (tuple, list)):
517 518 attr, key = value
518 519 data = getattr(self.dataOut, attr)
519 520 invalid = numpy.isnan(data)
520 521 data[invalid] = self.missing
521 522
522 523 out = {}
523 524 for key, value in list(self.twoDDict.items()):
524 525 key = key.lower()
525 526 if isinstance(value, str):
526 527 if 'db' in value.lower():
527 528 tmp = getattr(self.dataOut, value.replace('_db', ''))
528 529 SNRavg = numpy.average(tmp, axis=0)
529 530 tmp = 10*numpy.log10(SNRavg)
530 531 else:
531 532 tmp = getattr(self.dataOut, value)
532 533 out[key] = tmp.flatten()[:len(heights)]
533 534 elif isinstance(value, (tuple, list)):
534 535 attr, x = value
535 536 data = getattr(self.dataOut, attr)
536 537 #print(x)
537 538 #print(len(heights))
538 539 #print(data[int(x)][:len(heights)])
539 540 #print(numpy.shape(out))
540 541 #print(numpy.shape(data))
541 542
542 543 out[key] = data[int(x)][:len(heights)]
543 544
544 545 a = numpy.array([out[k] for k in self.keys])
545 546 #print(a)
546 547 nrows = numpy.array([numpy.isnan(a[:, x]).all() for x in range(len(heights))])
547 548 index = numpy.where(nrows == False)[0]
548 549
549 550 #print(startTime.minute)
550 551 rec = madrigal.cedar.MadrigalDataRecord(
551 552 self.kinst,
552 553 self.kindat,
553 554 startTime.year,
554 555 startTime.month,
555 556 startTime.day,
556 557 startTime.hour,
557 558 startTime.minute,
558 559 startTime.second,
559 560 startTime.microsecond/10000,
560 561 endTime.year,
561 562 endTime.month,
562 563 endTime.day,
563 564 endTime.hour,
564 565 endTime.minute,
565 566 endTime.second,
566 567 endTime.microsecond/10000,
567 568 list(self.oneDDict.keys()),
568 569 list(self.twoDDict.keys()),
569 570 len(index),
570 571 **self.extra_args
571 572 )
572 573 #print("rec",rec)
573 574 # Setting 1d values
574 575 for key in self.oneDDict:
575 576 rec.set1D(key, getattr(self.dataOut, self.oneDDict[key]))
576 577
577 578 # Setting 2d values
578 579 nrec = 0
579 580 for n in index:
580 581 for key in out:
581 582 rec.set2D(key, nrec, out[key][n])
582 583 nrec += 1
583 584
584 585 self.fp.append(rec)
585 586 if self.ext == '.hdf5' and self.counter %2 == 0 and self.counter > 0:
586 587 #print("here")
587 588 self.fp.dump()
588 589 if self.counter % 20 == 0 and self.counter > 0:
589 590 #self.fp.write()
590 591 log.log(
591 592 'Writing {} records'.format(
592 593 self.counter),
593 594 'MADWriter')
594 595
595 596 def setHeader(self):
596 597 '''
597 598 Create and add catalog and header to the cedar file
598 599 '''
599 600
600 601 log.success('Closing file {}'.format(self.fullname), 'MADWriter')
601 602
602 603 if self.ext == '.dat':
603 604 self.fp.write()
604 605 else:
605 606 self.fp.dump()
606 607 self.fp.close()
607 608
608 609 header = madrigal.cedar.CatalogHeaderCreator(self.fullname)
609 610 header.createCatalog(**self.catalog)
610 611 header.createHeader(**self.header)
611 612 header.write()
612 613
613 614 def timeFlag(self):
614 615 currentTime = self.dataOut.utctime
615 616 timeTuple = time.localtime(currentTime)
616 617 dataDay = timeTuple.tm_yday
617 618
618 619 if self.currentDay is None:
619 620 self.currentDay = dataDay
620 621 return False
621 622
622 623 #If the day is different
623 624 if dataDay != self.currentDay:
624 625 self.currentDay = dataDay
625 626 return True
626 627
627 628 else:
628 629 return False
629 630
630 631 def putData(self):
631 632
632 633 if self.dataOut.flagNoData:
633 634 return 0
634 635
635 636 if self.dataOut.flagDiscontinuousBlock or self.counter == self.blocks or self.timeFlag():
636 637 if self.counter > 0:
637 638 self.setHeader()
638 639 self.counter = 0
639 640
640 641 if self.counter == 0:
641 642 self.setFile()
642 643
643 644 self.writeBlock()
644 645 self.counter += 1
645 646
646 647 def close(self):
647 648
648 649 if self.counter > 0:
649 650 self.setHeader()
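For the MADWriter parameter list documented in its docstring above, here is a minimal usage sketch in the same addOperation style used by HDFWriter later in this changeset; kinst=10 matches the 'jro' entry in MNEMONICS, while kindat, the path and the dictionaries are placeholders, not values from this revision.

import json

# Hypothetical MADWriter operation attached to a processing unit
madwriter = proc_unit.addOperation(name='MADWriter')
madwriter.addParameter(name='path', value='/path/to/madrigal_out')
madwriter.addParameter(name='oneDDict', value=json.dumps({'gdlatr': 'lat', 'gdlonr': 'lon'}))
madwriter.addParameter(name='ind2DList', value=json.dumps(['heightList']))
madwriter.addParameter(name='twoDDict', value=json.dumps({'gdalt': 'heightList', 'vn1p2': ['data_output', 0]}))
madwriter.addParameter(name='metadata', value=json.dumps({'kinst': 10, 'kindat': 1910}))  # kindat is a placeholder
madwriter.addParameter(name='format', value='cedar')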
@@ -1,640 +1,662
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default the
19 19 files contain two groups, Data and Metadata; all variables are loaded as `dataOut`
20 20 attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 try:
112 112 fullpath = next(fullpath)
113 113 except:
114 114 fullpath = None
115 115
116 116 if fullpath:
117 117 break
118 118
119 119 log.warning(
120 120 'Waiting {} sec for a valid file in {}: try {} ...'.format(
121 121 self.delay, self.path, nTries + 1),
122 122 self.name)
123 123 time.sleep(self.delay)
124 124
125 125 if not(fullpath):
126 126 raise schainpy.admin.SchainError(
127 127 'There isn\'t any valid file in {}'.format(self.path))
128 128
129 129 pathname, filename = os.path.split(fullpath)
130 130 self.year = int(filename[1:5])
131 131 self.doy = int(filename[5:8])
132 132 self.set = int(filename[8:11]) - 1
133 133 else:
134 134 log.log("Searching files in {}".format(self.path), self.name)
135 135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
136 136 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
137 137
138 138 self.setNextFile()
139 139
140 140 return
141 141
142 142 def readFirstHeader(self):
143 143 '''Read metadata and data'''
144 144
145 145 self.__readMetadata()
146 146 self.__readData()
147 147 self.__setBlockList()
148 148
149 149 if 'type' in self.meta:
150 150 self.dataOut = eval(self.meta['type'])()
151 151
152 152 for attr in self.meta:
153 153 setattr(self.dataOut, attr, self.meta[attr])
154 154
155 155 self.blockIndex = 0
156 156
157 157 return
158 158
159 159 def __setBlockList(self):
160 160 '''
161 161 Selects the data within the times defined
162 162
163 163 self.fp
164 164 self.startTime
165 165 self.endTime
166 166 self.blockList
167 167 self.blocksPerFile
168 168
169 169 '''
170 170
171 171 startTime = self.startTime
172 172 endTime = self.endTime
173 173 thisUtcTime = self.data['utctime'] + self.utcoffset
174 174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 175 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
176 176
177 177 thisDate = thisDatetime.date()
178 178 thisTime = thisDatetime.time()
179 179
180 180 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 181 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 182
183 183 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
184 184
185 185 self.blockList = ind
186 186 self.blocksPerFile = len(ind)
187 # similar to master
188 if len(ind)==0:
189 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.blockIndex,
190 self.blocksPerFile,
191 thisDatetime))
192 self.setNextFile()
193 # similar to master
187 194 return
188 195
189 196 def __readMetadata(self):
190 197 '''
191 198 Reads Metadata
192 199 '''
193 200
194 201 meta = {}
195 202
196 203 if self.description:
197 204 for key, value in self.description['Metadata'].items():
198 205 meta[key] = self.fp[value][()]
199 206 else:
200 207 grp = self.fp['Metadata']
201 208 for name in grp:
202 209 meta[name] = grp[name][()]
203 210
204 211 if self.extras:
205 212 for key, value in self.extras.items():
206 213 meta[key] = value
207 214 self.meta = meta
208 215
209 216 return
210 217
211 218 def __readData(self):
212 219
213 220 data = {}
214 221
215 222 if self.description:
216 223 for key, value in self.description['Data'].items():
217 224 if isinstance(value, str):
218 225 if isinstance(self.fp[value], h5py.Dataset):
219 226 data[key] = self.fp[value][()]
220 227 elif isinstance(self.fp[value], h5py.Group):
221 228 array = []
222 229 for ch in self.fp[value]:
223 230 array.append(self.fp[value][ch][()])
224 231 data[key] = numpy.array(array)
225 232 elif isinstance(value, list):
226 233 array = []
227 234 for ch in value:
228 235 array.append(self.fp[ch][()])
229 236 data[key] = numpy.array(array)
230 237 else:
231 238 grp = self.fp['Data']
232 239 for name in grp:
233 240 if isinstance(grp[name], h5py.Dataset):
234 241 array = grp[name][()]
235 242 elif isinstance(grp[name], h5py.Group):
236 243 array = []
237 244 for ch in grp[name]:
238 245 array.append(grp[name][ch][()])
239 246 array = numpy.array(array)
240 247 else:
241 248 log.warning('Unknown type: {}'.format(name))
242 249
243 250 if name in self.description:
244 251 key = self.description[name]
245 252 else:
246 253 key = name
247 254 data[key] = array
248 255
249 256 self.data = data
250 257 return
251 258
252 259 def getData(self):
253 260
254 261 for attr in self.data:
255 262 if self.data[attr].ndim == 1:
256 263 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
257 264 else:
258 265 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
259 266
260 267 self.dataOut.flagNoData = False
261 268 self.blockIndex += 1
262 269
263 270 log.log("Block No. {}/{} -> {}".format(
264 271 self.blockIndex,
265 272 self.blocksPerFile,
266 273 self.dataOut.datatime.ctime()), self.name)
267 274
268 275 return
269 276
270 277 def run(self, **kwargs):
271 278
272 279 if not(self.isConfig):
273 280 self.setup(**kwargs)
274 281 self.isConfig = True
275 282
276 283 if self.blockIndex == self.blocksPerFile:
277 284 self.setNextFile()
278 285
279 286 self.getData()
280 287
281 288 return
282 289
283 290 @MPDecorator
284 291 class HDFWriter(Operation):
285 292 """Operation to write HDF5 files.
286 293
287 294 The HDF5 file contains by default two groups, Data and Metadata, where
288 295 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
289 296 parameters; data attributes are normally time dependent whereas the metadata
290 297 are not.
291 298 It is possible to customize the structure of the HDF5 file with the
292 299 optional description parameter see the examples.
293 300
294 301 Parameters:
295 302 -----------
296 303 path : str
297 304 Path where files will be saved.
298 305 blocksPerFile : int
299 306 Number of blocks per file
300 307 metadataList : list
301 308 List of the dataOut attributes that will be saved as metadata
302 309 dataList : list
303 310 List of the dataOut attributes that will be saved as data
304 311 setType : bool
305 312 If True the name of the files corresponds to the timestamp of the data
306 313 description : dict, optional
307 314 Dictionary with the desired description of the HDF5 file
308 315
309 316 Examples
310 317 --------
311 318
312 319 desc = {
313 320 'data_output': {'winds': ['z', 'w', 'v']},
314 321 'utctime': 'timestamps',
315 322 'heightList': 'heights'
316 323 }
317 324 desc = {
318 325 'data_output': ['z', 'w', 'v'],
319 326 'utctime': 'timestamps',
320 327 'heightList': 'heights'
321 328 }
322 329 desc = {
323 330 'Data': {
324 331 'data_output': 'winds',
325 332 'utctime': 'timestamps'
326 333 },
327 334 'Metadata': {
328 335 'heightList': 'heights'
329 336 }
330 337 }
331 338
332 339 writer = proc_unit.addOperation(name='HDFWriter')
333 340 writer.addParameter(name='path', value='/path/to/file')
334 341 writer.addParameter(name='blocksPerFile', value='32')
335 342 writer.addParameter(name='metadataList', value='heightList,timeZone')
336 343 writer.addParameter(name='dataList',value='data_output,utctime')
337 344 # writer.addParameter(name='description',value=json.dumps(desc))
338 345
339 346 """
340 347
341 348 ext = ".hdf5"
342 349 optchar = "D"
343 350 filename = None
344 351 path = None
345 352 setFile = None
346 353 fp = None
347 354 firsttime = True
348 355 #Configurations
349 356 blocksPerFile = None
350 357 blockIndex = None
351 358 dataOut = None
352 359 #Data Arrays
353 360 dataList = None
354 361 metadataList = None
355 362 currentDay = None
356 363 lastTime = None
357 364
358 365 def __init__(self):
359 366
360 367 Operation.__init__(self)
361 368 return
362 369
363 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None, uniqueChannel=False):
370 def set_kwargs(self, **kwargs):
371
372 for key, value in kwargs.items():
373 setattr(self, key, value)
374
375 def set_kwargs_obj(self, obj, **kwargs):
376
377 for key, value in kwargs.items():
378 setattr(obj, key, value)
379
380 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None, **kwargs):
364 381 self.path = path
365 382 self.blocksPerFile = blocksPerFile
366 383 self.metadataList = metadataList
367 384 self.dataList = [s.strip() for s in dataList]
368 385 self.setType = setType
369 386 self.description = description
370 self.uniqueChannel = uniqueChannel
387 self.set_kwargs(**kwargs)
388 #print("self.uniqueChannel: ", self.uniqueChannel)
389 #self.uniqueChannel = uniqueChannel
371 390
372 391 if self.metadataList is None:
373 392 self.metadataList = self.dataOut.metadata_list
374 393
375 394 tableList = []
376 395 dsList = []
377 396
378 397 for i in range(len(self.dataList)):
379 398 dsDict = {}
380 399 if hasattr(self.dataOut, self.dataList[i]):
381 400 dataAux = getattr(self.dataOut, self.dataList[i])
382 401 dsDict['variable'] = self.dataList[i]
383 402 else:
384 403 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
385 404 continue
386 405
387 406 if dataAux is None:
388 407 continue
389 408 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
390 409 dsDict['nDim'] = 0
391 410 else:
392 if uniqueChannel: #Creates extra dimension to avoid the creation of multiple channels
411 if self.uniqueChannel: #Creates extra dimension to avoid the creation of multiple channels
393 412 dataAux = numpy.expand_dims(dataAux, axis=0)
394 413 #setattr(self.dataOut, self.dataList[i], numpy.expand_dims(getattr(self.dataOut, self.dataList[i]), axis=0))
395 414 #dataAux = getattr(self.dataOut, self.dataList[i])
396 415 #print(getattr(self.dataOut, self.dataList[i]))
397 416 dsDict['nDim'] = len(dataAux.shape)
398 417 dsDict['shape'] = dataAux.shape
399 418 dsDict['dsNumber'] = dataAux.shape[0]
400 419 dsDict['dtype'] = dataAux.dtype
401 420
402 421 dsList.append(dsDict)
403 422
404 423 self.dsList = dsList
405 424 self.currentDay = self.dataOut.datatime.date()
406 425
407 426 def timeFlag(self):
408 427 currentTime = self.dataOut.utctime
409 428 timeTuple = time.localtime(currentTime)
410 429 dataDay = timeTuple.tm_yday
411 430
412 431 if self.lastTime is None:
413 432 self.lastTime = currentTime
414 433 self.currentDay = dataDay
415 434 return False
416 435
417 436 timeDiff = currentTime - self.lastTime
418 437
419 438 #If the day changed or the time difference between samples exceeds the limit
420 439 if dataDay != self.currentDay:
421 440 self.currentDay = dataDay
422 441 return True
423 442 elif timeDiff > 3*60*60:
424 443 self.lastTime = currentTime
425 444 return True
426 445 else:
427 446 self.lastTime = currentTime
428 447 return False
429 448
430 449 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
431 dataList=[], setType=None, description={}, uniqueChannel= False):
450 dataList=[], setType=None, description={}, **kwargs):
432 451
433 452 self.dataOut = dataOut
453 self.set_kwargs_obj(self.dataOut, **kwargs)
434 454 if not(self.isConfig):
435 455 self.setup(path=path, blocksPerFile=blocksPerFile,
436 456 metadataList=metadataList, dataList=dataList,
437 setType=setType, description=description, uniqueChannel=uniqueChannel)
457 setType=setType, description=description, **kwargs)
438 458
439 459 self.isConfig = True
440 460 self.setNextFile()
441 461
442 462 self.putData()
443 463
444 464 return
445 465
446 466 def setNextFile(self):
447 467
448 468 ext = self.ext
449 469 path = self.path
450 470 setFile = self.setFile
451 471
452 472 timeTuple = time.localtime(self.dataOut.utctime)
453 473 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
454 474 fullpath = os.path.join(path, subfolder)
455 475
456 476 if os.path.exists(fullpath):
457 477 filesList = os.listdir(fullpath)
458 478 filesList = [k for k in filesList if k.startswith(self.optchar)]
459 479 if len( filesList ) > 0:
460 480 filesList = sorted(filesList, key=str.lower)
461 481 filen = filesList[-1]
462 482 # the filename must have the following format
463 483 # 0 1234 567 89A BCDE (hex)
464 484 # x YYYY DDD SSS .ext
465 485 if isNumber(filen[8:11]):
466 486 setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file
467 487 else:
468 488 setFile = -1
469 489 else:
470 490 setFile = -1 #initialize the set counter
471 491 else:
472 492 os.makedirs(fullpath)
473 493 setFile = -1 #initialize the set counter
474 494
475 495 if self.setType is None:
476 496 setFile += 1
477 497 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
478 498 timeTuple.tm_year,
479 499 timeTuple.tm_yday,
480 500 setFile,
481 501 ext )
482 502 else:
483 503 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
484 504 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
485 505 timeTuple.tm_year,
486 506 timeTuple.tm_yday,
487 507 setFile,
488 508 ext )
489 509
490 510 self.filename = os.path.join( path, subfolder, file )
491 511
492 512 #Setting HDF5 File
493 513 self.fp = h5py.File(self.filename, 'w')
494 514 #write metadata
495 515 self.writeMetadata(self.fp)
496 516 #Write data
497 517 self.writeData(self.fp)
498 518
499 519 def getLabel(self, name, x=None):
500 520 #print("x: ", x)
501 521 if x is None:
502 522 if 'Data' in self.description:
503 523 data = self.description['Data']
504 524 if 'Metadata' in self.description:
505 525 data.update(self.description['Metadata'])
506 526 else:
507 527 data = self.description
508 528 if name in data:
509 529 if isinstance(data[name], str):
510 530 return data[name]
511 531 elif isinstance(data[name], list):
512 532 return None
513 533 elif isinstance(data[name], dict):
514 534 for key, value in data[name].items():
515 535 return key
516 536 return name
517 537 else:
518 if 'Metadata' in self.description:
519 meta = self.description['Metadata']
538 if 'Data' in self.description:
539 data = self.description['Data']
540 if 'Metadata' in self.description:
541 data.update(self.description['Metadata'])
520 542 else:
521 meta = self.description
522 if name in meta:
523 if isinstance(meta[name], list):
524 return meta[name][x]
525 elif isinstance(meta[name], dict):
526 for key, value in meta[name].items():
543 data = self.description
544 if name in data:
545 if isinstance(data[name], list):
546 return data[name][x]
547 elif isinstance(data[name], dict):
548 for key, value in data[name].items():
527 549 return value[x]
528 550 if 'cspc' in name:
529 551 return 'pair{:02d}'.format(x)
530 552 else:
531 553 return 'channel{:02d}'.format(x)
532 554
533 555 def writeMetadata(self, fp):
534 556
535 557 if self.description:
536 558 if 'Metadata' in self.description:
537 559 grp = fp.create_group('Metadata')
538 560 else:
539 561 grp = fp
540 562 else:
541 563 grp = fp.create_group('Metadata')
542 564
543 565 for i in range(len(self.metadataList)):
544 566 if not hasattr(self.dataOut, self.metadataList[i]):
545 567 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
546 568 continue
547 569 value = getattr(self.dataOut, self.metadataList[i])
548 570 if isinstance(value, bool):
549 571 if value is True:
550 572 value = 1
551 573 else:
552 574 value = 0
553 575 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
554 576 return
555 577
556 578 def writeData(self, fp):
557 579
558 580 if self.description:
559 581 if 'Data' in self.description:
560 582 grp = fp.create_group('Data')
561 583 else:
562 584 grp = fp
563 585 else:
564 586 grp = fp.create_group('Data')
565 587
566 588 dtsets = []
567 589 data = []
568 590 #print("self.dsList: ", self.dsList)
569 591 for dsInfo in self.dsList:
570 592 if dsInfo['nDim'] == 0:
571 593 ds = grp.create_dataset(
572 594 self.getLabel(dsInfo['variable']),
573 595 (self.blocksPerFile, ),
574 596 chunks=True,
575 597 dtype=numpy.float64)
576 598 dtsets.append(ds)
577 599 data.append((dsInfo['variable'], -1))
578 600 else:
579 601 label = self.getLabel(dsInfo['variable'])
580 602 if label is not None:
581 603 sgrp = grp.create_group(label)
582 604 else:
583 605 sgrp = grp
584 606 if self.uniqueChannel: #Creates extra dimension to avoid the creation of multiple channels
585 607 setattr(self.dataOut, dsInfo['variable'], numpy.expand_dims(getattr(self.dataOut, dsInfo['variable']), axis=0))
586 608 for i in range(dsInfo['dsNumber']):
587 609 ds = sgrp.create_dataset(
588 610 self.getLabel(dsInfo['variable'], i),
589 611 (self.blocksPerFile, ) + dsInfo['shape'][1:],
590 612 chunks=True,
591 613 dtype=dsInfo['dtype'])
592 614 dtsets.append(ds)
593 615 data.append((dsInfo['variable'], i))
594 616
595 617 fp.flush()
596 618
597 619 log.log('Creating file: {}'.format(fp.filename), self.name)
598 620
599 621 self.ds = dtsets
600 622 self.data = data
601 623 self.firsttime = True
602 624 self.blockIndex = 0
603 625 return
604 626
605 627 def putData(self):
606 628
607 629 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
608 630 self.closeFile()
609 631 self.setNextFile()
610 632
611 633 for i, ds in enumerate(self.ds):
612 634 attr, ch = self.data[i]
613 635 if ch == -1:
614 636 ds[self.blockIndex] = getattr(self.dataOut, attr)
615 637 else:
616 638 if self.uniqueChannel and self.blockIndex != 0: #Creates extra dimension to avoid the creation of multiple channels
617 639 setattr(self.dataOut, attr, numpy.expand_dims(getattr(self.dataOut, attr), axis=0))
618 640 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
619 641 if self.uniqueChannel: #Deletes extra dimension created to avoid the creation of multiple channels
620 642 setattr(self.dataOut, attr, getattr(self.dataOut, attr)[0])
621 643
622 644 self.fp.flush()
623 645 self.blockIndex += 1
624 646 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
625 647
626 648 return
627 649
628 650 def closeFile(self):
629 651
630 652 if self.blockIndex != self.blocksPerFile:
631 653 for ds in self.ds:
632 654 ds.resize(self.blockIndex, axis=0)
633 655
634 656 if self.fp:
635 657 self.fp.flush()
636 658 self.fp.close()
637 659
638 660 def close(self):
639 661
640 662 self.closeFile()
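Since HDFWriter.setup() and run() now forward extra options through **kwargs (via set_kwargs / set_kwargs_obj), a flag such as uniqueChannel is passed like any other parameter. A hedged sketch mirroring the docstring example above follows; the path and attribute lists are placeholders.

# Hypothetical HDFWriter operation; uniqueChannel travels through **kwargs
writer = proc_unit.addOperation(name='HDFWriter')
writer.addParameter(name='path', value='/path/to/hdf5_out')
writer.addParameter(name='blocksPerFile', value='32')
writer.addParameter(name='metadataList', value='heightList,timeZone')
writer.addParameter(name='dataList', value='data_output,utctime')
writer.addParameter(name='uniqueChannel', value='1')  # forwarded by set_kwargs(**kwargs)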
@@ -1,696 +1,700
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13
14 14
15 15 class VoltageReader(JRODataReader, ProcessingUnit):
16 16 """
17 17 This class allows reading voltage data from files in rawdata format (.r). Reading
18 18 is always done in blocks. The data read (a 3-dimensional array:
19 19 profiles*heights*channels) is stored in the "buffer" variable.
20 20
21 21 profiles * heights * channels
22 22
23 23 This class contains instances (objects) of the BasicHeader, SystemHeader,
24 24 RadarControllerHeader and Voltage classes. The first three are used to store data
25 25 header information (metadata), and the fourth (Voltage) to obtain and store a profile of
26 26 data from the "buffer" each time the "getData" method is called.
27 27
28 28 Example:
29 29
30 30 dpath = "/home/myuser/data"
31 31
32 32 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
33 33
34 34 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
35 35
36 36 readerObj = VoltageReader()
37 37
38 38 readerObj.setup(dpath, startTime, endTime)
39 39
40 40 while(True):
41 41
42 42 #to get one profile
43 43 profile = readerObj.getData()
44 44
 45 45             #print the profile
 46 46             print(profile)
 47 47
 48 48             #If you want to see the whole data block
 49 49             print(readerObj.datablock)
50 50
51 51 if readerObj.flagNoMoreFiles:
52 52 break
53 53
54 54 """
55 55
56 56 def __init__(self):
57 57 """
 58 58         Initializer of the VoltageReader class for reading voltage data.
 59 59
 60 60         Input:
 61 61             dataOut : Object of the Voltage class. This object is used to
 62 62                       store one profile of data every time a request is made
 63 63                       (getData). The profile is taken from the data buffer;
 64 64                       if the buffer is empty, a new data block is read from
 65 65                       the file.
 66 66                       If this parameter is not given, one is created internally.
 67 67
 68 68         Affected variables:
69 69 self.dataOut
70 70
71 71 Return:
72 72 None
73 73 """
74 74
75 75 ProcessingUnit.__init__(self)
76 76
77 77 self.ext = ".r"
78 78 self.optchar = "D"
79 79 self.basicHeaderObj = BasicHeader(LOCALTIME)
80 80 self.systemHeaderObj = SystemHeader()
81 81 self.radarControllerHeaderObj = RadarControllerHeader()
82 82
83 83 self.processingHeaderObj = ProcessingHeader()
84 84 self.lastUTTime = 0
85 85 self.profileIndex = 2**32 - 1
86 86 self.dataOut = Voltage()
87 87 self.selBlocksize = None
88 88 self.selBlocktime = None
89 89 ##print("1--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
90 90 def createObjByDefault(self):
91 91 ##print("2--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
92 92 dataObj = Voltage()
93 93
94 94 return dataObj
95 95
96 96 def __hasNotDataInBuffer(self):
97 97 ##print("3--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
98 98 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
99 99 return 1
100 100
101 101 return 0
102 102
103 103 def getBlockDimension(self):
104 104 """
 105 105         Computes the number of points to read for each data block
106 106
107 107 Affected:
108 108 self.blocksize
109 109
110 110 Return:
111 111 None
112 112 """
113 113 ##print("4--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
114 114 pts2read = self.processingHeaderObj.profilesPerBlock * \
115 115 self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
116 116 self.blocksize = pts2read
117 117
118 118 def readBlock(self):
119 119
120 120 """
 121 121         readBlock reads a data block from the current position of the file pointer
 122 122         (self.fp) and updates every parameter related to that data block
 123 123         (metadata + data). The data read is stored in the buffer and the buffer
 124 124         counter is reset to 0.
125 125
126 126 Inputs:
127 127 None
128 128
129 129 Return:
130 130 None
131 131
132 132 Affected:
133 133 self.profileIndex
134 134 self.datablock
135 135 self.flagIsNewFile
136 136 self.flagIsNewBlock
137 137 self.nTotalBlocks
138 138
139 139 Exceptions:
 140 140             If a block that was read is not a valid block
141 141 """
142 142 ##print("5--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
143 143 # if self.server is not None:
144 144 # self.zBlock = self.receiver.recv()
145 145 # self.zHeader = self.zBlock[:24]
146 146 # self.zDataBlock = self.zBlock[24:]
147 147 # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
148 148 # self.processingHeaderObj.profilesPerBlock = 240
149 149 # self.processingHeaderObj.nHeights = 248
150 150 # self.systemHeaderObj.nChannels
151 151 # else:
152 152 current_pointer_location = self.fp.tell()
153 153 junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
154 154
155 155 try:
156 156 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
157 157 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
158 158 except:
159 159 # print "The read block (%3d) has not enough data" %self.nReadBlocks
160 160
161 161 if self.waitDataBlock(pointer_location=current_pointer_location):
162 162 junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
163 163 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
164 164 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
165 165 # return 0
166 166
167 167 # Dimensions : nChannels, nProfiles, nSamples
168 168
169 169 junk = numpy.transpose(junk, (2, 0, 1))
170 170 self.datablock = junk['real'] + junk['imag'] * 1j
171 171
172 172 self.profileIndex = 0
173 173
174 174 self.flagIsNewFile = 0
175 175 self.flagIsNewBlock = 1
176 176
177 177 self.nTotalBlocks += 1
178 178 self.nReadBlocks += 1
179 179
180 180 return 1
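For reference, a standalone sketch of the decoding steps performed by readBlock(), using synthetic sizes and zeros in place of numpy.fromfile: interleaved real/imag integers are reshaped to (profiles, heights, channels), transposed to (channels, profiles, heights) and combined into a complex datablock.

    # Minimal sketch of the readBlock() decoding steps (synthetic data, illustrative sizes)
    import numpy

    profiles, heights, channels = 4, 5, 2
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

    raw = numpy.zeros(profiles * heights * channels, dtype=dtype)   # stands in for numpy.fromfile(fp, dtype, blocksize)
    junk = raw.reshape((profiles, heights, channels))
    junk = numpy.transpose(junk, (2, 0, 1))                         # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j
    print(datablock.shape)                                          # (2, 4, 5)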
181 181
182 182 def getFirstHeader(self):
183 183 ##print("6--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
184 184
185 185 self.getBasicHeader()
186 186
187 187 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
188 188
189 189 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
190 190
191 191 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
192 192
193 193 #self.dataOut.ippSeconds_general=self.radarControllerHeaderObj.ippSeconds
194 194 #print(self.nTxs)
195 195 if self.nTxs > 1:
196 196 #print(self.radarControllerHeaderObj.ippSeconds)
197 197 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
198 198 #print(self.radarControllerHeaderObj.ippSeconds)
 199 199         # Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.
200 200
201 201 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
202 202 #
203 203 # if self.radarControllerHeaderObj.code is not None:
204 204 #
205 205 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
206 206 #
207 207 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
208 208 #
209 209 # self.dataOut.code = self.radarControllerHeaderObj.code
210 210
211 211 self.dataOut.dtype = self.dtype
212 212
213 213 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
214 214
215 215 self.dataOut.heightList = numpy.arange(
216 216 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
217 217
218 218 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
219 219
220 220 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
221 221
 222 222         # assume the data has not been decoded
223 223 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
224 224
 225 225         # assume the data has not been deflipped
226 226 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
227 227
228 228 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
229 229
230 230 def reshapeData(self):
231 231 ##print("7--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
232 232 if self.nTxs < 0:
233 233 return
234 234
235 235 if self.nTxs == 1:
236 236 return
237 237
238 238 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
239 239 raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
240 240 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock))
241 241
242 242 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
243 243 raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % (
244 244 self.nTxs, self.processingHeaderObj.nHeights))
245 245
246 246 self.datablock = self.datablock.reshape(
247 247 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, int(self.processingHeaderObj.nHeights / self.nTxs)))
248 248
249 249 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs
250 250 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
251 251 self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
252 252 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
253 253
254 254 return
255 255
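A short sketch of what reshapeData() does when nTxs > 1 (sizes are illustrative): each profile is split into nTxs sub-profiles, so the profile count grows by nTxs while the height count shrinks by the same factor.

    import numpy

    nChannels, profilesPerBlock, nHeights, nTxs = 2, 10, 90, 3
    datablock = numpy.zeros((nChannels, profilesPerBlock, nHeights), dtype='complex')
    reshaped = datablock.reshape((nChannels, profilesPerBlock * nTxs, nHeights // nTxs))
    print(reshaped.shape)   # (2, 30, 30)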
256 256 def readFirstHeaderFromServer(self):
257 257
258 258 ##print("8--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
259 259 self.getFirstHeader()
260 260
261 261 self.firstHeaderSize = self.basicHeaderObj.size
262 262
263 263 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
264 264 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
265 265 if datatype == 0:
266 266 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
267 267 elif datatype == 1:
268 268 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
269 269 elif datatype == 2:
270 270 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
271 271 elif datatype == 3:
272 272 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
273 273 elif datatype == 4:
274 274 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
275 275 elif datatype == 5:
276 276 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
277 277 else:
278 278 raise ValueError('Data type was not defined')
279 279
280 280 self.dtype = datatype_str
281 281 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
282 282 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
283 283 self.firstHeaderSize + self.basicHeaderSize * \
284 284 (self.processingHeaderObj.dataBlocksPerFile - 1)
285 285 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
286 286 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
287 287 self.getBlockDimension()
288 288
289 289 def getFromServer(self):
290 290 ##print("9--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
291 291 self.flagDiscontinuousBlock = 0
292 292 self.profileIndex = 0
293 293 self.flagIsNewBlock = 1
294 294 self.dataOut.flagNoData = False
295 295 self.nTotalBlocks += 1
296 296 self.nReadBlocks += 1
297 297 self.blockPointer = 0
298 298
299 block = self.receiver.recv()
299 topic, block = self.receiver.recv_multipart()
300 300
301 301 self.basicHeaderObj.read(block[self.blockPointer:])
302 302 self.blockPointer += self.basicHeaderObj.length
303 303 self.systemHeaderObj.read(block[self.blockPointer:])
304 304 self.blockPointer += self.systemHeaderObj.length
305 305 self.radarControllerHeaderObj.read(block[self.blockPointer:])
306 306 self.blockPointer += self.radarControllerHeaderObj.length
307 307 self.processingHeaderObj.read(block[self.blockPointer:])
308 308 self.blockPointer += self.processingHeaderObj.length
309 309 self.readFirstHeaderFromServer()
310 310
311 311 timestamp = self.basicHeaderObj.get_datatime()
312 print('[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp))
312 print('[Receiving] - Block {} - {} from {}'.format(self.nTotalBlocks, timestamp, topic.decode()))
313 if self.nTotalBlocks == self.processingHeaderObj.dataBlocksPerFile:
314 self.nTotalBlocks = 0
315 self.nReadBlocks = 0
316 print('Receiving the next stream...')
313 317 current_pointer_location = self.blockPointer
 314 318         junk = numpy.frombuffer(
 315 319             block[self.blockPointer:], self.dtype, self.blocksize)
316 320
317 321 try:
318 322 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
319 323 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
320 324 except:
321 325 # print "The read block (%3d) has not enough data" %self.nReadBlocks
322 326 if self.waitDataBlock(pointer_location=current_pointer_location):
 323 327                 junk = numpy.frombuffer(
 324 328                     block[self.blockPointer:], self.dtype, self.blocksize)
325 329 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
326 330 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
327 331 # return 0
328 332
329 333 # Dimensions : nChannels, nProfiles, nSamples
330 334
331 335 junk = numpy.transpose(junk, (2, 0, 1))
332 336 self.datablock = junk['real'] + junk['imag'] * 1j
333 337 self.profileIndex = 0
334 338 if self.selBlocksize == None:
335 339 self.selBlocksize = self.dataOut.nProfiles
336 340 if self.selBlocktime != None:
337 341 if self.dataOut.nCohInt is not None:
338 342 nCohInt = self.dataOut.nCohInt
339 343 else:
340 344 nCohInt = 1
341 345 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
342 346 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
343 347 self.dataOut.data = self.datablock[:,
344 348 self.profileIndex:self.profileIndex + self.selBlocksize, :]
345 349 datasize = self.dataOut.data.shape[1]
346 350 if datasize < self.selBlocksize:
347 351 buffer = numpy.zeros(
348 352 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
349 353 buffer[:, :datasize, :] = self.dataOut.data
350 354 self.dataOut.data = buffer
 351 355             self.profileIndex = datasize
352 356
353 357 self.dataOut.flagDataAsBlock = True
354 358 self.flagIsNewBlock = 1
355 359 self.dataOut.realtime = self.online
356 360
357 361 return self.dataOut.data
358 362
359 363 def getData(self):
360 364 """
 361 365         getData gets one unit of data from the read buffer, a single profile, and copies it to the
 362 366         self.dataOut object of type "Voltage" along with all its associated parameters (metadata).
 363 367         When there is no data left in the read buffer, a new data block must be read using
 364 368         "readNextBlock"
 365 369
 366 370         It also increments the buffer counter "self.profileIndex" by 1.
 367 371
 368 372         Return:
 369 373
 370 374             If the self.getByBlock flag is set, the whole block is copied to self.dataOut and self.profileIndex
 371 375             equals the total number of profiles read from the file.
 372 376
 373 377             If self.getByBlock == False:
 374 378
 375 379                 self.dataOut.data = buffer[:, thisProfile, :]
 376 380
 377 381                 shape = [nChannels, nHeis]
 378 382
 379 383             If self.getByBlock == True:
 380 384
 381 385                 self.dataOut.data = buffer[:, :, :]
 382 386
 383 387                 shape = [nChannels, nProfiles, nHeis]
 384 388
 385 389         Affected variables:
386 390 self.dataOut
387 391 self.profileIndex
388 392
389 393 Affected:
390 394 self.dataOut
391 395 self.profileIndex
392 396 self.flagDiscontinuousBlock
393 397 self.flagIsNewBlock
394 398 """
395 399
396 400 ##print("10--OKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK")
397 401 if self.flagNoMoreFiles:
398 402 self.dataOut.flagNoData = True
399 403 return 0
400 404 self.flagDiscontinuousBlock = 0
401 405 self.flagIsNewBlock = 0
402 406 if self.__hasNotDataInBuffer():
403 407 if not(self.readNextBlock()):
404 408 return 0
405 409
406 410 self.getFirstHeader()
407 411
408 412 self.reshapeData()
409 413 if self.datablock is None:
410 414 self.dataOut.flagNoData = True
411 415 return 0
412 416
413 417 if not self.getByBlock:
414 418
415 419 """
416 420 Return profile by profile
417 421
418 422 If nTxs > 1 then one profile is divided by nTxs and number of total
419 423 blocks is increased by nTxs (nProfiles *= nTxs)
420 424 """
421 425 self.dataOut.flagDataAsBlock = False
422 426 self.dataOut.data = self.datablock[:, self.profileIndex, :]
423 427 self.dataOut.profileIndex = self.profileIndex
424 428
425 429
426 430 self.profileIndex += 1
427 431
428 432 else:
429 433 """
430 434 Return a block
431 435 """
432 436 if self.selBlocksize == None:
433 437 self.selBlocksize = self.dataOut.nProfiles
434 438 if self.selBlocktime != None:
435 439 if self.dataOut.nCohInt is not None:
436 440 nCohInt = self.dataOut.nCohInt
437 441 else:
438 442 nCohInt = 1
439 443 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
440 444 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
441 445
442 446 self.dataOut.data = self.datablock[:,
443 447 self.profileIndex:self.profileIndex + self.selBlocksize, :]
444 448 self.profileIndex += self.selBlocksize
445 449 datasize = self.dataOut.data.shape[1]
446 450
447 451 if datasize < self.selBlocksize:
448 452 buffer = numpy.zeros(
449 453 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
450 454 buffer[:, :datasize, :] = self.dataOut.data
451 455
452 456 while datasize < self.selBlocksize: # Not enough profiles to fill the block
453 457 if not(self.readNextBlock()):
454 458 return 0
455 459 self.getFirstHeader()
456 460 self.reshapeData()
457 461 if self.datablock is None:
458 462 self.dataOut.flagNoData = True
459 463 return 0
460 464 # stack data
461 465 blockIndex = self.selBlocksize - datasize
462 466 datablock1 = self.datablock[:, :blockIndex, :]
463 467
464 468 buffer[:, datasize:datasize +
465 469 datablock1.shape[1], :] = datablock1
466 470 datasize += datablock1.shape[1]
467 471
468 472 self.dataOut.data = buffer
469 473 self.profileIndex = blockIndex
470 474
471 475 self.dataOut.flagDataAsBlock = True
472 476 self.dataOut.nProfiles = self.dataOut.data.shape[1]
473 477
474 478 #######################DP#######################
475 479 self.dataOut.CurrentBlock=self.nReadBlocks
476 480 self.dataOut.LastBlock=self.processingHeaderObj.dataBlocksPerFile
477 481 #######################DP#######################
478 482 self.dataOut.flagNoData = False
479 483
480 484 #self.getBasicHeader()
481 485
482 486 self.dataOut.realtime = self.online
483 487
484 488 return self.dataOut.data
485 489
486 490
487 491 @MPDecorator
488 492 class VoltageWriter(JRODataWriter, Operation):
489 493 """
 490 494     This class writes voltage data to processed files (.r). Data is always
 491 495     written in blocks.
492 496 """
493 497
494 498 ext = ".r"
495 499
496 500 optchar = "D"
497 501
498 502 shapeBuffer = None
499 503
500 504 def __init__(self):#, **kwargs):
501 505 """
 502 506         Initializer of the VoltageWriter class for writing voltage data.
503 507
504 508 Affected:
505 509 self.dataOut
506 510
507 511 Return: None
508 512 """
509 513 Operation.__init__(self)#, **kwargs)
510 514
511 515 self.nTotalBlocks = 0
512 516
513 517 self.profileIndex = 0
514 518
515 519 self.isConfig = False
516 520
517 521 self.fp = None
518 522
519 523 self.flagIsNewFile = 1
520 524
521 525 self.blockIndex = 0
522 526
523 527 self.flagIsNewBlock = 0
524 528
525 529 self.setFile = None
526 530
527 531 self.dtype = None
528 532
529 533 self.path = None
530 534
531 535 self.filename = None
532 536
533 537 self.basicHeaderObj = BasicHeader(LOCALTIME)
534 538
535 539 self.systemHeaderObj = SystemHeader()
536 540
537 541 self.radarControllerHeaderObj = RadarControllerHeader()
538 542
539 543 self.processingHeaderObj = ProcessingHeader()
540 544
541 545 def hasAllDataInBuffer(self):
542 546 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
543 547 return 1
544 548 return 0
545 549
546 550 def setBlockDimension(self):
547 551 """
 548 552         Gets the dimensional shapes of the data sub-blocks that make up one block
549 553
550 554 Affected:
551 555 self.shape_spc_Buffer
552 556 self.shape_cspc_Buffer
553 557 self.shape_dc_Buffer
554 558
555 559 Return: None
556 560 """
557 561 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
558 562 self.processingHeaderObj.nHeights,
559 563 self.systemHeaderObj.nChannels)
560 564
561 565 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
562 566 self.processingHeaderObj.profilesPerBlock,
563 567 self.processingHeaderObj.nHeights),
564 568 dtype=numpy.dtype('complex64'))
565 569
566 570 def writeBlock(self):
567 571 """
 568 572         Writes the buffer to the designated file
569 573
570 574 Affected:
571 575 self.profileIndex
572 576 self.flagIsNewFile
573 577 self.flagIsNewBlock
574 578 self.nTotalBlocks
575 579 self.blockIndex
576 580
577 581 Return: None
578 582 """
579 583 data = numpy.zeros(self.shapeBuffer, self.dtype)
580 584
581 585 junk = numpy.transpose(self.datablock, (1, 2, 0))
582 586
583 587 data['real'] = junk.real
584 588 data['imag'] = junk.imag
585 589
586 590 data = data.reshape((-1))
587 591
588 592 data.tofile(self.fp)
589 593
590 594 self.datablock.fill(0)
591 595
592 596 self.profileIndex = 0
593 597 self.flagIsNewFile = 0
594 598 self.flagIsNewBlock = 1
595 599
596 600 self.blockIndex += 1
597 601 self.nTotalBlocks += 1
598 602
599 603 # print "[Writing] Block = %04d" %self.blockIndex
600 604
601 605 def putData(self):
602 606 """
 603 607         Fills a data block and then writes it to a file
604 608
605 609 Affected:
606 610 self.flagIsNewBlock
607 611 self.profileIndex
608 612
609 613 Return:
 610 614             0 : If there is no data or no more files can be written
 611 615             1 : If a data block was written to a file
612 616 """
613 617 if self.dataOut.flagNoData:
614 618 return 0
615 619
616 620 self.flagIsNewBlock = 0
617 621
618 622 if self.dataOut.flagDiscontinuousBlock:
619 623 self.datablock.fill(0)
620 624 self.profileIndex = 0
621 625 self.setNextFile()
622 626
623 627 if self.profileIndex == 0:
624 628 self.setBasicHeader()
625 629
626 630 if not self.dataOut.flagDataAsBlock:
627 631 self.datablock[:, self.profileIndex, :] = self.dataOut.data
628 632
629 633 self.profileIndex += 1
630 634 else:
631 635 self.datablock[:,:,:] = self.dataOut.data
632 636 self.profileIndex = self.processingHeaderObj.profilesPerBlock
633 637
634 638 if self.hasAllDataInBuffer():
635 639 # if self.flagIsNewFile:
636 640 self.writeNextBlock()
637 641 # self.setFirstHeader()
638 642
639 643 return 1
640 644
641 645 def __getBlockSize(self):
642 646 '''
 643 647         This method computes the number of bytes in one Voltage data block
644 648 '''
645 649
646 650 dtype_width = self.getDtypeWidth()
647 651
648 652 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
649 653 self.profilesPerBlock * dtype_width * 2)
650 654
651 655 return blocksize
652 656
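A worked example of the __getBlockSize() formula, with illustrative numbers (int16 samples, i.e. dtype_width = 2 bytes); the final factor of 2 accounts for the real and imaginary parts.

    nHeights, nChannels, profilesPerBlock, dtype_width = 500, 3, 100, 2
    blocksize = int(nHeights * nChannels * profilesPerBlock * dtype_width * 2)
    print(blocksize)   # 600000 bytes per block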
653 657 def setFirstHeader(self):
654 658 """
 655 659         Gets a copy of the first header
656 660
657 661 Affected:
658 662 self.systemHeaderObj
659 663 self.radarControllerHeaderObj
660 664 self.dtype
661 665
662 666 Return:
663 667 None
664 668 """
665 669
666 670 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
667 671 self.systemHeaderObj.nChannels = self.dataOut.nChannels
668 672 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
669 673
670 674 self.processingHeaderObj.dtype = 0 # Voltage
671 675 self.processingHeaderObj.blockSize = self.__getBlockSize()
672 676 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
673 677 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
 674 678         # could be 1 or self.dataOut.processingHeaderObj.nWindows
675 679 self.processingHeaderObj.nWindows = 1
676 680 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
 677 681         # When the source data is of type Voltage
 678 682         self.processingHeaderObj.nIncohInt = 1
 679 683         # When the source data is of type Voltage
680 684 self.processingHeaderObj.totalSpectra = 0
681 685
682 686 if self.dataOut.code is not None:
683 687 self.processingHeaderObj.code = self.dataOut.code
684 688 self.processingHeaderObj.nCode = self.dataOut.nCode
685 689 self.processingHeaderObj.nBaud = self.dataOut.nBaud
686 690
687 691 if self.processingHeaderObj.nWindows != 0:
688 692 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
689 693 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
690 694 self.dataOut.heightList[0]
691 695 self.processingHeaderObj.nHeights = self.dataOut.nHeights
692 696 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
693 697
694 698 self.processingHeaderObj.processFlags = self.getProcessFlags()
695 699
696 700 self.setBasicHeader()
@@ -1,252 +1,252
1 1 '''
 2 2 Base classes to create processing units and operations. The MPDecorator
 3 3 must be used in plotting and writing operations to allow them to run as an
 4 4 external process.
5 5 '''
6 6
7 7 import os
8 8 import inspect
9 9 import zmq
10 10 import time
11 11 import pickle
12 12 import traceback
13 13 from threading import Thread
14 14 from multiprocessing import Process, Queue
15 15 from schainpy.utils import log
16 16
17 import copy
18
19 17 QUEUE_SIZE = int(os.environ.get('QUEUE_MAX_SIZE', '10'))
20 18
21 19 class ProcessingUnit(object):
22 20 '''
23 21 Base class to create Signal Chain Units
24 22 '''
25 23
26 24 proc_type = 'processing'
25 bypass = False
27 26
28 27 def __init__(self):
29 28
30 29 self.dataIn = None
31 30 self.dataOut = None
32 31 self.isConfig = False
33 32 self.operations = []
34 33 self.name = 'Test'
35 34 self.inputs = []
36 35
37 36 def setInput(self, unit):
38 37
39 38 attr = 'dataIn'
40 39 for i, u in enumerate(unit):
41 40 if i==0:
42 41 #print(u.dataOut.flagNoData)
43 42 #exit(1)
44 43 self.dataIn = u.dataOut#.copy()
45 44 self.inputs.append('dataIn')
46 45 else:
47 46 setattr(self, 'dataIn{}'.format(i), u.dataOut)#.copy())
48 47 self.inputs.append('dataIn{}'.format(i))
49 48
50 49
51 50 def getAllowedArgs(self):
52 51 if hasattr(self, '__attrs__'):
53 52 return self.__attrs__
54 53 else:
55 54 return inspect.getargspec(self.run).args
56 55
57 56 def addOperation(self, conf, operation):
58 57 '''
59 58 '''
60 59
61 60 self.operations.append((operation, conf.type, conf.getKwargs()))
62 61
63 62 def getOperationObj(self, objId):
64 63
65 64 if objId not in list(self.operations.keys()):
66 65 return None
67 66
68 67 return self.operations[objId]
69 68
70 69 def call(self, **kwargs):
71 70 '''
72 71 '''
73 72
74 73 try:
75 74 if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
76 75 #if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error and not self.dataIn.runNextUnit:
77 76 if self.dataIn.runNextUnit:
78 77 #print("SUCCESSSSSSS")
79 78 #exit(1)
80 79 return not self.dataIn.isReady()
81 80 else:
82 81 return self.dataIn.isReady()
83 82 elif self.dataIn is None or not self.dataIn.error:
84 #print([getattr(self, at) for at in self.inputs])
85 #print("Elif 1")
83 if 'Reader' in self.name and self.bypass:
84 print('Skipping...reader')
85 return self.dataOut.isReady()
86 86 self.run(**kwargs)
87 87 elif self.dataIn.error:
88 88 #print("Elif 2")
89 89 self.dataOut.error = self.dataIn.error
90 90 self.dataOut.flagNoData = True
91 91 except:
92 92 #print("Except")
93 93 err = traceback.format_exc()
94 94 if 'SchainWarning' in err:
95 95 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
96 96 elif 'SchainError' in err:
97 97 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
98 98 else:
99 99 log.error(err, self.name)
100 100 self.dataOut.error = True
101 101 #print("before op")
102 102 for op, optype, opkwargs in self.operations:
103 103 aux = self.dataOut.copy()
104 104 #aux = copy.deepcopy(self.dataOut)
105 105 #print("**********************Before",op)
106 106 if optype == 'other' and not self.dataOut.flagNoData:
107 107 #print("**********************Other",op)
108 108 #print(self.dataOut.flagNoData)
109 109 self.dataOut = op.run(self.dataOut, **opkwargs)
110 110 elif optype == 'external' and not self.dataOut.flagNoData:
111 111 op.queue.put(aux)
112 112 elif optype == 'external' and self.dataOut.error:
113 113 op.queue.put(aux)
114 114 #elif optype == 'external' and self.dataOut.isReady():
115 115 #op.queue.put(copy.deepcopy(self.dataOut))
116 116 #print(not self.dataOut.isReady())
117 117
118 118 try:
119 119 if self.dataOut.runNextUnit:
120 120 runNextUnit = self.dataOut.runNextUnit
121 121 #print(self.operations)
122 122 #print("Tru")
123 123
124 124 else:
125 125 runNextUnit = self.dataOut.isReady()
126 126 except:
127 127 runNextUnit = self.dataOut.isReady()
128 128 #exit(1)
129 129 #if not self.dataOut.isReady():
130 130 #return 'Error' if self.dataOut.error else input()
131 131 #print("NexT",runNextUnit)
132 132 #print("error: ",self.dataOut.error)
133 133 return 'Error' if self.dataOut.error else runNextUnit# self.dataOut.isReady()
134 134
135 135 def setup(self):
136 136
137 137 raise NotImplementedError
138 138
139 139 def run(self):
140 140
141 141 raise NotImplementedError
142 142
143 143 def close(self):
144 144
145 145 return
146 146
147 147
148 148 class Operation(object):
149 149
150 150 '''
151 151 '''
152 152
153 153 proc_type = 'operation'
154 154
155 155 def __init__(self):
156 156
157 157 self.id = None
158 158 self.isConfig = False
159 159
160 160 if not hasattr(self, 'name'):
161 161 self.name = self.__class__.__name__
162 162
163 163 def getAllowedArgs(self):
164 164 if hasattr(self, '__attrs__'):
165 165 return self.__attrs__
166 166 else:
167 167 return inspect.getargspec(self.run).args
168 168
169 169 def setup(self):
170 170
171 171 self.isConfig = True
172 172
173 173 raise NotImplementedError
174 174
175 175 def run(self, dataIn, **kwargs):
176 176 """
 177 177         Performs the required operations on dataIn.data and updates the
 178 178         attributes of the dataIn object.
179 179
180 180 Input:
181 181
182 182 dataIn : objeto del tipo JROData
183 183
184 184 Return:
185 185
186 186 None
187 187
188 188 Affected:
 189 189             __buffer : data reception buffer.
190 190
191 191 """
192 192 if not self.isConfig:
193 193 self.setup(**kwargs)
194 194
195 195 raise NotImplementedError
196 196
197 197 def close(self):
198 198
199 199 return
200 200
201 201
202 202 def MPDecorator(BaseClass):
203 203 """
204 204 Multiprocessing class decorator
205 205
 206 206     This function adds multiprocessing features to a BaseClass.
207 207 """
208 208
209 209 class MPClass(BaseClass, Process):
210 210
211 211 def __init__(self, *args, **kwargs):
212 212 super(MPClass, self).__init__()
213 213 Process.__init__(self)
214 214
215 215 self.args = args
216 216 self.kwargs = kwargs
217 217 self.t = time.time()
218 218 self.op_type = 'external'
219 219 self.name = BaseClass.__name__
220 220 self.__doc__ = BaseClass.__doc__
221 221
222 222 if 'plot' in self.name.lower() and not self.name.endswith('_'):
223 223 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
224 224
225 225 self.start_time = time.time()
226 226 self.err_queue = args[3]
227 227 self.queue = Queue(maxsize=QUEUE_SIZE)
228 228 self.myrun = BaseClass.run
229 229
230 230 def run(self):
231 231
232 232 while True:
233 233
234 234 dataOut = self.queue.get()
235 235
236 236 if not dataOut.error:
237 237 try:
238 238 BaseClass.run(self, dataOut, **self.kwargs)
239 239 except:
240 240 err = traceback.format_exc()
241 241 log.error(err, self.name)
242 242 else:
243 243 break
244 244
245 245 self.close()
246 246
247 247 def close(self):
248 248
249 249 BaseClass.close(self)
250 250 log.success('Done...(Time:{:4.2f} secs)'.format(time.time() - self.start_time), self.name)
251 251
252 252 return MPClass
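A hedged usage sketch of the decorator: DummyOp and SimpleData below are illustrative stand-ins, and the four positional arguments only mirror the fact that __init__ reads the error queue from args[3]; in a real project this wiring is done by the ProcessingUnit when an operation is registered with optype 'external'.

    # from schainpy.model.proc.jroproc_base import Operation, MPDecorator  # same module as above
    from multiprocessing import Queue

    class SimpleData(object):
        error = False                         # mimics the flag checked by MPClass.run()

    class DummyOp(Operation):
        def run(self, dataOut, **kwargs):
            print('processing one dataOut object')

    MPDummyOp = MPDecorator(DummyOp)

    if __name__ == '__main__':
        err_queue = Queue()
        op = MPDummyOp(None, None, None, err_queue)   # err_queue is read from args[3]
        op.start()                                    # run() loops over op.queue in a child process
        op.queue.put(SimpleData())                    # one block to process
        stop = SimpleData()
        stop.error = True
        op.queue.put(stop)                            # the error flag ends the loop and closes the op
        op.join()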
@@ -1,1049 +1,1062
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
17 17 from schainpy.model.data.jrodata import Spectra
18 18 from schainpy.model.data.jrodata import hildebrand_sekhon
19 19 from schainpy.utils import log
20 20
21 21
22 22 class SpectraProc(ProcessingUnit):
23 23
24 24 def __init__(self):
25 25
26 26 ProcessingUnit.__init__(self)
27 27
28 28 self.buffer = None
29 29 self.firstdatatime = None
30 30 self.profIndex = 0
31 31 self.dataOut = Spectra()
32 32 self.id_min = None
33 33 self.id_max = None
 34 34         self.setupReq = False #Add this to every processing unit
35 35
36 36 def __updateSpecFromVoltage(self):
37 37
38 38 self.dataOut.timeZone = self.dataIn.timeZone
39 39 self.dataOut.dstFlag = self.dataIn.dstFlag
40 40 self.dataOut.errorCount = self.dataIn.errorCount
41 41 self.dataOut.useLocalTime = self.dataIn.useLocalTime
42 42 try:
43 43 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
44 44 except:
45 45 pass
46 46 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
47 47 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
48 48 self.dataOut.channelList = self.dataIn.channelList
49 49 self.dataOut.heightList = self.dataIn.heightList
50 50 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
51 51 self.dataOut.nProfiles = self.dataOut.nFFTPoints
52 52 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
53 53 self.dataOut.utctime = self.firstdatatime
54 54 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
55 55 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
56 56 self.dataOut.flagShiftFFT = False
57 57 self.dataOut.nCohInt = self.dataIn.nCohInt
58 58 self.dataOut.nIncohInt = 1
59 59 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
60 60 self.dataOut.frequency = self.dataIn.frequency
61 61 self.dataOut.realtime = self.dataIn.realtime
62 62 self.dataOut.azimuth = self.dataIn.azimuth
63 63 self.dataOut.zenith = self.dataIn.zenith
64 64 self.dataOut.beam.codeList = self.dataIn.beam.codeList
65 65 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
66 66 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
67 67 self.dataOut.runNextUnit = self.dataIn.runNextUnit
68 68 try:
69 69 self.dataOut.step = self.dataIn.step
70 70 except:
71 71 pass
72 72
73 73 def __getFft(self):
74 74 """
 75 75         Converts Voltage values into Spectra
76 76
77 77 Affected:
78 78 self.dataOut.data_spc
79 79 self.dataOut.data_cspc
80 80 self.dataOut.data_dc
81 81 self.dataOut.heightList
82 82 self.profIndex
83 83 self.buffer
84 84 self.dataOut.flagNoData
85 85 """
86 86 fft_volt = numpy.fft.fft(
87 87 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
88 88 fft_volt = fft_volt.astype(numpy.dtype('complex'))
89 89 dc = fft_volt[:, 0, :]
90 90
 91 91         # self-spectra computation
92 92 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
93 93 spc = fft_volt * numpy.conjugate(fft_volt)
94 94 spc = spc.real
95 95
96 96 blocksize = 0
97 97 blocksize += dc.size
98 98 blocksize += spc.size
99 99
100 100 cspc = None
101 101 pairIndex = 0
102 102 if self.dataOut.pairsList != None:
 103 103             # cross-spectra computation
104 104 cspc = numpy.zeros(
105 105 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
106 106 for pair in self.dataOut.pairsList:
107 107 if pair[0] not in self.dataOut.channelList:
108 108 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
109 109 str(pair), str(self.dataOut.channelList)))
110 110 if pair[1] not in self.dataOut.channelList:
111 111 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
112 112 str(pair), str(self.dataOut.channelList)))
113 113
114 114 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
115 115 numpy.conjugate(fft_volt[pair[1], :, :])
116 116 pairIndex += 1
117 117 blocksize += cspc.size
118 118
119 119 self.dataOut.data_spc = spc
120 120 self.dataOut.data_cspc = cspc
121 121 self.dataOut.data_dc = dc
122 122 self.dataOut.blockSize = blocksize
123 123 self.dataOut.flagShiftFFT = False
124 124
125 125 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False, runNextUnit = 0):
126 126
127 127 self.dataIn.runNextUnit = runNextUnit
128 128 if self.dataIn.type == "Spectra":
129 129
130 130 self.dataOut.copy(self.dataIn)
131 131 if shift_fft:
 132 132                 #shifts the data a given number of positions to the right along the FFT axis
133 133 shift = int(self.dataOut.nFFTPoints/2)
134 134 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
135 135
136 136 if self.dataOut.data_cspc is not None:
 137 137                     #shifts the data a given number of positions to the right along the FFT axis
138 138 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
139 139 if pairsList:
140 140 self.__selectPairs(pairsList)
141 141
142 142 elif self.dataIn.type == "Voltage":
143 143
144 144 self.dataOut.flagNoData = True
145 145
146 146 if nFFTPoints == None:
 147 147                 raise ValueError("This SpectraProc.run() needs the nFFTPoints input variable")
148 148
149 149 if nProfiles == None:
150 150 nProfiles = nFFTPoints
151 151 #print(self.dataOut.ipp)
152 152 #exit(1)
153 153 if ippFactor == None:
154 154 self.dataOut.ippFactor = 1
155 155 #if ippFactor is not None:
156 156 #self.dataOut.ippFactor = ippFactor
157 157 #print(ippFactor)
158 158 #print(self.dataOut.ippFactor)
159 159 #exit(1)
160 160
161 161 self.dataOut.nFFTPoints = nFFTPoints
162 162
163 163 if self.buffer is None:
164 164 self.buffer = numpy.zeros((self.dataIn.nChannels,
165 165 nProfiles,
166 166 self.dataIn.nHeights),
167 167 dtype='complex')
168 168
169 169 if self.dataIn.flagDataAsBlock:
170 170 nVoltProfiles = self.dataIn.data.shape[1]
171 171
172 172 if nVoltProfiles == nProfiles:
173 173 self.buffer = self.dataIn.data.copy()
174 174 self.profIndex = nVoltProfiles
175 175
176 176 elif nVoltProfiles < nProfiles:
177 177
178 178 if self.profIndex == 0:
179 179 self.id_min = 0
180 180 self.id_max = nVoltProfiles
181 181 #print(self.id_min)
182 182 #print(self.id_max)
183 183 #print(numpy.shape(self.buffer))
184 184 self.buffer[:, self.id_min:self.id_max,
185 185 :] = self.dataIn.data
186 186 self.profIndex += nVoltProfiles
187 187 self.id_min += nVoltProfiles
188 188 self.id_max += nVoltProfiles
189 elif nVoltProfiles > nProfiles:
190 self.reader.bypass = True
191 if self.profIndex == 0:
192 self.id_min = 0
193 self.id_max = nProfiles
194
195 self.buffer = self.dataIn.data[:, self.id_min:self.id_max,:]
196 self.profIndex += nProfiles
197 self.id_min += nProfiles
198 self.id_max += nProfiles
199 if self.id_max == nVoltProfiles:
200 self.reader.bypass = False
189 201 else:
 190 202                 raise ValueError("The type object %s has %d profiles, it should have just %d profiles" % (
191 203 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
192 204 self.dataOut.flagNoData = True
193 205 else:
194 206 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
195 207 self.profIndex += 1
196 208
197 209 if self.firstdatatime == None:
198 210 self.firstdatatime = self.dataIn.utctime
199 211
200 if self.profIndex == nProfiles:
212 if self.profIndex % nProfiles == 0:
201 213 self.__updateSpecFromVoltage()
202 214 if pairsList == None:
203 215 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
204 216 else:
205 217 self.dataOut.pairsList = pairsList
206 218 self.__getFft()
207 219 self.dataOut.flagNoData = False
208 220 self.firstdatatime = None
209 self.profIndex = 0
221 if not self.reader.bypass:
222 self.profIndex = 0
210 223 else:
 211 224             raise ValueError("The type of input object '{}' is not valid".format(
212 225 self.dataIn.type))
213 226
214 227
215 228 def __selectPairs(self, pairsList):
216 229
217 230 if not pairsList:
218 231 return
219 232
220 233 pairs = []
221 234 pairsIndex = []
222 235
223 236 for pair in pairsList:
224 237 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
225 238 continue
226 239 pairs.append(pair)
227 240 pairsIndex.append(pairs.index(pair))
228 241
229 242 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
230 243 self.dataOut.pairsList = pairs
231 244
232 245 return
233 246
234 247 def selectFFTs(self, minFFT, maxFFT ):
235 248 """
 236 249         Selects a block of data based on a group of FFT point values within the range
 237 250         minFFT <= FFT <= maxFFT
238 251 """
239 252
240 253 if (minFFT > maxFFT):
241 254 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
242 255
243 256 if (minFFT < self.dataOut.getFreqRange()[0]):
244 257 minFFT = self.dataOut.getFreqRange()[0]
245 258
246 259 if (maxFFT > self.dataOut.getFreqRange()[-1]):
247 260 maxFFT = self.dataOut.getFreqRange()[-1]
248 261
249 262 minIndex = 0
250 263 maxIndex = 0
251 264 FFTs = self.dataOut.getFreqRange()
252 265
253 266 inda = numpy.where(FFTs >= minFFT)
254 267 indb = numpy.where(FFTs <= maxFFT)
255 268
256 269 try:
257 270 minIndex = inda[0][0]
258 271 except:
259 272 minIndex = 0
260 273
261 274 try:
262 275 maxIndex = indb[0][-1]
263 276 except:
264 277 maxIndex = len(FFTs)
265 278
266 279 self.selectFFTsByIndex(minIndex, maxIndex)
267 280
268 281 return 1
269 282
270 283 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
271 284 newheis = numpy.where(
272 285 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
273 286
274 287 if hei_ref != None:
275 288 newheis = numpy.where(self.dataOut.heightList > hei_ref)
276 289
277 290 minIndex = min(newheis[0])
278 291 maxIndex = max(newheis[0])
279 292 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
280 293 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
281 294
 282 295         # determine indices
283 296 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
284 297 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
285 298 avg_dB = 10 * \
286 299 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
287 300 beacon_dB = numpy.sort(avg_dB)[-nheis:]
288 301 beacon_heiIndexList = []
289 302 for val in avg_dB.tolist():
290 303 if val >= beacon_dB[0]:
291 304 beacon_heiIndexList.append(avg_dB.tolist().index(val))
292 305
293 306 #data_spc = data_spc[:,:,beacon_heiIndexList]
294 307 data_cspc = None
295 308 if self.dataOut.data_cspc is not None:
296 309 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
297 310 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
298 311
299 312 data_dc = None
300 313 if self.dataOut.data_dc is not None:
301 314 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
302 315 #data_dc = data_dc[:,beacon_heiIndexList]
303 316
304 317 self.dataOut.data_spc = data_spc
305 318 self.dataOut.data_cspc = data_cspc
306 319 self.dataOut.data_dc = data_dc
307 320 self.dataOut.heightList = heightList
308 321 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
309 322
310 323 return 1
311 324
312 325 def selectFFTsByIndex(self, minIndex, maxIndex):
313 326 """
314 327
315 328 """
316 329
317 330 if (minIndex < 0) or (minIndex > maxIndex):
318 331 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
319 332
320 333 if (maxIndex >= self.dataOut.nProfiles):
321 334 maxIndex = self.dataOut.nProfiles-1
322 335
323 336 #Spectra
324 337 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
325 338
326 339 data_cspc = None
327 340 if self.dataOut.data_cspc is not None:
328 341 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
329 342
330 343 data_dc = None
331 344 if self.dataOut.data_dc is not None:
332 345 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
333 346
334 347 self.dataOut.data_spc = data_spc
335 348 self.dataOut.data_cspc = data_cspc
336 349 self.dataOut.data_dc = data_dc
337 350
338 351 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
339 352 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
340 353 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
341 354
342 355 return 1
343 356
344 357 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
345 358 # validacion de rango
346 359 print("NOISeeee")
347 360 if minHei == None:
348 361 minHei = self.dataOut.heightList[0]
349 362
350 363 if maxHei == None:
351 364 maxHei = self.dataOut.heightList[-1]
352 365
353 366 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
354 367 print('minHei: %.2f is out of the heights range' % (minHei))
 355 368             print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
356 369 minHei = self.dataOut.heightList[0]
357 370
358 371 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
359 372 print('maxHei: %.2f is out of the heights range' % (maxHei))
 360 373             print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
361 374 maxHei = self.dataOut.heightList[-1]
362 375
 363 376         # velocity range validation
364 377 velrange = self.dataOut.getVelRange(1)
365 378
366 379 if minVel == None:
367 380 minVel = velrange[0]
368 381
369 382 if maxVel == None:
370 383 maxVel = velrange[-1]
371 384
372 385 if (minVel < velrange[0]) or (minVel > maxVel):
373 386 print('minVel: %.2f is out of the velocity range' % (minVel))
374 387 print('minVel is setting to %.2f' % (velrange[0]))
375 388 minVel = velrange[0]
376 389
377 390 if (maxVel > velrange[-1]) or (maxVel < minVel):
378 391 print('maxVel: %.2f is out of the velocity range' % (maxVel))
379 392 print('maxVel is setting to %.2f' % (velrange[-1]))
380 393 maxVel = velrange[-1]
381 394
 382 395         # index selection for the height range
383 396 minIndex = 0
384 397 maxIndex = 0
385 398 heights = self.dataOut.heightList
386 399
387 400 inda = numpy.where(heights >= minHei)
388 401 indb = numpy.where(heights <= maxHei)
389 402
390 403 try:
391 404 minIndex = inda[0][0]
392 405 except:
393 406 minIndex = 0
394 407
395 408 try:
396 409 maxIndex = indb[0][-1]
397 410 except:
398 411 maxIndex = len(heights)
399 412
400 413 if (minIndex < 0) or (minIndex > maxIndex):
401 414 raise ValueError("some value in (%d,%d) is not valid" % (
402 415 minIndex, maxIndex))
403 416
404 417 if (maxIndex >= self.dataOut.nHeights):
405 418 maxIndex = self.dataOut.nHeights - 1
406 419
 407 420         # index selection for the velocities
408 421 indminvel = numpy.where(velrange >= minVel)
409 422 indmaxvel = numpy.where(velrange <= maxVel)
410 423 try:
411 424 minIndexVel = indminvel[0][0]
412 425 except:
413 426 minIndexVel = 0
414 427
415 428 try:
416 429 maxIndexVel = indmaxvel[0][-1]
417 430 except:
418 431 maxIndexVel = len(velrange)
419 432
 420 433         # spectrum selection
421 434 data_spc = self.dataOut.data_spc[:,
422 435 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
 423 436         # noise estimation
424 437 noise = numpy.zeros(self.dataOut.nChannels)
425 438
426 439 for channel in range(self.dataOut.nChannels):
427 440 daux = data_spc[channel, :, :]
428 441 sortdata = numpy.sort(daux, axis=None)
429 442 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
430 443
431 444 self.dataOut.noise_estimation = noise.copy()
432 445
433 446 return 1
434 447
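getNoise() delegates the per-channel estimate to hildebrand_sekhon() from schainpy.model.data.jrodata; the standalone sketch below is only meant to show the idea behind the Hildebrand & Sekhon (1974) criterion and is not the library implementation.

    import numpy

    def hildebrand_sekhon_sketch(sorted_power, nIncohInt):
        # Grow the set of smallest sorted values while the spectral variance stays
        # consistent with pure noise averaged over nIncohInt incoherent integrations.
        noise = sorted_power[0]
        for k in range(1, len(sorted_power)):
            subset = sorted_power[:k + 1]
            mean = subset.mean()
            var = subset.var()
            if var * nIncohInt > mean ** 2:      # fluctuations too large to be noise only
                break
            noise = mean
        return noise

    sortdata = numpy.sort(numpy.random.exponential(1.0, 512))
    print(hildebrand_sekhon_sketch(sortdata, nIncohInt=10))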
435 448 class GetSNR(Operation):
436 449 '''
437 450 Written by R. Flores
438 451 '''
439 452 """Operation to get SNR.
440 453
441 454 Parameters:
442 455 -----------
443 456
444 457 Example
445 458 --------
446 459
447 460 op = proc_unit.addOperation(name='GetSNR', optype='other')
448 461
449 462 """
450 463
451 464 def __init__(self, **kwargs):
452 465
453 466 Operation.__init__(self, **kwargs)
454 467
455 468
456 469 def run(self,dataOut):
457 470
458 471 noise = dataOut.getNoise()
459 472 #noise = dataOut.getNoise(ymin_index=-10) #Región superior donde solo debería de haber ruido
460 473 #print("Noise: ", noise)
461 474 #print("Noise_dB: ", 10*numpy.log10(noise/dataOut.normFactor))
462 475 #print("Heights: ", dataOut.heightList)
463 476 #dataOut.data_snr = (dataOut.data_spc.sum(axis=1))/(noise[:,None]*dataOut.normFactor)
464 477 ################dataOut.data_snr = (dataOut.data_spc.sum(axis=1))/(noise[:,None]*dataOut.nFFTPoints) #Before 12Jan2023
465 478 #dataOut.data_snr = (dataOut.data_spc.sum(axis=1)-noise[:,None])/(noise[:,None])
466 479 dataOut.data_snr = (dataOut.data_spc.sum(axis=1)-noise[:,None]*dataOut.nFFTPoints)/(noise[:,None]*dataOut.nFFTPoints) #It works apparently
467 480 dataOut.snl = numpy.log10(dataOut.data_snr)
468 481 #print("snl: ", dataOut.snl)
469 482 #exit(1)
470 483 #print(dataOut.heightList[-11])
471 484 #print(numpy.shape(dataOut.heightList))
472 485 #print(dataOut.data_snr)
473 486 #print(dataOut.data_snr[0,-11])
474 487 #exit(1)
475 488 #dataOut.data_snr = numpy.where(10*numpy.log10(dataOut.data_snr)<.5, numpy.nan, dataOut.data_snr)
476 489 #dataOut.data_snr = numpy.where(10*numpy.log10(dataOut.data_snr)<.1, numpy.nan, dataOut.data_snr)
477 490 #dataOut.data_snr = numpy.where(10*numpy.log10(dataOut.data_snr)<.0, numpy.nan, dataOut.data_snr)
478 491 #dataOut.data_snr = numpy.where(dataOut.data_snr<.05, numpy.nan, dataOut.data_snr)
479 492 #dataOut.snl = numpy.where(dataOut.data_snr<.01, numpy.nan, dataOut.snl)
480 493 dataOut.snl = numpy.where(dataOut.snl<-1, numpy.nan, dataOut.snl)
481 494 '''
482 495 import matplotlib.pyplot as plt
483 496 #plt.plot(10*numpy.log10(dataOut.data_snr[0]),dataOut.heightList)
484 497 plt.plot(dataOut.data_snr[0],dataOut.heightList)#,marker='*')
485 498 plt.xlim(-1,10)
486 499 plt.axvline(1,color='k')
487 500 plt.axvline(.1,color='k',linestyle='--')
488 501 plt.grid()
489 502 plt.show()
490 503 '''
491 504 #dataOut.data_snr = 10*numpy.log10(dataOut.data_snr)
492 505 #dataOut.data_snr = numpy.expand_dims(dataOut.data_snr,axis=0)
493 506 #print(dataOut.data_snr.shape)
494 507 #exit(1)
495 508 #print("Before: ", dataOut.data_snr[0])
496 509
497 510
498 511 return dataOut
499 512
500 513 class removeDC(Operation):
501 514
502 515 def run(self, dataOut, mode=2):
503 516 self.dataOut = dataOut
504 517 jspectra = self.dataOut.data_spc
505 518 jcspectra = self.dataOut.data_cspc
506 519
507 520 num_chan = jspectra.shape[0]
508 521 num_hei = jspectra.shape[2]
509 522
510 523 if jcspectra is not None:
511 524 jcspectraExist = True
512 525 num_pairs = jcspectra.shape[0]
513 526 else:
514 527 jcspectraExist = False
515 528
516 529 freq_dc = int(jspectra.shape[1] / 2)
517 530 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
518 531 ind_vel = ind_vel.astype(int)
519 532
520 533 if ind_vel[0] < 0:
521 534 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
522 535
523 536 if mode == 1:
524 537 jspectra[:, freq_dc, :] = (
525 538 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
526 539
527 540 if jcspectraExist:
528 541 jcspectra[:, freq_dc, :] = (
529 542 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
530 543
531 544 if mode == 2:
532 545
533 546 vel = numpy.array([-2, -1, 1, 2])
534 547 xx = numpy.zeros([4, 4])
535 548
536 549 for fil in range(4):
537 550 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
538 551
539 552 xx_inv = numpy.linalg.inv(xx)
540 553 xx_aux = xx_inv[0, :]
541 554
542 555 for ich in range(num_chan):
543 556 yy = jspectra[ich, ind_vel, :]
544 557 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
545 558
546 559 junkid = jspectra[ich, freq_dc, :] <= 0
547 560 cjunkid = sum(junkid)
548 561
549 562 if cjunkid.any():
550 563 jspectra[ich, freq_dc, junkid.nonzero()] = (
551 564 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
552 565
553 566 if jcspectraExist:
554 567 for ip in range(num_pairs):
555 568 yy = jcspectra[ip, ind_vel, :]
556 569 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
557 570
558 571 self.dataOut.data_spc = jspectra
559 572 self.dataOut.data_cspc = jcspectra
560 573
561 574 return self.dataOut
562 575
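A worked sketch of the mode=2 DC correction in removeDC above: the four velocity bins around DC define an interpolating cubic, and the first row of the inverse Vandermonde matrix evaluates that polynomial at velocity 0, which then replaces the DC bin. The spectrum samples are illustrative.

    import numpy

    vel = numpy.array([-2, -1, 1, 2])
    xx = numpy.zeros([4, 4])
    for fil in range(4):
        xx[fil, :] = vel[fil] ** numpy.arange(4)
    xx_aux = numpy.linalg.inv(xx)[0, :]          # weights that evaluate the cubic at velocity 0

    yy = numpy.array([1.0, 2.0, 2.0, 1.0])       # spectrum samples at the four bins (illustrative)
    dc_estimate = numpy.dot(xx_aux, yy)
    print(dc_estimate)                           # interpolated DC value, here about 2.33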
563 576 class removeInterference(Operation):
564 577
565 578 def removeInterference2(self):
566 579
567 580 cspc = self.dataOut.data_cspc
568 581 spc = self.dataOut.data_spc
569 582 Heights = numpy.arange(cspc.shape[2])
570 583 realCspc = numpy.abs(cspc)
571 584
572 585 for i in range(cspc.shape[0]):
573 586 LinePower= numpy.sum(realCspc[i], axis=0)
574 587 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
575 588 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
576 589 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
577 590 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
578 591 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
579 592
580 593
581 594 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
582 595 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
583 596 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
584 597 cspc[i,InterferenceRange,:] = numpy.NaN
585 598
586 599 self.dataOut.data_cspc = cspc
587 600
588 601 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
589 602
590 603 jspectra = self.dataOut.data_spc
591 604 jcspectra = self.dataOut.data_cspc
592 605 jnoise = self.dataOut.getNoise()
593 606 num_incoh = self.dataOut.nIncohInt
594 607
595 608 num_channel = jspectra.shape[0]
596 609 num_prof = jspectra.shape[1]
597 610 num_hei = jspectra.shape[2]
598 611
599 612 # hei_interf
600 613 if hei_interf is None:
601 614 count_hei = int(num_hei / 2)
602 615 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
603 616 hei_interf = numpy.asarray(hei_interf)[0]
604 617 # nhei_interf
605 618 if (nhei_interf == None):
606 619 nhei_interf = 5
607 620 if (nhei_interf < 1):
608 621 nhei_interf = 1
609 622 if (nhei_interf > count_hei):
610 623 nhei_interf = count_hei
611 624 if (offhei_interf == None):
612 625 offhei_interf = 0
613 626
614 627 ind_hei = list(range(num_hei))
615 628 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
616 629 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
617 630 mask_prof = numpy.asarray(list(range(num_prof)))
618 631 num_mask_prof = mask_prof.size
619 632 comp_mask_prof = [0, num_prof / 2]
620 633
 621 634         # noise_exist: determines whether the jnoise variable has been defined and holds the noise information for each channel
622 635 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
623 636 jnoise = numpy.nan
624 637 noise_exist = jnoise[0] < numpy.Inf
625 638
 626 639         # Interference removal subroutine
 627 640         for ich in range(num_channel):
 628 641             # Sort the spectra by power (lowest to highest)
629 642 power = jspectra[ich, mask_prof, :]
630 643 power = power[:, hei_interf]
631 644 power = power.sum(axis=0)
632 645 psort = power.ravel().argsort()
633 646
 634 647             # Estimate the average interference in the power spectra using
635 648 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
636 649 offhei_interf, nhei_interf + offhei_interf))]]]
637 650
638 651 if noise_exist:
639 652 # tmp_noise = jnoise[ich] / num_prof
640 653 tmp_noise = jnoise[ich]
641 654 junkspc_interf = junkspc_interf - tmp_noise
642 655 #junkspc_interf[:,comp_mask_prof] = 0
643 656
644 657 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
645 658 jspc_interf = jspc_interf.transpose()
 646 659             # Compute the average interference spectrum
647 660 noiseid = numpy.where(
648 661 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
649 662 noiseid = noiseid[0]
650 663 cnoiseid = noiseid.size
651 664 interfid = numpy.where(
652 665 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
653 666 interfid = interfid[0]
654 667 cinterfid = interfid.size
655 668
656 669 if (cnoiseid > 0):
657 670 jspc_interf[noiseid] = 0
658 671
 659 672             # Expand the profiles to be cleaned
660 673 if (cinterfid > 0):
661 674 new_interfid = (
662 675 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
663 676 new_interfid = numpy.asarray(new_interfid)
664 677 new_interfid = {x for x in new_interfid}
665 678 new_interfid = numpy.array(list(new_interfid))
666 679 new_cinterfid = new_interfid.size
667 680 else:
668 681 new_cinterfid = 0
669 682
670 683 for ip in range(new_cinterfid):
671 684 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
672 685 jspc_interf[new_interfid[ip]
673 686 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
674 687
675 688 jspectra[ich, :, ind_hei] = jspectra[ich, :,
 676 689                                                   ind_hei] - jspc_interf # Fix indices
677 690
 678 691             # Remove the interference at the point of maximum interference
679 692 ListAux = jspc_interf[mask_prof].tolist()
680 693 maxid = ListAux.index(max(ListAux))
681 694
682 695 if cinterfid > 0:
683 696 for ip in range(cinterfid * (interf == 2) - 1):
684 697 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
685 698 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()[0]
686 699 cind = len(ind)
687 700 
688 701 if (cind > 0):
689 702 jspectra[ich, interfid[ip], ind] = tmp_noise * \
690 703 (1 + (numpy.random.uniform(size=cind) - 0.5) /
691 704 numpy.sqrt(num_incoh))
692 705
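# Rebuild the most contaminated profile (maxid) by interpolation: fit a cubic
# through the four neighbouring profiles at offsets -2, -1, +1, +2 and evaluate
# it at offset 0; the weights are the first row of the inverted power matrix.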
693 706 ind = numpy.array([-2, -1, 1, 2])
694 707 xx = numpy.zeros([4, 4])
695 708
696 709 for id1 in range(4):
697 710 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
698 711
699 712 xx_inv = numpy.linalg.inv(xx)
700 713 xx = xx_inv[:, 0]
701 714 ind = (ind + maxid + num_mask_prof) % num_mask_prof
702 715 yy = jspectra[ich, mask_prof[ind], :]
703 716 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
704 717 yy.transpose(), xx)
705 718
706 719 indAux = (jspectra[ich, :, :] < tmp_noise *
707 720 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
708 721 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
709 722 (1 - 1 / numpy.sqrt(num_incoh))
710 723
711 724 # Interference removal in the cross-spectra
712 725 if jcspectra is None:
713 726 return jspectra, jcspectra
714 727 num_pairs = int(jcspectra.size / (num_prof * num_hei))
715 728 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
716 729
717 730 for ip in range(num_pairs):
718 731
719 732 #-------------------------------------------
720 733
721 734 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
722 735 cspower = cspower[:, hei_interf]
723 736 cspower = cspower.sum(axis=0)
724 737
725 738 cspsort = cspower.ravel().argsort()
726 739 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
727 740 offhei_interf, nhei_interf + offhei_interf))]]]
728 741 junkcspc_interf = junkcspc_interf.transpose()
729 742 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
730 743
731 744 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
732 745
733 746 median_real = int(numpy.median(numpy.real(
734 747 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
735 748 median_imag = int(numpy.median(numpy.imag(
736 749 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
737 750 comp_mask_prof = [int(e) for e in comp_mask_prof]
738 751 junkcspc_interf[comp_mask_prof, :] = complex(
739 752 median_real, median_imag)
740 753
741 754 for iprof in range(num_prof):
742 755 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
743 756 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
744 757
745 758 # Remove the interference
746 759 jcspectra[ip, :, ind_hei] = jcspectra[ip,
747 760 :, ind_hei] - jcspc_interf
748 761
749 762 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
750 763 maxid = ListAux.index(max(ListAux))
751 764
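# Same four-point cubic interpolation as above, now applied to the cross-spectra
# profile with the strongest interference.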
752 765 ind = numpy.array([-2, -1, 1, 2])
753 766 xx = numpy.zeros([4, 4])
754 767
755 768 for id1 in range(4):
756 769 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
757 770
758 771 xx_inv = numpy.linalg.inv(xx)
759 772 xx = xx_inv[:, 0]
760 773
761 774 ind = (ind + maxid + num_mask_prof) % num_mask_prof
762 775 yy = jcspectra[ip, mask_prof[ind], :]
763 776 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
764 777
765 778 # Save results
766 779 self.dataOut.data_spc = jspectra
767 780 self.dataOut.data_cspc = jcspectra
768 781
769 782 return 1
770 783
771 784 def run(self, dataOut, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
772 785
773 786 self.dataOut = dataOut
774 787
775 788 if mode == 1:
776 789 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
777 790 elif mode == 2:
778 791 self.removeInterference2()
779 792
780 793 return self.dataOut
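# Example (sketch): a typical way to attach this interference-removal operation
# through the Project API used in this module. The operation name is assumed to
# match the class name of this operation, and the parameter values are
# illustrative only.
#
#   op = proc_unit.addOperation(name='removeInterference')
#   op.addParameter(name='mode', value='1')
#   op.addParameter(name='nhei_interf', value='5')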
781 794
782 795 class removeInterferenceAtFreq(Operation):
783 796 '''
784 797 Written by R. Flores
785 798 '''
786 799 """Operation to remove interfernce at a known frequency(s).
787 800
788 801 Parameters:
789 802 -----------
790 803 None
791 804
792 805 Example
793 806 --------
794 807
795 808 op = proc_unit.addOperation(name='removeInterferenceAtFreq')
796 809
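A single frequency or a list of frequencies can be passed as parameters of
this operation; the values below are illustrative only:

op.addParameter(name='freqList', value='[-120, 120]')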
797 810 """
798 811
799 812 def __init__(self):
800 813
801 814 Operation.__init__(self)
802 815
803 816 def run(self, dataOut, freq = None, freqList = None):
804 817
805 818 VelRange = dataOut.getVelRange()
806 819 #print("VelRange: ", VelRange)
807 820
808 821 freq_ids = []
809 822
810 823 if freq is not None:
811 824 #print("freq")
812 825 #if freq < 0:
813 826 inda = numpy.where(VelRange >= freq)
814 827 minIndex = inda[0][0]
815 828 #print(numpy.shape(dataOut.dataLag_spc))
816 829 dataOut.data_spc[:,minIndex,:] = numpy.nan
817 830
818 831 #inda = numpy.where(VelRange >= ymin_noise)
819 832 #indb = numpy.where(VelRange <= ymax_noise)
820 833
821 834 #minIndex = inda[0][0]
822 835 #maxIndex = indb[0][-1]
823 836
824 837 elif freqList is not None:
825 838 #print("freqList")
826 839 for freq in freqList:
827 840 #if freq < 0:
828 841 inda = numpy.where(VelRange >= freq)
829 842 minIndex = inda[0][0]
830 843 #print(numpy.shape(dataOut.dataLag_spc))
831 844 if freq > 0:
832 845 #dataOut.data_spc[:,minIndex-1,:] = numpy.nan
833 846 freq_ids.append(minIndex-1)
834 847 else:
835 848 #dataOut.data_spc[:,minIndex,:] = numpy.nan
836 849 freq_ids.append(minIndex)
837 850 else:
838 851 raise ValueError("freq or freqList should be specified ...")
839 852
840 853 #freq_ids = numpy.array(freq_ids).flatten()
841 854
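# Replace every flagged frequency bin with the spectrum averaged over the
# remaining (unflagged) bins.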
842 855 avg = numpy.mean(dataOut.data_spc[:,[t for t in range(dataOut.data_spc.shape[1]) if t not in freq_ids],:],axis=1)
843 856
844 857 for p in list(freq_ids):
845 858 dataOut.data_spc[:,p,:] = avg
846 859
847 860
848 861 return dataOut
849 862
850 863 class IncohInt(Operation):
851 864
852 865 __profIndex = 0
853 866 __withOverapping = False
854 867
855 868 __byTime = False
856 869 __initime = None
857 870 __lastdatatime = None
858 871 __integrationtime = None
859 872
860 873 __buffer_spc = None
861 874 __buffer_cspc = None
862 875 __buffer_dc = None
863 876
864 877 __dataReady = False
865 878
866 879 __timeInterval = None
867 880
868 881 n = None
869 882
870 883 def __init__(self):
871 884
872 885 Operation.__init__(self)
873 886
874 887 def setup(self, n=None, timeInterval=None, overlapping=False):
875 888 """
876 889 Set the parameters of the integration class.
877 890
878 891 Inputs:
879 892
880 893 n : Number of incoherent integrations
881 894 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given
882 895 overlapping : Reserved flag; not used by this operation
883 896
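Example (sketch; the parameter values are illustrative):

op = proc_unit.addOperation(name='IncohInt')
op.addParameter(name='n', value='8')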
884 897 """
885 898
886 899 self.__initime = None
887 900 self.__lastdatatime = 0
888 901
889 902 self.__buffer_spc = 0
890 903 self.__buffer_cspc = 0
891 904 self.__buffer_dc = 0
892 905
893 906 self.__profIndex = 0
894 907 self.__dataReady = False
895 908 self.__byTime = False
896 909
897 910 if n is None and timeInterval is None:
898 911 raise ValueError("n or timeInterval should be specified ...")
899 912
900 913 if n is not None:
901 914 self.n = int(n)
902 915 else:
903 916
904 917 self.__integrationtime = int(timeInterval)
905 918 self.n = None
906 919 self.__byTime = True
907 920
908 921 def putData(self, data_spc, data_cspc, data_dc):
909 922 """
910 923 Add the incoming spectra to __buffer_spc (and the cspc/dc buffers) and increase __profIndex by one
911 924
912 925 """
913 926
914 927 self.__buffer_spc += data_spc
915 928
916 929 if data_cspc is None:
917 930 self.__buffer_cspc = None
918 931 else:
919 932 self.__buffer_cspc += data_cspc
920 933
921 934 if data_dc is None:
922 935 self.__buffer_dc = None
923 936 else:
924 937 self.__buffer_dc += data_dc
925 938
926 939 self.__profIndex += 1
927 940
928 941 return
929 942
930 943 def pushData(self):
931 944 """
932 945 Return the summed buffers and the number of profiles used in the sum, then reset them.
933 946
934 947 Affected:
935 948
936 949 self.__profIndex
937 950
938 951 """
939 952
940 953 data_spc = self.__buffer_spc
941 954 data_cspc = self.__buffer_cspc
942 955 data_dc = self.__buffer_dc
943 956 n = self.__profIndex
944 957
945 958 self.__buffer_spc = 0
946 959 self.__buffer_cspc = 0
947 960 self.__buffer_dc = 0
948 961 self.__profIndex = 0
949 962
950 963 return data_spc, data_cspc, data_dc, n
951 964
952 965 def byProfiles(self, *args):
953 966
954 967 self.__dataReady = False
955 968 avgdata_spc = None
956 969 avgdata_cspc = None
957 970 avgdata_dc = None
958 971
959 972 self.putData(*args)
960 973
961 974 if self.__profIndex == self.n:
962 975
963 976 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
964 977 self.n = n
965 978 self.__dataReady = True
966 979
967 980 return avgdata_spc, avgdata_cspc, avgdata_dc
968 981
969 982 def byTime(self, datatime, *args):
970 983
971 984 self.__dataReady = False
972 985 avgdata_spc = None
973 986 avgdata_cspc = None
974 987 avgdata_dc = None
975 988
976 989 self.putData(*args)
977 990
978 991 if (datatime - self.__initime) >= self.__integrationtime:
979 992 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
980 993 self.n = n
981 994 self.__dataReady = True
982 995
983 996 return avgdata_spc, avgdata_cspc, avgdata_dc
984 997
985 998 def integrate(self, datatime, *args):
986 999
987 1000 if self.__profIndex == 0:
988 1001 self.__initime = datatime
989 1002
990 1003 if self.__byTime:
991 1004 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
992 1005 datatime, *args)
993 1006 else:
994 1007 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
995 1008
996 1009 if not self.__dataReady:
997 1010 return None, None, None, None
998 1011
999 1012 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1000 1013
1001 1014 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1002 1015 if n == 1:
1003 1016 return dataOut
1005 1018 dataOut.flagNoData = True
1006 1019
1007 1020 if not self.isConfig:
1008 1021 self.setup(n, timeInterval, overlapping)
1009 1022 self.isConfig = True
1010 1023
1011 1024 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1012 1025 dataOut.data_spc,
1013 1026 dataOut.data_cspc,
1014 1027 dataOut.data_dc)
1015 1028
1016 1029 if self.__dataReady:
1017 1030
1018 1031 dataOut.data_spc = avgdata_spc
1021 1034 dataOut.data_cspc = avgdata_cspc
1022 1035 dataOut.data_dc = avgdata_dc
1023 1036 dataOut.nIncohInt *= self.n
1024 1037 dataOut.utctime = avgdatatime
1025 1038 dataOut.flagNoData = False
1026 1039
1027 1040 return dataOut
1028 1041
1029 1042 class dopplerFlip(Operation):
1030 1043
1031 1044 def run(self, dataOut, chann = None):
1032 1045 # array 1: (num_chan, num_profiles, num_heights)
1033 1046 self.dataOut = dataOut
1034 1047 # JULIA oblique mode, channel index 2
1035 1048 # array 2: (num_profiles, num_heights)
1036 1049 jspectra = self.dataOut.data_spc[chann]
1037 1050 jspectra_tmp = numpy.zeros(jspectra.shape)
1038 1051 num_profiles = jspectra.shape[0]
1039 1052 freq_dc = int(num_profiles / 2)
1040 1053 # Flip the Doppler axis using a for loop
1041 1054 for j in range(num_profiles):
1042 1055 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1043 1056 # Restore the DC profile and the one immediately before it (they are not flipped)
1044 1057 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1045 1058 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1046 1059 # Write the modified channel back into the channel array
1047 1060 self.dataOut.data_spc[chann] = jspectra_tmp
1048 1061
1049 1062 return self.dataOut
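# Example (sketch): attaching dopplerFlip through the Project API used in this
# module; the channel index is illustrative only.
#
#   op = proc_unit.addOperation(name='dopplerFlip')
#   op.addParameter(name='chann', value='2')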