##// END OF EJS Templates
Merge branch 'v3.0-devel' of http://jro-dev.igp.gob.pe/rhodecode/schain into v3.0-devel
Juan C. Espinoza -
r1231:a7a69d889908 merge
parent child
Show More
@@ -1,1035 +1,1037
1 1 import numpy
2 2 import time
3 3 import os
4 4 import h5py
5 5 import re
6 6 import datetime
7 7
8 8 from schainpy.model.data.jrodata import *
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 10 # from .jroIO_base import *
11 11 from schainpy.model.io.jroIO_base import *
12 12 import schainpy
13 13 from schainpy.utils import log
14 14
15 15 @MPDecorator
16 16 class ParamReader(JRODataReader,ProcessingUnit):
17 17 '''
18 18 Reads HDF5 format files
19 19
20 20 path
21 21
22 22 startDate
23 23
24 24 endDate
25 25
26 26 startTime
27 27
28 28 endTime
29 29 '''
30 30
31 31 ext = ".hdf5"
32 32
33 33 optchar = "D"
34 34
35 35 timezone = None
36 36
37 37 startTime = None
38 38
39 39 endTime = None
40 40
41 41 fileIndex = None
42 42
43 43 utcList = None #To select data in the utctime list
44 44
45 45 blockList = None #List to blocks to be read from the file
46 46
47 47 blocksPerFile = None #Number of blocks to be read
48 48
49 49 blockIndex = None
50 50
51 51 path = None
52 52
53 53 #List of Files
54 54
55 55 filenameList = None
56 56
57 57 datetimeList = None
58 58
59 59 #Hdf5 File
60 60
61 61 listMetaname = None
62 62
63 63 listMeta = None
64 64
65 65 listDataname = None
66 66
67 67 listData = None
68 68
69 69 listShapes = None
70 70
71 71 fp = None
72 72
73 73 #dataOut reconstruction
74 74
75 75 dataOut = None
76 76
77 77
def __init__(self):
    """Create the reader and attach an empty Parameters object as its output."""
    ProcessingUnit.__init__(self)
    self.dataOut = Parameters()
82 82
def setup(self, **kwargs):
    """Configure the reader: search for matching HDF5 files and open the first one.

    Required kwargs: path, startDate, endDate, startTime, endTime, walk.
    Optional kwargs: ext (default '.hdf5'), timezone (default 'lt').

    Exits the process (sys.exit) when no file matches the search.
    """
    path = kwargs['path']
    startDate = kwargs['startDate']
    endDate = kwargs['endDate']
    startTime = kwargs['startTime']
    endTime = kwargs['endTime']
    walk = kwargs['walk']
    # Optional parameters: idiomatic dict.get replaces the manual 'in' checks.
    ext = kwargs.get('ext', '.hdf5')
    self.timezone = kwargs.get('timezone', 'lt')

    print("[Reading] Searching files in offline mode ...")
    pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                     startTime=startTime, endTime=endTime,
                                                     ext=ext, walk=walk)

    if not filenameList:
        print("There is no files into the folder: %s" % (path))
        sys.exit(-1)

    self.fileIndex = -1
    self.startTime = startTime
    self.endTime = endTime

    # Metadata is shared by all files; read it once, then open the first file.
    self.__readMetadata()
    self.__setNextFileOffline()
    return
118 118
def searchFilesOffLine(self,
                       path,
                       startDate=None,
                       endDate=None,
                       startTime=datetime.time(0, 0, 0),
                       endTime=datetime.time(23, 59, 59),
                       ext='.hdf5',
                       walk=True):
    """Find data files under *path* inside the given date and time range.

    Returns (pathList, filenameList), or (None, None) when nothing matches.
    Also stores the results in self.filenameList / self.datetimeList.
    """
    expLabel = ''
    self.filenameList = []
    self.datetimeList = []
    pathList = []

    JRODataObj = JRODataReader()
    dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

    if dateList == []:
        print("[Reading] No *%s files in %s from %s to %s)" % (ext, path,
              datetime.datetime.combine(startDate, startTime).ctime(),
              datetime.datetime.combine(endDate, endTime).ctime()))
        return None, None

    if len(dateList) > 1:
        print("[Reading] %d days were found in date range: %s - %s" % (len(dateList), startDate, endDate))
    else:
        print("[Reading] data was found for the date %s" % (dateList[0]))

    filenameList = []
    datetimeList = []

    for thisPath in pathList:
        fileList = glob.glob1(thisPath, "*%s" % ext)
        fileList.sort()

        for file in fileList:
            filename = os.path.join(thisPath, file)

            # First a cheap date check, then the (file-opening) time check.
            if not isFileInDateRange(filename, startDate, endDate):
                continue

            thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
            if not thisDatetime:
                continue

            filenameList.append(filename)
            datetimeList.append(thisDatetime)

    if not filenameList:
        # BUGFIX: message wording (was "Any file was found int time range").
        print("[Reading] No file was found in time range %s - %s" % (
            datetime.datetime.combine(startDate, startTime).ctime(),
            datetime.datetime.combine(endDate, endTime).ctime()))
        return None, None

    print("[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime))
    print()

    self.filenameList = filenameList
    self.datetimeList = datetimeList

    return pathList, filenameList
186 186
def __isFileInTimeRange(self, filename, startDate, endDate, startTime, endTime):
    """Return the file's datetime when it contains data inside the time window.

    Args:
        filename:  full path of an HDF5 data file.
        startDate / endDate:   datetime.date limits of the selection.
        startTime / endTime:   datetime.time limits of the selection.

    Returns:
        datetime.datetime of the file when at least one block falls inside
        the requested window, otherwise None.

    Raises:
        IOError when the file cannot be opened.
    """
    try:
        fp = h5py.File(filename, 'r')
        grp1 = fp['Data']
    except IOError:
        traceback.print_exc()
        raise IOError("The file %s can't be opened" % (filename))

    # Per-block UTC timestamps stored alongside the data.
    grp2 = grp1['utctime']
    thisUtcTime = grp2.value[0]
    fp.close()

    if self.timezone == 'lt':
        thisUtcTime -= 5 * 3600

    # NOTE(review): 5*3600 is re-added unconditionally here, so for
    # timezone != 'lt' the derived datetime is shifted by 5 h — confirm
    # whether this asymmetry is intended.
    thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5 * 3600)
    thisDate = thisDatetime.date()
    thisTime = thisDatetime.time()

    epoch = datetime.datetime(1970, 1, 1)
    startUtcTime = (datetime.datetime.combine(thisDate, startTime) - epoch).total_seconds()
    endUtcTime = (datetime.datetime.combine(thisDate, endTime) - epoch).total_seconds()

    # General case: the requested window does not cross midnight.
    #               o>>>>>>>>>><<<<<<<<<<o
    # ----------o------------------------o----------
    #       startTime                 endTime
    if endTime >= startTime:
        inWindow = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
        if numpy.any(inWindow):  # at least one block inside the window
            return thisDatetime
        return None

    # Otherwise endTime belongs to the next day (window crosses midnight).
    # <<<<<<<<<<o                        o>>>>>>>>>>
    # ----------o------------------------o----------
    #        endTime                 startTime
    if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
        return None
    if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
        return None
    if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
        return None

    return thisDatetime
265 265
def __setNextFileOffline(self):
    """Open the next file from filenameList.

    Returns 1 on success, 0 when the list is exhausted.
    """
    self.fileIndex += 1

    if self.fileIndex >= len(self.filenameList):
        print("No more Files")
        return 0

    self.filename = self.filenameList[self.fileIndex]
    self.fp = h5py.File(self.filename, 'r')

    print("Setting the file: %s" % self.filename)

    # Select the blocks in range and load them into memory.
    self.__setBlockList()
    self.__readData()
    self.blockIndex = 0
    return 1
292 292
def __setBlockList(self):
    """Select the block indices of the open file that fall inside the time window.

    Reads:  self.fp, self.startTime, self.endTime
    Sets:   self.blockList (indices), self.blocksPerFile (count)
    """
    fp = self.fp
    startTime = self.startTime
    endTime = self.endTime

    grp = fp['Data']
    # BUGFIX: numpy.float was removed in NumPy 1.24; numpy.float64 is the
    # dtype the old alias resolved to.
    thisUtcTime = grp['utctime'].value.astype(numpy.float64)[0]

    if self.timezone == 'lt':
        thisUtcTime -= 5 * 3600

    # NOTE(review): the original marks this region "ERROOOOR" — 5*3600 is
    # re-added unconditionally, shifting the reference date when
    # timezone != 'lt'. Behavior kept; confirm intent.
    thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5 * 3600)

    thisDate = thisDatetime.date()
    thisTime = thisDatetime.time()

    startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
    endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

    ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

    self.blockList = ind
    self.blocksPerFile = len(ind)

    return
330 330
def __readMetadata(self):
    """Read the /Metadata group of the first file in the list.

    Fills:
        self.listShapes   dict: variable name -> [nDims, dim2, dim1, dim0, mode]
        self.listMetaname / self.listMeta   parallel lists with the
                                            remaining metadata items.
    """
    filename = self.filenameList[0]
    fp = h5py.File(filename, 'r')
    gp = fp['Metadata']

    metaNames = []
    metaValues = []

    for name in list(gp.keys()):
        if name == 'array dimensions':
            # Dimension table: one row per stored variable.
            table = gp[name][:]
            listShapes = {}
            for row in table:
                listShapes[row[0]] = numpy.array([row[1], row[2], row[3], row[4], row[5]])
        else:
            metaNames.append(name)
            metaValues.append(gp[name].value)

    self.listShapes = listShapes
    self.listMetaname = metaNames
    self.listMeta = metaValues

    fp.close()
    return
384 384
def __readData(self):
    """Extract every dataset in the open file's /Data group for the selected blocks."""
    grp = self.fp['Data']
    names = []
    arrays = []

    for name in list(grp.keys()):
        names.append(name)
        arrays.append(self.__setDataArray(grp[name], self.listShapes[name]))

    self.listDataname = names
    self.listData = arrays
    return
400 400
def __setDataArray(self, dataset, shapes):
    """Build a numpy array for one variable from its HDF5 dataset.

    Args:
        dataset: the h5py dataset (or group of 'tableN' datasets).
        shapes:  [nDims, dim2, dim1, dim0, mode] row from the
                 'array dimensions' metadata table.

    Only the blocks in self.blockList are extracted.
    """
    nDims = shapes[0]
    nDim2 = shapes[1]  # Dimension 0
    nDim1 = shapes[2]  # Dimension 1, number of points or parameters
    nDim0 = shapes[3]  # Dimension 2, number of samples or ranges
    mode = shapes[4]   # Mode the data was stored in

    blockList = self.blockList
    blocksPerFile = self.blocksPerFile

    # Depending on what mode the data was stored
    if mode == 0:  # Divided in channels
        # BUGFIX: numpy.float was removed in NumPy 1.24; float64 is the
        # dtype the alias resolved to.
        arrayData = dataset.value.astype(numpy.float64)[0][blockList]
    if mode == 1:  # Divided in parameters
        strds = 'table'
        nDatas = nDim1
        newShapes = (blocksPerFile, nDim2, nDim0)
    elif mode == 2:  # Concatenated in a table
        strds = 'table0'
        arrayData = dataset[strds].value
        # Selecting part of the dataset: rows whose utctime belongs to
        # the requested blocks.
        utctime = arrayData[:, 0]
        u, indices = numpy.unique(utctime, return_index=True)

        if blockList.size != indices.size:
            indMin = indices[blockList[0]]
            if blockList[1] + 1 >= indices.size:
                arrayData = arrayData[indMin:, :]
            else:
                indMax = indices[blockList[1] + 1]
                arrayData = arrayData[indMin:indMax, :]
        return arrayData

    # One dimension
    if nDims == 0:
        arrayData = dataset.value.astype(numpy.float64)[0][blockList]

    # Two dimensions
    elif nDims == 2:
        arrayData = numpy.zeros((blocksPerFile, nDim1, nDim0))
        newShapes = (blocksPerFile, nDim0)
        nDatas = nDim1

        for i in range(nDatas):
            data = dataset[strds + str(i)].value
            arrayData[:, i, :] = data[blockList, :]

    # Three dimensions
    else:
        arrayData = numpy.zeros((blocksPerFile, nDim2, nDim1, nDim0))
        for i in range(nDatas):
            data = dataset[strds + str(i)].value
            for b in range(blockList.size):
                arrayData[b, :, i, :] = data[:, :, blockList[b]]

    return arrayData
465 465
def __setDataOut(self):
    """Copy metadata and the current block of every variable onto self.dataOut."""
    blockIndex = self.blockIndex

    # Metadata attributes are copied verbatim.
    for name, value in zip(self.listMetaname, self.listMeta):
        setattr(self.dataOut, name, value)

    # Data attributes: pick the slice that corresponds to the current block.
    for name, array in zip(self.listDataname, self.listData):
        nShapes = self.listShapes[name][0]
        mode = self.listShapes[name][4]
        if nShapes == 1:
            setattr(self.dataOut, name, array[blockIndex])
        elif nShapes > 1:
            setattr(self.dataOut, name, array[blockIndex, :])
        elif mode == 0:
            setattr(self.dataOut, name, array[blockIndex])
        elif mode == 2:
            # Mode meteors: variable number of rows per block.
            selectedData = self.__selectDataMode2(array, blockIndex)
            setattr(self.dataOut, name, selectedData)
    return
493 493
def __selectDataMode2(self, data, blockIndex):
    """Return the rows of *data* whose utctime (column 0) maps to *blockIndex*."""
    utctime = data[:, 0]
    aux, indices = numpy.unique(utctime, return_inverse=True)
    rows = numpy.where(indices == blockIndex)[0]
    return data[rows, :]
501 501
def getData(self):
    """Publish the next block on self.dataOut, rolling over files when needed.

    Returns 0 (with flagNoData set) when no more files are available.
    """
    if self.blockIndex == self.blocksPerFile:
        if not self.__setNextFileOffline():
            self.dataOut.flagNoData = True
            return 0

    self.__setDataOut()
    self.dataOut.flagNoData = False
    self.blockIndex += 1
    return
515 515
def run(self, **kwargs):
    """Processing-unit entry point: configure on first call, then emit one block per call."""
    if not self.isConfig:
        self.setup(**kwargs)
        self.isConfig = True

    self.getData()
    return
526 526
527 527 @MPDecorator
528 528 class ParamWriter(Operation):
529 529 '''
530 530 HDF5 Writer, stores parameters data in HDF5 format files
531 531
532 532 path: path where the files will be stored
533 533
534 534 blocksPerFile: number of blocks that will be saved in per HDF5 format file
535 535
536 536 mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
537 537
538 538 metadataList: list of attributes that will be stored as metadata
539 539
540 540 dataList: list of attributes that will be stores as data
541 541
542 542 '''
543 543
544 544
545 545 ext = ".hdf5"
546 546 optchar = "D"
547 547 metaoptchar = "M"
548 548 metaFile = None
549 549 filename = None
550 550 path = None
551 551 setFile = None
552 552 fp = None
553 553 grp = None
554 554 ds = None
555 555 firsttime = True
556 556 #Configurations
557 557 blocksPerFile = None
558 558 blockIndex = None
559 559 dataOut = None
560 560 #Data Arrays
561 561 dataList = None
562 562 metadataList = None
563 563 dsList = None #List of dictionaries with dataset properties
564 564 tableDim = None
565 565 dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
566 566 currentDay = None
567 567 lastTime = None
568 setType = None
568 569
def __init__(self):
    """Plain Operation initialisation; real configuration happens in setup()."""
    Operation.__init__(self)
573 574
def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
    """Configure the writer and precompute the dataset layout.

    For each attribute named in *dataList* a storage descriptor (dsDict) and
    a row of the 'array dimensions' metadata table are generated.
    Storage modes: 0 = scalar, 1 = array split into 'tableN' datasets,
    2 = meteors table (variable rows per block).

    Returns 0 (configuration aborted) when any requested attribute is None.
    """
    self.path = path
    self.blocksPerFile = blocksPerFile
    self.metadataList = metadataList
    self.dataList = dataList
    self.dataOut = dataOut
    # One mode entry per variable (default mode 1).
    if mode is not None:
        self.mode = numpy.zeros(len(self.dataList)) + mode
    else:
        self.mode = numpy.ones(len(self.dataList))

    self.setType = setType

    arrayDim = numpy.zeros((len(self.dataList), 5))

    # Table dimensions
    dtype0 = self.dtype
    tableList = []

    # Dictionary and list of tables
    dsList = []

    for i in range(len(self.dataList)):
        dsDict = {}
        dataAux = getattr(self.dataOut, self.dataList[i])
        dsDict['variable'] = self.dataList[i]

        # There is no data for this variable: abort the whole setup.
        if dataAux is None:
            return 0

        # Scalars (mode 0).
        # BUGFIX: numpy.float was removed in NumPy 1.24. numpy.floating
        # also matches float32/float64 numpy scalars, which the old alias
        # (== builtin float) never did, so this is backward-compatible.
        if isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
            dsDict['mode'] = 0
            dsDict['nDim'] = 0
            arrayDim[i, 0] = 0
            dsList.append(dsDict)

        # Mode 2: meteors
        elif self.mode[i] == 2:
            dsDict['dsName'] = 'table0'
            dsDict['mode'] = 2  # Mode meteors
            dsDict['shape'] = dataAux.shape[-1]
            dsDict['nDim'] = 0
            dsDict['dsNumber'] = 1
            arrayDim[i, 3] = dataAux.shape[-1]
            arrayDim[i, 4] = self.mode[i]  # Mode the data was stored
            dsList.append(dsDict)

        # Mode 1: generic arrays, one descriptor per sub-table.
        else:
            arrayDim0 = dataAux.shape            # Data dimensions
            arrayDim[i, 0] = len(arrayDim0)      # Number of array dimensions
            arrayDim[i, 4] = self.mode[i]        # Mode the data was stored
            strtable = 'table'
            dsDict['mode'] = 1  # Mode parameters

            # Three-dimension arrays
            if len(arrayDim0) == 3:
                arrayDim[i, 1:-1] = numpy.array(arrayDim0)
                nTables = int(arrayDim[i, 2])
                dsDict['dsNumber'] = nTables
                dsDict['shape'] = arrayDim[i, 2:4]
                dsDict['nDim'] = 3

                for j in range(nTables):
                    dsDict = dsDict.copy()
                    dsDict['dsName'] = strtable + str(j)
                    dsList.append(dsDict)

            # Two-dimension arrays
            elif len(arrayDim0) == 2:
                arrayDim[i, 2:-1] = numpy.array(arrayDim0)
                nTables = int(arrayDim[i, 2])
                dsDict['dsNumber'] = nTables
                dsDict['shape'] = arrayDim[i, 3]
                dsDict['nDim'] = 2

                for j in range(nTables):
                    dsDict = dsDict.copy()
                    dsDict['dsName'] = strtable + str(j)
                    dsList.append(dsDict)

            # One-dimension arrays
            elif len(arrayDim0) == 1:
                arrayDim[i, 3] = arrayDim0[0]
                dsDict['shape'] = arrayDim0[0]
                dsDict['dsNumber'] = 1
                dsDict['dsName'] = strtable + str(0)
                dsDict['nDim'] = 1
                dsList.append(dsDict)

        table = numpy.array((self.dataList[i],) + tuple(arrayDim[i, :]), dtype=dtype0)
        tableList.append(table)

    self.dsList = dsList
    self.tableDim = numpy.array(tableList, dtype=dtype0)
    self.blockIndex = 0
    timeTuple = time.localtime(dataOut.utctime)
    self.currentDay = timeTuple.tm_yday
676 677
def putMetadata(self):
    """Write a standalone metadata file and close it."""
    fp = self.createMetadataFile()
    self.writeMetadata(fp)
    fp.close()
    return
683 684
def createMetadataFile(self):
    """Create and return a new HDF5 metadata file (metaoptchar 'M' prefix).

    The file is stored under <path>/dYYYYDDD/ and named
    M YYYY DDD SSS .ext, where SSS is a running set number — or a
    minute-of-day 4-digit number when self.setType is set.
    """
    ext = self.ext
    path = self.path
    setFile = self.setFile

    timeTuple = time.localtime(self.dataOut.utctime)

    subfolder = ''
    fullpath = os.path.join(path, subfolder)

    if not os.path.exists(fullpath):
        os.mkdir(fullpath)
        setFile = -1  # restart the set counter

    subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
    fullpath = os.path.join(path, subfolder)

    if not os.path.exists(fullpath):
        os.mkdir(fullpath)
        setFile = -1  # restart the set counter
    else:
        filesList = os.listdir(fullpath)
        # BUGFIX: filter for metadata files BEFORE testing emptiness.
        # The original checked len() first; if the filter then emptied the
        # list, filesList[-1] raised IndexError.
        filesList = [k for k in filesList if k.startswith(self.metaoptchar)]
        filesList = sorted(filesList, key=str.lower)
        if len(filesList) > 0:
            filen = filesList[-1]
            # expected name layout (hex offsets):
            # 0 1234 567 89A BCDE
            # x YYYY DDD SSS .ext
            if isNumber(filen[8:11]):
                setFile = int(filen[8:11])  # continue from the last set on disk
            else:
                setFile = -1
        else:
            setFile = -1  # restart the set counter

    if self.setType is None:
        setFile += 1
        file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
                                       timeTuple.tm_year,
                                       timeTuple.tm_yday,
                                       setFile,
                                       ext)
    else:
        # Minute-of-day based set number.
        setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
        file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
                                       timeTuple.tm_year,
                                       timeTuple.tm_yday,
                                       setFile,
                                       ext)

    filename = os.path.join(path, subfolder, file)
    self.metaFile = file
    # Setting HDF5 File
    fp = h5py.File(filename, 'w')

    return fp
742 743
def writeMetadata(self, fp):
    """Write the dimensions table plus every listed dataOut attribute into /Metadata."""
    grp = fp.create_group("Metadata")
    grp.create_dataset('array dimensions', data=self.tableDim, dtype=self.dtype)

    for name in self.metadataList:
        grp.create_dataset(name, data=getattr(self.dataOut, name))
    return
751 752
def timeFlag(self):
    """Return True when a new output file should be started.

    A rollover happens when the (local) day of year changes, or when more
    than three hours passed between consecutive blocks.
    """
    currentTime = self.dataOut.utctime

    if self.lastTime is None:
        self.lastTime = currentTime

    dataDay = time.localtime(currentTime).tm_yday
    timeDiff = currentTime - self.lastTime

    if dataDay != self.currentDay:
        self.currentDay = dataDay
        return True
    if timeDiff > 3 * 60 * 60:
        self.lastTime = currentTime
        return True

    self.lastTime = currentTime
    return False
775 776
def setNextFile(self):
    """Create the next HDF5 data file, write its metadata and pre-create all datasets.

    Sets self.filename, self.ds (dataset handles), self.data (staging
    slots), self.firsttime and resets self.blockIndex.
    """
    ext = self.ext
    path = self.path
    setFile = self.setFile
    mode = self.mode

    timeTuple = time.localtime(self.dataOut.utctime)
    subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
    fullpath = os.path.join(path, subfolder)

    if os.path.exists(fullpath):
        filesList = os.listdir(fullpath)
        filesList = [k for k in filesList if k.startswith(self.optchar)]
        # NOTE(review): this extra filter requires an 'M' in the name, but
        # data files created below (optchar + digits) never contain one,
        # so the set counter always restarts — confirm this merge-added
        # filter is intended.
        filesList = [k for k in filesList if 'M' in k]
        if len(filesList) > 0:
            filesList = sorted(filesList, key=str.lower)
            filen = filesList[-1]
            # expected name layout (hex offsets): x YYYY DDD SSS .ext
            if isNumber(filen[8:11]):
                setFile = int(filen[8:11])  # continue from the last set on disk
            else:
                setFile = -1
        else:
            setFile = -1  # restart the set counter
    else:
        os.makedirs(fullpath)
        setFile = -1  # restart the set counter

    if self.setType is None:
        setFile += 1
        file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                       timeTuple.tm_year,
                                       timeTuple.tm_yday,
                                       setFile,
                                       ext)
    else:
        # Minute-of-day based set number.
        setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
        file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                       timeTuple.tm_year,
                                       timeTuple.tm_yday,
                                       setFile,
                                       ext)

    filename = os.path.join(path, subfolder, file)

    # Create the HDF5 file with metadata and an empty /Data layout.
    fp = h5py.File(filename, 'w')
    self.writeMetadata(fp)
    grp = fp.create_group("Data")

    ds = []
    data = []
    dsList = self.dsList
    i = 0
    while i < len(dsList):
        dsInfo = dsList[i]

        if dsInfo['mode'] == 0:
            # Scalar variable: single growing row of float64.
            ds0 = grp.create_dataset(dsInfo['variable'], (1, 1),
                                     maxshape=(1, self.blocksPerFile),
                                     chunks=True, dtype=numpy.float64)
            ds.append(ds0)
            data.append([])
            i += 1
            continue

        elif dsInfo['mode'] == 2:
            # Meteors table: rows appended block by block.
            grp0 = grp.create_group(dsInfo['variable'])
            ds0 = grp0.create_dataset(dsInfo['dsName'], (1, dsInfo['shape']),
                                      data=numpy.zeros((1, dsInfo['shape'])),
                                      maxshape=(None, dsInfo['shape']), chunks=True)
            ds.append(ds0)
            data.append([])
            i += 1
            continue

        elif dsInfo['mode'] == 1:
            # Generic arrays: one 'tableN' dataset per parameter/channel.
            grp0 = grp.create_group(dsInfo['variable'])

            for j in range(dsInfo['dsNumber']):
                dsInfo = dsList[i]
                tableName = dsInfo['dsName']

                if dsInfo['nDim'] == 3:
                    shape = dsInfo['shape'].astype(int)
                    ds0 = grp0.create_dataset(tableName, (shape[0], shape[1], 1),
                                              data=numpy.zeros((shape[0], shape[1], 1)),
                                              maxshape=(None, shape[1], None), chunks=True)
                else:
                    shape = int(dsInfo['shape'])
                    ds0 = grp0.create_dataset(tableName, (1, shape),
                                              data=numpy.zeros((1, shape)),
                                              maxshape=(None, shape), chunks=True)

                ds.append(ds0)
                data.append([])
                i += 1

    fp.flush()
    fp.close()

    log.log('creating file: {}'.format(filename), 'Writing')
    self.filename = filename
    self.ds = ds
    self.data = data
    self.firsttime = True
    self.blockIndex = 0
    return
881 882
def putData(self):
    """Write the current dataOut block, rolling over to a new file when full or stale."""
    if self.blockIndex == self.blocksPerFile or self.timeFlag():
        self.setNextFile()

    self.readBlock()
    self.setBlock()    # Prepare data to be written
    self.writeBlock()  # Write data
    return
892 893
def readBlock(self):
    """Re-open the current file in r+ mode and refresh the dataset handles in self.ds."""
    dsList = self.dsList
    ds = self.ds

    fp = h5py.File(self.filename, 'r+')
    grp = fp["Data"]
    pos = 0

    while pos < len(dsList):
        dsInfo = dsList[pos]

        if dsInfo['mode'] == 0:
            # Scalar: dataset lives directly under /Data.
            ds[pos] = grp[dsInfo['variable']]
            pos += 1
        else:
            # Array / meteors: sub-group with one or more 'tableN' datasets.
            grp0 = grp[dsInfo['variable']]
            for _ in range(dsInfo['dsNumber']):
                ds[pos] = grp0[dsList[pos]['dsName']]
                pos += 1

    self.fp = fp
    self.grp = grp
    self.ds = ds
    return
930 931
def setBlock(self):
    """Stage the current dataOut values into self.data, one slot per dataset."""
    dsList = self.dsList
    data = self.data
    idx = 0

    while idx < len(dsList):
        dsInfo = dsList[idx]
        dataAux = getattr(self.dataOut, dsInfo['variable'])
        mode = dsInfo['mode']
        nDim = dsInfo['nDim']

        if mode == 0 or mode == 2 or nDim == 1:
            # Scalars, meteors tables and 1-D arrays go in whole.
            data[idx] = dataAux
            idx += 1
        elif nDim == 2:
            # One row per sub-table.
            for j in range(dsInfo['dsNumber']):
                data[idx] = dataAux[j, :]
                idx += 1
        elif nDim == 3:
            # One middle-axis slice per sub-table.
            for j in range(dsInfo['dsNumber']):
                data[idx] = dataAux[:, j, :]
                idx += 1

    self.data = data
    return
967 968
def writeBlock(self):
    """Append the staged block (self.data) to the HDF5 datasets and close the file."""
    dsList = self.dsList

    for k in range(len(self.ds)):
        dsInfo = dsList[k]
        nDim = dsInfo['nDim']
        mode = dsInfo['mode']

        if self.firsttime:
            # First block: datasets were created with a size-1 axis, so
            # only 3-D arrays and meteor tables need an initial resize.
            if type(self.data[k]) == numpy.ndarray:
                if nDim == 3:
                    self.data[k] = self.data[k].reshape((self.data[k].shape[0], self.data[k].shape[1], 1))
                    self.ds[k].resize(self.data[k].shape)
                if mode == 2:
                    self.ds[k].resize(self.data[k].shape)
            self.ds[k][:] = self.data[k]
        else:
            # From the second block on: grow the dataset, then append.
            if mode == 2:
                # Meteors: variable number of rows per block.
                dataShape = self.data[k].shape
                dsShape = self.ds[k].shape
                self.ds[k].resize((self.ds[k].shape[0] + dataShape[0], self.ds[k].shape[1]))
                self.ds[k][dsShape[0]:, :] = self.data[k]
            elif mode == 0:
                # Scalar: one new column per block.
                self.ds[k].resize((self.ds[k].shape[0], self.ds[k].shape[1] + 1))
                self.ds[k][0, -1] = self.data[k]
            elif nDim == 1:
                # One dimension: one new row per block.
                self.ds[k].resize((self.ds[k].shape[0] + 1, self.ds[k].shape[1]))
                self.ds[k][-1, :] = self.data[k]
            elif nDim == 2:
                # Two dimensions: row written at the current block index.
                self.ds[k].resize((self.ds[k].shape[0] + 1, self.ds[k].shape[1]))
                self.ds[k][self.blockIndex, :] = self.data[k]
            elif nDim == 3:
                # Three dimensions: block appended on the last axis.
                self.ds[k].resize((self.ds[k].shape[0], self.ds[k].shape[1], self.ds[k].shape[2] + 1))
                self.ds[k][:, :, -1] = self.data[k]

    self.firsttime = False
    self.blockIndex += 1

    # Flush and close to persist the block.
    self.fp.flush()
    self.fp.close()
    return
1022 1023
def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
    """Operation entry point: store one block of *dataOut* per call.

    On the first call the writer is configured and the first output file
    is created.
    """
    self.dataOut = dataOut
    if not self.isConfig:
        self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
                   metadataList=metadataList, dataList=dataList, mode=mode,
                   setType=setType)
        self.isConfig = True
        self.setNextFile()

    self.putData()
    return
1035 1037 No newline at end of file
@@ -1,678 +1,678
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12 from schainpy.utils import log
13 13
14 14 @MPDecorator
15 15 class SpectraReader(JRODataReader, ProcessingUnit):
16 16 """
17 17 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
18 18 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
19 19 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
20 20
21 21 paresCanalesIguales * alturas * perfiles (Self Spectra)
22 22 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
23 23 canales * alturas (DC Channels)
24 24
25 25 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
26 26 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
27 27 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
28 28 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
29 29
30 30 Example:
31 31 dpath = "/home/myuser/data"
32 32
33 33 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
34 34
35 35 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
36 36
37 37 readerObj = SpectraReader()
38 38
39 39 readerObj.setup(dpath, startTime, endTime)
40 40
41 41 while(True):
42 42
43 43 readerObj.getData()
44 44
45 45 print readerObj.data_spc
46 46
47 47 print readerObj.data_cspc
48 48
49 49 print readerObj.data_dc
50 50
51 51 if readerObj.flagNoMoreFiles:
52 52 break
53 53
54 54 """
55 55
56 56 pts2read_SelfSpectra = 0
57 57
58 58 pts2read_CrossSpectra = 0
59 59
60 60 pts2read_DCchannels = 0
61 61
62 62 ext = ".pdata"
63 63
64 64 optchar = "P"
65 65
66 66 dataOut = None
67 67
68 68 nRdChannels = None
69 69
70 70 nRdPairs = None
71 71
72 72 rdPairList = []
73 73
    def __init__(self):#, **kwargs):
        """
        SpectraReader initializer.

        Creates the default Spectra output object (self.dataOut) and resets
        every bookkeeping attribute used while reading .pdata files: header
        objects, file pointers, block counters and online-mode settings.

        Affected:
            self.dataOut

        Return : None
        """

        # Legacy note (translated): "remove the inheritance from the base"
        ProcessingUnit.__init__(self)#, **kwargs)

        # self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagDiscontinuousBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1 #Always

177 177
178 178 def createObjByDefault(self):
179 179
180 180 dataObj = Spectra()
181 181
182 182 return dataObj
183 183
    def __hasNotDataInBuffer(self):
        """Always 1: every getData() call triggers a fresh block read."""
        return 1
186 186
187 187
    def getBlockDimension(self):
        """
        Compute how many points must be read for each sub-block of a data
        block.  Entries of spectraComb with equal channels are self-spectra;
        entries with different channels are cross-spectra.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 # pair of equal channels (self-spectrum)
            else:
                self.nRdPairs = self.nRdPairs + 1 # pair of different channels (cross-spectrum)
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

        # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels

232 232 def readBlock(self):
233 233 """
234 234 Lee el bloque de datos desde la posicion actual del puntero del archivo
235 235 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
236 236 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
237 237 es seteado a 0
238 238
239 239 Return: None
240 240
241 241 Variables afectadas:
242 242
243 243 self.flagIsNewFile
244 244 self.flagIsNewBlock
245 245 self.nTotalBlocks
246 246 self.data_spc
247 247 self.data_cspc
248 248 self.data_dc
249 249
250 250 Exceptions:
251 251 Si un bloque leido no es un bloque valido
252 252 """
253 253 blockOk_flag = False
254 254 fpointer = self.fp.tell()
255 255
256 256 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
257 257 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
258 258
259 259 if self.processingHeaderObj.flag_cspc:
260 260 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
261 261 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
262 262
263 263 if self.processingHeaderObj.flag_dc:
264 264 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
265 265 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
266 266
267 267
268 268 if not self.processingHeaderObj.shif_fft:
269 269 #desplaza a la derecha en el eje 2 determinadas posiciones
270 270 shift = int(self.processingHeaderObj.profilesPerBlock/2)
271 271 spc = numpy.roll( spc, shift , axis=2 )
272 272
273 273 if self.processingHeaderObj.flag_cspc:
274 274 #desplaza a la derecha en el eje 2 determinadas posiciones
275 275 cspc = numpy.roll( cspc, shift, axis=2 )
276 276
277 277 #Dimensions : nChannels, nProfiles, nSamples
278 278 spc = numpy.transpose( spc, (0,2,1) )
279 279 self.data_spc = spc
280 280
281 281 if self.processingHeaderObj.flag_cspc:
282 282 cspc = numpy.transpose( cspc, (0,2,1) )
283 283 self.data_cspc = cspc['real'] + cspc['imag']*1j
284 284 else:
285 285 self.data_cspc = None
286 286
287 287 if self.processingHeaderObj.flag_dc:
288 288 self.data_dc = dc['real'] + dc['imag']*1j
289 289 else:
290 290 self.data_dc = None
291 291
292 292 self.flagIsNewFile = 0
293 293 self.flagIsNewBlock = 1
294 294
295 295 self.nTotalBlocks += 1
296 296 self.nReadBlocks += 1
297 297
298 298 return 1
299 299
    def getFirstHeader(self):
        """
        Populate self.dataOut metadata from the current file headers:
        system/radar-controller copies, FFT sizes, integration counts,
        height and channel lists, decode/flip flags.
        """

        self.getBasicHeader()

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        # self.dataOut.ippSeconds = self.ippSeconds

        # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock

        self.dataOut.dtype = self.dtype

        # self.dataOut.nPairs = self.nPairs

        self.dataOut.pairsList = self.rdPairList

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        # last height (exclusive) for the height ramp below
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))

        self.dataOut.flagShiftFFT = True #Data is always shifted

        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data is not decoded

        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data has no flip

    def getData(self):
        """
        First method to execute before "RUN" is called.

        Copies the read buffers into the Spectra object (self.dataOut) along
        with all associated metadata; when the buffer is empty a new block is
        read via readNextBlock().

        Return:
            0 : no more files available / no data
            self.dataOut.data_spc : the buffer was successfully copied

        Affected:
            self.dataOut
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print('Process finished')
            return 0

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        # data is a 3-D numpy array (profiles, heights and channels)

        if self.data_spc is None:
            self.dataOut.flagNoData = True
            return 0

        self.getBasicHeader()

        # NOTE(review): getFirstHeader() calls getBasicHeader() again, so the
        # call above looks redundant -- confirm before removing.
        self.getFirstHeader()

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagNoData = False

        self.dataOut.realtime = self.online

        return self.dataOut.data_spc
392 392 @MPDecorator
393 393 class SpectraWriter(JRODataWriter, Operation):
394 394
395 395 """
396 396 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
397 397 de los datos siempre se realiza por bloques.
398 398 """
399 399
400 400 ext = ".pdata"
401 401
402 402 optchar = "P"
403 403
404 404 shape_spc_Buffer = None
405 405
406 406 shape_cspc_Buffer = None
407 407
408 408 shape_dc_Buffer = None
409 409
410 410 data_spc = None
411 411
412 412 data_cspc = None
413 413
414 414 data_dc = None
415 415
416 416 def __init__(self):
417 417 """
418 418 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
419 419
420 420 Affected:
421 421 self.dataOut
422 422 self.basicHeaderObj
423 423 self.systemHeaderObj
424 424 self.radarControllerHeaderObj
425 425 self.processingHeaderObj
426 426
427 427 Return: None
428 428 """
429 429
430 430 Operation.__init__(self)
431 431
432 432 self.nTotalBlocks = 0
433 433
434 434 self.data_spc = None
435 435
436 436 self.data_cspc = None
437 437
438 438 self.data_dc = None
439 439
440 440 self.fp = None
441 441
442 442 self.flagIsNewFile = 1
443 443
444 444 self.nTotalBlocks = 0
445 445
446 446 self.flagIsNewBlock = 0
447 447
448 448 self.setFile = None
449 449
450 450 self.dtype = None
451 451
452 452 self.path = None
453 453
454 454 self.noMoreFiles = 0
455 455
456 456 self.filename = None
457 457
458 458 self.basicHeaderObj = BasicHeader(LOCALTIME)
459 459
460 460 self.systemHeaderObj = SystemHeader()
461 461
462 462 self.radarControllerHeaderObj = RadarControllerHeader()
463 463
464 464 self.processingHeaderObj = ProcessingHeader()
465 465
466 466
    def hasAllDataInBuffer(self):
        """Always 1: spectra data arrives as complete blocks, ready to write."""
        return 1
469 469
470 470
    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks (self/cross/DC spectra) that
        make up one output data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)

493 493
494 494 def writeBlock(self):
495 495 """processingHeaderObj
496 496 Escribe el buffer en el file designado
497 497
498 498 Affected:
499 499 self.data_spc
500 500 self.data_cspc
501 501 self.data_dc
502 502 self.flagIsNewFile
503 503 self.flagIsNewBlock
504 504 self.nTotalBlocks
505 505 self.nWriteBlocks
506 506
507 507 Return: None
508 508 """
509 509
510 510 spc = numpy.transpose( self.data_spc, (0,2,1) )
511 511 if not self.processingHeaderObj.shif_fft:
512 512 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
513 513 data = spc.reshape((-1))
514 514 data = data.astype(self.dtype[0])
515 515 data.tofile(self.fp)
516 516
517 517 if self.data_cspc is not None:
518 518
519 519 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
520 #data = numpy.zeros( numpy.shape(cspc), self.dtype )
520 data = numpy.zeros( numpy.shape(cspc), self.dtype )
521 521 #print 'data.shape', self.shape_cspc_Buffer
522 522 if not self.processingHeaderObj.shif_fft:
523 523 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
524 524 data['real'] = cspc.real
525 525 data['imag'] = cspc.imag
526 526 data = data.reshape((-1))
527 527 data.tofile(self.fp)
528 528
529 529 if self.data_dc is not None:
530 530
531 531 dc = self.data_dc
532 532 data = numpy.zeros( numpy.shape(dc), self.dtype )
533 533 data['real'] = dc.real
534 534 data['imag'] = dc.imag
535 535 data = data.reshape((-1))
536 536 data.tofile(self.fp)
537 537
538 538 # self.data_spc.fill(0)
539 539 #
540 540 # if self.data_dc is not None:
541 541 # self.data_dc.fill(0)
542 542 #
543 543 # if self.data_cspc is not None:
544 544 # self.data_cspc.fill(0)
545 545
546 546 self.flagIsNewFile = 0
547 547 self.flagIsNewBlock = 1
548 548 self.nTotalBlocks += 1
549 549 self.nWriteBlocks += 1
550 550 self.blockIndex += 1
551 551
552 552 # print "[Writing] Block = %d04" %self.blockIndex
553 553
    def putData(self):
        """
        Stage one block of data and write it to the current file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data, or no more writable files
            1 : the block was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # On a time discontinuity: clear the buffers and start a new file
            self.data_spc.fill(0)
            if self.dataOut.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.dataOut.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()

        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()

        if self.dataOut.data_dc is not None:
            self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
            # self.setFirstHeader()
            self.writeNextBlock()

597 597 def __getBlockSize(self):
598 598 '''
599 599 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
600 600 '''
601 601
602 602 dtype_width = self.getDtypeWidth()
603 603
604 604 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
605 605
606 606 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
607 607 blocksize = (pts2write_SelfSpectra*dtype_width)
608 608
609 609 if self.dataOut.data_cspc is not None:
610 610 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
611 611 blocksize += (pts2write_CrossSpectra*dtype_width*2)
612 612
613 613 if self.dataOut.data_dc is not None:
614 614 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
615 615 blocksize += (pts2write_DCchannels*dtype_width*2)
616 616
617 617 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
618 618
619 619 return blocksize
620 620
    def setFirstHeader(self):

        """
        Build the first-header copies for the output file from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            channelList = []
            for channel in range(self.dataOut.nChannels):
                # each self-spectrum is encoded as a (ch, ch) pair in spectraComb
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

        self.processingHeaderObj.processFlags = self.getProcessFlags()

        self.setBasicHeader()
@@ -1,1060 +1,1056
1 1 import itertools
2 2
3 3 import numpy
4 4
5 5 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
6 6 from schainpy.model.data.jrodata import Spectra
7 7 from schainpy.model.data.jrodata import hildebrand_sekhon
8 8 from schainpy.utils import log
9 9
10 10 @MPDecorator
11 11 class SpectraProc(ProcessingUnit):
12 12
13 13
    def __init__(self):
        """Create the Spectra output object and reset the voltage buffer state."""

        ProcessingUnit.__init__(self)

        self.buffer = None          # voltage accumulator, (channels, profiles, heights)
        self.firstdatatime = None   # utctime of the first buffered profile
        self.profIndex = 0          # profiles accumulated so far
        self.dataOut = Spectra()
        self.id_min = None          # slice bounds used for block-mode accumulation
        self.id_max = None
        self.setupReq = False # add to every processing unit (translated legacy note)
25 25
26 26 def __updateSpecFromVoltage(self):
27 27
28 28 self.dataOut.timeZone = self.dataIn.timeZone
29 29 self.dataOut.dstFlag = self.dataIn.dstFlag
30 30 self.dataOut.errorCount = self.dataIn.errorCount
31 31 self.dataOut.useLocalTime = self.dataIn.useLocalTime
32 32 try:
33 33 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
34 34 except:
35 35 pass
36 36 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
37 37 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
38 38 self.dataOut.channelList = self.dataIn.channelList
39 39 self.dataOut.heightList = self.dataIn.heightList
40 40 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
41 41
42 42 self.dataOut.nBaud = self.dataIn.nBaud
43 43 self.dataOut.nCode = self.dataIn.nCode
44 44 self.dataOut.code = self.dataIn.code
45 45 self.dataOut.nProfiles = self.dataOut.nFFTPoints
46 46
47 47 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
48 48 self.dataOut.utctime = self.firstdatatime
49 49 # asumo q la data esta decodificada
50 50 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
51 51 # asumo q la data esta sin flip
52 52 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
53 53 self.dataOut.flagShiftFFT = False
54 54
55 55 self.dataOut.nCohInt = self.dataIn.nCohInt
56 56 self.dataOut.nIncohInt = 1
57 57
58 58 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
59 59
60 60 self.dataOut.frequency = self.dataIn.frequency
61 61 self.dataOut.realtime = self.dataIn.realtime
62 62
63 63 self.dataOut.azimuth = self.dataIn.azimuth
64 64 self.dataOut.zenith = self.dataIn.zenith
65 65
66 66 self.dataOut.beam.codeList = self.dataIn.beam.codeList
67 67 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
68 68 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
69 69
70 70 def __getFft(self):
71 71 """
72 72 Convierte valores de Voltaje a Spectra
73 73
74 74 Affected:
75 75 self.dataOut.data_spc
76 76 self.dataOut.data_cspc
77 77 self.dataOut.data_dc
78 78 self.dataOut.heightList
79 79 self.profIndex
80 80 self.buffer
81 81 self.dataOut.flagNoData
82 82 """
83 83 fft_volt = numpy.fft.fft(
84 84 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
85 85 fft_volt = fft_volt.astype(numpy.dtype('complex'))
86 86 dc = fft_volt[:, 0, :]
87 87
88 88 # calculo de self-spectra
89 89 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
90 90 spc = fft_volt * numpy.conjugate(fft_volt)
91 91 spc = spc.real
92 92
93 93 blocksize = 0
94 94 blocksize += dc.size
95 95 blocksize += spc.size
96 96
97 97 cspc = None
98 98 pairIndex = 0
99 99 if self.dataOut.pairsList != None:
100 100 # calculo de cross-spectra
101 101 cspc = numpy.zeros(
102 102 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
103 103 for pair in self.dataOut.pairsList:
104 104 if pair[0] not in self.dataOut.channelList:
105 105 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
106 106 str(pair), str(self.dataOut.channelList)))
107 107 if pair[1] not in self.dataOut.channelList:
108 108 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
109 109 str(pair), str(self.dataOut.channelList)))
110 110
111 111 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
112 112 numpy.conjugate(fft_volt[pair[1], :, :])
113 113 pairIndex += 1
114 114 blocksize += cspc.size
115 115
116 116 self.dataOut.data_spc = spc
117 117 self.dataOut.data_cspc = cspc
118 118 self.dataOut.data_dc = dc
119 119 self.dataOut.blockSize = blocksize
120 120 self.dataOut.flagShiftFFT = True
121 121
122 122 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None, shift_fft=False):
123 123
124 124 if self.dataIn.type == "Spectra":
125 125 self.dataOut.copy(self.dataIn)
126 126 if shift_fft:
127 127 #desplaza a la derecha en el eje 2 determinadas posiciones
128 128 shift = int(self.dataOut.nFFTPoints/2)
129 129 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
130 130
131 131 if self.dataOut.data_cspc is not None:
132 132 #desplaza a la derecha en el eje 2 determinadas posiciones
133 133 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
134 134
135 135 return True
136 136
137 137 if self.dataIn.type == "Voltage":
138 138
139 139 self.dataOut.flagNoData = True
140 140
141 141 if nFFTPoints == None:
142 142 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
143 143
144 144 if nProfiles == None:
145 145 nProfiles = nFFTPoints
146 146
147 147 if ippFactor == None:
148 148 ippFactor = 1
149 149
150 150 self.dataOut.ippFactor = ippFactor
151 151
152 152 self.dataOut.nFFTPoints = nFFTPoints
153 153 self.dataOut.pairsList = pairsList
154 154
155 155 if self.buffer is None:
156 156 self.buffer = numpy.zeros((self.dataIn.nChannels,
157 157 nProfiles,
158 158 self.dataIn.nHeights),
159 159 dtype='complex')
160 160
161 161 if self.dataIn.flagDataAsBlock:
162 162 nVoltProfiles = self.dataIn.data.shape[1]
163 163
164 164 if nVoltProfiles == nProfiles:
165 165 self.buffer = self.dataIn.data.copy()
166 166 self.profIndex = nVoltProfiles
167 167
168 168 elif nVoltProfiles < nProfiles:
169 169
170 170 if self.profIndex == 0:
171 171 self.id_min = 0
172 172 self.id_max = nVoltProfiles
173 173
174 174 self.buffer[:, self.id_min:self.id_max,
175 175 :] = self.dataIn.data
176 176 self.profIndex += nVoltProfiles
177 177 self.id_min += nVoltProfiles
178 178 self.id_max += nVoltProfiles
179 179 else:
180 180 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
181 181 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
182 182 self.dataOut.flagNoData = True
183 183 return 0
184 184 else:
185 185 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
186 186 self.profIndex += 1
187 187
188 188 if self.firstdatatime == None:
189 189 self.firstdatatime = self.dataIn.utctime
190 190
191 191 if self.profIndex == nProfiles:
192 192 self.__updateSpecFromVoltage()
193 193 self.__getFft()
194 194
195 195 self.dataOut.flagNoData = False
196 196 self.firstdatatime = None
197 197 self.profIndex = 0
198 198
199 199 return True
200 200
201 201 raise ValueError("The type of input object '%s' is not valid" % (
202 202 self.dataIn.type))
203 203
204 204 def __selectPairs(self, pairsList):
205 205
206 206 if not pairsList:
207 207 return
208 208
209 209 pairs = []
210 210 pairsIndex = []
211 211
212 212 for pair in pairsList:
213 213 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
214 214 continue
215 215 pairs.append(pair)
216 216 pairsIndex.append(pairs.index(pair))
217 217
218 218 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
219 219 self.dataOut.pairsList = pairs
220 220
221 221 return
222 222
223 223 def __selectPairsByChannel(self, channelList=None):
224 224
225 225 if channelList == None:
226 226 return
227 227
228 228 pairsIndexListSelected = []
229 229 for pairIndex in self.dataOut.pairsIndexList:
230 230 # First pair
231 231 if self.dataOut.pairsList[pairIndex][0] not in channelList:
232 232 continue
233 233 # Second pair
234 234 if self.dataOut.pairsList[pairIndex][1] not in channelList:
235 235 continue
236 236
237 237 pairsIndexListSelected.append(pairIndex)
238 238
239 239 if not pairsIndexListSelected:
240 240 self.dataOut.data_cspc = None
241 241 self.dataOut.pairsList = []
242 242 return
243 243
244 244 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
245 245 self.dataOut.pairsList = [self.dataOut.pairsList[i]
246 246 for i in pairsIndexListSelected]
247 247
248 248 return
249 249
250 250 def selectChannels(self, channelList):
251 251
252 252 channelIndexList = []
253 253
254 254 for channel in channelList:
255 255 if channel not in self.dataOut.channelList:
256 256 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (
257 257 channel, str(self.dataOut.channelList)))
258 258
259 259 index = self.dataOut.channelList.index(channel)
260 260 channelIndexList.append(index)
261 261
262 262 self.selectChannelsByIndex(channelIndexList)
263 263
264 264 def selectChannelsByIndex(self, channelIndexList):
265 265 """
266 266 Selecciona un bloque de datos en base a canales segun el channelIndexList
267 267
268 268 Input:
269 269 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
270 270
271 271 Affected:
272 272 self.dataOut.data_spc
273 273 self.dataOut.channelIndexList
274 274 self.dataOut.nChannels
275 275
276 276 Return:
277 277 None
278 278 """
279 279
280 280 for channelIndex in channelIndexList:
281 281 if channelIndex not in self.dataOut.channelIndexList:
282 282 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % (
283 283 channelIndex, self.dataOut.channelIndexList))
284 284
285 # nChannels = len(channelIndexList)
286
287 285 data_spc = self.dataOut.data_spc[channelIndexList, :]
288 286 data_dc = self.dataOut.data_dc[channelIndexList, :]
289 287
290 288 self.dataOut.data_spc = data_spc
291 289 self.dataOut.data_dc = data_dc
292 290
293 self.dataOut.channelList = [
294 self.dataOut.channelList[i] for i in channelIndexList]
295 # self.dataOut.nChannels = nChannels
296
297 self.__selectPairsByChannel(self.dataOut.channelList)
291 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
292 self.dataOut.channelList = range(len(channelIndexList))
293 self.__selectPairsByChannel(channelIndexList)
298 294
299 295 return 1
300 296
301 297
def selectFFTs(self, minFFT, maxFFT ):
    """
    Restrict the spectra to the FFT bins whose frequency satisfies
    minFFT <= f <= maxFFT (delegates the slicing to selectFFTsByIndex).

    Out-of-range limits are clipped to the available frequency range.
    Returns 1.
    """

    if minFFT > maxFFT:
        raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))

    # assumes getFreqRange() is a pure getter, so one call suffices
    freqs = self.dataOut.getFreqRange()

    if minFFT < freqs[0]:
        minFFT = freqs[0]
    if maxFFT > freqs[-1]:
        maxFFT = freqs[-1]

    lo_candidates = numpy.where(freqs >= minFFT)[0]
    hi_candidates = numpy.where(freqs <= maxFFT)[0]

    # same fallbacks as the historical try/except on empty selections
    first = lo_candidates[0] if lo_candidates.size else 0
    last = hi_candidates[-1] if hi_candidates.size else len(freqs)

    self.selectFFTsByIndex(first, last)

    return 1
337 333
338 334
def setH0(self, h0, deltaHeight = None):
    """
    Shift the height axis so it starts at h0, keeping the current height
    step unless an explicit deltaHeight is supplied.
    """

    if not deltaHeight:
        # derive the step from the first two current heights
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]

    self.dataOut.heightList = h0 + deltaHeight * numpy.arange(self.dataOut.nHeights)
349 345
350 346
def selectHeights(self, minHei, maxHei):
    """
    Restrict the data to heights satisfying minHei <= h <= maxHei
    (the slicing itself is done by selectHeightsByIndex).

    Input:
        minHei : lowest height to keep
        maxHei : highest height to keep

    Out-of-range limits are clipped to the available height range.

    Return:
        1 on success; raises ValueError for an inverted range
    """

    if minHei > maxHei:
        raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei))

    heights = self.dataOut.heightList

    if minHei < heights[0]:
        minHei = heights[0]
    if maxHei > heights[-1]:
        maxHei = heights[-1]

    above = numpy.where(heights >= minHei)[0]
    below = numpy.where(heights <= maxHei)[0]

    # same fallbacks as the historical try/except on empty selections
    first = above[0] if above.size else 0
    last = below[-1] if below.size else len(heights)

    self.selectHeightsByIndex(first, last)

    return 1
398 394
def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
    """
    Reduce the spectra to the heights above the beacon reference and
    record which of them look like the beacon signal.

    tauindex     : index into radarControllerHeaderObj.Taus used as the
                   default height reference
    channelindex : channel whose averaged power selects the beacon heights
    hei_ref      : optional explicit height reference (overrides Taus)

    Affects dataOut.data_spc / data_cspc / data_dc / heightList and sets
    dataOut.beacon_heiIndexList.  Returns 1.
    """
    # heights above the reference delay (or above an explicit reference)
    newheis = numpy.where(
        self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])

    if hei_ref != None:
        newheis = numpy.where(self.dataOut.heightList > hei_ref)

    minIndex = min(newheis[0])
    maxIndex = max(newheis[0])
    data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
    heightList = self.dataOut.heightList[minIndex:maxIndex + 1]

    # number of height cells spanned by the transmitted beacon pulse
    nheis = int(self.dataOut.radarControllerHeaderObj.txB /
                (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
    avg_dB = 10 * \
        numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
    beacon_dB = numpy.sort(avg_dB)[-nheis:]
    beacon_heiIndexList = []
    # keep every height whose averaged power reaches the weakest of the
    # top-nheis powers
    for val in avg_dB.tolist():
        if val >= beacon_dB[0]:
            beacon_heiIndexList.append(avg_dB.tolist().index(val))

    #data_spc = data_spc[:,:,beacon_heiIndexList]
    data_cspc = None
    if self.dataOut.data_cspc is not None:
        data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
        #data_cspc = data_cspc[:,:,beacon_heiIndexList]

    data_dc = None
    if self.dataOut.data_dc is not None:
        data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
        #data_dc = data_dc[:,beacon_heiIndexList]

    self.dataOut.data_spc = data_spc
    self.dataOut.data_cspc = data_cspc
    self.dataOut.data_dc = data_dc
    self.dataOut.heightList = heightList
    self.dataOut.beacon_heiIndexList = beacon_heiIndexList

    return 1
440 436
def selectFFTsByIndex(self, minIndex, maxIndex):
    """
    Keep only the FFT bins in [minIndex, maxIndex] (inclusive) and update
    the dependent metadata (ippSeconds, nFFTPoints, profilesPerBlock).

    Return:
        1 on success; raises ValueError for an invalid index range
    """

    if (minIndex < 0) or (minIndex > maxIndex):
        raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))

    if (maxIndex >= self.dataOut.nProfiles):
        maxIndex = self.dataOut.nProfiles - 1

    # Spectra
    data_spc = self.dataOut.data_spc[:, minIndex:maxIndex + 1, :]

    data_cspc = None
    if self.dataOut.data_cspc is not None:
        data_cspc = self.dataOut.data_cspc[:, minIndex:maxIndex + 1, :]

    data_dc = None
    if self.dataOut.data_dc is not None:
        data_dc = self.dataOut.data_dc[minIndex:maxIndex + 1, :]

    self.dataOut.data_spc = data_spc
    self.dataOut.data_cspc = data_cspc
    self.dataOut.data_dc = data_dc

    # FIX: the new bin count used to be taken from numpy.shape(data_cspc),
    # which crashed with an IndexError whenever no cross-spectra exist;
    # data_spc always exists and has the same FFT axis length.
    nSelected = data_spc.shape[1]
    self.dataOut.ippSeconds = self.dataOut.ippSeconds * (self.dataOut.nFFTPoints / nSelected)
    self.dataOut.nFFTPoints = nSelected
    self.dataOut.profilesPerBlock = nSelected

    return 1
472 468
473 469
474 470
def selectHeightsByIndex(self, minIndex, maxIndex):
    """
    Keep only the heights whose index lies in [minIndex, maxIndex]
    (inclusive).

    Input:
        minIndex : first height index to keep
        maxIndex : last height index to keep (clipped to nHeights - 1)

    Affected:
        self.dataOut.data_spc / data_cspc / data_dc / heightList

    Return:
        1 on success; raises ValueError for an invalid index range
    """

    if minIndex < 0 or minIndex > maxIndex:
        raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
            minIndex, maxIndex))

    out = self.dataOut

    if maxIndex >= out.nHeights:
        maxIndex = out.nHeights - 1

    stop = maxIndex + 1

    out.data_spc = out.data_spc[:, :, minIndex:stop]
    out.data_cspc = out.data_cspc[:, :, minIndex:stop] if out.data_cspc is not None else None
    out.data_dc = out.data_dc[:, minIndex:stop] if out.data_dc is not None else None
    out.heightList = out.heightList[minIndex:stop]

    return 1
519 515
def removeDC(self, mode=2):
    """
    Remove the DC line (zero-frequency bin) from the spectra, in place.

    mode 1: replace the DC bin by the average of its two nearest
            neighbours (+/-1 bins).
    mode 2: replace it by the value of a cubic polynomial fitted through
            the bins at offsets -2, -1, +1, +2.

    Returns 1.
    """
    jspectra = self.dataOut.data_spc
    jcspectra = self.dataOut.data_cspc

    num_chan = jspectra.shape[0]
    num_hei = jspectra.shape[2]

    if jcspectra is not None:
        jcspectraExist = True
        num_pairs = jcspectra.shape[0]
    else:
        jcspectraExist = False

    # index of the zero-frequency bin and of its +/-1, +/-2 neighbours
    freq_dc = int(jspectra.shape[1] / 2)
    ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
    ind_vel = ind_vel.astype(int)

    if ind_vel[0] < 0:
        # NOTE(review): self.num_prof is not assigned anywhere in this
        # class, so this branch (freq_dc < 2) would raise AttributeError
        # — confirm whether it is dead code.
        ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof

    if mode == 1:
        jspectra[:, freq_dc, :] = (
            jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2  # correction

        if jcspectraExist:
            jcspectra[:, freq_dc, :] = (
                jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2

    if mode == 2:

        # Vandermonde system for a cubic through the four neighbour bins;
        # only the constant coefficient (row 0 of the inverse) is needed
        # to evaluate the fit at velocity 0.
        vel = numpy.array([-2, -1, 1, 2])
        xx = numpy.zeros([4, 4])

        for fil in range(4):
            xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))

        xx_inv = numpy.linalg.inv(xx)
        xx_aux = xx_inv[0, :]

        for ich in range(num_chan):
            yy = jspectra[ich, ind_vel, :]
            jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)

            # fall back to the neighbour average where the fit went
            # non-positive
            junkid = jspectra[ich, freq_dc, :] <= 0
            cjunkid = sum(junkid)

            if cjunkid.any():
                jspectra[ich, freq_dc, junkid.nonzero()] = (
                    jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2

        if jcspectraExist:
            for ip in range(num_pairs):
                yy = jcspectra[ip, ind_vel, :]
                jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)

    self.dataOut.data_spc = jspectra
    self.dataOut.data_cspc = jcspectra

    return 1
579 575
def removeInterference2(self):
    """
    Flag interference-dominated FFT lines in the cross-spectra with NaN.

    Per pair: heights whose total line power is below a threshold are
    taken as signal-free; FFT lines whose summed power over those heights
    exceeds the ~98th percentile are treated as interference.
    """

    cspc = self.dataOut.data_cspc
    spc = self.dataOut.data_spc  # NOTE(review): read but never used
    Heights = numpy.arange(cspc.shape[2])
    realCspc = numpy.abs(cspc)

    for i in range(cspc.shape[0]):
        LinePower= numpy.sum(realCspc[i], axis=0)
        Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
        SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
        InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
        InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
        InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]


        InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
        #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
        # NOTE(review): len(InterferenceRange) is the length of the tuple
        # returned by numpy.where, not the number of flagged lines —
        # presumably the flagged-line count was intended; confirm.
        if len(InterferenceRange)<int(cspc.shape[1]*0.3):
            cspc[i,InterferenceRange,:] = numpy.NaN



    self.dataOut.data_cspc = cspc
604 600
def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
    """
    Estimate and subtract an average interference spectrum per channel
    (and per pair for the cross-spectra), using the quietest heights as
    the interference reference.

    interf        : 2 -> additionally clean the individual interfered
                    profiles
    hei_interf    : heights used to estimate the interference (default:
                    the upper half of the height range)
    nhei_interf   : number of reference heights to average (default 5)
    offhei_interf : offset into the power-sorted reference heights
                    (default 0)

    Works in place on dataOut.data_spc / data_cspc.  Returns 1.
    """

    jspectra = self.dataOut.data_spc
    jcspectra = self.dataOut.data_cspc
    jnoise = self.dataOut.getNoise()
    num_incoh = self.dataOut.nIncohInt

    num_channel = jspectra.shape[0]
    num_prof = jspectra.shape[1]
    num_hei = jspectra.shape[2]

    # hei_interf: default to the upper half of the height range
    if hei_interf is None:
        count_hei = int(num_hei / 2)
        hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
        hei_interf = numpy.asarray(hei_interf)[0]
    # nhei_interf
    # NOTE(review): count_hei is only bound when hei_interf is None, so
    # calling with an explicit hei_interf can hit a NameError below —
    # confirm.
    if (nhei_interf == None):
        nhei_interf = 5
    if (nhei_interf < 1):
        nhei_interf = 1
    if (nhei_interf > count_hei):
        nhei_interf = count_hei
    if (offhei_interf == None):
        offhei_interf = 0

    ind_hei = list(range(num_hei))
    # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
    # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
    mask_prof = numpy.asarray(list(range(num_prof)))
    num_mask_prof = mask_prof.size
    comp_mask_prof = [0, num_prof / 2]

    # noise_exist: whether jnoise carries a valid noise level per channel
    if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
        jnoise = numpy.nan
    # NOTE(review): when jnoise was just replaced by the scalar nan,
    # jnoise[0] is not subscriptable — looks like a latent bug; confirm.
    noise_exist = jnoise[0] < numpy.Inf

    # interference removal on the power spectra
    for ich in range(num_channel):
        # rank heights by their total power (ascending)
        power = jspectra[ich, mask_prof, :]
        power = power[:, hei_interf]
        power = power.sum(axis=0)
        psort = power.ravel().argsort()

        # average interference spectrum estimated over the quietest heights
        junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
            offhei_interf, nhei_interf + offhei_interf))]]]

        if noise_exist:
            # tmp_noise = jnoise[ich] / num_prof
            tmp_noise = jnoise[ich]
            junkspc_interf = junkspc_interf - tmp_noise
            #junkspc_interf[:,comp_mask_prof] = 0

        jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
        jspc_interf = jspc_interf.transpose()
        # split bins into noise-like and interference-like
        noiseid = numpy.where(
            jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
        noiseid = noiseid[0]
        cnoiseid = noiseid.size
        interfid = numpy.where(
            jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
        interfid = interfid[0]
        cinterfid = interfid.size

        if (cnoiseid > 0):
            jspc_interf[noiseid] = 0

        # widen the set of profiles to clean by one bin on each side
        if (cinterfid > 0):
            new_interfid = (
                numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
            new_interfid = numpy.asarray(new_interfid)
            new_interfid = {x for x in new_interfid}
            new_interfid = numpy.array(list(new_interfid))
            new_cinterfid = new_interfid.size
        else:
            new_cinterfid = 0

        for ip in range(new_cinterfid):
            ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
            jspc_interf[new_interfid[ip]
                        ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]

        jspectra[ich, :, ind_hei] = jspectra[ich, :,
                                             ind_hei] - jspc_interf  # fix indices

        # clean the profile with the strongest interference
        ListAux = jspc_interf[mask_prof].tolist()
        maxid = ListAux.index(max(ListAux))

        if cinterfid > 0:
            for ip in range(cinterfid * (interf == 2) - 1):
                ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
                       (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
                cind = len(ind)

                if (cind > 0):
                    # re-fill below-noise samples with randomized noise
                    jspectra[ich, interfid[ip], ind] = tmp_noise * \
                        (1 + (numpy.random.uniform(cind) - 0.5) /
                         numpy.sqrt(num_incoh))

            # cubic interpolation through the neighbours of the strongest
            # interfered profile
            ind = numpy.array([-2, -1, 1, 2])
            xx = numpy.zeros([4, 4])

            for id1 in range(4):
                xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))

            xx_inv = numpy.linalg.inv(xx)
            xx = xx_inv[:, 0]
            ind = (ind + maxid + num_mask_prof) % num_mask_prof
            yy = jspectra[ich, mask_prof[ind], :]
            jspectra[ich, mask_prof[maxid], :] = numpy.dot(
                yy.transpose(), xx)

        # clamp anything that fell below the noise floor
        indAux = (jspectra[ich, :, :] < tmp_noise *
                  (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
        jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
            (1 - 1 / numpy.sqrt(num_incoh))

    # interference removal in the cross-spectra
    if jcspectra is None:
        return jspectra, jcspectra
    num_pairs = int(jcspectra.size / (num_prof * num_hei))
    jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)

    for ip in range(num_pairs):

        #-------------------------------------------

        cspower = numpy.abs(jcspectra[ip, mask_prof, :])
        cspower = cspower[:, hei_interf]
        cspower = cspower.sum(axis=0)

        cspsort = cspower.ravel().argsort()
        junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
            offhei_interf, nhei_interf + offhei_interf))]]]
        junkcspc_interf = junkcspc_interf.transpose()
        jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf

        ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()

        median_real = int(numpy.median(numpy.real(
            junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
        median_imag = int(numpy.median(numpy.imag(
            junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
        comp_mask_prof = [int(e) for e in comp_mask_prof]
        # NOTE(review): numpy.complex is removed in NumPy >= 1.24; this
        # should become the builtin complex(...) — confirm target version.
        junkcspc_interf[comp_mask_prof, :] = numpy.complex(
            median_real, median_imag)

        for iprof in range(num_prof):
            ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
            jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]

        # subtract the interference estimate
        jcspectra[ip, :, ind_hei] = jcspectra[ip,
                                              :, ind_hei] - jcspc_interf

        ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
        maxid = ListAux.index(max(ListAux))

        # cubic interpolation through the neighbours of the strongest
        # interfered profile (same scheme as for the power spectra)
        ind = numpy.array([-2, -1, 1, 2])
        xx = numpy.zeros([4, 4])

        for id1 in range(4):
            xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))

        xx_inv = numpy.linalg.inv(xx)
        xx = xx_inv[:, 0]

        ind = (ind + maxid + num_mask_prof) % num_mask_prof
        yy = jcspectra[ip, mask_prof[ind], :]
        jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)

    # store results
    self.dataOut.data_spc = jspectra
    self.dataOut.data_cspc = jcspectra

    return 1
787 783
def setRadarFrequency(self, frequency=None):
    """Store the radar frequency on dataOut when given; always returns 1."""

    if frequency is not None:
        self.dataOut.frequency = frequency

    return 1
794 790
def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
    """
    Estimate the per-channel noise level (Hildebrand-Sekhon) over a
    height/velocity sub-window of the spectra.

    Limits default to the full height and velocity ranges; out-of-range
    values are clipped with a console warning.  Stores the result in
    dataOut.noise_estimation and returns 1.
    """
    # height range validation
    if minHei == None:
        minHei = self.dataOut.heightList[0]

    if maxHei == None:
        maxHei = self.dataOut.heightList[-1]

    if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
        print('minHei: %.2f is out of the heights range' % (minHei))
        print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
        minHei = self.dataOut.heightList[0]

    if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
        print('maxHei: %.2f is out of the heights range' % (maxHei))
        print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
        maxHei = self.dataOut.heightList[-1]

    # velocity range validation
    velrange = self.dataOut.getVelRange(1)

    if minVel == None:
        minVel = velrange[0]

    if maxVel == None:
        maxVel = velrange[-1]

    if (minVel < velrange[0]) or (minVel > maxVel):
        print('minVel: %.2f is out of the velocity range' % (minVel))
        print('minVel is setting to %.2f' % (velrange[0]))
        minVel = velrange[0]

    if (maxVel > velrange[-1]) or (maxVel < minVel):
        print('maxVel: %.2f is out of the velocity range' % (maxVel))
        print('maxVel is setting to %.2f' % (velrange[-1]))
        maxVel = velrange[-1]

    # height index selection
    minIndex = 0
    maxIndex = 0
    heights = self.dataOut.heightList

    inda = numpy.where(heights >= minHei)
    indb = numpy.where(heights <= maxHei)

    try:
        minIndex = inda[0][0]
    except:
        minIndex = 0

    try:
        maxIndex = indb[0][-1]
    except:
        maxIndex = len(heights)

    if (minIndex < 0) or (minIndex > maxIndex):
        raise ValueError("some value in (%d,%d) is not valid" % (
            minIndex, maxIndex))

    if (maxIndex >= self.dataOut.nHeights):
        maxIndex = self.dataOut.nHeights - 1

    # velocity index selection
    indminvel = numpy.where(velrange >= minVel)
    indmaxvel = numpy.where(velrange <= maxVel)
    try:
        minIndexVel = indminvel[0][0]
    except:
        minIndexVel = 0

    try:
        maxIndexVel = indmaxvel[0][-1]
    except:
        maxIndexVel = len(velrange)

    # spectra sub-window
    data_spc = self.dataOut.data_spc[:,
                                     minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
    # noise estimation per channel
    noise = numpy.zeros(self.dataOut.nChannels)

    for channel in range(self.dataOut.nChannels):
        daux = data_spc[channel, :, :]
        noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)

    self.dataOut.noise_estimation = noise.copy()

    return 1
883 879
884 880
class IncohInt(Operation):
    """
    Incoherent integration of spectra: accumulates data_spc / data_cspc /
    data_dc over `n` blocks (or over a time interval) and releases the
    sum once the target is reached.
    """

    __profIndex = 0            # blocks accumulated so far
    __withOverapping = False   # NOTE(review): unused; the 'overlapping' setup arg is ignored — confirm

    __byTime = False           # integrate by elapsed time instead of by count
    __initime = None           # timestamp of the first accumulated block
    __lastdatatime = None
    __integrationtime = None   # target integration time [s] when __byTime

    __buffer_spc = None        # running sums
    __buffer_cspc = None
    __buffer_dc = None

    __dataReady = False        # True when an integrated block is available

    __timeInterval = None

    n = None                   # integrations per output block

    def __init__(self):

        Operation.__init__(self)

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

        n : number of integrations to accumulate per output block
        timeInterval : integration time [s]; only used when "n" is not given
        overlapping : accepted for interface compatibility but not used

        """

        self.__initime = None
        self.__lastdatatime = 0

        self.__buffer_spc = 0
        self.__buffer_cspc = 0
        self.__buffer_dc = 0

        self.__profIndex = 0
        self.__dataReady = False
        self.__byTime = False

        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = int(n)
        else:

            self.__integrationtime = int(timeInterval)
            self.n = None
            self.__byTime = True

    def putData(self, data_spc, data_cspc, data_dc):
        """
        Add a profile to the __buffer_spc and increase in one the __profileIndex

        """

        self.__buffer_spc += data_spc

        if data_cspc is None:
            self.__buffer_cspc = None
        else:
            self.__buffer_cspc += data_cspc

        if data_dc is None:
            self.__buffer_dc = None
        else:
            self.__buffer_dc += data_dc

        self.__profIndex += 1

        return

    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

        self.__profileIndex

        """

        data_spc = self.__buffer_spc
        data_cspc = self.__buffer_cspc
        data_dc = self.__buffer_dc
        n = self.__profIndex

        # reset the accumulators for the next integration period
        self.__buffer_spc = 0
        self.__buffer_cspc = 0
        self.__buffer_dc = 0
        self.__profIndex = 0

        return data_spc, data_cspc, data_dc, n

    def byProfiles(self, *args):
        # accumulate until n blocks have been summed

        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None

        self.putData(*args)

        if self.__profIndex == self.n:

            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def byTime(self, datatime, *args):
        # accumulate until the integration time has elapsed

        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None

        self.putData(*args)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def integrate(self, datatime, *args):
        # dispatch to time- or count-based accumulation; returns the start
        # time plus the sums once ready, all-None otherwise

        if self.__profIndex == 0:
            self.__initime = datatime

        if self.__byTime:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
                datatime, *args)
        else:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)

        if not self.__dataReady:
            return None, None, None, None

        return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc

    def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
        # n == 1 means no integration is required at all
        if n == 1:
            return

        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(n, timeInterval, overlapping)
            self.isConfig = True

        avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
                                                                            dataOut.data_spc,
                                                                            dataOut.data_cspc,
                                                                            dataOut.data_dc)

        if self.__dataReady:

            dataOut.data_spc = avgdata_spc
            dataOut.data_cspc = avgdata_cspc
            dataOut.data_dc = avgdata_dc
            dataOut.nIncohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False

        return dataOut
@@ -1,1329 +1,1329
1 1 import sys
2 2 import numpy
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10 @MPDecorator
11 11 class VoltageProc(ProcessingUnit):
12 12
def __init__(self):
    """Build the processing unit with an empty Voltage output object."""

    ProcessingUnit.__init__(self)

    self.dataOut = Voltage()
    self.flip = 1  # sign toggle consumed by deFlip()
    self.setupReq = False
20 20
def run(self):
    """Populate dataOut from dataIn according to the input data type."""

    in_type = self.dataIn.type

    if in_type == 'AMISR':
        # AMISR input needs a field-by-field translation
        self.__updateObjFromAmisrInput()

    if in_type == 'Voltage':
        self.dataOut.copy(self.dataIn)
30 30
def __updateObjFromAmisrInput(self):
    """Mirror the AMISR input attributes onto the Voltage output object."""

    src = self.dataIn
    dst = self.dataOut

    dst.timeZone = src.timeZone
    dst.dstFlag = src.dstFlag
    dst.errorCount = src.errorCount
    dst.useLocalTime = src.useLocalTime

    dst.flagNoData = src.flagNoData
    dst.data = src.data
    dst.utctime = src.utctime
    dst.channelList = src.channelList
    dst.heightList = src.heightList
    dst.nProfiles = src.nProfiles

    dst.nCohInt = src.nCohInt
    dst.ippSeconds = src.ippSeconds
    dst.frequency = src.frequency

    dst.azimuth = src.azimuth
    dst.zenith = src.zenith

    # beam pointing description
    dst.beam.codeList = src.beam.codeList
    dst.beam.azimuthList = src.beam.azimuthList
    dst.beam.zenithList = src.beam.zenithList
56 56 #
57 57 # pass#
58 58 #
59 59 # def init(self):
60 60 #
61 61 #
62 62 # if self.dataIn.type == 'AMISR':
63 63 # self.__updateObjFromAmisrInput()
64 64 #
65 65 # if self.dataIn.type == 'Voltage':
66 66 # self.dataOut.copy(self.dataIn)
67 67 # # No necesita copiar en cada init() los atributos de dataIn
68 68 # # la copia deberia hacerse por cada nuevo bloque de datos
69 69
def selectChannels(self, channelList):
    """
    Keep only the requested channel numbers (translates them to positions
    and delegates to selectChannelsByIndex).
    """

    available = self.dataOut.channelList
    indexes = []

    for ch in channelList:
        if ch not in available:
            raise ValueError("Channel %d is not in %s" %(ch, str(available)))
        indexes.append(available.index(ch))

    self.selectChannelsByIndex(indexes)
82 82
def selectChannelsByIndex(self, channelIndexList):
    """
    Keep only the channels at the given positions.

    Input:
        channelIndexList : plain list of channel positions, e.g. [2, 3, 7]

    Affected:
        self.dataOut.data
        self.dataOut.channelList (renumbered 0..len-1)

    Return:
        1 on success; raises ValueError for an invalid position
    """

    valid = self.dataOut.channelIndexList
    for idx in channelIndexList:
        if idx not in valid:
            print(channelIndexList)
            raise ValueError("The value %d in channelIndexList is not valid" % idx)

    if self.dataOut.flagDataAsBlock:
        # block data layout: [nChannels, nProfiles, nHeis]
        picked = self.dataOut.data[channelIndexList, :, :]
    else:
        picked = self.dataOut.data[channelIndexList, :]

    self.dataOut.data = picked
    # channels are renumbered consecutively after the selection
    self.dataOut.channelList = range(len(channelIndexList))

    return 1
120 120
def selectHeights(self, minHei=None, maxHei=None):
    """
    Restrict the data to heights satisfying minHei <= h <= maxHei
    (the slicing itself is done by selectHeightsByIndex).

    Input:
        minHei : lowest height to keep (defaults to the first height)
        maxHei : highest height to keep (defaults to the last height)

    Out-of-range limits are clipped to the available height range.

    Return:
        1 on success
    """

    heights = self.dataOut.heightList

    if minHei is None:
        minHei = heights[0]
    if maxHei is None:
        maxHei = heights[-1]

    if minHei < heights[0]:
        minHei = heights[0]
    if maxHei > heights[-1]:
        maxHei = heights[-1]

    above = numpy.where(heights >= minHei)[0]
    below = numpy.where(heights <= maxHei)[0]

    # same fallbacks as the historical try/except on empty selections
    first = above[0] if above.size else 0
    last = below[-1] if below.size else len(heights)

    self.selectHeightsByIndex(first, last)

    return 1
169 169
170 170
def selectHeightsByIndex(self, minIndex, maxIndex):
    """
    Keep only the heights in the index range [minIndex, maxIndex).

    NOTE(review): unlike the docstring of the historical version, the
    slice below EXCLUDES maxIndex itself (and the spectra counterpart is
    inclusive) — confirm whether that asymmetry is intentional.

    Input:
        minIndex : first height index to keep
        maxIndex : end of the height index range (exclusive)

    Affected:
        self.dataOut.data
        self.dataOut.heightList

    Return:
        1 on success; raises ValueError on bad ranges or too few heights
    """

    if minIndex < 0 or minIndex > maxIndex:
        raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))

    if maxIndex >= self.dataOut.nHeights:
        maxIndex = self.dataOut.nHeights

    if self.dataOut.flagDataAsBlock:
        # block data layout: [nChannels, nProfiles, nHeis]
        sliced = self.dataOut.data[:, :, minIndex:maxIndex]
    else:
        sliced = self.dataOut.data[:, minIndex:maxIndex]

    self.dataOut.data = sliced
    self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]

    if self.dataOut.nHeights <= 1:
        raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))

    return 1
212 212
213 213
214 214 def filterByHeights(self, window):
215 215
216 216 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
217 217
218 218 if window == None:
219 219 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
220 220
221 221 newdelta = deltaHeight * window
222 222 r = self.dataOut.nHeights % window
223 223 newheights = (self.dataOut.nHeights-r)/window
224 224
225 225 if newheights <= 1:
226 226 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window))
227 227
228 228 if self.dataOut.flagDataAsBlock:
229 229 """
230 230 Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
231 231 """
232 232 buffer = self.dataOut.data[:, :, 0:int(self.dataOut.nHeights-r)]
233 233 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
234 234 buffer = numpy.sum(buffer,3)
235 235
236 236 else:
237 237 buffer = self.dataOut.data[:,0:int(self.dataOut.nHeights-r)]
238 238 buffer = buffer.reshape(self.dataOut.nChannels,int(self.dataOut.nHeights/window),int(window))
239 239 buffer = numpy.sum(buffer,2)
240 240
241 241 self.dataOut.data = buffer
242 242 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
243 243 self.dataOut.windowOfFilter = window
244 244
245 245 def setH0(self, h0, deltaHeight = None):
246 246
247 247 if not deltaHeight:
248 248 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
249 249
250 250 nHeights = self.dataOut.nHeights
251 251
252 252 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
253 253
254 254 self.dataOut.heightList = newHeiRange
255 255
256 256 def deFlip(self, channelList = []):
257 257
258 258 data = self.dataOut.data.copy()
259 259
260 260 if self.dataOut.flagDataAsBlock:
261 261 flip = self.flip
262 262 profileList = list(range(self.dataOut.nProfiles))
263 263
264 264 if not channelList:
265 265 for thisProfile in profileList:
266 266 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
267 267 flip *= -1.0
268 268 else:
269 269 for thisChannel in channelList:
270 270 if thisChannel not in self.dataOut.channelList:
271 271 continue
272 272
273 273 for thisProfile in profileList:
274 274 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
275 275 flip *= -1.0
276 276
277 277 self.flip = flip
278 278
279 279 else:
280 280 if not channelList:
281 281 data[:,:] = data[:,:]*self.flip
282 282 else:
283 283 for thisChannel in channelList:
284 284 if thisChannel not in self.dataOut.channelList:
285 285 continue
286 286
287 287 data[thisChannel,:] = data[thisChannel,:]*self.flip
288 288
289 289 self.flip *= -1.
290 290
291 291 self.dataOut.data = data
292 292
293 293 def setRadarFrequency(self, frequency=None):
294 294
295 295 if frequency != None:
296 296 self.dataOut.frequency = frequency
297 297
298 298 return 1
299 299
300 300 def interpolateHeights(self, topLim, botLim):
301 301 #69 al 72 para julia
302 302 #82-84 para meteoros
303 303 if len(numpy.shape(self.dataOut.data))==2:
304 304 sampInterp = (self.dataOut.data[:,botLim-1] + self.dataOut.data[:,topLim+1])/2
305 305 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
306 306 #self.dataOut.data[:,botLim:limSup+1] = sampInterp
307 307 self.dataOut.data[:,botLim:topLim+1] = sampInterp
308 308 else:
309 309 nHeights = self.dataOut.data.shape[2]
310 310 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
311 311 y = self.dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
312 312 f = interpolate.interp1d(x, y, axis = 2)
313 313 xnew = numpy.arange(botLim,topLim+1)
314 314 ynew = f(xnew)
315 315
316 316 self.dataOut.data[:,:,botLim:topLim+1] = ynew
317 317
318 318 # import collections
319 319
class CohInt(Operation):
    """
    Coherent integration of voltage profiles.

    Sums `n` consecutive profiles (or every profile received within
    `timeInterval` seconds) to raise SNR, multiplying dataOut.nCohInt
    accordingly. Supports overlapping (sliding-window) integration,
    strided integration and block-mode data.
    """

    isConfig = False
    __profIndex = 0           # profiles accumulated so far
    __byTime = False          # integrate by elapsed time instead of by count
    __initime = None          # timestamp of the first profile in the window
    __lastdatatime = None
    __integrationtime = None
    __buffer = None
    __bufferStride = []
    __dataReady = False
    __profIndexStride = 0
    __dataToPutStride = False
    n = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
        """
        Set the parameters of the integration class.

        Inputs:

        n : Number of coherent integrations
        timeInterval : Time of integration. If the parameter "n" is selected this one does not work
        overlapping : use a sliding window instead of disjoint groups
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False
        self.byblock = byblock
        self.stride = stride

        if n == None and timeInterval == None:
            raise ValueError("n or timeInterval should be specified ...")

        if n != None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval  #* 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverlapping = True
            self.__buffer = None
        else:
            self.__withOverlapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):
        """
        Add a profile to the __buffer and increase in one the __profileIndex
        """

        if not self.__withOverlapping:
            # Disjoint integration: plain running sum.
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping data: keep the last n profiles stacked on a new axis.
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty then it takes the data value
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If the buffer length is lower than n then keep stacking
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Buffer full: slide the window by one profile.
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n - 1] = data
        self.__profIndex = self.n
        return

    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

        self.__profileIndex
        """

        if not self.__withOverlapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Integration with overlapping: sum the stacked window; the window
        # itself is kept so the next push overlaps.
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        # Accumulate; emit the sum once n profiles are gathered.
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:
            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        # Accumulate; emit once the configured integration time has elapsed.
        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrateByStride(self, data, datatime):
        """
        Collect n*stride profiles, then emit `stride` interleaved sums
        (one per subsequent call) until the batch is drained.
        """
        if self.__profIndex == 0:
            self.__buffer = [[data.copy(), datatime]]
        else:
            self.__buffer.append([data.copy(), datatime])
        self.__profIndex += 1
        self.__dataReady = False

        if self.__profIndex == self.n * self.stride:
            self.__dataToPutStride = True
            self.__profIndexStride = 0
            self.__profIndex = 0
            self.__bufferStride = []
            for i in range(self.stride):
                # Every stride-th profile belongs to the same output sum.
                current = self.__buffer[i::self.stride]
                data = numpy.sum([t[0] for t in current], axis=0)
                avgdatatime = numpy.average([t[1] for t in current])
                self.__bufferStride.append((data, avgdatatime))

        if self.__dataToPutStride:
            self.__dataReady = True
            self.__profIndexStride += 1
            if self.__profIndexStride == self.stride:
                self.__dataToPutStride = False
            return self.__bufferStride[self.__profIndexStride - 1]

        return None, None

    def integrate(self, data, datatime=None):
        """
        Feed one profile; returns (avgdata, avgdatatime) when a sum is
        ready, otherwise (None, None).
        """
        if self.__initime == None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        deltatime = datatime - self.__lastdatatime

        if not self.__withOverlapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def integrateByBlock(self, dataOut):
        # Sum disjoint groups of n profiles along the profile axis.
        times = int(dataOut.data.shape[1] / self.n)
        # builtin `complex`: the numpy.complex alias was removed in NumPy 1.24
        avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=complex)

        id_min = 0
        id_max = self.n

        for i in range(times):
            junk = dataOut.data[:, id_min:id_max, :]
            avgdata[:, i, :] = junk.sum(axis=1)
            id_min += self.n
            id_max += self.n

        timeInterval = dataOut.ippSeconds * self.n
        avgdatatime = (times - 1) * timeInterval + dataOut.utctime
        self.__dataReady = True
        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
        """
        Apply coherent integration to dataOut; flagNoData stays True until
        an integrated sum is ready.
        """
        if not self.isConfig:
            self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            # Block data: dimension = [nChannels, nProfiles, nHeis]
            avgdata, avgdatatime = self.integrateByBlock(dataOut)
            # keep nProfiles integral (plain /= yields a float in Python 3)
            dataOut.nProfiles = int(dataOut.nProfiles / self.n)
        else:
            if stride is None:
                avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
            else:
                avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
        return dataOut
581 581
class Decoder(Operation):
    """
    Decode phase-coded voltage data, correlating each profile with its
    transmit code in the time domain (mode 0) or multiplying spectra in
    the frequency domain (mode 1).
    """

    isConfig = False
    __profIndex = 0

    code = None    # (nCode, nBaud) code matrix, possibly oversampled
    nCode = None   # number of code sequences
    nBaud = None   # bauds per sequence (after oversampling)

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.times = None
        self.osamp = None
        self.isConfig = False
        self.setupReq = False

    def setup(self, code, osamp, dataOut):
        """
        Precompute the (optionally oversampled) code, its conjugated FFT
        template and the output buffers sized to the incoming data.
        """
        self.__profIndex = 0

        self.code = code

        self.nCode = len(code)
        self.nBaud = len(code[0])

        if (osamp != None) and (osamp > 1):
            self.osamp = osamp
            self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
            self.nBaud = self.nBaud * self.osamp

        self.__nChannels = dataOut.nChannels
        self.__nProfiles = dataOut.nProfiles
        self.__nHeis = dataOut.nHeights

        if self.__nHeis < self.nBaud:
            raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))

        # Frequency-domain template: conjugated FFT of the zero-padded code.
        # builtin `complex`: the numpy.complex alias was removed in NumPy 1.24
        __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)

        __codeBuffer[:, 0:self.nBaud] = self.code

        self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))

        if dataOut.flagDataAsBlock:

            self.ndatadec = self.__nHeis #- self.nBaud + 1

            self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=complex)

        else:

            # Time-domain output buffer, one profile at a time.
            self.ndatadec = self.__nHeis #- self.nBaud + 1

            self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)

    def __convolutionInFreq(self, data):
        # Circular correlation: multiply spectrum by the conjugated code FFT.
        fft_code = self.fft_code[self.__profIndex].reshape(1, -1)

        fft_data = numpy.fft.fft(data, axis=1)

        conv = fft_data * fft_code

        data = numpy.fft.ifft(conv, axis=1)

        return data

    def __convolutionInFreqOpt(self, data):

        raise NotImplementedError

    def __convolutionInTime(self, data):
        # Correlate every channel with the code of the current profile.
        code = self.code[self.__profIndex]
        for i in range(self.__nChannels):
            self.datadecTime[i, :] = numpy.correlate(data[i, :], code, mode='full')[self.nBaud - 1:]

        return self.datadecTime

    def __convolutionByBlockInTime(self, data):

        # int(): true division would yield a float under Python 3 and break
        # the as_strided shape tuple below.
        repetitions = int(self.__nProfiles / self.nCode)

        junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
        junk = junk.flatten()
        code_block = numpy.reshape(junk, (self.nCode * repetitions, self.nBaud))
        profilesList = range(self.__nProfiles)

        for i in range(self.__nChannels):
            for j in profilesList:
                self.datadecTime[i, j, :] = numpy.correlate(data[i, j, :], code_block[j, :], mode='full')[self.nBaud - 1:]
        return self.datadecTime

    def __convolutionByBlockInFreq(self, data):

        raise NotImplementedError("Decoder by frequency fro Blocks not implemented")

        # Unreachable legacy implementation kept for reference:
        fft_code = self.fft_code[self.__profIndex].reshape(1, -1)

        fft_data = numpy.fft.fft(data, axis=2)

        conv = fft_data * fft_code

        data = numpy.fft.ifft(conv, axis=2)

        return data

    def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
        """
        Decode dataOut.data in place.

        Inputs:
            code  : code matrix; falls back to dataOut.code when None
            mode  : 0 time domain, 1 frequency domain, 2 not implemented,
                    3 is remapped to 0 with a warning
            times : deprecated, ignored
        """
        if dataOut.flagDecodeData:
            print("This data is already decoded, recoding again ...")

        if not self.isConfig:

            if code is None:
                if dataOut.code is None:
                    raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)

                code = dataOut.code
            else:
                code = numpy.array(code).reshape(nCode, nBaud)
            self.setup(code, osamp, dataOut)

            self.isConfig = True

        if mode == 3:
            sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)

        if times != None:
            sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")

        if self.code is None:
            print("Fail decoding: Code is not defined.")
            return

        self.__nProfiles = dataOut.nProfiles
        datadec = None

        if mode == 3:
            mode = 0

        if dataOut.flagDataAsBlock:
            # Decoding when data have been read as block
            if mode == 0:
                datadec = self.__convolutionByBlockInTime(dataOut.data)
            if mode == 1:
                datadec = self.__convolutionByBlockInFreq(dataOut.data)
        else:
            # Decoding when data have been read profile by profile
            if mode == 0:
                datadec = self.__convolutionInTime(dataOut.data)

            if mode == 1:
                datadec = self.__convolutionInFreq(dataOut.data)

            if mode == 2:
                datadec = self.__convolutionInFreqOpt(dataOut.data)

        if datadec is None:
            raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)

        dataOut.code = self.code
        dataOut.nCode = self.nCode
        dataOut.nBaud = self.nBaud

        dataOut.data = datadec

        dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]

        dataOut.flagDecodeData = True  # data is now decoded

        # Cycle the code index across calls (profile-by-profile decoding).
        if self.__profIndex == self.nCode - 1:
            self.__profIndex = 0
            return dataOut

        self.__profIndex += 1

        return dataOut
772 772 # dataOut.flagDeflipData = True #asumo q la data no esta sin flip
773 773
774 774
class ProfileConcat(Operation):
    """
    Concatenate m consecutive profiles end-to-end into one long profile,
    stretching the height axis by m. Profile-by-profile input only.
    """

    isConfig = False
    buffer = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def reset(self):
        # Clear the accumulation buffer and restart the concat cycle.
        self.buffer = numpy.zeros_like(self.buffer)
        self.start_index = 0
        self.times = 1

    def setup(self, data, m, n=1):
        # Buffer holds m profiles end-to-end: (nChannels, nHeights*m).
        self.buffer = numpy.zeros((data.shape[0], data.shape[1] * m), dtype=type(data[0, 0]))
        self.nHeights = data.shape[1]
        self.start_index = 0
        self.times = 1

    def concat(self, data):
        # Copy the incoming profile into its slot and advance the cursor.
        self.buffer[:, self.start_index:self.nHeights * self.times] = data.copy()
        self.start_index = self.start_index + self.nHeights

    def run(self, dataOut, m):
        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut.data, m, 1)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")

        self.concat(dataOut.data)
        self.times += 1
        if self.times > m:
            # m profiles collected: emit the concatenated profile.
            dataOut.data = self.buffer
            self.reset()
            dataOut.flagNoData = False
            # Stretch the height axis to cover the m concatenated profiles.
            deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
            xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
            dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
            dataOut.ippSeconds *= m
        return dataOut
824 824
class ProfileSelector(Operation):
    """
    Pass through only the selected profiles, chosen by explicit list,
    index range, list of ranges or AMISR beam.

    In block mode the profile axis is sliced directly; in
    profile-by-profile mode dataOut is flagged valid only when the
    current profile index is selected.
    """

    profileIndex = None
    # Total number of profiles in the current selection
    nProfiles = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def incProfileIndex(self):
        # Advance the output profile counter, wrapping at nProfiles.
        self.profileIndex += 1

        if self.profileIndex >= self.nProfiles:
            self.profileIndex = 0

    def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
        # True when minIndex <= profileIndex <= maxIndex.
        if profileIndex < minIndex:
            return False

        if profileIndex > maxIndex:
            return False

        return True

    def isThisProfileInList(self, profileIndex, profileList):
        # True when profileIndex belongs to profileList.
        if profileIndex not in profileList:
            return False

        return True

    def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):

        """
        ProfileSelector:

        Inputs:
            profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)

            profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)

            rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))

        """

        # A single (min, max) pair is normalized to a one-element list.
        if rangeList is not None:
            if type(rangeList[0]) not in (tuple, list):
                rangeList = [rangeList]

        dataOut.flagNoData = True

        if dataOut.flagDataAsBlock:
            """
            data dimension = [nChannels, nProfiles, nHeis]
            """
            if profileList != None:
                dataOut.data = dataOut.data[:,profileList,:]

            if profileRangeList != None:
                minIndex = profileRangeList[0]
                maxIndex = profileRangeList[1]
                profileList = list(range(minIndex, maxIndex+1))

                dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]

            if rangeList != None:

                profileList = []

                for thisRange in rangeList:
                    minIndex = thisRange[0]
                    maxIndex = thisRange[1]

                    profileList.extend(list(range(minIndex, maxIndex+1)))

                dataOut.data = dataOut.data[:,profileList,:]

            dataOut.nProfiles = len(profileList)
            dataOut.profileIndex = dataOut.nProfiles - 1
            dataOut.flagNoData = False

            return dataOut

        """
        data dimension = [nChannels, nHeis]
        """

        if profileList != None:

            if self.isThisProfileInList(dataOut.profileIndex, profileList):

                self.nProfiles = len(profileList)
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
            return dataOut

        if profileRangeList != None:

            minIndex = profileRangeList[0]
            maxIndex = profileRangeList[1]

            if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                self.nProfiles = maxIndex - minIndex + 1
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
            return dataOut

        if rangeList != None:

            # First pass: total number of selected profiles across ranges.
            nProfiles = 0

            for thisRange in rangeList:
                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                nProfiles += maxIndex - minIndex + 1

            # Second pass: accept the current profile if any range holds it.
            for thisRange in rangeList:

                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                    self.nProfiles = nProfiles
                    dataOut.nProfiles = self.nProfiles
                    dataOut.profileIndex = self.profileIndex
                    dataOut.flagNoData = False

                    self.incProfileIndex()

                    break

            return dataOut


        if beam != None: #beam is only for AMISR data
            if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
                dataOut.flagNoData = False
                dataOut.profileIndex = self.profileIndex

                self.incProfileIndex()

            return dataOut

        raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")

        #return False
        return dataOut
985 985
class Reshaper(Operation):
    """
    Reshape voltage data either to an explicit `shape` (block mode) or
    according to a tx factor `nTxs`; for nTxs < 1 in profile-by-profile
    mode, 1/nTxs short profiles are accumulated into one long profile.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__buffer = None
        self.__nitems = 0

    def __appendProfile(self, dataOut, nTxs):
        # Accumulate 1/nTxs consecutive short profiles into one long one.
        if self.__buffer is None:
            shape = (dataOut.nChannels, int(dataOut.nHeights / nTxs))
            self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)

        ini = dataOut.nHeights * self.__nitems
        end = ini + dataOut.nHeights

        self.__buffer[:, ini:end] = dataOut.data

        self.__nitems += 1

        return int(self.__nitems * nTxs)

    def __getBuffer(self):
        # Emit the assembled profile once 1/nTxs pieces were collected.
        if self.__nitems == int(1. / self.__nTxs):

            self.__nitems = 0

            return self.__buffer.copy()

        return None

    def __checkInputs(self, dataOut, shape, nTxs):
        """
        Validate `shape`/`nTxs` and normalize them to (shape_tuple, nTxs).
        """
        if shape is None and nTxs is None:
            raise ValueError("Reshaper: shape of factor should be defined")

        if nTxs:
            if nTxs < 0:
                raise ValueError("nTxs should be greater than 0")

            if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
                raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))

            # int(): keep the reshape target integral under Python 3
            # (true division yields floats, which numpy.reshape rejects).
            shape = [dataOut.nChannels, int(dataOut.nProfiles * nTxs), int(dataOut.nHeights / nTxs)]

            return shape, nTxs

        if len(shape) != 2 and len(shape) != 3:
            raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))

        if len(shape) == 2:
            shape_tuple = [dataOut.nChannels]
            shape_tuple.extend(shape)
        else:
            shape_tuple = list(shape)

        # Derive the tx factor from the requested profile count.
        nTxs = 1.0 * shape_tuple[1] / dataOut.nProfiles

        return shape_tuple, nTxs

    def run(self, dataOut, shape=None, nTxs=None):
        """
        Reshape dataOut.data; updates heightList, nProfiles, profileIndex
        and ippSeconds consistently with the new layout.
        """
        shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * self.__nTxs) - 1

        else:

            if self.__nTxs < 1:

                self.__appendProfile(dataOut, self.__nTxs)
                new_data = self.__getBuffer()

                if new_data is not None:
                    dataOut.data = new_data
                    dataOut.flagNoData = False

                # BUG FIX: use the normalized self.__nTxs — the raw `nTxs`
                # argument is None when the caller passes `shape`.
                profileIndex = dataOut.profileIndex * self.__nTxs

            else:
                raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * self.__nTxs)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= self.__nTxs

        return dataOut
1090 1090
class SplitProfiles(Operation):
    """
    Split every profile into `n` shorter profiles along the height axis.
    Block data only: (nChannels, nProfiles, nHeis) becomes
    (nChannels, nProfiles*n, nHeis/n); heightList and ippSeconds shrink
    accordingly.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def run(self, dataOut, n):

        dataOut.flagNoData = True

        if not dataOut.flagDataAsBlock:
            raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")

        # data comes as (nchannels, nprofiles, nsamples)
        nChan, nProf, nSamp = dataOut.data.shape

        if nSamp % n != 0:
            raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, nSamp))

        dataOut.data = numpy.reshape(dataOut.data, (nChan, nProf * n, int(nSamp / n)))
        dataOut.flagNoData = False

        profileIndex = int(dataOut.nProfiles / n) - 1

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights / n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= n

        return dataOut
1132 1132
class CombineProfiles(Operation):
    """
    Merge `n` consecutive profiles into one longer profile along the
    height axis (the converse reshape of SplitProfiles). In
    profile-by-profile mode the profiles are accumulated across calls.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__remData = None
        self.__profileIndex = 0

    def run(self, dataOut, n):
        """
        Combine groups of n profiles; returns None while accumulating in
        profile-by-profile mode, otherwise the updated dataOut.
        """
        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            #nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            if shape[1] % n != 0:
                raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))

            # int(): true division yields a float under Python 3, which
            # numpy.reshape rejects as a dimension.
            new_shape = shape[0], int(shape[1] / n), shape[2] * n

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * n) - 1

        else:

            #nchannels, nsamples
            if self.__remData is None:
                newData = dataOut.data
            else:
                newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)

            self.__profileIndex += 1

            if self.__profileIndex < n:
                # Still accumulating: keep the partial profile for next call.
                self.__remData = newData
                return

            self.__profileIndex = 0
            self.__remData = None

            dataOut.data = newData
            dataOut.flagNoData = False

            # int(): profile indexes must stay integral under Python 3.
            profileIndex = int(dataOut.profileIndex / n)

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights * n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles / n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds *= n

        return dataOut
1195 1195 # import collections
1196 1196 # from scipy.stats import mode
1197 1197 #
1198 1198 # class Synchronize(Operation):
1199 1199 #
1200 1200 # isConfig = False
1201 1201 # __profIndex = 0
1202 1202 #
1203 1203 # def __init__(self, **kwargs):
1204 1204 #
1205 1205 # Operation.__init__(self, **kwargs)
1206 1206 # # self.isConfig = False
1207 1207 # self.__powBuffer = None
1208 1208 # self.__startIndex = 0
1209 1209 # self.__pulseFound = False
1210 1210 #
1211 1211 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1212 1212 #
1213 1213 # #Read data
1214 1214 #
1215 1215 # powerdB = dataOut.getPower(channel = channel)
1216 1216 # noisedB = dataOut.getNoise(channel = channel)[0]
1217 1217 #
1218 1218 # self.__powBuffer.extend(powerdB.flatten())
1219 1219 #
1220 1220 # dataArray = numpy.array(self.__powBuffer)
1221 1221 #
1222 1222 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1223 1223 #
1224 1224 # maxValue = numpy.nanmax(filteredPower)
1225 1225 #
1226 1226 # if maxValue < noisedB + 10:
1227 1227 # #No se encuentra ningun pulso de transmision
1228 1228 # return None
1229 1229 #
1230 1230 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1231 1231 #
1232 1232 # if len(maxValuesIndex) < 2:
1233 1233 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1234 1234 # return None
1235 1235 #
1236 1236 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1237 1237 #
1238 1238 # #Seleccionar solo valores con un espaciamiento de nSamples
1239 1239 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1240 1240 #
1241 1241 # if len(pulseIndex) < 2:
1242 1242 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1243 1243 # return None
1244 1244 #
1245 1245 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1246 1246 #
1247 1247 # #remover senales que se distancien menos de 10 unidades o muestras
1248 1248 # #(No deberian existir IPP menor a 10 unidades)
1249 1249 #
1250 1250 # realIndex = numpy.where(spacing > 10 )[0]
1251 1251 #
1252 1252 # if len(realIndex) < 2:
1253 1253 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1254 1254 # return None
1255 1255 #
1256 1256 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1257 1257 # realPulseIndex = pulseIndex[realIndex]
1258 1258 #
1259 1259 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1260 1260 #
1261 1261 # print "IPP = %d samples" %period
1262 1262 #
1263 1263 # self.__newNSamples = dataOut.nHeights #int(period)
1264 1264 # self.__startIndex = int(realPulseIndex[0])
1265 1265 #
1266 1266 # return 1
1267 1267 #
1268 1268 #
1269 1269 # def setup(self, nSamples, nChannels, buffer_size = 4):
1270 1270 #
1271 1271 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1272 1272 # maxlen = buffer_size*nSamples)
1273 1273 #
1274 1274 # bufferList = []
1275 1275 #
1276 1276 # for i in range(nChannels):
1277 1277 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1278 1278 # maxlen = buffer_size*nSamples)
1279 1279 #
1280 1280 # bufferList.append(bufferByChannel)
1281 1281 #
1282 1282 # self.__nSamples = nSamples
1283 1283 # self.__nChannels = nChannels
1284 1284 # self.__bufferList = bufferList
1285 1285 #
1286 1286 # def run(self, dataOut, channel = 0):
1287 1287 #
1288 1288 # if not self.isConfig:
1289 1289 # nSamples = dataOut.nHeights
1290 1290 # nChannels = dataOut.nChannels
1291 1291 # self.setup(nSamples, nChannels)
1292 1292 # self.isConfig = True
1293 1293 #
1294 1294 # #Append new data to internal buffer
1295 1295 # for thisChannel in range(self.__nChannels):
1296 1296 # bufferByChannel = self.__bufferList[thisChannel]
1297 1297 # bufferByChannel.extend(dataOut.data[thisChannel])
1298 1298 #
1299 1299 # if self.__pulseFound:
1300 1300 # self.__startIndex -= self.__nSamples
1301 1301 #
1302 1302 # #Finding Tx Pulse
1303 1303 # if not self.__pulseFound:
1304 1304 # indexFound = self.__findTxPulse(dataOut, channel)
1305 1305 #
1306 1306 # if indexFound == None:
1307 1307 # dataOut.flagNoData = True
1308 1308 # return
1309 1309 #
1310 1310 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1311 1311 # self.__pulseFound = True
1312 1312 # self.__startIndex = indexFound
1313 1313 #
1314 1314 # #If pulse was found ...
1315 1315 # for thisChannel in range(self.__nChannels):
1316 1316 # bufferByChannel = self.__bufferList[thisChannel]
1317 1317 # #print self.__startIndex
1318 1318 # x = numpy.array(bufferByChannel)
1319 1319 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1320 1320 #
1321 1321 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1322 1322 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1323 1323 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1324 1324 #
1325 1325 # dataOut.data = self.__arrayBuffer
1326 1326 #
1327 1327 # self.__startIndex += self.__newNSamples
1328 1328 #
1329 1329 # return
General Comments 0
You need to be logged in to leave comments. Login now