##// END OF EJS Templates
digital rf fixes
José Chávez -
r1103:5c36aee43db5
parent child
Show More
@@ -1,790 +1,791
1 1
2 2 '''
3 3 Created on Jul 3, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 # SUBCHANNELS EN VEZ DE CHANNELS
8 8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
9 9 # ACTUALIZACION DE VERSION
10 10 # HEADERS
11 11 # MODULO DE ESCRITURA
12 12 # METADATA
13 13
14 14 import os
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19
20 20 try:
21 21 from gevent import sleep
22 22 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
26 26 from schainpy.model.data.jrodata import Voltage
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 28 from time import time
29 29
30 30 import cPickle
31 31 try:
32 32 import digital_rf
33 33 except:
34 34 print 'You should install "digital_rf" module if you want to read Digital RF data'
35 35
36 36
class DigitalRFReader(ProcessingUnit):
    '''
    Reads radar voltage data stored in Digital RF format (via the optional
    `digital_rf` module) and delivers it downstream as Voltage objects.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor

        Only initializes reader state here; dataset discovery and metadata
        loading are deferred to setup(), invoked from run() on first call.
        '''

        ProcessingUnit.__init__(self, **kwargs)

        self.dataOut = Voltage()
        self.__printInfo = True                  # print header info only once
        self.__flagDiscontinuousBlock = False
        self.__bufferIndex = 9999999             # large so the first getData() forces a read
        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
        self.dtype = None
        self.oldAverage = None                   # running average of per-read latency (seconds)
60 61 def close(self):
61 62 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
62 63 return
63 64
    def __getCurrentSecond(self):
        # Current read position expressed in Unix seconds
        # (absolute sample index divided by the sample rate).
        return self.__thisUnixSample / self.__sample_rate

    # Read-only accessor for the current position in Unix seconds.
    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69 70
    def __setFileHeader(self):
        '''
        In this method will be initialized every parameter of dataOut object (header, no data)
        '''
        ippSeconds = 1.0 * self.__nSamples / self.__sample_rate

        nProfiles = 1.0 / ippSeconds  # Number of profiles in one second

        # Prefer the radar controller header stored in the dataset metadata;
        # fall back to building one from the values gathered in setup().
        try:
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
                self.__radarControllerHeader)
        except:
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
                txA=0,
                txB=0,
                nWindows=1,
                nHeights=self.__nSamples,
                firstHeight=self.__firstHeigth,
                deltaHeight=self.__deltaHeigth,
                codeType=self.__codeType,
                nCode=self.__nCode, nBaud=self.__nBaud,
                code=self.__code)

        # Same pattern for the system header.
        try:
            self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
        except:
            self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                        nProfiles=nProfiles,
                                                        nChannels=len(
                                                            self.__channelList),
                                                        adcResolution=14)
        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = self.dtype

        # self.dataOut.nChannels = 0

        # self.dataOut.nHeights = 0

        self.dataOut.nProfiles = int(nProfiles)

        # Height axis: first height plus one range-gate step per sample.
        self.dataOut.heightList = self.__firstHeigth + \
            numpy.arange(self.__nSamples, dtype=numpy.float) * \
            self.__deltaHeigth

        # One output "channel" per digital_rf subchannel.
        self.dataOut.channelList = range(self.__num_subchannels)

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        # self.dataOut.channelIndexList = None

        self.dataOut.flagNoData = True

        self.dataOut.flagDataAsBlock = False
        # Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        # timezone like jroheader, difference in minutes between UTC and localtime
        self.dataOut.timeZone = self.__timezone / 60

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        # Values found in the metadata file override the setup() arguments;
        # any missing key aborts the whole group (original behaviour kept).
        try:
            self.dataOut.nCohInt = self.fixed_metadata_dict.get(
                'nCohInt', self.nCohInt)

            # assume the data is already decoded
            self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
                'flagDecodeData', self.flagDecodeData)

            # assume the data is not flipped
            self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']

            self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']

            self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
        except:
            pass

        self.dataOut.ippSeconds = ippSeconds

        # Time interval between profiles
        # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
163 164
164 165 def findDatafiles(self, path, startDate=None, endDate=None):
165 166
166 167 if not os.path.isdir(path):
167 168 return []
168 169
169 170 try:
170 171 digitalReadObj = digital_rf.DigitalRFReader(
171 172 path, load_all_metadata=True)
172 173 except:
173 174 digitalReadObj = digital_rf.DigitalRFReader(path)
174 175
175 176 channelNameList = digitalReadObj.get_channels()
176 177
177 178 if not channelNameList:
178 179 return []
179 180
180 181 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
181 182
182 183 sample_rate = metadata_dict['sample_rate'][0]
183 184
184 185 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
185 186
186 187 try:
187 188 timezone = this_metadata_file['timezone'].value
188 189 except:
189 190 timezone = 0
190 191
191 192 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
192 193 channelNameList[0]) / sample_rate - timezone
193 194
194 195 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
195 196 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
196 197
197 198 if not startDate:
198 199 startDate = startDatetime.date()
199 200
200 201 if not endDate:
201 202 endDate = endDatatime.date()
202 203
203 204 dateList = []
204 205
205 206 thisDatetime = startDatetime
206 207
207 208 while(thisDatetime <= endDatatime):
208 209
209 210 thisDate = thisDatetime.date()
210 211
211 212 if thisDate < startDate:
212 213 continue
213 214
214 215 if thisDate > endDate:
215 216 break
216 217
217 218 dateList.append(thisDate)
218 219 thisDatetime += datetime.timedelta(1)
219 220
220 221 return dateList
221 222
    def setup(self, path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0, 0, 0),
              endTime=datetime.time(23, 59, 59),
              channelList=None,
              nSamples=None,
              online=False,
              delay=60,
              buffer_size=1024,
              ippKm=None,
              nCohInt=1,
              nCode=1,
              nBaud=1,
              flagDecodeData=False,
              code=numpy.ones((1, 1), dtype=numpy.int),
              **kwargs):
        '''
        In this method we should set all initial parameters.

        Opens the Digital RF dataset at *path*, loads its metadata, clamps the
        requested [startDate, endDate] range to what is actually on disk, and
        pre-allocates the read buffer.

        Inputs:
            path
            startDate
            endDate
            startTime
            endTime
            set
            expLabel
            ext
            online
            delay
        '''
        self.nCohInt = nCohInt
        self.flagDecodeData = flagDecodeData
        self.i = 0
        if not os.path.isdir(path):
            raise ValueError, "[Reading] Directory %s does not exist" % path

        # load_all_metadata is only supported by newer digital_rf versions.
        try:
            self.digitalReadObj = digital_rf.DigitalRFReader(
                path, load_all_metadata=True)
        except:
            self.digitalReadObj = digital_rf.DigitalRFReader(path)

        channelNameList = self.digitalReadObj.get_channels()

        if not channelNameList:
            raise ValueError, "[Reading] Directory %s does not have any files" % path

        if not channelList:
            channelList = range(len(channelNameList))

        ########## Reading metadata ######################

        top_properties = self.digitalReadObj.get_properties(
            channelNameList[channelList[0]])

        self.__num_subchannels = top_properties['num_subchannels']
        self.__sample_rate = 1.0 * \
            top_properties['sample_rate_numerator'] / \
            top_properties['sample_rate_denominator']
        # self.__samples_per_file = top_properties['samples_per_file'][0]
        # NOTE(review): 0.15 looks like c/2 in km/us (range-gate spacing);
        # the original author also wondered -- confirm.
        self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate  # why 0.15?

        this_metadata_file = self.digitalReadObj.get_digital_metadata(
            channelNameList[channelList[0]])
        metadata_bounds = this_metadata_file.get_bounds()
        self.fixed_metadata_dict = this_metadata_file.read(
            metadata_bounds[0])[metadata_bounds[0]]  # GET FIRST HEADER

        # Header dicts and dtype come pickled/nested inside the metadata;
        # any missing key leaves the attributes unset (original behaviour).
        try:
            self.__processingHeader = self.fixed_metadata_dict['processingHeader']
            self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
            self.__systemHeader = self.fixed_metadata_dict['systemHeader']
            self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype'])
        except:
            pass

        self.__frequency = None

        self.__frequency = self.fixed_metadata_dict.get('frequency', 1)

        self.__timezone = self.fixed_metadata_dict.get('timezone', 300)

        # Metadata-provided nSamples overrides the argument.
        try:
            nSamples = self.fixed_metadata_dict['nSamples']
        except:
            nSamples = None

        self.__firstHeigth = 0

        try:
            codeType = self.__radarControllerHeader['codeType']
        except:
            codeType = 0

        try:
            if codeType:
                nCode = self.__radarControllerHeader['nCode']
                nBaud = self.__radarControllerHeader['nBaud']
                code = self.__radarControllerHeader['code']
        except:
            pass

        if not ippKm:
            try:
                # seconds to km
                ippKm = self.__radarControllerHeader['ipp']
            except:
                ippKm = None
        ####################################################
        self.__ippKm = ippKm
        startUTCSecond = None
        endUTCSecond = None

        if startDate:
            startDatetime = datetime.datetime.combine(startDate, startTime)
            startUTCSecond = (
                startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone

        if endDate:
            endDatetime = datetime.datetime.combine(endDate, endTime)
            endUTCSecond = (endDatetime - datetime.datetime(1970,
                                                            1, 1)).total_seconds() + self.__timezone

        start_index, end_index = self.digitalReadObj.get_bounds(
            channelNameList[channelList[0]])

        # Clamp the requested range to the samples actually present on disk.
        if not startUTCSecond:
            startUTCSecond = start_index / self.__sample_rate

        if start_index > startUTCSecond * self.__sample_rate:
            startUTCSecond = start_index / self.__sample_rate

        if not endUTCSecond:
            endUTCSecond = end_index / self.__sample_rate

        if end_index < endUTCSecond * self.__sample_rate:
            endUTCSecond = end_index / self.__sample_rate
        if not nSamples:
            if not ippKm:
                raise ValueError, "[Reading] nSamples or ippKm should be defined"
            # Derive samples-per-profile from the IPP and the range-gate step.
            nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
        channelBoundList = []
        channelNameListFiltered = []

        for thisIndexChannel in channelList:
            thisChannelName = channelNameList[thisIndexChannel]
            start_index, end_index = self.digitalReadObj.get_bounds(
                thisChannelName)
            channelBoundList.append((start_index, end_index))
            channelNameListFiltered.append(thisChannelName)

        self.profileIndex = 0
        self.i = 0
        self.__delay = delay

        self.__codeType = codeType
        self.__nCode = nCode
        self.__nBaud = nBaud
        self.__code = code

        self.__datapath = path
        self.__online = online
        self.__channelList = channelList
        self.__channelNameList = channelNameListFiltered
        self.__channelBoundList = channelBoundList
        self.__nSamples = nSamples
        self.__samples_to_read = long(nSamples)  # FIXED: NOW 40
        self.__nChannels = len(self.__channelList)

        self.__startUTCSecond = startUTCSecond
        self.__endUTCSecond = endUTCSecond

        self.__timeInterval = 1.0 * self.__samples_to_read / \
            self.__sample_rate  # Time interval

        if online:
            # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
            startUTCSecond = numpy.floor(endUTCSecond)

        # Start one block early: __readNextBlock() adds __samples_to_read
        # before reading.
        self.__thisUnixSample = long(
            startUTCSecond * self.__sample_rate) - self.__samples_to_read

        self.__data_buffer = numpy.zeros(
            (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)

        self.__setFileHeader()
        self.isConfig = True

        print "[Reading] Digital RF Data was found from %s to %s " % (
            datetime.datetime.utcfromtimestamp(
                self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(
                self.__endUTCSecond - self.__timezone)
        )

        print "[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
                                                            datetime.datetime.utcfromtimestamp(
                                                                endUTCSecond - self.__timezone)
                                                            )
        self.oldAverage = None
        self.count = 0
        self.executionTime = 0
427 428
    def __reload(self):
        '''
        Refresh the digital_rf metadata (online mode) and widen the known
        time range; return True when a later end bound was found, else False.
        '''
        # print
        # print "%s not in range [%s, %s]" %(
        # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
        # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
        # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        # )
        print "[Reading] reloading metadata ..."

        # complete_update is only supported by newer digital_rf versions.
        try:
            self.digitalReadObj.reload(complete_update=True)
        except:
            self.digitalReadObj.reload()

        start_index, end_index = self.digitalReadObj.get_bounds(
            self.__channelNameList[self.__channelList[0]])

        if start_index > self.__startUTCSecond * self.__sample_rate:
            self.__startUTCSecond = 1.0 * start_index / self.__sample_rate

        if end_index > self.__endUTCSecond * self.__sample_rate:
            self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
            print
            print "[Reading] New timerange found [%s, %s] " % (
                datetime.datetime.utcfromtimestamp(
                    self.__startUTCSecond - self.__timezone),
                datetime.datetime.utcfromtimestamp(
                    self.__endUTCSecond - self.__timezone)
            )

            return True

        return False
461 462
462 463 def timeit(self, toExecute):
463 464 t0 = time()
464 465 toExecute()
465 466 self.executionTime = time() - t0
466 467 if self.oldAverage is None:
467 468 self.oldAverage = self.executionTime
468 469 self.oldAverage = (self.executionTime + self.count *
469 470 self.oldAverage) / (self.count + 1.0)
470 471 self.count = self.count + 1.0
471 472 return
472 473
    def __readNextBlock(self, seconds=30, volt_scale=1):
        '''
        Read the next __samples_to_read samples for every subchannel into
        __data_buffer.  Returns True on success; False when the selected
        time range is exhausted or no subchannel delivered data.
        '''

        # Set the next data
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
            print "[Reading] There are no more data into selected time-range"
            if self.__online:
                self.__reload()
            else:
                return False

            # Still past the end after reloading: give up, stepping the
            # cursor back so the next call re-tries the same position.
            if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
                return False
            self.__thisUnixSample -= self.__samples_to_read

        indexChannel = 0

        dataOk = False
        for thisChannelName in self.__channelNameList:  # TODO SEVERAL CHANNELS?
            for indexSubchannel in range(self.__num_subchannels):
                try:
                    t0 = time()
                    # NOTE(review): read_vector_c81d presumably returns a 1-D
                    # complex vector of the requested length -- confirm
                    # against the digital_rf API docs.
                    result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                                  self.__samples_to_read,
                                                                  thisChannelName, sub_channel=indexSubchannel)
                    self.executionTime = time() - t0
                    # Inline running average of read latency (same formula
                    # as timeit()).
                    if self.oldAverage is None:
                        self.oldAverage = self.executionTime
                    self.oldAverage = (
                        self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
                    self.count = self.count + 1.0

                except IOError, e:
                    # read next profile
                    self.__flagDiscontinuousBlock = True
                    print "[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
                    break

                if result.shape[0] != self.__samples_to_read:
                    self.__flagDiscontinuousBlock = True
                    print "[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                           result.shape[0],
                                                                                           self.__samples_to_read)
                    break

                self.__data_buffer[indexSubchannel, :] = result * volt_scale

            indexChannel += 1

            # A channel whose inner loop completed (or broke after at least
            # one pass) marks the block usable.
            dataOk = True

        self.__utctime = self.__thisUnixSample / self.__sample_rate

        if not dataOk:
            return False

        print "[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                      self.__samples_to_read,
                                                      self.__timeInterval)

        # Reset the consumer cursor: getData() slices profiles from here.
        self.__bufferIndex = 0

        return True
540 541
541 542 def __isBufferEmpty(self):
542 543 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
543 544
    def getData(self, seconds=30, nTries=5):
        '''
        This method gets the data from files and put the data into the dataOut object

        In addition, increases the buffer counter by one.

        Return:
            data : returns one profile of voltages (heights * channels) copied
                from the buffer.  Returns False when there is nothing left to
                read in the selected time range.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():
            self.__flagDiscontinuousBlock = False

            # Keep trying to read a block; in online mode wait *seconds*
            # between attempts, up to nTries times.
            while True:
                if self.__readNextBlock():
                    break
                if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print '[Reading] discontinuous block found ... continue with the next block'
                    continue

                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print '[Reading] waiting %d seconds to read a new block' % seconds
                sleep(seconds)

        # Slice one profile (nSamples) from the buffer for every subchannel.
        self.dataOut.data = self.__data_buffer[:,
                                               self.__bufferIndex:self.__bufferIndex + self.__nSamples]
        self.dataOut.utctime = (
            self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        # Wrap the profile counter once a full second of profiles was served.
        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True
602 603
603 604 def printInfo(self):
604 605 '''
605 606 '''
606 607 if self.__printInfo == False:
607 608 return
608 609
609 610 # self.systemHeaderObj.printInfo()
610 611 # self.radarControllerHeaderObj.printInfo()
611 612
612 613 self.__printInfo = False
613 614
614 615 def printNumberOfBlock(self):
615 616 '''
616 617 '''
617 618 return
618 619 # print self.profileIndex
619 620
    def run(self, **kwargs):
        '''
        This method will be called many times so here you should put all your code

        Configures the reader on the first call, then delivers one profile
        per call via getData().
        '''

        if not self.isConfig:
            self.setup(**kwargs)
            # self.i = self.i+1
        self.getData(seconds=self.__delay)

        return
631 632
632 633
class DigitalRFWriter(Operation):
    '''
    Writes Voltage data to disk in Digital RF format via the `digital_rf`
    module, together with a Digital Metadata channel holding the headers.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor

        Only initializes state; writer objects are created in setup() on the
        first run() call.
        '''
        Operation.__init__(self, **kwargs)
        self.metadata_dict = {}   # filled by setHeader()/writeMetadata()
        self.dataOut = None
        self.dtype = None
        self.oldAverage = 0       # running average of rf_write latency (seconds)
646 648
    def setHeader(self):
        '''
        Copy the dataOut header fields into metadata_dict, which is later
        flushed to disk by writeMetadata().
        '''
        self.metadata_dict['frequency'] = self.dataOut.frequency
        self.metadata_dict['timezone'] = self.dataOut.timeZone
        # dtype is pickled so it can round-trip through the metadata store.
        self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype)
        self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
        self.metadata_dict['heightList'] = self.dataOut.heightList
        self.metadata_dict['channelList'] = self.dataOut.channelList
        self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
        self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
        self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
        self.metadata_dict['flagDataAsBlock'] = self.dataOut.flagDataAsBlock
        self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
        self.metadata_dict['nCohInt'] = self.dataOut.nCohInt

        return
663 665
664 666 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
665 667 '''
666 668 In this method we should set all initial parameters.
667 669 Input:
668 670 dataOut: Input data will also be outputa data
669 671 '''
670 672 self.setHeader()
671 673 self.__ippSeconds = dataOut.ippSeconds
672 674 self.__deltaH = dataOut.getDeltaH()
673 675 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
674 676 self.__dtype = dataOut.dtype
675 677 if len(dataOut.dtype) == 2:
676 678 self.__dtype = dataOut.dtype[0]
677 679 self.__nSamples = dataOut.systemHeaderObj.nSamples
678 680 self.__nProfiles = dataOut.nProfiles
679 681 self.__blocks_per_file = dataOut.processingHeaderObj.dataBlocksPerFile
680 682
681 683 self.arr_data = arr_data = numpy.ones((self.__nSamples, len(
682 684 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
683 685
684 file_cadence_millisecs = long(
685 1.0 * self.__blocks_per_file * self.__nProfiles * self.__nSamples / self.__sample_rate) * 1000
686 sub_cadence_secs = file_cadence_millisecs / 500
686 file_cadence_millisecs = 1000
687 687
688 688 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
689 689 sample_rate_numerator = long(sample_rate_fraction.numerator)
690 690 sample_rate_denominator = long(sample_rate_fraction.denominator)
691 691 start_global_index = dataOut.utctime * self.__sample_rate
692 692
693 693 uuid = 'prueba'
694 compression_level = 1
694 compression_level = 0
695 695 checksum = False
696 696 is_complex = True
697 697 num_subchannels = len(dataOut.channelList)
698 698 is_continuous = True
699 699 marching_periods = False
700 700
701 701 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
702 702 fileCadence, start_global_index,
703 703 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
704 704 is_complex, num_subchannels, is_continuous, marching_periods)
705 705
706 706 metadata_dir = os.path.join(path, 'metadata')
707 707 os.system('mkdir %s' % (metadata_dir))
708 708
709 709 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
710 710 sample_rate_numerator, sample_rate_denominator,
711 711 metadataFile)
712 712
713 713 self.isConfig = True
714 714 self.currentSample = 0
715 715 self.oldAverage = 0
716 716 self.count = 0
717 717 return
718 718
    def writeMetadata(self):
        '''
        Write metadata_dict (plus the three dataOut headers, serialized as
        plain dicts) at the sample index corresponding to dataOut.utctime.
        '''
        print '[Writing] - Writing metadata'
        start_idx = self.__sample_rate * self.dataOut.utctime

        self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
        )
        self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
        )
        self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
        )
        self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
        return
731 731
732 732 def timeit(self, toExecute):
733 733 t0 = time()
734 734 toExecute()
735 735 self.executionTime = time() - t0
736 736 if self.oldAverage is None:
737 737 self.oldAverage = self.executionTime
738 738 self.oldAverage = (self.executionTime + self.count *
739 739 self.oldAverage) / (self.count + 1.0)
740 740 self.count = self.count + 1.0
741 741 return
742 742
743 743 def writeData(self):
744 744 for i in range(self.dataOut.systemHeaderObj.nSamples):
745 745 for channel in self.dataOut.channelList:
746 746 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
747 747 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
748 748
749 749 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
750 750 self.timeit(f)
751 751
752 752 return
753 753
754 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=100, dirCadence=25, metadataCadence=1, **kwargs):
754 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
755 755 '''
756 756 This method will be called many times so here you should put all your code
757 757 Inputs:
758 758 dataOut: object with the data
759 759 '''
760 760 # print dataOut.__dict__
761 761 self.dataOut = dataOut
762 762 if not self.isConfig:
763 763 self.setup(dataOut, path, frequency, fileCadence,
764 764 dirCadence, metadataCadence, **kwargs)
765 765 self.writeMetadata()
766 766
767 767 self.writeData()
768 768
769 769 ## self.currentSample += 1
770 770 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
771 771 # self.writeMetadata()
772 772 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
773 773
    def close(self):
        '''
        Report the average write latency and close the Digital RF writer.
        The bare except keeps teardown best-effort when the writer was never
        created or is already closed.
        '''
        print '[Writing] - Closing files '
        print 'Average of writing to digital rf format is ', self.oldAverage * 1000
        try:
            self.digitalWriteObj.close()
        except:
            pass
781 781

# raise
if __name__ == '__main__':

    readObj = DigitalRFReader()

    # Endless smoke test against locally mocked data.
    while True:
        readObj.run(path='/home/jchavez/jicamarca/mocked_data/')
        # readObj.printInfo()
        # readObj.printNumberOfBlock()
@@ -1,762 +1,764
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13 import zmq
14 14 import tempfile
15 15 from StringIO import StringIO
16 16 # from _sha import blocksize
17 17
18 18
19 19 class VoltageReader(JRODataReader, ProcessingUnit):
20 20 """
21 21 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
22 22 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
23 23 perfiles*alturas*canales) son almacenados en la variable "buffer".
24 24
25 25 perfiles * alturas * canales
26 26
27 27 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
28 28 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
29 29 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
30 30 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
31 31
32 32 Example:
33 33
34 34 dpath = "/home/myuser/data"
35 35
36 36 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
37 37
38 38 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
39 39
40 40 readerObj = VoltageReader()
41 41
42 42 readerObj.setup(dpath, startTime, endTime)
43 43
44 44 while(True):
45 45
46 46 #to get one profile
47 47 profile = readerObj.getData()
48 48
49 49 #print the profile
50 50 print profile
51 51
52 52 #If you want to see all datablock
53 53 print readerObj.datablock
54 54
55 55 if readerObj.flagNoMoreFiles:
56 56 break
57 57
58 58 """
59 59
60 60 ext = ".r"
61 61
62 62 optchar = "D"
63 63 dataOut = None
64 64
    def __init__(self, **kwargs):
        """
        Initializer of the VoltageReader class for reading voltage data.

        Input:
            dataOut : Voltage object used to store one profile of data each
                      time a request (getData) is made.  The profile is taken
                      from the data buffer; if the buffer is empty a new
                      block is read from disk.
                      If this parameter is not given, one is created
                      internally.

        Affected:
            self.dataOut

        Return:
            None
        """

        ProcessingUnit.__init__(self, **kwargs)

        self.isConfig = False

        # Current block (channels x profiles x heights), filled by readBlock().
        self.datablock = None

        self.utc = 0

        self.ext = ".r"                # raw-data file extension

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None                 # handle of the currently open file

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # Start "past the end" so the first getData() forces a block read.
        self.profileIndex = 2**32 - 1

        self.delay = 3  # seconds

        self.nTries = 3  # quantity tries

        self.nFiles = 3  # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagDiscontinuousBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.nTxs = 1

        self.txIndex = 0
167 167
168 168 def createObjByDefault(self):
169 169
170 170 dataObj = Voltage()
171 171
172 172 return dataObj
173 173
174 174 def __hasNotDataInBuffer(self):
175 175
176 176 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
177 177 return 1
178 178
179 179 return 0
180 180
181 181 def getBlockDimension(self):
182 182 """
183 183 Obtiene la cantidad de puntos a leer por cada bloque de datos
184 184
185 185 Affected:
186 186 self.blocksize
187 187
188 188 Return:
189 189 None
190 190 """
191 191 pts2read = self.processingHeaderObj.profilesPerBlock * \
192 192 self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
193 193 self.blocksize = pts2read
194 194
195 195 def readBlock(self):
196 196 """
197 197 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
198 198 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
199 199 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
200 200 es seteado a 0
201 201
202 202 Inputs:
203 203 None
204 204
205 205 Return:
206 206 None
207 207
208 208 Affected:
209 209 self.profileIndex
210 210 self.datablock
211 211 self.flagIsNewFile
212 212 self.flagIsNewBlock
213 213 self.nTotalBlocks
214 214
215 215 Exceptions:
216 216 Si un bloque leido no es un bloque valido
217 217 """
218 218
219 219 # if self.server is not None:
220 220 # self.zBlock = self.receiver.recv()
221 221 # self.zHeader = self.zBlock[:24]
222 222 # self.zDataBlock = self.zBlock[24:]
223 223 # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
224 224 # self.processingHeaderObj.profilesPerBlock = 240
225 225 # self.processingHeaderObj.nHeights = 248
226 226 # self.systemHeaderObj.nChannels
227 227 # else:
228 228 current_pointer_location = self.fp.tell()
229 229 junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
230 230
231 231 try:
232 232 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
233 233 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
234 234 except:
235 235 # print "The read block (%3d) has not enough data" %self.nReadBlocks
236 236
237 237 if self.waitDataBlock(pointer_location=current_pointer_location):
238 238 junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
239 239 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
240 240 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
241 241 # return 0
242 242
243 243 # Dimensions : nChannels, nProfiles, nSamples
244 244
245 245 junk = numpy.transpose(junk, (2, 0, 1))
246 246 self.datablock = junk['real'] + junk['imag'] * 1j
247 247
248 248 self.profileIndex = 0
249 249
250 250 self.flagIsNewFile = 0
251 251 self.flagIsNewBlock = 1
252 252
253 253 self.nTotalBlocks += 1
254 254 self.nReadBlocks += 1
255 255
256 256 return 1
257 257
258 258 def getFirstHeader(self):
259 259
260 260 self.getBasicHeader()
261 261
262 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
263
262 264 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
263 265
264 266 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
265 267
266 268 if self.nTxs > 1:
267 269 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
268 270 # Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj.
269 271
270 272 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
271 273 #
272 274 # if self.radarControllerHeaderObj.code is not None:
273 275 #
274 276 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
275 277 #
276 278 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
277 279 #
278 280 # self.dataOut.code = self.radarControllerHeaderObj.code
279 281
280 282 self.dataOut.dtype = self.dtype
281 283
282 284 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
283 285
284 286 self.dataOut.heightList = numpy.arange(
285 287 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
286 288
287 289 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
288 290
289 291 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
290 292
291 293 # asumo q la data no esta decodificada
292 294 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
293 295
294 296 # asumo q la data no esta sin flip
295 297 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
296 298
297 299 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
298 300
299 301 def reshapeData(self):
300 302
301 303 if self.nTxs < 0:
302 304 return
303 305
304 306 if self.nTxs == 1:
305 307 return
306 308
307 309 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
308 310 raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
309 311 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock)
310 312
311 313 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
312 314 raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" % (
313 315 self.nTxs, self.processingHeaderObj.nHeights)
314 316
315 317 self.datablock = self.datablock.reshape(
316 318 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs))
317 319
318 320 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs
319 321 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
320 322 self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
321 323 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
322 324
323 325 return
324 326
325 327 def readFirstHeaderFromServer(self):
326 328
327 329 self.getFirstHeader()
328 330
329 331 self.firstHeaderSize = self.basicHeaderObj.size
330 332
331 333 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
332 334 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
333 335 if datatype == 0:
334 336 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
335 337 elif datatype == 1:
336 338 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
337 339 elif datatype == 2:
338 340 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
339 341 elif datatype == 3:
340 342 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
341 343 elif datatype == 4:
342 344 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
343 345 elif datatype == 5:
344 346 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
345 347 else:
346 348 raise ValueError, 'Data type was not defined'
347 349
348 350 self.dtype = datatype_str
349 351 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
350 352 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
351 353 self.firstHeaderSize + self.basicHeaderSize * \
352 354 (self.processingHeaderObj.dataBlocksPerFile - 1)
353 355 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
354 356 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
355 357 self.getBlockDimension()
356 358
357 359 def getFromServer(self):
358 360 self.flagDiscontinuousBlock = 0
359 361 self.profileIndex = 0
360 362 self.flagIsNewBlock = 1
361 363 self.dataOut.flagNoData = False
362 364 self.nTotalBlocks += 1
363 365 self.nReadBlocks += 1
364 366 self.blockPointer = 0
365 367
366 368 block = self.receiver.recv()
367 369
368 370 self.basicHeaderObj.read(block[self.blockPointer:])
369 371 self.blockPointer += self.basicHeaderObj.length
370 372 self.systemHeaderObj.read(block[self.blockPointer:])
371 373 self.blockPointer += self.systemHeaderObj.length
372 374 self.radarControllerHeaderObj.read(block[self.blockPointer:])
373 375 self.blockPointer += self.radarControllerHeaderObj.length
374 376 self.processingHeaderObj.read(block[self.blockPointer:])
375 377 self.blockPointer += self.processingHeaderObj.length
376 378 self.readFirstHeaderFromServer()
377 379
378 380 timestamp = self.basicHeaderObj.get_datatime()
379 381 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
380 382 current_pointer_location = self.blockPointer
381 383 junk = numpy.fromstring(
382 384 block[self.blockPointer:], self.dtype, self.blocksize)
383 385
384 386 try:
385 387 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
386 388 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
387 389 except:
388 390 # print "The read block (%3d) has not enough data" %self.nReadBlocks
389 391 if self.waitDataBlock(pointer_location=current_pointer_location):
390 392 junk = numpy.fromstring(
391 393 block[self.blockPointer:], self.dtype, self.blocksize)
392 394 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
393 395 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
394 396 # return 0
395 397
396 398 # Dimensions : nChannels, nProfiles, nSamples
397 399
398 400 junk = numpy.transpose(junk, (2, 0, 1))
399 401 self.datablock = junk['real'] + junk['imag'] * 1j
400 402 self.profileIndex = 0
401 403 if self.selBlocksize == None:
402 404 self.selBlocksize = self.dataOut.nProfiles
403 405 if self.selBlocktime != None:
404 406 if self.dataOut.nCohInt is not None:
405 407 nCohInt = self.dataOut.nCohInt
406 408 else:
407 409 nCohInt = 1
408 410 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
409 411 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
410 412 self.dataOut.data = self.datablock[:,
411 413 self.profileIndex:self.profileIndex + self.selBlocksize, :]
412 414 datasize = self.dataOut.data.shape[1]
413 415 if datasize < self.selBlocksize:
414 416 buffer = numpy.zeros(
415 417 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
416 418 buffer[:, :datasize, :] = self.dataOut.data
417 419 self.dataOut.data = buffer
418 420 self.profileIndex = blockIndex
419 421
420 422 self.dataOut.flagDataAsBlock = True
421 423 self.flagIsNewBlock = 1
422 424 self.dataOut.realtime = self.online
423 425
424 426 return self.dataOut.data
425 427
    def getData(self):
        """
        Copy one unit of data from the read buffer into ``self.dataOut`` (a
        "Voltage" object) together with its metadata.  When the buffer has no
        data left, a new block is read using "readNextBlock".

        It also increments the buffer counter ``self.profileIndex`` by 1.

        Return:

            If the flag self.getByBlock is set, the whole block is copied to
            self.dataOut and self.profileIndex equals the total number of
            profiles read from the file.

            If self.getByBlock == False:

                self.dataOut.data = buffer[:, thisProfile, :]

                shape = [nChannels, nHeis]

            If self.getByBlock == True:

                self.dataOut.data = buffer[:, :, :]

                shape = [nChannels, nProfiles, nHeis]

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """
        # No more files to read: flag the end of data and stop.
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0
        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0
        # Buffer exhausted: read and reshape the next block.
        if self.__hasNotDataInBuffer():
            if not(self.readNextBlock()):
                return 0

            self.getFirstHeader()

            self.reshapeData()
        if self.datablock is None:
            self.dataOut.flagNoData = True
            return 0

        if not self.getByBlock:

            """
            Return profile by profile

            If nTxs > 1 then one profile is divided by nTxs and number of total
            blocks is increased by nTxs (nProfiles *= nTxs)
            """
            self.dataOut.flagDataAsBlock = False
            self.dataOut.data = self.datablock[:, self.profileIndex, :]
            self.dataOut.profileIndex = self.profileIndex

            self.profileIndex += 1

        # elif self.selBlocksize==None or self.selBlocksize==self.dataOut.nProfiles:
        #     """
        #     Return all block
        #     """
        #     self.dataOut.flagDataAsBlock = True
        #     self.dataOut.data = self.datablock
        #     self.dataOut.profileIndex = self.dataOut.nProfiles - 1
        #
        #     self.profileIndex = self.dataOut.nProfiles

        else:
            """
            Return a block
            """
            # selBlocksize defaults to a whole block; selBlocktime converts a
            # time span into a number of profiles.
            if self.selBlocksize == None:
                self.selBlocksize = self.dataOut.nProfiles
            if self.selBlocktime != None:
                if self.dataOut.nCohInt is not None:
                    nCohInt = self.dataOut.nCohInt
                else:
                    nCohInt = 1
                self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
                    nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))

            self.dataOut.data = self.datablock[:,
                                               self.profileIndex:self.profileIndex + self.selBlocksize, :]
            self.profileIndex += self.selBlocksize
            datasize = self.dataOut.data.shape[1]

            # Short block: keep reading blocks until selBlocksize profiles
            # have been accumulated into a zero-initialized buffer.
            if datasize < self.selBlocksize:
                buffer = numpy.zeros(
                    (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
                buffer[:, :datasize, :] = self.dataOut.data

                while datasize < self.selBlocksize:  # Not enough profiles to fill the block
                    if not(self.readNextBlock()):
                        return 0
                    self.getFirstHeader()
                    self.reshapeData()
                    if self.datablock is None:
                        self.dataOut.flagNoData = True
                        return 0
                    # stack data
                    blockIndex = self.selBlocksize - datasize
                    datablock1 = self.datablock[:, :blockIndex, :]

                    buffer[:, datasize:datasize +
                           datablock1.shape[1], :] = datablock1
                    datasize += datablock1.shape[1]

                self.dataOut.data = buffer
                # Next read continues right after the profiles consumed from
                # the last (partially used) block.
                self.profileIndex = blockIndex

            self.dataOut.flagDataAsBlock = True
            self.dataOut.nProfiles = self.dataOut.data.shape[1]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.dataOut.realtime = self.online

        return self.dataOut.data
556 558
557 559
class VoltageWriter(JRODataWriter, Operation):
    """
    Write voltage data to processed (.r) files.  Data is always written in
    whole blocks.
    """

    # Output file extension and option character used in file names.
    ext = ".r"

    optchar = "D"

    # Shape (profiles, heights, channels) of the on-disk block layout;
    # set by setBlockDimension().
    shapeBuffer = None

    def __init__(self, **kwargs):
        """
        Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self, **kwargs)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.blockIndex = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # The block buffer is full once profilesPerBlock profiles were stored.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Set the dimensional shapes of the sub-buffers that make up a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        # On-disk layout: (profiles, heights, channels).
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # In-memory layout: (channels, profiles, heights), complex64.
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """
        Write the buffer to the designated file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        # Reorder from (channels, profiles, heights) to the on-disk
        # (profiles, heights, channels) layout.
        junk = numpy.transpose(self.datablock, (1, 2, 0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

        # print "[Writing] Block = %04d" %self.blockIndex

    def putData(self):
        """
        Store one profile into the block buffer and write the buffer to file
        once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A discontinuity forces a fresh buffer and a new output file.
        if self.dataOut.flagDiscontinuousBlock:
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            # if self.flagIsNewFile:
            self.writeNextBlock()
            # self.setFirstHeader()

        return 1

    def __getBlockSize(self):
        '''
        Return the number of bytes of one Voltage-type data block.
        '''

        dtype_width = self.getDtypeWidth()

        # x2 accounts for the real and imaginary components of each sample.
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
                        self.profilesPerBlock * dtype_width * 2)

        return blocksize

    def setFirstHeader(self):
        """
        Build the first header from a copy of dataOut's headers.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.nWindows = 1
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        # when the source data is Voltage-type
        self.processingHeaderObj.nIncohInt = 1
        # when the source data is Voltage-type
        self.processingHeaderObj.totalSpectra = 0

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
                self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

        self.processingHeaderObj.processFlags = self.getProcessFlags()

        self.setBasicHeader()
General Comments 0
You need to be logged in to leave comments. Login now