This diff has been collapsed as it changes many lines (1,509 lines changed).
@@ -4,16 +4,1511 Created on 23/01/2012 | |||
|
4 | 4 | @author $Author$ |
|
5 | 5 | @version $Id$ |
|
6 | 6 | ''' |
|
7 | import os, sys | |
|
8 | import glob | |
|
9 | import time | |
|
10 | import numpy | |
|
7 | 11 | |
|
12 | path = os.path.split(os.getcwd())[0] | |
|
13 | sys.path.append(path) | |
|
8 | 14 | |
|
9 | class DataReader: | |
|
10 | ||
|
15 | from Model.JROHeader import * | |
|
16 | ||
|
17 | def checkForRealPath( path, year, doy, set, ext ): | |
|
18 | """ | |
|
19 | Because Linux is case sensitive, checkForRealPath finds the correct name of a path. |
|
20 | It tries several combinations of upper- and lowercase names to determine |
|
21 | the exact path of a given file. |
|
22 | ||
|
23 | Example : | |
|
24 | the correct file name is ../RAWDATA/D2009307/P2009307367 |
|
25 | ||
|
26 | The function then tries the following combinations |
|
27 | ../RAWDATA/d2009307/p2009307367 | |
|
28 | ../RAWDATA/d2009307/P2009307367 | |
|
29 | ../RAWDATA/D2009307/p2009307367 | |
|
30 | ../RAWDATA/D2009307/P2009307367 | |
|
31 | in this case the last letter combination is identical to the file being searched for |
|
32 | ||
|
33 | Return: | |
|
34 | If the right combination is found it returns the full path and the file name; |
|
35 | otherwise it returns None as the path and the last, uppercase name combination |
|
36 | as the filename |
|
37 | """ | |
|
38 | filepath = None | |
|
39 | find_flag = False | |
|
40 | filename = None | |
|
41 | ||
|
42 | for dir in "dD": #sweep over the two possible cases of the directory prefix |
|
43 | for fil in "dD": #sweep over the two possible cases of the file prefix |
|
44 | doypath = "%s%04d%03d" % ( dir, year, doy ) #build the directory name xYYYYDDD (x=d or x=D) |
|
45 | filename = "%s%04d%03d%03d%s" % ( fil, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext (x=d or x=D) |
|
46 | filepath = os.path.join( path, doypath, filename ) #build the full path |
|
47 | if os.path.exists( filepath ): #check whether it exists |
|
48 | find_flag = True | |
|
49 | break | |
|
50 | if find_flag: | |
|
51 | break | |
|
52 | ||
|
53 | if not(find_flag): | |
|
54 | return None, filename | |
|
55 | ||
|
56 | return filepath, filename | |
|
57 | ||
|
58 | ||
|
59 | def isNumber( str ): | |
|
60 | """ | |
|
61 | Checks whether the characters that make up a string can be converted to a number. |
|
62 | ||
|
63 | Exceptions: |
|
64 | If a given string cannot be converted to a number |
|
65 | Input: |
|
66 | str, string to be analyzed to determine whether it can be converted to a number or not |
|
67 | ||
|
68 | Return: |
|
69 | True : if the string is numeric |
|
70 | False : if it is not a numeric string |
|
71 | """ | |
|
72 | try: | |
|
73 | float( str ) | |
|
74 | return True | |
|
75 | except: | |
|
76 | return False | |
|
77 | ||
|
78 | ||
|
79 | def isThisFileinRange(filename, startUTSeconds, endUTSeconds): | |
|
80 | """ | |
|
81 | This function determines whether a Jicamarca-format (.r) data file falls |
|
82 | within the specified date range or not. |
|
83 | ||
|
84 | Inputs: | |
|
85 | filename : full name of the Jicamarca-format (.r) data file |
|
86 | ||
|
87 | startUTSeconds : start date of the selected range, given in |
|
88 | seconds counted from 01/01/1970. |
|
89 | endUTSeconds : end date of the selected range, given in |
|
90 | seconds counted from 01/01/1970. |
|
91 | ||
|
92 | Return: | |
|
93 | Boolean : Returns True if the data file contains data within the specified |
|
94 | date range, otherwise returns False. |
|
95 | ||
|
96 | Exceptions: |
|
97 | If the file does not exist or cannot be opened |
|
98 | If the header cannot be read. |
|
99 | ||
|
100 | """ | |
|
101 | m_BasicHeader = BasicHeader() | |
|
102 | ||
|
103 | try: | |
|
104 | fp = open(filename,'rb') | |
|
105 | except: | |
|
106 | raise IOError, "The file %s can't be opened" %(filename) | |
|
107 | ||
|
108 | if not(m_BasicHeader.read(fp)): | |
|
109 | raise IOError, "The file %s has not a valid header" %(filename) | |
|
110 | ||
|
111 | fp.close() | |
|
112 | ||
|
113 | if not ((startUTSeconds <= m_BasicHeader.utc) and (endUTSeconds >= m_BasicHeader.utc)): | |
|
114 | return 0 | |
|
115 | ||
|
116 | return 1 | |
|
117 | ||
|
118 | ||
|
119 | def getlastFileFromPath( pathList, ext ): | |
|
120 | """ | |
|
121 | Filters pathList, keeping only the entries that match the "PYYYYDDDSSS.ext" format, |
|
122 | and returns the last file of the list that remains after filtering. |
|
123 | ||
|
124 | Input: |
|
125 | pathList : list containing all the full filenames that make up a given folder |
|
126 | ext : extension of the files contained in a folder |
|
127 | ||
|
128 | Return: |
|
129 | The last file of a given folder |
|
130 | """ | |
|
131 | ||
|
132 | filesList = [] | |
|
133 | filename = None | |
|
134 | ||
|
135 | # 0 1234 567 89A BCDE | |
|
136 | # D YYYY DDD SSS .ext | |
|
137 | ||
|
138 | for filename in pathList: | |
|
139 | year = filename[1:5] | |
|
140 | doy = filename[5:8] | |
|
141 | leng = len( ext ) | |
|
142 | ||
|
143 | if ( filename[-leng:].upper() != ext.upper() ) : continue | |
|
144 | if not( isNumber( year ) ) : continue | |
|
145 | if not( isNumber( doy ) ) : continue | |
|
146 | ||
|
147 | filesList.append(filename) | |
|
148 | ||
|
149 | if len( filesList ) > 0: | |
|
150 | filesList = sorted( filesList, key=str.lower ) | |
|
151 | filename = filesList[-1] | |
|
152 | ||
|
153 | return filename | |
|
154 | ||
|
155 | ||
|
156 | class DataReader(): | |
|
157 | ||
|
11 | 158 | def __init__(self): |
|
12 | __buffer = 0 | |
|
13 | __buffer_count = 0 | |
|
159 | pass | |
|
160 | ||
|
161 | class DataWriter(): | |
|
162 | ||
|
163 | def __init__(self): | |
|
164 | pass | |
|
165 | ||
|
166 | class JRODataReader(): | |
|
167 | ||
|
168 | """ | |
|
169 | This class is used as the parent class of VoltageReader and SpectraReader; it |
|
170 | contains all the methods needed to read data from files in Jicamarca format |
|
171 | (.r or .pdata). Data are always read block by block. The data that are read |
|
172 | are 3-dimensional arrays: |
|
173 | profiles*heights*channels |
|
174 | ||
|
175 | and are stored in the "datablock" variable. |
|
176 | ||
|
177 | This class holds instances (objects) of the BasicHeader, SystemHeader, |
|
178 | RadarControllerHeader and DataObj classes. The first three are used to store the data |
|
179 | header information (metadata), and the fourth (DataObj) to obtain and store the data from |
|
180 | the "datablock" every time the "getData" method is executed. |
|
181 | ||
|
182 | ||
|
183 | """ | |
|
184 | ||
|
185 | m_BasicHeader = BasicHeader() | |
|
186 | ||
|
187 | m_SystemHeader = SystemHeader() | |
|
188 | ||
|
189 | m_RadarControllerHeader = RadarControllerHeader() | |
|
190 | ||
|
191 | m_ProcessingHeader = ProcessingHeader() | |
|
192 | ||
|
193 | m_DataObj = None | |
|
194 | ||
|
195 | online = 0 | |
|
196 | ||
|
197 | __startDateTime = None | |
|
198 | ||
|
199 | __endDateTime = None | |
|
200 | ||
|
201 | __fp = None | |
|
202 | ||
|
203 | __fileSizeByHeader = None | |
|
204 | ||
|
205 | __pathList = [] | |
|
206 | ||
|
207 | __filenameList = [] | |
|
208 | ||
|
209 | __fileIndex = None | |
|
210 | ||
|
211 | filename = None | |
|
212 | ||
|
213 | fileSize = None | |
|
214 | ||
|
215 | firstHeaderSize = 0 | |
|
216 | ||
|
217 | basicHeaderSize = 24 | |
|
218 | ||
|
219 | __dataType = None | |
|
220 | ||
|
221 | __blocksize = 0 | |
|
222 | ||
|
223 | datablock = None | |
|
224 | ||
|
225 | __datablockIndex = None | |
|
226 | ||
|
227 | __pts2read = 0 | |
|
228 | ||
|
229 | #Parameters for online processing |
|
230 | __year = 0 | |
|
231 | ||
|
232 | __doy = 0 | |
|
233 | ||
|
234 | __set = 0 | |
|
14 | 235 | |
|
15 | class DataWriter: | |
|
236 | __ext = None | |
|
237 | ||
|
238 | __path = None | |
|
239 | ||
|
240 | __delay = 60 #seconds | |
|
241 | ||
|
242 | __nTries = 3 #number of tries |
|
243 | ||
|
244 | __nFiles = 3 #number of files for searching | |
|
245 | ||
|
246 | #speed of light | |
|
247 | __c = 3E8 | |
|
248 | ||
|
249 | def __init__(self): | |
|
250 | ||
|
251 | """ | |
|
252 | Initializer |
|
253 | """ | |
|
254 | ||
|
255 | raise ValueError, "This class has not been implemented" | |
|
256 | ||
|
257 | def __rdSystemHeader(self,fp=None): | |
|
258 | ||
|
259 | if fp == None: | |
|
260 | fp = self.__fp | |
|
261 | ||
|
262 | self.m_SystemHeader.read(fp) | |
|
263 | ||
|
264 | def __rdRadarControllerHeader(self,fp=None): | |
|
265 | if fp == None: | |
|
266 | fp = self.__fp | |
|
267 | ||
|
268 | self.m_RadarControllerHeader.read(fp) | |
|
269 | ||
|
270 | def __rdProcessingHeader(self,fp=None): | |
|
271 | if fp == None: | |
|
272 | fp = self.__fp | |
|
273 | ||
|
274 | self.m_ProcessingHeader.read(fp) | |
|
275 | ||
|
276 | def __rdBasicHeader(self, fp=None): | |
|
277 | ||
|
278 | if fp == None: | |
|
279 | fp = self.__fp | |
|
280 | ||
|
281 | self.m_BasicHeader.read(fp) | |
|
282 | ||
|
283 | def __readFirstHeader(self): | |
|
284 | ||
|
285 | self.__rdBasicHeader() | |
|
286 | self.__rdSystemHeader() | |
|
287 | self.__rdRadarControllerHeader() | |
|
288 | self.__rdProcessingHeader() | |
|
289 | self.firstHeaderSize = self.m_BasicHeader.size | |
|
290 | ||
|
291 | data_type=int(numpy.log2((self.m_ProcessingHeader.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) | |
|
292 | if data_type == 0: | |
|
293 | tmp = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
294 | ||
|
295 | elif data_type == 1: | |
|
296 | tmp = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
297 | ||
|
298 | elif data_type == 2: | |
|
299 | tmp = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
300 | ||
|
301 | elif data_type == 3: | |
|
302 | tmp = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
303 | ||
|
304 | elif data_type == 4: | |
|
305 | tmp = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
306 | ||
|
307 | elif data_type == 5: | |
|
308 | tmp = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
309 | ||
|
310 | else: | |
|
311 | raise ValueError, 'Data type was not defined' | |
|
312 | ||
|
313 | xi = self.m_ProcessingHeader.firstHeight | |
|
314 | step = self.m_ProcessingHeader.deltaHeight | |
|
315 | xf = xi + self.m_ProcessingHeader.numHeights*step | |
|
316 | ||
|
317 | self.__heights = numpy.arange(xi, xf, step) | |
|
318 | self.__dataType = tmp | |
|
319 | self.__fileSizeByHeader = self.m_ProcessingHeader.dataBlocksPerFile * self.m_ProcessingHeader.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.m_ProcessingHeader.dataBlocksPerFile - 1) | |
|
320 | self.__ippSeconds = 2*1000*self.m_RadarControllerHeader.ipp/self.__c | |
|
321 | ||
|
322 | self.__pts2read = self.m_ProcessingHeader.profilesPerBlock * self.m_ProcessingHeader.numHeights * self.m_SystemHeader.numChannels | |
|
323 | self.__blocksize = self.__pts2read | |
|
324 | ||
|
325 | ||
|
326 | def __setNextFileOnline( self ): | |
|
327 | """ | |
|
328 | Searches, inside a specific folder, for the next file with enough data to be read; if |
|
329 | it does not find a valid file it waits a given amount of time and then checks the next |
|
330 | n possible files. |
|
331 | ||
|
332 | Affected: | |
|
333 | self.__flagNewFile | |
|
334 | self.filename | |
|
335 | self.fileSize | |
|
336 | self.__fp | |
|
337 | self.__set | |
|
338 | self.flagNoMoreFiles | |
|
339 | ||
|
340 | Return: | |
|
341 | 0 : if, after searching for the next valid file, none could be found |
|
342 | 1 : if the file was opened successfully and is ready to be read |
|
343 | ||
|
344 | Exceptions: |
|
345 | If a given file cannot be opened |
|
346 | """ | |
|
347 | countFiles = 0 | |
|
348 | countTries = 0 | |
|
349 | ||
|
350 | fileStatus = 0 | |
|
351 | notFirstTime_flag = False | |
|
352 | bChangeDir = False | |
|
353 | ||
|
354 | fileSize = 0 | |
|
355 | fp = None | |
|
356 | ||
|
357 | self.__flagNewFile = 0 | |
|
358 | ||
|
359 | #this loop keeps count of the tries, files and folders; if nothing is found it exits the loop |
|
360 | while( True ): | |
|
361 | countFiles += 1 | |
|
362 | ||
|
363 | if countFiles > (self.__nFiles + 1): | |
|
364 | break | |
|
365 | ||
|
366 | self.__set += 1 | |
|
367 | ||
|
368 | if countFiles > self.__nFiles: #if the requested file is not found, switch to the next folder and search there |
|
369 | self.__set = 0 | |
|
370 | self.__doy += 1 | |
|
371 | bChangeDir = True | |
|
372 | ||
|
373 | file = None | |
|
374 | filename = None | |
|
375 | ||
|
376 | countTries = 0 | |
|
377 | ||
|
378 | #wait until the first available file is found |
|
379 | while( True ): | |
|
380 | ||
|
381 | countTries += 1 | |
|
382 | if( countTries >= self.__nTries ): #check that the number of tries has not been exceeded |
|
383 | break | |
|
384 | ||
|
385 | file, filename = checkForRealPath( self.__path, self.__year, self.__doy, self.__set, self.__ext ) | |
|
386 | if file != None: | |
|
387 | break | |
|
388 | ||
|
389 | if notFirstTime_flag: #this flag is only used to wait for the first file; for the following ones it does not wait, it just checks whether the file exists |
|
390 | countTries = self.__nTries | |
|
391 | print "\tsearching next \"%s\" file ..." % filename | |
|
392 | break | |
|
393 | ||
|
394 | print "\twaiting new \"%s\" file ..." % filename | |
|
395 | time.sleep( self.__delay ) | |
|
396 | ||
|
397 | if countTries >= self.__nTries: #n tries were made and there was no new file |
|
398 | notFirstTime_flag = True | |
|
399 | continue #go back to the start of the main while loop |
|
400 | ||
|
401 | countTries = 0 | |
|
402 | ||
|
403 | #once the first valid file has been obtained, its content is checked, waiting a certain amount |
|
404 | #of time, a certain number of times, until the content of the file is valid |
|
405 | while( True ): | |
|
406 | countTries += 1 | |
|
407 | if countTries > self.__nTries: | |
|
408 | break | |
|
409 | ||
|
410 | try: | |
|
411 | fp = open(file) | |
|
412 | except: | |
|
413 | print "The file \"%s\" can't be opened" % file | |
|
414 | break | |
|
415 | ||
|
416 | fileSize = os.path.getsize( file ) | |
|
417 | currentSize = fileSize - fp.tell() | |
|
418 | neededSize = self.m_ProcessingHeader.blockSize + self.firstHeaderSize | |
|
419 | ||
|
420 | if currentSize > neededSize: | |
|
421 | fileStatus = 1 | |
|
422 | break | |
|
423 | ||
|
424 | fp.close() | |
|
425 | ||
|
426 | if bChangeDir: #if the directory changed while searching for a file, stop waiting and exit the while loop |
|
427 | print "\tsearching next \"%s\" file ..." % filename | |
|
428 | break | |
|
429 | ||
|
430 | print "\twaiting for block of \"%s\" file ..." % filename | |
|
431 | time.sleep( self.__delay ) | |
|
432 | ||
|
433 | if fileStatus == 1: | |
|
434 | break | |
|
435 | ||
|
436 | print "Skipping the file \"%s\" because it is empty" % filename |
|
437 | countFiles = 0 | |
|
438 | ||
|
439 | ||
|
440 | if fileStatus == 1: | |
|
441 | self.fileSize = fileSize | |
|
442 | self.filename = file | |
|
443 | self.__flagNewFile = 1 | |
|
444 | self.__fp = fp | |
|
445 | self.flagNoMoreFiles = 0 | |
|
446 | print 'Setting the file: %s' % file | |
|
447 | else: | |
|
448 | self.fileSize = 0 | |
|
449 | self.filename = None | |
|
450 | self.__fp = None | |
|
451 | self.flagNoMoreFiles = 1 | |
|
452 | print 'No more Files' | |
|
453 | ||
|
454 | return fileStatus | |
|
455 | ||
|
456 | ||
|
457 | def __setNextFileOffline(self): | |
|
458 | ||
|
459 | idFile = self.__fileIndex | |
|
460 | while(True): | |
|
461 | ||
|
462 | idFile += 1 | |
|
463 | ||
|
464 | if not(idFile < len(self.__filenameList)): | |
|
465 | self.flagNoMoreFiles = 1 | |
|
466 | return 0 | |
|
467 | ||
|
468 | filename = self.__filenameList[idFile] | |
|
469 | fileSize = os.path.getsize(filename) | |
|
470 | ||
|
471 | try: | |
|
472 | fp = open(filename,'rb') | |
|
473 | except: | |
|
474 | raise IOError, "The file %s can't be opened" %filename | |
|
475 | ||
|
476 | currentSize = fileSize - fp.tell() | |
|
477 | neededSize = self.m_ProcessingHeader.blockSize + self.firstHeaderSize | |
|
478 | ||
|
479 | if (currentSize < neededSize): | |
|
480 | print "Skipping the file %s because it doesn't have enough data" %filename |
|
481 | continue | |
|
482 | ||
|
483 | break | |
|
484 | ||
|
485 | self.__flagNewFile = 1 | |
|
486 | self.__fileIndex = idFile | |
|
487 | self.filename = filename | |
|
488 | self.fileSize = fileSize | |
|
489 | self.__fp = fp | |
|
490 | ||
|
491 | print 'Setting the file: %s'%self.filename | |
|
492 | ||
|
493 | return 1 | |
|
494 | ||
|
495 | def __setNextFile( self ): | |
|
496 | """ | |
|
497 | Determina el siguiente file a leer y si hay uno disponible lee el First Header | |
|
498 | ||
|
499 | Affected: | |
|
500 | self.m_BasicHeader | |
|
501 | self.m_SystemHeader | |
|
502 | self.m_RadarControllerHeader | |
|
503 | self.m_ProcessingHeader | |
|
504 | self.firstHeaderSize | |
|
505 | ||
|
506 | Return: | |
|
507 | 0 : If there are no files available |
|
508 | 1 : If there are more files available |
|
509 | """ | |
|
510 | if self.__fp != None: | |
|
511 | self.__fp.close() | |
|
512 | ||
|
513 | if self.online: | |
|
514 | newFile = self.__setNextFileOnline() | |
|
515 | else: | |
|
516 | newFile = self.__setNextFileOffline() | |
|
517 | ||
|
518 | if not(newFile): | |
|
519 | return 0 | |
|
520 | ||
|
521 | self.__readFirstHeader() | |
|
522 | ||
|
523 | return 1 | |
|
524 | ||
|
525 | ||
|
526 | def __setNewBlock( self ): | |
|
527 | """ | |
|
528 | Reads the Basic Header and positions the file pointer at the start of the block to be read |
|
529 | ||
|
530 | Affected: | |
|
531 | self.m_BasicHeader | |
|
532 | self.flagNoContinuousBlock | |
|
533 | self.ns | |
|
534 | ||
|
535 | Return: | |
|
536 | 0 : If the file does not have a Basic Header that can be read |
|
537 | 1 : If the Basic Header could be read |
|
538 | """ | |
|
539 | if self.__fp == None: | |
|
540 | return 0 | |
|
541 | ||
|
542 | if self.__flagNewFile: | |
|
543 | return 1 | |
|
544 | ||
|
545 | currentSize = self.fileSize - self.__fp.tell() | |
|
546 | neededSize = self.m_ProcessingHeader.blockSize + self.basicHeaderSize | |
|
547 | ||
|
548 | #If there is enough data setting new data block | |
|
549 | if ( currentSize >= neededSize ): | |
|
550 | self.__rdBasicHeader() | |
|
551 | return 1 | |
|
552 | elif self.online: | |
|
553 | nTries = 0 | |
|
554 | while( nTries < self.__nTries ): | |
|
555 | nTries += 1 | |
|
556 | print "Waiting for the next block, try %03d ..." % nTries | |
|
557 | time.sleep( self.__delay ) | |
|
558 | ||
|
559 | fileSize = os.path.getsize(self.filename) | |
|
560 | currentSize = fileSize - self.__fp.tell() | |
|
561 | neededSize = self.m_ProcessingHeader.blockSize + self.basicHeaderSize | |
|
562 | ||
|
563 | if ( currentSize >= neededSize ): | |
|
564 | self.__rdBasicHeader() | |
|
565 | return 1 | |
|
566 | ||
|
567 | #Setting new file | |
|
568 | if not( self.__setNextFile() ): | |
|
569 | return 0 | |
|
570 | ||
|
571 | deltaTime = self.m_BasicHeader.utc - self.__lastUTTime # check this | |
|
572 | ||
|
573 | self.flagNoContinuousBlock = 0 | |
|
574 | ||
|
575 | if deltaTime > self.__maxTimeStep: | |
|
576 | self.flagNoContinuousBlock = 1 | |
|
577 | self.nReadBlocks = 0 | |
|
578 | ||
|
579 | return 1 | |
|
580 | ||
|
581 | def __readBlock(self): | |
|
582 | """ | |
|
583 | __readBlock reads the data block starting at the current position of the file pointer |
|
584 | (self.__fp) and updates all the parameters related to the data block |
|
585 | (metadata + data). The data that are read are stored in the buffer and the buffer |
|
586 | counter is reset to 0 |
|
587 | ||
|
588 | ||
|
589 | Inputs: | |
|
590 | None | |
|
591 | ||
|
592 | Return: | |
|
593 | None | |
|
594 | ||
|
595 | Affected variables: |
|
596 | ||
|
597 | self.__datablockIndex | |
|
598 | ||
|
599 | self.datablock | |
|
600 | ||
|
601 | self.__flagNewFile | |
|
602 | ||
|
603 | self.__flagNewBlock | |
|
604 | ||
|
605 | self.nReadBlocks | |
|
606 | ||
|
607 | """ | |
|
608 | ||
|
609 | #pts2read = self.m_ProcessingHeader.profilesPerBlock*self.m_ProcessingHeader.numHeights*self.m_SystemHeader.numChannels | |
|
610 | ||
|
611 | fpointer = self.__fp.tell() | |
|
612 | ||
|
613 | junk = numpy.fromfile( self.__fp, self.__dataType, self.__pts2read ) | |
|
614 | ||
|
615 | if self.online: | |
|
616 | if junk.size != self.__blocksize: | |
|
617 | nTries = 0 | |
|
618 | while( nTries < self.__nTries ): | |
|
619 | nTries += 1 | |
|
620 | print "Waiting for the next block, try %03d ..." % nTries | |
|
621 | time.sleep( self.__delay ) | |
|
622 | self.__fp.seek( fpointer ) | |
|
623 | fpointer = self.__fp.tell() | |
|
624 | junk = numpy.fromfile( self.__fp, self.__dataType, self.__pts2read ) | |
|
625 | if junk.size == self.__blocksize: | |
|
626 | nTries = 0 | |
|
627 | break | |
|
628 | if nTries > 0: | |
|
629 | return | |
|
630 | ||
|
631 | junk = junk.reshape( (self.m_ProcessingHeader.profilesPerBlock, self.m_ProcessingHeader.numHeights, self.m_SystemHeader.numChannels) ) | |
|
632 | ||
|
633 | data = junk['real'] + junk['imag']*1j | |
|
634 | ||
|
635 | self.__datablockIndex = 0 | |
|
636 | ||
|
637 | self.datablock = data | |
|
638 | ||
|
639 | self.__flagNewFile = 0 | |
|
640 | ||
|
641 | self.__flagNewBlock = 1 | |
|
642 | ||
|
643 | self.nReadBlocks += 1 | |
|
644 | ||
|
645 | def __hasNotDataInBuffer(self): | |
|
646 | if self.__datablockIndex >= self.m_ProcessingHeader.profilesPerBlock: | |
|
647 | return 1 | |
|
648 | ||
|
649 | return 0 | |
|
650 | ||
|
651 | ||
|
652 | def __searchFilesOnLine( self, path, startDateTime=None, ext = ".r" ): | |
|
653 | """ | |
|
654 | Searches for the last file of the last folder (whether or not it is determined by startDateTime) and |
|
655 | returns the file that was found, together with some additional data. |
|
656 | ||
|
657 | Input: |
|
658 | path : folder that contains the files holding the data |
|
659 | startDateTime : specific point in time for which the data is required |
|
660 | ext : extension of the files |
|
661 | ||
|
662 | Return: |
|
663 | year : the year |
|
664 | doy : the day-of-year number |
|
665 | set : the file set number |
|
666 | filename : the last file of a given folder |
|
667 | directory : the directory where the file was found |
|
668 | """ | |
|
669 | ||
|
670 | print "Searching files ..." | |
|
671 | ||
|
672 | dirList = [] | |
|
673 | directory = None | |
|
674 | ||
|
675 | if startDateTime == None: | |
|
676 | for thisPath in os.listdir(path): | |
|
677 | if os.path.isdir( os.path.join(path,thisPath) ): | |
|
678 | dirList.append( thisPath ) | |
|
679 | ||
|
680 | dirList = sorted( dirList, key=str.lower ) #use sorted so the list is ordered regardless of whether the names are upper- or lowercase |
|
681 | if len(dirList) > 0 : | |
|
682 | directory = dirList[-1] | |
|
683 | else: | |
|
684 | year = startDateTime.timetuple().tm_year | |
|
685 | doy = startDateTime.timetuple().tm_yday | |
|
686 | ||
|
687 | doyPath = "D%04d%03d" % (year,doy) #name in uppercase |
|
688 | if os.path.isdir( os.path.join(path,doyPath) ): | |
|
689 | directory = doyPath | |
|
690 | ||
|
691 | doyPath = doyPath.lower() #name in lowercase |
|
692 | if os.path.isdir( os.path.join(path,doyPath) ): | |
|
693 | directory = doyPath | |
|
694 | ||
|
695 | if directory == None: | |
|
696 | return 0, 0, 0, None, None | |
|
697 | ||
|
698 | filename = getlastFileFromPath( os.listdir( os.path.join(path,directory) ), ext ) | |
|
699 | ||
|
700 | if filename == None: | |
|
701 | return 0, 0, 0, None, None | |
|
702 | ||
|
703 | year = int( directory[-7:-3] ) | |
|
704 | doy = int( directory[-3:] ) | |
|
705 | ln = len( ext ) | |
|
706 | set = int( filename[-ln-3:-ln] ) | |
|
707 | ||
|
708 | return year, doy, set, filename, directory | |
|
709 | ||
|
710 | ||
|
711 | def __searchFilesOffLine(self, path, startDateTime, endDateTime, set=None, expLabel = "", ext = ".r"): | |
|
712 | """ | |
|
713 | Searches for the files that match the specified parameters and are located |
|
714 | under the given path. For the search to work correctly the directory |
|
715 | structure must be the following: |
|
716 | ||
|
717 | ...path/D[yyyy][ddd]/expLabel/D[yyyy][ddd][sss].ext |
|
718 | ||
|
719 | [yyyy]: year |
|
720 | [ddd] : day of the year |
|
721 | [sss] : set number of the file |
|
722 | ||
|
723 | Inputs: | |
|
724 | path : Data directory where the search is performed. All the |
|
725 | files that match the search criteria are stored |
|
726 | in a list and then returned. |
|
727 | startDateTime : Start date. Rejects every file whose |
|
728 | file end time < startDateTime (datetime.datetime object) |
|
729 | ||
|
730 | endDateTime : End date. Rejects every file whose |
|
731 | file start time > endDateTime (datetime.datetime object) |
|
732 | ||
|
733 | set : Set of the first file to read. Defaults to None |
|
734 | ||
|
735 | expLabel : Name of the data subdirectory. Defaults to "" |
|
736 | ||
|
737 | ext : Extension of the files to read. Defaults to .r |
|
738 | ||
|
739 | Return: | |
|
740 | ||
|
741 | (pathList, filenameList) | |
|
742 | ||
|
743 | pathList : List of directories where files were found within |
|
744 | the specified parameters |
|
745 | filenameList : List of files (full path) that matched the |
|
746 | specified parameters. |
|
747 | ||
|
748 | Affected variables: |
|
749 | ||
|
750 | self.__filenameList: List of files (full path) that the class uses as the |
|
751 | source for reading the data blocks; once all the data |
|
752 | blocks of a given file have been read, the reader |
|
753 | moves on to the next file in the list. |
|
754 | ||
|
755 | Exceptions: |
|
756 | ||
|
757 | """ | |
|
758 | ||
|
759 | print "Searching files ..." | |
|
760 | ||
|
761 | dirList = [] | |
|
762 | for thisPath in os.listdir(path): | |
|
763 | if os.path.isdir(os.path.join(path,thisPath)): | |
|
764 | dirList.append(thisPath) | |
|
765 | ||
|
766 | pathList = [] | |
|
767 | ||
|
768 | thisDateTime = startDateTime | |
|
769 | ||
|
770 | while(thisDateTime <= endDateTime): | |
|
771 | year = thisDateTime.timetuple().tm_year | |
|
772 | doy = thisDateTime.timetuple().tm_yday | |
|
773 | ||
|
774 | match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy)) | |
|
775 | if len(match) == 0: | |
|
776 | thisDateTime += datetime.timedelta(1) | |
|
777 | continue | |
|
778 | ||
|
779 | pathList.append(os.path.join(path,match[0],expLabel)) | |
|
780 | thisDateTime += datetime.timedelta(1) | |
|
781 | ||
|
782 | startUtSeconds = time.mktime(startDateTime.timetuple()) | |
|
783 | endUtSeconds = time.mktime(endDateTime.timetuple()) | |
|
784 | ||
|
785 | filenameList = [] | |
|
786 | for thisPath in pathList: | |
|
787 | fileList = glob.glob1(thisPath, "*%s" %ext) | |
|
788 | fileList.sort() | |
|
789 | for file in fileList: | |
|
790 | filename = os.path.join(thisPath,file) | |
|
791 | if isThisFileinRange(filename, startUtSeconds, endUtSeconds): | |
|
792 | filenameList.append(filename) | |
|
793 | ||
|
794 | self.__filenameList = filenameList | |
|
795 | ||
|
796 | return pathList, filenameList | |
|
797 | ||
|
798 | ||
|
799 | def __initFilesOnline( self, path, dirfilename, filename ): | |
|
800 | """ | |
|
801 | Checks that the first file contains valid data; to do so it reads the first block |
|
802 | of the file. If the file is not yet valid it waits a certain amount of time for it |
|
803 | to become valid; if the file still cannot be validated after that time the method |
|
804 | returns 0, otherwise it returns 1 |
|
805 | ||
|
806 | Affected: | |
|
807 | m_BasicHeader | |
|
808 | ||
|
809 | Return: | |
|
810 | 0 : the file is not valid for reading |
|
811 | 1 : the file is valid for reading |
|
812 | """ | |
|
813 | m_BasicHeader = BasicHeader() | |
|
814 | ||
|
815 | file = os.path.join( path, dirfilename, filename ) | |
|
816 | ||
|
817 | nTries = 0 | |
|
818 | while(True): | |
|
819 | ||
|
820 | nTries += 1 | |
|
821 | if nTries > self.__nTries: | |
|
822 | break | |
|
823 | ||
|
824 | try: | |
|
825 | fp = open( file,'rb' ) #binary read |
|
826 | except: | |
|
827 | raise IOError, "The file %s can't be opened" %(file) | |
|
828 | ||
|
829 | try: | |
|
830 | m_BasicHeader.read(fp) | |
|
831 | except: | |
|
832 | print "The file %s is empty" % filename | |
|
833 | ||
|
834 | fp.close() | |
|
835 | ||
|
836 | if m_BasicHeader.size > 24: | |
|
837 | break | |
|
838 | ||
|
839 | print 'waiting for new block: try %02d' % ( nTries ) | |
|
840 | time.sleep( self.__delay) | |
|
841 | ||
|
842 | if m_BasicHeader.size <= 24: | |
|
843 | return 0 | |
|
844 | ||
|
845 | return 1 | |
|
846 | ||
|
847 | ||
|
848 | def setup(self, path, startDateTime, endDateTime=None, set=None, expLabel = "", ext = ".r", online = 0): | |
|
849 | """ | |
|
850 | setup configures the read parameters of the VoltageReader class. |
|
851 | ||
|
852 | If the read mode is offline, a search is first made for all the files that match |
|
853 | the specified parameters; this list of files is stored in |
|
854 | self.__filenameList. |
|
855 | ||
|
856 | Input: | |
|
857 | path : Directory where the data to read is located. Inside this |
|
858 | directory there should be subdirectories of the form: |
|
859 | ||
|
860 | path/D[yyyy][ddd]/expLabel/P[yyyy][ddd][sss][ext] | |
|
861 | ||
|
862 | startDateTime : Start date. Rejects every file whose |
|
863 | file end time < startDateTime (datetime.datetime object) |
|
864 | ||
|
865 | endDateTime : End date. If not None, rejects every file whose |
|
866 | file start time > endDateTime (datetime.datetime object) |
|
867 | ||
|
868 | set : Set of the first file to read. Defaults to None |
|
869 | ||
|
870 | expLabel : Name of the data subdirectory. Defaults to "" |
|
871 | ||
|
872 | ext : Extension of the files to read. Defaults to .r |
|
873 | ||
|
874 | online : If it is == 0 the reader searches for files that meet the given conditions |
|
875 | ||
|
876 | Return: | |
|
877 | 0 : If no files matching the given conditions are found |
|
878 | 1 : If files matching the given conditions are found |
|
879 | ||
|
880 | Affected: | |
|
881 | self.startUTCSeconds | |
|
882 | self.endUTCSeconds | |
|
883 | self.startYear | |
|
884 | self.endYear | |
|
885 | self.startDoy | |
|
886 | self.endDoy | |
|
887 | self.__pathList | |
|
888 | self.__filenameList | |
|
889 | self.online | |
|
890 | """ | |
|
891 | if online: | |
|
892 | nTries = 0 | |
|
893 | while( nTries < self.__nTries ): | |
|
894 | nTries += 1 | |
|
895 | subfolder = "D%04d%03d" % ( startDateTime.timetuple().tm_year, startDateTime.timetuple().tm_yday ) | |
|
896 | year, doy, set, filename, dirfilename = self.__searchFilesOnLine( path, startDateTime, ext ) | |
|
897 | if filename == None: | |
|
898 | file = os.path.join( path, subfolder ) | |
|
899 | print "Searching first file in \"%s\", try %03d ..." % ( file, nTries ) | |
|
900 | time.sleep( self.__delay ) | |
|
901 | else: | |
|
902 | break | |
|
903 | ||
|
904 | if filename == None: | |
|
905 | print "No files On Line" | |
|
906 | return 0 | |
|
907 | ||
|
908 | if self.__initFilesOnline( path, dirfilename, filename ) == 0: | |
|
909 | print "The file %s doesn't have enough data" % filename |
|
910 | return 0 | |
|
911 | ||
|
912 | self.__year = year | |
|
913 | self.__doy = doy | |
|
914 | self.__set = set - 1 | |
|
915 | self.__path = path | |
|
916 | ||
|
917 | else: | |
|
918 | pathList, filenameList = self.__searchFilesOffLine( path, startDateTime, endDateTime, set, expLabel, ext ) | |
|
919 | self.__fileIndex = -1 | |
|
920 | self.__pathList = pathList | |
|
921 | self.__filenameList = filenameList | |
|
922 | ||
|
923 | self.online = online | |
|
924 | self.__ext = ext | |
|
925 | ||
|
926 | if not( self.__setNextFile() ): | |
|
927 | if (startDateTime != None) and (endDateTime != None): | |
|
928 | print "No files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime()) | |
|
929 | elif startDateTime != None: | |
|
930 | print "No files in : %s" % startDateTime.ctime() | |
|
931 | else: | |
|
932 | print "No files" | |
|
933 | return 0 | |
|
934 | ||
|
935 | if startDateTime != None: | |
|
936 | self.startUTCSeconds = time.mktime(startDateTime.timetuple()) | |
|
937 | self.startYear = startDateTime.timetuple().tm_year | |
|
938 | self.startDoy = startDateTime.timetuple().tm_yday | |
|
939 | ||
|
940 | if endDateTime != None: | |
|
941 | self.endUTCSeconds = time.mktime(endDateTime.timetuple()) | |
|
942 | self.endYear = endDateTime.timetuple().tm_year | |
|
943 | self.endDoy = endDateTime.timetuple().tm_yday | |
|
944 | #call fillHeaderValues() - to Data Object | |
|
945 | ||
|
946 | self.m_Voltage.m_BasicHeader = self.m_BasicHeader.copy() | |
|
947 | self.m_Voltage.m_ProcessingHeader = self.m_ProcessingHeader.copy() | |
|
948 | self.m_Voltage.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() | |
|
949 | self.m_Voltage.m_SystemHeader = self.m_SystemHeader.copy() | |
|
950 | self.m_Voltage.dataType = self.__dataType | |
|
951 | ||
|
952 | return 1 | |
|
953 | ||
|
954 | ||
|
955 | def readNextBlock( self ): | |
|
956 | """ | |
|
957 | Sets a new data block to be read and reads it; if there are no more blocks |
|
958 | available in the current file it jumps to the next one. |
|
959 | ||
|
960 | Affected: | |
|
961 | self.__lastUTTime | |
|
962 | ||
|
963 | Return: None | |
|
964 | """ | |
|
965 | if not(self.__setNewBlock()): | |
|
966 | return 0 | |
|
967 | ||
|
968 | self.__readBlock() | |
|
969 | ||
|
970 | self.__lastUTTime = self.m_BasicHeader.utc | |
|
971 | ||
|
972 | return 1 | |
|
973 | ||
|
974 | ||
|
975 | def getData( self ): | |
|
976 | """ | |
|
977 | getData takes one unit of data from the read buffer and copies it into the "Voltage" class |
|
978 | together with all its associated parameters (metadata). When there is no data left in the |
|
979 | read buffer, a new data block has to be read using "readNextBlock" |
|
980 | ||
|
981 | It also increments the buffer counter by 1. |
|
982 | ||
|
983 | Return: | |
|
984 | data : returns one profile of voltages (heights * channels) copied from the |
|
985 | buffer. If there are no more files to read it returns None. |
|
986 | ||
|
987 | Affected variables: |
|
988 | self.m_Voltage | |
|
989 | self.__datablockIndex | |
|
990 | ||
|
991 | Affected: | |
|
992 | self.m_Voltage | |
|
993 | self.__datablockIndex | |
|
994 | self.flagNoContinuousBlock | |
|
995 | self.__flagNewBlock | |
|
996 | """ | |
|
997 | self.flagNoContinuousBlock = 0 | |
|
998 | self.__flagNewBlock = 0 | |
|
999 | ||
|
1000 | if self.__hasNotDataInBuffer(): | |
|
1001 | ||
|
1002 | self.readNextBlock() | |
|
1003 | ||
|
1004 | self.m_Voltage.m_BasicHeader = self.m_BasicHeader.copy() | |
|
1005 | self.m_Voltage.m_ProcessingHeader = self.m_ProcessingHeader.copy() | |
|
1006 | self.m_Voltage.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() | |
|
1007 | self.m_Voltage.m_SystemHeader = self.m_SystemHeader.copy() | |
|
1008 | self.m_Voltage.heights = self.__heights | |
|
1009 | self.m_Voltage.dataType = self.__dataType | |
|
1010 | ||
|
1011 | if self.flagNoMoreFiles == 1: | |
|
1012 | print 'Process finished' | |
|
1013 | return None | |
|
1014 | ||
|
1015 | #data is a 3-dimensional numpy array (profiles, heights and channels) |
|
1016 | ||
|
1017 | time = self.m_BasicHeader.utc + self.__datablockIndex * self.__ippSeconds | |
|
1018 | self.m_Voltage.m_BasicHeader.utc = time | |
|
1019 | ||
|
1020 | self.m_Voltage.flagNoData = False | |
|
1021 | self.m_Voltage.flagNoContinuousBlock = self.flagNoContinuousBlock | |
|
1022 | ||
|
1023 | self.m_Voltage.data = self.datablock[self.__datablockIndex,:,:] | |
|
1024 | self.m_Voltage.profileIndex = self.__datablockIndex | |
|
1025 | ||
|
1026 | self.__datablockIndex += 1 | |
|
1027 | ||
|
1028 | #call setData - to Data Object | |
|
1029 | ||
|
1030 | return self.m_Voltage.data | |
|
1031 | ||
|
1032 | ||
|
1033 | #class VoltageWriter(DataWriter): | |
|
1034 | # """ | |
|
1035 | # This class allows writing voltage data to processed (.r) files. Data is always |
|
1036 | # written block by block. |
|
1037 | # """ | |
|
1038 | # __configHeaderFile = 'wrSetHeadet.txt' | |
|
1039 | # | |
|
1040 | # def __init__( self, m_Voltage = None ): | |
|
1041 | # """ | |
|
1042 | # Inicializador de la clase VoltageWriter para la escritura de datos de espectros. | |
|
1043 | # | |
|
1044 | # Affected: | |
|
1045 | # self.m_Voltage | |
|
1046 | # self.m_BasicHeader | |
|
1047 | # self.m_SystemHeader | |
|
1048 | # self.m_RadarControllerHeader | |
|
1049 | # self.m_ProcessingHeader | |
|
1050 | # | |
|
1051 | # Return: None | |
|
1052 | # """ | |
|
1053 | # if m_Voltage == None: | |
|
1054 | # m_Voltage = Voltage() | |
|
1055 | # | |
|
1056 | # self.m_Voltage = m_Voltage | |
|
1057 | # | |
|
1058 | # self.__path = None | |
|
1059 | # | |
|
1060 | # self.__fp = None | |
|
1061 | # | |
|
1062 | # self.__format = None | |
|
1063 | # | |
|
1064 | # self.__blocksCounter = 0 | |
|
1065 | # | |
|
1066 | # self.__setFile = None | |
|
1067 | # | |
|
1068 | # self.__flagNewFile = 1 | |
|
1069 | # | |
|
1070 | # self.datablock = None | |
|
1071 | # | |
|
1072 | # self.__datablockIndex = 0 | |
|
1073 | # | |
|
1074 | # self.__dataType = None | |
|
1075 | # | |
|
1076 | # self.__ext = None | |
|
1077 | # | |
|
1078 | # self.__shapeBuffer = None | |
|
1079 | # | |
|
1080 | # self.nWriteBlocks = 0 | |
|
1081 | # | |
|
1082 | # self.__flagNewBlock = 0 | |
|
1083 | # | |
|
1084 | # self.flagNoMoreFiles = 0 | |
|
1085 | # | |
|
1086 | # self.filename = None | |
|
1087 | # | |
|
1088 | # self.m_BasicHeader= BasicHeader() | |
|
1089 | # | |
|
1090 | # self.m_SystemHeader = SystemHeader() | |
|
1091 | # | |
|
1092 | # self.m_RadarControllerHeader = RadarControllerHeader() | |
|
1093 | # | |
|
1094 | # self.m_ProcessingHeader = ProcessingHeader() | |
|
1095 | # | |
|
1096 | # | |
|
1097 | # def __writeFirstHeader( self ): | |
|
1098 | # """ | |
|
1099 | # Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader) | |
|
1100 | # | |
|
1101 | # Affected: | |
|
1102 | # __dataType | |
|
1103 | # | |
|
1104 | # Return: | |
|
1105 | # None | |
|
1106 | # """ | |
|
1107 | # self.__writeBasicHeader() | |
|
1108 | # self.__wrSystemHeader() | |
|
1109 | # self.__wrRadarControllerHeader() | |
|
1110 | # self.__wrProcessingHeader() | |
|
1111 | # self.__dataType = self.m_Voltage.dataType | |
|
1112 | # | |
|
1113 | # | |
|
1114 | # def __writeBasicHeader( self, fp=None ): | |
|
1115 | # """ | |
|
1116 | # Escribe solo el Basic header en el file creado | |
|
1117 | # | |
|
1118 | # Return: | |
|
1119 | # None | |
|
1120 | # """ | |
|
1121 | # if fp == None: | |
|
1122 | # fp = self.__fp | |
|
1123 | # | |
|
1124 | # self.m_BasicHeader.write(fp) | |
|
1125 | # | |
|
1126 | # | |
|
1127 | # def __wrSystemHeader( self, fp=None ): | |
|
1128 | # """ | |
|
1129 | # Escribe solo el System header en el file creado | |
|
1130 | # | |
|
1131 | # Return: | |
|
1132 | # None | |
|
1133 | # """ | |
|
1134 | # if fp == None: | |
|
1135 | # fp = self.__fp | |
|
1136 | # | |
|
1137 | # self.m_SystemHeader.write(fp) | |
|
1138 | # | |
|
1139 | # | |
|
1140 | # def __wrRadarControllerHeader( self, fp=None ): | |
|
1141 | # """ | |
|
1142 | # Escribe solo el RadarController header en el file creado | |
|
1143 | # | |
|
1144 | # Return: | |
|
1145 | # None | |
|
1146 | # """ | |
|
1147 | # if fp == None: | |
|
1148 | # fp = self.__fp | |
|
1149 | # | |
|
1150 | # self.m_RadarControllerHeader.write(fp) | |
|
1151 | # | |
|
1152 | # | |
|
1153 | # def __wrProcessingHeader( self, fp=None ): | |
|
1154 | # """ | |
|
1155 | # Escribe solo el Processing header en el file creado | |
|
1156 | # | |
|
1157 | # Return: | |
|
1158 | # None | |
|
1159 | # """ | |
|
1160 | # if fp == None: | |
|
1161 | # fp = self.__fp | |
|
1162 | # | |
|
1163 | # self.m_ProcessingHeader.write(fp) | |
|
1164 | # | |
|
1165 | # def __setNextFile( self ): | |
|
1166 | # """ | |
|
1167 | # Determina el siguiente file que sera escrito | |
|
1168 | # | |
|
1169 | # Affected: | |
|
1170 | # self.filename | |
|
1171 | # self.__subfolder | |
|
1172 | # self.__fp | |
|
1173 | # self.__setFile | |
|
1174 | # self.__flagNewFile | |
|
1175 | # | |
|
1176 | # Return: | |
|
1177 | # 0 : Si el archivo no puede ser escrito | |
|
1178 | # 1 : Si el archivo esta listo para ser escrito | |
|
1179 | # """ | |
|
1180 | # #setFile = self.__setFile | |
|
1181 | # ext = self.__ext | |
|
1182 | # path = self.__path | |
|
1183 | # | |
|
1184 | # #setFile += 1 | |
|
1185 | # | |
|
1186 | # if self.__fp != None: | |
|
1187 | # self.__fp.close() | |
|
1188 | # | |
|
1189 | # """ | |
|
1190 | # timeTuple = time.localtime(self.m_Voltage.m_BasicHeader.utc) # utc from m_Voltage | |
|
1191 | # file = 'D%4.4d%3.3d%3.3d%s' % (timeTuple.tm_year,timeTuple.tm_yday,setFile,ext) | |
|
1192 | # subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
|
1193 | # tmp = os.path.join(path,subfolder) | |
|
1194 | # if not(os.path.exists(tmp)): | |
|
1195 | # os.mkdir(tmp) | |
|
1196 | # """ | |
|
1197 | # ################################## | |
|
1198 | # if self.m_BasicHeader.size <= 24: return 0 #no existe la suficiente data para ser escrita | |
|
1199 | # | |
|
1200 | # timeTuple = time.localtime( self.m_Voltage.m_BasicHeader.utc ) # utc from m_Voltage | |
|
1201 | # subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
|
1202 | # | |
|
1203 | # tmp = os.path.join( path, subfolder ) | |
|
1204 | # if not( os.path.exists(tmp) ): | |
|
1205 | # os.mkdir(tmp) | |
|
1206 | # self.__setFile = -1 #inicializo mi contador de seteo | |
|
1207 | # else: | |
|
1208 | # filesList = os.listdir( tmp ) | |
|
1209 | # if len( filesList ) > 0: | |
|
1210 | # filesList = sorted( filesList, key=str.lower ) | |
|
1211 | # filen = filesList[-1] | |
|
1212 | # # el filename debera tener el siguiente formato | |
|
1213 | # # 0 1234 567 89A BCDE (hex) | |
|
1214 | # # D YYYY DDD SSS .ext | |
|
1215 | # if isNumber( filen[8:11] ): | |
|
1216 | # self.__setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file | |
|
1217 | # else: | |
|
1218 | # self.__setFile = -1 | |
|
1219 | # else: | |
|
1220 | # self.__setFile = -1 #inicializo mi contador de seteo | |
|
1221 | # | |
|
1222 | # setFile = self.__setFile | |
|
1223 | # setFile += 1 | |
|
1224 | # file = 'D%4.4d%3.3d%3.3d%s' % ( timeTuple.tm_year, timeTuple.tm_yday, setFile, ext ) | |
|
1225 | # ################################## | |
|
1226 | # | |
|
1227 | # filename = os.path.join( path, subfolder, file ) | |
|
1228 | # | |
|
1229 | # fp = open( filename,'wb' ) | |
|
1230 | # | |
|
1231 | # self.__blocksCounter = 0 | |
|
1232 | # | |
|
1233 | # #guardando atributos | |
|
1234 | # self.filename = filename | |
|
1235 | # self.__subfolder = subfolder | |
|
1236 | # self.__fp = fp | |
|
1237 | # self.__setFile = setFile | |
|
1238 | # self.__flagNewFile = 1 | |
|
1239 | # | |
|
1240 | # print 'Writing the file: %s'%self.filename | |
|
1241 | # | |
|
1242 | # self.__writeFirstHeader() | |
|
1243 | # | |
|
1244 | # return 1 | |
|
1245 | # | |
|
1246 | # | |
|
1247 | # def __setNewBlock( self ): | |
|
1248 | # """ | |
|
1249 | # Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header | |
|
1250 | # | |
|
1251 | # Return: | |
|
1252 | # 0 : si no pudo escribir nada | |
|
1253 | # 1 : Si escribio el Basic el First Header | |
|
1254 | # """ | |
|
1255 | # if self.__fp == None: | |
|
1256 | # self.__setNextFile() | |
|
1257 | # | |
|
1258 | # if self.__flagNewFile: | |
|
1259 | # return 1 | |
|
1260 | # | |
|
1261 | # if self.__blocksCounter < self.m_ProcessingHeader.dataBlocksPerFile: | |
|
1262 | # self.__writeBasicHeader() | |
|
1263 | # return 1 | |
|
1264 | # | |
|
1265 | # if not( self.__setNextFile() ): | |
|
1266 | # return 0 | |
|
1267 | # | |
|
1268 | # return 1 | |
|
1269 | # | |
|
1270 | # def __writeBlock( self ): | |
|
1271 | # """ | |
|
1272 | # Escribe el buffer en el file designado | |
|
1273 | # | |
|
1274 | # Affected: | |
|
1275 | # self.__datablockIndex | |
|
1276 | # self.__flagNewFile | |
|
1277 | # self.__flagNewBlock | |
|
1278 | # self.nWriteBlocks | |
|
1279 | # self.__blocksCounter | |
|
1280 | # | |
|
1281 | # Return: None | |
|
1282 | # """ | |
|
1283 | # data = numpy.zeros( self.__shapeBuffer, self.__dataType ) | |
|
1284 | # | |
|
1285 | # data['real'] = self.datablock.real | |
|
1286 | # data['imag'] = self.datablock.imag | |
|
1287 | # | |
|
1288 | # data = data.reshape( (-1) ) | |
|
1289 | # | |
|
1290 | # data.tofile( self.__fp ) | |
|
1291 | # | |
|
1292 | # self.datablock.fill(0) | |
|
1293 | # | |
|
1294 | # self.__datablockIndex = 0 | |
|
1295 | # | |
|
1296 | # self.__flagNewFile = 0 | |
|
1297 | # | |
|
1298 | # self.__flagNewBlock = 1 | |
|
1299 | # | |
|
1300 | # self.nWriteBlocks += 1 | |
|
1301 | # | |
|
1302 | # self.__blocksCounter += 1 | |
|
1303 | # | |
|
1304 | # | |
|
1305 | # def writeNextBlock( self ): | |
|
1306 | # """ | |
|
1307 | # Selecciona el bloque siguiente de datos y los escribe en un file | |
|
1308 | # | |
|
1309 | # Return: | |
|
1310 | # 0 : Si no hizo pudo escribir el bloque de datos | |
|
1311 | # 1 : Si no pudo escribir el bloque de datos | |
|
1312 | # """ | |
|
1313 | # if not(self.__setNewBlock()): | |
|
1314 | # return 0 | |
|
1315 | # | |
|
1316 | # self.__writeBlock() | |
|
1317 | # | |
|
1318 | # return 1 | |
|
1319 | # | |
|
1320 | # | |
|
1321 | # def __hasAllDataInBuffer( self ): | |
|
1322 | # if self.__datablockIndex >= self.m_ProcessingHeader.profilesPerBlock: | |
|
1323 | # return 1 | |
|
1324 | # | |
|
1325 | # return 0 | |
|
1326 | # | |
|
1327 | # | |
|
1328 | # def putData( self ): | |
|
1329 | # """ | |
|
1330 | # Setea un bloque de datos y luego los escribe en un file | |
|
1331 | # | |
|
1332 | # Affected: | |
|
1333 | # self.__flagNewBlock | |
|
1334 | # self.__datablockIndex | |
|
1335 | # | |
|
1336 | # Return: | |
|
1337 | # 0 : Si no hay data o no hay mas files que puedan escribirse | |
|
1338 | # 1 : Si se escribio la data de un bloque en un file | |
|
1339 | # """ | |
|
1340 | # self.__flagNewBlock = 0 | |
|
1341 | # | |
|
1342 | # if self.m_Voltage.flagNoData: | |
|
1343 | # return 0 | |
|
1344 | # | |
|
1345 | # if self.m_Voltage.flagNoContinuousBlock: | |
|
1346 | # | |
|
1347 | # self.datablock.fill(0) | |
|
1348 | # | |
|
1349 | # self.__datablockIndex = 0 | |
|
1350 | # self.__setNextFile() | |
|
1351 | # | |
|
1352 | # self.datablock[self.__datablockIndex,:,:] = self.m_Voltage.data | |
|
1353 | # | |
|
1354 | # self.__datablockIndex += 1 | |
|
1355 | # | |
|
1356 | # if self.__hasAllDataInBuffer(): | |
|
1357 | # | |
|
1358 | # self.__getHeader() | |
|
1359 | # self.writeNextBlock() | |
|
1360 | # | |
|
1361 | # if self.flagNoMoreFiles: | |
|
1362 | # #print 'Process finished' | |
|
1363 | # return 0 | |
|
1364 | # | |
|
1365 | # return 1 | |
|
1366 | # | |
|
1367 | # | |
|
1368 | # def __getHeader( self ): | |
|
1369 | # """ | |
|
1370 | # Obtiene una copia del First Header | |
|
1371 | # | |
|
1372 | # Affected: | |
|
1373 | # self.m_BasicHeader | |
|
1374 | # self.m_SystemHeader | |
|
1375 | # self.m_RadarControllerHeader | |
|
1376 | # self.m_ProcessingHeader | |
|
1377 | # self.__dataType | |
|
1378 | # | |
|
1379 | # Return: | |
|
1380 | # None | |
|
1381 | # """ | |
|
1382 | # self.m_BasicHeader = self.m_Voltage.m_BasicHeader.copy() | |
|
1383 | # self.m_SystemHeader = self.m_Voltage.m_SystemHeader.copy() | |
|
1384 | # self.m_RadarControllerHeader = self.m_Voltage.m_RadarControllerHeader.copy() | |
|
1385 | # self.m_ProcessingHeader = self.m_Voltage.m_ProcessingHeader.copy() | |
|
1386 | # self.__dataType = self.m_Voltage.dataType | |
|
1387 | # | |
|
1388 | # | |
|
1389 | # def __setHeaderByFile( self ): | |
|
1390 | # | |
|
1391 | # format = self.__format | |
|
1392 | # header = ['Basic','System','RadarController','Processing'] | |
|
1393 | # | |
|
1394 | # fmtFromFile = None | |
|
1395 | # headerFromFile = None | |
|
1396 | # | |
|
1397 | # | |
|
1398 | # fileTable = self.__configHeaderFile | |
|
1399 | # | |
|
1400 | # if os.access(fileTable, os.R_OK): | |
|
1401 | # import re, string | |
|
1402 | # | |
|
1403 | # f = open(fileTable,'r') | |
|
1404 | # lines = f.read() | |
|
1405 | # f.close() | |
|
1406 | # | |
|
1407 | # #Delete comments into expConfig | |
|
1408 | # while 1: | |
|
1409 | # | |
|
1410 | # startComment = string.find(lines.lower(),'#') | |
|
1411 | # if startComment == -1: | |
|
1412 | # break | |
|
1413 | # endComment = string.find(lines.lower(),'\n',startComment) | |
|
1414 | # lines = string.replace(lines,lines[startComment:endComment+1],'', 1) | |
|
1415 | # | |
|
1416 | # while expFromFile == None: | |
|
1417 | # | |
|
1418 | # currFmt = string.find(lines.lower(),'format="%s"' %(expName)) | |
|
1419 | # nextFmt = string.find(lines.lower(),'format',currFmt+10) | |
|
1420 | # | |
|
1421 | # if currFmt == -1: | |
|
1422 | # break | |
|
1423 | # if nextFmt == -1: | |
|
1424 | # nextFmt = len(lines)-1 | |
|
1425 | # | |
|
1426 | # fmtTable = lines[currFmt:nextFmt] | |
|
1427 | # lines = lines[nextFmt:] | |
|
1428 | # | |
|
1429 | # fmtRead = self.__getValueFromArg(fmtTable,'format') | |
|
1430 | # if fmtRead != format: | |
|
1431 | # continue | |
|
1432 | # fmtFromFile = fmtRead | |
|
1433 | # | |
|
1434 | # lines2 = fmtTable | |
|
1435 | # | |
|
1436 | # while headerFromFile == None: | |
|
1437 | # | |
|
1438 | # currHeader = string.find(lines2.lower(),'header="%s"' %(header)) | |
|
1439 | # nextHeader = string.find(lines2.lower(),'header',currHeader+10) | |
|
1440 | # | |
|
1441 | # if currHeader == -1: | |
|
1442 | # break | |
|
1443 | # if nextHeader == -1: | |
|
1444 | # nextHeader = len(lines2)-1 | |
|
1445 | # | |
|
1446 | # headerTable = lines2[currHeader:nextHeader] | |
|
1447 | # lines2 = lines2[nextHeader:] | |
|
1448 | # | |
|
1449 | # headerRead = self.__getValueFromArg(headerTable,'site') | |
|
1450 | # if not(headerRead in header): | |
|
1451 | # continue | |
|
1452 | # headerFromFile = headerRead | |
|
1453 | # | |
|
1454 | # if headerRead == 'Basic': | |
|
1455 | # self.m_BasicHeader.size = self.__getValueFromArg(headerTable,'size',lower=False) | |
|
1456 | # self.m_BasicHeader.version = self.__getValueFromArg(headerTable,'version',lower=False) | |
|
1457 | # self.m_BasicHeader.dataBlock = self.__getValueFromArg(headerTable,'dataBlock',lower=False) | |
|
1458 | # self.m_BasicHeader.utc = self.__getValueFromArg(headerTable,'utc',lower=False) | |
|
1459 | # self.m_BasicHeader.miliSecond = self.__getValueFromArg(headerTable,'miliSecond',lower=False) | |
|
1460 | # self.m_BasicHeader.timeZone = self.__getValueFromArg(headerTable,'timeZone',lower=False) | |
|
1461 | # self.m_BasicHeader.dstFlag = self.__getValueFromArg(headerTable,'dstFlag',lower=False) | |
|
1462 | # self.m_BasicHeader.errorCount = self.__getValueFromArg(headerTable,'errorCount',lower=False) | |
|
1463 | # | |
|
1464 | # else: | |
|
1465 | # print "file access denied:%s"%fileTable | |
|
1466 | # sys.exit(0) | |
|
1467 | # | |
|
1468 | # | |
|
1469 | # def setup( self, path, set=0, format='rawdata' ): | |
|
1470 | # """ | |
|
1471 | # Setea el tipo de formato en la cual sera guardada la data y escribe el First Header | |
|
1472 | # | |
|
1473 | # Inputs: | |
|
1474 | # path : el path destino en el cual se escribiran los files a crear | |
|
1475 | # format : formato en el cual sera salvado un file | |
|
1476 | # set : el setebo del file | |
|
1477 | # | |
|
1478 | # Return: | |
|
1479 | # 0 : Si no realizo un buen seteo | |
|
1480 | # 1 : Si realizo un buen seteo | |
|
1481 | # """ | |
|
1482 | # if format == 'hdf5': | |
|
1483 | # ext = '.hdf5' | |
|
1484 | # format = 'hdf5' | |
|
1485 | # print 'call hdf5 library' | |
|
1486 | # return 0 | |
|
1487 | # | |
|
1488 | # if format == 'rawdata': | |
|
1489 | # ext = '.r' | |
|
1490 | # format = 'Jicamarca' | |
|
1491 | # | |
|
1492 | # #call to config_headers | |
|
1493 | # #self.__setHeaderByFile() | |
|
1494 | # | |
|
1495 | # self.__path = path | |
|
1496 | # self.__setFile = set - 1 | |
|
1497 | # self.__ext = ext | |
|
1498 | # self.__format = format | |
|
1499 | # | |
|
1500 | # self.__getHeader() | |
|
1501 | # self.__shapeBuffer = (self.m_ProcessingHeader.profilesPerBlock, | |
|
1502 | # self.m_ProcessingHeader.numHeights, | |
|
1503 | # self.m_SystemHeader.numChannels ) | |
|
1504 | # | |
|
1505 | # self.datablock = numpy.zeros(self.__shapeBuffer, numpy.dtype('complex')) | |
|
1506 | # | |
|
1507 | ## if not(self.__setNextFile()): | |
|
1508 | ## return 0 | |
|
1509 | # return 1 | |
|
1510 | ||
|
1511 | class JRODataWriter(): | |
|
16 | 1512 | |
|
17 | 1513 | def __init__(self): |
|
18 | __buffer = 0 | |
|
19 | __buffer_count = 0 No newline at end of file | |
|
1514 | pass No newline at end of file |
@@ -15,12 +15,11 import time, datetime | |||
|
15 | 15 | path = os.path.split(os.getcwd())[0] |
|
16 | 16 | sys.path.append(path) |
|
17 | 17 | |
|
18 |
from Header |
|
|
19 | from DataIO import DataReader | |
|
20 | from DataIO import DataWriter | |
|
21 | ||
|
18 | from Model.JROHeader import * | |
|
22 | 19 | from Model.Spectra import Spectra |
|
23 | 20 | |
|
21 | from DataIO import JRODataReader | |
|
22 | from DataIO import JRODataWriter | |
|
24 | 23 | |
|
25 | 24 | def isFileOK(filename): |
|
26 | 25 | """ |
@@ -194,7 +193,7 def isThisFileinRange(filename,startUTSeconds,endUTSeconds): | |||
|
194 | 193 | return 1 |
|
195 | 194 | |
|
196 | 195 | |
|
197 | class SpectraReader(DataReader): | |
|
196 | class SpectraReader( JRODataReader ): | |
|
198 | 197 | """ |
|
199 | 198 | Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura |
|
200 | 199 | de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones) |
@@ -1270,8 +1269,7 class SpectraReader(DataReader): | |||
|
1270 | 1269 | |
|
1271 | 1270 | return 1 |
|
1272 | 1271 | |
|
1273 | ||
|
1274 | class SpectraWriter(DataWriter): | |
|
1272 | class SpectraWriter( JRODataWriter ): | |
|
1275 | 1273 | """ |
|
1276 | 1274 | Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura |
|
1277 | 1275 | de los datos siempre se realiza por bloques. |
@@ -14,14 +14,13 import time, datetime | |||
|
14 | 14 | path = os.path.split(os.getcwd())[0] |
|
15 | 15 | sys.path.append(path) |
|
16 | 16 | |
|
17 | from |
|
|
18 | from IO.DataIO import DataReader | |
|
19 | from IO.DataIO import DataWriter | |
|
20 | ||
|
17 | from Model.JROHeader import * | |
|
21 | 18 | from Model.Voltage import Voltage |
|
22 | 19 | |
|
20 | from IO.DataIO import JRODataReader | |
|
21 | from IO.DataIO import JRODataWriter | |
|
23 | 22 | |
|
24 | def isFileOK(filename): |
|
|
23 | def isFileOK(filename): | |
|
25 | 24 | """ |
|
26 | 25 | Determina si la cabecera de un archivo es valido o no, si lo es entonces seria un archivo que podria contener data, |
|
27 | 26 | si no seria un archivo invalido |
@@ -192,8 +191,7 def isThisFileinRange(filename,startUTSeconds,endUTSeconds): | |||
|
192 | 191 | |
|
193 | 192 | return 1 |
|
194 | 193 | |
|
195 | ||
|
196 | class VoltageReader(DataReader): | |
|
194 | class VoltageReader(JRODataReader): | |
|
197 | 195 | """ |
|
198 | 196 | Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura |
|
199 | 197 | de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones: |
@@ -1150,8 +1148,7 class VoltageReader(DataReader): | |||
|
1150 | 1148 | |
|
1151 | 1149 | return 1 #self.m_Voltage.data |
|
1152 | 1150 | |
|
1153 | ||
|
1154 | class VoltageWriter(DataWriter): | |
|
1151 | class VoltageWriter( JRODataWriter ): | |
|
1155 | 1152 | """ |
|
1156 | 1153 | Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura |
|
1157 | 1154 | de los datos siempre se realiza por bloques. |
|
1 | NO CONTENT: file was removed |