@@ -1,1618 +1,1619 @@
"""
Created on Jul 2, 2014

@author: roj-idl71
"""
import os
import sys
import glob
import time
import numpy
import fnmatch
import inspect
import datetime
import zmq

from schainpy.model.proc.jroproc_base import Operation, MPDecorator
from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
from schainpy.utils import log
import schainpy.admin

LOCALTIME = True
DT_DIRECTIVES = {
    '%Y': 4,
    '%y': 2,
    '%m': 2,
    '%d': 2,
    '%j': 3,
    '%H': 2,
    '%M': 2,
    '%S': 2,
    '%f': 6
}


def isNumber(cad):
    """
    Checks whether the characters of a string can be converted to a number.

    Exceptions:
        If the string cannot be converted to a number
    Input:
        str, string to be analyzed to determine whether it can be converted to a number

    Return:
        True : the string is numeric
        False : the string is not numeric
    """
    try:
        float(cad)
        return True
    except:
        return False
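# Quick usage sketch (illustrative values only): isNumber() accepts anything that
# float() can parse, e.g.
#
#     isNumber('2014')   # -> True
#     isNumber('3.5e2')  # -> True
#     isNumber('D2009')  # -> False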


def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determines whether a data file falls inside the specified date range.

    Inputs:
        filename : full name of the data file in Jicamarca format (.r)

        startUTSeconds : start date of the selected range, given in
                         seconds counted from 01/01/1970.
        endUTSeconds : end date of the selected range, given in
                       seconds counted from 01/01/1970.

    Return:
        Boolean : True if the data file contains data within the specified
                  date range, otherwise False.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read.

    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print("Skipping the file %s because it does not have a valid header" % (filename))
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1


def isTimeInRange(thisTime, startTime, endTime):
    if endTime >= startTime:
        if (thisTime < startTime) or (thisTime > endTime):
            return 0
        return 1
    else:
        if (thisTime < startTime) and (thisTime > endTime):
            return 0
        return 1
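# Usage sketch (illustrative values): when endTime < startTime the range is taken
# to wrap past midnight, so only times between endTime and startTime are rejected:
#
#     isTimeInRange(datetime.time(23, 30), datetime.time(23, 0), datetime.time(1, 0))  # -> 1
#     isTimeInRange(datetime.time(12, 0), datetime.time(23, 0), datetime.time(1, 0))   # -> 0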


def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Returns the datetime of the first block if the data file falls inside the specified time range.

    Inputs:
        filename : full name of the data file in Jicamarca format (.r)

        startDate : start date of the selected range as a datetime.date

        endDate : end date of the selected range as a datetime.date

        startTime : start time of the selected range as a datetime.time

        endTime : end time of the selected range as a datetime.time

    Return:
        datetime : datetime of the first block if the data file contains data
                   within the specified range, otherwise None.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read.

    """

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    sts = firstBasicHeaderObj.read(fp)

    if not(sts):
        print("[Reading] Skipping the file %s because it does not have a valid header" % (filename))
        return None

    if not systemHeaderObj.read(fp):
        return None

    if not radarControllerHeaderObj.read(fp):
        return None

    if not processingHeaderObj.read(fp):
        return None

    filesize = os.path.getsize(filename)

    offset = processingHeaderObj.blockSize + 24  # header size

    if filesize <= offset:
        print("[Reading] %s: This file does not have enough data" % filename)
        return None

    fp.seek(-offset, 2)

    sts = lastBasicHeaderObj.read(fp)

    fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    #            o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #            startTime                    endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    #<<<<<<<<<<<o                             o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #            endTime                      startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime


def isFolderInDateRange(folder, startDate=None, endDate=None):
    """
    Returns 1 if the folder falls inside the specified date range.

    Inputs:
        folder : full name of the directory.
                 Its format should be "/path_root/?YYYYDDD"

                 where:
                     YYYY : year (e.g. 2015)
                     DDD : day of year (e.g. 305)

        startDate : start date of the selected range as a datetime.date

        endDate : end date of the selected range as a datetime.date

    Return:
        Boolean : True if the folder date falls within the specified date
                  range, otherwise False.
    Exceptions:
        If the directory does not have the expected format
    """

    basename = os.path.basename(folder)

    if not isRadarFolder(basename):
        print("The folder %s does not have the right format" % folder)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFolder(basename)

        if thisDate < startDate:
            return 0

        if thisDate > endDate:
            return 0

    return 1


def isFileInDateRange(filename, startDate=None, endDate=None):
    """
    Returns 1 if the data file falls inside the specified date range.

    Inputs:
        filename : full name of the data file in Jicamarca format (.r)

                   Its format should be "?YYYYDDDsss"

                   where:
                       YYYY : year (e.g. 2015)
                       DDD : day of year (e.g. 305)
                       sss : set

        startDate : start date of the selected range as a datetime.date

        endDate : end date of the selected range as a datetime.date

    Return:
        Boolean : True if the file date falls within the specified date
                  range, otherwise False.
    Exceptions:
        If the file does not have the expected format
    """

    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        print("The filename %s does not have the right format" % filename)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFile(basename)

        if thisDate < startDate:
            return 0

        if thisDate > endDate:
            return 0

    return 1


def getFileFromSet(path, ext, set):
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    myfile = fnmatch.filter(
        validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]
    else:
        filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
        print('the filename %s does not exist' % filename)
        print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
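# Usage sketch (hypothetical path and set): with year=2009, doy=307 and set=367 the
# fnmatch pattern built above is '*2009307367*', so a file named 'D2009307367.r'
# sitting in `path` would be returned:
#
#     getFileFromSet('/data/D2009307', '.r', 367)   # -> 'D2009307367.r' (if present)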


def getlastFileFromPath(path, ext):
    """
    Filters the file list, keeping only the files that match the "PYYYYDDDSSS.ext"
    format, and returns the last file of the remaining list.

    Input:
        fileList : list with all the files (without path) contained in a given folder
        ext : extension of the files contained in the folder

    Return:
        The last file of the given folder, without the path.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:

        year = thisFile[1:5]
        if not isNumber(year):
            continue

        doy = thisFile[5:8]
        if not isNumber(doy):
            continue

        year = int(year)
        doy = int(doy)

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None


def isRadarFolder(folder):
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return 0

    return 1


def isRadarFile(file):
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        set = int(file[8:11])
    except:
        return 0

    return 1


def getDateFromRadarFile(file):
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        set = int(file[8:11])
    except:
        return None

    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
    return thisDate


def getDateFromRadarFolder(folder):
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
    return thisDate
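# Usage sketch (illustrative names): the Jicamarca convention is xYYYYDDD for folders
# and xYYYYDDDSSS.ext for files, so day-of-year 307 of 2009 maps to November 3rd:
#
#     getDateFromRadarFolder('D2009307')     # -> datetime.date(2009, 11, 3)
#     getDateFromRadarFile('D2009307367.r')  # -> datetime.date(2009, 11, 3)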

def parse_format(s, fmt):

    for i in range(fmt.count('%')):
        x = fmt.index('%')
        d = DT_DIRECTIVES[fmt[x:x+2]]
        fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
    return fmt
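# Usage sketch (illustrative values): parse_format() replaces each strftime directive
# in `fmt` with the characters found at the same positions of `s`, using the widths
# declared in DT_DIRECTIVES, so the result can be parsed back with the same format:
#
#     parse_format('d2014183', '*%Y%j')                       # -> '*2014183'
#     datetime.datetime.strptime('*2014183', '*%Y%j').date()  # -> datetime.date(2014, 7, 2)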

class Reader(object):

    c = 3E8
    isConfig = False
    dtype = None
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None
    ext = None
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None
    firstHeaderSize = 0
    basicHeaderSize = 24
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False
    delay = 60
    nTries = 3  # number of retries
    nFiles = 3  # number of files for searching
    walk = True
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    topic = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open
    open_mode = 'rb'

    def run(self):

        raise NotImplementedError

    def getAllowedArgs(self):
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            return inspect.getargspec(self.run).args

    def set_kwargs(self, **kwargs):

        for key, value in kwargs.items():
            setattr(self, key, value)

    def find_folders(self, path, startDate, endDate, folderfmt, last=False):

        folders = [x for f in path.split(',')
                   for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
        folders.sort()

        if last:
            folders = [folders[-1]]

        for folder in folders:
            try:
                dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
                if dt >= startDate and dt <= endDate:
                    yield os.path.join(path, folder)
                else:
                    log.log('Skipping folder {}'.format(folder), self.name)
            except Exception as e:
                log.log('Skipping folder {}'.format(folder), self.name)
                continue
        return

    def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
                   expLabel='', last=False):
        for path in folders:
            files = glob.glob1(path+'/'+expLabel, '*{}'.format(ext))
            files.sort()
            if last:
                if files:
                    fo = files[-1]
                    try:
                        dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                        yield os.path.join(path, expLabel, fo)
                    except Exception as e:
                        pass
                    return
                else:
                    return

            for fo in files:
                try:
                    dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                    #print(dt)
                    #print(startDate)
                    #print(endDate)
                    if dt >= startDate and dt <= endDate:

                        yield os.path.join(path, expLabel, fo)

                    else:

                        log.log('Skipping file {}'.format(fo), self.name)
                except Exception as e:
                    log.log('Skipping file {}'.format(fo), self.name)
                    continue

    def searchFilesOffLine(self, path, startDate, endDate,
                           expLabel, ext, walk,
                           filefmt, folderfmt):
        """Search files in offline mode for the given arguments

        Return:
            Generator of files
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt)
            #print("folders: ", folders)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel)

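    # Usage sketch (paths and dates are hypothetical): the value returned above is the
    # lazy generator built by find_files(), which setNextFileOffline() consumes with next():
    #
    #     self.filenameList = self.searchFilesOffLine(
    #         '/data', datetime.date(2014, 7, 2), datetime.date(2014, 7, 3),
    #         '', '.r', True, self.filefmt, self.folderfmt)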
    def searchFilesOnLine(self, path, startDate, endDate,
                          expLabel, ext, walk,
                          filefmt, folderfmt):
        """Search for the last file of the last folder

        Arguments:
            path : folder that contains the data files
            expLabel : name of the sub-experiment (subfolder)
            ext : file extension
            walk : if enabled, search for files inside the day subdirectories (doypath)

        Return:
            generator with the full path of last filename
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt, last=True)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel, last=True)

    def setNextFile(self):
        """Set the next file to be read, open it and parse the file header"""

        #print("fp: ",self.fp)
        while True:

            #print(self.fp)
            if self.fp != None:
                self.fp.close()

            #print("setNextFile")
            #print("BEFORE OPENING",self.filename)
            if self.online:
                newFile = self.setNextFileOnline()

            else:

                newFile = self.setNextFileOffline()

            #print("newFile: ",newFile)
            if not(newFile):

                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reached')
                else:
                    if self.fileIndex == -1:
                        #print("OKK")
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:

                        raise schainpy.admin.SchainWarning('No more files to read')

            if self.verifyFile(self.filename):

                break

        ##print("BEFORE OPENING",self.filename)

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0

    def setNextFileOnline(self):
        """Check for the next file to be read in online mode.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean

        """

        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                nextFile = False
                continue

            if fullfilename is not None:
                break

            #self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            #print(fullfilename)
            self.fp = self.open_file(fullfilename, self.open_mode)

            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0

    def setNextFileOffline(self):
        """Open the next file to be read in offline mode"""

        try:
            filename = next(self.filenameList)
            self.fileIndex += 1
        except StopIteration:
            self.flagNoMoreFiles = 1
            return 0
        #print(self.fileIndex)
        #print(filename)
        self.filename = filename
        self.fileSize = os.path.getsize(filename)
        self.fp = self.open_file(filename, self.open_mode)
        self.flagIsNewFile = 1

        return 1

    @staticmethod
    def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
        """Check if the given datetime is in range"""

        if startDate <= dt.date() <= endDate:
            if startTime <= dt.time() <= endTime:
                return True
        return False
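    # Usage sketch (illustrative values): both the date and the time of `dt` must fall
    # inside their respective ranges:
    #
    #     Reader.isDateTimeInRange(datetime.datetime(2014, 7, 2, 12, 0),
    #                              datetime.date(2014, 7, 1), datetime.date(2014, 7, 3),
    #                              datetime.time(0, 0), datetime.time(23, 59))  # -> True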

    def verifyFile(self, filename):
        """Check for a valid file

        Arguments:
            filename -- full path filename

        Return:
            boolean
        """

        return True

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be read exists"""

        raise NotImplementedError

    def readFirstHeader(self):
        """Parse the file header"""

        pass

    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait until the current file grows enough to hold the next data block"""

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize  # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0

class JRODataReader(Reader):

    utc = 0
    nReadBlocks = 0
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24
    __isFirstTimeOnline = 1
    topic = ''
    filefmt = "*%Y%j***"
    folderfmt = "*%Y%j"
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']

    def getDtypeWidth(self):

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be read exists.

        Example :
            the correct name of the file is .../.../D2009307/P2009307367.ext

            then the function tries the following combinations
                .../.../y2009307367.ext
                .../.../Y2009307367.ext
                .../.../x2009307/y2009307367.ext
                .../.../x2009307/Y2009307367.ext
                .../.../X2009307/y2009307367.ext
                .../.../X2009307/Y2009307367.ext
            the last letter combination being, in this case, identical to the file searched for

        Return:
            str -- fullpath of the file
        """

        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']

        ##############DP##############

        elif self.ext.lower() == ".dat":  # dat
            prefixFileList = ['z', 'Z']

        ##############DP##############
        # loop over the possible combinations
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the directory name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # loop over the two possible prefix cases
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename

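    # Naming sketch (illustrative values): with year=2009, doy=307, set=367 and ext='.r'
    # the candidates built above look like
    #
    #     os.path.join(self.path, 'D2009307', 'D2009307367.r')
    #
    # plus the lowercase-prefix variants and the bare (prefix-less) path.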
    def __waitNewBlock(self):
        """
        Returns 1 if a new data block was found, 0 otherwise.

        If the reading mode is offline it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            #print(self.filename)
            time.sleep(self.delay)

        return 0

    def __setNewBlock(self):

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0
        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

938 | def readNextBlock(self): |
|
938 | def readNextBlock(self): | |
939 |
|
939 | |||
940 | while True: |
|
940 | while True: | |
941 | if not(self.__setNewBlock()): |
|
941 | if not(self.__setNewBlock()): | |
942 | continue |
|
942 | continue | |
943 |
|
943 | |||
944 | if not(self.readBlock()): |
|
944 | if not(self.readBlock()): | |
945 | return 0 |
|
945 | return 0 | |
946 |
|
946 | |||
947 | self.getBasicHeader() |
|
947 | self.getBasicHeader() | |
948 |
|
948 | |||
949 | if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime): |
|
949 | if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime): | |
950 | print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks, |
|
950 | print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks, | |
951 | self.processingHeaderObj.dataBlocksPerFile, |
|
951 | self.processingHeaderObj.dataBlocksPerFile, | |
952 | self.dataOut.datatime.ctime())) |
|
952 | self.dataOut.datatime.ctime())) | |
953 | continue |
|
953 | continue | |
954 |
|
954 | |||
955 | break |
|
955 | break | |
956 |
|
956 | |||
957 | if self.verbose: |
|
957 | if self.verbose: | |
958 | print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks, |
|
958 | print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks, | |
959 | self.processingHeaderObj.dataBlocksPerFile, |
|
959 | self.processingHeaderObj.dataBlocksPerFile, | |
960 | self.dataOut.datatime.ctime())) |
|
960 | self.dataOut.datatime.ctime())) | |
961 | #################DP################# |
|
961 | #################DP################# | |
962 | self.dataOut.TimeBlockDate = self.dataOut.datatime.ctime() |

962 | self.dataOut.TimeBlockDate = self.dataOut.datatime.ctime() | |
963 | self.dataOut.TimeBlockSeconds = time.mktime(time.strptime(self.dataOut.datatime.ctime())) |

963 | self.dataOut.TimeBlockSeconds = time.mktime(time.strptime(self.dataOut.datatime.ctime())) | |
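        # strptime(ctime()) parses the block datetime back from its ctime() string
        # and mktime() converts it to epoch seconds in local time, so
        # TimeBlockSeconds holds the block timestamp as whole seconds
        # (sub-second precision is dropped by ctime()).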
964 | #################DP################# |
|
964 | #################DP################# | |
965 | return 1 |
|
965 | return 1 | |
966 |
|
966 | |||
967 | def readFirstHeader(self): |
|
967 | def readFirstHeader(self): | |
968 |
|
968 | |||
969 | self.basicHeaderObj.read(self.fp) |
|
969 | self.basicHeaderObj.read(self.fp) | |
970 | self.systemHeaderObj.read(self.fp) |
|
970 | self.systemHeaderObj.read(self.fp) | |
971 | self.radarControllerHeaderObj.read(self.fp) |
|
971 | self.radarControllerHeaderObj.read(self.fp) | |
972 | self.processingHeaderObj.read(self.fp) |
|
972 | self.processingHeaderObj.read(self.fp) | |
973 | self.firstHeaderSize = self.basicHeaderObj.size |
|
973 | self.firstHeaderSize = self.basicHeaderObj.size | |
974 |
|
974 | |||
975 | datatype = int(numpy.log2((self.processingHeaderObj.processFlags & |
|
975 | datatype = int(numpy.log2((self.processingHeaderObj.processFlags & | |
976 | PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR)) |
|
976 | PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR)) | |
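        # The PROCFLAG.DATATYPE_* constants appear to be consecutive single-bit
        # flags, so log2(flags & DATATYPE_MASK) - log2(DATATYPE_CHAR) recovers the
        # datatype index 0..5, mapped below to complex pairs of
        # int8/int16/int32/int64/float32/float64 ('real'/'imag', little-endian).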
977 | if datatype == 0: |
|
977 | if datatype == 0: | |
978 | datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')]) |
|
978 | datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')]) | |
979 | elif datatype == 1: |
|
979 | elif datatype == 1: | |
980 | datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')]) |
|
980 | datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')]) | |
981 | elif datatype == 2: |
|
981 | elif datatype == 2: | |
982 | datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')]) |
|
982 | datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')]) | |
983 | elif datatype == 3: |
|
983 | elif datatype == 3: | |
984 | datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')]) |
|
984 | datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')]) | |
985 | elif datatype == 4: |
|
985 | elif datatype == 4: | |
986 | datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')]) |
|
986 | datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')]) | |
987 | elif datatype == 5: |
|
987 | elif datatype == 5: | |
988 | datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')]) |
|
988 | datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')]) | |
989 | else: |
|
989 | else: | |
990 | raise ValueError('Data type was not defined') |
|
990 | raise ValueError('Data type was not defined') | |
991 |
|
991 | |||
992 | self.dtype = datatype_str |
|
992 | self.dtype = datatype_str | |
993 | #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c |
|
993 | #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c | |
994 | self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \ |
|
994 | self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \ | |
995 | self.firstHeaderSize + self.basicHeaderSize * \ |
|
995 | self.firstHeaderSize + self.basicHeaderSize * \ | |
996 | (self.processingHeaderObj.dataBlocksPerFile - 1) |
|
996 | (self.processingHeaderObj.dataBlocksPerFile - 1) | |
997 | # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels) |
|
997 | # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels) | |
998 | # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels) |
|
998 | # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels) | |
999 | self.getBlockDimension() |
|
999 | self.getBlockDimension() | |
1000 |
|
1000 | |||
1001 | def verifyFile(self, filename): |
|
1001 | def verifyFile(self, filename): | |
1002 |
|
1002 | |||
1003 | flag = True |
|
1003 | flag = True | |
1004 |
|
1004 | |||
1005 | try: |
|
1005 | try: | |
1006 | fp = open(filename, 'rb') |
|
1006 | fp = open(filename, 'rb') | |
1007 | except IOError: |
|
1007 | except IOError: | |
1008 | log.error("File {} can't be opened".format(filename), self.name) |
|
1008 | log.error("File {} can't be opened".format(filename), self.name) | |
1009 | return False |
|
1009 | return False | |
1010 |
|
1010 | |||
1011 | if self.online and self.waitDataBlock(0): |
|
1011 | if self.online and self.waitDataBlock(0): | |
1012 | pass |
|
1012 | pass | |
1013 |
|
1013 | |||
1014 | basicHeaderObj = BasicHeader(LOCALTIME) |
|
1014 | basicHeaderObj = BasicHeader(LOCALTIME) | |
1015 | systemHeaderObj = SystemHeader() |
|
1015 | systemHeaderObj = SystemHeader() | |
1016 | radarControllerHeaderObj = RadarControllerHeader() |
|
1016 | radarControllerHeaderObj = RadarControllerHeader() | |
1017 | processingHeaderObj = ProcessingHeader() |
|
1017 | processingHeaderObj = ProcessingHeader() | |
1018 |
|
1018 | |||
1019 | if not(basicHeaderObj.read(fp)): |
|
1019 | if not(basicHeaderObj.read(fp)): | |
1020 | flag = False |
|
1020 | flag = False | |
1021 | if not(systemHeaderObj.read(fp)): |
|
1021 | if not(systemHeaderObj.read(fp)): | |
1022 | flag = False |
|
1022 | flag = False | |
1023 | if not(radarControllerHeaderObj.read(fp)): |
|
1023 | if not(radarControllerHeaderObj.read(fp)): | |
1024 | flag = False |
|
1024 | flag = False | |
1025 | if not(processingHeaderObj.read(fp)): |
|
1025 | if not(processingHeaderObj.read(fp)): | |
1026 | flag = False |
|
1026 | flag = False | |
1027 | if not self.online: |
|
1027 | if not self.online: | |
1028 | dt1 = basicHeaderObj.datatime |
|
1028 | dt1 = basicHeaderObj.datatime | |
1029 | pos = self.fileSize-processingHeaderObj.blockSize-24 |
|
1029 | pos = self.fileSize-processingHeaderObj.blockSize-24 | |
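            # Seek to where the last block's basic header should start:
            # blockSize + 24 bytes before the end of the file (the 24 is assumed
            # to be the basic header size in bytes, cf. self.basicHeaderSize).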
1030 | if pos<0: |
|
1030 | if pos<0: | |
1031 | flag = False |
|
1031 | flag = False | |
1032 | log.error('Invalid size for file: {}'.format(self.filename), self.name) |
|
1032 | log.error('Invalid size for file: {}'.format(self.filename), self.name) | |
1033 | else: |
|
1033 | else: | |
1034 | fp.seek(pos) |
|
1034 | fp.seek(pos) | |
1035 | if not(basicHeaderObj.read(fp)): |
|
1035 | if not(basicHeaderObj.read(fp)): | |
1036 | flag = False |
|
1036 | flag = False | |
1037 | dt2 = basicHeaderObj.datatime |
|
1037 | dt2 = basicHeaderObj.datatime | |
1038 | if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \ |
|
1038 | if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \ | |
1039 | self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime): |
|
1039 | self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime): | |
1040 | flag = False |
|
1040 | flag = False | |
1041 |
|
1041 | |||
1042 | fp.close() |
|
1042 | fp.close() | |
1043 | return flag |
|
1043 | return flag | |
1044 |
|
1044 | |||
1045 | def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False): |
|
1045 | def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False): | |
1046 |
|
1046 | |||
1047 | path_empty = True |
|
1047 | path_empty = True | |
1048 |
|
1048 | |||
1049 | dateList = [] |
|
1049 | dateList = [] | |
1050 | pathList = [] |
|
1050 | pathList = [] | |
1051 |
|
1051 | |||
1052 | multi_path = path.split(',') |
|
1052 | multi_path = path.split(',') | |
1053 |
|
1053 | |||
1054 | if not walk: |
|
1054 | if not walk: | |
1055 |
|
1055 | |||
1056 | for single_path in multi_path: |
|
1056 | for single_path in multi_path: | |
1057 |
|
1057 | |||
1058 | if not os.path.isdir(single_path): |
|
1058 | if not os.path.isdir(single_path): | |
1059 | continue |
|
1059 | continue | |
1060 |
|
1060 | |||
1061 | fileList = glob.glob1(single_path, "*" + ext) |
|
1061 | fileList = glob.glob1(single_path, "*" + ext) | |
1062 |
|
1062 | |||
1063 | if not fileList: |
|
1063 | if not fileList: | |
1064 | continue |
|
1064 | continue | |
1065 |
|
1065 | |||
1066 | path_empty = False |
|
1066 | path_empty = False | |
1067 |
|
1067 | |||
1068 | fileList.sort() |
|
1068 | fileList.sort() | |
1069 |
|
1069 | |||
1070 | for thisFile in fileList: |
|
1070 | for thisFile in fileList: | |
1071 |
|
1071 | |||
1072 | if not os.path.isfile(os.path.join(single_path, thisFile)): |
|
1072 | if not os.path.isfile(os.path.join(single_path, thisFile)): | |
1073 | continue |
|
1073 | continue | |
1074 |
|
1074 | |||
1075 | if not isRadarFile(thisFile): |
|
1075 | if not isRadarFile(thisFile): | |
1076 | continue |
|
1076 | continue | |
1077 |
|
1077 | |||
1078 | if not isFileInDateRange(thisFile, startDate, endDate): |
|
1078 | if not isFileInDateRange(thisFile, startDate, endDate): | |
1079 | continue |
|
1079 | continue | |
1080 |
|
1080 | |||
1081 | thisDate = getDateFromRadarFile(thisFile) |
|
1081 | thisDate = getDateFromRadarFile(thisFile) | |
1082 |
|
1082 | |||
1083 | if thisDate in dateList or single_path in pathList: |
|
1083 | if thisDate in dateList or single_path in pathList: | |
1084 | continue |
|
1084 | continue | |
1085 |
|
1085 | |||
1086 | dateList.append(thisDate) |
|
1086 | dateList.append(thisDate) | |
1087 | pathList.append(single_path) |
|
1087 | pathList.append(single_path) | |
1088 |
|
1088 | |||
1089 | else: |
|
1089 | else: | |
1090 | for single_path in multi_path: |
|
1090 | for single_path in multi_path: | |
1091 |
|
1091 | |||
1092 | if not os.path.isdir(single_path): |
|
1092 | if not os.path.isdir(single_path): | |
1093 | continue |
|
1093 | continue | |
1094 |
|
1094 | |||
1095 | dirList = [] |
|
1095 | dirList = [] | |
1096 |
|
1096 | |||
1097 | for thisPath in os.listdir(single_path): |
|
1097 | for thisPath in os.listdir(single_path): | |
1098 |
|
1098 | |||
1099 | if not os.path.isdir(os.path.join(single_path, thisPath)): |
|
1099 | if not os.path.isdir(os.path.join(single_path, thisPath)): | |
1100 | continue |
|
1100 | continue | |
1101 |
|
1101 | |||
1102 | if not isRadarFolder(thisPath): |
|
1102 | if not isRadarFolder(thisPath): | |
1103 | continue |
|
1103 | continue | |
1104 |
|
1104 | |||
1105 | if not isFolderInDateRange(thisPath, startDate, endDate): |
|
1105 | if not isFolderInDateRange(thisPath, startDate, endDate): | |
1106 | continue |
|
1106 | continue | |
1107 |
|
1107 | |||
1108 | dirList.append(thisPath) |
|
1108 | dirList.append(thisPath) | |
1109 |
|
1109 | |||
1110 | if not dirList: |
|
1110 | if not dirList: | |
1111 | continue |
|
1111 | continue | |
1112 |
|
1112 | |||
1113 | dirList.sort() |
|
1113 | dirList.sort() | |
1114 |
|
1114 | |||
1115 | for thisDir in dirList: |
|
1115 | for thisDir in dirList: | |
1116 |
|
1116 | |||
1117 | datapath = os.path.join(single_path, thisDir, expLabel) |
|
1117 | datapath = os.path.join(single_path, thisDir, expLabel) | |
1118 | fileList = glob.glob1(datapath, "*" + ext) |
|
1118 | fileList = glob.glob1(datapath, "*" + ext) | |
1119 |
|
1119 | |||
1120 | if not fileList: |
|
1120 | if not fileList: | |
1121 | continue |
|
1121 | continue | |
1122 |
|
1122 | |||
1123 | path_empty = False |
|
1123 | path_empty = False | |
1124 |
|
1124 | |||
1125 | thisDate = getDateFromRadarFolder(thisDir) |
|
1125 | thisDate = getDateFromRadarFolder(thisDir) | |
1126 |
|
1126 | |||
1127 | pathList.append(datapath) |
|
1127 | pathList.append(datapath) | |
1128 | dateList.append(thisDate) |
|
1128 | dateList.append(thisDate) | |
1129 |
|
1129 | |||
1130 | dateList.sort() |
|
1130 | dateList.sort() | |
1131 |
|
1131 | |||
1132 | if walk: |
|
1132 | if walk: | |
1133 | pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel) |
|
1133 | pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel) | |
1134 | else: |
|
1134 | else: | |
1135 | pattern_path = multi_path[0] |
|
1135 | pattern_path = multi_path[0] | |
1136 |
|
1136 | |||
1137 | if path_empty: |
|
1137 | if path_empty: | |
1138 | raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate)) |
|
1138 | raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate)) | |
1139 | else: |
|
1139 | else: | |
1140 | if not dateList: |
|
1140 | if not dateList: | |
1141 | raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path)) |
|
1141 | raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path)) | |
1142 |
|
1142 | |||
1143 | if include_path: |
|
1143 | if include_path: | |
1144 | return dateList, pathList |
|
1144 | return dateList, pathList | |
1145 |
|
1145 | |||
1146 | return dateList |
|
1146 | return dateList | |
1147 |
|
1147 | |||
1148 | def setup(self, **kwargs): |
|
1148 | def setup(self, **kwargs): | |
1149 |
|
1149 | |||
1150 | self.set_kwargs(**kwargs) |
|
1150 | self.set_kwargs(**kwargs) | |
1151 | if not self.ext.startswith('.'): |
|
1151 | if not self.ext.startswith('.'): | |
1152 | self.ext = '.{}'.format(self.ext) |
|
1152 | self.ext = '.{}'.format(self.ext) | |
1153 |
|
1153 | |||
1154 | if self.server is not None: |
|
1154 | if self.server is not None: | |
1155 | if 'tcp://' in self.server: |
|
1155 | if 'tcp://' in self.server: | |
1156 | address = self.server |
|
1156 | address = self.server | |
1157 | else: |
|
1157 | else: | |
1158 | address = 'ipc:///tmp/%s' % self.server |
|
1158 | address = 'ipc:///tmp/%s' % self.server | |
1159 | self.server = address |
|
1159 | self.server = address | |
1160 | self.context = zmq.Context() |
|
1160 | self.context = zmq.Context() | |
1161 | self.receiver = self.context.socket(zmq.SUB) |
|
1161 | self.receiver = self.context.socket(zmq.SUB) | |
1162 | self.receiver.connect(self.server) |
|
1162 | self.receiver.connect(self.server) | |
1163 | self.receiver.setsockopt(zmq.SUBSCRIBE, str.encode(str(self.topic))) |
|
1163 | self.receiver.setsockopt(zmq.SUBSCRIBE, str.encode(str(self.topic))) | |
1164 | time.sleep(0.5) |
|
1164 | time.sleep(0.5) | |
1165 | print('[Starting] ReceiverData from {}'.format(self.server)) |
|
1165 | print('[Starting] ReceiverData from {}'.format(self.server)) | |
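            # When `server` is given, data is received over a ZMQ SUB socket
            # instead of being read from disk: a value starting with 'tcp://' is
            # used as-is, anything else is treated as the name of a local ipc
            # endpoint ('ipc:///tmp/<name>'); the socket subscribes to `topic`.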
1166 | else: |
|
1166 | else: | |
1167 | self.server = None |
|
1167 | self.server = None | |
1168 | if self.path == None: |
|
1168 | if self.path == None: | |
1169 | raise ValueError("[Reading] The path is not valid") |
|
1169 | raise ValueError("[Reading] The path is not valid") | |
1170 |
|
1170 | |||
1171 | if self.online: |
|
1171 | if self.online: | |
1172 | log.log("[Reading] Searching files in online mode...", self.name) |
|
1172 | log.log("[Reading] Searching files in online mode...", self.name) | |
1173 |
|
1173 | |||
1174 | for nTries in range(self.nTries): |
|
1174 | for nTries in range(self.nTries): | |
1175 | fullpath = self.searchFilesOnLine(self.path, self.startDate, |
|
1175 | fullpath = self.searchFilesOnLine(self.path, self.startDate, | |
1176 | self.endDate, self.expLabel, self.ext, self.walk, |
|
1176 | self.endDate, self.expLabel, self.ext, self.walk, | |
1177 | self.filefmt, self.folderfmt) |
|
1177 | self.filefmt, self.folderfmt) | |
1178 |
|
1178 | |||
1179 | try: |
|
1179 | try: | |
1180 | fullpath = next(fullpath) |
|
1180 | fullpath = next(fullpath) | |
1181 | except: |
|
1181 | except: | |
1182 | fullpath = None |
|
1182 | fullpath = None | |
1183 |
|
1183 | |||
1184 | if fullpath: |
|
1184 | if fullpath: | |
1185 | break |
|
1185 | break | |
1186 |
|
1186 | |||
1187 | log.warning( |
|
1187 | log.warning( | |
1188 | 'Waiting {} sec for a valid file in {}: try {} ...'.format( |
|
1188 | 'Waiting {} sec for a valid file in {}: try {} ...'.format( | |
1189 | self.delay, self.path, nTries + 1), |
|
1189 | self.delay, self.path, nTries + 1), | |
1190 | self.name) |
|
1190 | self.name) | |
1191 | time.sleep(self.delay) |
|
1191 | time.sleep(self.delay) | |
1192 |
|
1192 | |||
1193 | if not(fullpath): |
|
1193 | if not(fullpath): | |
1194 | raise schainpy.admin.SchainError( |
|
1194 | raise schainpy.admin.SchainError( | |
1195 | 'There isn\'t any valid file in {}'.format(self.path)) |
|
1195 | 'There isn\'t any valid file in {}'.format(self.path)) | |
1196 |
|
1196 | |||
1197 | pathname, filename = os.path.split(fullpath) |
|
1197 | pathname, filename = os.path.split(fullpath) | |
1198 | self.year = int(filename[1:5]) |
|
1198 | self.year = int(filename[1:5]) | |
1199 | self.doy = int(filename[5:8]) |
|
1199 | self.doy = int(filename[5:8]) | |
1200 | self.set = int(filename[8:11]) - 1 |
|
1200 | self.set = int(filename[8:11]) - 1 | |
1201 | else: |
|
1201 | else: | |
1202 | log.log("Searching files in {}".format(self.path), self.name) |
|
1202 | log.log("Searching files in {}".format(self.path), self.name) | |
1203 | self.filenameList = self.searchFilesOffLine(self.path, self.startDate, |
|
1203 | self.filenameList = self.searchFilesOffLine(self.path, self.startDate, | |
1204 | self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) |
|
1204 | self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) | |
1205 |
|
1205 | |||
1206 | self.setNextFile() |
|
1206 | self.setNextFile() | |
1207 |
|
1207 | |||
1208 | return |
|
1208 | return | |
1209 |
|
1209 | |||
1210 | def getBasicHeader(self): |
|
1210 | def getBasicHeader(self): | |
1211 |
|
1211 | |||
1212 | self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \ |
|
1212 | self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \ | |
1213 | 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds |
|
1213 | 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds | |
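        # The profile timestamp is the block UTC time, plus the millisecond field
        # converted to seconds, plus the offset of the current profile within the
        # block (profileIndex * ippSeconds).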
1214 |
|
1214 | |||
1215 | self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock |
|
1215 | self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock | |
1216 |
|
1216 | |||
1217 | self.dataOut.timeZone = self.basicHeaderObj.timeZone |
|
1217 | self.dataOut.timeZone = self.basicHeaderObj.timeZone | |
1218 |
|
1218 | |||
1219 | self.dataOut.dstFlag = self.basicHeaderObj.dstFlag |
|
1219 | self.dataOut.dstFlag = self.basicHeaderObj.dstFlag | |
1220 |
|
1220 | |||
1221 | self.dataOut.errorCount = self.basicHeaderObj.errorCount |
|
1221 | self.dataOut.errorCount = self.basicHeaderObj.errorCount | |
1222 |
|
1222 | |||
1223 | self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime |
|
1223 | self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime | |
1224 |
|
1224 | |||
1225 | self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs |
|
1225 | self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs | |
1226 |
|
1226 | |||
1227 | def getFirstHeader(self): |
|
1227 | def getFirstHeader(self): | |
1228 |
|
1228 | |||
1229 | raise NotImplementedError |
|
1229 | raise NotImplementedError | |
1230 |
|
1230 | |||
1231 | def getData(self): |
|
1231 | def getData(self): | |
1232 |
|
1232 | |||
1233 | raise NotImplementedError |
|
1233 | raise NotImplementedError | |
1234 |
|
1234 | |||
1235 | def hasNotDataInBuffer(self): |
|
1235 | def hasNotDataInBuffer(self): | |
1236 |
|
1236 | |||
1237 | raise NotImplementedError |
|
1237 | raise NotImplementedError | |
1238 |
|
1238 | |||
1239 | def readBlock(self): |
|
1239 | def readBlock(self): | |
1240 |
|
1240 | |||
1241 | raise NotImplementedError |
|
1241 | raise NotImplementedError | |
1242 |
|
1242 | |||
1243 | def isEndProcess(self): |
|
1243 | def isEndProcess(self): | |
1244 |
|
1244 | |||
1245 | return self.flagNoMoreFiles |
|
1245 | return self.flagNoMoreFiles | |
1246 |
|
1246 | |||
1247 | def printReadBlocks(self): |
|
1247 | def printReadBlocks(self): | |
1248 |
|
1248 | |||
1249 | print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks) |
|
1249 | print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks) | |
1250 |
|
1250 | |||
1251 | def printTotalBlocks(self): |
|
1251 | def printTotalBlocks(self): | |
1252 |
|
1252 | |||
1253 | print("[Reading] Number of read blocks %04d" % self.nTotalBlocks) |
|
1253 | print("[Reading] Number of read blocks %04d" % self.nTotalBlocks) | |
1254 |
|
1254 | |||
1255 | def run(self, **kwargs): |
|
1255 | def run(self, **kwargs): | |
1256 | """ |
|
1256 | """ | |
1257 |
|
1257 | |||
1258 | Arguments: |
|
1258 | Arguments: | |
1259 | path : |
|
1259 | path : | |
1260 | startDate : |
|
1260 | startDate : | |
1261 | endDate : |
|
1261 | endDate : | |
1262 | startTime : |
|
1262 | startTime : | |
1263 | endTime : |
|
1263 | endTime : | |
1264 | set : |
|
1264 | set : | |
1265 | expLabel : |
|
1265 | expLabel : | |
1266 | ext : |
|
1266 | ext : | |
1267 | online : |
|
1267 | online : | |
1268 | delay : |
|
1268 | delay : | |
1269 | walk : |
|
1269 | walk : | |
1270 | getblock : |
|
1270 | getblock : | |
1271 | nTxs : |
|
1271 | nTxs : | |
1272 | realtime : |
|
1272 | realtime : | |
1273 | blocksize : |
|
1273 | blocksize : | |
1274 | blocktime : |
|
1274 | blocktime : | |
1275 | skip : |
|
1275 | skip : | |
1276 | cursor : |
|
1276 | cursor : | |
1277 | warnings : |
|
1277 | warnings : | |
1278 | server : |
|
1278 | server : | |
1279 | verbose : |
|
1279 | verbose : | |
1280 | format : |
|
1280 | format : | |
1281 | oneDDict : |
|
1281 | oneDDict : | |
1282 | twoDDict : |
|
1282 | twoDDict : | |
1283 | independentParam : |
|
1283 | independentParam : | |
1284 | """ |
|
1284 | """ | |
1285 |
|
1285 | |||
1286 | if not(self.isConfig): |
|
1286 | if not(self.isConfig): | |
1287 | self.setup(**kwargs) |
|
1287 | self.setup(**kwargs) | |
1288 | self.isConfig = True |
|
1288 | self.isConfig = True | |
1289 | if self.server is None: |
|
1289 | if self.server is None: | |
1290 | self.getData() |
|
1290 | self.getData() | |
1291 | else: |
|
1291 | else: | |
1292 | try: |
|
1292 | try: | |
1293 | self.getFromServer() |
|
1293 | self.getFromServer() | |
1294 | except Exception as e: |
|
1294 | except Exception as e: | |
1295 | log.warning('Invalid block...') |
|
1295 | log.warning('Invalid block...') | |
1296 | self.dataOut.flagNoData = True |
|
1296 | self.dataOut.flagNoData = True | |
1297 |
|
1297 | |||
1298 |
|
1298 | |||
1299 | class JRODataWriter(Reader): |
|
1299 | class JRODataWriter(Reader): | |
1300 |
|
1300 | |||
1301 | """ |
|
1301 | """ | |
1302 | This class writes data to processed files (.r or .pdata). Data is always |

1302 | This class writes data to processed files (.r or .pdata). Data is always | |
1303 | written in blocks. |

1303 | written in blocks. | |
1304 | """ |
|
1304 | """ | |
1305 |
|
1305 | |||
1306 | setFile = None |
|
1306 | setFile = None | |
1307 | profilesPerBlock = None |
|
1307 | profilesPerBlock = None | |
1308 | blocksPerFile = None |
|
1308 | blocksPerFile = None | |
1309 | nWriteBlocks = 0 |
|
1309 | nWriteBlocks = 0 | |
1310 | fileDate = None |
|
1310 | fileDate = None | |
1311 |
|
1311 | |||
1312 | def __init__(self, dataOut=None): |
|
1312 | def __init__(self, dataOut=None): | |
1313 | raise NotImplementedError |
|
1313 | raise NotImplementedError | |
1314 |
|
1314 | |||
1315 | def hasAllDataInBuffer(self): |
|
1315 | def hasAllDataInBuffer(self): | |
1316 | raise NotImplementedError |
|
1316 | raise NotImplementedError | |
1317 |
|
1317 | |||
1318 | def setBlockDimension(self): |
|
1318 | def setBlockDimension(self): | |
1319 | raise NotImplementedError |
|
1319 | raise NotImplementedError | |
1320 |
|
1320 | |||
1321 | def writeBlock(self): |
|
1321 | def writeBlock(self): | |
1322 | raise NotImplementedError |
|
1322 | raise NotImplementedError | |
1323 |
|
1323 | |||
1324 | def putData(self): |
|
1324 | def putData(self): | |
1325 | raise NotImplementedError |
|
1325 | raise NotImplementedError | |
1326 |
|
1326 | |||
1327 | def getDtypeWidth(self): |
|
1327 | def getDtypeWidth(self): | |
1328 |
|
1328 | |||
1329 | dtype_index = get_dtype_index(self.dtype) |
|
1329 | dtype_index = get_dtype_index(self.dtype) | |
1330 | dtype_width = get_dtype_width(dtype_index) |
|
1330 | dtype_width = get_dtype_width(dtype_index) | |
1331 |
|
1331 | |||
1332 | return dtype_width |
|
1332 | return dtype_width | |
1333 |
|
1333 | |||
1334 | def getProcessFlags(self): |
|
1334 | def getProcessFlags(self): | |
1335 |
|
1335 | |||
1336 | processFlags = 0 |
|
1336 | processFlags = 0 | |
1337 |
|
1337 | |||
1338 | dtype_index = get_dtype_index(self.dtype) |
|
1338 | dtype_index = get_dtype_index(self.dtype) | |
1339 | procflag_dtype = get_procflag_dtype(dtype_index) |
|
1339 | procflag_dtype = get_procflag_dtype(dtype_index) | |
1340 |
|
1340 | |||
1341 | processFlags += procflag_dtype |
|
1341 | processFlags += procflag_dtype | |
1342 |
|
1342 | |||
1343 | if self.dataOut.flagDecodeData: |
|
1343 | if self.dataOut.flagDecodeData: | |
1344 | processFlags += PROCFLAG.DECODE_DATA |
|
1344 | processFlags += PROCFLAG.DECODE_DATA | |
1345 |
|
1345 | |||
1346 | if self.dataOut.flagDeflipData: |
|
1346 | if self.dataOut.flagDeflipData: | |
1347 | processFlags += PROCFLAG.DEFLIP_DATA |
|
1347 | processFlags += PROCFLAG.DEFLIP_DATA | |
1348 |
|
1348 | |||
1349 | if self.dataOut.code is not None: |
|
1349 | if self.dataOut.code is not None: | |
1350 | processFlags += PROCFLAG.DEFINE_PROCESS_CODE |
|
1350 | processFlags += PROCFLAG.DEFINE_PROCESS_CODE | |
1351 |
|
1351 | |||
1352 | if self.dataOut.nCohInt > 1: |
|
1352 | if self.dataOut.nCohInt > 1: | |
1353 | processFlags += PROCFLAG.COHERENT_INTEGRATION |
|
1353 | processFlags += PROCFLAG.COHERENT_INTEGRATION | |
1354 |
|
1354 | |||
1355 | if self.dataOut.type == "Spectra": |
|
1355 | if self.dataOut.type == "Spectra": | |
1356 | if self.dataOut.nIncohInt > 1: |
|
1356 | if self.dataOut.nIncohInt > 1: | |
1357 | processFlags += PROCFLAG.INCOHERENT_INTEGRATION |
|
1357 | processFlags += PROCFLAG.INCOHERENT_INTEGRATION | |
1358 |
|
1358 | |||
1359 | if self.dataOut.data_dc is not None: |
|
1359 | if self.dataOut.data_dc is not None: | |
1360 | processFlags += PROCFLAG.SAVE_CHANNELS_DC |
|
1360 | processFlags += PROCFLAG.SAVE_CHANNELS_DC | |
1361 |
|
1361 | |||
1362 | if self.dataOut.flagShiftFFT: |
|
1362 | if self.dataOut.flagShiftFFT: | |
1363 | processFlags += PROCFLAG.SHIFT_FFT_DATA |
|
1363 | processFlags += PROCFLAG.SHIFT_FFT_DATA | |
1364 |
|
1364 | |||
1365 | return processFlags |
|
1365 | return processFlags | |
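        # The PROCFLAG values are bit flags; adding them is equivalent to OR-ing
        # them as long as each flag is added at most once, which the checks above
        # guarantee.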
1366 |
|
1366 | |||
1367 | def setBasicHeader(self): |
|
1367 | def setBasicHeader(self): | |
1368 |
|
1368 | |||
1369 | self.basicHeaderObj.size = self.basicHeaderSize # bytes |
|
1369 | self.basicHeaderObj.size = self.basicHeaderSize # bytes | |
1370 | self.basicHeaderObj.version = self.versionFile |
|
1370 | self.basicHeaderObj.version = self.versionFile | |
1371 | self.basicHeaderObj.dataBlock = self.nTotalBlocks |
|
1371 | self.basicHeaderObj.dataBlock = self.nTotalBlocks | |
1372 | utc = numpy.floor(self.dataOut.utctime) |
|
1372 | utc = numpy.floor(self.dataOut.utctime) | |
1373 | milisecond = (self.dataOut.utctime - utc) * 1000.0 |
|
1373 | milisecond = (self.dataOut.utctime - utc) * 1000.0 | |
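        # utctime is split into its integer part (utc, whole seconds) and its
        # fractional part expressed in milliseconds, matching the utc/miliSecond
        # fields of the basic header.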
1374 | self.basicHeaderObj.utc = utc |
|
1374 | self.basicHeaderObj.utc = utc | |
1375 | self.basicHeaderObj.miliSecond = milisecond |
|
1375 | self.basicHeaderObj.miliSecond = milisecond | |
1376 | self.basicHeaderObj.timeZone = self.dataOut.timeZone |
|
1376 | self.basicHeaderObj.timeZone = self.dataOut.timeZone | |
1377 | self.basicHeaderObj.dstFlag = self.dataOut.dstFlag |
|
1377 | self.basicHeaderObj.dstFlag = self.dataOut.dstFlag | |
1378 | self.basicHeaderObj.errorCount = self.dataOut.errorCount |
|
1378 | self.basicHeaderObj.errorCount = self.dataOut.errorCount | |
1379 |
|
1379 | |||
1380 | def setFirstHeader(self): |
|
1380 | def setFirstHeader(self): | |
1381 | """ |
|
1381 | """ | |
1382 | Gets a copy of the First Header |

1382 | Gets a copy of the First Header | |
1383 |
|
1383 | |||
1384 | Affected: |
|
1384 | Affected: | |
1385 |
|
1385 | |||
1386 | self.basicHeaderObj |
|
1386 | self.basicHeaderObj | |
1387 | self.systemHeaderObj |
|
1387 | self.systemHeaderObj | |
1388 | self.radarControllerHeaderObj |
|
1388 | self.radarControllerHeaderObj | |
1389 | self.processingHeaderObj |

1389 | self.processingHeaderObj | |
1390 |
|
1390 | |||
1391 | Return: |
|
1391 | Return: | |
1392 | None |
|
1392 | None | |
1393 | """ |
|
1393 | """ | |
1394 |
|
1394 | |||
1395 | raise NotImplementedError |
|
1395 | raise NotImplementedError | |
1396 |
|
1396 | |||
1397 | def __writeFirstHeader(self): |
|
1397 | def __writeFirstHeader(self): | |
1398 | """ |
|
1398 | """ | |
1399 | Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader) |

1399 | Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader) | |
1400 |
|
1400 | |||
1401 | Affected: |
|
1401 | Affected: | |
1402 | __dataType |
|
1402 | __dataType | |
1403 |
|
1403 | |||
1404 | Return: |
|
1404 | Return: | |
1405 | None |
|
1405 | None | |
1406 | """ |
|
1406 | """ | |
1407 |
|
1407 | |||
1408 | # COMPUTE PARAMETERS |

1408 | # COMPUTE PARAMETERS | |
1409 |
|
1409 | |||
1410 | sizeLongHeader = self.systemHeaderObj.size + \ |
|
1410 | sizeLongHeader = self.systemHeaderObj.size + \ | |
1411 | self.radarControllerHeaderObj.size + self.processingHeaderObj.size |
|
1411 | self.radarControllerHeaderObj.size + self.processingHeaderObj.size | |
1412 | self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader |
|
1412 | self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader | |
1413 |
|
1413 | |||
1414 | self.basicHeaderObj.write(self.fp) |
|
1414 | self.basicHeaderObj.write(self.fp) | |
1415 | self.systemHeaderObj.write(self.fp) |
|
1415 | self.systemHeaderObj.write(self.fp) | |
1416 | self.radarControllerHeaderObj.write(self.fp) |
|
1416 | self.radarControllerHeaderObj.write(self.fp) | |
1417 | self.processingHeaderObj.write(self.fp) |
|
1417 | self.processingHeaderObj.write(self.fp) | |
1418 |
|
1418 | |||
1419 | def __setNewBlock(self): |
|
1419 | def __setNewBlock(self): | |
1420 | """ |
|
1420 | """ | |
1421 | If this is a new file, writes the First Header; otherwise writes only the Basic Header |

1421 | If this is a new file, writes the First Header; otherwise writes only the Basic Header | |
1422 |
|
1422 | |||
1423 | Return: |
|
1423 | Return: | |
1424 | 0 : if nothing could be written |

1424 | 0 : if nothing could be written | |
1425 | 1 : if the Basic or the First Header was written |

1425 | 1 : if the Basic or the First Header was written | |
1426 | """ |
|
1426 | """ | |
1427 | if self.fp == None: |
|
1427 | if self.fp == None: | |
1428 | self.setNextFile() |
|
1428 | self.setNextFile() | |
1429 |
|
1429 | |||
1430 | if self.flagIsNewFile: |
|
1430 | if self.flagIsNewFile: | |
1431 | return 1 |
|
1431 | return 1 | |
1432 |
|
1432 | |||
1433 | if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: |
|
1433 | if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: | |
1434 | self.basicHeaderObj.write(self.fp) |
|
1434 | self.basicHeaderObj.write(self.fp) | |
1435 | return 1 |
|
1435 | return 1 | |
1436 |
|
1436 | |||
1437 | if not(self.setNextFile()): |
|
1437 | if not(self.setNextFile()): | |
1438 | return 0 |
|
1438 | return 0 | |
1439 |
|
1439 | |||
1440 | return 1 |
|
1440 | return 1 | |
1441 |
|
1441 | |||
1442 | def writeNextBlock(self): |
|
1442 | def writeNextBlock(self): | |
1443 | """ |
|
1443 | """ | |
1444 | Selects the next block of data and writes it to a file |

1444 | Selects the next block of data and writes it to a file | |
1445 |
|
1445 | |||
1446 | Return: |
|
1446 | Return: | |
1447 | 0 : if the data block could not be written |

1447 | 0 : if the data block could not be written | |
1448 | 1 : if the data block was written |

1448 | 1 : if the data block was written | |
1449 | """ |
|
1449 | """ | |
1450 | if not(self.__setNewBlock()): |
|
1450 | if not(self.__setNewBlock()): | |
1451 | return 0 |
|
1451 | return 0 | |
1452 |
|
1452 | |||
1453 | self.writeBlock() |
|
1453 | self.writeBlock() | |
1454 |
|
1454 | |||
1455 | print("[Writing] Block No. %d/%d" % (self.blockIndex, |
|
1455 | print("[Writing] Block No. %d/%d" % (self.blockIndex, | |
1456 | self.processingHeaderObj.dataBlocksPerFile)) |
|
1456 | self.processingHeaderObj.dataBlocksPerFile)) | |
1457 |
|
1457 | |||
1458 | return 1 |
|
1458 | return 1 | |
1459 |
|
1459 | |||
1460 | def setNextFile(self): |
|
1460 | def setNextFile(self): | |
1461 | """Determina el siguiente file que sera escrito |
|
1461 | """Determina el siguiente file que sera escrito | |
1462 |
|
1462 | |||
1463 | Affected: |
|
1463 | Affected: | |
1464 | self.filename |
|
1464 | self.filename | |
1465 | self.subfolder |
|
1465 | self.subfolder | |
1466 | self.fp |
|
1466 | self.fp | |
1467 | self.setFile |
|
1467 | self.setFile | |
1468 | self.flagIsNewFile |
|
1468 | self.flagIsNewFile | |
1469 |
|
1469 | |||
1470 | Return: |
|
1470 | Return: | |
1471 | 0 : if the file cannot be written |

1471 | 0 : if the file cannot be written | |
1472 | 1 : if the file is ready to be written |

1472 | 1 : if the file is ready to be written | |
1473 | """ |
|
1473 | """ | |
1474 | ext = self.ext |
|
1474 | ext = self.ext | |
1475 | path = self.path |
|
1475 | path = self.path | |
1476 |
|
1476 | |||
1477 | if self.fp != None: |
|
1477 | if self.fp != None: | |
1478 | self.fp.close() |
|
1478 | self.fp.close() | |
1479 |
|
1479 | |||
1480 | timeTuple = time.localtime(self.dataOut.utctime) |
|
1480 | timeTuple = time.localtime(self.dataOut.utctime) | |
1481 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday) |
|
1481 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday) | |
1482 |
|
1482 | |||
1483 | fullpath = os.path.join(path, subfolder) |
|
1483 | fullpath = os.path.join(path, subfolder) | |
1484 | setFile = self.setFile |
|
1484 | setFile = self.setFile | |
1485 |
|
1485 | |||
1486 | if not(os.path.exists(fullpath)): |
|
1486 | if not(os.path.exists(fullpath)): | |
1487 | os.makedirs(fullpath) |
|
1487 | os.makedirs(fullpath) | |
1488 | setFile = -1 # initialize the set counter |

1488 | setFile = -1 # initialize the set counter | |
1489 | else: |
|
1489 | else: | |
1490 | filesList = os.listdir(fullpath) |
|
1490 | filesList = os.listdir(fullpath) | |
1491 | if len(filesList) > 0: |
|
1491 | if len(filesList) > 0: | |
1492 | filesList = sorted(filesList, key=str.lower) |
|
1492 | filesList = sorted(filesList, key=str.lower) | |
1493 | filen = filesList[-1] |
|
1493 | filen = filesList[-1] | |
1494 | # the filename must have the following format |

1494 | # the filename must have the following format | |
1495 | # 0 1234 567 89A BCDE (hex) |
|
1495 | # 0 1234 567 89A BCDE (hex) | |
1496 | # x YYYY DDD SSS .ext |
|
1496 | # x YYYY DDD SSS .ext | |
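                # e.g. a (hypothetical) file named 'P2014183012.pdata' would encode
                # optchar 'P', year 2014, day-of-year 183 and set number 012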
1497 | if isNumber(filen[8:11]): |
|
1497 | if isNumber(filen[8:11]): | |
1498 | # initialize the set counter to the set number of the last file |

1498 | # initialize the set counter to the set number of the last file | |
1499 | setFile = int(filen[8:11]) |
|
1499 | setFile = int(filen[8:11]) | |
1500 | else: |
|
1500 | else: | |
1501 | setFile = -1 |
|
1501 | setFile = -1 | |
1502 | else: |
|
1502 | else: | |
1503 | setFile = -1 # initialize the set counter |

1503 | setFile = -1 # initialize the set counter | |
1504 |
|
1504 | |||
1505 | setFile += 1 |
|
1505 | setFile += 1 | |
1506 |
|
1506 | |||
1507 | # If this is a new day it resets some values |
|
1507 | # If this is a new day it resets some values | |
1508 | if self.dataOut.datatime.date() > self.fileDate: |
|
1508 | if self.dataOut.datatime.date() > self.fileDate: | |
1509 | setFile = 0 |
|
1509 | setFile = 0 | |
1510 | self.nTotalBlocks = 0 |
|
1510 | self.nTotalBlocks = 0 | |
1511 |
|
1511 | |||
1512 | filen = '{}{:04d}{:03d}{:03d}{}'.format( |
|
1512 | filen = '{}{:04d}{:03d}{:03d}{}'.format( | |
1513 | self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext) |
|
1513 | self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext) | |
1514 |
|
1514 | |||
1515 | filename = os.path.join(path, subfolder, filen) |
|
1515 | filename = os.path.join(path, subfolder, filen) | |
1516 |
|
1516 | |||
1517 | fp = open(filename, 'wb') |
|
1517 | fp = open(filename, 'wb') | |
1518 |
|
1518 | |||
1519 | self.blockIndex = 0 |
|
1519 | self.blockIndex = 0 | |
1520 | self.filename = filename |
|
1520 | self.filename = filename | |
1521 | self.subfolder = subfolder |
|
1521 | self.subfolder = subfolder | |
1522 | self.fp = fp |
|
1522 | self.fp = fp | |
1523 | self.setFile = setFile |
|
1523 | self.setFile = setFile | |
1524 | self.flagIsNewFile = 1 |
|
1524 | self.flagIsNewFile = 1 | |
1525 | self.fileDate = self.dataOut.datatime.date() |
|
1525 | self.fileDate = self.dataOut.datatime.date() | |
1526 | self.setFirstHeader() |
|
1526 | self.setFirstHeader() | |
1527 |
|
1527 | |||
1528 | print('[Writing] Opening file: %s' % self.filename) |
|
1528 | print('[Writing] Opening file: %s' % self.filename) | |
1529 |
|
1529 | |||
1530 | self.__writeFirstHeader() |
|
1530 | self.__writeFirstHeader() | |
1531 |
|
1531 | |||
1532 | return 1 |
|
1532 | return 1 | |
1533 |
|
1533 | |||
1534 | def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4): |
|
1534 | def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4): | |
1535 | """ |
|
1535 | """ | |
1536 | Sets the format in which the data will be saved and writes the First Header |

1536 | Sets the format in which the data will be saved and writes the First Header | |
1537 |
|
1537 | |||
1538 | Inputs: |
|
1538 | Inputs: | |
1539 | path : directory where data will be saved |
|
1539 | path : directory where data will be saved | |
1540 | profilesPerBlock : number of profiles per block |
|
1540 | profilesPerBlock : number of profiles per block | |
1541 | set : initial file set |
|
1541 | set : initial file set | |
1542 | datatype : An integer number that defines data type: |
|
1542 | datatype : An integer number that defines data type: | |
1543 | 0 : int8 (1 byte) |
|
1543 | 0 : int8 (1 byte) | |
1544 | 1 : int16 (2 bytes) |
|
1544 | 1 : int16 (2 bytes) | |
1545 | 2 : int32 (4 bytes) |
|
1545 | 2 : int32 (4 bytes) | |
1546 | 3 : int64 (8 bytes) |
|
1546 | 3 : int64 (8 bytes) | |
1547 | 4 : float32 (4 bytes) |
|
1547 | 4 : float32 (4 bytes) | |
1548 | 5 : double64 (8 bytes) |
|
1548 | 5 : double64 (8 bytes) | |
1549 |
|
1549 | |||
1550 | Return: |
|
1550 | Return: | |
1551 | 0 : if the setup was not successful |

1551 | 0 : if the setup was not successful | |
1552 | 1 : if the setup was successful |

1552 | 1 : if the setup was successful | |
1553 | """ |
|
1553 | """ | |
1554 |
|
1554 | |||
1555 | if ext == None: |
|
1555 | if ext == None: | |
1556 | ext = self.ext |
|
1556 | ext = self.ext | |
1557 |
|
1557 | |||
1558 | self.ext = ext.lower() |
|
1558 | self.ext = ext.lower() | |
1559 |
|
1559 | |||
1560 | self.path = path |
|
1560 | self.path = path | |
1561 |
|
1561 | |||
1562 | if set is None: |
|
1562 | if set is None: | |
1563 | self.setFile = -1 |
|
1563 | self.setFile = -1 | |
1564 | else: |
|
1564 | else: | |
1565 | self.setFile = set - 1 |
|
1565 | self.setFile = set - 1 | |
1566 |
|
1566 | |||
1567 | self.blocksPerFile = blocksPerFile |
|
1567 | self.blocksPerFile = blocksPerFile | |
1568 | self.profilesPerBlock = profilesPerBlock |
|
1568 | self.profilesPerBlock = profilesPerBlock | |
1569 | self.dataOut = dataOut |
|
1569 | self.dataOut = dataOut | |
1570 | self.fileDate = self.dataOut.datatime.date() |
|
1570 | self.fileDate = self.dataOut.datatime.date() | |
1571 | self.dtype = self.dataOut.dtype |
|
1571 | self.dtype = self.dataOut.dtype | |
1572 |
|
1572 | |||
1573 | if datatype is not None: |
|
1573 | if datatype is not None: | |
1574 | self.dtype = get_numpy_dtype(datatype) |
|
1574 | self.dtype = get_numpy_dtype(datatype) | |
1575 |
|
1575 | |||
1576 | if not(self.setNextFile()): |
|
1576 | if not(self.setNextFile()): | |
1577 | print("[Writing] There isn't a next file") |
|
1577 | print("[Writing] There isn't a next file") | |
1578 | return 0 |
|
1578 | return 0 | |
1579 |
|
1579 | |||
1580 | self.setBlockDimension() |
|
1580 | self.setBlockDimension() | |
1581 |
|
1581 | |||
1582 | return 1 |
|
1582 | return 1 | |
1583 |
|
1583 | |||
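    # Minimal usage sketch (illustrative only: the concrete writer class, output
    # path and parameter values below are assumptions, not taken from this file):
    #
    #     writerObj = SomeJRODataWriterSubclass()
    #     writerObj.run(dataOut, path='/data/out', blocksPerFile=100,
    #                   profilesPerBlock=64, datatype=4)   # datatype 4 -> float32
    #
    # run() configures the writer on the first call (setup + first file/header)
    # and calls putData() on every call after that.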
1584 | def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs): |
|
1584 | def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, flagManualHeader=False, **kwargs): | |
|
1585 | self.flagManualHeader = flagManualHeader | |||
1585 |
|
1586 | |||
1586 | if not(self.isConfig): |
|
1587 | if not(self.isConfig): | |
1587 |
|
1588 | |||
1588 | self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, |
|
1589 | self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, | |
1589 | set=set, ext=ext, datatype=datatype, **kwargs) |
|
1590 | set=set, ext=ext, datatype=datatype, **kwargs) | |
1590 | self.isConfig = True |
|
1591 | self.isConfig = True | |
1591 |
|
1592 | |||
1592 | self.dataOut = dataOut |
|
1593 | self.dataOut = dataOut | |
1593 | self.putData() |
|
1594 | self.putData() | |
1594 | return self.dataOut |
|
1595 | return self.dataOut | |
1595 |
|
1596 | |||
1596 | @MPDecorator |
|
1597 | @MPDecorator | |
1597 | class printInfo(Operation): |
|
1598 | class printInfo(Operation): | |
1598 |
|
1599 | |||
1599 | def __init__(self): |
|
1600 | def __init__(self): | |
1600 |
|
1601 | |||
1601 | Operation.__init__(self) |
|
1602 | Operation.__init__(self) | |
1602 | self.__printInfo = True |
|
1603 | self.__printInfo = True | |
1603 |
|
1604 | |||
1604 | def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']): |
|
1605 | def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']): | |
1605 | if self.__printInfo == False: |
|
1606 | if self.__printInfo == False: | |
1606 | return |
|
1607 | return | |
1607 |
|
1608 | |||
1608 | for header in headers: |
|
1609 | for header in headers: | |
1609 | if hasattr(dataOut, header): |
|
1610 | if hasattr(dataOut, header): | |
1610 | obj = getattr(dataOut, header) |
|
1611 | obj = getattr(dataOut, header) | |
1611 | if hasattr(obj, 'printInfo'): |
|
1612 | if hasattr(obj, 'printInfo'): | |
1612 | obj.printInfo() |
|
1613 | obj.printInfo() | |
1613 | else: |
|
1614 | else: | |
1614 | print(obj) |
|
1615 | print(obj) | |
1615 | else: |
|
1616 | else: | |
1616 | log.warning('Header {} Not found in object'.format(header)) |
|
1617 | log.warning('Header {} Not found in object'.format(header)) | |
1617 |
|
1618 | |||
1618 | self.__printInfo = False |
|
1619 | self.__printInfo = False |
@@ -1,527 +1,537 | |||||
1 | ''' |
|
1 | ''' | |
2 | Created on Jul 2, 2014 |
|
2 | Created on Jul 2, 2014 | |
3 |
|
3 | |||
4 | @author: roj-idl71 |
|
4 | @author: roj-idl71 | |
5 | ''' |
|
5 | ''' | |
6 | import numpy |
|
6 | import numpy | |
7 |
|
7 | |||
8 | from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter |
|
8 | from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter | |
9 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator |
|
9 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator | |
10 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader |
|
10 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
11 | from schainpy.model.data.jrodata import Spectra |
|
11 | from schainpy.model.data.jrodata import Spectra | |
12 | from schainpy.utils import log |
|
12 | from schainpy.utils import log | |
13 |
|
13 | |||
14 |
|
14 | |||
15 | class SpectraReader(JRODataReader, ProcessingUnit): |
|
15 | class SpectraReader(JRODataReader, ProcessingUnit): | |
16 | """ |
|
16 | """ | |
17 | This class reads spectra data from processed files (.pdata). Reading |

17 | This class reads spectra data from processed files (.pdata). Reading | |
18 | is always done in blocks. The data read (3-dimensional arrays) |

18 | is always done in blocks. The data read (3-dimensional arrays) | |
19 | is stored in three buffers: the Self Spectra, the Cross Spectra and the DC Channel. |

19 | is stored in three buffers: the Self Spectra, the Cross Spectra and the DC Channel. | |
20 |
|
20 | |||
21 | pairs of equal channels * heights * profiles (Self Spectra) |

21 | pairs of equal channels * heights * profiles (Self Spectra) | |
22 | pairs of different channels * heights * profiles (Cross Spectra) |

22 | pairs of different channels * heights * profiles (Cross Spectra) | |
23 | channels * heights (DC Channels) |

23 | channels * heights (DC Channels) | |
24 |
|
24 | |||
25 | This class holds instances (objects) of the BasicHeader, SystemHeader, |

25 | This class holds instances (objects) of the BasicHeader, SystemHeader, | |
26 | RadarControllerHeader and Spectra classes. The first three are used to store the |

26 | RadarControllerHeader and Spectra classes. The first three are used to store the | |
27 | data header information (metadata), and the fourth (Spectra) to get and store a block of |

27 | data header information (metadata), and the fourth (Spectra) to get and store a block of | |
28 | data from the "buffer" every time the "getData" method is called. |

28 | data from the "buffer" every time the "getData" method is called. | |
29 |
|
29 | |||
30 | Example: |
|
30 | Example: | |
31 | dpath = "/home/myuser/data" |
|
31 | dpath = "/home/myuser/data" | |
32 |
|
32 | |||
33 | startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) |
|
33 | startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) | |
34 |
|
34 | |||
35 | endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) |
|
35 | endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) | |
36 |
|
36 | |||
37 | readerObj = SpectraReader() |
|
37 | readerObj = SpectraReader() | |
38 |
|
38 | |||
39 | readerObj.setup(dpath, startTime, endTime) |
|
39 | readerObj.setup(dpath, startTime, endTime) | |
40 |
|
40 | |||
41 | while(True): |
|
41 | while(True): | |
42 |
|
42 | |||
43 | readerObj.getData() |
|
43 | readerObj.getData() | |
44 |
|
44 | |||
45 | print(readerObj.data_spc) |

45 | print(readerObj.data_spc) | |
46 |
|
46 | |||
47 | print(readerObj.data_cspc) |

47 | print(readerObj.data_cspc) | |
48 |
|
48 | |||
49 | print(readerObj.data_dc) |

49 | print(readerObj.data_dc) | |
50 |
|
50 | |||
51 | if readerObj.flagNoMoreFiles: |
|
51 | if readerObj.flagNoMoreFiles: | |
52 | break |
|
52 | break | |
53 |
|
53 | |||
54 | """ |
|
54 | """ | |
55 |
|
55 | |||
56 | def __init__(self):#, **kwargs): |
|
56 | def __init__(self):#, **kwargs): | |
57 | """ |
|
57 | """ | |
58 | Initializer of the SpectraReader class for reading spectra data. |

58 | Initializer of the SpectraReader class for reading spectra data. | |
59 |
|
59 | |||
60 | Inputs: |
|
60 | Inputs: | |
61 | dataOut : Object of the Spectra class. This object will be used to |

61 | dataOut : Object of the Spectra class. This object will be used to | |
62 | store a data profile every time a request is made |

62 | store a data profile every time a request is made | |
63 | (getData). The profile will be obtained from the data buffer; |

63 | (getData). The profile will be obtained from the data buffer; | |
64 | if the buffer is empty, a new read of a |

64 | if the buffer is empty, a new read of a | |
65 | data block will be performed. |

65 | data block will be performed. | |
66 | If this parameter is not passed, one will be created internally. |

66 | If this parameter is not passed, one will be created internally. | |
67 |
|
67 | |||
68 | Affected: |
|
68 | Affected: | |
69 | self.dataOut |
|
69 | self.dataOut | |
70 |
|
70 | |||
71 | Return : None |
|
71 | Return : None | |
72 | """ |
|
72 | """ | |
73 |
|
73 | |||
74 | ProcessingUnit.__init__(self) |
|
74 | ProcessingUnit.__init__(self) | |
75 |
|
75 | |||
76 | self.pts2read_SelfSpectra = 0 |
|
76 | self.pts2read_SelfSpectra = 0 | |
77 | self.pts2read_CrossSpectra = 0 |
|
77 | self.pts2read_CrossSpectra = 0 | |
78 | self.pts2read_DCchannels = 0 |
|
78 | self.pts2read_DCchannels = 0 | |
79 | self.ext = ".pdata" |
|
79 | self.ext = ".pdata" | |
80 | self.optchar = "P" |
|
80 | self.optchar = "P" | |
81 | self.basicHeaderObj = BasicHeader(LOCALTIME) |
|
81 | self.basicHeaderObj = BasicHeader(LOCALTIME) | |
82 | self.systemHeaderObj = SystemHeader() |
|
82 | self.systemHeaderObj = SystemHeader() | |
83 | self.radarControllerHeaderObj = RadarControllerHeader() |
|
83 | self.radarControllerHeaderObj = RadarControllerHeader() | |
84 | self.processingHeaderObj = ProcessingHeader() |
|
84 | self.processingHeaderObj = ProcessingHeader() | |
85 | self.lastUTTime = 0 |
|
85 | self.lastUTTime = 0 | |
86 | self.maxTimeStep = 30 |
|
86 | self.maxTimeStep = 30 | |
87 | self.dataOut = Spectra() |
|
87 | self.dataOut = Spectra() | |
88 | self.profileIndex = 1 |
|
88 | self.profileIndex = 1 | |
89 | self.nRdChannels = None |
|
89 | self.nRdChannels = None | |
90 | self.nRdPairs = None |
|
90 | self.nRdPairs = None | |
91 | self.rdPairList = [] |
|
91 | self.rdPairList = [] | |
92 |
|
92 | |||
93 | def createObjByDefault(self): |
|
93 | def createObjByDefault(self): | |
94 |
|
94 | |||
95 | dataObj = Spectra() |
|
95 | dataObj = Spectra() | |
96 |
|
96 | |||
97 | return dataObj |
|
97 | return dataObj | |
98 |
|
98 | |||
99 | def __hasNotDataInBuffer(self): |
|
99 | def __hasNotDataInBuffer(self): | |
100 | return 1 |
|
100 | return 1 | |
101 |
|
101 | |||
102 |
|
102 | |||
103 | def getBlockDimension(self): |
|
103 | def getBlockDimension(self): | |
104 | """ |
|
104 | """ | |
105 | Gets the number of points to read per data block |

105 | Gets the number of points to read per data block | |
106 |
|
106 | |||
107 | Affected: |
|
107 | Affected: | |
108 | self.nRdChannels |
|
108 | self.nRdChannels | |
109 | self.nRdPairs |
|
109 | self.nRdPairs | |
110 | self.pts2read_SelfSpectra |
|
110 | self.pts2read_SelfSpectra | |
111 | self.pts2read_CrossSpectra |
|
111 | self.pts2read_CrossSpectra | |
112 | self.pts2read_DCchannels |
|
112 | self.pts2read_DCchannels | |
113 | self.blocksize |
|
113 | self.blocksize | |
114 | self.dataOut.nChannels |
|
114 | self.dataOut.nChannels | |
115 | self.dataOut.nPairs |
|
115 | self.dataOut.nPairs | |
116 |
|
116 | |||
117 | Return: |
|
117 | Return: | |
118 | None |
|
118 | None | |
119 | """ |
|
119 | """ | |
120 | self.nRdChannels = 0 |
|
120 | self.nRdChannels = 0 | |
121 | self.nRdPairs = 0 |
|
121 | self.nRdPairs = 0 | |
122 | self.rdPairList = [] |
|
122 | self.rdPairList = [] | |
123 |
|
123 | |||
124 | for i in range(0, self.processingHeaderObj.totalSpectra*2, 2): |
|
124 | for i in range(0, self.processingHeaderObj.totalSpectra*2, 2): | |
125 | if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]: |
|
125 | if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]: | |
126 | self.nRdChannels = self.nRdChannels + 1 # pair of equal channels |

126 | self.nRdChannels = self.nRdChannels + 1 # pair of equal channels | |
127 | else: |
|
127 | else: | |
128 | self.nRdPairs = self.nRdPairs + 1 # pair of different channels |

128 | self.nRdPairs = self.nRdPairs + 1 # pair of different channels | |
129 | self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1])) |
|
129 | self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1])) | |
130 |
|
130 | |||
131 | pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock |
|
131 | pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock | |
132 |
|
132 | |||
133 | self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read) |
|
133 | self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read) | |
134 | self.blocksize = self.pts2read_SelfSpectra |
|
134 | self.blocksize = self.pts2read_SelfSpectra | |
135 |
|
135 | |||
136 | if self.processingHeaderObj.flag_cspc: |
|
136 | if self.processingHeaderObj.flag_cspc: | |
137 | self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read) |
|
137 | self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read) | |
138 | self.blocksize += self.pts2read_CrossSpectra |
|
138 | self.blocksize += self.pts2read_CrossSpectra | |
139 |
|
139 | |||
140 | if self.processingHeaderObj.flag_dc: |
|
140 | if self.processingHeaderObj.flag_dc: | |
141 | self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights) |
|
141 | self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights) | |
142 | self.blocksize += self.pts2read_DCchannels |
|
142 | self.blocksize += self.pts2read_DCchannels | |
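        # blocksize is the total number of values to read per block:
        # nRdChannels * nHeights * profilesPerBlock for the self spectra, plus
        # nRdPairs * nHeights * profilesPerBlock when cross spectra are present
        # (flag_cspc), plus nChannels * nHeights when DC channels are stored
        # (flag_dc).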
143 |
|
143 | |||
144 | def readBlock(self): |
|
144 | def readBlock(self): | |
145 | """ |
|
145 | """ | |
146 | Reads the data block from the current position of the file pointer |

146 | Reads the data block from the current position of the file pointer | |
147 | (self.fp) and updates all the parameters related to the data block |

147 | (self.fp) and updates all the parameters related to the data block | |
148 | (metadata + data). The data read is stored in the buffer and the buffer counter |

148 | (metadata + data). The data read is stored in the buffer and the buffer counter | |
149 | is set to 0 |

149 | is set to 0 | |
150 |
|
150 | |||
151 | Return: None |
|
151 | Return: None | |
152 |
|
152 | |||
153 | Affected variables: |
|
153 | Affected variables: | |
154 |
|
154 | |||
155 | self.flagIsNewFile |
|
155 | self.flagIsNewFile | |
156 | self.flagIsNewBlock |
|
156 | self.flagIsNewBlock | |
157 | self.nTotalBlocks |
|
157 | self.nTotalBlocks | |
158 | self.data_spc |
|
158 | self.data_spc | |
159 | self.data_cspc |
|
159 | self.data_cspc | |
160 | self.data_dc |
|
160 | self.data_dc | |
161 |
|
161 | |||
162 | Exceptions: |
|
162 | Exceptions: | |
163 | If the block read is not a valid block |
|
163 | If the block read is not a valid block | |
164 | """ |
|
164 | """ | |
165 |
|
165 | |||
166 | fpointer = self.fp.tell() |
|
166 | fpointer = self.fp.tell() | |
167 |
|
167 | |||
168 | spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra ) |
|
168 | spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra ) | |
169 | spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array |
|
169 | spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array | |
170 |
|
170 | |||
171 | if self.processingHeaderObj.flag_cspc: |
|
171 | if self.processingHeaderObj.flag_cspc: | |
172 | cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra ) |
|
172 | cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra ) | |
173 | cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array |
|
173 | cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array | |
174 |
|
174 | |||
175 | if self.processingHeaderObj.flag_dc: |
|
175 | if self.processingHeaderObj.flag_dc: | |
176 | dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) ) |
|
176 | dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) ) | |
177 | dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array |
|
177 | dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array | |
178 |
|
178 | |||
179 | if not self.processingHeaderObj.shif_fft: |
|
179 | if not self.processingHeaderObj.shif_fft: | |
180 | #shift to the right along axis 2 by the required number of positions |
|
180 | #shift to the right along axis 2 by the required number of positions | |
181 | shift = int(self.processingHeaderObj.profilesPerBlock/2) |
|
181 | shift = int(self.processingHeaderObj.profilesPerBlock/2) | |
182 | spc = numpy.roll( spc, shift , axis=2 ) |
|
182 | spc = numpy.roll( spc, shift , axis=2 ) | |
183 |
|
183 | |||
184 | if self.processingHeaderObj.flag_cspc: |
|
184 | if self.processingHeaderObj.flag_cspc: | |
185 | #shift to the right along axis 2 by the required number of positions |
|
185 | #shift to the right along axis 2 by the required number of positions | |
186 | cspc = numpy.roll( cspc, shift, axis=2 ) |
|
186 | cspc = numpy.roll( cspc, shift, axis=2 ) | |
187 |
|
187 | |||
188 | #Dimensions : nChannels, nProfiles, nSamples |
|
188 | #Dimensions : nChannels, nProfiles, nSamples | |
189 | spc = numpy.transpose( spc, (0,2,1) ) |
|
189 | spc = numpy.transpose( spc, (0,2,1) ) | |
190 | self.data_spc = spc |
|
190 | self.data_spc = spc | |
191 |
|
191 | |||
192 | if self.processingHeaderObj.flag_cspc: |
|
192 | if self.processingHeaderObj.flag_cspc: | |
193 | cspc = numpy.transpose( cspc, (0,2,1) ) |
|
193 | cspc = numpy.transpose( cspc, (0,2,1) ) | |
194 | self.data_cspc = cspc['real'] + cspc['imag']*1j |
|
194 | self.data_cspc = cspc['real'] + cspc['imag']*1j | |
195 | else: |
|
195 | else: | |
196 | self.data_cspc = None |
|
196 | self.data_cspc = None | |
197 |
|
197 | |||
198 | if self.processingHeaderObj.flag_dc: |
|
198 | if self.processingHeaderObj.flag_dc: | |
199 | self.data_dc = dc['real'] + dc['imag']*1j |
|
199 | self.data_dc = dc['real'] + dc['imag']*1j | |
200 | else: |
|
200 | else: | |
201 | self.data_dc = None |
|
201 | self.data_dc = None | |
202 |
|
202 | |||
203 | self.flagIsNewFile = 0 |
|
203 | self.flagIsNewFile = 0 | |
204 | self.flagIsNewBlock = 1 |
|
204 | self.flagIsNewBlock = 1 | |
205 |
|
205 | |||
206 | self.nTotalBlocks += 1 |
|
206 | self.nTotalBlocks += 1 | |
207 | self.nReadBlocks += 1 |
|
207 | self.nReadBlocks += 1 | |
208 |
|
208 | |||
209 | return 1 |
|
209 | return 1 | |
210 |
|
210 | |||
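The roll/transpose sequence in readBlock is equivalent to an FFT shift along the profile axis followed by a reorder to (channels, profiles, heights). A small self-contained check with random data (the array sizes below are arbitrary, chosen only for illustration):

    import numpy

    nChannels, nHeights, nProfiles = 2, 10, 8
    spc = numpy.random.rand(nChannels, nHeights, nProfiles)    # shape as read from disk

    shift = nProfiles // 2
    rolled = numpy.roll(spc, shift, axis=2)                    # applied when shif_fft is unset
    assert numpy.array_equal(rolled, numpy.fft.fftshift(spc, axes=2))

    spc_out = numpy.transpose(rolled, (0, 2, 1))               # -> (nChannels, nProfiles, nHeights)
    print(spc_out.shape)                                       # (2, 8, 10)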
211 | def getFirstHeader(self): |
|
211 | def getFirstHeader(self): | |
212 |
|
212 | |||
213 | self.getBasicHeader() |
|
213 | self.getBasicHeader() | |
214 | self.dataOut.systemHeaderObj = self.systemHeaderObj.copy() |
|
214 | self.dataOut.systemHeaderObj = self.systemHeaderObj.copy() | |
215 | self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() |
|
215 | self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() | |
216 | self.dataOut.dtype = self.dtype |
|
216 | self.dataOut.dtype = self.dtype | |
217 | self.dataOut.pairsList = self.rdPairList |
|
217 | self.dataOut.pairsList = self.rdPairList | |
218 | self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock |
|
218 | self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock | |
219 | self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock |
|
219 | self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock | |
220 | self.dataOut.nCohInt = self.processingHeaderObj.nCohInt |
|
220 | self.dataOut.nCohInt = self.processingHeaderObj.nCohInt | |
221 | self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt |
|
221 | self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt | |
222 | xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight |
|
222 | xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight | |
223 | self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) |
|
223 | self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) | |
224 | self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels)) |
|
224 | self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels)) | |
225 | self.dataOut.flagShiftFFT = True #Data is always shifted |
|
225 | self.dataOut.flagShiftFFT = True #Data is always shifted | |
226 | self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #assume the data is not decoded |
|
226 | self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #assume the data is not decoded | |
227 | self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #assume the data is not flipped |
|
227 | self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #assume the data is not flipped | |
228 |
|
228 | |||
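getFirstHeader derives the height axis from firstHeight, deltaHeight and nHeights. A quick sketch with assumed values (120 km first height, 0.25 km spacing, 400 gates), plus an equivalent construction that does not accumulate the floating-point step:

    import numpy

    firstHeight, deltaHeight, nHeights = 120.0, 0.25, 400      # assumed header values
    xf = firstHeight + nHeights * deltaHeight
    heightList = numpy.arange(firstHeight, xf, deltaHeight)    # as built above
    print(heightList.size, heightList[0], heightList[-1])      # 400 120.0 219.75

    # equivalent form that avoids accumulating the floating-point step
    heightList2 = firstHeight + deltaHeight * numpy.arange(nHeights)
    assert heightList2.size == nHeights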
229 | def getData(self): |
|
229 | def getData(self): | |
230 | """ |
|
230 | """ | |
231 | First method to execute before "RUN" is called. |
|
231 | First method to execute before "RUN" is called. | |
232 |
|
232 | |||
233 | Copies the read buffer into the "Spectra" object, |
|
233 | Copies the read buffer into the "Spectra" object, | |
234 | together with all of its associated parameters (metadata). When there is no data left in the read |
|
234 | together with all of its associated parameters (metadata). When there is no data left in the read | |
235 | buffer, a new data block must be read using "readNextBlock" |
|
235 | buffer, a new data block must be read using "readNextBlock" | |
236 |
|
236 | |||
237 | Return: |
|
237 | Return: | |
238 | 0 : If there are no more files available |
|
238 | 0 : If there are no more files available | |
239 | 1 : If the buffer was copied successfully |
|
239 | 1 : If the buffer was copied successfully | |
240 |
|
240 | |||
241 | Affected: |
|
241 | Affected: | |
242 | self.dataOut |
|
242 | self.dataOut | |
243 | self.flagDiscontinuousBlock |
|
243 | self.flagDiscontinuousBlock | |
244 | self.flagIsNewBlock |
|
244 | self.flagIsNewBlock | |
245 | """ |
|
245 | """ | |
246 |
|
246 | |||
247 | if self.flagNoMoreFiles: |
|
247 | if self.flagNoMoreFiles: | |
248 | self.dataOut.flagNoData = True |
|
248 | self.dataOut.flagNoData = True | |
249 | return 0 |
|
249 | return 0 | |
250 |
|
250 | |||
251 | self.flagDiscontinuousBlock = 0 |
|
251 | self.flagDiscontinuousBlock = 0 | |
252 | self.flagIsNewBlock = 0 |
|
252 | self.flagIsNewBlock = 0 | |
253 |
|
253 | |||
254 | if self.__hasNotDataInBuffer(): |
|
254 | if self.__hasNotDataInBuffer(): | |
255 |
|
255 | |||
256 | if not( self.readNextBlock() ): |
|
256 | if not( self.readNextBlock() ): | |
257 | self.dataOut.flagNoData = True |
|
257 | self.dataOut.flagNoData = True | |
258 | return 0 |
|
258 | return 0 | |
259 |
|
259 | |||
260 | #data is a 3-dimensional numpy array (profiles, heights and channels) |
|
260 | #data is a 3-dimensional numpy array (profiles, heights and channels) | |
261 |
|
261 | |||
262 | if self.data_spc is None: |
|
262 | if self.data_spc is None: | |
263 | self.dataOut.flagNoData = True |
|
263 | self.dataOut.flagNoData = True | |
264 | return 0 |
|
264 | return 0 | |
265 |
|
265 | |||
266 | self.getBasicHeader() |
|
266 | self.getBasicHeader() | |
267 | self.getFirstHeader() |
|
267 | self.getFirstHeader() | |
268 | self.dataOut.data_spc = self.data_spc |
|
268 | self.dataOut.data_spc = self.data_spc | |
269 | self.dataOut.data_cspc = self.data_cspc |
|
269 | self.dataOut.data_cspc = self.data_cspc | |
270 | self.dataOut.data_dc = self.data_dc |
|
270 | self.dataOut.data_dc = self.data_dc | |
271 | self.dataOut.flagNoData = False |
|
271 | self.dataOut.flagNoData = False | |
272 | self.dataOut.realtime = self.online |
|
272 | self.dataOut.realtime = self.online | |
273 |
|
273 | |||
274 | return self.dataOut.data_spc |
|
274 | return self.dataOut.data_spc | |
275 |
|
275 | |||
276 |
|
276 | |||
277 | @MPDecorator |
|
277 | @MPDecorator | |
278 | class SpectraWriter(JRODataWriter, Operation): |
|
278 | class SpectraWriter(JRODataWriter, Operation): | |
279 |
|
279 | |||
280 | """ |
|
280 | """ | |
281 | This class writes spectra data to processed (.pdata) files. The data is |
|
281 | This class writes spectra data to processed (.pdata) files. The data is | |
282 | always written one block at a time. |
|
282 | always written one block at a time. | |
283 | """ |
|
283 | """ | |
284 |
|
284 | |||
285 | def __init__(self): |
|
285 | def __init__(self): | |
286 | """ |
|
286 | """ | |
287 | Initializer of the SpectraWriter class, used for writing spectra data. |
|
287 | Initializer of the SpectraWriter class, used for writing spectra data. | |
288 |
|
288 | |||
289 | Affected: |
|
289 | Affected: | |
290 | self.dataOut |
|
290 | self.dataOut | |
291 | self.basicHeaderObj |
|
291 | self.basicHeaderObj | |
292 | self.systemHeaderObj |
|
292 | self.systemHeaderObj | |
293 | self.radarControllerHeaderObj |
|
293 | self.radarControllerHeaderObj | |
294 | self.processingHeaderObj |
|
294 | self.processingHeaderObj | |
295 |
|
295 | |||
296 | Return: None |
|
296 | Return: None | |
297 | """ |
|
297 | """ | |
298 |
|
298 | |||
299 | Operation.__init__(self) |
|
299 | Operation.__init__(self) | |
300 |
|
300 | |||
301 | self.ext = ".pdata" |
|
301 | self.ext = ".pdata" | |
302 | self.optchar = "P" |
|
302 | self.optchar = "P" | |
303 | self.shape_spc_Buffer = None |
|
303 | self.shape_spc_Buffer = None | |
304 | self.shape_cspc_Buffer = None |
|
304 | self.shape_cspc_Buffer = None | |
305 | self.shape_dc_Buffer = None |
|
305 | self.shape_dc_Buffer = None | |
306 | self.data_spc = None |
|
306 | self.data_spc = None | |
307 | self.data_cspc = None |
|
307 | self.data_cspc = None | |
308 | self.data_dc = None |
|
308 | self.data_dc = None | |
309 | self.setFile = None |
|
309 | self.setFile = None | |
310 | self.noMoreFiles = 0 |
|
310 | self.noMoreFiles = 0 | |
311 | self.basicHeaderObj = BasicHeader(LOCALTIME) |
|
311 | self.basicHeaderObj = BasicHeader(LOCALTIME) | |
312 | self.systemHeaderObj = SystemHeader() |
|
312 | self.systemHeaderObj = SystemHeader() | |
313 | self.radarControllerHeaderObj = RadarControllerHeader() |
|
313 | self.radarControllerHeaderObj = RadarControllerHeader() | |
314 | self.processingHeaderObj = ProcessingHeader() |
|
314 | self.processingHeaderObj = ProcessingHeader() | |
315 |
|
315 | |||
316 | def hasAllDataInBuffer(self): |
|
316 | def hasAllDataInBuffer(self): | |
317 | return 1 |
|
317 | return 1 | |
318 |
|
318 | |||
319 |
|
319 | |||
320 | def setBlockDimension(self): |
|
320 | def setBlockDimension(self): | |
321 | """ |
|
321 | """ | |
322 | Obtains the shapes of the data sub-blocks that make up a block |
|
322 | Obtains the shapes of the data sub-blocks that make up a block | |
323 |
|
323 | |||
324 | Affected: |
|
324 | Affected: | |
325 | self.shape_spc_Buffer |
|
325 | self.shape_spc_Buffer | |
326 | self.shape_cspc_Buffer |
|
326 | self.shape_cspc_Buffer | |
327 | self.shape_dc_Buffer |
|
327 | self.shape_dc_Buffer | |
328 |
|
328 | |||
329 | Return: None |
|
329 | Return: None | |
330 | """ |
|
330 | """ | |
331 | self.shape_spc_Buffer = (self.dataOut.nChannels, |
|
331 | self.shape_spc_Buffer = (self.dataOut.nChannels, | |
332 | self.processingHeaderObj.nHeights, |
|
332 | self.processingHeaderObj.nHeights, | |
333 | self.processingHeaderObj.profilesPerBlock) |
|
333 | self.processingHeaderObj.profilesPerBlock) | |
334 |
|
334 | |||
335 | self.shape_cspc_Buffer = (self.dataOut.nPairs, |
|
335 | self.shape_cspc_Buffer = (self.dataOut.nPairs, | |
336 | self.processingHeaderObj.nHeights, |
|
336 | self.processingHeaderObj.nHeights, | |
337 | self.processingHeaderObj.profilesPerBlock) |
|
337 | self.processingHeaderObj.profilesPerBlock) | |
338 |
|
338 | |||
339 | self.shape_dc_Buffer = (self.dataOut.nChannels, |
|
339 | self.shape_dc_Buffer = (self.dataOut.nChannels, | |
340 | self.processingHeaderObj.nHeights) |
|
340 | self.processingHeaderObj.nHeights) | |
341 |
|
341 | |||
342 |
|
342 | |||
343 | def writeBlock(self): |
|
343 | def writeBlock(self): | |
344 | """ |
|
344 | """ | |
345 | Writes the buffer to the designated file |
|
345 | Writes the buffer to the designated file | |
346 |
|
346 | |||
347 | Affected: |
|
347 | Affected: | |
348 | self.data_spc |
|
348 | self.data_spc | |
349 | self.data_cspc |
|
349 | self.data_cspc | |
350 | self.data_dc |
|
350 | self.data_dc | |
351 | self.flagIsNewFile |
|
351 | self.flagIsNewFile | |
352 | self.flagIsNewBlock |
|
352 | self.flagIsNewBlock | |
353 | self.nTotalBlocks |
|
353 | self.nTotalBlocks | |
354 | self.nWriteBlocks |
|
354 | self.nWriteBlocks | |
355 |
|
355 | |||
356 | Return: None |
|
356 | Return: None | |
357 | """ |
|
357 | """ | |
358 |
|
358 | |||
359 | spc = numpy.transpose( self.data_spc, (0,2,1) ) |
|
359 | spc = numpy.transpose( self.data_spc, (0,2,1) ) | |
360 | if not self.processingHeaderObj.shif_fft: |
|
360 | if not self.processingHeaderObj.shif_fft: | |
361 | spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift to the right along axis 2 by the required number of positions |
|
361 | spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift to the right along axis 2 by the required number of positions | |
362 | data = spc.reshape((-1)) |
|
362 | data = spc.reshape((-1)) | |
363 | data = data.astype(self.dtype[0]) |
|
363 | data = data.astype(self.dtype[0]) | |
364 | data.tofile(self.fp) |
|
364 | data.tofile(self.fp) | |
365 |
|
365 | |||
366 | if self.data_cspc is not None: |
|
366 | if self.data_cspc is not None: | |
367 |
|
367 | |||
368 | cspc = numpy.transpose( self.data_cspc, (0,2,1) ) |
|
368 | cspc = numpy.transpose( self.data_cspc, (0,2,1) ) | |
369 | data = numpy.zeros( numpy.shape(cspc), self.dtype ) |
|
369 | data = numpy.zeros( numpy.shape(cspc), self.dtype ) | |
370 | #print 'data.shape', self.shape_cspc_Buffer |
|
370 | #print 'data.shape', self.shape_cspc_Buffer | |
371 | if not self.processingHeaderObj.shif_fft: |
|
371 | if not self.processingHeaderObj.shif_fft: | |
372 | cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift to the right along axis 2 by the required number of positions |
|
372 | cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift to the right along axis 2 by the required number of positions | |
373 | data['real'] = cspc.real |
|
373 | data['real'] = cspc.real | |
374 | data['imag'] = cspc.imag |
|
374 | data['imag'] = cspc.imag | |
375 | data = data.reshape((-1)) |
|
375 | data = data.reshape((-1)) | |
376 | data.tofile(self.fp) |
|
376 | data.tofile(self.fp) | |
377 |
|
377 | |||
378 | if self.data_dc is not None: |
|
378 | if self.data_dc is not None: | |
379 |
|
379 | |||
380 | dc = self.data_dc |
|
380 | dc = self.data_dc | |
381 | data = numpy.zeros( numpy.shape(dc), self.dtype ) |
|
381 | data = numpy.zeros( numpy.shape(dc), self.dtype ) | |
382 | data['real'] = dc.real |
|
382 | data['real'] = dc.real | |
383 | data['imag'] = dc.imag |
|
383 | data['imag'] = dc.imag | |
384 | data = data.reshape((-1)) |
|
384 | data = data.reshape((-1)) | |
385 | data.tofile(self.fp) |
|
385 | data.tofile(self.fp) | |
386 |
|
386 | |||
387 | # self.data_spc.fill(0) |
|
387 | # self.data_spc.fill(0) | |
388 | # |
|
388 | # | |
389 | # if self.data_dc is not None: |
|
389 | # if self.data_dc is not None: | |
390 | # self.data_dc.fill(0) |
|
390 | # self.data_dc.fill(0) | |
391 | # |
|
391 | # | |
392 | # if self.data_cspc is not None: |
|
392 | # if self.data_cspc is not None: | |
393 | # self.data_cspc.fill(0) |
|
393 | # self.data_cspc.fill(0) | |
394 |
|
394 | |||
395 | self.flagIsNewFile = 0 |
|
395 | self.flagIsNewFile = 0 | |
396 | self.flagIsNewBlock = 1 |
|
396 | self.flagIsNewBlock = 1 | |
397 | self.nTotalBlocks += 1 |
|
397 | self.nTotalBlocks += 1 | |
398 | self.nWriteBlocks += 1 |
|
398 | self.nWriteBlocks += 1 | |
399 | self.blockIndex += 1 |
|
399 | self.blockIndex += 1 | |
400 |
|
400 | |||
401 | # print "[Writing] Block = %d04" %self.blockIndex |
|
401 | # print "[Writing] Block = %d04" %self.blockIndex | |
402 |
|
402 | |||
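writeBlock stores the complex cross-spectra and DC channels as a structured array with 'real' and 'imag' fields before calling tofile. A small round-trip sketch, assuming a float32 pair dtype (in practice the dtype comes from the processing header) and arbitrary array sizes:

    import os
    import tempfile
    import numpy

    dtype_pair = numpy.dtype([('real', '<f4'), ('imag', '<f4')])   # assumed pair dtype
    cspc = (numpy.random.rand(2, 4, 8)
            + 1j * numpy.random.rand(2, 4, 8)).astype('complex64')

    packed = numpy.zeros(cspc.shape, dtype_pair)
    packed['real'] = cspc.real
    packed['imag'] = cspc.imag

    path = os.path.join(tempfile.mkdtemp(), 'block.bin')
    packed.reshape(-1).tofile(path)                                 # write, as writeBlock does

    raw = numpy.fromfile(path, dtype_pair).reshape(cspc.shape)      # read back, as readBlock does
    restored = raw['real'] + 1j * raw['imag']
    assert numpy.allclose(restored, cspc)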
403 | def putData(self): |
|
403 | def putData(self): | |
404 | """ |
|
404 | """ | |
405 | Sets a data block and then writes it to a file |
|
405 | Sets a data block and then writes it to a file | |
406 |
|
406 | |||
407 | Affected: |
|
407 | Affected: | |
408 | self.data_spc |
|
408 | self.data_spc | |
409 | self.data_cspc |
|
409 | self.data_cspc | |
410 | self.data_dc |
|
410 | self.data_dc | |
411 |
|
411 | |||
412 | Return: |
|
412 | Return: | |
413 | 0 : If there is no data or no more files can be written |
|
413 | 0 : If there is no data or no more files can be written | |
414 | 1 : If one block of data was written to a file |
|
414 | 1 : If one block of data was written to a file | |
415 | """ |
|
415 | """ | |
416 |
|
416 | |||
417 | if self.dataOut.flagNoData: |
|
417 | if self.dataOut.flagNoData: | |
418 | return 0 |
|
418 | return 0 | |
419 |
|
419 | |||
420 | self.flagIsNewBlock = 0 |
|
420 | self.flagIsNewBlock = 0 | |
421 |
|
421 | |||
422 | if self.dataOut.flagDiscontinuousBlock: |
|
422 | if self.dataOut.flagDiscontinuousBlock: | |
423 | self.data_spc.fill(0) |
|
423 | self.data_spc.fill(0) | |
424 | if self.dataOut.data_cspc is not None: |
|
424 | if self.dataOut.data_cspc is not None: | |
425 | self.data_cspc.fill(0) |
|
425 | self.data_cspc.fill(0) | |
426 | if self.dataOut.data_dc is not None: |
|
426 | if self.dataOut.data_dc is not None: | |
427 | self.data_dc.fill(0) |
|
427 | self.data_dc.fill(0) | |
428 | self.setNextFile() |
|
428 | self.setNextFile() | |
429 |
|
429 | |||
430 | if self.flagIsNewFile == 0: |
|
430 | if self.flagIsNewFile == 0: | |
431 | self.setBasicHeader() |
|
431 | self.setBasicHeader() | |
432 |
|
432 | |||
433 | self.data_spc = self.dataOut.data_spc.copy() |
|
433 | self.data_spc = self.dataOut.data_spc.copy() | |
434 |
|
434 | |||
435 | if self.dataOut.data_cspc is not None: |
|
435 | if self.dataOut.data_cspc is not None: | |
436 | self.data_cspc = self.dataOut.data_cspc.copy() |
|
436 | self.data_cspc = self.dataOut.data_cspc.copy() | |
437 |
|
437 | |||
438 | if self.dataOut.data_dc is not None: |
|
438 | if self.dataOut.data_dc is not None: | |
439 | self.data_dc = self.dataOut.data_dc.copy() |
|
439 | self.data_dc = self.dataOut.data_dc.copy() | |
440 |
|
440 | |||
441 | # #self.processingHeaderObj.dataBlocksPerFile) |
|
441 | # #self.processingHeaderObj.dataBlocksPerFile) | |
442 | if self.hasAllDataInBuffer(): |
|
442 | if self.hasAllDataInBuffer(): | |
443 |
|
|
443 | self.setFirstHeader() | |
444 | self.writeNextBlock() |
|
444 | self.writeNextBlock() | |
445 |
|
445 | |||
446 | def __getBlockSize(self): |
|
446 | def __getBlockSize(self): | |
447 | ''' |
|
447 | ''' | |
448 | This method determines the number of bytes of a Spectra data block |
|
448 | This method determines the number of bytes of a Spectra data block | |
449 | ''' |
|
449 | ''' | |
450 |
|
450 | |||
451 | dtype_width = self.getDtypeWidth() |
|
451 | dtype_width = self.getDtypeWidth() | |
452 |
|
452 | |||
453 | pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints |
|
453 | pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints | |
454 |
|
454 | |||
455 | pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write) |
|
455 | pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write) | |
456 | blocksize = (pts2write_SelfSpectra*dtype_width) |
|
456 | blocksize = (pts2write_SelfSpectra*dtype_width) | |
457 |
|
457 | |||
458 | if self.dataOut.data_cspc is not None: |
|
458 | if self.dataOut.data_cspc is not None: | |
459 | pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write) |
|
459 | pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write) | |
460 | blocksize += (pts2write_CrossSpectra*dtype_width*2) |
|
460 | blocksize += (pts2write_CrossSpectra*dtype_width*2) | |
461 |
|
461 | |||
462 | if self.dataOut.data_dc is not None: |
|
462 | if self.dataOut.data_dc is not None: | |
463 | pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights) |
|
463 | pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights) | |
464 | blocksize += (pts2write_DCchannels*dtype_width*2) |
|
464 | blocksize += (pts2write_DCchannels*dtype_width*2) | |
465 |
|
465 | |||
466 | # blocksize = blocksize #* datatypeValue * 2 #FIX THIS |
|
466 | # blocksize = blocksize #* datatypeValue * 2 #FIX THIS | |
467 |
|
467 | |||
468 | return blocksize |
|
468 | return blocksize | |
469 |
|
469 | |||
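The byte count above can be checked by hand: self-spectra take one value per point, while cross-spectra and DC channels take two (real and imaginary). A sketch with assumed dimensions and a 4-byte (float32) sample width:

    # assumed dimensions and a 4-byte (float32) sample width
    nChannels, nPairs, nHeights, nFFTPoints, dtype_width = 4, 3, 100, 128, 4

    pts2write = nHeights * nFFTPoints
    blocksize = nChannels * pts2write * dtype_width         # self-spectra: one value per point
    blocksize += nPairs * pts2write * dtype_width * 2       # cross-spectra: real + imag
    blocksize += nChannels * nHeights * dtype_width * 2     # DC channels: real + imag
    print(blocksize)                                        # 515200 bytes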
470 | def setFirstHeader(self): |
|
470 | def setFirstHeader(self): | |
471 |
|
471 | |||
472 | """ |
|
472 | """ | |
473 | Obtains a copy of the First Header |
|
473 | Obtains a copy of the First Header | |
474 |
|
474 | |||
475 | Affected: |
|
475 | Affected: | |
476 | self.systemHeaderObj |
|
476 | self.systemHeaderObj | |
477 | self.radarControllerHeaderObj |
|
477 | self.radarControllerHeaderObj | |
478 | self.dtype |
|
478 | self.dtype | |
479 |
|
479 | |||
480 | Return: |
|
480 | Return: | |
481 | None |
|
481 | None | |
482 | """ |
|
482 | """ | |
483 |
|
483 | |||
484 | self.systemHeaderObj = self.dataOut.systemHeaderObj.copy() |
|
484 | self.systemHeaderObj = self.dataOut.systemHeaderObj.copy() | |
485 | self.systemHeaderObj.nChannels = self.dataOut.nChannels |
|
485 | self.systemHeaderObj.nChannels = self.dataOut.nChannels | |
486 | self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy() |
|
486 | self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy() | |
487 |
|
487 | |||
488 | self.processingHeaderObj.dtype = 1 # Spectra |
|
488 | self.processingHeaderObj.dtype = 1 # Spectra | |
489 | self.processingHeaderObj.blockSize = self.__getBlockSize() |
|
489 | self.processingHeaderObj.blockSize = self.__getBlockSize() | |
490 | self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints |
|
490 | self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints | |
491 | self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile |
|
491 | self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile | |
492 | self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows |
|
492 | self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows | |
493 | self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval |
|
493 | self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval | |
494 | self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt |
|
494 | self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt | |
495 | self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels |
|
495 | self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels | |
496 | self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT |
|
496 | self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT | |
497 |
|
497 | |||
498 | if self.processingHeaderObj.totalSpectra > 0: |
|
498 | if self.processingHeaderObj.totalSpectra > 0: | |
499 | channelList = [] |
|
499 | channelList = [] | |
500 | for channel in range(self.dataOut.nChannels): |
|
500 | for channel in range(self.dataOut.nChannels): | |
501 | channelList.append(channel) |
|
501 | channelList.append(channel) | |
502 | channelList.append(channel) |
|
502 | channelList.append(channel) | |
503 |
|
503 | |||
504 | pairsList = [] |
|
504 | pairsList = [] | |
505 | if self.dataOut.nPairs > 0: |
|
505 | if self.dataOut.nPairs > 0: | |
506 | for pair in self.dataOut.pairsList: |
|
506 | for pair in self.dataOut.pairsList: | |
507 | pairsList.append(pair[0]) |
|
507 | pairsList.append(pair[0]) | |
508 | pairsList.append(pair[1]) |
|
508 | pairsList.append(pair[1]) | |
509 |
|
509 | |||
510 | spectraComb = channelList + pairsList |
|
510 | spectraComb = channelList + pairsList | |
511 | spectraComb = numpy.array(spectraComb, dtype="u1") |
|
511 | spectraComb = numpy.array(spectraComb, dtype="u1") | |
512 | self.processingHeaderObj.spectraComb = spectraComb |
|
512 | self.processingHeaderObj.spectraComb = spectraComb | |
513 |
|
513 | |||
514 | if self.dataOut.code is not None: |
|
514 | if self.dataOut.code is not None: | |
515 | self.processingHeaderObj.code = self.dataOut.code |
|
515 | self.processingHeaderObj.code = self.dataOut.code | |
516 | self.processingHeaderObj.nCode = self.dataOut.nCode |
|
516 | self.processingHeaderObj.nCode = self.dataOut.nCode | |
517 | self.processingHeaderObj.nBaud = self.dataOut.nBaud |
|
517 | self.processingHeaderObj.nBaud = self.dataOut.nBaud | |
518 |
|
518 | |||
519 | if self.processingHeaderObj.nWindows != 0: |
|
519 | if self.processingHeaderObj.nWindows != 0: | |
520 | self.processingHeaderObj.firstHeight = self.dataOut.heightList[0] |
|
520 | self.processingHeaderObj.firstHeight = self.dataOut.heightList[0] | |
521 | self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] |
|
521 | self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] | |
522 | self.processingHeaderObj.nHeights = self.dataOut.nHeights |
|
522 | self.processingHeaderObj.nHeights = self.dataOut.nHeights | |
523 | self.processingHeaderObj.samplesWin = self.dataOut.nHeights |
|
523 | self.processingHeaderObj.samplesWin = self.dataOut.nHeights | |
524 |
|
524 | |||
|
525 | if self.flagManualHeader is True: | |||
|
526 | HeaderList1D = ["nCode" , "nBaud", "codeType", "txA", "txB", "nTx"] | |||
|
527 | for attr_ in HeaderList1D: # pass dataOut variables to radarControllerHeaderObj for manual header | |||
|
528 | try: setattr(self.radarControllerHeaderObj, attr_, getattr(self.dataOut,attr_)) | |||
|
529 | except: pass | |||
|
530 | ||||
|
531 | if self.dataOut.code is not None: | |||
|
532 | self.radarControllerHeaderObj.code = numpy.array(self.dataOut.code) | |||
|
533 | ||||
|
534 | ||||
525 | self.processingHeaderObj.processFlags = self.getProcessFlags() |
|
535 | self.processingHeaderObj.processFlags = self.getProcessFlags() | |
526 |
|
536 | |||
527 | self.setBasicHeader() |
|
537 | self.setBasicHeader() |
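The spectraComb layout written by setFirstHeader lists every channel twice (its self-spectrum stored as the pair (i, i)) followed by the flattened cross-spectra pairs. A sketch with assumed channels and pairs, mirroring the loops above:

    import numpy

    nChannels = 3                        # assumed
    pairsList = [(0, 1), (0, 2)]         # assumed

    channelList = []
    for channel in range(nChannels):
        channelList.append(channel)
        channelList.append(channel)      # self-spectrum of channel i stored as the pair (i, i)

    flatPairs = []
    for pair in pairsList:
        flatPairs.append(pair[0])
        flatPairs.append(pair[1])

    spectraComb = numpy.array(channelList + flatPairs, dtype='u1')
    print(spectraComb)                   # [0 0 1 1 2 2 0 1 0 2]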