Version with Pdata and RawData reading operating correctly; RawData writing is also working.
Daniel Valdez -
r124:5a0951cb35e4
@@ -0,0 +1,388
1 '''
2 Created on Feb 7, 2012
3
4 @author $Author: murco $
5 @version $Id: SpectraProcessor.py 119 2012-09-05 17:06:09Z murco $
6 '''
7 import os, sys
8 import numpy
9 import time
10
11 path = os.path.split(os.getcwd())[0]
12 sys.path.append(path)
13
14 from Data.Spectra import Spectra
15 from IO.SpectraIO import SpectraWriter
16 #from Graphics.SpectraPlot import Spectrum
17 #from JRONoise import Noise
18
19 class SpectraProcessor:
20 '''
21 classdocs
22 '''
23
24 dataInObj = None
25
26 dataOutObj = None
27
28 noiseObj = None
29
30 integratorObjList = []
31
32 writerObjList = []
33
34 integratorObjIndex = None
35
36 writerObjIndex = None
37
38 profIndex = 0 # Used when the input object is a Voltage
39
40 # integratorObjList = []
41 #
42 # decoderObjList = []
43 #
44 # writerObjList = []
45 #
46 # plotterObjList = []
47 #
48 # integratorObjIndex = None
49 #
50 # decoderObjIndex = None
51 #
52 # writerObjIndex = None
53 #
54 # plotterObjIndex = None
55 #
56 # buffer = None
57 #
58 # profIndex = 0
59 #
60 # nFFTPoints = None
61 #
62 # nChannels = None
63 #
64 # nHeights = None
65 #
66 # nPairs = None
67 #
68 # pairList = None
69
70
71 def __init__(self):
72 '''
73 Constructor
74 '''
75
76 self.integratorObjIndex = None
77 self.writerObjIndex = None
78 self.integratorObjList = []
79 self.writerObjList = []
80 self.noiseObj = None
81 self.buffer = None
82 self.profIndex = 0
83
84 def setup(self, dataInObj=None, dataOutObj=None, nFFTPoints=None, pairList=None):
85
86 if dataInObj == None:
87 raise ValueError, "This SpectraProcessor.setup() function needs dataInObj input variable"
88
89 if dataInObj.type == "Voltage":
90 if nFFTPoints == None:
91 raise ValueError, "This SpectraProcessor.setup() function needs nFFTPoints input variable"
92 else:
93 nFFTPoints = dataInObj.nFFTPoints
94
95 self.dataInObj = dataInObj
96
97 if dataOutObj == None:
98 dataOutObj = Spectra()
99
100 self.dataOutObj = dataOutObj
101
102 # self.noiseObj = Noise() #the Noise() object is not included yet
103
104 ##########################################
105 # self.nFFTPoints = nFFTPoints
106 # self.nChannels = self.dataInObj.nChannels
107 # self.nHeights = self.dataInObj.nHeights
108 # self.pairList = pairList
109 # if pairList != None:
110 # self.nPairs = len(pairList)
111 # else:
112 # self.nPairs = 0
113 #
114 # self.dataOutObj.heightList = self.dataInObj.heightList
115 # self.dataOutObj.channelIndexList = self.dataInObj.channelIndexList
116 # self.dataOutObj.m_BasicHeader = self.dataInObj.m_BasicHeader.copy()
117 # self.dataOutObj.m_ProcessingHeader = self.dataInObj.m_ProcessingHeader.copy()
118 # self.dataOutObj.m_RadarControllerHeader = self.dataInObj.m_RadarControllerHeader.copy()
119 # self.dataOutObj.m_SystemHeader = self.dataInObj.m_SystemHeader.copy()
120 #
121 # self.dataOutObj.dataType = self.dataInObj.dataType
122 # self.dataOutObj.nPairs = self.nPairs
123 # self.dataOutObj.nChannels = self.nChannels
124 # self.dataOutObj.nProfiles = self.nFFTPoints
125 # self.dataOutObj.nHeights = self.nHeights
126 # self.dataOutObj.nFFTPoints = self.nFFTPoints
127 # #self.dataOutObj.data = None
128 #
129 # self.dataOutObj.m_SystemHeader.numChannels = self.nChannels
130 # self.dataOutObj.m_SystemHeader.nProfiles = self.nFFTPoints
131 #
132 # self.dataOutObj.m_ProcessingHeader.totalSpectra = self.nChannels + self.nPairs
133 # self.dataOutObj.m_ProcessingHeader.profilesPerBlock = self.nFFTPoints
134 # self.dataOutObj.m_ProcessingHeader.numHeights = self.nHeights
135 # self.dataOutObj.m_ProcessingHeader.shif_fft = True
136 #
137 # spectraComb = numpy.zeros( (self.nChannels+self.nPairs)*2,numpy.dtype('u1'))
138 # k = 0
139 # for i in range( 0,self.nChannels*2,2 ):
140 # spectraComb[i] = k
141 # spectraComb[i+1] = k
142 # k += 1
143 #
144 # k *= 2
145 #
146 # if self.pairList != None:
147 #
148 # for pair in self.pairList:
149 # spectraComb[k] = pair[0]
150 # spectraComb[k+1] = pair[1]
151 # k += 2
152 #
153 # self.dataOutObj.m_ProcessingHeader.spectraComb = spectraComb
154
155 return self.dataOutObj
156
157 def init(self):
158 #
159 # self.nHeights = self.dataInObj.nHeights
160 # self.dataOutObj.nHeights = self.nHeights
161 # self.dataOutObj.heightList = self.dataInObj.heightList
162 #
163
164 self.integratorObjIndex = 0
165 self.writerObjIndex = 0
166
167 if self.dataInObj.type == "Voltage":
168
169 if self.buffer == None:
170 self.buffer = numpy.zeros((self.nChannels,
171 self.nFFTPoints,
172 self.dataInObj.nHeights),
173 dtype='complex')
174
175 self.buffer[:,self.profIndex,:] = self.dataInObj.data
176 self.profIndex += 1
177
178 if self.profIndex == self.nFFTPoints:
179 self.__getFft()
180 self.dataOutObj.flagNoData = False
181
182 self.buffer = None
183 self.profIndex = 0
184 return
185
186 self.dataOutObj.flagNoData = True
187
188 return
189
190 #Other kind of data
191 if self.dataInObj.type == "Spectra":
192 self.dataOutObj.copy(self.dataInObj)
193 self.dataOutObj.flagNoData = False
194 return
195
196 raise ValueError, "The datatype is not valid"
197
198 def __getFft(self):
199 """
200 Converts Voltage values into Spectra
201
202 Affected:
203 self.dataOutObj.data_spc
204 self.dataOutObj.data_cspc
205 self.dataOutObj.data_dc
206 self.dataOutObj.heightList
207 self.dataOutObj.m_BasicHeader
208 self.dataOutObj.m_ProcessingHeader
209 self.dataOutObj.m_RadarControllerHeader
210 self.dataOutObj.m_SystemHeader
211 self.profIndex
212 self.buffer
213 self.dataOutObj.flagNoData
214 self.dataOutObj.dataType
215 self.dataOutObj.nPairs
216 self.dataOutObj.nChannels
217 self.dataOutObj.nProfiles
218 self.dataOutObj.m_SystemHeader.numChannels
219 self.dataOutObj.m_ProcessingHeader.totalSpectra
220 self.dataOutObj.m_ProcessingHeader.profilesPerBlock
221 self.dataOutObj.m_ProcessingHeader.numHeights
222 self.dataOutObj.m_ProcessingHeader.spectraComb
223 self.dataOutObj.m_ProcessingHeader.shif_fft
224 """
225
226 if self.dataInObj.flagNoData:
227 return 0
228
229 fft_volt = numpy.fft.fft(self.buffer,axis=1)
230 dc = fft_volt[:,0,:]
231
232 #self-spectra computation
233 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
234 spc = fft_volt * numpy.conjugate(fft_volt)
235 spc = spc.real
236
237 blocksize = 0
238 blocksize += dc.size
239 blocksize += spc.size
240
241 cspc = None
242 pairIndex = 0
243 if self.pairList != None:
244 #cross-spectra computation
245 cspc = numpy.zeros((self.nPairs, self.nFFTPoints, self.nHeights), dtype='complex')
246 for pair in self.pairList:
247 cspc[pairIndex,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))
248 pairIndex += 1
249 blocksize += cspc.size
250
251 self.dataOutObj.data_spc = spc
252 self.dataOutObj.data_cspc = cspc
253 self.dataOutObj.data_dc = dc
254 self.dataOutObj.m_ProcessingHeader.blockSize = blocksize
255 self.dataOutObj.m_BasicHeader.utc = self.dataInObj.m_BasicHeader.utc
256
257 # self.getNoise()
258
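For reference, a minimal self-contained sketch of the same self-spectra / cross-spectra computation on a synthetic voltage buffer; the array shapes and the channel pair are illustrative values, not taken from a real header:

    import numpy

    nChannels, nFFTPoints, nHeights = 2, 8, 4
    buff = numpy.random.randn(nChannels, nFFTPoints, nHeights) \
         + 1j*numpy.random.randn(nChannels, nFFTPoints, nHeights)

    fft_volt = numpy.fft.fft(buff, axis=1)
    dc = fft_volt[:,0,:]                                   # DC component per channel and height
    fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
    spc = (fft_volt * numpy.conjugate(fft_volt)).real      # self-spectra

    pairList = [(0, 1)]                                    # illustrative channel pair
    cspc = numpy.zeros((len(pairList), nFFTPoints, nHeights), dtype='complex')
    for k, pair in enumerate(pairList):
        cspc[k,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))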
259 def addWriter(self, wrpath, profilesPerBlock, blocksPerFile):
260 objWriter = SpectraWriter(self.dataOutObj, self.pairList)
261 objWriter.setup(wrpath, profilesPerBlock, blocksPerFile)
262 self.writerObjList.append(objWriter)
263
264 def addIntegrator(self,N,timeInterval):
265
266 objIncohInt = IncoherentIntegration(N,timeInterval)
267 self.integratorObjList.append(objIncohInt)
268
269 def writeData(self, wrpath, profilesPerBlock, blocksPerFile):
270 if self.dataOutObj.flagNoData:
271 return 0
272
273 if len(self.writerObjList) <= self.writerObjIndex:
274 self.addWriter(wrpath, profilesPerBlock, blocksPerFile)
275
276 self.writerObjList[self.writerObjIndex].putData()
277
278 self.writerObjIndex += 1
279
280 def integrator(self, N=None, timeInterval=None):
281
282 if self.dataOutObj.flagNoData:
283 return 0
284
285 if len(self.integratorObjList) <= self.integratorObjIndex:
286 self.addIntegrator(N,timeInterval)
287
288 myIncohIntObj = self.integratorObjList[self.integratorObjIndex]
289 myIncohIntObj.exe(data=self.dataOutObj.data_spc,timeOfData=self.dataOutObj.m_BasicHeader.utc)
290
291 if myIncohIntObj.isReady:
292 self.dataOutObj.data_spc = myIncohIntObj.data
293 self.dataOutObj.nAvg = myIncohIntObj.navg
294 self.dataOutObj.m_ProcessingHeader.incoherentInt = self.dataInObj.m_ProcessingHeader.incoherentInt*myIncohIntObj.navg
295 #print "myIncohIntObj.navg: ",myIncohIntObj.navg
296 self.dataOutObj.flagNoData = False
297
298 """Compute the noise"""
299 self.getNoise()
300 else:
301 self.dataOutObj.flagNoData = True
302
303 self.integratorObjIndex += 1
304
305
306
307
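A rough usage sketch of the processing chain defined above, assuming an upstream reader object whose getData() call fills dataInObj with Voltage data; the reader, its interface and the parameter values are assumptions made for illustration only:

    procObj = SpectraProcessor()
    dataOutObj = procObj.setup(dataInObj=readerObj.dataOutObj, nFFTPoints=128)   # readerObj is hypothetical

    while not readerObj.flagNoMoreFiles:
        readerObj.getData()                      # hypothetical: fills dataInObj with one profile
        procObj.init()                           # buffers profiles and runs the FFT when full
        procObj.integrator(N=10)                 # incoherent integration every 10 spectra
        procObj.writeData('/data/output', profilesPerBlock=128, blocksPerFile=100)   # illustrative path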
308 class IncoherentIntegration:
309
310 integ_counter = None
311 data = None
312 navg = None
313 buffer = None
314 nIncohInt = None
315
316 def __init__(self, N = None, timeInterval = None):
317 """
318 N
319 timeInterval - interval time [min], integer value
320 """
321
322 self.data = None
323 self.navg = None
324 self.buffer = None
325 self.timeOut = None
326 self.exitCondition = False
327 self.isReady = False
328 self.nIncohInt = N
329 self.integ_counter = 0
330 if timeInterval!=None:
331 self.timeIntervalInSeconds = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
332
333 if ((timeInterval==None) and (N==None)):
334 print 'N = None ; timeInterval = None'
335 sys.exit(0)
336 elif timeInterval == None:
337 self.timeFlag = False
338 else:
339 self.timeFlag = True
340
341
342 def exe(self,data,timeOfData):
343 """
344 data
345
346 timeOfData [seconds]
347 """
348
349 if self.timeFlag:
350 if self.timeOut == None:
351 self.timeOut = timeOfData + self.timeIntervalInSeconds
352
353 if timeOfData < self.timeOut:
354 if self.buffer == None:
355 self.buffer = data
356 else:
357 self.buffer = self.buffer + data
358 self.integ_counter += 1
359 else:
360 self.exitCondition = True
361
362 else:
363 if self.integ_counter < self.nIncohInt:
364 if self.buffer == None:
365 self.buffer = data
366 else:
367 self.buffer = self.buffer + data
368
369 self.integ_counter += 1
370
371 if self.integ_counter == self.nIncohInt:
372 self.exitCondition = True
373
374 if self.exitCondition:
375 self.data = self.buffer
376 self.navg = self.integ_counter
377 self.isReady = True
378 self.buffer = None
379 self.timeOut = None
380 self.integ_counter = 0
381 self.exitCondition = False
382
383 if self.timeFlag:
384 self.buffer = data
385 self.timeOut = timeOfData + self.timeIntervalInSeconds
386 else:
387 self.isReady = False
388 No newline at end of file
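A minimal runnable sketch of the count-based path of IncoherentIntegration (timeInterval left as None); the spectra values are synthetic:

    import numpy

    integrator = IncoherentIntegration(N=4)

    for i in range(8):
        spc = numpy.ones((2, 8, 4)) * (i + 1)        # synthetic power-spectra block
        integrator.exe(data=spc, timeOfData=i)
        if integrator.isReady:
            print "accumulated %d blocks" % integrator.navg    # navg == 4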
@@ -1,63 +1,70
1 1 import os, sys
2 2 import copy
3 3 import numpy
4 4
5 5 path = os.path.split(os.getcwd())[0]
6 6 sys.path.append(path)
7 7
8 8 from IO.JROHeader import SystemHeader, RadarControllerHeader
9 9
10 10 class JROData:
11 11
12 12 # m_BasicHeader = BasicHeader()
13 13 # m_ProcessingHeader = ProcessingHeader()
14 14
15 15 systemHeaderObj = SystemHeader()
16 16
17 17 radarControllerHeaderObj = RadarControllerHeader()
18 18
19 19 # data = None
20 20
21 21 type = None
22 22
23 23 dtype = None
24 24
25 25 nChannels = None
26 26
27 27 nHeights = None
28 28
29 29 nProfiles = None
30 30
31 31 heightList = None
32 32
33 33 channelList = None
34 34
35 35 channelIndexList = None
36 36
37 37 flagNoData = False
38 38
39 39 flagTimeBlock = False
40 40
41 41 dataUtcTime = None
42 42
43 43 nCode = None
44 44
45 45 nBaud = None
46 46
47 47 code = None
48
49 flagDecodeData = True #assume the data is already decoded
50
51 flagDeflipData = True #assume the data has not been flipped
52
53 flagShiftFFT = False
54
48 55
49 56 def __init__(self):
50 57
51 58 raise ValueError, "This class has not been implemented"
52 59
53 60 def copy(self, inputObj=None):
54 61
55 62 if inputObj == None:
56 63 return copy.deepcopy(self)
57 64
58 65 for key in inputObj.__dict__.keys():
59 66 self.__dict__[key] = inputObj.__dict__[key]
60 67
61 68 def deepcopy(self):
62 69
63 70 return copy.deepcopy(self) No newline at end of file
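The copy() method above either returns a deep copy (no argument) or copies another object's attribute dictionary into self. A small illustration using Spectra, which is instantiated with no arguments elsewhere in this commit (SpectraProcessor.setup) and is assumed here to subclass JROData:

    from Data.Spectra import Spectra

    a = Spectra()
    a.nChannels = 4

    b = Spectra()
    b.copy(a)                 # b now carries a's attribute values
    print b.nChannels         # -> 4

    c = a.deepcopy()          # independent deep copy of a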
@@ -1,753 +1,751
1 1 import os, sys
2 2 import glob
3 3 import time
4 4 import numpy
5 5 import fnmatch
6 6 import time, datetime
7 7
8 8 path = os.path.split(os.getcwd())[0]
9 9 sys.path.append(path)
10 10
11 11 from JROHeader import *
12 12 from Data.JROData import JROData
13 13
14 14 def isNumber(str):
15 15 """
16 16 Checks whether the characters of a string can be converted into a number.
17 17 
18 18 Exceptions:
19 19 If the string cannot be converted into a number
20 20 Input:
21 21 str, string to be analyzed to determine whether it can be converted into a number
22 22 
23 23 Return:
24 24 True : if the string is numeric
25 25 False : if the string is not numeric
26 26 """
27 27 try:
28 28 float( str )
29 29 return True
30 30 except:
31 31 return False
32 32
33 33 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
34 34 """
35 35 This function determines whether a data file falls within the specified date range.
36 36 
37 37 Inputs:
38 38 filename : full name of the data file in Jicamarca format (.r)
39 39 
40 40 startUTSeconds : start date of the selected range, given in seconds
41 41 counted from 01/01/1970.
42 42 endUTSeconds : end date of the selected range, given in seconds
43 43 counted from 01/01/1970.
44 44 
45 45 Return:
46 46 Boolean : returns True if the data file contains data within the specified
47 47 date range, otherwise returns False.
48 48 
49 49 Exceptions:
50 50 If the file does not exist or cannot be opened
51 51 If the header cannot be read.
52 52
53 53 """
54 54 basicHeaderObj = BasicHeader()
55 55
56 56 try:
57 57 fp = open(filename,'rb')
58 58 except:
59 59 raise IOError, "The file %s can't be opened" %(filename)
60 60
61 61 sts = basicHeaderObj.read(fp)
62 62 fp.close()
63 63
64 64 if not(sts):
65 65 print "Skipping the file %s because it does not have a valid header" %(filename)
66 66 return 0
67 67
68 68 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
69 69 return 0
70 70
71 71 return 1
72 72
73 73
74 74
75 75
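A short usage sketch of isThisFileinRange(); the file path is a hypothetical raw-data file in the Jicamarca naming convention:

    import time, datetime

    startUT = time.mktime(datetime.datetime(2012, 2, 8, 0, 0, 0).timetuple())
    endUT   = time.mktime(datetime.datetime(2012, 2, 8, 23, 59, 59).timetuple())

    if isThisFileinRange("/data/d2012039/D2012039000.r", startUT, endUT):
        print "the file contains data inside the requested range"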
76 76 class JRODataIO:
77 77
78 78 c = 3E8
79 79
80 80 basicHeaderObj = BasicHeader()
81 81
82 82 systemHeaderObj = SystemHeader()
83 83
84 84 radarControllerHeaderObj = RadarControllerHeader()
85 85
86 86 processingHeaderObj = ProcessingHeader()
87 87
88 88 online = 0
89 89
90 90 dtype = None
91 91
92 92 pathList = []
93 93
94 94 filenameList = []
95 95
96 96 filename = None
97 97
98 98 ext = None
99 99
100 100 flagNoMoreFiles = 0
101 101
102 102 flagIsNewFile = 1
103 103
104 104 flagTimeBlock = 0
105 105
106 106 flagIsNewBlock = 0
107 107
108 108 fp = None
109 109
110 110 firstHeaderSize = 0
111 111
112 112 basicHeaderSize = 24
113 113
114 114 versionFile = 1103
115 115
116 116 fileSize = None
117 117
118 118 ippSeconds = None
119 119
120 120 fileSizeByHeader = None
121 121
122 122 fileIndex = None
123 123
124 124 profileIndex = None
125 125
126 126 blockIndex = None
127 127
128 128 nTotalBlocks = None
129 129
130 130 maxTimeStep = 30
131 131
132 132 lastUTTime = None
133 133
134 134 datablock = None
135 135
136 136 dataOutObj = None
137 137
138 138 blocksize = None
139 139
140 140 def __init__(self):
141 141 pass
142 142
143 143 class JRODataReader(JRODataIO):
144 144
145 145 nReadBlocks = 0
146 146
147 147 def __init__(self):
148 148
149 149 pass
150 150
151 151 def createObjByDefault(self):
152 152 """
153 153
154 154 """
155 155 raise ValueError, "This method has not been implemented"
156 156
157 157 def getBlockDimension(self):
158 158
159 159 raise ValueError, "No implemented"
160 160
161 161 def __searchFilesOffLine(self,
162 162 path,
163 163 startDate,
164 164 endDate,
165 165 startTime=datetime.time(0,0,0),
166 166 endTime=datetime.time(23,59,59),
167 167 set=None,
168 168 expLabel="",
169 169 ext=".r"):
170 170 dirList = []
171 171 for thisPath in os.listdir(path):
172 172 if os.path.isdir(os.path.join(path,thisPath)):
173 173 dirList.append(thisPath)
174 174
175 175 if not(dirList):
176 176 return None, None
177 177
178 178 pathList = []
179 179 dateList = []
180 180
181 181 thisDate = startDate
182 182
183 183 while(thisDate <= endDate):
184 184 year = thisDate.timetuple().tm_year
185 185 doy = thisDate.timetuple().tm_yday
186 186
187 187 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
188 188 if len(match) == 0:
189 189 thisDate += datetime.timedelta(1)
190 190 continue
191 191
192 192 pathList.append(os.path.join(path,match[0],expLabel))
193 193 dateList.append(thisDate)
194 194 thisDate += datetime.timedelta(1)
195 195
196 196 filenameList = []
197 197 for index in range(len(pathList)):
198 198
199 199 thisPath = pathList[index]
200 200 fileList = glob.glob1(thisPath, "*%s" %ext)
201 201 fileList.sort()
202 202
203 203 #Search for data within the indicated time range
204 204 thisDate = dateList[index]
205 205 startDT = datetime.datetime.combine(thisDate, startTime)
206 206 endDT = datetime.datetime.combine(thisDate, endTime)
207 207
208 208 startUtSeconds = time.mktime(startDT.timetuple())
209 209 endUtSeconds = time.mktime(endDT.timetuple())
210 210
211 211 for file in fileList:
212 212
213 213 filename = os.path.join(thisPath,file)
214 214
215 215 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
216 216 filenameList.append(filename)
217 217
218 218 if not(filenameList):
219 219 return None, None
220 220
221 221 self.filenameList = filenameList
222 222
223 223 return pathList, filenameList
224 224
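__searchFilesOffLine() matches one sub-directory per day, named with a one-character prefix plus the year and day of year ('?YYYYDDD'). A small demonstration of that pattern with illustrative directory names:

    import fnmatch

    dirList = ['d2012039', 'd2012040', 'logs']
    print fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (2012, 39))    # -> ['d2012039']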
225 225 def setup(self,dataOutObj=None,
226 226 path=None,
227 227 startDate=None,
228 228 endDate=None,
229 229 startTime=datetime.time(0,0,0),
230 230 endTime=datetime.time(23,59,59),
231 231 set=0,
232 232 expLabel = "",
233 233 ext = None,
234 234 online = 0):
235 235
236 236 if path == None:
237 237 raise ValueError, "The path is not valid"
238 238
239 239 if ext == None:
240 240 ext = self.ext
241 241
242 242 if dataOutObj == None:
243 243 dataOutObj = self.createObjByDefault()
244 244
245 245 self.dataOutObj = dataOutObj
246 246
247 247 if online:
248 248 pass
249 249
250 250 else:
251 251 print "Searching file in offline mode"
252 252 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
253 253 if not(pathList):
254 254 print "No files in range: %s - %s"%(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
255 255 return None
256 256 self.fileIndex = -1
257 257 self.pathList = pathList
258 258 self.filenameList = filenameList
259 259
260 260 self.online = online
261 261 ext = ext.lower()
262 262 self.ext = ext
263 263
264 264 if not(self.setNextFile()):
265 265 if (startDate!=None) and (endDate!=None):
266 266 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
267 267 elif startDate != None:
268 268 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
269 269 else:
270 270 print "No files"
271 271
272 272 return None
273 273
274 274 # self.updateDataHeader()
275 275
276 276 return self.dataOutObj
277 277
278 278 def __setNextFileOffline(self):
279 279 idFile = self.fileIndex
280 280
281 281 while (True):
282 282 idFile += 1
283 283 if not(idFile < len(self.filenameList)):
284 284 self.flagNoMoreFiles = 1
285 285 print "No more Files"
286 286 return 0
287 287
288 288 filename = self.filenameList[idFile]
289 289
290 290 if not(self.__verifyFile(filename)):
291 291 continue
292 292
293 293 fileSize = os.path.getsize(filename)
294 294 fp = open(filename,'rb')
295 295 break
296 296
297 297 self.flagIsNewFile = 1
298 298 self.fileIndex = idFile
299 299 self.filename = filename
300 300 self.fileSize = fileSize
301 301 self.fp = fp
302 302
303 303 print "Setting the file: %s"%self.filename
304 304
305 305 return 1
306 306
307 307
308 308
309 309 def setNextFile(self):
310 310 if self.fp != None:
311 311 self.fp.close()
312 312
313 313 if self.online:
314 314 newFile = self.__setNextFileOnline()
315 315 else:
316 316 newFile = self.__setNextFileOffline()
317 317
318 318 if not(newFile):
319 319 return 0
320 320
321 321 self.__readFirstHeader()
322 322 self.nReadBlocks = 0
323 323 return 1
324 324
325 325 def __setNewBlock(self):
326 326 if self.fp == None:
327 327 return 0
328 328
329 329 if self.flagIsNewFile:
330 330 return 1
331 331
332 332 self.lastUTTime = self.basicHeaderObj.utc
333 333 currentSize = self.fileSize - self.fp.tell()
334 334 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
335 335
336 336 if (currentSize >= neededSize):
337 337 self.__rdBasicHeader()
338 338 return 1
339 339
340 340 if not(self.setNextFile()):
341 341 return 0
342 342
343 343 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
344 344
345 345 self.flagTimeBlock = 0
346 346
347 347 if deltaTime > self.maxTimeStep:
348 348 self.flagTimeBlock = 1
349 349
350 350 return 1
351 351
352 352
353 353 def readNextBlock(self):
354 354 if not(self.__setNewBlock()):
355 355 return 0
356 356
357 357 if not(self.readBlock()):
358 358 return 0
359 359
360 360 return 1
361 361
362 362 def __rdProcessingHeader(self, fp=None):
363 363 if fp == None:
364 364 fp = self.fp
365 365
366 366 self.processingHeaderObj.read(fp)
367 367
368 368 def __rdRadarControllerHeader(self, fp=None):
369 369 if fp == None:
370 370 fp = self.fp
371 371
372 372 self.radarControllerHeaderObj.read(fp)
373 373
374 374 def __rdSystemHeader(self, fp=None):
375 375 if fp == None:
376 376 fp = self.fp
377 377
378 378 self.systemHeaderObj.read(fp)
379 379
380 380 def __rdBasicHeader(self, fp=None):
381 381 if fp == None:
382 382 fp = self.fp
383 383
384 384 self.basicHeaderObj.read(fp)
385 385
386 386
387 387 def __readFirstHeader(self):
388 388 self.__rdBasicHeader()
389 389 self.__rdSystemHeader()
390 390 self.__rdRadarControllerHeader()
391 391 self.__rdProcessingHeader()
392 392
393 393 self.firstHeaderSize = self.basicHeaderObj.size
394 394
395 395 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
396 396 if datatype == 0:
397 397 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
398 398 elif datatype == 1:
399 399 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
400 400 elif datatype == 2:
401 401 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
402 402 elif datatype == 3:
403 403 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
404 404 elif datatype == 4:
405 405 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
406 406 elif datatype == 5:
407 407 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
408 408 else:
409 409 raise ValueError, 'Data type was not defined'
410 410
411 411 self.dtype = datatype_str
412 412 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
413 413 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
414 414 # self.dataOutObj.channelList = numpy.arange(self.systemHeaderObj.numChannels)
415 415 # self.dataOutObj.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
416 416 self.getBlockDimension()
417 417
418 418
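The datatype index computed in __readFirstHeader() is the bit offset of the datatype flag relative to DATATYPE_CHAR. A worked example for short-integer data, with the relevant PROCFLAG constants repeated locally so the snippet stands alone:

    import numpy

    DATATYPE_CHAR  = numpy.uint32(0x00000040)
    DATATYPE_SHORT = numpy.uint32(0x00000080)
    DATATYPE_MASK  = numpy.uint32(0x00000FC0)

    processFlags = DATATYPE_SHORT          # as it would come from the processing header
    datatype = int(numpy.log2(processFlags & DATATYPE_MASK) - numpy.log2(DATATYPE_CHAR))
    print datatype                         # -> 1, i.e. complex pairs of '<i2' values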
419 419 def __verifyFile(self, filename, msgFlag=True):
420 420 msg = None
421 421 try:
422 422 fp = open(filename, 'rb')
423 423 currentPosition = fp.tell()
424 424 except:
425 425 if msgFlag:
426 426 print "The file %s can't be opened" % (filename)
427 427 return False
428 428
429 429 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
430 430
431 431 if neededSize == 0:
432 432 basicHeaderObj = BasicHeader()
433 433 systemHeaderObj = SystemHeader()
434 434 radarControllerHeaderObj = RadarControllerHeader()
435 435 processingHeaderObj = ProcessingHeader()
436 436
437 437 try:
438 438 if not( basicHeaderObj.read(fp) ): raise ValueError
439 439 if not( systemHeaderObj.read(fp) ): raise ValueError
440 440 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
441 441 if not( processingHeaderObj.read(fp) ): raise ValueError
442 442 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
443 443
444 444 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
445 445
446 446 except:
447 447 if msgFlag:
448 448 print "\tThe file %s is empty or does not have enough data" % filename
449 449
450 450 fp.close()
451 451 return False
452 452 else:
453 453 msg = "\tSkipping the file %s because it does not have enough data" %filename
454 454
455 455 fp.close()
456 456 fileSize = os.path.getsize(filename)
457 457 currentSize = fileSize - currentPosition
458 458 if currentSize < neededSize:
459 459 if msgFlag and (msg != None):
460 460 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
461 461 return False
462 462
463 463 return True
464 464
465 465 def getData():
466 466 pass
467 467
468 468 def hasNotDataInBuffer():
469 469 pass
470 470
471 471 def readBlock():
472 472 pass
473 473
474 474 class JRODataWriter(JRODataIO):
475 475
476 476 """
477 477 This class allows writing data to processed files (.r or .pdata). The data
478 478 is always written block by block.
479 479 """
480 480
481 481 blockIndex = 0
482 482
483 483 path = None
484 484
485 485 setFile = None
486 486
487 487 profilesPerBlock = None
488 488
489 489 blocksPerFile = None
490 490
491 nWriteBlocks = 0
492
491 493 def __init__(self, dataOutObj=None):
492 494 raise ValueError, "Not implemented"
493 495
494 496
495 497 def hasAllDataInBuffer(self):
496 498 raise ValueError, "Not implemented"
497 499
498 500
499 501 def setBlockDimension(self):
500 502 raise ValueError, "Not implemented"
501 503
502 504
503 505 def writeBlock(self):
504 506 raise ValueError, "No implemented"
505 507
506 508
507 509 def putData(self):
508 510 raise ValueError, "No implemented"
509 511
510 512
511 513 def __writeFirstHeader(self):
512 514 """
513 515 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
514 516
515 517 Affected:
516 518 __dataType
517 519
518 520 Return:
519 521 None
520 522 """
521 523
522 524 # COMPUTE PARAMETERS
523 525
524 sizeLongHeader = 0#XXXX
525 self.basicHeaderObj.size = 24 + sizeLongHeader
526 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
527 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
526 528
527 529 self.__writeBasicHeader()
528 530 self.__wrSystemHeader()
529 531 self.__wrRadarControllerHeader()
530 532 self.__wrProcessingHeader()
531 533 self.dtype = self.dataOutObj.dtype
532 534
533 535
534 536 def __writeBasicHeader(self, fp=None):
535 537 """
536 538 Writes only the Basic header to the created file
537 539
538 540 Return:
539 541 None
540 542 """
541 543 if fp == None:
542 544 fp = self.fp
543 545
544 self.dataOutObj.basicHeaderObj.write(fp)
546 self.basicHeaderObj.write(fp)
545 547
546 548
547 549 def __wrSystemHeader(self, fp=None):
548 550 """
549 551 Writes only the System header to the created file
550 552
551 553 Return:
552 554 None
553 555 """
554 556 if fp == None:
555 557 fp = self.fp
556 558
557 self.dataOutObj.systemHeaderObj.write(fp)
559 self.systemHeaderObj.write(fp)
558 560
559 561
560 562 def __wrRadarControllerHeader(self, fp=None):
561 563 """
562 564 Writes only the RadarController header to the created file
563 565
564 566 Return:
565 567 None
566 568 """
567 569 if fp == None:
568 570 fp = self.fp
569 571
570 self.dataOutObj.radarControllerHeaderObj.write(fp)
572 self.radarControllerHeaderObj.write(fp)
571 573
572 574
573 575 def __wrProcessingHeader(self, fp=None):
574 576 """
575 577 Writes only the Processing header to the created file
576 578
577 579 Return:
578 580 None
579 581 """
580 582 if fp == None:
581 583 fp = self.fp
582 584
583 self.dataOutObj.processingHeaderObj.write(fp)
585 self.processingHeaderObj.write(fp)
584 586
585 587
586 588 def setNextFile(self):
587 589 """
588 590 Determines the next file to be written
589 591
590 592 Affected:
591 593 self.filename
592 594 self.subfolder
593 595 self.fp
594 596 self.setFile
595 597 self.flagIsNewFile
596 598
597 599 Return:
598 600 0 : if the file cannot be written
599 601 1 : if the file is ready to be written
600 602 """
601 603 ext = self.ext
602 604 path = self.path
603 605
604 606 if self.fp != None:
605 607 self.fp.close()
606 608
607 609 timeTuple = time.localtime( self.dataOutObj.dataUtcTime)
608 610 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
609 611
610 612 doypath = os.path.join( path, subfolder )
611 613 if not( os.path.exists(doypath) ):
612 614 os.mkdir(doypath)
613 615 self.setFile = -1 #initialize the set counter
614 616 else:
615 617 filesList = os.listdir( doypath )
616 618 if len( filesList ) > 0:
617 619 filesList = sorted( filesList, key=str.lower )
618 620 filen = filesList[-1]
619 621 # the filename must have the following format (see the worked example after this method)
620 622 # 0 1234 567 89A BCDE (hex)
621 623 # x YYYY DDD SSS .ext
622 624 if isNumber( filen[8:11] ):
623 625 self.setFile = int( filen[8:11] ) #initialize the set counter from the last file's set number
624 626 else:
625 627 self.setFile = -1
626 628 else:
627 629 self.setFile = -1 #initialize the set counter
628 630
629 631 setFile = self.setFile
630 632 setFile += 1
631 633
632 634 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
633 635 timeTuple.tm_year,
634 636 timeTuple.tm_yday,
635 637 setFile,
636 638 ext )
637 639
638 640 filename = os.path.join( path, subfolder, file )
639 641
640 642 fp = open( filename,'wb' )
641 643
642 644 self.blockIndex = 0
643 645
644 646 #save the attributes
645 647 self.filename = filename
646 648 self.subfolder = subfolder
647 649 self.fp = fp
648 650 self.setFile = setFile
649 651 self.flagIsNewFile = 1
650 652
653 self.getDataHeader()
654
651 655 print 'Writing the file: %s'%self.filename
652 656
653 657 self.__writeFirstHeader()
654 658
655 659 return 1
656 660
657 661
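A worked example of the output file name built in setNextFile(), using the optchar and extension of the spectra writer ('P', '.pdata') and illustrative values for the year, day of year and set number:

    optchar, ext = 'P', '.pdata'
    filename = '%s%4.4d%3.3d%3.3d%s' % (optchar, 2012, 39, 3, ext)
    print filename        # -> P2012039003.pdata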
658 662 def __setNewBlock(self):
659 663 """
660 664 If it is a new file, writes the First Header; otherwise writes only the Basic Header
661 665
662 666 Return:
663 667 0 : if nothing could be written
664 668 1 : if the Basic or the First Header was written
665 669 """
666 670 if self.fp == None:
667 671 self.setNextFile()
668 672
669 673 if self.flagIsNewFile:
670 674 return 1
671 675
672 676 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
673 677 self.__writeBasicHeader()
674 678 return 1
675 679
676 680 if not( self.setNextFile() ):
677 681 return 0
678 682
679 683 return 1
680 684
681 685
682 686 def writeNextBlock(self):
683 687 """
684 688 Selects the next block of data and writes it to a file
685 689
686 690 Return:
687 691 0 : if the data block could not be written
688 692 1 : if the data block was written
689 693 """
690 694 if not( self.__setNewBlock() ):
691 695 return 0
692 696
693 697 self.writeBlock()
694 698
695 699 return 1
696 700
697 701
698 702 def getDataHeader(self):
699 703 """Gets a copy of the First Header. Affected: self.basicHeaderObj,
700 704 self.systemHeaderObj, self.radarControllerHeaderObj, self.processingHeaderObj,
701 705 self.dtype. Return: None
702 706 """
703 707
704 708 raise ValueError, "No implemented"
705 709
706 710 def setup(self, path, profilesPerBlock, blocksPerFile, set=0, ext=None):
707 711 """
708 712 Sets the format in which the data will be saved and writes the First Header
709 713
710 714 Inputs:
711 715 path : destination path where the created files will be written
712 716 format : format in which a file will be saved
713 717 set : the set number of the file
714 718
715 719 Return:
716 720 0 : if the setup was not successful
717 721 1 : if the setup was successful
718 722 """
719 723
720 724 if ext == None:
721 725 ext = self.ext
722 726
723 727 ext = ext.lower()
724 728
725 729 self.ext = ext
726 730
727 731 self.path = path
728 732
729 733 self.setFile = set - 1
730 734
731 735 self.profilesPerBlock = profilesPerBlock
732 736
733 737 self.blocksPerFile = blocksPerFile
734 738
739 if not(self.setNextFile()):
740 print "There isn't a next file"
741 return 0
735 742
736
737
738
739 self.getDataHeader()
740
741 743 self.setBlockDimension()
742 744
743 if not( self.setNextFile() ):
744 print "There isn't a next file"
745 return 0
746
747 745 return 1
748 746
749 747
750 748
751 749
752 750
753 751
@@ -1,506 +1,506
1 1 '''
2 2 Created on 23/01/2012
3 3
4 4 @author $Author: vsarmiento $
5 5 @version $Id: HeaderIO.py 37 2012-03-26 22:55:13Z vsarmiento $
6 6 '''
7 7
8 8 import numpy
9 9 import copy
10 10
11 11 class Header:
12 12
13 13 def __init__(self):
14 14 raise
15 15
16 16 def copy(self):
17 17 return copy.deepcopy(self)
18 18
19 19 def read():
20 20 pass
21 21
22 22 def write():
23 23 pass
24 24
25 25 class BasicHeader(Header):
26 26
27 27 size = None
28 28 version = None
29 29 dataBlock = None
30 30 utc = None
31 31 miliSecond = None
32 32 timeZone = None
33 33 dstFlag = None
34 34 errorCount = None
35 35 struct = None
36 36
37 37 def __init__(self):
38 38
39 39 self.size = 0
40 40 self.version = 0
41 41 self.dataBlock = 0
42 42 self.utc = 0
43 43 self.miliSecond = 0
44 44 self.timeZone = 0
45 45 self.dstFlag = 0
46 46 self.errorCount = 0
47 47 self.struct = numpy.dtype([
48 48 ('nSize','<u4'),
49 49 ('nVersion','<u2'),
50 50 ('nDataBlockId','<u4'),
51 51 ('nUtime','<u4'),
52 52 ('nMilsec','<u2'),
53 53 ('nTimezone','<i2'),
54 54 ('nDstflag','<i2'),
55 55 ('nErrorCount','<u4')
56 56 ])
57 57
58 58
59 59 def read(self, fp):
60 60 try:
61 61 header = numpy.fromfile(fp, self.struct,1)
62 62 self.size = header['nSize'][0]
63 63 self.version = header['nVersion'][0]
64 64 self.dataBlock = header['nDataBlockId'][0]
65 65 self.utc = header['nUtime'][0]
66 66 self.miliSecond = header['nMilsec'][0]
67 67 self.timeZone = header['nTimezone'][0]
68 68 self.dstFlag = header['nDstflag'][0]
69 69 self.errorCount = header['nErrorCount'][0]
70 70 except:
71 71 return 0
72 72
73 73 return 1
74 74
75 75 def write(self, fp):
76 76 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
77 77 header = numpy.array(headerTuple,self.struct)
78 78 header.tofile(fp)
79 79
80 80 return 1
81 81
82 82 class SystemHeader(Header):
83 83
84 84 size = None
85 85 nSamples = None
86 86 nProfiles = None
87 87 nChannels = None
88 88 adcResolution = None
89 89 pciDioBusWidth = None
90 90 struct = None
91 91
92 92 def __init__(self):
93 93 self.size = 0
94 94 self.nSamples = 0
95 95 self.nProfiles = 0
96 96 self.nChannels = 0
97 97 self.adcResolution = 0
98 98 self.pciDioBusWidth = 0
99 99 self.struct = numpy.dtype([
100 100 ('nSize','<u4'),
101 101 ('nNumSamples','<u4'),
102 102 ('nNumProfiles','<u4'),
103 103 ('nNumChannels','<u4'),
104 104 ('nADCResolution','<u4'),
105 105 ('nPCDIOBusWidth','<u4'),
106 106 ])
107 107
108 108
109 109 def read(self, fp):
110 110 try:
111 111 header = numpy.fromfile(fp,self.struct,1)
112 112 self.size = header['nSize'][0]
113 113 self.nSamples = header['nNumSamples'][0]
114 114 self.nProfiles = header['nNumProfiles'][0]
115 115 self.nChannels = header['nNumChannels'][0]
116 116 self.adcResolution = header['nADCResolution'][0]
117 117 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
118 118 except:
119 119 return 0
120 120
121 121 return 1
122 122
123 123 def write(self, fp):
124 124 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
125 125 header = numpy.array(headerTuple,self.struct)
126 126 header.tofile(fp)
127 127
128 128 return 1
129 129
130 130 class RadarControllerHeader(Header):
131 131
132 132 size = None
133 133 expType = None
134 134 nTx = None
135 135 ipp = None
136 136 txA = None
137 137 txB = None
138 138 nWindows = None
139 139 numTaus = None
140 140 codeType = None
141 141 line6Function = None
142 142 line5Function = None
143 143 fClock = None
144 144 prePulseBefore = None
145 145 prePulserAfter = None
146 146 rangeIpp = None
147 147 rangeTxA = None
148 148 rangeTxB = None
149 149 struct = None
150 150
151 151 def __init__(self):
152 152 self.size = 0
153 153 self.expType = 0
154 154 self.nTx = 0
155 155 self.ipp = 0
156 156 self.txA = 0
157 157 self.txB = 0
158 158 self.nWindows = 0
159 159 self.numTaus = 0
160 160 self.codeType = 0
161 161 self.line6Function = 0
162 162 self.line5Function = 0
163 163 self.fClock = 0
164 164 self.prePulseBefore = 0
165 165 self.prePulserAfter = 0
166 166 self.rangeIpp = 0
167 167 self.rangeTxA = 0
168 168 self.rangeTxB = 0
169 169 self.struct = numpy.dtype([
170 170 ('nSize','<u4'),
171 171 ('nExpType','<u4'),
172 172 ('nNTx','<u4'),
173 173 ('fIpp','<f4'),
174 174 ('fTxA','<f4'),
175 175 ('fTxB','<f4'),
176 176 ('nNumWindows','<u4'),
177 177 ('nNumTaus','<u4'),
178 178 ('nCodeType','<u4'),
179 179 ('nLine6Function','<u4'),
180 180 ('nLine5Function','<u4'),
181 181 ('fClock','<f4'),
182 182 ('nPrePulseBefore','<u4'),
183 183 ('nPrePulseAfter','<u4'),
184 184 ('sRangeIPP','<a20'),
185 185 ('sRangeTxA','<a20'),
186 186 ('sRangeTxB','<a20'),
187 187 ])
188 188
189 189 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
190 190
191 191 self.samplingWindow = None
192 192 self.nHeights = None
193 193 self.firstHeight = None
194 194 self.deltaHeight = None
195 195 self.samplesWin = None
196 196
197 197 self.nCode = None
198 198 self.nBaud = None
199 199 self.code = None
200 200 self.flip1 = None
201 201 self.flip2 = None
202 202
203 203 self.dynamic = numpy.array([],numpy.dtype('byte'))
204 204
205 205
206 206 def read(self, fp):
207 207 try:
208 208 startFp = fp.tell()
209 209 header = numpy.fromfile(fp,self.struct,1)
210 210 self.size = header['nSize'][0]
211 211 self.expType = header['nExpType'][0]
212 212 self.nTx = header['nNTx'][0]
213 213 self.ipp = header['fIpp'][0]
214 214 self.txA = header['fTxA'][0]
215 215 self.txB = header['fTxB'][0]
216 216 self.nWindows = header['nNumWindows'][0]
217 217 self.numTaus = header['nNumTaus'][0]
218 218 self.codeType = header['nCodeType'][0]
219 219 self.line6Function = header['nLine6Function'][0]
220 220 self.line5Function = header['nLine5Function'][0]
221 221 self.fClock = header['fClock'][0]
222 222 self.prePulseBefore = header['nPrePulseBefore'][0]
223 223 self.prePulserAfter = header['nPrePulseAfter'][0]
224 224 self.rangeIpp = header['sRangeIPP'][0]
225 225 self.rangeTxA = header['sRangeTxA'][0]
226 226 self.rangeTxB = header['sRangeTxB'][0]
227 227 # jump Dynamic Radar Controller Header
228 228 jumpFp = self.size - 116
229 229 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
230 230 #pointer backward to dynamic header and read
231 231 backFp = fp.tell() - jumpFp
232 232 fp.seek(backFp)
233 233
234 234 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
235 235 self.nHeights = numpy.sum(self.samplingWindow['nsa'])
236 236 self.firstHeight = self.samplingWindow['h0']
237 237 self.deltaHeight = self.samplingWindow['dh']
238 238 self.samplesWin = self.samplingWindow['nsa']
239 239
240 240 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
241 241
242 242 if self.codeType != 0:
243 243 self.nCode = numpy.fromfile(fp,'<u4',1)
244 244 self.nBaud = numpy.fromfile(fp,'<u4',1)
245 245 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
246 246 tempList = []
247 247 for ic in range(self.nCode):
248 248 temp = numpy.fromfile(fp,'u1',4*numpy.ceil(self.nBaud/32.))
249 249 tempList.append(temp)
250 250 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
251 251 self.code = 2.0*self.code - 1.0
252 252
253 253 if self.line5Function == RCfunction.FLIP:
254 254 self.flip1 = numpy.fromfile(fp,'<u4',1)
255 255
256 256 if self.line6Function == RCfunction.FLIP:
257 257 self.flip2 = numpy.fromfile(fp,'<u4',1)
258 258
259 259 endFp = self.size + startFp
260 260 jumpFp = endFp - fp.tell()
261 261 if jumpFp > 0:
262 262 fp.seek(jumpFp)
263 263
264 264 except:
265 265 return 0
266 266
267 267 return 1
268 268
269 269 def write(self, fp):
270 270 headerTuple = (self.size,
271 271 self.expType,
272 272 self.nTx,
273 273 self.ipp,
274 274 self.txA,
275 275 self.txB,
276 276 self.nWindows,
277 277 self.numTaus,
278 278 self.codeType,
279 279 self.line6Function,
280 280 self.line5Function,
281 281 self.fClock,
282 282 self.prePulseBefore,
283 283 self.prePulserAfter,
284 284 self.rangeIpp,
285 285 self.rangeTxA,
286 286 self.rangeTxB)
287 287
288 288 header = numpy.array(headerTuple,self.struct)
289 289 header.tofile(fp)
290 290
291 291 dynamic = self.dynamic
292 292 dynamic.tofile(fp)
293 293
294 294 return 1
295 295
296 296
297 297
298 298 class ProcessingHeader(Header):
299 299
300 300 size = None
301 301 dtype = None
302 302 blockSize = None
303 303 profilesPerBlock = None
304 304 dataBlocksPerFile = None
305 305 nWindows = None
306 306 processFlags = None
307 307 nCohInt = None
308 308 nIncohInt = None
309 309 totalSpectra = None
310 310 struct = None
311 311 flag_dc = None
312 312 flag_cspc = None
313 313
314 314 def __init__(self):
315 315 self.size = 0
316 316 self.dataType = 0
317 317 self.blockSize = 0
318 318 self.profilesPerBlock = 0
319 319 self.dataBlocksPerFile = 0
320 320 self.nWindows = 0
321 321 self.processFlags = 0
322 322 self.nCohInt = 0
323 323 self.nIncohInt = 0
324 324 self.totalSpectra = 0
325 325 self.struct = numpy.dtype([
326 326 ('nSize','<u4'),
327 327 ('nDataType','<u4'),
328 328 ('nSizeOfDataBlock','<u4'),
329 329 ('nProfilesperBlock','<u4'),
330 330 ('nDataBlocksperFile','<u4'),
331 331 ('nNumWindows','<u4'),
332 332 ('nProcessFlags','<u4'),
333 333 ('nCoherentIntegrations','<u4'),
334 334 ('nIncoherentIntegrations','<u4'),
335 335 ('nTotalSpectra','<u4')
336 336 ])
337 337 self.samplingWindow = 0
338 338 self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
339 339 self.nHeights = 0
340 340 self.firstHeight = 0
341 341 self.deltaHeight = 0
342 342 self.samplesWin = 0
343 343 self.spectraComb = 0
344 self.nCode = 0
345 self.code = 0
346 self.nBaud = 0
344 self.nCode = None
345 self.code = None
346 self.nBaud = None
347 347 self.shif_fft = False
348 348 self.flag_dc = False
349 349 self.flag_cspc = False
350 350
351 351 def read(self, fp):
352 352 try:
353 353 header = numpy.fromfile(fp,self.struct,1)
354 354 self.size = header['nSize'][0]
355 355 self.dataType = header['nDataType'][0]
356 356 self.blockSize = header['nSizeOfDataBlock'][0]
357 357 self.profilesPerBlock = header['nProfilesperBlock'][0]
358 358 self.dataBlocksPerFile = header['nDataBlocksperFile'][0]
359 359 self.nWindows = header['nNumWindows'][0]
360 360 self.processFlags = header['nProcessFlags']
361 361 self.nCohInt = header['nCoherentIntegrations'][0]
362 362 self.nIncohInt = header['nIncoherentIntegrations'][0]
363 363 self.totalSpectra = header['nTotalSpectra'][0]
364 364 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
365 365 self.nHeights = numpy.sum(self.samplingWindow['nsa'])
366 366 self.firstHeight = self.samplingWindow['h0'][0]
367 367 self.deltaHeight = self.samplingWindow['dh'][0]
368 368 self.samplesWin = self.samplingWindow['nsa']
369 369 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
370 370
371 371 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
372 372 self.nCode = numpy.fromfile(fp,'<u4',1)
373 373 self.nBaud = numpy.fromfile(fp,'<u4',1)
374 374 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nBaud,self.nCode)
375 375
376 376 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
377 377 self.shif_fft = True
378 378 else:
379 379 self.shif_fft = False
380 380
381 381 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
382 382 self.flag_dc = True
383 383
384 384 nChannels = 0
385 385 nPairs = 0
386 386 pairList = []
387 387
388 388 for i in range( 0, self.totalSpectra*2, 2 ):
389 389 if self.spectraComb[i] == self.spectraComb[i+1]:
390 390 nChannels = nChannels + 1 #pair of equal channels (self-spectrum)
391 391 else:
392 392 nPairs = nPairs + 1 #pair of different channels (cross-spectrum)
393 393 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
394 394
395 395 self.flag_cspc = False
396 396 if nPairs > 0:
397 397 self.flag_cspc = True
398 398
399 399 except:
400 400 return 0
401 401
402 402 return 1
403 403
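spectraComb stores, for every saved spectrum, the pair of channel indices that produced it: equal indices mean a self-spectrum, different indices a cross-spectrum. A worked example of the counting loop above, with an illustrative combination:

    spectraComb = [0, 0, 1, 1, 0, 1]     # ch0 and ch1 self-spectra, plus the (0,1) cross-spectrum
    nChannels, nPairs, pairList = 0, 0, []

    for i in range(0, len(spectraComb), 2):
        if spectraComb[i] == spectraComb[i+1]:
            nChannels += 1
        else:
            nPairs += 1
            pairList.append((spectraComb[i], spectraComb[i+1]))

    print nChannels, nPairs, pairList    # -> 2 1 [(0, 1)]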
404 404 def write(self, fp):
405 405 headerTuple = (self.size,
406 406 self.dataType,
407 407 self.blockSize,
408 408 self.profilesPerBlock,
409 409 self.dataBlocksPerFile,
410 410 self.nWindows,
411 411 self.processFlags,
412 412 self.nCohInt,
413 413 self.nIncohInt,
414 414 self.totalSpectra)
415 415
416 416 header = numpy.array(headerTuple,self.struct)
417 417 header.tofile(fp)
418 418
419 419 if self.nWindows != 0:
420 420 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
421 421 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
422 422 samplingWindow.tofile(fp)
423 423
424 424
425 425 if self.totalSpectra != 0:
426 426 spectraComb = numpy.array([],numpy.dtype('u1'))
427 427 spectraComb = self.spectraComb
428 428 spectraComb.tofile(fp)
429 429
430 430
431 431 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
432 432 nCode = self.nCode
433 433 nCode.tofile(fp)
434 434
435 435 nBaud = self.nBaud
436 436 nBaud.tofile(fp)
437 437
438 438 code = self.code.reshape(nCode*nBaud)
439 439 code.tofile(fp)
440 440
441 441 return 1
442 442
443 443 class RCfunction:
444 444 NONE=0
445 445 FLIP=1
446 446 CODE=2
447 447 SAMPLING=3
448 448 LIN6DIV256=4
449 449 SYNCHRO=5
450 450
451 451 class nCodeType:
452 452 NONE=0
453 453 USERDEFINE=1
454 454 BARKER2=2
455 455 BARKER3=3
456 456 BARKER4=4
457 457 BARKER5=5
458 458 BARKER7=6
459 459 BARKER11=7
460 460 BARKER13=8
461 461 AC128=9
462 462 COMPLEMENTARYCODE2=10
463 463 COMPLEMENTARYCODE4=11
464 464 COMPLEMENTARYCODE8=12
465 465 COMPLEMENTARYCODE16=13
466 466 COMPLEMENTARYCODE32=14
467 467 COMPLEMENTARYCODE64=15
468 468 COMPLEMENTARYCODE128=16
469 469 CODE_BINARY28=17
470 470
471 471 class PROCFLAG:
472 472 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
473 473 DECODE_DATA = numpy.uint32(0x00000002)
474 474 SPECTRA_CALC = numpy.uint32(0x00000004)
475 475 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
476 476 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
477 477 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
478 478
479 479 DATATYPE_CHAR = numpy.uint32(0x00000040)
480 480 DATATYPE_SHORT = numpy.uint32(0x00000080)
481 481 DATATYPE_LONG = numpy.uint32(0x00000100)
482 482 DATATYPE_INT64 = numpy.uint32(0x00000200)
483 483 DATATYPE_FLOAT = numpy.uint32(0x00000400)
484 484 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
485 485
486 486 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
487 487 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
488 488 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
489 489
490 490 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
491 491 DEFLIP_DATA = numpy.uint32(0x00010000)
492 492 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
493 493
494 494 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
495 495 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
496 496 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
497 497 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
498 498 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
499 499
500 500 EXP_NAME_ESP = numpy.uint32(0x00200000)
501 501 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
502 502
503 503 OPERATION_MASK = numpy.uint32(0x0000003F)
504 504 DATATYPE_MASK = numpy.uint32(0x00000FC0)
505 505 DATAARRANGE_MASK = numpy.uint32(0x00007000)
506 506 ACQ_SYS_MASK = numpy.uint32(0x001C0000) No newline at end of file
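Most PROCFLAG values are bit flags, so individual processing options are tested by AND-ing them against processFlags, exactly as ProcessingHeader.read() does. A short illustration, assuming the module above is importable:

    processFlags = PROCFLAG.SHIFT_FFT_DATA | PROCFLAG.SAVE_CHANNELS_DC

    shif_fft = (processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA       # True
    flag_dc  = (processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC   # True
    decoded  = (processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA             # False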
@@ -1,574 +1,748
1 1 '''
2 2 File: SpectraIO.py
3 3 Created on 20/02/2012
4 4
5 5 @author $Author: dsuarez $
6 6 @version $Id: SpectraIO.py 110 2012-07-19 15:18:18Z dsuarez $
7 7 '''
8 8
9 9 import os, sys
10 10 import numpy
11 11 import glob
12 12 import fnmatch
13 13 import time, datetime
14 14
15 15 path = os.path.split(os.getcwd())[0]
16 16 sys.path.append(path)
17 17
18 18 from IO.JROHeader import *
19 19 from Data.Spectra import Spectra
20 20
21 21 from JRODataIO import JRODataReader
22 22 from JRODataIO import JRODataWriter
23 23 from JRODataIO import isNumber
24 24
25 25
26 26 class SpectraReader(JRODataReader):
27 27 """
28 28 This class allows reading spectra data from processed files (.pdata). The data is
29 29 always read block by block. The data read (a 3-dimensional array) is stored in
30 30 three buffers: the Self Spectra, the Cross Spectra and the DC Channel.
31 31 
32 32 pairsOfEqualChannels * heights * profiles (Self Spectra)
33 33 pairsOfDifferentChannels * heights * profiles (Cross Spectra)
34 34 channels * heights (DC Channels)
35 35 
36 36 This class contains instances (objects) of the BasicHeader, SystemHeader,
37 37 RadarControllerHeader and Spectra classes. The first three are used to store the data
38 38 header information (metadata), and the fourth (Spectra) to obtain and store a block of
39 39 data from the "buffer" every time the "getData" method is executed.
40 40
41 41 Example:
42 42 dpath = "/home/myuser/data"
43 43
44 44 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
45 45
46 46 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
47 47
48 48 readerObj = SpectraReader()
49 49
50 50 readerObj.setup(dpath, startTime, endTime)
51 51
52 52 while(True):
53 53
54 54 readerObj.getData()
55 55
56 56 print readerObj.dataOutObj.data
57 57
58 58 if readerObj.flagNoMoreFiles:
59 59 break
60 60
61 61 """
62 62
63 63 pts2read_SelfSpectra = 0
64 64
65 65 pts2read_CrossSpectra = 0
66 66
67 67 pts2read_DCchannels = 0
68 68
69 69 ext = ".pdata"
70 70
71 71 optchar = "P"
72 72
73 73 dataOutObj = None
74 74
75 75 nRdChannels = None
76 76
77 77 nRdPairs = None
78 78
79 79 rdPairList = []
80 80
81 81
82 82 def __init__(self, dataOutObj=None):
83 83 """
84 84 Initializer of the SpectraReader class for reading spectra data.
85 85
86 86 Inputs:
87 87 dataOutObj : Object of the Spectra class. This object will be used to
88 88 store a data profile every time a request is made
89 89 (getData). The profile will be obtained from the data buffer;
90 90 if the buffer is empty, a new read of a data block
91 91 will be performed.
92 92 If this parameter is not passed, one is created internally.
93 93
94 94 Affected:
95 95 self.dataOutObj
96 96
97 97 Return : None
98 98 """
99 99
100 100 self.pts2read_SelfSpectra = 0
101 101
102 102 self.pts2read_CrossSpectra = 0
103 103
104 104 self.pts2read_DCchannels = 0
105 105
106 106 self.datablock = None
107 107
108 108 self.utc = None
109 109
110 110 self.ext = ".pdata"
111 111
112 112 self.optchar = "P"
113 113
114 114 self.basicHeaderObj = BasicHeader()
115 115
116 116 self.systemHeaderObj = SystemHeader()
117 117
118 118 self.radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 self.processingHeaderObj = ProcessingHeader()
121 121
122 122 self.online = 0
123 123
124 124 self.fp = None
125 125
126 126 self.idFile = None
127 127
128 128 self.dtype = None
129 129
130 130 self.fileSizeByHeader = None
131 131
132 132 self.filenameList = []
133 133
134 134 self.filename = None
135 135
136 136 self.fileSize = None
137 137
138 138 self.firstHeaderSize = 0
139 139
140 140 self.basicHeaderSize = 24
141 141
142 142 self.pathList = []
143 143
144 144 self.lastUTTime = 0
145 145
146 146 self.maxTimeStep = 30
147 147
148 148 self.flagNoMoreFiles = 0
149 149
150 150 self.set = 0
151 151
152 152 self.path = None
153 153
154 154 self.delay = 3 #seconds
155 155
156 156 self.nTries = 3 #number of tries
157 157
158 158 self.nFiles = 3 #number of files for searching
159 159
160 160 self.nReadBlocks = 0
161 161
162 162 self.flagIsNewFile = 1
163 163
164 164 self.ippSeconds = 0
165 165
166 166 self.flagTimeBlock = 0
167 167
168 168 self.flagIsNewBlock = 0
169 169
170 170 self.nTotalBlocks = 0
171 171
172 172 self.blocksize = 0
173 173
174 174
175 175 def createObjByDefault(self):
176 176
177 177 dataObj = Spectra()
178 178
179 179 return dataObj
180 180
181 181 def __hasNotDataInBuffer(self):
182 182 return 1
183 183
184 184
185 185 def getBlockDimension(self):
186 186 """
187 187 Gets the number of points to read for each data block (see the worked example after this method)
188 188
189 189 Affected:
190 190 self.nRdChannels
191 191 self.nRdPairs
192 192 self.pts2read_SelfSpectra
193 193 self.pts2read_CrossSpectra
194 194 self.pts2read_DCchannels
195 195 self.blocksize
196 196 self.dataOutObj.nChannels
197 197 self.dataOutObj.nPairs
198 198
199 199 Return:
200 200 None
201 201 """
202 202 self.nRdChannels = 0
203 203 self.nRdPairs = 0
204 204 self.rdPairList = []
205 205
206 for i in range( 0, self.processingHeaderObj.totalSpectra*2, 2 ):
206 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
207 207 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
208 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
208 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
209 209 else:
210 210 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
211 self.rdPairList.append( (self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]) )
211 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
212 212
213 213 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
214 214
215 215 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
216 216 self.blocksize = self.pts2read_SelfSpectra
217 217
218 218 if self.processingHeaderObj.flag_cspc:
219 219 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
220 220 self.blocksize += self.pts2read_CrossSpectra
221 221
222 222 if self.processingHeaderObj.flag_dc:
223 223 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
224 224 self.blocksize += self.pts2read_DCchannels
225 225
226 226 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
227 227
228 228
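A worked example of the block-size arithmetic in getBlockDimension(), for a hypothetical file with 2 self-spectra channels, 1 cross pair, DC channels saved, 100 heights and 128 profiles per block:

    nRdChannels, nRdPairs, nChannels = 2, 1, 2
    nHeights, profilesPerBlock = 100, 128

    pts2read = nHeights * profilesPerBlock               # 12800 points per spectrum

    pts2read_SelfSpectra  = nRdChannels * pts2read       # 25600 real values
    pts2read_CrossSpectra = nRdPairs * pts2read          # 12800 complex values
    pts2read_DCchannels   = nChannels * nHeights         # 200 complex values

    blocksize = pts2read_SelfSpectra + pts2read_CrossSpectra + pts2read_DCchannels   # 38600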
229 229 def readBlock(self):
230 230 """
231 231 Reads the data block from the current position of the file pointer
232 232 (self.fp) and updates all the parameters related to the data block
233 233 (metadata + data). The data read is stored in the buffer and the buffer counter
234 234 is set to 0
235 235
236 236 Return: None
237 237
238 238 Affected variables:
239 239
240 240 self.flagIsNewFile
241 241 self.flagIsNewBlock
242 242 self.nTotalBlocks
243 243 self.data_spc
244 244 self.data_cspc
245 245 self.data_dc
246 246
247 247 Exceptions:
248 248 If a block read is not a valid block
249 249 """
250 250 blockOk_flag = False
251 251 fpointer = self.fp.tell()
252 252
253 253 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
254 254 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
255 255
256 256 if self.processingHeaderObj.flag_cspc:
257 257 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
258 258 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
259 259
260 260 if self.processingHeaderObj.flag_dc:
261 261 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
262 262 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
263 263
264 264
265 265 if not(self.processingHeaderObj.shif_fft):
266 266 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a given number of positions
267 267
268 268 if self.processingHeaderObj.flag_cspc:
269 269 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a given number of positions
270 270
271 271
272 272 spc = numpy.transpose( spc, (0,2,1) )
273 273 self.data_spc = spc
274 274
275 275 if self.processingHeaderObj.flag_cspc:
276 276 cspc = numpy.transpose( cspc, (0,2,1) )
277 277 self.data_cspc = cspc['real'] + cspc['imag']*1j
278 278 else:
279 279 self.data_cspc = None
280 280
281 281 if self.processingHeaderObj.flag_dc:
282 282 self.data_dc = dc['real'] + dc['imag']*1j
283 283 else:
284 284 self.data_dc = None
285 285
286 286 self.flagIsNewFile = 0
287 287 self.flagIsNewBlock = 1
288 288
289 289 self.nTotalBlocks += 1
290 290 self.nReadBlocks += 1
291 291
292 292 return 1
293 293
294 294
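readBlock reads each section as a flat array and then rearranges it into (channel, profile, height) order. A small self-contained sketch of that reshape/roll/transpose on synthetic data (the sizes below are made up):

    import numpy

    nChannels, nHeights, nProfiles = 2, 3, 4            # assumed sizes

    flat = numpy.arange(nChannels * nHeights * nProfiles, dtype='float32')

    spc = flat.reshape((nChannels, nHeights, nProfiles)) # file order: channel, height, profile
    spc = numpy.roll(spc, nProfiles // 2, axis=2)        # apply the missing FFT shift (shif_fft == 0)
    spc = numpy.transpose(spc, (0, 2, 1))                # -> (channel, profile, height), as kept in data_spc

    print(spc.shape)                                     # (2, 4, 3)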
295 295 def getData(self):
296 296 """
297 297 Copies the read buffer into the "Spectra" class object,
298 298 together with all of its associated parameters (metadata). When there is no data left in the
299 299 read buffer, a new data block must be read using "readNextBlock"
300 300
301 301 Return:
302 302 0 : if there are no more files available
303 303 1 : if the buffer was copied successfully
304 304
305 305 Affected:
306 306 self.dataOutObj
307 307
308 308 self.flagTimeBlock
309 309 self.flagIsNewBlock
310 310 """
311 311
312 312 if self.flagNoMoreFiles: return 0
313 313
314 314 self.flagTimeBlock = 0
315 315 self.flagIsNewBlock = 0
316 316
317 317 if self.__hasNotDataInBuffer():
318 318
319 319 if not( self.readNextBlock() ):
320 320 return 0
321 321
322 322 # self.updateDataHeader()
323 323
324 324 if self.flagNoMoreFiles == 1:
325 325 print 'Process finished'
326 326 return 0
327 327
328 328 #data is a 3-dimensional numpy array (profiles, heights and channels)
329 329
330 330 if self.data_dc is None:
331 331 self.dataOutObj.flagNoData = True
332 332 return 0
333 333
334 334
335 335 self.dataOutObj.data_spc = self.data_spc
336 336
337 337 self.dataOutObj.data_cspc = self.data_cspc
338 338
339 339 self.dataOutObj.data_dc = self.data_dc
340 340
341 341 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
342 342
343 343 self.dataOutObj.flagNoData = False
344 344
345 345 self.dataOutObj.dtype = self.dtype
346 346
347 347 self.dataOutObj.nChannels = self.nRdChannels
348 348
349 349 self.dataOutObj.nPairs = self.nRdPairs
350 350
351 351 self.dataOutObj.pairsList = self.rdPairList
352 352
353 353 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
354 354
355 355 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
356 356
357 357 self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock
358 358
359 359 self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt
360 360
361 361
362 362 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
363 363
364 364 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
365 365
366 366 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
367 367
368 368 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
369 369
370 370 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc #+ self.profileIndex * self.ippSeconds
371 371
372 self.dataOutObj.flagShiftFFT = self.processingHeaderObj.shif_fft
373
372 374 # self.profileIndex += 1
373 375
374 376 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
375 377
376 378 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
377 379
378 380 return 1
379 381
380 382
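A minimal read loop for SpectraReader.getData(); the setup arguments follow the test scripts at the end of this changeset, and the path and dates are placeholders, not real values.

    import datetime
    from IO.SpectraIO import SpectraReader

    readerObj = SpectraReader()
    readerObj.setup(path = "/path/to/pdata",
                    startDate = datetime.date(2012, 3, 1),
                    endDate = datetime.date(2012, 3, 30),
                    startTime = datetime.time(0, 0, 0),
                    endTime = datetime.time(23, 59, 59),
                    expLabel = '',
                    online = 0)

    while True:
        if not readerObj.getData():          # 0: no more files available (or an empty block)
            break
        spectraObj = readerObj.dataOutObj    # data_spc, data_cspc, data_dc plus metadata
        print(spectraObj.nFFTPoints)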
381 383 class SpectraWriter(JRODataWriter):
382 384
383 385 """
384 386 This class writes spectra data to processed files (.pdata). The data is
385 387 always written in blocks.
386 388 """
387 389
390 ext = ".pdata"
391
392 optchar = "P"
388 393
389 394 shape_spc_Buffer = None
395
390 396 shape_cspc_Buffer = None
397
391 398 shape_dc_Buffer = None
392 dataOutObj = None
399
400 data_spc = None
401
402 data_cspc = None
403
404 data_dc = None
405
406 wrPairList = []
407
408 nWrPairs = 0
409
410 nWrChannels = 0
411
412 # dataOutObj = None
393 413
394 414 def __init__(self, dataOutObj=None):
395 415 """
396 416 Initializer of the SpectraWriter class for writing spectra data.
397 417
398 418 Affected:
399 419 self.dataOutObj
400 420 self.basicHeaderObj
401 421 self.systemHeaderObj
402 422 self.radarControllerHeaderObj
403 423 self.processingHeaderObj
404 424
405 425 Return: None
406 426 """
407 427 if dataOutObj == None:
408 428 dataOutObj = Spectra()
409 429
410 430 if not( isinstance(dataOutObj, Spectra) ):
411 431 raise ValueError, "in SpectraWriter, dataOutObj must be a Spectra class object"
412 432
413 433 self.dataOutObj = dataOutObj
414
415 self.ext = ".pdata"
416
417 self.optchar = "P"
418
419 self.shape_spc_Buffer = None
420 self.shape_cspc_Buffer = None
421 self.shape_dc_Buffer = None
422
423 self.data_spc = None
424 self.data_cspc = None
425 self.data_dc = None
426
427 ####################################
428
429 self.fp = None
430 434
431 self.nWriteBlocks = 0
435 self.nTotalBlocks = 0
432 436
433 self.flagIsNewFile = 1
437 self.nWrChannels = self.dataOutObj.nChannels
434 438
435 self.nTotalBlocks = 0
439 # if len(pairList) > 0:
440 # self.wrPairList = pairList
441 #
442 # self.nWrPairs = len(pairList)
436 443
437 self.flagIsNewBlock = 0
444 self.wrPairList = self.dataOutObj.pairsList
438 445
439 self.flagNoMoreFiles = 0
440
441 self.setFile = None
446 self.nWrPairs = self.dataOutObj.nPairs
442 447
443 self.dtype = None
444 448
445 self.path = None
446 449
447 self.noMoreFiles = 0
448 450
449 self.filename = None
450
451 self.basicHeaderObj = BasicHeader()
452
453 self.systemHeaderObj = SystemHeader()
454
455 self.radarControllerHeaderObj = RadarControllerHeader()
456
457 self.processingHeaderObj = ProcessingHeader()
451
452 # self.data_spc = None
453 # self.data_cspc = None
454 # self.data_dc = None
455
456 # self.fp = None
457
458 # self.flagIsNewFile = 1
459 #
460 # self.nTotalBlocks = 0
461 #
462 # self.flagIsNewBlock = 0
463 #
464 # self.flagNoMoreFiles = 0
465 #
466 # self.setFile = None
467 #
468 # self.dtype = None
469 #
470 # self.path = None
471 #
472 # self.noMoreFiles = 0
473 #
474 # self.filename = None
475 #
476 # self.basicHeaderObj = BasicHeader()
477 #
478 # self.systemHeaderObj = SystemHeader()
479 #
480 # self.radarControllerHeaderObj = RadarControllerHeader()
481 #
482 # self.processingHeaderObj = ProcessingHeader()
458 483
459 484
460 485 def hasAllDataInBuffer(self):
461 486 return 1
462 487
463 488
464 489 def setBlockDimension(self):
465 490 """
466 491 Gets the dimensional shapes of the data sub-blocks that make up a block
467 492
468 493 Affected:
469 494 self.shape_spc_Buffer
470 495 self.shape_cspc_Buffer
471 496 self.shape_dc_Buffer
472 497
473 498 Return: None
474 499 """
475 500 self.shape_spc_Buffer = (self.dataOutObj.nChannels,
476 501 self.processingHeaderObj.nHeights,
477 502 self.processingHeaderObj.profilesPerBlock)
478 503
479 504 self.shape_cspc_Buffer = (self.dataOutObj.nPairs,
480 505 self.processingHeaderObj.nHeights,
481 506 self.processingHeaderObj.profilesPerBlock)
482 507
483 508 self.shape_dc_Buffer = (self.systemHeaderObj.nChannels,
484 509 self.processingHeaderObj.nHeights)
485 510
486 511
487 512 def writeBlock(self):
488 513 """
489 514 Writes the buffer to the designated file
490 515
491 516 Affected:
492 517 self.data_spc
493 518 self.data_cspc
494 519 self.data_dc
495 520 self.flagIsNewFile
496 521 self.flagIsNewBlock
497 522 self.nTotalBlocks
498 523 self.nWriteBlocks
499 524
500 525 Return: None
501 526 """
502 527
503 528 spc = numpy.transpose( self.data_spc, (0,2,1) )
504 529 if not( self.processingHeaderObj.shif_fft ):
505 530 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half a block (FFT shift)
506 531 data = spc.reshape((-1))
507 532 data.tofile(self.fp)
508 533
509 534 if self.data_cspc is not None:
510 535 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
511 536 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
512 537 if not( self.processingHeaderObj.shif_fft ):
513 538 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half a block (FFT shift)
514 539 data['real'] = cspc.real
515 540 data['imag'] = cspc.imag
516 541 data = data.reshape((-1))
517 542 data.tofile(self.fp)
518 543
519 544 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
520 545 dc = self.data_dc
521 546 data['real'] = dc.real
522 547 data['imag'] = dc.imag
523 548 data = data.reshape((-1))
524 549 data.tofile(self.fp)
525 550
526 551 self.data_spc.fill(0)
527 552 self.data_dc.fill(0)
528 553 if self.data_cspc is not None:
529 554 self.data_cspc.fill(0)
530 555
531 556 self.flagIsNewFile = 0
532 557 self.flagIsNewBlock = 1
533 558 self.nTotalBlocks += 1
534 559 self.nWriteBlocks += 1
535 560
536 561
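writeBlock packs complex cross-spectra and DC values into a structured ('real', 'imag') array before calling tofile(). A short sketch of that packing step on synthetic data; the sizes and the float32 dtype are assumptions.

    import numpy

    dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])

    cspc = (numpy.arange(6) + 1j * numpy.arange(6)).reshape((1, 2, 3))   # fake cross-spectra block

    data = numpy.zeros(cspc.shape, dtype)
    data['real'] = cspc.real
    data['imag'] = cspc.imag

    flat = data.reshape((-1))      # 1D, interleaved real/imag pairs
    # flat.tofile(fp)              # writeBlock would write this buffer to the open file
    print(flat.dtype)              # [('real', '<f4'), ('imag', '<f4')]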
537 562 def putData(self):
538 563 """
539 564 Sets up a data block and then writes it to a file
540 565
541 566 Affected:
542 567 self.data_spc
543 568 self.data_cspc
544 569 self.data_dc
545 570
546 571 Return:
547 572 0 : if there is no data or no more files can be written
548 573 1 : if a data block was written to a file
549 574 """
550 575 self.flagIsNewBlock = 0
551 576
552 577 if self.dataOutObj.flagNoData:
553 578 return 0
554 579
555 580 if self.dataOutObj.flagTimeBlock:
556 581 self.data_spc.fill(0)
557 582 self.data_cspc.fill(0)
558 583 self.data_dc.fill(0)
559 584 self.setNextFile()
560 585
561 586 self.data_spc = self.dataOutObj.data_spc
562 587 self.data_cspc = self.dataOutObj.data_cspc
563 588 self.data_dc = self.dataOutObj.data_dc
564 589
565 590 # #self.processingHeaderObj.dataBlocksPerFile)
566 591 if self.hasAllDataInBuffer():
567 592 self.getDataHeader()
568 593 self.writeNextBlock()
569 594
570 595 if self.flagNoMoreFiles:
571 596 #print 'Process finished'
572 597 return 0
573 598
574 return 1
599 return 1
600
601
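In practice this writer is driven through the processing chain rather than called directly; the sketch below follows the pdata test script at the end of this changeset. Paths, dates and block sizes are placeholders.

    import datetime
    from IO.SpectraIO import SpectraReader
    from Processing.SpectraProcessor import SpectraProcessor

    readerObj = SpectraReader()
    specObj1 = readerObj.setup(path = "/path/to/pdata",
                               startDate = datetime.date(2012, 3, 1),
                               endDate = datetime.date(2012, 3, 30),
                               startTime = datetime.time(0, 0, 0),
                               endTime = datetime.time(23, 59, 59),
                               expLabel = '',
                               online = 0)

    specObjProc = SpectraProcessor()
    specObjProc.setup(dataInObj = specObj1)

    while True:
        readerObj.getData()                                     # fill the Spectra object
        specObjProc.init()
        specObjProc.writeData("/path/to/output_pdata", 16, 5)   # drives SpectraWriter.putData() block by block
        if readerObj.flagNoMoreFiles:
            break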
602 def __getProcessFlags(self):
603
604 processFlags = 0
605
606 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
607 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
608 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
609 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
610 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
611 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
612
613 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
614
615
616
617 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
618 PROCFLAG.DATATYPE_SHORT,
619 PROCFLAG.DATATYPE_LONG,
620 PROCFLAG.DATATYPE_INT64,
621 PROCFLAG.DATATYPE_FLOAT,
622 PROCFLAG.DATATYPE_DOUBLE]
623
624
625 for index in range(len(dtypeList)):
626 if self.dataOutObj.dtype == dtypeList[index]:
627 dtypeValue = datatypeValueList[index]
628 break
629
630 processFlags += dtypeValue
631
632 if self.dataOutObj.flagDecodeData:
633 processFlags += PROCFLAG.DECODE_DATA
634
635 if self.dataOutObj.flagDeflipData:
636 processFlags += PROCFLAG.DEFLIP_DATA
637
638 if self.dataOutObj.code is not None:
639 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
640
641 if self.dataOutObj.nIncohInt > 1:
642 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
643
644 if self.dataOutObj.data_dc is not None:
645 processFlags += PROCFLAG.SAVE_CHANNELS_DC
646
647 return processFlags
648
649
650 def __getBlockSize(self):
651 '''
652 This method determines the number of bytes in a Spectra data block
653 '''
654
655 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
656 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
657 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
658 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
659 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
660 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
661
662 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
663 datatypeValueList = [1,2,4,8,4,8]
664 for index in range(len(dtypeList)):
665 if self.dataOutObj.dtype == dtypeList[index]:
666 datatypeValue = datatypeValueList[index]
667 break
668
669
670 pts2write = self.dataOutObj.nHeights * self.dataOutObj.nFFTPoints
671
672 pts2write_SelfSpectra = int(self.nWrChannels * pts2write)
673 blocksize = pts2write_SelfSpectra
674
675 if self.dataOutObj.data_cspc is not None:
676 pts2write_CrossSpectra = int(self.nWrPairs * pts2write)
677 blocksize += pts2write_CrossSpectra
678
679 if self.dataOutObj.data_dc is not None:
680 pts2write_DCchannels = int(self.nWrChannels * self.dataOutObj.nHeights)
681 blocksize += pts2write_DCchannels
682
683 blocksize = blocksize * datatypeValue * 2
684
685 return blocksize
686
687
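A worked example of this byte count for a hypothetical float32 spectra block (4-byte real plus 4-byte imaginary components); the sizes are made up and only illustrate the formula above.

    nHeights = 100
    nFFTPoints = 16
    nWrChannels = 4
    nWrPairs = 2
    datatypeValue = 4                                   # bytes per component for '<f4'

    pts2write = nHeights * nFFTPoints                   # 1600
    blocksize = nWrChannels * pts2write                 # 6400 self-spectra points
    blocksize += nWrPairs * pts2write                   # +3200 cross-spectra points
    blocksize += nWrChannels * nHeights                 # +400 DC points
    blocksize = blocksize * datatypeValue * 2           # 2 components (real, imag) per point
    print(blocksize)                                    # 80000 bytes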
688 def getBasicHeader(self):
689 self.basicHeaderObj.size = self.basicHeaderSize #bytes
690 self.basicHeaderObj.version = self.versionFile
691 self.basicHeaderObj.dataBlock = self.nTotalBlocks
692
693 utc = numpy.floor(self.dataOutObj.dataUtcTime)
694 milisecond = (self.dataOutObj.dataUtcTime - utc)* 1000.0
695
696 self.basicHeaderObj.utc = utc
697 self.basicHeaderObj.miliSecond = milisecond
698 self.basicHeaderObj.timeZone = 0
699 self.basicHeaderObj.dstFlag = 0
700 self.basicHeaderObj.errorCount = 0
701
702 def getDataHeader(self):
703
704 """
705 Gets a copy of the First Header
706
707 Affected:
708 self.systemHeaderObj
709 self.radarControllerHeaderObj
710 self.dtype
711
712 Return:
713 None
714 """
715
716 self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
717 self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()
718
719 self.getBasicHeader()
720
721 processingHeaderSize = 40 # bytes
722 self.processingHeaderObj.dtype = 0 # Voltage
723 self.processingHeaderObj.blockSize = self.__getBlockSize()
724 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
725 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
726 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows
727 self.processingHeaderObj.processFlags = self.__getProcessFlags()
728 self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt
729 self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
730 self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type
731
732 if self.dataOutObj.code is not None:
733 self.processingHeaderObj.code = self.dataOutObj.code
734 self.processingHeaderObj.nCode = self.dataOutObj.nCode
735 self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
736 codesize = int(8 + 4 * self.dataOutObj.nCode * self.dataOutObj.nBaud)
737 processingHeaderSize += codesize
738
739 if self.processingHeaderObj.nWindows != 0:
740 self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
741 self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
742 self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
743 self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
744 processingHeaderSize += 12
745
746 self.processingHeaderObj.size = processingHeaderSize
747
748
@@ -1,547 +1,584
1 1 '''
2 2 Created on 23/01/2012
3 3
4 4 @author $Author: dsuarez $
5 5 @version $Id: VoltageIO.py 110 2012-07-19 15:18:18Z dsuarez $
6 6 '''
7 7
8 8 import os, sys
9 9 import numpy
10 10 import glob
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 path = os.path.split(os.getcwd())[0]
15 15 sys.path.append(path)
16 16
17 17 from JROHeader import *
18 18 from JRODataIO import JRODataReader
19 19 from JRODataIO import JRODataWriter
20 20
21 21 from Data.Voltage import Voltage
22 22
23 23 class VoltageReader(JRODataReader):
24 24 """
25 25 This class reads voltage data from files in rawdata format (.r). The data is
26 26 always read in blocks. The data read (a 3-dimensional array:
27 27 profiles*heights*channels) is stored in the "buffer" variable.
28 28 
29 29 profiles * heights * channels
30 30 
31 31 This class contains instances (objects) of the BasicHeader, SystemHeader,
32 32 RadarControllerHeader and Voltage classes. The first three are used to store the
33 33 data header information (metadata), and the fourth (Voltage) to obtain and store a
34 34 profile of data from the "buffer" each time the "getData" method is called.
35 35
36 36 Example:
37 37
38 38 dpath = "/home/myuser/data"
39 39
40 40 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
41 41
42 42 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
43 43
44 44 readerObj = VoltageReader()
45 45
46 46 readerObj.setup(dpath, startTime, endTime)
47 47
48 48 while(True):
49 49
50 50 #to get one profile
51 51 profile = readerObj.getData()
52 52
53 53 #print the profile
54 54 print profile
55 55
56 56 #If you want to see all datablock
57 57 print readerObj.datablock
58 58
59 59 if readerObj.flagNoMoreFiles:
60 60 break
61 61
62 62 """
63 63
64 64 ext = ".r"
65 65
66 66 optchar = "D"
67 67 dataOutObj = None
68 68
69 69
70 70 def __init__(self, dataOutObj=None):
71 71 """
72 72 Initializer of the VoltageReader class for reading voltage data.
73 73
74 74 Input:
75 75 dataOutObj : Voltage class object. This object will be used to
76 76 store a data profile every time one is requested
77 77 (getData). The profile will be taken from the data buffer;
78 78 if the buffer is empty, a new data block will be read
79 79 from the file.
80 80 If this parameter is not given, one will be created internally.
81 81
82 82 Affected:
83 83 self.dataOutObj
84 84
85 85 Return:
86 86 None
87 87 """
88 88
89 89 self.datablock = None
90 90
91 91 self.utc = 0
92 92
93 93 self.ext = ".r"
94 94
95 95 self.optchar = "D"
96 96
97 97 self.basicHeaderObj = BasicHeader()
98 98
99 99 self.systemHeaderObj = SystemHeader()
100 100
101 101 self.radarControllerHeaderObj = RadarControllerHeader()
102 102
103 103 self.processingHeaderObj = ProcessingHeader()
104 104
105 105 self.online = 0
106 106
107 107 self.fp = None
108 108
109 109 self.idFile = None
110 110
111 111 self.dtype = None
112 112
113 113 self.fileSizeByHeader = None
114 114
115 115 self.filenameList = []
116 116
117 117 self.filename = None
118 118
119 119 self.fileSize = None
120 120
121 121 self.firstHeaderSize = 0
122 122
123 123 self.basicHeaderSize = 24
124 124
125 125 self.pathList = []
126 126
127 127 self.filenameList = []
128 128
129 129 self.lastUTTime = 0
130 130
131 131 self.maxTimeStep = 30
132 132
133 133 self.flagNoMoreFiles = 0
134 134
135 135 self.set = 0
136 136
137 137 self.path = None
138 138
139 139 self.profileIndex = 9999
140 140
141 141 self.delay = 3 #seconds
142 142
143 143 self.nTries = 3 #quantity tries
144 144
145 145 self.nFiles = 3 #number of files for searching
146 146
147 147 self.nReadBlocks = 0
148 148
149 149 self.flagIsNewFile = 1
150 150
151 151 self.ippSeconds = 0
152 152
153 153 self.flagTimeBlock = 0
154 154
155 155 self.flagIsNewBlock = 0
156 156
157 157 self.nTotalBlocks = 0
158 158
159 159 self.blocksize = 0
160 160
161 161 def createObjByDefault(self):
162 162
163 163 dataObj = Voltage()
164 164
165 165 return dataObj
166 166
167 167 def __hasNotDataInBuffer(self):
168 168 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
169 169 return 1
170 170 return 0
171 171
172 172
173 173 def getBlockDimension(self):
174 174 """
175 175 Gets the number of points to read for each data block
176 176
177 177 Affected:
178 178 self.blocksize
179 179
180 180 Return:
181 181 None
182 182 """
183 183 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
184 184 self.blocksize = pts2read
185 185
186 186
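The voltage block size, in points, is a single product. A tiny worked example with hypothetical header values (assumed numbers, not taken from a real .r file):

    profilesPerBlock = 40     # processingHeaderObj.profilesPerBlock
    nHeights = 200            # processingHeaderObj.nHeights
    nChannels = 4             # systemHeaderObj.nChannels

    blocksize = profilesPerBlock * nHeights * nChannels
    print(blocksize)          # 32000 complex samples per block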
187 187 def readBlock(self):
188 188 """
189 189 readBlock reads the data block from the current position of the file pointer
190 190 (self.fp) and updates all parameters related to the data block
191 191 (metadata + data). The data read is stored in the buffer and the
192 192 buffer counter is reset to 0
193 193
194 194 Inputs:
195 195 None
196 196
197 197 Return:
198 198 None
199 199
200 200 Affected:
201 201 self.profileIndex
202 202 self.datablock
203 203 self.flagIsNewFile
204 204 self.flagIsNewBlock
205 205 self.nTotalBlocks
206 206
207 207 Exceptions:
208 208 Si un bloque leido no es un bloque valido
209 209 """
210 210
211 211 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
212 212
213 213 try:
214 214 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
215 215 except:
216 216 print "The read block (%3d) has not enough data" %self.nReadBlocks
217 217 return 0
218 218
219 219 junk = numpy.transpose(junk, (2,0,1))
220 220 self.datablock = junk['real'] + junk['imag']*1j
221 221
222 222 self.profileIndex = 0
223 223
224 224 self.flagIsNewFile = 0
225 225 self.flagIsNewBlock = 1
226 226
227 227 self.nTotalBlocks += 1
228 228 self.nReadBlocks += 1
229 229
230 230 return 1
231 231
232 232
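readBlock decodes the raw block by reshaping a flat structured array of ('real', 'imag') pairs, transposing it and combining the fields into a complex datablock. A self-contained sketch on synthetic data; the sizes and the int16 dtype are assumptions.

    import numpy

    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
    nProfiles, nHeights, nChannels = 4, 3, 2

    junk = numpy.zeros(nProfiles * nHeights * nChannels, dtype)
    junk['real'] = numpy.arange(junk.size)
    junk['imag'] = 1

    junk = junk.reshape((nProfiles, nHeights, nChannels))   # file order
    junk = numpy.transpose(junk, (2, 0, 1))                 # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j

    print(datablock.shape)    # (2, 4, 3)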
233 233 def getData(self):
234 234 """
235 235 getData takes one data unit from the read buffer and copies it into the "Voltage"
236 236 class object, together with all of its associated parameters (metadata). When there is no data
237 237 left in the read buffer, a new data block is read using "readNextBlock"
238 238 
239 239 It also increments the buffer counter by 1.
240 240
241 241 Return:
242 242 data : returns a profile of voltages (heights * channels) copied from the
243 243 buffer. If there are no more files to read, it returns None.
244 244
249 249 Affected:
250 250 self.dataOutObj
251 251 self.profileIndex
252 252 self.flagTimeBlock
253 253 self.flagIsNewBlock
254 254 """
255 255 if self.flagNoMoreFiles: return 0
256 256
257 257 self.flagTimeBlock = 0
258 258 self.flagIsNewBlock = 0
259 259
260 260 if self.__hasNotDataInBuffer():
261 261
262 262 if not( self.readNextBlock() ):
263 263 return 0
264 264
265 265 # self.updateDataHeader()
266 266
267 267 if self.flagNoMoreFiles == 1:
268 268 print 'Process finished'
269 269 return 0
270 270
271 271 #data is a 3-dimensional numpy array (profiles, heights and channels)
272 272
273 273 if self.datablock is None:
274 274 self.dataOutObj.flagNoData = True
275 275 return 0
276 276
277 277 self.dataOutObj.data = self.datablock[:,self.profileIndex,:]
278 278
279 279 self.dataOutObj.dtype = self.dtype
280 280
281 281 self.dataOutObj.nChannels = self.systemHeaderObj.nChannels
282 282
283 283 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
284 284
285 285 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
286 286
287 287 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
288 288
289 289 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
290 290
291 291 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
292 292
293 293 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
294 294
295 295 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
296 296
297 297 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.profileIndex * self.ippSeconds
298 298
299 299 self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt
300 300
301 self.dataOutObj.flagShiftFFT = False
302
303 if self.processingHeaderObj.code is not None:
304 self.dataOutObj.nCode = self.processingHeaderObj.nCode
305
306 self.dataOutObj.nBaud = self.processingHeaderObj.nBaud
307
308 self.dataOutObj.code = self.processingHeaderObj.code
309
301 310 self.profileIndex += 1
302 311
303 312 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
304 313
305 314 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
306 315
307 316 self.dataOutObj.flagNoData = False
308 317
309 318 return 1
310 319
311 320
312 321 class VoltageWriter(JRODataWriter):
313 322 """
314 323 This class writes voltage data to processed files (.r). The data is
315 324 always written in blocks.
316 325 """
317 326
318 327 ext = ".r"
319 328
320 329 optchar = "D"
321 330
322 331 shapeBuffer = None
323 332
324 333
325 334 def __init__(self, dataOutObj=None):
326 335 """
327 336 Initializer of the VoltageWriter class for writing voltage data.
328 337
329 338 Affected:
330 339 self.dataOutObj
331 340
332 341 Return: None
333 342 """
334 343 if dataOutObj == None:
335 344 dataOutObj = Voltage()
336 345
337 346 if not( isinstance(dataOutObj, Voltage) ):
338 347 raise ValueError, "in VoltageWriter, dataOutObj must be a Voltage class object"
339 348
340 349 self.dataOutObj = dataOutObj
350
351 self.nTotalBlocks = 0
341 352
353 self.profileIndex = 0
342 354
343 355 def hasAllDataInBuffer(self):
344 356 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
345 357 return 1
346 358 return 0
347 359
348 360
349 361 def setBlockDimension(self):
350 362 """
351 363 Gets the dimensional shapes of the data sub-blocks that make up a block
352 364 
353 365 Affected:
354 366 self.shapeBuffer
355 367 self.datablock
357 369
358 370 Return: None
359 371 """
360 372 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
361 373 self.processingHeaderObj.nHeights,
362 374 self.systemHeaderObj.nChannels)
363 375
364 376 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
365 377 self.processingHeaderObj.profilesPerBlock,
366 378 self.processingHeaderObj.nHeights),
367 379 dtype=numpy.dtype('complex'))
368 380
369 381
370 382 def writeBlock(self):
371 383 """
372 384 Writes the buffer to the designated file
373 385
374 386 Affected:
375 387 self.profileIndex
376 388 self.flagIsNewFile
377 389 self.flagIsNewBlock
378 390 self.nTotalBlocks
379 391 self.blockIndex
380 392
381 393 Return: None
382 394 """
383 395 data = numpy.zeros( self.shapeBuffer, self.dtype )
384 396
385 397 junk = numpy.transpose(self.datablock, (1,2,0))
386 398
387 399 data['real'] = junk.real
388 400 data['imag'] = junk.imag
389 401
390 402 data = data.reshape( (-1) )
391 403
392 404 data.tofile( self.fp )
393 405
394 406 self.datablock.fill(0)
395 407
396 408 self.profileIndex = 0
397 409 self.flagIsNewFile = 0
398 410 self.flagIsNewBlock = 1
399 411
400 412 self.blockIndex += 1
401 413 self.nTotalBlocks += 1
402 414
403 415 def putData(self):
404 416 """
405 417 Sets up a data block and then writes it to a file
406 418
407 419 Affected:
408 420 self.flagIsNewBlock
409 421 self.profileIndex
410 422
411 423 Return:
412 424 0 : if there is no data or no more files can be written
413 425 1 : if a data block was written to a file
414 426 """
415 427 self.flagIsNewBlock = 0
416 428
417 429 if self.dataOutObj.flagNoData:
418 430 return 0
419 431
420 432 if self.dataOutObj.flagTimeBlock:
421 433
422 434 self.datablock.fill(0)
423 435 self.profileIndex = 0
424 436 self.setNextFile()
425 437
438 if self.profileIndex == 0:
439 self.getBasicHeader()
440
426 441 self.datablock[:,self.profileIndex,:] = self.dataOutObj.data
427 442
428 443 self.profileIndex += 1
429 444
430 445 if self.hasAllDataInBuffer():
431 446 #if self.flagIsNewFile:
432 self.getDataHeader()
433 447 self.writeNextBlock()
448 # self.getDataHeader()
434 449
435 450 if self.flagNoMoreFiles:
436 451 #print 'Process finished'
437 452 return 0
438 453
439 454 return 1
440 455
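The voltage writer is also driven through the processing chain; the sketch below follows the call sequence shown (and partly commented out) in the EW_Drifts test script of this changeset. Paths, dates and block sizes are placeholders.

    import datetime
    from IO.VoltageIO import VoltageReader
    from Processing.VoltageProcessor import VoltageProcessor

    readerObj = VoltageReader()
    voltObj1 = readerObj.setup(path = "/path/to/rawdata",
                               startDate = datetime.date(2011, 11, 28),
                               endDate = datetime.date(2011, 11, 30),
                               startTime = datetime.time(0, 0, 0),
                               endTime = datetime.time(23, 59, 59),
                               expLabel = '',
                               online = 0)

    voltObjProc = VoltageProcessor()
    voltObjProc.setup(dataInObj = voltObj1)

    while True:
        readerObj.getData()                                # one profile per call
        voltObjProc.init()
        voltObjProc.writeData("/path/to/testWR", 40, 50)   # drives VoltageWriter.putData() profile by profile
        if readerObj.flagNoMoreFiles:
            break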
441 def __getProcessFlag(self):
456 def __getProcessFlags(self):
442 457
443 458 processFlags = 0
444 459
445 460 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
446 461 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
447 462 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
448 463 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
449 464 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
450 465 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
451 466
452 467 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
453 468
454 469
455 470
456 471 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
457 472 PROCFLAG.DATATYPE_SHORT,
458 473 PROCFLAG.DATATYPE_LONG,
459 474 PROCFLAG.DATATYPE_INT64,
460 475 PROCFLAG.DATATYPE_FLOAT,
461 476 PROCFLAG.DATATYPE_DOUBLE]
462 477
463 478
464 479 for index in range(len(dtypeList)):
465 if dtypeList == self.dataOutObj.dtype:
480 if self.dataOutObj.dtype == dtypeList[index]:
466 481 dtypeValue = datatypeValueList[index]
467 482 break
468 483
469 484 processFlags += dtypeValue
470 485
471 486 if self.dataOutObj.flagDecodeData:
472 487 processFlags += PROCFLAG.DECODE_DATA
473 488
474 489 if self.dataOutObj.flagDeflipData:
475 490 processFlags += PROCFLAG.DEFLIP_DATA
476 491
492 if self.dataOutObj.code is not None:
493 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
494
477 495 if self.dataOutObj.nCohInt > 1:
478 496 processFlags += PROCFLAG.COHERENT_INTEGRATION
497
498 return processFlags
479 499
480 500
481 501 def __getBlockSize(self):
482
502 '''
503 This method determines the number of bytes in a Voltage data block
504 '''
483 505
484 506 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
485 507 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
486 508 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
487 509 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
488 510 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
489 511 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
490 512
491 513 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
492 datatypeValueList
514 datatypeValueList = [1,2,4,8,4,8]
493 515 for index in range(len(dtypeList)):
494 if dtypeList == self.dataOutObj.dtype:
495 datatypeValue = index
516 if self.dataOutObj.dtype == dtypeList[index]:
517 datatypeValue = datatypeValueList[index]
496 518 break
497 519
498 self.dataOutObj.nHeights * self.dataOutObj.nChannels * self.dataOutObj.nProfiles * datatypeValue * 2
520 blocksize = int(self.dataOutObj.nHeights * self.dataOutObj.nChannels * self.dataOutObj.nProfiles * datatypeValue * 2)
521
522 return blocksize
523
524
525 def getBasicHeader(self):
526 self.basicHeaderObj.size = self.basicHeaderSize #bytes
527 self.basicHeaderObj.version = self.versionFile
528 self.basicHeaderObj.dataBlock = self.nTotalBlocks
499 529
530 utc = numpy.floor(self.dataOutObj.dataUtcTime)
531 milisecond = (self.dataOutObj.dataUtcTime - utc)* 1000.0
500 532
533 self.basicHeaderObj.utc = utc
534 self.basicHeaderObj.miliSecond = milisecond
535 self.basicHeaderObj.timeZone = 0
536 self.basicHeaderObj.dstFlag = 0
537 self.basicHeaderObj.errorCount = 0
501 538
502 539 def getDataHeader(self):
503 540
504 541 """
505 542 Gets a copy of the First Header
506 543
507 544 Affected:
508 545 self.systemHeaderObj
509 546 self.radarControllerHeaderObj
510 547 self.dtype
511 548
512 549 Return:
513 550 None
514 551 """
515 552
516 # CALCULAR PARAMETROS
517
518 553 self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
519 554 self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()
520 555
521 self.basicHeaderObj.size = self.basicHeaderSize #bytes
522 self.basicHeaderObj.version = self.versionFile
523 self.basicHeaderObj.dataBlock = self.nTotalBlocks
524 self.basicHeaderObj.utc = self.dataOutObj.dataUtcTime
525 self.basicHeaderObj.miliSecond = 0
526 self.basicHeaderObj.timeZone = 0
527 self.basicHeaderObj.dstFlag = 0
528 self.basicHeaderObj.errorCount = 0
556 self.getBasicHeader()
529 557
530 staticProcessingHeaderSize = 40 # bytes
531 dynProcessingHeaderSize = 0 # bytes
532
533 self.processingHeaderObj.size = staticProcessingHeaderSize + dynProcessingHeaderSize
558 processingHeaderSize = 40 # bytes
534 559 self.processingHeaderObj.dtype = 0 # Voltage
535 # self.processingHeaderObj.dtype = self.dataOutObj.dtype
536 self.processingHeaderObj.blockSize = 0 # debe calcular el size en bytes del bloque de datos:
537 # usar funcion getBlockSize
538 self.processingHeaderObj.profilesPerBlock = self.dataOutObj.nProfiles
560 self.processingHeaderObj.blockSize = self.__getBlockSize()
561 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
539 562 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
540 self.processingHeaderObj.numWindows = 1
563 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows
541 564 self.processingHeaderObj.processFlags = self.__getProcessFlags()
542 self.processingHeaderObj.coherentInt = self.dataOutObj.nCohInt
543 self.processingHeaderObj.incoherentInt = 1 # Cuando la data de origen sea de tipo Voltage
544 self.processingHeaderObj.totalSpectra = 0
545
546 self.dtype = self.dataOutObj.dtype
547
565 self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt
566 self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
567 self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type
568
569 if self.dataOutObj.code is not None:
570 self.processingHeaderObj.code = self.dataOutObj.code
571 self.processingHeaderObj.nCode = self.dataOutObj.nCode
572 self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
573 codesize = int(8 + 4 * self.dataOutObj.nCode * self.dataOutObj.nBaud)
574 processingHeaderSize += codesize
575
576 if self.processingHeaderObj.nWindows != 0:
577 self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
578 self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
579 self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
580 self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
581 processingHeaderSize += 12
582
583 self.processingHeaderObj.size = processingHeaderSize
584
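A worked example of the processing header size computed above, assuming a hypothetical code with nCode=2 and nBaud=16 (illustrative values only):

    processingHeaderSize = 40                    # fixed part, bytes
    codesize = int(8 + 4 * 2 * 16)               # 8 + 4*nCode*nBaud = 136
    processingHeaderSize += codesize             # 176
    processingHeaderSize += 12                   # one sampling window block
    print(processingHeaderSize)                  # 188 bytes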
@@ -1,72 +1,81
1 1
2 2 import os, sys
3 3 import time, datetime
4 4
5 5 path = os.path.split(os.getcwd())[0]
6 6 sys.path.append(path)
7 7
8 8 from Data.Voltage import Voltage
9 9 from IO.VoltageIO import *
10 10
11 11 from Processing.VoltageProcessor import *
12 12
13 13
14 14
15 15 class TestSChain():
16 16
17 17 def __init__(self):
18 18 self.setValues()
19 19 self.createObjects()
20 20 self.testSChain()
21 21
22 22 def setValues(self):
23 23 self.path = "/Users/danielangelsuarezmunoz/Data/EW_Drifts"
24 self.startDate = datetime.date(2011,11,28)
25 self.endDate = datetime.date(2011,11,30)
24 26
27 # self.path = "/Users/danielangelsuarezmunoz/Data/Imaging_rawdata"
28 # self.startDate = datetime.date(2011,10,4)
29 # self.endDate = datetime.date(2011,10,4)
30
31 # Testing the files written by Signal Chain
32 self.path = "/Users/danielangelsuarezmunoz/Data/testWR"
25 33 self.startDate = datetime.date(2011,11,28)
26 34 self.endDate = datetime.date(2011,11,30)
27 35
28 36 self.startTime = datetime.time(0,0,0)
29 37 self.endTime = datetime.time(23,59,59)
30 38
31 self.wrpath = "/Users/jro/Documents/RadarData/wr_data"
39 self.wrpath = "/Users/danielangelsuarezmunoz/Data/testWR"
32 40 self.profilesPerBlock = 40
33 self.blocksPerFile = 50
41 self.blocksPerFile = 50
34 42
35 43 def createObjects(self):
36 44
37 45 self.readerObj = VoltageReader()
38 46
39 47 self.voltObj1 = self.readerObj.setup(
40 48 path = self.path,
41 49 startDate = self.startDate,
42 50 endDate = self.endDate,
43 51 startTime = self.startTime,
44 52 endTime = self.endTime,
45 53 expLabel = '',
46 54 online = 0)
47 55
48 56 self.voltObjProc = VoltageProcessor()
49 57
50 58 self.voltObj2 = self.voltObjProc.setup(dataInObj = self.voltObj1)
51 59
52 60 def testSChain(self):
53 61
54 62 ini = time.time()
55 63
56 64 while(True):
57 65 self.readerObj.getData()
58 66
59 self.voltObjProc.init()
60
61 self.voltObjProc.writeData(self.wrpath,self.profilesPerBlock,self.blocksPerFile)
67 # self.voltObjProc.init()
68 #
69 # self.voltObjProc.writeData(self.wrpath,self.profilesPerBlock,self.blocksPerFile)
62 70
63 71 if self.readerObj.flagNoMoreFiles:
64 72 break
65 73
66 74 if self.readerObj.flagIsNewBlock:
67 print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks,
68 datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),)
75 # print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),)
76 print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks,
77 datetime.datetime.utcfromtimestamp(self.readerObj.basicHeaderObj.utc + self.readerObj.basicHeaderObj.miliSecond/1000.0),)
69 78
70 79
71 80 if __name__ == '__main__':
72 81 TestSChain()
@@ -1,68 +1,80
1 1
2 2 import os, sys
3 3 import time, datetime
4 4
5 5 path = os.path.split(os.getcwd())[0]
6 6 sys.path.append(path)
7 7
8 from Data.Voltage import Voltage
8
9 9 from Data.Spectra import Spectra
10 from IO.VoltageIO import *
11 10 from IO.SpectraIO import *
12 from Processing.VoltageProcessor import *
11 from Processing.SpectraProcessor import *
13 12
14 13
15 14
16 15 class TestSChain:
17 16
18 17 def __init__(self):
19 18 self.setValues()
20 19 self.createObjects()
21 20 self.testSChain()
22 21
23 22 def setValues(self):
24 23 self.path = "/Users/jro/Documents/RadarData/MST_ISR/MST"
25 24 # self.path = "/home/roj-idl71/Data/RAWDATA/IMAGING"
26 25 self.path = "/Users/danielangelsuarezmunoz/Data/EW_Drifts"
27 26 self.path = "/Users/danielangelsuarezmunoz/Data/IMAGING"
28 27
29 self.wrpath = "/Users/jro/Documents/RadarData/wr_data"
30
31 28 self.startDate = datetime.date(2012,3,1)
32 29 self.endDate = datetime.date(2012,3,30)
33 30
34 31 self.startTime = datetime.time(0,0,0)
35 32 self.endTime = datetime.time(14,1,1)
33
34 # parameters for writing Pdata
35 self.wrpath = "/Users/danielangelsuarezmunoz/Data/testWR_pdata"
36 self.profilesPerBlock = 16
37 self.blocksPerFile = 5
38 # self.pairList = [(0,1),(0,2)]
39
36 40
37 41 def createObjects(self):
38 42
39 43 self.readerObj = SpectraReader()
40 44
41 self.voltObj1 = self.readerObj.setup(
45 self.specObj1 = self.readerObj.setup(
42 46 path = self.path,
43 47 startDate = self.startDate,
44 48 endDate = self.endDate,
45 49 startTime = self.startTime,
46 50 endTime = self.endTime,
47 51 expLabel = '',
48 52 online = 0)
53 # new lines
54 self.specObjProc = SpectraProcessor()
55
56 self.specObj2 = self.specObjProc.setup(dataInObj = self.specObj1)
49 57
50 58
51 59
52 60 def testSChain(self):
53 61
54 62 ini = time.time()
55 63
56 64 while(True):
57 65 self.readerObj.getData()
66
67 self.specObjProc.init()
68
69 self.specObjProc.writeData(self.wrpath,self.profilesPerBlock,self.blocksPerFile)
58 70
59 71 if self.readerObj.flagNoMoreFiles:
60 72 break
61 73
62 74 if self.readerObj.flagIsNewBlock:
63 75 print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks,
64 datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),)
76 datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc))
65 77
66 78
67 79 if __name__ == '__main__':
68 80 TestSChain()