reviewed merge
José Chávez -
r1074:238914c187b3 merge
@@ -0,0 +1,404
1 '''
2
3 $Author: murco $
4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 '''
6 import sys
7 import numpy
8 import copy
9 import datetime
10
11
12 SPEED_OF_LIGHT = 299792458
13 #SPEED_OF_LIGHT = 3e8
14
15 FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
16 ('FileMgcNumber','<u4'), #0x23020100
17 ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more)
18 ('RadarUnitId','<u4'),
19 ('SiteName','S32'), #Null terminated
20 ])
21
22 RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes
23 ('RecMgcNumber','<u4'), #0x23030001
24 ('RecCounter','<u4'), #Record counter(0,1, ...)
25 ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record
26 ('Off2StartData','<u4'), #Offset to start of data from start of this record
27 ('EpTimeStamp','<i4'), #Epoch time stamp of start of acquisition (seconds)
28 ('msCompTimeStamp','<u4'), #Millisecond component of time stamp (0,...,999)
29 ('ExpTagName','S32'), #Experiment tag name (null terminated)
30 ('ExpComment','S32'), #Experiment comment (null terminated)
31 ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North)
32 ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East)
33 ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
34 ('TransmitFrec','<u4'), #Transmit frequency (Hz)
35 ('ReceiveFrec','<u4'), #Receive frequency
36 ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz)
37 ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2")
38 ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3)
39 ('nModesInUse','<u4'), #Number of modes in use (1 or 2)
40 ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1)
41 ('DualModeRange','<u4'), #Dual Mode range correction for these data (m)
42 ('nDigChannels','<u4'), #Number of digital channels acquired (2*N)
43 ('SampResolution','<u4'), #Sampling resolution (meters)
44 ('nRangeGatesSamp','<u4'), #Number of range gates sampled
45 ('StartRangeSamp','<u4'), #Start range of sampling (meters)
46 ('PRFhz','<u4'), #PRF (Hz)
47 ('Integrations','<u4'), #Integrations
48 ('nDataPointsTrsf','<u4'), #Number of data points transformed
49 ('nReceiveBeams','<u4'), #Number of receive beams stored in file (1 or N)
50 ('nSpectAverages','<u4'), #Number of spectral averages
51 ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window)
52 ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North)
53 ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical)
54 ('AntennaCoord','<f4',(6,)), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs (N=3 assumed, 24 bytes)
55 ('RecPhaseCalibr','<f4',(3,)), #Receiver phase calibration (degrees) - N values (12 bytes)
56 ('RecAmpCalibr','<f4',(3,)), #Receiver amplitude calibration (ratio relative to receiver one) - N values (12 bytes)
57 ('ReceiverGaindB','<u4',(3,)), #Receiver gains in dB - N values (12 bytes)
58 ])
59
60
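#A small sanity-check sketch (assumed usage, not part of the original module): the total byte width
#of a structured dtype is available as .itemsize and can be compared against the sizes documented
#above (record header = 180 + 20*N bytes, with N = 3 receiver channels assumed here):
#
# FILE_STRUCTURE.itemsize    # 44 bytes with this layout; the 48-byte header in jroIO_bltr also carries an OffsetStartHeader field
# RECORD_STRUCTURE.itemsize  # 240 bytes == 180 + 20*3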
61 class Header(object):
62
63 def __init__(self):
64 raise NotImplementedError
65
66
67 def read(self):
68
69 raise NotImplementedError
70
71 def write(self):
72
73 raise NotImplementedError
74
75 def printInfo(self):
76
77 message = "#"*50 + "\n"
78 message += self.__class__.__name__.upper() + "\n"
79 message += "#"*50 + "\n"
80
81 keyList = self.__dict__.keys()
82 keyList.sort()
83
84 for key in keyList:
85 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
86
87 if "size" not in keyList:
88 attr = getattr(self, "size", None)
89
90 if attr:
91 message += "%s = %s" %("size", attr) + "\n"
92
93 print message
94
95 class FileHeader(Header):
96
97 FileMgcNumber= None
98 nFDTdataRecors=None #No Of FDT data records in this file (0 or more)
99 RadarUnitId= None
100 SiteName= None
101
102 #__LOCALTIME = None
103
104 def __init__(self, useLocalTime=True):
105
106 self.FileMgcNumber= 0 #0x23020100
107 self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more)
108 self.RadarUnitId= 0
109 self.SiteName= ""
110 self.size = 48
111
112 #self.useLocalTime = useLocalTime
113
114 def read(self, fp):
115
116 try:
117 header = numpy.fromfile(fp, FILE_STRUCTURE,1)
118 ''' numpy.fromfile(file, dtype, count, sep='')
119 file : file or str
120 Open file object or filename.
121
122 dtype : data-type
123 Data type of the returned array. For binary files, it is used to determine
124 the size and byte-order of the items in the file.
125
126 count : int
127 Number of items to read. -1 means all items (i.e., the complete file).
128
129 sep : str
130 Separator between items if file is a text file. Empty (“”) separator means
131 the file should be treated as binary. Spaces (” ”) in the separator match zero
132 or more whitespace characters. A separator consisting only of spaces must match
133 at least one whitespace.
134
135 '''
136
137 except Exception, e:
138 print "FileHeader: "
139 print str(e)
140 return 0
141
142 self.FileMgcNumber= int(header['FileMgcNumber'][0])
143 self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more)
144 self.RadarUnitId= int(header['RadarUnitId'][0])
145 self.SiteName= str(header['SiteName'][0])
146
147
148 if self.size <48:
149 return 0
150
151 return 1
152
153 def write(self, fp):
154
155 headerTuple = (self.FileMgcNumber,
156 self.nFDTdataRecors,
157 self.RadarUnitId,
158 self.SiteName,
159 self.size)
160
161
162 header = numpy.array(headerTuple, FILE_STRUCTURE)
163 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
164 header.tofile(fp)
165 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
166
167 fid : file or str
168 An open file object, or a string containing a filename.
169
170 sep : str
171 Separator between array items for text output. If “” (empty), a binary file is written,
172 equivalent to file.write(a.tobytes()).
173
174 format : str
175 Format string for text file output. Each entry in the array is formatted to text by
176 first converting it to the closest Python type, and then using “format” % item.
177
178 '''
179
180 return 1
181
182
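#Hedged usage sketch ('myfile.fdt' is a placeholder, not a path referenced by this module): a
#FileHeader is filled from an open binary file and can then be inspected with printInfo().
#
# fp = open('myfile.fdt', 'rb')
# fileheader = FileHeader()
# if fileheader.read(fp):   #read() returns 1 on success and 0 when the header is too short
#     fileheader.printInfo()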
183 class RecordHeader(Header):
184
185 RecMgcNumber=None #0x23030001
186 RecCounter= None
187 Off2StartNxtRec= None
188 EpTimeStamp= None
189 msCompTimeStamp= None
190 ExpTagName= None
191 ExpComment=None
192 SiteLatDegrees=None
193 SiteLongDegrees= None
194 RTCgpsStatus= None
195 TransmitFrec= None
196 ReceiveFrec= None
197 FirstOsciFrec= None
198 Polarisation= None
199 ReceiverFiltSett= None
200 nModesInUse= None
201 DualModeIndex= None
202 DualModeRange= None
203 nDigChannels= None
204 SampResolution= None
205 nRangeGatesSamp= None
206 StartRangeSamp= None
207 PRFhz= None
208 Integrations= None
209 nDataPointsTrsf= None
210 nReceiveBeams= None
211 nSpectAverages= None
212 FFTwindowingInd= None
213 BeamAngleAzim= None
214 BeamAngleZen= None
215 AntennaCoord= None
216 RecPhaseCalibr= None
217 RecAmpCalibr= None
218 ReceiverGaindB= None
219
220 '''size = None
221 nSamples = None
222 nProfiles = None
223 nChannels = None
224 adcResolution = None
225 pciDioBusWidth = None'''
226
227 def __init__(self, RecMgcNumber=None, RecCounter= 0, Off2StartNxtRec= 0,
228 EpTimeStamp= 0, msCompTimeStamp= 0, ExpTagName= None,
229 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees= 0,
230 RTCgpsStatus= 0, TransmitFrec= 0, ReceiveFrec= 0,
231 FirstOsciFrec= 0, Polarisation= 0, ReceiverFiltSett= 0,
232 nModesInUse= 0, DualModeIndex= 0, DualModeRange= 0,
233 nDigChannels= 0, SampResolution= 0, nRangeGatesSamp= 0,
234 StartRangeSamp= 0, PRFhz= 0, Integrations= 0,
235 nDataPointsTrsf= 0, nReceiveBeams= 0, nSpectAverages= 0,
236 FFTwindowingInd= 0, BeamAngleAzim= 0, BeamAngleZen= 0,
237 AntennaCoord= 0, RecPhaseCalibr= 0, RecAmpCalibr= 0,
238 ReceiverGaindB= 0):
239
240 self.RecMgcNumber = RecMgcNumber #0x23030001
241 self.RecCounter = RecCounter
242 self.Off2StartNxtRec = Off2StartNxtRec
243 self.EpTimeStamp = EpTimeStamp
244 self.msCompTimeStamp = msCompTimeStamp
245 self.ExpTagName = ExpTagName
246 self.ExpComment = ExpComment
247 self.SiteLatDegrees = SiteLatDegrees
248 self.SiteLongDegrees = SiteLongDegrees
249 self.RTCgpsStatus = RTCgpsStatus
250 self.TransmitFrec = TransmitFrec
251 self.ReceiveFrec = ReceiveFrec
252 self.FirstOsciFrec = FirstOsciFrec
253 self.Polarisation = Polarisation
254 self.ReceiverFiltSett = ReceiverFiltSett
255 self.nModesInUse = nModesInUse
256 self.DualModeIndex = DualModeIndex
257 self.DualModeRange = DualModeRange
258 self.nDigChannels = nDigChannels
259 self.SampResolution = SampResolution
260 self.nRangeGatesSamp = nRangeGatesSamp
261 self.StartRangeSamp = StartRangeSamp
262 self.PRFhz = PRFhz
263 self.Integrations = Integrations
264 self.nDataPointsTrsf = nDataPointsTrsf
265 self.nReceiveBeams = nReceiveBeams
266 self.nSpectAverages = nSpectAverages
267 self.FFTwindowingInd = FFTwindowingInd
268 self.BeamAngleAzim = BeamAngleAzim
269 self.BeamAngleZen = BeamAngleZen
270 self.AntennaCoord = AntennaCoord
271 self.RecPhaseCalibr = RecPhaseCalibr
272 self.RecAmpCalibr = RecAmpCalibr
273 self.ReceiverGaindB = ReceiverGaindB
274
275
276 def read(self, fp):
277
278 startFp = fp.tell() #The method tell() returns the current position of the file read/write pointer within the file.
279
280 try:
281 header = numpy.fromfile(fp,RECORD_STRUCTURE,1)
282 except Exception, e:
283 print "System Header: " + e
284 return 0
285
286 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001
287 self.RecCounter = header['RecCounter'][0]
288 self.Off2StartNxtRec = header['Off2StartNxtRec'][0]
289 self.EpTimeStamp = header['EpTimeStamp'][0]
290 self.msCompTimeStamp = header['msCompTimeStamp'][0]
291 self.ExpTagName = header['ExpTagName'][0]
292 self.ExpComment = header['ExpComment'][0]
293 self.SiteLatDegrees = header['SiteLatDegrees'][0]
294 self.SiteLongDegrees = header['SiteLongDegrees'][0]
295 self.RTCgpsStatus = header['RTCgpsStatus'][0]
296 self.TransmitFrec = header['TransmitFrec'][0]
297 self.ReceiveFrec = header['ReceiveFrec'][0]
298 self.FirstOsciFrec = header['FirstOsciFrec'][0]
299 self.Polarisation = header['Polarisation'][0]
300 self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
301 self.nModesInUse = header['nModesInUse'][0]
302 self.DualModeIndex = header['DualModeIndex'][0]
303 self.DualModeRange = header['DualModeRange'][0]
304 self.nDigChannels = header['nDigChannels'][0]
305 self.SampResolution = header['SampResolution'][0]
306 self.nRangeGatesSamp = header['nRangeGatesSamp'][0]
307 self.StartRangeSamp = header['StartRangeSamp'][0]
308 self.PRFhz = header['PRFhz'][0]
309 self.Integrations = header['Integrations'][0]
310 self.nDataPointsTrsf = header['nDataPointsTrsf'][0]
311 self.nReceiveBeams = header['nReceiveBeams'][0]
312 self.nSpectAverages = header['nSpectAverages'][0]
313 self.FFTwindowingInd = header['FFTwindowingInd'][0]
314 self.BeamAngleAzim = header['BeamAngleAzim'][0]
315 self.BeamAngleZen = header['BeamAngleZen'][0]
316 self.AntennaCoord = header['AntennaCoord'][0]
317 self.RecPhaseCalibr = header['RecPhaseCalibr'][0]
318 self.RecAmpCalibr = header['RecAmpCalibr'][0]
319 self.ReceiverGaindB = header['ReceiverGaindB'][0]
320
321 self.size = 180+20*3
322
323 endFp = self.size + startFp
324
325 if fp.tell() > endFp:
326 sys.stderr.write("Warning %s: Size value read from Record Header is smaller than it should be\n" %fp.name)
327 return 0
328
329 if fp.tell() < endFp:
330 sys.stderr.write("Warning %s: Size value read from Record Header is greater than it should be\n" %fp.name)
331 return 0
332
333 return 1
334
335 def write(self, fp):
336
337 headerTuple = (self.RecMgcNumber,
338 self.RecCounter,
339 self.Off2StartNxtRec,
340 self.EpTimeStamp,
341 self.msCompTimeStamp,
342 self.ExpTagName,
343 self.ExpComment,
344 self.SiteLatDegrees,
345 self.SiteLongDegrees,
346 self.RTCgpsStatus,
347 self.TransmitFrec,
348 self.ReceiveFrec,
349 self.FirstOsciFrec,
350 self.Polarisation,
351 self.ReceiverFiltSett,
352 self.nModesInUse,
353 self.DualModeIndex,
354 self.DualModeRange,
355 self.nDigChannels,
356 self.SampResolution,
357 self.nRangeGatesSamp,
358 self.StartRangeSamp,
359 self.PRFhz,
360 self.Integrations,
361 self.nDataPointsTrsf,
362 self.nReceiveBeams,
363 self.nSpectAverages,
364 self.FFTwindowingInd,
365 self.BeamAngleAzim,
366 self.BeamAngleZen,
367 self.AntennaCoord,
368 self.RecPhaseCalibr,
369 self.RecAmpCalibr,
370 self.ReceiverGaindB)
371
372 # self.size,self.nSamples,
373 # self.nProfiles,
374 # self.nChannels,
375 # self.adcResolution,
376 # self.pciDioBusWidth
377
378 header = numpy.array(headerTuple,RECORD_STRUCTURE)
379 header.tofile(fp)
380
381 return 1
382
383
384 def get_dtype_index(numpy_dtype):
385
386 index = None
387
388 for i in range(len(NUMPY_DTYPE_LIST)):
389 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
390 index = i
391 break
392
393 return index
394
395 def get_numpy_dtype(index):
396
397 #dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
398
399 return NUMPY_DTYPE_LIST[index]
400
401
402 def get_dtype_width(index):
403
404 return DTYPE_WIDTH[index] No newline at end of file
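#NUMPY_DTYPE_LIST and DTYPE_WIDTH are expected to be defined alongside these helpers; they are not
#part of this file. A purely hypothetical illustration of their shape, following the dtype4 hint in
#the comment above (real/imag pair dtypes and their widths in bytes):
#
# NUMPY_DTYPE_LIST = [numpy.dtype([('real','<i2'),('imag','<i2')]),
#                     numpy.dtype([('real','<f4'),('imag','<f4')])]
# DTYPE_WIDTH = [4, 8]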
@@ -0,0 +1,321
1 import os, sys
2 import glob
3 import fnmatch
4 import datetime
5 import time
6 import re
7 import h5py
8 import numpy
9 import matplotlib.pyplot as plt
10
11 import pylab as plb
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar,exp
14 from scipy import stats
15
16 #from duplicity.path import Path
17 from numpy.ma.core import getdata
18
19 SPEED_OF_LIGHT = 299792458
20 #SPEED_OF_LIGHT = 3e8
21
22 try:
23 from gevent import sleep
24 except ImportError:
25 from time import sleep
26
27 from schainpy.model.data.jrodata import Spectra
28 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
29 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
30 #from schainpy.model.io.jroIO_bltr import BLTRReader
31 from numpy import imag, shape, NaN
32
33
34 startFp = open('/home/erick/Documents/MIRA35C/20160117/20160117_0000.zspc',"rb")
35
36
37 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
38 ('Hname',numpy.str_,32), #Original file name
39 ('Htime',numpy.str_,32), #Date and time when the file was created
40 ('Hoper',numpy.str_,64), #Name of operator who created the file
41 ('Hplace',numpy.str_,128), #Place where the measurements was carried out
42 ('Hdescr',numpy.str_,256), #Description of measurements
43 ('Hdummy',numpy.str_,512), #Reserved space
44 #Main chunk
45 ('Msign','<i4'), #Main chunk signature FZKF or NUIG
46 ('MsizeData','<i4'), #Size of data block main chunk
47 #Processing DSP parameters
48 ('PPARsign','<i4'), #PPAR signature
49 ('PPARsize','<i4'), #PPAR size of block
50 ('PPARprf','<i4'), #Pulse repetition frequency
51 ('PPARpdr','<i4'), #Pulse duration
52 ('PPARsft','<i4'), #FFT length
53 ('PPARavc','<i4'), #Number of spectral (in-coherent) averages
54 ('PPARihp','<i4'), #Number of lowest range gate for moment estimation
55 ('PPARchg','<i4'), #Count for gates for moment estimation
56 ('PPARpol','<i4'), #switch on/off polarimetric measurements. Should be 1.
57 #Service DSP parameters
58 ('SPARatt','<i4'), #STC attenuation on the lowest ranges on/off
59 ('SPARtx','<i4'), #OBSOLETE
60 ('SPARaddGain0','<f4'), #OBSOLETE
61 ('SPARaddGain1','<f4'), #OBSOLETE
62 ('SPARwnd','<i4'), #Debug only. In normal mode it is 0.
63 ('SPARpos','<i4'), #Delay between sync pulse and tx pulse for phase corr, ns
64 ('SPARadd','<i4'), #"add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
65 ('SPARlen','<i4'), #Time for measuring txn pulse phase. OBSOLETE
66 ('SPARcal','<i4'), #OBSOLETE
67 ('SPARnos','<i4'), #OBSOLETE
68 ('SPARof0','<i4'), #detection threshold
69 ('SPARof1','<i4'), #OBSOLETE
70 ('SPARswt','<i4'), #2nd moment estimation threshold
71 ('SPARsum','<i4'), #OBSOLETE
72 ('SPARosc','<i4'), #flag Oscillosgram mode
73 ('SPARtst','<i4'), #OBSOLETE
74 ('SPARcor','<i4'), #OBSOLETE
75 ('SPARofs','<i4'), #OBSOLETE
76 ('SPARhsn','<i4'), #Hildebrand div noise detection on noise gate
77 ('SPARhsa','<f4'), #Hildebrand div noise detection on all gates
78 ('SPARcalibPow_M','<f4'), #OBSOLETE
79 ('SPARcalibSNR_M','<f4'), #OBSOLETE
80 ('SPARcalibPow_S','<f4'), #OBSOLETE
81 ('SPARcalibSNR_S','<f4'), #OBSOLETE
82 ('SPARrawGate1','<i4'), #Lowest range gate for spectra saving Raw_Gate1 >=5
83 ('SPARrawGate2','<i4'), #Number of range gates with atmospheric signal
84 ('SPARraw','<i4'), #flag - IQ or spectra saving on/off
85 ('SPARprc','<i4'),]) #flag - Moment estimation switched on/off
86
87
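#A short note on the read performed further below (a hedged sketch of the mechanics, not new
#behaviour): numpy.fromfile(startFp, FILE_HEADER, 1) returns a one-element structured array, so a
#single field is accessed as, e.g., header['PPARprf'][0]; FILE_HEADER.itemsize bytes are consumed
#from the file in that call.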
88
89 Hname= None
90 Htime= None
91 Hoper= None
92 Hplace= None
93 Hdescr= None
94 Hdummy= None
95 
96 Msign=None
97 MsizeData=None
98 
99 PPARsign=None
100 PPARsize=None
101 PPARprf=None
102 PPARpdr=None
103 PPARsft=None
104 PPARavc=None
105 PPARihp=None
106 PPARchg=None
107 PPARpol=None
108 #Service DSP parameters
109 SPARatt=None
110 SPARtx=None
111 SPARaddGain0=None
112 SPARaddGain1=None
113 SPARwnd=None
114 SPARpos=None
115 SPARadd=None
116 SPARlen=None
117 SPARcal=None
118 SPARnos=None
119 SPARof0=None
120 SPARof1=None
121 SPARswt=None
122 SPARsum=None
123 SPARosc=None
124 SPARtst=None
125 SPARcor=None
126 SPARofs=None
127 SPARhsn=None
128 SPARhsa=None
129 SPARcalibPow_M=None
130 SPARcalibSNR_M=None
131 SPARcalibPow_S=None
132 SPARcalibSNR_S=None
133 SPARrawGate1=None
134 SPARrawGate2=None
135 SPARraw=None
136 SPARprc=None
137
138
139
140 header = numpy.fromfile(startFp, FILE_HEADER,1)
141 ''' numpy.fromfile(file, dtype, count, sep='')
142 file : file or str
143 Open file object or filename.
144
145 dtype : data-type
146 Data type of the returned array. For binary files, it is used to determine
147 the size and byte-order of the items in the file.
148
149 count : int
150 Number of items to read. -1 means all items (i.e., the complete file).
151
152 sep : str
153 Separator between items if file is a text file. Empty ("") separator means
154 the file should be treated as binary. Spaces (" ") in the separator match zero
155 or more whitespace characters. A separator consisting only of spaces must match
156 at least one whitespace.
157
158 '''
159
160 Hname= str(header['Hname'][0])
161 Htime= str(header['Htime'][0])
162 Hoper= str(header['Hoper'][0])
163 Hplace= str(header['Hplace'][0])
164 Hdescr= str(header['Hdescr'][0])
165 Hdummy= str(header['Hdummy'][0])
166
167 Msign=header['Msign'][0]
168 MsizeData=header['MsizeData'][0]
169
170 PPARsign=header['PPARsign'][0]
171 PPARsize=header['PPARsize'][0]
172 PPARprf=header['PPARprf'][0]
173 PPARpdr=header['PPARpdr'][0]
174 PPARsft=header['PPARsft'][0]
175 PPARavc=header['PPARavc'][0]
176 PPARihp=header['PPARihp'][0]
177 PPARchg=header['PPARchg'][0]
178 PPARpol=header['PPARpol'][0]
179 #Service DSP parameters
180 SPARatt=header['SPARatt'][0]
181 SPARtx=header['SPARtx'][0]
182 SPARaddGain0=header['SPARaddGain0'][0]
183 SPARaddGain1=header['SPARaddGain1'][0]
184 SPARwnd=header['SPARwnd'][0]
185 SPARpos=header['SPARpos'][0]
186 SPARadd=header['SPARadd'][0]
187 SPARlen=header['SPARlen'][0]
188 SPARcal=header['SPARcal'][0]
189 SPARnos=header['SPARnos'][0]
190 SPARof0=header['SPARof0'][0]
191 SPARof1=header['SPARof1'][0]
192 SPARswt=header['SPARswt'][0]
193 SPARsum=header['SPARsum'][0]
194 SPARosc=header['SPARosc'][0]
195 SPARtst=header['SPARtst'][0]
196 SPARcor=header['SPARcor'][0]
197 SPARofs=header['SPARofs'][0]
198 SPARhsn=header['SPARhsn'][0]
199 SPARhsa=header['SPARhsa'][0]
200 SPARcalibPow_M=header['SPARcalibPow_M'][0]
201 SPARcalibSNR_M=header['SPARcalibSNR_M'][0]
202 SPARcalibPow_S=header['SPARcalibPow_S'][0]
203 SPARcalibSNR_S=header['SPARcalibSNR_S'][0]
204 SPARrawGate1=header['SPARrawGate1'][0]
205 SPARrawGate2=header['SPARrawGate2'][0]
206 SPARraw=header['SPARraw'][0]
207 SPARprc=header['SPARprc'][0]
208
209
210
211 SRVI_STRUCTURE = numpy.dtype([
212 ('frame_cnt','<u4'),#
213 ('time_t','<u4'), #
214 ('tpow','<f4'), #
215 ('npw1','<f4'), #
216 ('npw2','<f4'), #
217 ('cpw1','<f4'), #
218 ('pcw2','<f4'), #
219 ('ps_err','<u4'), #
220 ('te_err','<u4'), #
221 ('rc_err','<u4'), #
222 ('grs1','<u4'), #
223 ('grs2','<u4'), #
224 ('azipos','<f4'), #
225 ('azivel','<f4'), #
226 ('elvpos','<f4'), #
227 ('elvvel','<f4'), #
228 ('northAngle','<f4'), #
229 ('microsec','<u4'), #
230 ('azisetvel','<f4'), #
231 ('elvsetpos','<f4'), #
232 ('RadarConst','<f4'),]) #
233
234 JUMP_STRUCTURE = numpy.dtype([
235 ('jump','<u1',(140,)),# 140-byte filler ('<u140' is not a valid dtype code)
236 ('SizeOfDataBlock1',numpy.str_,32),#
237 ('jump2','<i4'),# second filler (dtype field names must be unique)
238 ('DataBlockTitleSRVI1',numpy.str_,32),#
239 ('SizeOfSRVI1','<i4'),])#
240
241
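#Hedged sketch of how SRVI_STRUCTURE can be consumed once the file pointer sits at the start of an
#SRVI data block ('srvi' is a placeholder name used only in this sketch):
#
# srvi = numpy.fromfile(startFp, SRVI_STRUCTURE, 1)
# frame_cnt = srvi['frame_cnt'][0]
# RadarConst = srvi['RadarConst'][0]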
242
243 #frame_cnt=0, time_t= 0, tpow=0, npw1=0, npw2=0,
244 #cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
245 #grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
246 #microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0
247
248
249 frame_cnt = 0
250 dwell = 0
251 tpow = 0
252 npw1 = 0
253 npw2 = 0
254 cpw1 = 0
255 pcw2 = 0
256 ps_err = 0
257 te_err = 0
258 rc_err = 0
259 grs1 = 0
260 grs2 = 0
261 azipos = 0
262 azivel = 0
263 elvpos = 0
264 elvvel = 0
265 northAngle = 0
266 microsec = 0
267 azisetvel = 0
268 elvsetpos = 0
269 RadarConst = 0
270
271
272
273 #print fp
274 #startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb") #The method tell() returns the current position of the file read/write pointer within the file.
275 #startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
276 #RecCounter=0
277 #Off2StartNxtRec=811248
278 #print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec
279 #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
280 #startFp.seek(OffRHeader, os.SEEK_SET)
281 #print 'should be 48, RecCounter*811248', OffsetStartHeader, RecCounter, Off2StartNxtRec
282 #print 'Block position: ', OffRHeader
283
284 header = numpy.fromfile(startFp,SRVI_STRUCTURE,1)
285
286 frame_cnt = header['frame_cnt'][0]#
287 time_t = header['time_t'][0] #
288 tpow = header['tpow'][0] #
289 npw1 = header['npw1'][0] #
290 npw2 = header['npw2'][0] #
291 cpw1 = header['cpw1'][0] #
292 pcw2 = header['pcw2'][0] #
293 ps_err = header['ps_err'][0] #
294 te_err = header['te_err'][0] #
295 rc_err = header['rc_err'][0] #
296 grs1 = header['grs1'][0] #
297 grs2 = header['grs2'][0] #
298 azipos = header['azipos'][0] #
299 azivel = header['azivel'][0] #
300 elvpos = header['elvpos'][0] #
301 elvvel = header['elvvel'][0] #
302 northAngle = header['northAngle'][0] #
303 microsec = header['microsec'][0] #
304 azisetvel = header['azisetvel'][0] #
305 elvsetpos = header['elvsetpos'][0] #
306 RadarConst = header['RadarConst'][0] #
307
308
309 #ipp = 0.5*(SPEED_OF_LIGHT/PRFhz)   #inter-pulse period in meters; PRFhz and the record-header fields below are not read in this prototype
310 
311 #RHsize = 180+20*nChannels
312 #Datasize = nProfiles*nChannels*nHeights*2*4
313 #print 'Datasize',Datasize
314 #endFp = OffsetStartHeader + RecCounter*Off2StartNxtRec
315
316 print '=============================================='
317
318 print '=============================================='
319
320
321 No newline at end of file
@@ -0,0 +1,362
1 '''
2 Created on Nov 9, 2016
3
4 @author: roj- LouVD
5 '''
6
7
8 import os
9 import sys
10 import time
11 import glob
12 import datetime
13
14 import numpy
15
16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 from schainpy.model.data.jrodata import Parameters
18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19
20 FILE_HEADER_STRUCTURE = numpy.dtype([
21 ('FMN', '<u4'),
22 ('nrec', '<u4'),
23 ('fr_offset', '<u4'),
24 ('id', '<u4'),
25 ('site', 'u1', (32,))
26 ])
27
28 REC_HEADER_STRUCTURE = numpy.dtype([
29 ('rmn', '<u4'),
30 ('rcounter', '<u4'),
31 ('nr_offset', '<u4'),
32 ('tr_offset', '<u4'),
33 ('time', '<u4'),
34 ('time_msec', '<u4'),
35 ('tag', 'u1', (32,)),
36 ('comments', 'u1', (32,)),
37 ('lat', '<f4'),
38 ('lon', '<f4'),
39 ('gps_status', '<u4'),
40 ('freq', '<u4'),
41 ('freq0', '<u4'),
42 ('nchan', '<u4'),
43 ('delta_r', '<u4'),
44 ('nranges', '<u4'),
45 ('r0', '<u4'),
46 ('prf', '<u4'),
47 ('ncoh', '<u4'),
48 ('npoints', '<u4'),
49 ('polarization', '<i4'),
50 ('rx_filter', '<u4'),
51 ('nmodes', '<u4'),
52 ('dmode_index', '<u4'),
53 ('dmode_rngcorr', '<u4'),
54 ('nrxs', '<u4'),
55 ('acf_length', '<u4'),
56 ('acf_lags', '<u4'),
57 ('sea_to_atmos', '<f4'),
58 ('sea_notch', '<u4'),
59 ('lh_sea', '<u4'),
60 ('hh_sea', '<u4'),
61 ('nbins_sea', '<u4'),
62 ('min_snr', '<f4'),
63 ('min_cc', '<f4'),
64 ('max_time_diff', '<f4')
65 ])
66
67 DATA_STRUCTURE = numpy.dtype([
68 ('range', '<u4'),
69 ('status', '<u4'),
70 ('zonal', '<f4'),
71 ('meridional', '<f4'),
72 ('vertical', '<f4'),
73 ('zonal_a', '<f4'),
74 ('meridional_a', '<f4'),
75 ('corrected_fading', '<f4'), # seconds
76 ('uncorrected_fading', '<f4'), # seconds
77 ('time_diff', '<f4'),
78 ('major_axis', '<f4'),
79 ('axial_ratio', '<f4'),
80 ('orientation', '<f4'),
81 ('sea_power', '<u4'),
82 ('sea_algorithm', '<u4')
83 ])
84
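#Hedged usage sketch ('/path/to/file.sswma' is a placeholder): the fixed-size structures above are
#read one record at a time with numpy.fromfile, exactly as setNextFile() and readHeader() do below.
#
# fp = open('/path/to/file.sswma', 'rb')
# file_header = numpy.fromfile(fp, FILE_HEADER_STRUCTURE, 1)
# nrec = file_header['nrec'][0]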
85 class BLTRParamReader(JRODataReader, ProcessingUnit):
86 '''
87 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR from *.sswma files
88 '''
89
90 ext = '.sswma'
91
92 def __init__(self, **kwargs):
93
94 ProcessingUnit.__init__(self , **kwargs)
95
96 self.dataOut = Parameters()
97 self.counter_records = 0
98 self.flagNoMoreFiles = 0
99 self.isConfig = False
100 self.filename = None
101
102 def setup(self,
103 path=None,
104 startDate=None,
105 endDate=None,
106 ext=None,
107 startTime=datetime.time(0, 0, 0),
108 endTime=datetime.time(23, 59, 59),
109 timezone=0,
110 status_value=0,
111 **kwargs):
112
113 self.path = path
114 self.startTime = startTime
115 self.endTime = endTime
116 self.status_value = status_value
117
118 if self.path is None:
119 raise ValueError, "The path is not valid"
120
121 if ext is None:
122 ext = self.ext
123
124 self.search_files(self.path, startDate, endDate, ext)
125 self.timezone = timezone
126 self.fileIndex = 0
127
128 if not self.fileList:
129 raise Warning, "There are no files matching these dates in the folder: %s. \n Check 'startDate' and 'endDate' "%(path)
130
131 self.setNextFile()
132
133 def search_files(self, path, startDate, endDate, ext):
134 '''
135 Search for BLTR rawdata files in path.
136 Build the list of files to process whose dates fall within [startDate, endDate].
137 
138 Input:
139 path - Path where BLTR rawdata files are located
140 startDate - Select files from this date
141 endDate - Select files until this date
142 ext - Extension of the files to read
143 
144 '''
145 
146 print 'Searching files in %s' % (path)
147 foldercounter = 0
148 fileList0 = glob.glob1(path, "*%s" % ext)
149 fileList0.sort()
150
151 self.fileList = []
152 self.dateFileList = []
153
154 for thisFile in fileList0:
155 year = thisFile[-14:-10]
156 if not isNumber(year):
157 continue
158
159 month = thisFile[-10:-8]
160 if not isNumber(month):
161 continue
162
163 day = thisFile[-8:-6]
164 if not isNumber(day):
165 continue
166
167 year, month, day = int(year), int(month), int(day)
168 dateFile = datetime.date(year, month, day)
169
170 if (startDate > dateFile) or (endDate < dateFile):
171 continue
172
173 self.fileList.append(thisFile)
174 self.dateFileList.append(dateFile)
175
176 return
177
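#Hedged example of calling search_files directly (the folder and dates are placeholders):
#
# reader = BLTRParamReader()
# reader.search_files('/data/bltr', datetime.date(2016, 1, 1), datetime.date(2016, 1, 31), '.sswma')
# print reader.fileList, reader.dateFileList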
178 def setNextFile(self):
179
180 file_id = self.fileIndex
181
182 if file_id == len(self.fileList):
183 print '\nNo more files in the folder'
184 print 'Total number of file(s) read : {}'.format(self.fileIndex)
185 self.flagNoMoreFiles = 1
186 return 0
187
188 print '\n[Setting file] (%s) ...' % self.fileList[file_id]
189 filename = os.path.join(self.path, self.fileList[file_id])
190
191 dirname, name = os.path.split(filename)
192 self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
193 if self.filename is not None:
194 self.fp.close()
195 self.filename = filename
196 self.fp = open(self.filename, 'rb')
197 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
198 self.nrecords = self.header_file['nrec'][0]
199 self.sizeOfFile = os.path.getsize(self.filename)
200 self.counter_records = 0
201 self.flagIsNewFile = 0
202 self.fileIndex += 1
203
204 return 1
205
206 def readNextBlock(self):
207
208 while True:
209 if self.counter_records == self.nrecords:
210 self.flagIsNewFile = 1
211 if not self.setNextFile():
212 return 0
213
214 self.readBlock()
215
216 if (self.datatime.time() < self.startTime) or (self.datatime.time() > self.endTime):
217 print "[Reading] Record No. %d/%d -> %s [Skipping]" %(
218 self.counter_records,
219 self.nrecords,
220 self.datatime.ctime())
221 continue
222 break
223
224 print "[Reading] Record No. %d/%d -> %s" %(
225 self.counter_records,
226 self.nrecords,
227 self.datatime.ctime())
228
229 return 1
230
231 def readBlock(self):
232
233 pointer = self.fp.tell()
234 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
235 self.nchannels = header_rec['nchan'][0]/2
236 self.kchan = header_rec['nrxs'][0]
237 self.nmodes = header_rec['nmodes'][0]
238 self.nranges = header_rec['nranges'][0]
239 self.fp.seek(pointer)
240 self.height = numpy.empty((self.nmodes, self.nranges))
241 self.snr = numpy.empty((self.nmodes, self.nchannels, self.nranges))
242 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
243
244 for mode in range(self.nmodes):
245 self.readHeader()
246 data = self.readData()
247 self.height[mode] = (data[0] - self.correction) / 1000.
248 self.buffer[mode] = data[1]
249 self.snr[mode] = data[2]
250
251 self.counter_records = self.counter_records + self.nmodes
252
253 return
254
255 def readHeader(self):
256 '''
257 RecordHeader of BLTR rawdata file
258 '''
259
260 header_structure = numpy.dtype(
261 REC_HEADER_STRUCTURE.descr + [
262 ('antenna_coord', 'f4', (2, self.nchannels)),
263 ('rx_gains', 'u4', (self.nchannels,)),
264 ('rx_analysis', 'u4', (self.nchannels,))
265 ]
266 )
267
268 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
269 self.lat = self.header_rec['lat'][0]
270 self.lon = self.header_rec['lon'][0]
271 self.delta = self.header_rec['delta_r'][0]
272 self.correction = self.header_rec['dmode_rngcorr'][0]
273 self.imode = self.header_rec['dmode_index'][0]
274 self.antenna = self.header_rec['antenna_coord']
275 self.rx_gains = self.header_rec['rx_gains']
276 self.time = self.header_rec['time'][0]
277 tseconds = self.header_rec['time'][0]
278 local_t1 = time.localtime(tseconds)
279 self.year = local_t1.tm_year
280 self.month = local_t1.tm_mon
281 self.day = local_t1.tm_mday
282 self.t = datetime.datetime(self.year, self.month, self.day)
283 self.datatime = datetime.datetime.utcfromtimestamp(self.time)
284
285 def readData(self):
286 '''
287 Read and filter a data block record of a BLTR rawdata file; the filtering is done according to status_value.
288 
289 Input:
290 status_value - Array entries whose status differs from status_value are set to NaN
291 
292 '''
293
294 data_structure = numpy.dtype(
295 DATA_STRUCTURE.descr + [
296 ('rx_saturation', 'u4', (self.nchannels,)),
297 ('chan_offset', 'u4', (2 * self.nchannels,)),
298 ('rx_amp', 'u4', (self.nchannels,)),
299 ('rx_snr', 'f4', (self.nchannels,)),
300 ('cross_snr', 'f4', (self.kchan,)),
301 ('sea_power_relative', 'f4', (self.kchan,))]
302 )
303
304 data = numpy.fromfile(self.fp, data_structure, self.nranges)
305
306 height = data['range']
307 winds = numpy.array((data['zonal'], data['meridional'], data['vertical']))
308 snr = data['rx_snr'].T
309
310 winds[numpy.where(winds == -9999.)] = numpy.nan
311 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
312 snr[numpy.where(snr == -9999.)] = numpy.nan
313 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
314 snr = numpy.power(10, snr / 10)
315
316 return height, winds, snr
317
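#Worked note on the conversion above: numpy.power(10, snr / 10) turns the SNR read in dB into a
#linear power ratio, e.g. 20 dB -> 100 and 3 dB -> about 1.995; gates flagged with -9999. were
#already replaced by NaN beforehand, so they remain NaN after the conversion.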
318 def set_output(self):
319 '''
320 Storing data from databuffer to dataOut object
321 '''
322
323 self.dataOut.data_SNR = self.snr
324 self.dataOut.height = self.height
325 self.dataOut.data_output = self.buffer
326 self.dataOut.utctimeInit = self.time
327 self.dataOut.utctime = self.dataOut.utctimeInit
328 self.dataOut.useLocalTime = False
329 self.dataOut.paramInterval = 157
330 self.dataOut.timezone = self.timezone
331 self.dataOut.site = self.siteFile
332 self.dataOut.nrecords = self.nrecords/self.nmodes
333 self.dataOut.sizeOfFile = self.sizeOfFile
334 self.dataOut.lat = self.lat
335 self.dataOut.lon = self.lon
336 self.dataOut.channelList = range(self.nchannels)
337 self.dataOut.kchan = self.kchan
338 # self.dataOut.nHeights = self.nranges
339 self.dataOut.delta = self.delta
340 self.dataOut.correction = self.correction
341 self.dataOut.nmodes = self.nmodes
342 self.dataOut.imode = self.imode
343 self.dataOut.antenna = self.antenna
344 self.dataOut.rx_gains = self.rx_gains
345 self.dataOut.flagNoData = False
346
347 def getData(self):
348 '''
349 Read the next data block and store it in the dataOut object
350 '''
351 if self.flagNoMoreFiles:
352 self.dataOut.flagNoData = True
353 print 'No files left to process'
354 return 0
355
356 if not self.readNextBlock():
357 self.dataOut.flagNoData = True
358 return 0
359
360 self.set_output()
361
362 return 1
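
#Hedged usage sketch for running this reader standalone; the folder, dates and parameters below are
#placeholders, and within schainpy the unit is normally driven by a controller project instead.
if __name__ == '__main__':
    reader = BLTRParamReader()
    reader.setup(path='/data/bltr',                    #placeholder folder containing *.sswma files
                 startDate=datetime.date(2016, 1, 1),  #placeholder date range
                 endDate=datetime.date(2016, 1, 31),
                 timezone=0,
                 status_value=0)
    while reader.getData():                            #getData() returns 0 once no data is left
        print reader.dataOut.utctime, reader.dataOut.data_output.shape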
@@ -0,0 +1,1154
1 import os, sys
2 import glob
3 import fnmatch
4 import datetime
5 import time
6 import re
7 import h5py
8 import numpy
9 import matplotlib.pyplot as plt
10
11 import pylab as plb
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar, exp
14 from scipy import stats
15
16 from numpy.ma.core import getdata
17
18 SPEED_OF_LIGHT = 299792458
19 #SPEED_OF_LIGHT = 3e8
20
21 try:
22 from gevent import sleep
23 except ImportError:
24 from time import sleep
25
26 from schainpy.model.data.jrodata import Spectra
27 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
30 from numpy import imag, shape, NaN
31
32 from jroIO_base import JRODataReader
33
34
35 class Header(object):
36
37 def __init__(self):
38 raise NotImplementedError
39
40
41 def read(self):
42
43 raise NotImplementedError
44
45 def write(self):
46
47 raise NotImplementedError
48
49 def printInfo(self):
50
51 message = "#"*50 + "\n"
52 message += self.__class__.__name__.upper() + "\n"
53 message += "#"*50 + "\n"
54
55 keyList = self.__dict__.keys()
56 keyList.sort()
57
58 for key in keyList:
59 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
60
61 if "size" not in keyList:
62 attr = getattr(self, "size", None)
63
64 if attr:
65 message += "%s = %s" %("size", attr) + "\n"
66
67 #print message
68
69
70
71
72
73 FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
74 ('FileMgcNumber','<u4'), #0x23020100
75 ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more)
76 ('OffsetStartHeader','<u4'),
77 ('RadarUnitId','<u4'),
78 ('SiteName',numpy.str_,32), #Null terminated
79 ])
80
81 class FileHeaderBLTR(Header):
82
83 def __init__(self):
84
85 self.FileMgcNumber= 0 #0x23020100
86 self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more)
87 self.RadarUnitId= 0
88 self.OffsetStartHeader=0
89 self.SiteName= ""
90 self.size = 48
91
92 def FHread(self, fp):
93 #try:
94 startFp = open(fp,"rb")
95
96 header = numpy.fromfile(startFp, FILE_STRUCTURE,1)
97
98 print ' '
99 print 'file header pointer', startFp.tell()
100 print ' '
101
102
103 ''' numpy.fromfile(file, dtype, count, sep='')
104 file : file or str
105 Open file object or filename.
106
107 dtype : data-type
108 Data type of the returned array. For binary files, it is used to determine
109 the size and byte-order of the items in the file.
110
111 count : int
112 Number of items to read. -1 means all items (i.e., the complete file).
113
114 sep : str
115 Separator between items if file is a text file. Empty ("") separator means
116 the file should be treated as binary. Spaces (" ") in the separator match zero
117 or more whitespace characters. A separator consisting only of spaces must match
118 at least one whitespace.
119
120 '''
121
122
123
124 self.FileMgcNumber= hex(header['FileMgcNumber'][0])
125 self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more)
126 self.RadarUnitId= int(header['RadarUnitId'][0])
127 self.OffsetStartHeader= int(header['OffsetStartHeader'][0])
128 self.SiteName= str(header['SiteName'][0])
129
130 #print 'Number of blocks', self.nFDTdataRecors
131
132
133 if self.size <48:
134 return 0
135
136 return 1
137
138
139 def write(self, fp):
140
141 headerTuple = (self.FileMgcNumber,
142 self.nFDTdataRecors,
143 self.RadarUnitId,
144 self.SiteName,
145 self.size)
146
147
148 header = numpy.array(headerTuple, FILE_STRUCTURE)
149 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
150 header.tofile(fp)
151 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
152
153 fid : file or str
154 An open file object, or a string containing a filename.
155
156 sep : str
157 Separator between array items for text output. If "" (empty), a binary file is written,
158 equivalent to file.write(a.tobytes()).
159
160 format : str
161 Format string for text file output. Each entry in the array is formatted to text by
162 first converting it to the closest Python type, and then using "format" % item.
163
164 '''
165
166 return 1
167
168
169
170
171
172 RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes
173 ('RecMgcNumber','<u4'), #0x23030001
174 ('RecCounter','<u4'), #Record counter(0,1, ...)
175 ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record
176 ('Off2StartData','<u4'), #Offset to start of data from start of this record
177 ('nUtime','<i4'), #Epoch time stamp of start of acquisition (seconds)
178 ('nMilisec','<u4'), #Millisecond component of time stamp (0,...,999)
179 ('ExpTagName',numpy.str_,32), #Experiment tag name (null terminated)
180 ('ExpComment',numpy.str_,32), #Experiment comment (null terminated)
181 ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North)
182 ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East)
183 ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
184 ('TransmitFrec','<u4'), #Transmit frequency (Hz)
185 ('ReceiveFrec','<u4'), #Receive frequency
186 ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz)
187 ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2")
188 ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3)
189 ('nModesInUse','<u4'), #Number of modes in use (1 or 2)
190 ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1)
191 ('DualModeRange','<u4'), #Dual Mode range correction for these data (m)
192 ('nDigChannels','<u4'), #Number of digital channels acquired (2*N)
193 ('SampResolution','<u4'), #Sampling resolution (meters)
194 ('nHeights','<u4'), #Number of range gates sampled
195 ('StartRangeSamp','<u4'), #Start range of sampling (meters)
196 ('PRFhz','<u4'), #PRF (Hz)
197 ('nCohInt','<u4'), #Integrations
198 ('nProfiles','<u4'), #Number of data points transformed
199 ('nChannels','<u4'), #Number of receive beams stored in file (1 or N)
200 ('nIncohInt','<u4'), #Number of spectral averages
201 ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window)
202 ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North)
203 ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical)
204 ('AntennaCoord0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
205 ('AntennaAngl0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
206 ('AntennaCoord1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
207 ('AntennaAngl1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
208 ('AntennaCoord2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
209 ('AntennaAngl2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
210 ('RecPhaseCalibr0','<f4'), #Receiver phase calibration (degrees) - N values
211 ('RecPhaseCalibr1','<f4'), #Receiver phase calibration (degrees) - N values
212 ('RecPhaseCalibr2','<f4'), #Receiver phase calibration (degrees) - N values
213 ('RecAmpCalibr0','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
214 ('RecAmpCalibr1','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
215 ('RecAmpCalibr2','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
216 ('ReceiverGaindB0','<i4'), #Receiver gains in dB - N values
217 ('ReceiverGaindB1','<i4'), #Receiver gains in dB - N values
218 ('ReceiverGaindB2','<i4'), #Receiver gains in dB - N values
219 ])
220
221
222 class RecordHeaderBLTR(Header):
223
224 def __init__(self, RecMgcNumber=None, RecCounter= 0, Off2StartNxtRec= 811248,
225 nUtime= 0, nMilisec= 0, ExpTagName= None,
226 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees= 0,
227 RTCgpsStatus= 0, TransmitFrec= 0, ReceiveFrec= 0,
228 FirstOsciFrec= 0, Polarisation= 0, ReceiverFiltSett= 0,
229 nModesInUse= 0, DualModeIndex= 0, DualModeRange= 0,
230 nDigChannels= 0, SampResolution= 0, nHeights= 0,
231 StartRangeSamp= 0, PRFhz= 0, nCohInt= 0,
232 nProfiles= 0, nChannels= 0, nIncohInt= 0,
233 FFTwindowingInd= 0, BeamAngleAzim= 0, BeamAngleZen= 0,
234 AntennaCoord0= 0, AntennaCoord1= 0, AntennaCoord2= 0,
235 RecPhaseCalibr0= 0, RecPhaseCalibr1= 0, RecPhaseCalibr2= 0,
236 RecAmpCalibr0= 0, RecAmpCalibr1= 0, RecAmpCalibr2= 0,
237 AntennaAngl0=0, AntennaAngl1=0, AntennaAngl2=0,
238 ReceiverGaindB0= 0, ReceiverGaindB1= 0, ReceiverGaindB2= 0, Off2StartData=0, OffsetStartHeader=0):
239
240 self.RecMgcNumber = RecMgcNumber #0x23030001
241 self.RecCounter = RecCounter
242 self.Off2StartNxtRec = Off2StartNxtRec
243 self.Off2StartData = Off2StartData
244 self.nUtime = nUtime
245 self.nMilisec = nMilisec
246 self.ExpTagName = ExpTagName
247 self.ExpComment = ExpComment
248 self.SiteLatDegrees = SiteLatDegrees
249 self.SiteLongDegrees = SiteLongDegrees
250 self.RTCgpsStatus = RTCgpsStatus
251 self.TransmitFrec = TransmitFrec
252 self.ReceiveFrec = ReceiveFrec
253 self.FirstOsciFrec = FirstOsciFrec
254 self.Polarisation = Polarisation
255 self.ReceiverFiltSett = ReceiverFiltSett
256 self.nModesInUse = nModesInUse
257 self.DualModeIndex = DualModeIndex
258 self.DualModeRange = DualModeRange
259 self.nDigChannels = nDigChannels
260 self.SampResolution = SampResolution
261 self.nHeights = nHeights
262 self.StartRangeSamp = StartRangeSamp
263 self.PRFhz = PRFhz
264 self.nCohInt = nCohInt
265 self.nProfiles = nProfiles
266 self.nChannels = nChannels
267 self.nIncohInt = nIncohInt
268 self.FFTwindowingInd = FFTwindowingInd
269 self.BeamAngleAzim = BeamAngleAzim
270 self.BeamAngleZen = BeamAngleZen
271 self.AntennaCoord0 = AntennaCoord0
272 self.AntennaAngl0 = AntennaAngl0
273 self.AntennaAngl1 = AntennaAngl1
274 self.AntennaAngl2 = AntennaAngl2
275 self.AntennaCoord1 = AntennaCoord1
276 self.AntennaCoord2 = AntennaCoord2
277 self.RecPhaseCalibr0 = RecPhaseCalibr0
278 self.RecPhaseCalibr1 = RecPhaseCalibr1
279 self.RecPhaseCalibr2 = RecPhaseCalibr2
280 self.RecAmpCalibr0 = RecAmpCalibr0
281 self.RecAmpCalibr1 = RecAmpCalibr1
282 self.RecAmpCalibr2 = RecAmpCalibr2
283 self.ReceiverGaindB0 = ReceiverGaindB0
284 self.ReceiverGaindB1 = ReceiverGaindB1
285 self.ReceiverGaindB2 = ReceiverGaindB2
286 self.OffsetStartHeader = 48
287
288
289
290 def RHread(self, fp):
291 #print fp
292 #startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb") #The method tell() returns the current position of the file read/write pointer within the file.
293 startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
294 #RecCounter=0
295 #Off2StartNxtRec=811248
296 OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
297 print ' '
298 print 'Record Header pointer', startFp.tell()
299 print ' '
300
301
302 startFp.seek(OffRHeader, os.SEEK_SET)
303
304 print ' '
305 print 'Record Header pointer after seek', startFp.tell()
306 print ' '
307
308 #print 'Block position: ',OffRHeader
309
310 header = numpy.fromfile(startFp,RECORD_STRUCTURE,1)
311
312 print ' '
313 print 'Record Header pointer after read', startFp.tell()
314 print ' '
315
316 print ' '
317 #
318 #print 'Record Header pointer after seek', header.tell()
319 print ' '
320
321 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) #0x23030001
322 self.RecCounter = int(header['RecCounter'][0])
323 self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
324 self.Off2StartData = int(header['Off2StartData'][0])
325 self.nUtime = header['nUtime'][0]
326 self.nMilisec = header['nMilisec'][0]
327 self.ExpTagName = str(header['ExpTagName'][0])
328 self.ExpComment = str(header['ExpComment'][0])
329 self.SiteLatDegrees = header['SiteLatDegrees'][0]
330 self.SiteLongDegrees = header['SiteLongDegrees'][0]
331 self.RTCgpsStatus = header['RTCgpsStatus'][0]
332 self.TransmitFrec = header['TransmitFrec'][0]
333 self.ReceiveFrec = header['ReceiveFrec'][0]
334 self.FirstOsciFrec = header['FirstOsciFrec'][0]
335 self.Polarisation = header['Polarisation'][0]
336 self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
337 self.nModesInUse = header['nModesInUse'][0]
338 self.DualModeIndex = header['DualModeIndex'][0]
339 self.DualModeRange = header['DualModeRange'][0]
340 self.nDigChannels = header['nDigChannels'][0]
341 self.SampResolution = header['SampResolution'][0]
342 self.nHeights = header['nHeights'][0]
343 self.StartRangeSamp = header['StartRangeSamp'][0]
344 self.PRFhz = header['PRFhz'][0]
345 self.nCohInt = header['nCohInt'][0]
346 self.nProfiles = header['nProfiles'][0]
347 self.nChannels = header['nChannels'][0]
348 self.nIncohInt = header['nIncohInt'][0]
349 self.FFTwindowingInd = header['FFTwindowingInd'][0]
350 self.BeamAngleAzim = header['BeamAngleAzim'][0]
351 self.BeamAngleZen = header['BeamAngleZen'][0]
352 self.AntennaCoord0 = header['AntennaCoord0'][0]
353 self.AntennaAngl0 = header['AntennaAngl0'][0]
354 self.AntennaCoord1 = header['AntennaCoord1'][0]
355 self.AntennaAngl1 = header['AntennaAngl1'][0]
356 self.AntennaCoord2 = header['AntennaCoord2'][0]
357 self.AntennaAngl2 = header['AntennaAngl2'][0]
358 self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
359 self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
360 self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
361 self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
362 self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
363 self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
364 self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
365 self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
366 self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
367
368 self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
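#ipp is the one-way distance covered in half an inter-pulse period (0.5*c/PRF).
#Worked example: with PRFhz = 1000 Hz, ipp = 0.5*299792458/1000 = 149896.229 m, roughly 150 km.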
369
370 self.RHsize = 180+20*self.nChannels
371 self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
372 #print 'Datasize',self.Datasize
373 endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
374
375 print '=============================================='
376 print 'RecMgcNumber ',self.RecMgcNumber
377 print 'RecCounter ',self.RecCounter
378 print 'Off2StartNxtRec ',self.Off2StartNxtRec
379 print 'Off2StartData ',self.Off2StartData
380 print 'Range Resolution ',self.SampResolution
381 print 'First Height ',self.StartRangeSamp
382 print 'PRF (Hz) ',self.PRFhz
383 print 'Heights (K) ',self.nHeights
384 print 'Channels (N) ',self.nChannels
385 print 'Profiles (J) ',self.nProfiles
386 print 'iCoh ',self.nCohInt
387 print 'iInCoh ',self.nIncohInt
388 print 'BeamAngleAzim ',self.BeamAngleAzim
389 print 'BeamAngleZen ',self.BeamAngleZen
390
391 #print 'Mode in use ',self.DualModeIndex
392 #print 'UtcTime ',self.nUtime
393 #print 'MiliSec ',self.nMilisec
394 #print 'Exp TagName ',self.ExpTagName
395 #print 'Exp Comment ',self.ExpComment
396 #print 'FFT Window Index ',self.FFTwindowingInd
397 #print 'N Dig. Channels ',self.nDigChannels
398 print 'Record block size ',self.RHsize
399 print 'DataSize ',self.Datasize
400 print 'BeamAngleAzim ',self.BeamAngleAzim
401 #print 'AntennaCoord0 ',self.AntennaCoord0
402 #print 'AntennaAngl0 ',self.AntennaAngl0
403 #print 'AntennaCoord1 ',self.AntennaCoord1
404 #print 'AntennaAngl1 ',self.AntennaAngl1
405 #print 'AntennaCoord2 ',self.AntennaCoord2
406 #print 'AntennaAngl2 ',self.AntennaAngl2
407 print 'RecPhaseCalibr0 ',self.RecPhaseCalibr0
408 print 'RecPhaseCalibr1 ',self.RecPhaseCalibr1
409 print 'RecPhaseCalibr2 ',self.RecPhaseCalibr2
410 print 'RecAmpCalibr0 ',self.RecAmpCalibr0
411 print 'RecAmpCalibr1 ',self.RecAmpCalibr1
412 print 'RecAmpCalibr2 ',self.RecAmpCalibr2
413 print 'ReceiverGaindB0 ',self.ReceiverGaindB0
414 print 'ReceiverGaindB1 ',self.ReceiverGaindB1
415 print 'ReceiverGaindB2 ',self.ReceiverGaindB2
416 print '=============================================='
417
418 if OffRHeader > endFp:
419 sys.stderr.write("Warning %s: Size value read from Record Header is smaller than it should be\n" %fp)
420 return 0
421
422 if OffRHeader < endFp:
423 sys.stderr.write("Warning %s: Size value read from Record Header is greater than it should be\n" %fp)
424 return 0
425
426 return 1
427
428
429 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader):
430
431 path = None
432 startDate = None
433 endDate = None
434 startTime = None
435 endTime = None
436 walk = None
437 isConfig = False
438
439
440 fileList= None
441
442 #metadata
443 TimeZone= None
444 Interval= None
445 heightList= None
446
447 #data
448 data= None
449 utctime= None
450
451
452
453 def __init__(self, **kwargs):
454
455 #Remove the inheritance from the base class
456 ProcessingUnit.__init__(self, **kwargs)
457
458 #self.isConfig = False
459
460 #self.pts2read_SelfSpectra = 0
461 #self.pts2read_CrossSpectra = 0
462 #self.pts2read_DCchannels = 0
463 #self.datablock = None
464 self.utc = None
465 self.ext = ".fdt"
466 self.optchar = "P"
467 self.fpFile=None
468 self.fp = None
469 self.BlockCounter=0
470 self.dtype = None
471 self.fileSizeByHeader = None
472 self.filenameList = []
473 self.fileSelector = 0
474 self.Off2StartNxtRec=0
475 self.RecCounter=0
476 self.flagNoMoreFiles = 0
477 self.data_spc=None
478 self.data_cspc=None
479 self.data_output=None
480 self.path = None
481 self.OffsetStartHeader=0
482 self.Off2StartData=0
483 self.ipp = 0
484 self.nFDTdataRecors=0
485 self.blocksize = 0
486 self.dataOut = Spectra()
487 self.profileIndex = 1 #Always
488 self.dataOut.flagNoData=False
489 self.dataOut.nRdPairs = 0
490 self.dataOut.pairsList = []
491 self.dataOut.data_spc=None
492 self.dataOut.noise=[]
493 self.dataOut.velocityX=[]
494 self.dataOut.velocityY=[]
495 self.dataOut.velocityV=[]
496
497
498
499 def Files2Read(self, fp):
500 '''
501 Function that indicates the number of .fdt files that exist in the folder to be read.
502 It also creates an organized list with the names of the files to read.
503 '''
504 #self.__checkPath()
505
506 ListaData=os.listdir(fp) #Gets the list of files within the fp address
507 ListaData=sorted(ListaData) #Sort the list of file names in ascending order
508 nFiles=0 #File Counter
509 FileList=[] #A list is created that will contain the .fdt files
510 for IndexFile in ListaData :
511 if '.fdt' in IndexFile:
512 FileList.append(IndexFile)
513 nFiles+=1
514
515 #print 'Files2Read'
516 #print 'There are '+str(nFiles)+' .fdt files'
517
518 self.filenameList=FileList #List of files from least to largest by names
519
520
521 def run(self, **kwargs):
522 '''
523 This method initiates the data intake and is called repeatedly.
524 It first verifies that setup() has been run and then continues acquiring
525 the data to be processed with getData().
526 '''
527 if not self.isConfig:
528 self.setup(**kwargs)
529 self.isConfig = True
530
531 self.getData()
532 #print 'running'
533
534
535 def setup(self, path=None,
536 startDate=None,
537 endDate=None,
538 startTime=None,
539 endTime=None,
540 walk=True,
541 timezone='utc',
542 code = None,
543 online=False,
544 ReadMode=None,
545 **kwargs):
546
547 self.isConfig = True
548
549 self.path=path
550 self.startDate=startDate
551 self.endDate=endDate
552 self.startTime=startTime
553 self.endTime=endTime
554 self.walk=walk
555 self.ReadMode=int(ReadMode)
556
557 pass
558
559
560 def getData(self):
561 '''
562 Before running this function, check whether there is still an unread file,
563 whether there are blocks left to read, and whether the data block is empty.
564 
565 It then calls the file read routine.
566 
567 '''
568
569 if self.flagNoMoreFiles:
570 self.dataOut.flagNoData = True
571 print 'flagNoData becomes True'
572 return 0
573
574 self.fp=self.path
575 self.Files2Read(self.fp)
576 self.readFile(self.fp)
577 self.dataOut.data_spc = self.data_spc
578 self.dataOut.data_cspc =self.data_cspc
579 self.dataOut.data_output=self.data_output
580
581 print 'self.dataOut.data_output', shape(self.dataOut.data_output)
582
583 #self.removeDC()
584 return self.dataOut.data_spc
585
586
587 def readFile(self,fp):
588 '''
589 Indicate whether reading is done in Online or Offline mode and load the
590 parameters for that file-reading mode.
591 
592 Then two actions are performed:
593 
594 1. Get the BLTR FileHeader.
595 2. Start reading the first block.
596 '''
597
598 #The full path of the .fdt file to be read is built from the folder address and the file name
599 print "File: ",self.fileSelector+1
600
601 if self.fileSelector < len(self.filenameList):
602
603 self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector])
604 #print self.fpFile
605 fheader = FileHeaderBLTR()
606 fheader.FHread(self.fpFile) #Bltr FileHeader Reading
607 self.nFDTdataRecors=fheader.nFDTdataRecors
608
609 self.readBlock() #Block reading
610 else:
611 print 'readFile FlagNoData becomes true'
612 self.flagNoMoreFiles=True
613 self.dataOut.flagNoData = True
614 return 0
615
616 def getVelRange(self, extrapoints=0):
617 Lambda= SPEED_OF_LIGHT/50000000
618 PRF = self.dataOut.PRF#1./(self.dataOut.ippSeconds * self.dataOut.nCohInt)
619 Vmax=-Lambda/(4.*(1./PRF)*self.dataOut.nCohInt*2.)
620 deltafreq = PRF / (self.nProfiles)
621 deltavel = (Vmax*2) / (self.nProfiles)
622 freqrange = deltafreq*(numpy.arange(self.nProfiles)-self.nProfiles/2.) - deltafreq/2
623 velrange = deltavel*(numpy.arange(self.nProfiles)-self.nProfiles/2.)
624 return velrange
625
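#Worked example of the arithmetic inside getVelRange above, assuming the 50 MHz transmit frequency
#implied by the Lambda line: Lambda = 299792458/50e6 = 5.996 m. With PRF = 1000 Hz and nCohInt = 1
#the code gives Vmax = -Lambda/(4*(1./PRF)*nCohInt*2) = -749.5 m/s, and velrange contains nProfiles
#steps of deltavel = (Vmax*2)/nProfiles centred on zero.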
626 def readBlock(self):
627 '''
628 Check whether the block has data; if it does not, move on to the next file.
629
630 Then the following is done:
631
632 1. Read the RecordHeader
633 2. Fill the buffer with the current block number.
634
635 '''
636
637 if self.BlockCounter < self.nFDTdataRecors-2:
638 print self.nFDTdataRecors, 'CONDITION'
639 if self.ReadMode==1:
640 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter+1)
641 elif self.ReadMode==0:
642 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter)
643
644 rheader.RHread(self.fpFile) #Bltr FileHeader Reading
645
646 self.OffsetStartHeader=rheader.OffsetStartHeader
647 self.RecCounter=rheader.RecCounter
648 self.Off2StartNxtRec=rheader.Off2StartNxtRec
649 self.Off2StartData=rheader.Off2StartData
650 self.nProfiles=rheader.nProfiles
651 self.nChannels=rheader.nChannels
652 self.nHeights=rheader.nHeights
653 self.frequency=rheader.TransmitFrec
654 self.DualModeIndex=rheader.DualModeIndex
655
656 self.pairsList =[(0,1),(0,2),(1,2)]
657 self.dataOut.pairsList = self.pairsList
658
659 self.nRdPairs=len(self.dataOut.pairsList)
660 self.dataOut.nRdPairs = self.nRdPairs
661
662 self.__firstHeight=rheader.StartRangeSamp
663 self.__deltaHeight=rheader.SampResolution
664 self.dataOut.heightList= self.__firstHeight + numpy.array(range(self.nHeights))*self.__deltaHeight
665 self.dataOut.channelList = range(self.nChannels)
666 self.dataOut.nProfiles=rheader.nProfiles
667 self.dataOut.nIncohInt=rheader.nIncohInt
668 self.dataOut.nCohInt=rheader.nCohInt
669 self.dataOut.ippSeconds= 1/float(rheader.PRFhz)
670 self.dataOut.PRF=rheader.PRFhz
671 self.dataOut.nFFTPoints=rheader.nProfiles
672 self.dataOut.utctime=rheader.nUtime
673 self.dataOut.timeZone=0
674 self.dataOut.normFactor= self.dataOut.nProfiles*self.dataOut.nIncohInt*self.dataOut.nCohInt
675 self.dataOut.outputInterval= self.dataOut.ippSeconds * self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
676
677 self.data_output=numpy.ones([3,rheader.nHeights])*numpy.NaN
678 print 'self.data_output', shape(self.data_output)
679 self.dataOut.velocityX=[]
680 self.dataOut.velocityY=[]
681 self.dataOut.velocityV=[]
682
683             '''Block reading: the block data is read from the file and reshaped into
684             (channels, profiles, heights).
685 '''
686
687             #Move the file pointer to where the data block starts
688 startDATA = open(self.fpFile,"rb")
689 OffDATA= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec+self.Off2StartData
690 startDATA.seek(OffDATA, os.SEEK_SET)
691
692 def moving_average(x, N=2):
693 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
694
695 def gaus(xSamples,a,x0,sigma):
696 return a*exp(-(xSamples-x0)**2/(2*sigma**2))
697
698 def Find(x,value):
699 for index in range(len(x)):
700 if x[index]==value:
701 return index
702
703 def pol2cart(rho, phi):
704 x = rho * numpy.cos(phi)
705 y = rho * numpy.sin(phi)
706 return(x, y)
707
708
709
710
711 if self.DualModeIndex==self.ReadMode:
712
713 self.data_fft = numpy.fromfile( startDATA, [('complex','<c8')],self.nProfiles*self.nChannels*self.nHeights )
714
715 self.data_fft=self.data_fft.astype(numpy.dtype('complex'))
716
717 self.data_block=numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles ))
718
719 self.data_block = numpy.transpose(self.data_block, (1,2,0))
720
721 copy = self.data_block.copy()
722 spc = copy * numpy.conjugate(copy)
723
724                 self.data_spc = numpy.absolute(spc)  # absolute value (magnitude)
725
726 factor = self.dataOut.normFactor
727
728
729 z = self.data_spc.copy()#/factor
730 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
731 #zdB = 10*numpy.log10(z)
732 print ' '
733 print 'Z: '
734 print shape(z)
735 print ' '
736 print ' '
737
738 self.dataOut.data_spc=self.data_spc
739
740 self.noise = self.dataOut.getNoise(ymin_index=80, ymax_index=132)#/factor
741 #noisedB = 10*numpy.log10(self.noise)
742
743
744 ySamples=numpy.ones([3,self.nProfiles])
745 phase=numpy.ones([3,self.nProfiles])
746 CSPCSamples=numpy.ones([3,self.nProfiles],dtype=numpy.complex_)
747 coherence=numpy.ones([3,self.nProfiles])
748 PhaseSlope=numpy.ones(3)
749 PhaseInter=numpy.ones(3)
750
751 '''****** Getting CrossSpectra ******'''
752 cspc=self.data_block.copy()
753 self.data_cspc=self.data_block.copy()
754
755 xFrec=self.getVelRange(1)
756 VelRange=self.getVelRange(1)
757 self.dataOut.VelRange=VelRange
758 #print ' '
759 #print ' '
760 #print 'xFrec',xFrec
761 #print ' '
762 #print ' '
763 #Height=35
764 for i in range(self.nRdPairs):
765
766 chan_index0 = self.dataOut.pairsList[i][0]
767 chan_index1 = self.dataOut.pairsList[i][1]
768
769 self.data_cspc[i,:,:]=cspc[chan_index0,:,:] * numpy.conjugate(cspc[chan_index1,:,:])
770
771
772 '''Getting Eij and Nij'''
773 (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180)
774 (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180)
775 (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180)
776
777 E01=AntennaX0-AntennaX1
778 N01=AntennaY0-AntennaY1
779
780 E02=AntennaX0-AntennaX2
781 N02=AntennaY0-AntennaY2
782
783 E12=AntennaX1-AntennaX2
784 N12=AntennaY1-AntennaY2
785
786 self.ChanDist= numpy.array([[E01, N01],[E02,N02],[E12,N12]])
787
788 self.dataOut.ChanDist = self.ChanDist
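                # Illustrative sketch (assumed coordinates, not a real BLTR layout): the
                # (range, bearing) pairs from the record header are converted to east/north
                # components with pol2cart() above and differenced to form the baselines
                # E01, N01, ... used by the commented-out wind estimation below.
                #
                # >>> x0, y0 = pol2cart(10.0, 0.0 * numpy.pi / 180)     # antenna 0
                # >>> x1, y1 = pol2cart(10.0, 120.0 * numpy.pi / 180)   # antenna 1
                # >>> E01, N01 = x0 - x1, y0 - y1                       # east/north baseline 0-1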
789
790
791 # for Height in range(self.nHeights):
792 #
793 # for i in range(self.nRdPairs):
794 #
795 # '''****** Line of Data SPC ******'''
796 # zline=z[i,:,Height]
797 #
798 # '''****** DC is removed ******'''
799 # DC=Find(zline,numpy.amax(zline))
800 # zline[DC]=(zline[DC-1]+zline[DC+1])/2
801 #
802 #
803 # '''****** SPC is normalized ******'''
804 # FactNorm= zline.copy() / numpy.sum(zline.copy())
805 # FactNorm= FactNorm/numpy.sum(FactNorm)
806 #
807 # SmoothSPC=moving_average(FactNorm,N=3)
808 #
809 # xSamples = ar(range(len(SmoothSPC)))
810 # ySamples[i] = SmoothSPC-self.noise[i]
811 #
812 # for i in range(self.nRdPairs):
813 #
814 # '''****** Line of Data CSPC ******'''
815 # cspcLine=self.data_cspc[i,:,Height].copy()
816 #
817 #
818 #
819 # '''****** CSPC is normalized ******'''
820 # chan_index0 = self.dataOut.pairsList[i][0]
821 # chan_index1 = self.dataOut.pairsList[i][1]
822 # CSPCFactor= numpy.sum(ySamples[chan_index0]) * numpy.sum(ySamples[chan_index1])
823 #
824 #
825 # CSPCNorm= cspcLine.copy() / numpy.sqrt(CSPCFactor)
826 #
827 #
828 # CSPCSamples[i] = CSPCNorm-self.noise[i]
829 # coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
830 #
831 # '''****** DC is removed ******'''
832 # DC=Find(coherence[i],numpy.amax(coherence[i]))
833 # coherence[i][DC]=(coherence[i][DC-1]+coherence[i][DC+1])/2
834 # coherence[i]= moving_average(coherence[i],N=2)
835 #
836 # phase[i] = moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
837 #
838 #
839 # '''****** Getting fij width ******'''
840 #
841 # yMean=[]
842 # yMean2=[]
843 #
844 # for j in range(len(ySamples[1])):
845 # yMean=numpy.append(yMean,numpy.average([ySamples[0,j],ySamples[1,j],ySamples[2,j]]))
846 #
847 # '''******* Getting fitting Gaussian ******'''
848 # meanGauss=sum(xSamples*yMean) / len(xSamples)
849 # sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples)
850 # #print 'Height',Height,'SNR', meanGauss/sigma**2
851 #
852 # if (abs(meanGauss/sigma**2) > 0.0001) :
853 #
854 # try:
855 # popt,pcov = curve_fit(gaus,xSamples,yMean,p0=[1,meanGauss,sigma])
856 #
857 # if numpy.amax(popt)>numpy.amax(yMean)*0.3:
858 # FitGauss=gaus(xSamples,*popt)
859 #
860 # else:
861 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
862 #                        print 'Check: inside', Height
863 # except RuntimeError:
864 #
865 # try:
866 # for j in range(len(ySamples[1])):
867 # yMean2=numpy.append(yMean2,numpy.average([ySamples[1,j],ySamples[2,j]]))
868 # popt,pcov = curve_fit(gaus,xSamples,yMean2,p0=[1,meanGauss,sigma])
869 # FitGauss=gaus(xSamples,*popt)
870 #                            print 'Check: exception 1', Height
871 # except RuntimeError:
872 #
873 # try:
874 # popt,pcov = curve_fit(gaus,xSamples,ySamples[1],p0=[1,meanGauss,sigma])
875 # FitGauss=gaus(xSamples,*popt)
876 #                                print 'Check: exception 2', Height
877 # except RuntimeError:
878 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
879 #                                print 'Check: exception 3', Height
880 # else:
881 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
882 #                        #print 'Check: outside', Height
883 #
884 #
885 #
886 # Maximun=numpy.amax(yMean)
887 # eMinus1=Maximun*numpy.exp(-1)
888 #
889 # HWpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-eMinus1)))
890 # HalfWidth= xFrec[HWpos]
891 # GCpos=Find(FitGauss, numpy.amax(FitGauss))
892 # Vpos=Find(FactNorm, numpy.amax(FactNorm))
893 # #Vpos=numpy.sum(FactNorm)/len(FactNorm)
894 # #Vpos=Find(FactNorm, min(FactNorm, key=lambda value:abs(value- numpy.mean(FactNorm) )))
895 # #print 'GCpos',GCpos, numpy.amax(FitGauss), 'HWpos',HWpos
896 # '''****** Getting Fij ******'''
897 #
898 # GaussCenter=xFrec[GCpos]
899 # if (GaussCenter<0 and HalfWidth>0) or (GaussCenter>0 and HalfWidth<0):
900 # Fij=abs(GaussCenter)+abs(HalfWidth)+0.0000001
901 # else:
902 # Fij=abs(GaussCenter-HalfWidth)+0.0000001
903 #
904 # '''****** Getting Frecuency range of significant data ******'''
905 #
906 # Rangpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.10)))
907 #
908 # if Rangpos<GCpos:
909 # Range=numpy.array([Rangpos,2*GCpos-Rangpos])
910 # else:
911 # Range=numpy.array([2*GCpos-Rangpos,Rangpos])
912 #
913 # FrecRange=xFrec[Range[0]:Range[1]]
914 #
915 # #print 'FrecRange', FrecRange
916 # '''****** Getting SCPC Slope ******'''
917 #
918 # for i in range(self.nRdPairs):
919 #
920 # if len(FrecRange)>5 and len(FrecRange)<self.nProfiles*0.5:
921 # PhaseRange=moving_average(phase[i,Range[0]:Range[1]],N=3)
922 #
923 # slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange)
924 # PhaseSlope[i]=slope
925 # PhaseInter[i]=intercept
926 # else:
927 # PhaseSlope[i]=0
928 # PhaseInter[i]=0
929 #
930 # # plt.figure(i+15)
931 # #                        plt.title('PHASE ( CH%s*CH%s )' %(self.dataOut.pairsList[i][0],self.dataOut.pairsList[i][1]))
932 # #                        plt.xlabel('Frequency (kHz)')
933 # #                        plt.ylabel('Magnitude')
934 # # #plt.subplot(311+i)
935 # # plt.plot(FrecRange,PhaseRange,'b')
936 # # plt.plot(FrecRange,FrecRange*PhaseSlope[i]+PhaseInter[i],'r')
937 #
938 # #plt.axis([-0.6, 0.2, -3.2, 3.2])
939 #
940 #
941 # '''Getting constant C'''
942 # cC=(Fij*numpy.pi)**2
943 #
944 # # '''Getting Eij and Nij'''
945 # # (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180)
946 # # (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180)
947 # # (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180)
948 # #
949 # # E01=AntennaX0-AntennaX1
950 # # N01=AntennaY0-AntennaY1
951 # #
952 # # E02=AntennaX0-AntennaX2
953 # # N02=AntennaY0-AntennaY2
954 # #
955 # # E12=AntennaX1-AntennaX2
956 # # N12=AntennaY1-AntennaY2
957 #
958 # '''****** Getting constants F and G ******'''
959 # MijEijNij=numpy.array([[E02,N02], [E12,N12]])
960 # MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi)
961 # MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
962 # MijResults=numpy.array([MijResult0,MijResult1])
963 # (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
964 #
965 # '''****** Getting constants A, B and H ******'''
966 # W01=numpy.amax(coherence[0])
967 # W02=numpy.amax(coherence[1])
968 # W12=numpy.amax(coherence[2])
969 #
970 # WijResult0=((cF*E01+cG*N01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC))
971 # WijResult1=((cF*E02+cG*N02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC))
972 # WijResult2=((cF*E12+cG*N12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC))
973 #
974 # WijResults=numpy.array([WijResult0, WijResult1, WijResult2])
975 #
976 # WijEijNij=numpy.array([ [E01**2, N01**2, 2*E01*N01] , [E02**2, N02**2, 2*E02*N02] , [E12**2, N12**2, 2*E12*N12] ])
977 # (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
978 #
979 # VxVy=numpy.array([[cA,cH],[cH,cB]])
980 #
981 # VxVyResults=numpy.array([-cF,-cG])
982 # (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
983 # Vzon = Vy
984 # Vmer = Vx
985 # Vmag=numpy.sqrt(Vzon**2+Vmer**2)
986 # Vang=numpy.arctan2(Vmer,Vzon)
987 #
988 # if abs(Vy)<100 and abs(Vy)> 0.:
989 # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, Vzon) #Vmag
990 # #print 'Vmag',Vmag
991 # else:
992 # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, NaN)
993 #
994 # if abs(Vx)<100 and abs(Vx) > 0.:
995 # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, Vmer) #Vang
996 # #print 'Vang',Vang
997 # else:
998 # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, NaN)
999 #
1000 # if abs(GaussCenter)<2:
1001 # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, xFrec[Vpos])
1002 #
1003 # else:
1004 # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, NaN)
1005 #
1006 #
1007 # # print '********************************************'
1008 # # print 'HalfWidth ', HalfWidth
1009 # # print 'Maximun ', Maximun
1010 # # print 'eMinus1 ', eMinus1
1011 # # print 'Rangpos ', Rangpos
1012 # # print 'GaussCenter ',GaussCenter
1013 # # print 'E01 ',E01
1014 # # print 'N01 ',N01
1015 # # print 'E02 ',E02
1016 # # print 'N02 ',N02
1017 # # print 'E12 ',E12
1018 # # print 'N12 ',N12
1019 # #print 'self.dataOut.velocityX ', self.dataOut.velocityX
1020 # # print 'Fij ', Fij
1021 # # print 'cC ', cC
1022 # # print 'cF ', cF
1023 # # print 'cG ', cG
1024 # # print 'cA ', cA
1025 # # print 'cB ', cB
1026 # # print 'cH ', cH
1027 # # print 'Vx ', Vx
1028 # # print 'Vy ', Vy
1029 # # print 'Vmag ', Vmag
1030 # # print 'Vang ', Vang*180/numpy.pi
1031 # # print 'PhaseSlope ',PhaseSlope[0]
1032 # # print 'PhaseSlope ',PhaseSlope[1]
1033 # # print 'PhaseSlope ',PhaseSlope[2]
1034 # # print '********************************************'
1035 # #print 'data_output',shape(self.dataOut.velocityX), shape(self.dataOut.velocityY)
1036 #
1037 # #print 'self.dataOut.velocityX', len(self.dataOut.velocityX)
1038 # #print 'self.dataOut.velocityY', len(self.dataOut.velocityY)
1039 # #print 'self.dataOut.velocityV', self.dataOut.velocityV
1040 #
1041 # self.data_output[0]=numpy.array(self.dataOut.velocityX)
1042 # self.data_output[1]=numpy.array(self.dataOut.velocityY)
1043 # self.data_output[2]=numpy.array(self.dataOut.velocityV)
1044 #
1045 # prin= self.data_output[0][~numpy.isnan(self.data_output[0])]
1046 # print ' '
1047 # print 'VmagAverage',numpy.mean(prin)
1048 # print ' '
1049 # # plt.figure(5)
1050 # # plt.subplot(211)
1051 # # plt.plot(self.dataOut.velocityX,'yo:')
1052 # # plt.subplot(212)
1053 # # plt.plot(self.dataOut.velocityY,'yo:')
1054 #
1055 # # plt.figure(1)
1056 # # # plt.subplot(121)
1057 # # # plt.plot(xFrec,ySamples[0],'k',label='Ch0')
1058 # # # plt.plot(xFrec,ySamples[1],'g',label='Ch1')
1059 # # # plt.plot(xFrec,ySamples[2],'r',label='Ch2')
1060 # # # plt.plot(xFrec,FitGauss,'yo:',label='fit')
1061 # # # plt.legend()
1062 # #                    plt.title('DATA AT 2850 METERS ALTITUDE')
1063 # #
1064 # #                    plt.xlabel('Frequency (kHz)')
1065 # #                    plt.ylabel('Magnitude')
1066 # # # plt.subplot(122)
1067 # # # plt.title('Fit for Time Constant')
1068 # # #plt.plot(xFrec,zline)
1069 # # #plt.plot(xFrec,SmoothSPC,'g')
1070 # # plt.plot(xFrec,FactNorm)
1071 # # plt.axis([-4, 4, 0, 0.15])
1072 # # # plt.xlabel('SelfSpectra KHz')
1073 # #
1074 # # plt.figure(10)
1075 # # # plt.subplot(121)
1076 # # plt.plot(xFrec,ySamples[0],'b',label='Ch0')
1077 # # plt.plot(xFrec,ySamples[1],'y',label='Ch1')
1078 # # plt.plot(xFrec,ySamples[2],'r',label='Ch2')
1079 # # # plt.plot(xFrec,FitGauss,'yo:',label='fit')
1080 # # plt.legend()
1081 # #                    plt.title('SELF-SPECTRA PER CHANNEL')
1082 # #
1083 # #                    plt.xlabel('Frequency (kHz)')
1084 # #                    plt.ylabel('Magnitude')
1085 # # # plt.subplot(122)
1086 # # # plt.title('Fit for Time Constant')
1087 # # #plt.plot(xFrec,zline)
1088 # # #plt.plot(xFrec,SmoothSPC,'g')
1089 # # # plt.plot(xFrec,FactNorm)
1090 # # # plt.axis([-4, 4, 0, 0.15])
1091 # # # plt.xlabel('SelfSpectra KHz')
1092 # #
1093 # # plt.figure(9)
1094 # #
1095 # #
1096 # #                    plt.title('SMOOTHED DATA')
1097 # #                    plt.xlabel('Frequency (kHz)')
1098 # #                    plt.ylabel('Magnitude')
1099 # # plt.plot(xFrec,SmoothSPC,'g')
1100 # #
1101 # # #plt.plot(xFrec,FactNorm)
1102 # # plt.axis([-4, 4, 0, 0.15])
1103 # # # plt.xlabel('SelfSpectra KHz')
1104 # # #
1105 # # plt.figure(2)
1106 # # # #plt.subplot(121)
1107 # # plt.plot(xFrec,yMean,'r',label='Mean SelfSpectra')
1108 # #                    plt.plot(xFrec,FitGauss,'yo:',label='Gaussian fit')
1109 # # # plt.plot(xFrec[Rangpos],FitGauss[Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.1)))],'bo')
1110 # # # #plt.plot(xFrec,phase)
1111 # #                    # plt.xlabel('Smoothed, averaged kHz')
1112 # #                    plt.title('AVERAGED SELF-SPECTRA')
1113 # # # #plt.subplot(122)
1114 # # # #plt.plot(xSamples,zline)
1115 # #                    plt.xlabel('Frequency (kHz)')
1116 # #                    plt.ylabel('Magnitude')
1117 # # plt.legend()
1118 # # #
1119 # # # plt.figure(3)
1120 # # # plt.subplot(311)
1121 # # # #plt.plot(xFrec,phase[0])
1122 # # # plt.plot(xFrec,phase[0],'g')
1123 # # # plt.subplot(312)
1124 # # # plt.plot(xFrec,phase[1],'g')
1125 # # # plt.subplot(313)
1126 # # # plt.plot(xFrec,phase[2],'g')
1127 # # # #plt.plot(xFrec,phase[2])
1128 # # #
1129 # # # plt.figure(4)
1130 # # #
1131 # # # plt.plot(xSamples,coherence[0],'b')
1132 # # # plt.plot(xSamples,coherence[1],'r')
1133 # # # plt.plot(xSamples,coherence[2],'g')
1134 # # plt.show()
1135 # # #
1136 # # # plt.clf()
1137 # # # plt.cla()
1138 # # # plt.close()
1139 #
1140 # print ' '
1141
1142
1143
1144 self.BlockCounter+=2
1145
1146 else:
1147 self.fileSelector+=1
1148 self.BlockCounter=0
1149 print "Next File"
1150
1151
1152
1153
1154
@@ -0,0 +1,632
1 '''
2 Created on Aug 1, 2017
3
4 @author: Juan C. Espinoza
5 '''
6
7 import os
8 import sys
9 import time
10 import json
11 import glob
12 import datetime
13
14 import numpy
15 import h5py
16
17 from schainpy.model.io.jroIO_base import JRODataReader
18 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
19 from schainpy.model.data.jrodata import Parameters
20 from schainpy.utils import log
21
22 try:
23 import madrigal.cedar
24 except:
25 log.warning(
26 'You should install "madrigal library" module if you want to read/write Madrigal data'
27 )
28
29 DEF_CATALOG = {
30 'principleInvestigator': 'Marco Milla',
31 'expPurpose': None,
32 'cycleTime': None,
33 'correlativeExp': None,
34 'sciRemarks': None,
35 'instRemarks': None
36 }
37 DEF_HEADER = {
38 'kindatDesc': None,
39 'analyst': 'Jicamarca User',
40 'comments': None,
41 'history': None
42 }
43 MNEMONICS = {
44 10: 'jro',
45 11: 'jbr',
46 840: 'jul',
47 13: 'jas',
48 1000: 'pbr',
49 1001: 'hbr',
50 1002: 'obr',
51 }
52
53 UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)
54
55 def load_json(obj):
56 '''
57 Parse json as string instead of unicode
58 '''
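    # Usage sketch (illustrative): a JSON string or an already-parsed dict both
    # work, and unicode keys/values are coerced to plain str (Python 2):
    #
    # >>> load_json('{"GDLATR": "lat"}')
    # {'GDLATR': 'lat'}
    # >>> load_json({u'GDALT': u'heightList'})
    # {'GDALT': 'heightList'}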
59
60 if isinstance(obj, str):
61 iterable = json.loads(obj)
62 else:
63 iterable = obj
64
65 if isinstance(iterable, dict):
66 return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, unicode) else v
67 for k, v in iterable.items()}
68 elif isinstance(iterable, (list, tuple)):
69 return [str(v) if isinstance(v, unicode) else v for v in iterable]
70
71 return iterable
72
73
74 class MADReader(JRODataReader, ProcessingUnit):
75
76 def __init__(self, **kwargs):
77
78 ProcessingUnit.__init__(self, **kwargs)
79
80 self.dataOut = Parameters()
81 self.counter_records = 0
82 self.nrecords = None
83 self.flagNoMoreFiles = 0
84 self.isConfig = False
85 self.filename = None
86 self.intervals = set()
87
88 def setup(self,
89 path=None,
90 startDate=None,
91 endDate=None,
92 format=None,
93 startTime=datetime.time(0, 0, 0),
94 endTime=datetime.time(23, 59, 59),
95 **kwargs):
96
97 self.path = path
98 self.startDate = startDate
99 self.endDate = endDate
100 self.startTime = startTime
101 self.endTime = endTime
102 self.datatime = datetime.datetime(1900,1,1)
103 self.oneDDict = load_json(kwargs.get('oneDDict',
104 "{\"GDLATR\":\"lat\", \"GDLONR\":\"lon\"}"))
105 self.twoDDict = load_json(kwargs.get('twoDDict',
106 "{\"GDALT\": \"heightList\"}"))
107 self.ind2DList = load_json(kwargs.get('ind2DList',
108 "[\"GDALT\"]"))
109 if self.path is None:
110 raise ValueError, 'The path is not valid'
111
112 if format is None:
113             raise ValueError, 'The format is not valid, choose simple, cedar or hdf5'
114 elif format.lower() in ('simple', 'txt'):
115 self.ext = '.txt'
116 elif format.lower() in ('cedar',):
117 self.ext = '.001'
118 else:
119 self.ext = '.hdf5'
120
121 self.search_files(self.path)
122 self.fileId = 0
123
124 if not self.fileList:
125             raise Warning, 'There are no files matching these dates in the folder: {}. \n Check startDate and endDate'.format(path)
126
127 self.setNextFile()
128
129 def search_files(self, path):
130 '''
131         Searching for Madrigal files in path
132         Creating a list of files to process within [startDate, endDate]
133
134 Input:
135 path - Path to find files
136 '''
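        # Hedged example of the filename convention assumed by the date slicing
        # below: a 3-character station mnemonic followed by YYYYMMDD, e.g.
        #
        # >>> thisFile = 'jro20170801.001'   # hypothetical file name
        # >>> thisFile[3:7], thisFile[7:9], thisFile[9:11]
        # ('2017', '08', '01')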
137
138 log.log('Searching files {} in {} '.format(self.ext, path), 'MADReader')
139 foldercounter = 0
140 fileList0 = glob.glob1(path, '*{}'.format(self.ext))
141 fileList0.sort()
142
143 self.fileList = []
144 self.dateFileList = []
145
146 startDate = self.startDate - datetime.timedelta(1)
147 endDate = self.endDate + datetime.timedelta(1)
148
149 for thisFile in fileList0:
150 year = thisFile[3:7]
151 if not year.isdigit():
152 continue
153
154 month = thisFile[7:9]
155 if not month.isdigit():
156 continue
157
158 day = thisFile[9:11]
159 if not day.isdigit():
160 continue
161
162 year, month, day = int(year), int(month), int(day)
163 dateFile = datetime.date(year, month, day)
164
165 if (startDate > dateFile) or (endDate < dateFile):
166 continue
167
168 self.fileList.append(thisFile)
169 self.dateFileList.append(dateFile)
170
171 return
172
173 def parseHeader(self):
174 '''
175 '''
176
177 self.output = {}
178 self.version = '2'
179 s_parameters = None
180 if self.ext == '.txt':
181 self.parameters = [s.strip().lower() for s in self.fp.readline().strip().split(' ') if s]
182 elif self.ext == '.hdf5':
183 metadata = self.fp['Metadata']
184 data = self.fp['Data']['Array Layout']
185 if 'Independent Spatial Parameters' in metadata:
186 s_parameters = [s[0].lower() for s in metadata['Independent Spatial Parameters']]
187 self.version = '3'
188 one = [s[0].lower() for s in data['1D Parameters']['Data Parameters']]
189 one_d = [1 for s in one]
190 two = [s[0].lower() for s in data['2D Parameters']['Data Parameters']]
191 two_d = [2 for s in two]
192 self.parameters = one + two
193 self.parameters_d = one_d + two_d
194
195 log.success('Parameters found: {}'.format(','.join(self.parameters)),
196 'MADReader')
197 if s_parameters:
198 log.success('Spatial parameters: {}'.format(','.join(s_parameters)),
199 'MADReader')
200
201 for param in self.oneDDict.keys():
202 if param.lower() not in self.parameters:
203 log.warning(
204                     'Parameter {} not found, it will be ignored'.format(
205 param),
206 'MADReader')
207 self.oneDDict.pop(param, None)
208
209 for param, value in self.twoDDict.items():
210 if param.lower() not in self.parameters:
211 log.warning(
212 'Parameter {} not found, it will be ignored'.format(
213 param),
214 'MADReader')
215 self.twoDDict.pop(param, None)
216 continue
217 if isinstance(value, list):
218 if value[0] not in self.output:
219 self.output[value[0]] = []
220 self.output[value[0]].append(None)
221
222 def parseData(self):
223 '''
224 '''
225
226 if self.ext == '.txt':
227 self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
228 self.nrecords = self.data.shape[0]
229 self.ranges = numpy.unique(self.data[:,self.parameters.index(self.ind2DList[0].lower())])
230 elif self.ext == '.hdf5':
231 self.data = self.fp['Data']['Array Layout']
232 self.nrecords = len(self.data['timestamps'].value)
233 self.ranges = self.data['range'].value
234
235 def setNextFile(self):
236 '''
237 '''
238
239 file_id = self.fileId
240
241 if file_id == len(self.fileList):
242 log.success('No more files', 'MADReader')
243 self.flagNoMoreFiles = 1
244 return 0
245
246 log.success(
247 'Opening: {}'.format(self.fileList[file_id]),
248 'MADReader'
249 )
250
251 filename = os.path.join(self.path, self.fileList[file_id])
252
253 if self.filename is not None:
254 self.fp.close()
255
256 self.filename = filename
257 self.filedate = self.dateFileList[file_id]
258
259 if self.ext=='.hdf5':
260 self.fp = h5py.File(self.filename, 'r')
261 else:
262 self.fp = open(self.filename, 'rb')
263
264 self.parseHeader()
265 self.parseData()
266 self.sizeOfFile = os.path.getsize(self.filename)
267 self.counter_records = 0
268 self.flagIsNewFile = 0
269 self.fileId += 1
270
271 return 1
272
273 def readNextBlock(self):
274
275 while True:
276 self.flagDiscontinuousBlock = 0
277 if self.flagIsNewFile:
278 if not self.setNextFile():
279 return 0
280
281 self.readBlock()
282
283 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
284 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
285 log.warning(
286 'Reading Record No. {}/{} -> {} [Skipping]'.format(
287 self.counter_records,
288 self.nrecords,
289 self.datatime.ctime()),
290 'MADReader')
291 continue
292 break
293
294 log.log(
295 'Reading Record No. {}/{} -> {}'.format(
296 self.counter_records,
297 self.nrecords,
298 self.datatime.ctime()),
299 'MADReader')
300
301 return 1
302
303 def readBlock(self):
304 '''
305 '''
306 dum = []
307 if self.ext == '.txt':
308 dt = self.data[self.counter_records][:6].astype(int)
309 self.datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
310 while True:
311 dt = self.data[self.counter_records][:6].astype(int)
312 datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
313 if datatime == self.datatime:
314 dum.append(self.data[self.counter_records])
315 self.counter_records += 1
316 if self.counter_records == self.nrecords:
317 self.flagIsNewFile = True
318 break
319 continue
320 self.intervals.add((datatime-self.datatime).seconds)
321 if datatime.date() > self.datatime.date():
322 self.flagDiscontinuousBlock = 1
323 break
324 elif self.ext == '.hdf5':
325 datatime = datetime.datetime.utcfromtimestamp(
326 self.data['timestamps'][self.counter_records])
327 nHeights = len(self.ranges)
328 for n, param in enumerate(self.parameters):
329 if self.parameters_d[n] == 1:
330 dum.append(numpy.ones(nHeights)*self.data['1D Parameters'][param][self.counter_records])
331 else:
332 if self.version == '2':
333 dum.append(self.data['2D Parameters'][param][self.counter_records])
334 else:
335 tmp = self.data['2D Parameters'][param].value.T
336 dum.append(tmp[self.counter_records])
337 self.intervals.add((datatime-self.datatime).seconds)
338 if datatime.date()>self.datatime.date():
339 self.flagDiscontinuousBlock = 1
340 self.datatime = datatime
341 self.counter_records += 1
342 if self.counter_records == self.nrecords:
343 self.flagIsNewFile = True
344
345 self.buffer = numpy.array(dum)
346 return
347
348 def set_output(self):
349 '''
350 Storing data from buffer to dataOut object
351 '''
352
353 parameters = [None for __ in self.parameters]
354
355 for param, attr in self.oneDDict.items():
356 x = self.parameters.index(param.lower())
357 setattr(self.dataOut, attr, self.buffer[0][x])
358
359 for param, value in self.twoDDict.items():
360 x = self.parameters.index(param.lower())
361 if self.ext == '.txt':
362 y = self.parameters.index(self.ind2DList[0].lower())
363 ranges = self.buffer[:,y]
364 if self.ranges.size == ranges.size:
365 continue
366 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
367 dummy = numpy.zeros(self.ranges.shape) + numpy.nan
368 dummy[index] = self.buffer[:,x]
369 else:
370 dummy = self.buffer[x]
371
372 if isinstance(value, str):
373 if value not in self.ind2DList:
374 setattr(self.dataOut, value, dummy.reshape(1,-1))
375 elif isinstance(value, list):
376 self.output[value[0]][value[1]] = dummy
377 parameters[value[1]] = param
378
379 for key, value in self.output.items():
380 setattr(self.dataOut, key, numpy.array(value))
381
382 self.dataOut.parameters = [s for s in parameters if s]
383 self.dataOut.heightList = self.ranges
384 self.dataOut.utctime = (self.datatime - UT1970).total_seconds()
385 self.dataOut.utctimeInit = self.dataOut.utctime
386 self.dataOut.paramInterval = min(self.intervals)
387 self.dataOut.useLocalTime = False
388 self.dataOut.flagNoData = False
389 self.dataOut.nrecords = self.nrecords
390 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
391
392 def getData(self):
393 '''
394         Read the next block and store its data in the dataOut object
395 '''
396 if self.flagNoMoreFiles:
397 self.dataOut.flagNoData = True
398 log.error('No file left to process', 'MADReader')
399 return 0
400
401 if not self.readNextBlock():
402 self.dataOut.flagNoData = True
403 return 0
404
405 self.set_output()
406
407 return 1
408
409
410 class MADWriter(Operation):
411
412 missing = -32767
413
414 def __init__(self, **kwargs):
415
416 Operation.__init__(self, **kwargs)
417 self.dataOut = Parameters()
418 self.path = None
419 self.fp = None
420
421 def run(self, dataOut, path, oneDDict, ind2DList='[]', twoDDict='{}',
422 metadata='{}', format='cedar', **kwargs):
423 '''
424 Inputs:
425 path - path where files will be created
426 oneDDict - json of one-dimensional parameters in record where keys
427 are Madrigal codes (integers or mnemonics) and values the corresponding
428 dataOut attribute e.g: {
429 'gdlatr': 'lat',
430 'gdlonr': 'lon',
431 'gdlat2':'lat',
432 'glon2':'lon'}
433 ind2DList - list of independent spatial two-dimensional parameters e.g:
434                 ['heightList']
435 twoDDict - json of two-dimensional parameters in record where keys
436 are Madrigal codes (integers or mnemonics) and values the corresponding
437                 dataOut attribute; if the attribute is a multidimensional array, specify it as a tuple
438 ('attr', pos) e.g: {
439 'gdalt': 'heightList',
440 'vn1p2': ('data_output', 0),
441 'vn2p2': ('data_output', 1),
442 'vn3': ('data_output', 2),
443 'snl': ('data_SNR', 'db')
444 }
445 metadata - json of madrigal metadata (kinst, kindat, catalog and header)
446 '''
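        # Illustrative configuration sketch (hypothetical values following the
        # structure described above, not a prescribed setup):
        #
        # oneDDict  = '{"gdlatr": "lat", "gdlonr": "lon"}'
        # ind2DList = '["heightList"]'
        # twoDDict  = ('{"gdalt": "heightList", "vn1p2": ["data_output", 0],'
        #              ' "vn2p2": ["data_output", 1], "vn3": ["data_output", 2]}')
        # metadata  = '{"kinst": 1000, "kindat": 1600}'   # hypothetical codes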
447 if not self.isConfig:
448 self.setup(path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs)
449 self.isConfig = True
450
451 self.dataOut = dataOut
452 self.putData()
453 return
454
455 def setup(self, path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs):
456 '''
457 Configure Operation
458 '''
459
460 self.path = path
461 self.blocks = kwargs.get('blocks', None)
462 self.counter = 0
463 self.oneDDict = load_json(oneDDict)
464 self.twoDDict = load_json(twoDDict)
465 self.ind2DList = load_json(ind2DList)
466 meta = load_json(metadata)
467 self.kinst = meta.get('kinst')
468 self.kindat = meta.get('kindat')
469 self.catalog = meta.get('catalog', DEF_CATALOG)
470 self.header = meta.get('header', DEF_HEADER)
471 if format == 'cedar':
472 self.ext = '.dat'
473 self.extra_args = {}
474 elif format == 'hdf5':
475 self.ext = '.hdf5'
476 self.extra_args = {'ind2DList': self.ind2DList}
477
478 self.keys = [k.lower() for k in self.twoDDict]
479 if 'range' in self.keys:
480 self.keys.remove('range')
481 if 'gdalt' in self.keys:
482 self.keys.remove('gdalt')
483
484 def setFile(self):
485 '''
486 Create new cedar file object
487 '''
488
489 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
490 date = datetime.datetime.fromtimestamp(self.dataOut.utctime)
491
492 filename = '{}{}{}'.format(self.mnemonic,
493 date.strftime('%Y%m%d_%H%M%S'),
494 self.ext)
495
496 self.fullname = os.path.join(self.path, filename)
497
498 if os.path.isfile(self.fullname) :
499 log.warning(
500 'Destination path {} already exists. Previous file deleted.'.format(
501 self.fullname),
502 'MADWriter')
503 os.remove(self.fullname)
504
505 try:
506 log.success(
507 'Creating file: {}'.format(self.fullname),
508 'MADWriter')
509 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
510 except ValueError, e:
511 log.error(
512 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
513 'MADWriter')
514 return
515
516 return 1
517
518 def writeBlock(self):
519 '''
520 Add data records to cedar file taking data from oneDDict and twoDDict
521 attributes.
522 Allowed parameters in: parcodes.tab
523 '''
524
525 startTime = datetime.datetime.fromtimestamp(self.dataOut.utctime)
526 endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
527 heights = self.dataOut.heightList
528
529 if self.ext == '.dat':
530 invalid = numpy.isnan(self.dataOut.data_output)
531 self.dataOut.data_output[invalid] = self.missing
532 out = {}
533 for key, value in self.twoDDict.items():
534 key = key.lower()
535 if isinstance(value, str):
536 if 'db' in value.lower():
537 tmp = getattr(self.dataOut, value.replace('_db', ''))
538 SNRavg = numpy.average(tmp, axis=0)
539 tmp = 10*numpy.log10(SNRavg)
540 else:
541 tmp = getattr(self.dataOut, value)
542 out[key] = tmp.flatten()
543 elif isinstance(value, (tuple, list)):
544 attr, x = value
545 data = getattr(self.dataOut, attr)
546 out[key] = data[int(x)]
547
548 a = numpy.array([out[k] for k in self.keys])
549 nrows = numpy.array([numpy.isnan(a[:, x]).all() for x in range(len(heights))])
550 index = numpy.where(nrows == False)[0]
551
552 rec = madrigal.cedar.MadrigalDataRecord(
553 self.kinst,
554 self.kindat,
555 startTime.year,
556 startTime.month,
557 startTime.day,
558 startTime.hour,
559 startTime.minute,
560 startTime.second,
561 startTime.microsecond/10000,
562 endTime.year,
563 endTime.month,
564 endTime.day,
565 endTime.hour,
566 endTime.minute,
567 endTime.second,
568 endTime.microsecond/10000,
569 self.oneDDict.keys(),
570 self.twoDDict.keys(),
571 len(index),
572 **self.extra_args
573 )
574
575 # Setting 1d values
576 for key in self.oneDDict:
577 rec.set1D(key, getattr(self.dataOut, self.oneDDict[key]))
578
579 # Setting 2d values
580 nrec = 0
581 for n in index:
582 for key in out:
583 rec.set2D(key, nrec, out[key][n])
584 nrec += 1
585
586 self.fp.append(rec)
587 if self.ext == '.hdf5' and self.counter % 500 == 0 and self.counter > 0:
588 self.fp.dump()
589 if self.counter % 10 == 0 and self.counter > 0:
590 log.log(
591 'Writing {} records'.format(
592 self.counter),
593 'MADWriter')
594
595 def setHeader(self):
596 '''
597         Create and add catalog and header to the cedar file
598 '''
599
600 log.success('Closing file {}'.format(self.fullname), 'MADWriter')
601
602 if self.ext == '.dat':
603 self.fp.write()
604 else:
605 self.fp.dump()
606 self.fp.close()
607
608 header = madrigal.cedar.CatalogHeaderCreator(self.fullname)
609 header.createCatalog(**self.catalog)
610 header.createHeader(**self.header)
611 header.write()
612
613 def putData(self):
614
615 if self.dataOut.flagNoData:
616 return 0
617
618 if self.dataOut.flagDiscontinuousBlock or self.counter == self.blocks:
619 if self.counter > 0:
620 self.setHeader()
621 self.counter = 0
622
623 if self.counter == 0:
624 self.setFile()
625
626 self.writeBlock()
627 self.counter += 1
628
629 def close(self):
630
631 if self.counter > 0:
632 self.setHeader()
@@ -0,0 +1,803
1 import os, sys
2 import glob
3 import fnmatch
4 import datetime
5 import time
6 import re
7 import h5py
8 import numpy
9 import matplotlib.pyplot as plt
10
11 import pylab as plb
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar,exp
14 from scipy import stats
15
16 from numpy.ma.core import getdata
17
18 SPEED_OF_LIGHT = 299792458
19 SPEED_OF_LIGHT = 3e8
20
21 try:
22 from gevent import sleep
23 except:
24 from time import sleep
25
26 from schainpy.model.data.jrodata import Spectra
27 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
30 from numpy import imag, shape, NaN, empty
31
32
33
34 class Header(object):
35
36 def __init__(self):
37 raise NotImplementedError
38
39
40 def read(self):
41
42 raise NotImplementedError
43
44 def write(self):
45
46 raise NotImplementedError
47
48 def printInfo(self):
49
50 message = "#"*50 + "\n"
51 message += self.__class__.__name__.upper() + "\n"
52 message += "#"*50 + "\n"
53
54 keyList = self.__dict__.keys()
55 keyList.sort()
56
57 for key in keyList:
58 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
59
60 if "size" not in keyList:
61 attr = getattr(self, "size")
62
63 if attr:
64 message += "%s = %s" %("size", attr) + "\n"
65
66 #print message
67
68
69 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
70 ('Hname','a32'), #Original file name
71 ('Htime',numpy.str_,32), #Date and time when the file was created
72 ('Hoper',numpy.str_,64), #Name of operator who created the file
73 ('Hplace',numpy.str_,128), #Place where the measurements was carried out
74 ('Hdescr',numpy.str_,256), #Description of measurements
75 ('Hdummy',numpy.str_,512), #Reserved space
76 #Main chunk 8bytes
77 ('Msign',numpy.str_,4), #Main chunk signature FZKF or NUIG
78 ('MsizeData','<i4'), #Size of data block main chunk
79 #Processing DSP parameters 36bytes
80 ('PPARsign',numpy.str_,4), #PPAR signature
81 ('PPARsize','<i4'), #PPAR size of block
82 ('PPARprf','<i4'), #Pulse repetition frequency
83 ('PPARpdr','<i4'), #Pulse duration
84 ('PPARsft','<i4'), #FFT length
85 ('PPARavc','<i4'), #Number of spectral (in-coherent) averages
86 ('PPARihp','<i4'), #Number of lowest range gate for moment estimation
87 ('PPARchg','<i4'), #Count for gates for moment estimation
88 ('PPARpol','<i4'), #switch on/off polarimetric measurements. Should be 1.
89 #Service DSP parameters 112bytes
90 ('SPARatt','<i4'), #STC attenuation on the lowest ranges on/off
91 ('SPARtx','<i4'), #OBSOLETE
92 ('SPARaddGain0','<f4'), #OBSOLETE
93 ('SPARaddGain1','<f4'), #OBSOLETE
94 ('SPARwnd','<i4'), #Debug only. It normal mode it is 0.
95 ('SPARpos','<i4'), #Delay between sync pulse and tx pulse for phase corr, ns
96 ('SPARadd','<i4'), #"add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
97 ('SPARlen','<i4'), #Time for measuring txn pulse phase. OBSOLETE
98 ('SPARcal','<i4'), #OBSOLETE
99 ('SPARnos','<i4'), #OBSOLETE
100 ('SPARof0','<i4'), #detection threshold
101 ('SPARof1','<i4'), #OBSOLETE
102 ('SPARswt','<i4'), #2nd moment estimation threshold
103 ('SPARsum','<i4'), #OBSOLETE
104 ('SPARosc','<i4'), #flag Oscillosgram mode
105 ('SPARtst','<i4'), #OBSOLETE
106 ('SPARcor','<i4'), #OBSOLETE
107 ('SPARofs','<i4'), #OBSOLETE
108 ('SPARhsn','<i4'), #Hildebrand div noise detection on noise gate
109 ('SPARhsa','<f4'), #Hildebrand div noise detection on all gates
110 ('SPARcalibPow_M','<f4'), #OBSOLETE
111 ('SPARcalibSNR_M','<f4'), #OBSOLETE
112 ('SPARcalibPow_S','<f4'), #OBSOLETE
113 ('SPARcalibSNR_S','<f4'), #OBSOLETE
114 ('SPARrawGate1','<i4'), #Lowest range gate for spectra saving Raw_Gate1 >=5
115 ('SPARrawGate2','<i4'), #Number of range gates with atmospheric signal
116 ('SPARraw','<i4'), #flag - IQ or spectra saving on/off
117 ('SPARprc','<i4'),]) #flag - Moment estimation switched on/off
118
119
120
121 class FileHeaderMIRA35c(Header):
122
123 def __init__(self):
124
125 self.Hname= None
126 self.Htime= None
127 self.Hoper= None
128 self.Hplace= None
129 self.Hdescr= None
130 self.Hdummy= None
131
132 self.Msign=None
133 self.MsizeData=None
134
135 self.PPARsign=None
136 self.PPARsize=None
137 self.PPARprf=None
138 self.PPARpdr=None
139 self.PPARsft=None
140 self.PPARavc=None
141 self.PPARihp=None
142 self.PPARchg=None
143 self.PPARpol=None
144 #Service DSP parameters
145 self.SPARatt=None
146 self.SPARtx=None
147 self.SPARaddGain0=None
148 self.SPARaddGain1=None
149 self.SPARwnd=None
150 self.SPARpos=None
151 self.SPARadd=None
152 self.SPARlen=None
153 self.SPARcal=None
154 self.SPARnos=None
155 self.SPARof0=None
156 self.SPARof1=None
157 self.SPARswt=None
158 self.SPARsum=None
159 self.SPARosc=None
160 self.SPARtst=None
161 self.SPARcor=None
162 self.SPARofs=None
163 self.SPARhsn=None
164 self.SPARhsa=None
165 self.SPARcalibPow_M=None
166 self.SPARcalibSNR_M=None
167 self.SPARcalibPow_S=None
168 self.SPARcalibSNR_S=None
169 self.SPARrawGate1=None
170 self.SPARrawGate2=None
171 self.SPARraw=None
172 self.SPARprc=None
173
174 self.FHsize=1180
175
176 def FHread(self, fp):
177
178 header = numpy.fromfile(fp, FILE_HEADER,1)
179 ''' numpy.fromfile(file, dtype, count, sep='')
180 file : file or str
181 Open file object or filename.
182
183 dtype : data-type
184 Data type of the returned array. For binary files, it is used to determine
185 the size and byte-order of the items in the file.
186
187 count : int
188 Number of items to read. -1 means all items (i.e., the complete file).
189
190 sep : str
191 Separator between items if file is a text file. Empty ("") separator means
192 the file should be treated as binary. Spaces (" ") in the separator match zero
193 or more whitespace characters. A separator consisting only of spaces must match
194 at least one whitespace.
195
196 '''
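        # Minimal illustration (assuming 'mira.zspca' is any MIRA35c file of at
        # least 1180 bytes): one structured header record pulled from the stream.
        #
        # >>> import numpy
        # >>> fp = open('mira.zspca', 'rb')
        # >>> rec = numpy.fromfile(fp, FILE_HEADER, 1)   # one header record
        # >>> prf = rec['PPARprf'][0]                    # pulse repetition frequency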
197
198
199 self.Hname= str(header['Hname'][0])
200 self.Htime= str(header['Htime'][0])
201 self.Hoper= str(header['Hoper'][0])
202 self.Hplace= str(header['Hplace'][0])
203 self.Hdescr= str(header['Hdescr'][0])
204 self.Hdummy= str(header['Hdummy'][0])
205 #1024
206
207 self.Msign=str(header['Msign'][0])
208 self.MsizeData=header['MsizeData'][0]
209 #8
210
211 self.PPARsign=str(header['PPARsign'][0])
212 self.PPARsize=header['PPARsize'][0]
213 self.PPARprf=header['PPARprf'][0]
214 self.PPARpdr=header['PPARpdr'][0]
215 self.PPARsft=header['PPARsft'][0]
216 self.PPARavc=header['PPARavc'][0]
217 self.PPARihp=header['PPARihp'][0]
218 self.PPARchg=header['PPARchg'][0]
219 self.PPARpol=header['PPARpol'][0]
220 #Service DSP parameters
221 #36
222
223 self.SPARatt=header['SPARatt'][0]
224 self.SPARtx=header['SPARtx'][0]
225 self.SPARaddGain0=header['SPARaddGain0'][0]
226 self.SPARaddGain1=header['SPARaddGain1'][0]
227 self.SPARwnd=header['SPARwnd'][0]
228 self.SPARpos=header['SPARpos'][0]
229 self.SPARadd=header['SPARadd'][0]
230 self.SPARlen=header['SPARlen'][0]
231 self.SPARcal=header['SPARcal'][0]
232 self.SPARnos=header['SPARnos'][0]
233 self.SPARof0=header['SPARof0'][0]
234 self.SPARof1=header['SPARof1'][0]
235 self.SPARswt=header['SPARswt'][0]
236 self.SPARsum=header['SPARsum'][0]
237 self.SPARosc=header['SPARosc'][0]
238 self.SPARtst=header['SPARtst'][0]
239 self.SPARcor=header['SPARcor'][0]
240 self.SPARofs=header['SPARofs'][0]
241 self.SPARhsn=header['SPARhsn'][0]
242 self.SPARhsa=header['SPARhsa'][0]
243 self.SPARcalibPow_M=header['SPARcalibPow_M'][0]
244 self.SPARcalibSNR_M=header['SPARcalibSNR_M'][0]
245 self.SPARcalibPow_S=header['SPARcalibPow_S'][0]
246 self.SPARcalibSNR_S=header['SPARcalibSNR_S'][0]
247 self.SPARrawGate1=header['SPARrawGate1'][0]
248 self.SPARrawGate2=header['SPARrawGate2'][0]
249 self.SPARraw=header['SPARraw'][0]
250 self.SPARprc=header['SPARprc'][0]
251 #112
252 #1180
253 #print 'Pointer fp header', fp.tell()
254 #print ' '
255 #print 'SPARrawGate'
256 #print self.SPARrawGate2 - self.SPARrawGate1
257
258 #print ' '
259 #print 'Hname'
260 #print self.Hname
261
262 #print ' '
263 #print 'Msign'
264 #print self.Msign
265
266 def write(self, fp):
267
268 headerTuple = (self.Hname,
269 self.Htime,
270 self.Hoper,
271 self.Hplace,
272 self.Hdescr,
273 self.Hdummy)
274
275
276 header = numpy.array(headerTuple, FILE_HEADER)
277 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
278 header.tofile(fp)
279 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
280
281 fid : file or str
282 An open file object, or a string containing a filename.
283
284 sep : str
285 Separator between array items for text output. If "" (empty), a binary file is written,
286 equivalent to file.write(a.tobytes()).
287
288 format : str
289 Format string for text file output. Each entry in the array is formatted to text by
290 first converting it to the closest Python type, and then using "format" % item.
291
292 '''
293
294 return 1
295
296 SRVI_HEADER = numpy.dtype([
297 ('SignatureSRVI1',numpy.str_,4),#
298 ('SizeOfDataBlock1','<i4'),#
299 ('DataBlockTitleSRVI1',numpy.str_,4),#
300 ('SizeOfSRVI1','<i4'),])#
301
302 class SRVIHeader(Header):
303 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
304
305 self.SignatureSRVI1 = SignatureSRVI1
306 self.SizeOfDataBlock1 = SizeOfDataBlock1
307 self.DataBlockTitleSRVI1 = DataBlockTitleSRVI1
308 self.SizeOfSRVI1 = SizeOfSRVI1
309
310 self.SRVIHsize=16
311
312 def SRVIread(self, fp):
313
314 header = numpy.fromfile(fp, SRVI_HEADER,1)
315
316 self.SignatureSRVI1 = str(header['SignatureSRVI1'][0])
317 self.SizeOfDataBlock1 = header['SizeOfDataBlock1'][0]
318 self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0])
319 self.SizeOfSRVI1 = header['SizeOfSRVI1'][0]
320 #16
321 print 'Pointer fp SRVIheader', fp.tell()
322
323
324 SRVI_STRUCTURE = numpy.dtype([
325 ('frame_cnt','<u4'),#
326 ('time_t','<u4'), #
327 ('tpow','<f4'), #
328 ('npw1','<f4'), #
329 ('npw2','<f4'), #
330 ('cpw1','<f4'), #
331 ('pcw2','<f4'), #
332 ('ps_err','<u4'), #
333 ('te_err','<u4'), #
334 ('rc_err','<u4'), #
335 ('grs1','<u4'), #
336 ('grs2','<u4'), #
337 ('azipos','<f4'), #
338 ('azivel','<f4'), #
339 ('elvpos','<f4'), #
340 ('elvvel','<f4'), #
341 ('northAngle','<f4'), #
342 ('microsec','<u4'), #
343 ('azisetvel','<f4'), #
344 ('elvsetpos','<f4'), #
345 ('RadarConst','<f4'),]) #
346
347
348
349
350 class RecordHeader(Header):
351
352
353 def __init__(self, frame_cnt=0, time_t= 0, tpow=0, npw1=0, npw2=0,
354 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
355 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
356 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0 , RecCounter=0, Off2StartNxtRec=0):
357
358
359 self.frame_cnt = frame_cnt
360 self.dwell = time_t
361 self.tpow = tpow
362 self.npw1 = npw1
363 self.npw2 = npw2
364 self.cpw1 = cpw1
365 self.pcw2 = pcw2
366 self.ps_err = ps_err
367 self.te_err = te_err
368 self.rc_err = rc_err
369 self.grs1 = grs1
370 self.grs2 = grs2
371 self.azipos = azipos
372 self.azivel = azivel
373 self.elvpos = elvpos
374 self.elvvel = elvvel
375 self.northAngle = northangle
376 self.microsec = microsec
377 self.azisetvel = azisetvel
378 self.elvsetpos = elvsetpos
379 self.RadarConst = RadarConst
380 self.RHsize=84
381 self.RecCounter = RecCounter
382 self.Off2StartNxtRec=Off2StartNxtRec
383
384 def RHread(self, fp):
385
386 #startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
387
388 #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
389 #startFp.seek(OffRHeader, os.SEEK_SET)
390
391         #print 'Block position: ',OffRHeader
392
393 header = numpy.fromfile(fp,SRVI_STRUCTURE,1)
394
395 self.frame_cnt = header['frame_cnt'][0]#
396 self.time_t = header['time_t'][0] #
397 self.tpow = header['tpow'][0] #
398 self.npw1 = header['npw1'][0] #
399 self.npw2 = header['npw2'][0] #
400 self.cpw1 = header['cpw1'][0] #
401 self.pcw2 = header['pcw2'][0] #
402 self.ps_err = header['ps_err'][0] #
403 self.te_err = header['te_err'][0] #
404 self.rc_err = header['rc_err'][0] #
405 self.grs1 = header['grs1'][0] #
406 self.grs2 = header['grs2'][0] #
407 self.azipos = header['azipos'][0] #
408 self.azivel = header['azivel'][0] #
409 self.elvpos = header['elvpos'][0] #
410 self.elvvel = header['elvvel'][0] #
411 self.northAngle = header['northAngle'][0] #
412 self.microsec = header['microsec'][0] #
413 self.azisetvel = header['azisetvel'][0] #
414 self.elvsetpos = header['elvsetpos'][0] #
415 self.RadarConst = header['RadarConst'][0] #
416 #84
417
418 #print 'Pointer fp RECheader', fp.tell()
419
420 #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
421
422 #self.RHsize = 180+20*self.nChannels
423 #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
424 #print 'Datasize',self.Datasize
425 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
426
427 print '=============================================='
428
429 print '=============================================='
430
431
432 return 1
433
434 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
435
436 path = None
437 startDate = None
438 endDate = None
439 startTime = None
440 endTime = None
441 walk = None
442 isConfig = False
443
444
445 fileList= None
446
447 #metadata
448 TimeZone= None
449 Interval= None
450 heightList= None
451
452 #data
453 data= None
454 utctime= None
455
456
457
458 def __init__(self, **kwargs):
459
460         #Remove the inheritance from the base class
461 ProcessingUnit.__init__(self, **kwargs)
462 self.PointerReader = 0
463 self.FileHeaderFlag = False
464 self.utc = None
465 self.ext = ".zspca"
466 self.optchar = "P"
467 self.fpFile=None
468 self.fp = None
469 self.BlockCounter=0
470 self.dtype = None
471 self.fileSizeByHeader = None
472 self.filenameList = []
473 self.fileSelector = 0
474 self.Off2StartNxtRec=0
475 self.RecCounter=0
476 self.flagNoMoreFiles = 0
477 self.data_spc=None
478 #self.data_cspc=None
479 self.data_output=None
480 self.path = None
481 self.OffsetStartHeader=0
482 self.Off2StartData=0
483 self.ipp = 0
484 self.nFDTdataRecors=0
485 self.blocksize = 0
486 self.dataOut = Spectra()
487 self.profileIndex = 1 #Always
488 self.dataOut.flagNoData=False
489 self.dataOut.nRdPairs = 0
490 self.dataOut.pairsList = []
491 self.dataOut.data_spc=None
492
493 self.dataOut.normFactor=1
494 self.nextfileflag = True
495 self.dataOut.RadarConst = 0
496 self.dataOut.HSDV = []
497 self.dataOut.NPW = []
498 self.dataOut.COFA = []
499 self.dataOut.noise = 0
500
501
502 def Files2Read(self, fp):
503 '''
504         Function that indicates the number of .zspca files in the folder to be read.
505 It also creates an organized list with the names of the files to read.
506 '''
507 #self.__checkPath()
508
509 ListaData=os.listdir(fp) #Gets the list of files within the fp address
510         ListaData=sorted(ListaData)  #Sort the list of files alphabetically by name
511 nFiles=0 #File Counter
512         FileList=[]  #A list is created that will contain the .zspca files
513 for IndexFile in ListaData :
514 if '.zspca' in IndexFile and '.gz' not in IndexFile:
515 FileList.append(IndexFile)
516 nFiles+=1
517
518 #print 'Files2Read'
519         #print 'There are '+str(nFiles)+' .zspca files'
520
521         self.filenameList=FileList  #Alphabetically sorted list of file names
522
523
524 def run(self, **kwargs):
525 '''
526         This method starts the data input and is called on every iteration.
527         It first checks that setup() has been configured and then acquires
528         the data to be processed with getData().
529 '''
530 if not self.isConfig:
531 self.setup(**kwargs)
532 self.isConfig = True
533
534 self.getData()
535
536
537 def setup(self, path=None,
538 startDate=None,
539 endDate=None,
540 startTime=None,
541 endTime=None,
542 walk=True,
543 timezone='utc',
544 code = None,
545 online=False,
546 ReadMode=None, **kwargs):
547
548 self.isConfig = True
549
550 self.path=path
551 self.startDate=startDate
552 self.endDate=endDate
553 self.startTime=startTime
554 self.endTime=endTime
555 self.walk=walk
556 #self.ReadMode=int(ReadMode)
557
558 pass
559
560
561 def getData(self):
562 '''
563         Before running this function, check that there is still an unread file,
564         that there are still blocks to read and that the data block is not empty.
565
566         Then call readFile() to load the next file.
567
568 '''
569
570 if self.flagNoMoreFiles:
571 self.dataOut.flagNoData = True
572             print 'flagNoData is set to True'
573 return 0
574
575 self.fp=self.path
576 self.Files2Read(self.fp)
577 self.readFile(self.fp)
578
579 self.dataOut.data_spc = self.dataOut_spc#self.data_spc.copy()
580 self.dataOut.RadarConst = self.RadarConst
581 self.dataOut.data_output=self.data_output
582 self.dataOut.noise = self.dataOut.getNoise()
583 #print 'ACAAAAAA', self.dataOut.noise
584 self.dataOut.data_spc = self.dataOut.data_spc+self.dataOut.noise
585 #print 'self.dataOut.noise',self.dataOut.noise
586
587
588 return self.dataOut.data_spc
589
590
591 def readFile(self,fp):
592 '''
593         Indicate whether the file is read in online or offline mode and load
594         the parameters for that reading mode.
595
596 Then you must do 2 actions:
597
598         1. Get the MIRA35c FileHeader.
599 2. Start reading the first block.
600 '''
601
602         #The full path of the .zspca file to be read is built from the folder path and the file name
603 print "File: ",self.fileSelector+1
604
605 if self.fileSelector < len(self.filenameList):
606
607 self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector])
608
609 if self.nextfileflag==True:
610 self.fp = open(self.fpFile,"rb")
611                 self.nextfileflag=False
612
613 '''HERE STARTING THE FILE READING'''
614
615
616 self.fheader = FileHeaderMIRA35c()
617                 self.fheader.FHread(self.fp)  #MIRA35c FileHeader reading
618
619
620 self.SPARrawGate1 = self.fheader.SPARrawGate1
621 self.SPARrawGate2 = self.fheader.SPARrawGate2
622 self.Num_Hei = self.SPARrawGate2 - self.SPARrawGate1
623 self.Num_Bins = self.fheader.PPARsft
624 self.dataOut.nFFTPoints = self.fheader.PPARsft
625
626
627 self.Num_inCoh = self.fheader.PPARavc
628 self.dataOut.PRF = self.fheader.PPARprf
629 self.dataOut.frequency = 34.85*10**9
630 self.Lambda = SPEED_OF_LIGHT/self.dataOut.frequency
631 self.dataOut.ippSeconds= 1./float(self.dataOut.PRF)
632
633 pulse_width = self.fheader.PPARpdr * 10**-9
634 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
635
636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins,2))#
637 self.dataOut.HSDV = numpy.zeros((self.Num_Hei, 2))
638
639 self.Ze = numpy.zeros(self.Num_Hei)
640 self.ETA = numpy.zeros(([2,self.Num_Hei]))
641
642
643
644 self.readBlock() #Block reading
645
646 else:
647 print 'readFile FlagNoData becomes true'
648 self.flagNoMoreFiles=True
649 self.dataOut.flagNoData = True
650             self.FileHeaderFlag = True
651 return 0
652
653
654
655 def readBlock(self):
656 '''
657         Check whether the block still has data; if it does not, move on to the next file.
658
659 Then the following is done:
660
661 1. Read the RecordHeader
662 2. Fill the buffer with the current block number.
663
664 '''
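        # Hedged summary of the decoding below (arithmetic illustration only):
        # each range gate stores 'nspc' spectral sub-pieces; every piece gives a
        # start bin, a length 'nbins', nbins raw uint16 samples and a float32
        # maximum, and power is recovered as jbin / 65530. * jmax, e.g.
        #
        # >>> jbin, jmax = 32765, 2.0e-12   # assumed raw sample and piece maximum
        # >>> jbin / 65530. * jmax
        # 1e-12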
665
666 if self.PointerReader > 1180:
667 self.fp.seek(self.PointerReader , os.SEEK_SET)
668 self.FirstPoint = self.PointerReader
669
670 else :
671 self.FirstPoint = 1180
672
673
674
675 self.srviHeader = SRVIHeader()
676
677         self.srviHeader.SRVIread(self.fp)  #The SRVI header is read
678
679         self.blocksize = self.srviHeader.SizeOfDataBlock1  # The data block size is obtained
680
681 if self.blocksize == 148:
682 print 'blocksize == 148 bug'
683 jump = numpy.fromfile(self.fp,[('jump',numpy.str_,140)] ,1)
684
685             self.srviHeader.SRVIread(self.fp)  #The SRVI header is read
686
687 if not self.srviHeader.SizeOfSRVI1:
688 self.fileSelector+=1
689             self.nextfileflag=True
690             self.FileHeaderFlag = True
691
692 self.recordheader = RecordHeader()
693 self.recordheader.RHread(self.fp)
694 self.RadarConst = self.recordheader.RadarConst
695 dwell = self.recordheader.time_t
696 npw1 = self.recordheader.npw1
697 npw2 = self.recordheader.npw2
698
699
700 self.dataOut.channelList = range(1)
701 self.dataOut.nIncohInt = self.Num_inCoh
702 self.dataOut.nProfiles = self.Num_Bins
703 self.dataOut.nCohInt = 1
704 self.dataOut.windowOfFilter = 1
705 self.dataOut.utctime = dwell
706 self.dataOut.timeZone=0
707
708 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
709 self.dataOut.heightList = self.SPARrawGate1*self.__deltaHeigth + numpy.array(range(self.Num_Hei))*self.__deltaHeigth
710
711
712
713 self.HSDVsign = numpy.fromfile( self.fp, [('HSDV',numpy.str_,4)],1)
714 self.SizeHSDV = numpy.fromfile( self.fp, [('SizeHSDV','<i4')],1)
715 self.HSDV_Co = numpy.fromfile( self.fp, [('HSDV_Co','<f4')],self.Num_Hei)
716 self.HSDV_Cx = numpy.fromfile( self.fp, [('HSDV_Cx','<f4')],self.Num_Hei)
717
718 self.COFAsign = numpy.fromfile( self.fp, [('COFA',numpy.str_,4)],1)
719 self.SizeCOFA = numpy.fromfile( self.fp, [('SizeCOFA','<i4')],1)
720 self.COFA_Co = numpy.fromfile( self.fp, [('COFA_Co','<f4')],self.Num_Hei)
721 self.COFA_Cx = numpy.fromfile( self.fp, [('COFA_Cx','<f4')],self.Num_Hei)
722
723 self.ZSPCsign = numpy.fromfile(self.fp, [('ZSPCsign',numpy.str_,4)],1)
724 self.SizeZSPC = numpy.fromfile(self.fp, [('SizeZSPC','<i4')],1)
725
726 self.dataOut.HSDV[0]=self.HSDV_Co[:][0]
727 self.dataOut.HSDV[1]=self.HSDV_Cx[:][0]
728
729 for irg in range(self.Num_Hei):
730 nspc = numpy.fromfile(self.fp, [('nspc','int16')],1)[0][0] # Number of spectral sub pieces containing significant power
731
732 for k in range(nspc):
733 binIndex = numpy.fromfile(self.fp, [('binIndex','int16')],1)[0][0] # Index of the spectral bin where the piece is beginning
734 nbins = numpy.fromfile(self.fp, [('nbins','int16')],1)[0][0] # Number of bins of the piece
735
736 #Co_Channel
737                 jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0]   # Spectrum piece to be normalized
738                 jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0]      # Maximum value used to normalize the piece
739
740
741 self.data_spc[irg,binIndex:binIndex+nbins,0] = self.data_spc[irg,binIndex:binIndex+nbins,0]+jbin/65530.*jmax
742
743 #Cx_Channel
744 jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0]
745 jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0]
746
747
748 self.data_spc[irg,binIndex:binIndex+nbins,1] = self.data_spc[irg,binIndex:binIndex+nbins,1]+jbin/65530.*jmax
749
750 for bin in range(self.Num_Bins):
751
752 self.data_spc[:,bin,0] = self.data_spc[:,bin,0] - self.dataOut.HSDV[:,0]
753
754 self.data_spc[:,bin,1] = self.data_spc[:,bin,1] - self.dataOut.HSDV[:,1]
755
756
757 numpy.set_printoptions(threshold='nan')
758
759 self.data_spc = numpy.where(self.data_spc > 0. , self.data_spc, 0)
760
761 self.dataOut.COFA = numpy.array([self.COFA_Co , self.COFA_Cx])
762
763 print ' '
764 print 'SPC',numpy.shape(self.dataOut.data_spc)
765 #print 'SPC',self.dataOut.data_spc
766
767 noinor1 = 713031680
768 noinor2 = 30
769
770 npw1 = 1#0**(npw1/10) * noinor1 * noinor2
771 npw2 = 1#0**(npw2/10) * noinor1 * noinor2
772 self.dataOut.NPW = numpy.array([npw1, npw2])
773
774 print ' '
775
776 self.data_spc = numpy.transpose(self.data_spc, (2,1,0))
777 self.data_spc = numpy.fft.fftshift(self.data_spc, axes = 1)
778
779 self.data_spc = numpy.fliplr(self.data_spc)
780
781 self.data_spc = numpy.where(self.data_spc > 0. , self.data_spc, 0)
782 self.dataOut_spc= numpy.ones([1, self.Num_Bins , self.Num_Hei])
783 self.dataOut_spc[0,:,:] = self.data_spc[0,:,:]
784 #print 'SHAPE', self.dataOut_spc.shape
785 #For nyquist correction:
786 #fix = 20 # ~3m/s
787 #shift = self.Num_Bins/2 + fix
788 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
789
790
791
792 '''Block reading: the block data is read from the file and reshaped into
793 its final form.
794 '''
795
796 self.PointerReader = self.fp.tell()
797
798
799
800
801
802
803 No newline at end of file
@@ -0,0 +1,403
1 '''
2 Created on Oct 24, 2016
3
4 @author: roj- LouVD
5 '''
6
7 import numpy
8 import copy
9 import datetime
10 import time
11 from time import gmtime
12
13 from numpy import transpose
14
15 from jroproc_base import ProcessingUnit, Operation
16 from schainpy.model.data.jrodata import Parameters
17
18
19 class BLTRParametersProc(ProcessingUnit):
20 '''
21 Processing unit for BLTR parameters data (winds)
22
23 Inputs:
24 self.dataOut.nmodes - Number of operation modes
25 self.dataOut.nchannels - Number of channels
26 self.dataOut.nranges - Number of ranges
27
28 self.dataOut.data_SNR - SNR array
29 self.dataOut.data_output - Zonal, Meridional and Vertical velocity array
30 self.dataOut.height - Height array (km)
31 self.dataOut.time - Time array (seconds)
32
33 self.dataOut.fileIndex - Index of the file currently read
34 self.dataOut.lat - Latitude coordinate of BLTR location
35
36 self.dataOut.doy - Experiment doy (number of the day in the current year)
37 self.dataOut.month - Experiment month
38 self.dataOut.day - Experiment day
39 self.dataOut.year - Experiment year
40 '''
41
42 def __init__(self, **kwargs):
43 '''
44 Inputs: None
45 '''
46 ProcessingUnit.__init__(self, **kwargs)
47 self.dataOut = Parameters()
48 self.isConfig = False
49
50 def setup(self, mode):
51 '''
52 '''
53 self.dataOut.mode = mode
54
55 def run(self, mode, snr_threshold=None):
56 '''
57 Inputs:
58 mode = High resolution (0) or Low resolution (1) data
59 snr_threshold = snr filter value
60 '''
61
62 if not self.isConfig:
63 self.setup(mode)
64 self.isConfig = True
65
66 if self.dataIn.type == 'Parameters':
67 self.dataOut.copy(self.dataIn)
68
69 self.dataOut.data_output = self.dataOut.data_output[mode]
70 self.dataOut.heightList = self.dataOut.height[0]
71 self.dataOut.data_SNR = self.dataOut.data_SNR[mode]
72
73 if snr_threshold is not None:
74 SNRavg = numpy.average(self.dataOut.data_SNR, axis=0)
75 SNRavgdB = 10*numpy.log10(SNRavg)
76 for i in range(3):
77 self.dataOut.data_output[i][SNRavgdB <= snr_threshold] = numpy.nan
78
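The snr_threshold branch above averages the SNR over channels, converts it to dB and blanks the three wind components wherever the averaged SNR falls at or below the threshold. A short self-contained sketch of the same masking on toy arrays (the names below are placeholders, not the real dataOut attributes):

import numpy

snr = numpy.array([[2.0, 0.5, 0.05],
                   [1.5, 0.8, 0.02]])      # channels x heights, linear SNR
winds = numpy.random.randn(3, 3)           # zonal, meridional, vertical components

snr_avg_db = 10 * numpy.log10(numpy.average(snr, axis=0))  # channel average, in dB
for i in range(3):
    winds[i][snr_avg_db <= 0.0] = numpy.nan                # mask below a 0 dB threshold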
79 # TODO
80 class OutliersFilter(Operation):
81
82 def __init__(self, **kwargs):
83 '''
84 '''
85 Operation.__init__(self, **kwargs)
86
87 def run(self, svalue, svalue2, method, factor, filter, npoints=9):
88 '''
89 Inputs:
90 svalue - string to select array velocity
91 svalue2 - string to choose axis filtering
92 method - 0 for SMOOTH or 1 for MEDIAN
93 factor - number used to set threshold
94 filter - 1 to filter the data using the standard-deviation criterion, 0 otherwise
95 npoints - number of points for mask filter
96 '''
97
98 print ' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor)
99
100
101 yaxis = self.dataOut.heightList
102 xaxis = numpy.array([[self.dataOut.utctime]])
103
104 # Zonal
105 value_temp = self.dataOut.data_output[0]
106
107 # Meridional
108 value_temp = self.dataOut.data_output[1]
109
110 # Vertical
111 value_temp = numpy.transpose(self.dataOut.data_output[2])
112
113 htemp = yaxis
114 std = value_temp
115 for h in range(len(htemp)):
116 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
117 minvalid = npoints
118
119 #only process this height if the number of valid values exceeds the minimum required
120 if nvalues_valid > minvalid:
121
122 if method == 0:
123 #SMOOTH
124 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
125
126
127 if method == 1:
128 #MEDIAN
129 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
130
131 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
132
133 threshold = dw*factor
134 value_temp[numpy.where(w > threshold),h] = numpy.nan
135 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
136
137
138 #At the end
139 if svalue2 == 'inHeight':
140 value_temp = numpy.transpose(value_temp)
141 output_array[:,m] = value_temp
142
143 if svalue == 'zonal':
144 self.dataOut.data_output[0] = output_array
145
146 elif svalue == 'meridional':
147 self.dataOut.data_output[1] = output_array
148
149 elif svalue == 'vertical':
150 self.dataOut.data_output[2] = output_array
151
152 return self.dataOut.data_output
153
154
155 def Median(self,input,width):
156 '''
157 Inputs:
158 input - Velocity array
159 width - Number of points for mask filter
160
161 '''
162
163 if numpy.mod(width,2) == 1:
164 pc = int((width - 1) / 2)
165 cont = 0
166 output = []
167
168 for i in range(len(input)):
169 if i >= pc and i < len(input) - pc:
170 new2 = input[i-pc:i+pc+1]
171 temp = numpy.where(numpy.isfinite(new2))
172 new = new2[temp]
173 value = numpy.median(new)
174 output.append(value)
175
176 output = numpy.array(output)
177 output = numpy.hstack((input[0:pc],output))
178 output = numpy.hstack((output,input[-pc:len(input)]))
179
180 return output
181
182 def Smooth(self,input,width,edge_truncate = None):
183 '''
184 Inputs:
185 input - Velocity array
186 width - Number of points for mask filter
187 edge_truncate - 1 to truncate the convolution at the edges; otherwise edge samples are copied from the input
188
189 '''
190
191 if numpy.mod(width,2) == 0:
192 real_width = width + 1
193 nzeros = width / 2
194 else:
195 real_width = width
196 nzeros = (width - 1) / 2
197
198 half_width = int(real_width)/2
199 length = len(input)
200
201 gate = numpy.ones(real_width,dtype='float')
202 norm_of_gate = numpy.sum(gate)
203
204 nan_process = 0
205 nan_id = numpy.where(numpy.isnan(input))
206 if len(nan_id[0]) > 0:
207 nan_process = 1
208 pb = numpy.zeros(len(input))
209 pb[nan_id] = 1.
210 input[nan_id] = 0.
211
212 if edge_truncate == True:
213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
214 elif edge_truncate == False or edge_truncate == None:
215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
216 output = numpy.hstack((input[0:half_width],output))
217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
218
219 if nan_process:
220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
221 pb = numpy.hstack((numpy.zeros(half_width),pb))
222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
223 output[numpy.where(pb > 0.9999)] = numpy.nan
224 input[nan_id] = numpy.nan
225 return output
226
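Smooth above is a boxcar (running-mean) filter that tolerates NaNs: missing samples are zero-filled, a parallel mask of the NaN positions is convolved with the same gate, and any output point whose window was essentially all NaN is flagged back to NaN. A compact sketch of the same idea, assuming an odd window width and toy data:

import numpy

def nan_boxcar(values, width):
    # Running mean of length 'width' that tolerates isolated NaNs.
    gate = numpy.ones(width) / width
    mask = numpy.isnan(values).astype(float)
    filled = numpy.where(numpy.isnan(values), 0., values)
    smoothed = numpy.convolve(filled, gate, mode='same')
    bad_fraction = numpy.convolve(mask, gate, mode='same')
    smoothed[bad_fraction > 0.9999] = numpy.nan   # window was almost entirely NaN
    return smoothed

velocity = numpy.array([1., 2., numpy.nan, 4., 5., 6.])
print(nan_boxcar(velocity, 3))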
227 def Average(self,aver=0,nhaver=1):
228 '''
229 Inputs:
230 aver - Index selecting the averaging (consensus) time period in hours
231 nhaver - Indicates the decimation factor in heights
232
233 '''
234 nhpoints = 48
235
236 lat_piura = -5.17
237 lat_huancayo = -12.04
238 lat_porcuya = -5.8
239
240 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
241 hcm = 3.
242 if self.dataOut.year == 2003 :
243 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
244 nhpoints = 12
245
246 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
247 hcm = 3.
248 if self.dataOut.year == 2003 :
249 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
250 nhpoints = 12
251
252
253 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
254 hcm = 5.#2
255
256 pdata = 0.2
257 taver = [1,2,3,4,6,8,12,24]
258 t0 = 0
259 tf = 24
260 ntime =(tf-t0)/taver[aver]
261 ti = numpy.arange(ntime)
262 tf = numpy.arange(ntime) + taver[aver]
263
264
265 old_height = self.dataOut.heightList
266
267 if nhaver > 1:
268 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
269 deltha = 0.05*nhaver
270 minhvalid = pdata*nhaver
271 for im in range(self.dataOut.nmodes):
272 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
273
274
275 data_fHeigths_List = []
276 data_fZonal_List = []
277 data_fMeridional_List = []
278 data_fVertical_List = []
279 startDTList = []
280
281
282 for i in range(ntime):
283 height = old_height
284
285 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
286 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
287
288
289 limit_sec1 = time.mktime(start.timetuple())
290 limit_sec2 = time.mktime(stop.timetuple())
291
292 t1 = numpy.where(self.f_timesec >= limit_sec1)
293 t2 = numpy.where(self.f_timesec < limit_sec2)
294 time_select = []
295 for val_sec in t1[0]:
296 if val_sec in t2[0]:
297 time_select.append(val_sec)
298
299
300 time_select = numpy.array(time_select,dtype = 'int')
301 minvalid = numpy.ceil(pdata*nhpoints)
302
303 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
304 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
305 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
306
307 if nhaver > 1:
308 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
309 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
310 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
311
312 if len(time_select) > minvalid:
313 time_average = self.f_timesec[time_select]
314
315 for im in range(self.dataOut.nmodes):
316
317 for ih in range(self.dataOut.nranges):
318 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
319 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
320
321 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
322 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
323
324 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
325 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
326
327 if nhaver > 1:
328 for ih in range(num_hei):
329 hvalid = numpy.arange(nhaver) + nhaver*ih
330
331 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
332 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
333
334 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
335 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
336
337 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
338 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
339 if nhaver > 1:
340 zon_aver = new_zon_aver
341 mer_aver = new_mer_aver
342 ver_aver = new_ver_aver
343 height = new_height
344
345
346 tstart = time_average[0]
347 tend = time_average[-1]
348 startTime = time.gmtime(tstart)
349
350 year = startTime.tm_year
351 month = startTime.tm_mon
352 day = startTime.tm_mday
353 hour = startTime.tm_hour
354 minute = startTime.tm_min
355 second = startTime.tm_sec
356
357 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
358
359
360 o_height = numpy.array([])
361 o_zon_aver = numpy.array([])
362 o_mer_aver = numpy.array([])
363 o_ver_aver = numpy.array([])
364 if self.dataOut.nmodes > 1:
365 for im in range(self.dataOut.nmodes):
366
367 if im == 0:
368 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
369 else:
370 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
371
372
373 ht = h_select[0]
374
375 o_height = numpy.hstack((o_height,height[im,ht]))
376 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
377 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
378 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
379
380 data_fHeigths_List.append(o_height)
381 data_fZonal_List.append(o_zon_aver)
382 data_fMeridional_List.append(o_mer_aver)
383 data_fVertical_List.append(o_ver_aver)
384
385
386 else:
387 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
388 ht = h_select[0]
389 o_height = numpy.hstack((o_height,height[im,ht]))
390 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
391 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
392 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
393
394 data_fHeigths_List.append(o_height)
395 data_fZonal_List.append(o_zon_aver)
396 data_fMeridional_List.append(o_mer_aver)
397 data_fVertical_List.append(o_ver_aver)
398
399
400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401
402
403 No newline at end of file
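The Average method above builds a consensus wind per averaging window: a height/mode cell is kept only when at least minvalid finite samples fall inside the window, and the stored value is the mean of those finite samples. A minimal sketch of that per-cell rule on a toy series (array names are illustrative):

import numpy

samples = numpy.array([3.0, numpy.nan, 2.0, numpy.nan, 4.0])  # one cell over one time window
minvalid = 2                                                  # minimum finite points required

nfinite = numpy.sum(numpy.isfinite(samples))
if nfinite >= minvalid:
    consensus = numpy.nansum(samples) / nfinite               # mean of the finite samples
else:
    consensus = numpy.nan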
@@ -4,4 +4,4 Created on Feb 7, 2012
4 @author $Author$
4 @author $Author$
5 @version $Id$
5 @version $Id$
6 '''
6 '''
7 __version__ = "2.3"
7 __version__ = '2.3'
@@ -1177,6 +1177,8 class Parameters(Spectra):
1177 nAvg = None
1177 nAvg = None
1178
1178
1179 noise_estimation = None
1179 noise_estimation = None
1180
1181 GauSPC = None #Fit gaussian SPC
1180
1182
1181
1183
1182 def __init__(self):
1184 def __init__(self):
@@ -1211,8 +1213,15 class Parameters(Spectra):
1211 else:
1213 else:
1212 return self.paramInterval
1214 return self.paramInterval
1213
1215
1216 def setValue(self, value):
1217
1218 print "This property should not be initialized"
1219
1220 return
1221
1214 def getNoise(self):
1222 def getNoise(self):
1215
1223
1216 return self.spc_noise
1224 return self.spc_noise
1217
1225
1218 timeInterval = property(getTimeInterval)
1226 timeInterval = property(getTimeInterval)
1227 noise = property(getNoise, setValue, "I'm the 'Noise' property.")
@@ -15,10 +15,16 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
15 from schainpy.model.proc.jroproc_base import Operation
15 from schainpy.model.proc.jroproc_base import Operation
16 from schainpy.utils import log
16 from schainpy.utils import log
17
17
18 func = lambda x, pos: ('%s') %(datetime.datetime.fromtimestamp(x).strftime('%H:%M'))
18 jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90]
19 blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15]
20 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values)))
21 matplotlib.pyplot.register_cmap(cmap=ncmap)
19
22
20 d1970 = datetime.datetime(1970, 1, 1)
23 func = lambda x, pos: '{}'.format(datetime.datetime.fromtimestamp(x).strftime('%H:%M'))
21
24
25 UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)
26
27 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'RdBu_r', 'seismic')]
22
28
23 class PlotData(Operation, Process):
29 class PlotData(Operation, Process):
24 '''
30 '''
@@ -59,9 +65,7 class PlotData(Operation, Process):
59 self.zmin = kwargs.get('zmin', None)
65 self.zmin = kwargs.get('zmin', None)
60 self.zmax = kwargs.get('zmax', None)
66 self.zmax = kwargs.get('zmax', None)
61 self.zlimits = kwargs.get('zlimits', None)
67 self.zlimits = kwargs.get('zlimits', None)
62 self.xmin = kwargs.get('xmin', None)
68 self.xmin = kwargs.get('xmin', None)
63 if self.xmin is not None:
64 self.xmin += 5
65 self.xmax = kwargs.get('xmax', None)
69 self.xmax = kwargs.get('xmax', None)
66 self.xrange = kwargs.get('xrange', 24)
70 self.xrange = kwargs.get('xrange', 24)
67 self.ymin = kwargs.get('ymin', None)
71 self.ymin = kwargs.get('ymin', None)
@@ -83,6 +87,8 class PlotData(Operation, Process):
83
87
84 self.setup()
88 self.setup()
85
89
90 self.time_label = 'LT' if self.localtime else 'UTC'
91
86 if self.width is None:
92 if self.width is None:
87 self.width = 8
93 self.width = 8
88
94
@@ -106,6 +112,7 class PlotData(Operation, Process):
106 ax = fig.add_subplot(self.nrows, self.ncols, n+1)
112 ax = fig.add_subplot(self.nrows, self.ncols, n+1)
107 ax.tick_params(labelsize=8)
113 ax.tick_params(labelsize=8)
108 ax.firsttime = True
114 ax.firsttime = True
115 ax.index = 0
109 self.axes.append(ax)
116 self.axes.append(ax)
110 if self.showprofile:
117 if self.showprofile:
111 cax = self.__add_axes(ax, size=size, pad=pad)
118 cax = self.__add_axes(ax, size=size, pad=pad)
@@ -121,6 +128,7 class PlotData(Operation, Process):
121 ax = fig.add_subplot(1, 1, 1)
128 ax = fig.add_subplot(1, 1, 1)
122 ax.tick_params(labelsize=8)
129 ax.tick_params(labelsize=8)
123 ax.firsttime = True
130 ax.firsttime = True
131 ax.index = 0
124 self.figures.append(fig)
132 self.figures.append(fig)
125 self.axes.append(ax)
133 self.axes.append(ax)
126 if self.showprofile:
134 if self.showprofile:
@@ -136,6 +144,29 class PlotData(Operation, Process):
136 cmap.set_bad(self.bgcolor, 1.)
144 cmap.set_bad(self.bgcolor, 1.)
137 self.cmaps.append(cmap)
145 self.cmaps.append(cmap)
138
146
147 for fig in self.figures:
148 fig.canvas.mpl_connect('key_press_event', self.event_key_press)
149
150 def event_key_press(self, event):
151 '''
152 '''
153
154 for ax in self.axes:
155 if ax == event.inaxes:
156 if event.key == 'down':
157 ax.index += 1
158 elif event.key == 'up':
159 ax.index -= 1
160 if ax.index < 0:
161 ax.index = len(CMAPS) - 1
162 elif ax.index == len(CMAPS):
163 ax.index = 0
164 cmap = CMAPS[ax.index]
165 ax.cbar.set_cmap(cmap)
166 ax.cbar.draw_all()
167 ax.plt.set_cmap(cmap)
168 ax.cbar.patch.figure.canvas.draw()
169
139 def __add_axes(self, ax, size='30%', pad='8%'):
170 def __add_axes(self, ax, size='30%', pad='8%'):
140 '''
171 '''
141 Add new axes to the given figure
172 Add new axes to the given figure
@@ -145,6 +176,7 class PlotData(Operation, Process):
145 ax.figure.add_axes(nax)
176 ax.figure.add_axes(nax)
146 return nax
177 return nax
147
178
179 self.setup()
148
180
149 def setup(self):
181 def setup(self):
150 '''
182 '''
@@ -203,7 +235,7 class PlotData(Operation, Process):
203 if self.xaxis is 'time':
235 if self.xaxis is 'time':
204 dt = datetime.datetime.fromtimestamp(self.min_time)
236 dt = datetime.datetime.fromtimestamp(self.min_time)
205 xmin = (datetime.datetime.combine(dt.date(),
237 xmin = (datetime.datetime.combine(dt.date(),
206 datetime.time(int(self.xmin), 0, 0))-d1970).total_seconds()
238 datetime.time(int(self.xmin), 0, 0))-UT1970).total_seconds()
207 else:
239 else:
208 xmin = self.xmin
240 xmin = self.xmin
209
241
@@ -213,7 +245,7 class PlotData(Operation, Process):
213 if self.xaxis is 'time':
245 if self.xaxis is 'time':
214 dt = datetime.datetime.fromtimestamp(self.min_time)
246 dt = datetime.datetime.fromtimestamp(self.min_time)
215 xmax = (datetime.datetime.combine(dt.date(),
247 xmax = (datetime.datetime.combine(dt.date(),
216 datetime.time(int(self.xmax), 0, 0))-d1970).total_seconds()
248 datetime.time(int(self.xmax), 0, 0))-UT1970).total_seconds()
217 else:
249 else:
218 xmax = self.xmax
250 xmax = self.xmax
219
251
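The two hunks above convert the user-supplied xmin/xmax (hours of the day) into epoch seconds by combining them with the date of the first sample and subtracting UT1970, the Unix epoch shifted by the machine's UTC offset (time.timezone), which keeps the limits consistent with the times array that was shifted the same way. A hedged sketch of the conversion (the example hour is arbitrary):

import time
import datetime

UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)

min_time = time.time()                        # timestamp of the first plotted sample
dt = datetime.datetime.fromtimestamp(min_time)
xmin_hour = 7                                 # hypothetical xmin (07:00 local time)
xmin = (datetime.datetime.combine(dt.date(),
        datetime.time(xmin_hour, 0, 0)) - UT1970).total_seconds()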
@@ -240,20 +272,20 class PlotData(Operation, Process):
240 self.pf_axes[n].grid(b=True, axis='x')
272 self.pf_axes[n].grid(b=True, axis='x')
241 [tick.set_visible(False) for tick in self.pf_axes[n].get_yticklabels()]
273 [tick.set_visible(False) for tick in self.pf_axes[n].get_yticklabels()]
242 if self.colorbar:
274 if self.colorbar:
243 cb = plt.colorbar(ax.plt, ax=ax, pad=0.02)
275 ax.cbar = plt.colorbar(ax.plt, ax=ax, pad=0.02, aspect=10)
244 cb.ax.tick_params(labelsize=8)
276 ax.cbar.ax.tick_params(labelsize=8)
245 if self.cb_label:
277 if self.cb_label:
246 cb.set_label(self.cb_label, size=8)
278 ax.cbar.set_label(self.cb_label, size=8)
247 elif self.cb_labels:
279 elif self.cb_labels:
248 cb.set_label(self.cb_labels[n], size=8)
280 ax.cbar.set_label(self.cb_labels[n], size=8)
249
281
250 ax.set_title('{} - {} UTC'.format(
282 ax.set_title('{} - {} {}'.format(
251 self.titles[n],
283 self.titles[n],
252 datetime.datetime.fromtimestamp(self.max_time).strftime('%H:%M:%S')),
284 datetime.datetime.fromtimestamp(self.max_time).strftime('%H:%M:%S'),
285 self.time_label),
253 size=8)
286 size=8)
254 ax.set_xlim(xmin, xmax)
287 ax.set_xlim(xmin, xmax)
255 ax.set_ylim(ymin, ymax)
288 ax.set_ylim(ymin, ymax)
256
257
289
258 def __plot(self):
290 def __plot(self):
259 '''
291 '''
@@ -313,10 +345,15 class PlotData(Operation, Process):
313
345
314 while True:
346 while True:
315 try:
347 try:
316 self.data = receiver.recv_pyobj(flags=zmq.NOBLOCK)
348 self.data = receiver.recv_pyobj(flags=zmq.NOBLOCK)
317
349
318 self.min_time = self.data.times[0]
350 if self.localtime:
319 self.max_time = self.data.times[-1]
351 self.times = self.data.times - time.timezone
352 else:
353 self.times = self.data.times
354
355 self.min_time = self.times[0]
356 self.max_time = self.times[-1]
320
357
321 if self.isConfig is False:
358 if self.isConfig is False:
322 self.__setup()
359 self.__setup()
@@ -335,7 +372,6 class PlotData(Operation, Process):
335 if self.data:
372 if self.data:
336 self.__plot()
373 self.__plot()
337
374
338
339 class PlotSpectraData(PlotData):
375 class PlotSpectraData(PlotData):
340 '''
376 '''
341 Plot for Spectra data
377 Plot for Spectra data
@@ -532,7 +568,7 class PlotRTIData(PlotData):
532 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
568 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
533
569
534 def plot(self):
570 def plot(self):
535 self.x = self.data.times
571 self.x = self.times
536 self.y = self.data.heights
572 self.y = self.data.heights
537 self.z = self.data[self.CODE]
573 self.z = self.data[self.CODE]
538 self.z = numpy.ma.masked_invalid(self.z)
574 self.z = numpy.ma.masked_invalid(self.z)
@@ -613,7 +649,7 class PlotNoiseData(PlotData):
613
649
614 def plot(self):
650 def plot(self):
615
651
616 x = self.data.times
652 x = self.times
617 xmin = self.min_time
653 xmin = self.min_time
618 xmax = xmin+self.xrange*60*60
654 xmax = xmin+self.xrange*60*60
619 Y = self.data[self.CODE]
655 Y = self.data[self.CODE]
@@ -681,7 +717,7 class PlotSkyMapData(PlotData):
681
717
682 def plot(self):
718 def plot(self):
683
719
684 arrayParameters = numpy.concatenate([self.data['param'][t] for t in self.data.times])
720 arrayParameters = numpy.concatenate([self.data['param'][t] for t in self.times])
685 error = arrayParameters[:,-1]
721 error = arrayParameters[:,-1]
686 indValid = numpy.where(error == 0)[0]
722 indValid = numpy.where(error == 0)[0]
687 finalMeteor = arrayParameters[indValid,:]
723 finalMeteor = arrayParameters[indValid,:]
@@ -727,6 +763,7 class PlotParamData(PlotRTIData):
727 self.nplots = self.nrows
763 self.nplots = self.nrows
728 if self.showSNR:
764 if self.showSNR:
729 self.nrows += 1
765 self.nrows += 1
766 self.nplots += 1
730
767
731 self.ylabel = 'Height [Km]'
768 self.ylabel = 'Height [Km]'
732 self.titles = self.data.parameters \
769 self.titles = self.data.parameters \
@@ -736,7 +773,7 class PlotParamData(PlotRTIData):
736
773
737 def plot(self):
774 def plot(self):
738 self.data.normalize_heights()
775 self.data.normalize_heights()
739 self.x = self.data.times
776 self.x = self.times
740 self.y = self.data.heights
777 self.y = self.data.heights
741 if self.showSNR:
778 if self.showSNR:
742 self.z = numpy.concatenate(
779 self.z = numpy.concatenate(
@@ -779,4 +816,4 class PlotOuputData(PlotParamData):
779 '''
816 '''
780
817
781 CODE = 'output'
818 CODE = 'output'
782 colormap = 'seismic' No newline at end of file
819 colormap = 'seismic'
@@ -6,6 +6,217 from figure import Figure, isRealtime, isTimeInHourRange
6 from plotting_codes import *
6 from plotting_codes import *
7
7
8
8
9 class FitGauPlot(Figure):
10
11 isConfig = None
12 __nsubplots = None
13
14 WIDTHPROF = None
15 HEIGHTPROF = None
16 PREFIX = 'fitgau'
17
18 def __init__(self, **kwargs):
19 Figure.__init__(self, **kwargs)
20 self.isConfig = False
21 self.__nsubplots = 1
22
23 self.WIDTH = 250
24 self.HEIGHT = 250
25 self.WIDTHPROF = 120
26 self.HEIGHTPROF = 0
27 self.counter_imagwr = 0
28
29 self.PLOT_CODE = SPEC_CODE
30
31 self.FTP_WEI = None
32 self.EXP_CODE = None
33 self.SUB_EXP_CODE = None
34 self.PLOT_POS = None
35
36 self.__xfilter_ena = False
37 self.__yfilter_ena = False
38
39 def getSubplots(self):
40
41 ncol = int(numpy.sqrt(self.nplots)+0.9)
42 nrow = int(self.nplots*1./ncol + 0.9)
43
44 return nrow, ncol
45
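getSubplots above picks a near-square grid: the column count is roughly the square root of the number of plots (the +0.9 acts as an approximate ceiling) and the row count is whatever is needed to hold the rest. A quick standalone check of the formula:

import numpy

def grid(nplots):
    ncol = int(numpy.sqrt(nplots) + 0.9)
    nrow = int(nplots * 1. / ncol + 0.9)
    return nrow, ncol

# e.g. grid(5) -> (2, 3), grid(9) -> (3, 3), grid(10) -> (3, 4)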
46 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
47
48 self.__showprofile = showprofile
49 self.nplots = nplots
50
51 ncolspan = 1
52 colspan = 1
53 if showprofile:
54 ncolspan = 3
55 colspan = 2
56 self.__nsubplots = 2
57
58 self.createFigure(id = id,
59 wintitle = wintitle,
60 widthplot = self.WIDTH + self.WIDTHPROF,
61 heightplot = self.HEIGHT + self.HEIGHTPROF,
62 show=show)
63
64 nrow, ncol = self.getSubplots()
65
66 counter = 0
67 for y in range(nrow):
68 for x in range(ncol):
69
70 if counter >= self.nplots:
71 break
72
73 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
74
75 if showprofile:
76 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
77
78 counter += 1
79
80 def run(self, dataOut, id, wintitle="", channelList=None, showprofile=True,
81 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
82 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
83 server=None, folder=None, username=None, password=None,
84 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False,
85 xaxis="frequency", colormap='jet', normFactor=None , GauSelector = 1):
86
87 """
88
89 Input:
90 dataOut :
91 id :
92 wintitle :
93 channelList :
94 showProfile :
95 xmin : None,
96 xmax : None,
97 ymin : None,
98 ymax : None,
99 zmin : None,
100 zmax : None
101 """
102 if realtime:
103 if not(isRealtime(utcdatatime = dataOut.utctime)):
104 print 'Skipping this plot function'
105 return
106
107 if channelList == None:
108 channelIndexList = dataOut.channelIndexList
109 else:
110 channelIndexList = []
111 for channel in channelList:
112 if channel not in dataOut.channelList:
113 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
114 channelIndexList.append(dataOut.channelList.index(channel))
115
116 # if normFactor is None:
117 # factor = dataOut.normFactor
118 # else:
119 # factor = normFactor
120 if xaxis == "frequency":
121 x = dataOut.spc_range[0]
122 xlabel = "Frequency (kHz)"
123
124 elif xaxis == "time":
125 x = dataOut.spc_range[1]
126 xlabel = "Time (ms)"
127
128 else:
129 x = dataOut.spc_range[2]
130 xlabel = "Velocity (m/s)"
131
132 ylabel = "Range (Km)"
133
134 y = dataOut.getHeiRange()
135
136 z = dataOut.GauSPC[:,GauSelector,:,:] #GauSelector] #dataOut.data_spc/factor
137 print 'GausSPC', z[0,32,10:40]
138 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
139 zdB = 10*numpy.log10(z)
140
141 avg = numpy.average(z, axis=1)
142 avgdB = 10*numpy.log10(avg)
143
144 noise = dataOut.spc_noise
145 noisedB = 10*numpy.log10(noise)
146
147 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
148 title = wintitle + " Spectra"
149 if ((dataOut.azimuth!=None) and (dataOut.zenith!=None)):
150 title = title + '_' + 'azimuth,zenith=%2.2f,%2.2f'%(dataOut.azimuth, dataOut.zenith)
151
152 if not self.isConfig:
153
154 nplots = len(channelIndexList)
155
156 self.setup(id=id,
157 nplots=nplots,
158 wintitle=wintitle,
159 showprofile=showprofile,
160 show=show)
161
162 if xmin == None: xmin = numpy.nanmin(x)
163 if xmax == None: xmax = numpy.nanmax(x)
164 if ymin == None: ymin = numpy.nanmin(y)
165 if ymax == None: ymax = numpy.nanmax(y)
166 if zmin == None: zmin = numpy.floor(numpy.nanmin(noisedB)) - 3
167 if zmax == None: zmax = numpy.ceil(numpy.nanmax(avgdB)) + 3
168
169 self.FTP_WEI = ftp_wei
170 self.EXP_CODE = exp_code
171 self.SUB_EXP_CODE = sub_exp_code
172 self.PLOT_POS = plot_pos
173
174 self.isConfig = True
175
176 self.setWinTitle(title)
177
178 for i in range(self.nplots):
179 index = channelIndexList[i]
180 str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S"))
181 title = "Channel %d: %4.2fdB: %s" %(dataOut.channelList[index], noisedB[index], str_datetime)
182 if len(dataOut.beam.codeList) != 0:
183 title = "Ch%d:%4.2fdB,%2.2f,%2.2f:%s" %(dataOut.channelList[index], noisedB[index], dataOut.beam.azimuthList[index], dataOut.beam.zenithList[index], str_datetime)
184
185 axes = self.axesList[i*self.__nsubplots]
186 axes.pcolor(x, y, zdB[index,:,:],
187 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
188 xlabel=xlabel, ylabel=ylabel, title=title, colormap=colormap,
189 ticksize=9, cblabel='')
190
191 if self.__showprofile:
192 axes = self.axesList[i*self.__nsubplots +1]
193 axes.pline(avgdB[index,:], y,
194 xmin=zmin, xmax=zmax, ymin=ymin, ymax=ymax,
195 xlabel='dB', ylabel='', title='',
196 ytick_visible=False,
197 grid='x')
198
199 noiseline = numpy.repeat(noisedB[index], len(y))
200 axes.addpline(noiseline, y, idline=1, color="black", linestyle="dashed", lw=2)
201
202 self.draw()
203
204 if figfile == None:
205 str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S")
206 name = str_datetime
207 if ((dataOut.azimuth!=None) and (dataOut.zenith!=None)):
208 name = name + '_az' + '_%2.2f'%(dataOut.azimuth) + '_zn' + '_%2.2f'%(dataOut.zenith)
209 figfile = self.getFilename(name)
210
211 self.save(figpath=figpath,
212 figfile=figfile,
213 save=save,
214 ftp=ftp,
215 wr_period=wr_period,
216 thisDatetime=thisDatetime)
217
218
219
9 class MomentsPlot(Figure):
220 class MomentsPlot(Figure):
10
221
11 isConfig = None
222 isConfig = None
@@ -445,10 +656,9 class WindProfilerPlot(Figure):
445 # tmin = None
656 # tmin = None
446 # tmax = None
657 # tmax = None
447
658
448
659 x = dataOut.getTimeRange1(dataOut.paramInterval)
449 x = dataOut.getTimeRange1(dataOut.outputInterval)
660 y = dataOut.heightList
450 y = dataOut.heightList
661 z = dataOut.data_output.copy()
451 z = dataOut.data_output.copy()
452 nplots = z.shape[0] #Number of wind dimensions estimated
662 nplots = z.shape[0] #Number of wind dimensions estimated
453 nplotsw = nplots
663 nplotsw = nplots
454
664
@@ -558,7 +768,7 class WindProfilerPlot(Figure):
558 thisDatetime=thisDatetime,
768 thisDatetime=thisDatetime,
559 update_figfile=update_figfile)
769 update_figfile=update_figfile)
560
770
561 if dataOut.ltctime + dataOut.outputInterval >= self.xmax:
771 if dataOut.ltctime + dataOut.paramInterval >= self.xmax:
562 self.counter_imagwr = wr_period
772 self.counter_imagwr = wr_period
563 self.isConfig = False
773 self.isConfig = False
564 update_figfile = True
774 update_figfile = True
@@ -635,12 +845,12 class ParametersPlot(Figure):
635
845
636 counter += 1
846 counter += 1
637
847
638 def run(self, dataOut, id, wintitle="", channelList=None, paramIndex = 0, colormap=True,
848 def run(self, dataOut, id, wintitle="", channelList=None, paramIndex = 0, colormap="jet",
639 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, timerange=None,
849 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, timerange=None,
640 showSNR=False, SNRthresh = -numpy.inf, SNRmin=None, SNRmax=None,
850 showSNR=False, SNRthresh = -numpy.inf, SNRmin=None, SNRmax=None,
641 save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True,
851 save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True,
642 server=None, folder=None, username=None, password=None,
852 server=None, folder=None, username=None, password=None,
643 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
853 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, HEIGHT=None):
644 """
854 """
645
855
646 Input:
856 Input:
@@ -656,12 +866,11 class ParametersPlot(Figure):
656 zmin : None,
866 zmin : None,
657 zmax : None
867 zmax : None
658 """
868 """
659
869
660 if colormap:
870 if HEIGHT is not None:
661 colormap="jet"
871 self.HEIGHT = HEIGHT
662 else:
872
663 colormap="RdBu_r"
873
664
665 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
874 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
666 return
875 return
667
876
@@ -906,25 +1115,23 class Parameters1Plot(Figure):
906
1115
907 x = dataOut.getTimeRange1(dataOut.paramInterval)
1116 x = dataOut.getTimeRange1(dataOut.paramInterval)
908 y = dataOut.heightList
1117 y = dataOut.heightList
909 z = data_param[channelIndexList,parameterIndex,:].copy()
910
1118
911 zRange = dataOut.abscissaList
1119 if dataOut.data_param.ndim == 3:
912 # nChannels = z.shape[0] #Number of wind dimensions estimated
1120 z = dataOut.data_param[channelIndexList,parameterIndex,:]
913 # thisDatetime = dataOut.datatime
1121 else:
1122 z = dataOut.data_param[channelIndexList,:]
914
1123
915 if dataOut.data_SNR is not None:
1124 if dataOut.data_SNR is not None:
916 SNRarray = dataOut.data_SNR[channelIndexList,:]
1125 if dataOut.data_SNR.ndim == 2:
917 SNRdB = 10*numpy.log10(SNRarray)
1126 SNRavg = numpy.average(dataOut.data_SNR, axis=0)
918 # SNRavgdB = 10*numpy.log10(SNRavg)
1127 else:
919 ind = numpy.where(SNRdB < 10**(SNRthresh/10))
1128 SNRavg = dataOut.data_SNR
920 z[ind] = numpy.nan
1129 SNRdB = 10*numpy.log10(SNRavg)
921
1130
922 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
1131 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
923 title = wintitle + " Parameters Plot" #: %s" %(thisDatetime.strftime("%d-%b-%Y"))
1132 title = wintitle + " Parameters Plot" #: %s" %(thisDatetime.strftime("%d-%b-%Y"))
924 xlabel = ""
1133 xlabel = ""
925 ylabel = "Range (Km)"
1134 ylabel = "Range (Km)"
926
927 if (SNR and not onlySNR): nplots = 2*nplots
928
1135
929 if onlyPositive:
1136 if onlyPositive:
930 colormap = "jet"
1137 colormap = "jet"
@@ -943,8 +1150,8 class Parameters1Plot(Figure):
943
1150
944 if ymin == None: ymin = numpy.nanmin(y)
1151 if ymin == None: ymin = numpy.nanmin(y)
945 if ymax == None: ymax = numpy.nanmax(y)
1152 if ymax == None: ymax = numpy.nanmax(y)
946 if zmin == None: zmin = numpy.nanmin(zRange)
1153 if zmin == None: zmin = numpy.nanmin(z)
947 if zmax == None: zmax = numpy.nanmax(zRange)
1154 if zmax == None: zmax = numpy.nanmax(z)
948
1155
949 if SNR:
1156 if SNR:
950 if SNRmin == None: SNRmin = numpy.nanmin(SNRdB)
1157 if SNRmin == None: SNRmin = numpy.nanmin(SNRdB)
@@ -994,19 +1201,18 class Parameters1Plot(Figure):
994 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,colormap=colormap,
1201 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,colormap=colormap,
995 ticksize=9, cblabel=zlabel, cbsize="1%")
1202 ticksize=9, cblabel=zlabel, cbsize="1%")
996
1203
997 if SNR:
1204 if SNR:
998 title = "Channel %d Signal Noise Ratio (SNR): %s" %(channelIndexList[i], thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
1205 title = "Channel %d Signal Noise Ratio (SNR): %s" %(channelIndexList[i], thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
999 axes = self.axesList[(j)*self.__nsubplots]
1206 axes = self.axesList[(j)*self.__nsubplots]
1000 if not onlySNR:
1207 if not onlySNR:
1001 axes = self.axesList[(j + 1)*self.__nsubplots]
1208 axes = self.axesList[(j + 1)*self.__nsubplots]
1002
1003 axes = self.axesList[(j + nGraphsByChannel-1)]
1004
1209
1005 z1 = SNRdB[i,:].reshape((1,-1))
1210 axes = self.axesList[(j + nGraphsByChannel-1)]
1006 axes.pcolorbuffer(x, y, z1,
1211 z1 = SNRdB.reshape((1,-1))
1007 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax, zmin=SNRmin, zmax=SNRmax,
1212 axes.pcolorbuffer(x, y, z1,
1008 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,colormap="jet",
1213 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax, zmin=SNRmin, zmax=SNRmax,
1009 ticksize=9, cblabel=zlabel, cbsize="1%")
1214 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,colormap="jet",
1215 ticksize=9, cblabel=zlabel, cbsize="1%")
1010
1216
1011
1217
1012
1218
@@ -87,7 +87,7 class SpectraPlot(Figure):
87 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
87 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
88 server=None, folder=None, username=None, password=None,
88 server=None, folder=None, username=None, password=None,
89 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False,
89 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False,
90 xaxis="velocity", **kwargs):
90 xaxis="frequency", colormap='jet', normFactor=None):
91
91
92 """
92 """
93
93
@@ -104,9 +104,6 class SpectraPlot(Figure):
104 zmin : None,
104 zmin : None,
105 zmax : None
105 zmax : None
106 """
106 """
107
108 colormap = kwargs.get('colormap','jet')
109
110 if realtime:
107 if realtime:
111 if not(isRealtime(utcdatatime = dataOut.utctime)):
108 if not(isRealtime(utcdatatime = dataOut.utctime)):
112 print 'Skipping this plot function'
109 print 'Skipping this plot function'
@@ -121,8 +118,10 class SpectraPlot(Figure):
121 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
118 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
122 channelIndexList.append(dataOut.channelList.index(channel))
119 channelIndexList.append(dataOut.channelList.index(channel))
123
120
124 factor = dataOut.normFactor
121 if normFactor is None:
125
122 factor = dataOut.normFactor
123 else:
124 factor = normFactor
126 if xaxis == "frequency":
125 if xaxis == "frequency":
127 x = dataOut.getFreqRange(1)/1000.
126 x = dataOut.getFreqRange(1)/1000.
128 xlabel = "Frequency (kHz)"
127 xlabel = "Frequency (kHz)"
@@ -283,7 +282,7 class CrossSpectraPlot(Figure):
283 save=False, figpath='./', figfile=None, ftp=False, wr_period=1,
282 save=False, figpath='./', figfile=None, ftp=False, wr_period=1,
284 power_cmap='jet', coherence_cmap='jet', phase_cmap='RdBu_r', show=True,
283 power_cmap='jet', coherence_cmap='jet', phase_cmap='RdBu_r', show=True,
285 server=None, folder=None, username=None, password=None,
284 server=None, folder=None, username=None, password=None,
286 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0,
285 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, normFactor=None,
287 xaxis='frequency'):
286 xaxis='frequency'):
288
287
289 """
288 """
@@ -316,8 +315,11 class CrossSpectraPlot(Figure):
316
315
317 if len(pairsIndexList) > 4:
316 if len(pairsIndexList) > 4:
318 pairsIndexList = pairsIndexList[0:4]
317 pairsIndexList = pairsIndexList[0:4]
319
318
320 factor = dataOut.normFactor
319 if normFactor is None:
320 factor = dataOut.normFactor
321 else:
322 factor = normFactor
321 x = dataOut.getVelRange(1)
323 x = dataOut.getVelRange(1)
322 y = dataOut.getHeiRange()
324 y = dataOut.getHeiRange()
323 z = dataOut.data_spc[:,:,:]/factor
325 z = dataOut.data_spc[:,:,:]/factor
@@ -518,10 +520,10 class RTIPlot(Figure):
518
520
519 def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True',
521 def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True',
520 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
522 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
521 timerange=None,
523 timerange=None, colormap='jet',
522 save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True,
524 save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True,
523 server=None, folder=None, username=None, password=None,
525 server=None, folder=None, username=None, password=None,
524 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, **kwargs):
526 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, normFactor=None, HEIGHT=None):
525
527
526 """
528 """
527
529
@@ -539,7 +541,10 class RTIPlot(Figure):
539 zmax : None
541 zmax : None
540 """
542 """
541
543
542 colormap = kwargs.get('colormap', 'jet')
544 #colormap = kwargs.get('colormap', 'jet')
545 if HEIGHT is not None:
546 self.HEIGHT = HEIGHT
547
543 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
548 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
544 return
549 return
545
550
@@ -552,20 +557,21 class RTIPlot(Figure):
552 raise ValueError, "Channel %d is not in dataOut.channelList"
557 raise ValueError, "Channel %d is not in dataOut.channelList"
553 channelIndexList.append(dataOut.channelList.index(channel))
558 channelIndexList.append(dataOut.channelList.index(channel))
554
559
555 if hasattr(dataOut, 'normFactor'):
560 if normFactor is None:
556 factor = dataOut.normFactor
561 factor = dataOut.normFactor
557 else:
562 else:
558 factor = 1
563 factor = normFactor
559
564
560 # factor = dataOut.normFactor
565 # factor = dataOut.normFactor
561 x = dataOut.getTimeRange()
566 x = dataOut.getTimeRange()
562 y = dataOut.getHeiRange()
567 y = dataOut.getHeiRange()
563
568
564 # z = dataOut.data_spc/factor
569 z = dataOut.data_spc/factor
565 # z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
570 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
566 # avg = numpy.average(z, axis=1)
571 avg = numpy.average(z, axis=1)
567 # avgdB = 10.*numpy.log10(avg)
572 avgdB = 10.*numpy.log10(avg)
568 avgdB = dataOut.getPower()
573 # avgdB = dataOut.getPower()
574
569
575
570 thisDatetime = dataOut.datatime
576 thisDatetime = dataOut.datatime
571 # thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
577 # thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
@@ -1112,6 +1118,7 class Noise(Figure):
1112
1118
1113 PREFIX = 'noise'
1119 PREFIX = 'noise'
1114
1120
1121
1115 def __init__(self, **kwargs):
1122 def __init__(self, **kwargs):
1116 Figure.__init__(self, **kwargs)
1123 Figure.__init__(self, **kwargs)
1117 self.timerange = 24*60*60
1124 self.timerange = 24*60*60
@@ -12,3 +12,9 from jroIO_digitalRF import *
12 from jroIO_kamisr import *
12 from jroIO_kamisr import *
13 from jroIO_param import *
13 from jroIO_param import *
14 from jroIO_hf import *
14 from jroIO_hf import *
15
16 from jroIO_madrigal import *
17
18 from bltrIO_param import *
19 from jroIO_bltr import *
20 from jroIO_mira35c import *
This diff has been collapsed as it changes many lines (509 lines changed).
@@ -10,7 +10,8 import time
10 import numpy
10 import numpy
11 import fnmatch
11 import fnmatch
12 import inspect
12 import inspect
13 import time, datetime
13 import time
14 import datetime
14 import traceback
15 import traceback
15 import zmq
16 import zmq
16
17
@@ -24,6 +25,7 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, ge
24
25
25 LOCALTIME = True
26 LOCALTIME = True
26
27
28
27 def isNumber(cad):
29 def isNumber(cad):
28 """
30 """
29 Checks whether the characters making up a string can be converted to a number.
31 Checks whether the characters making up a string can be converted to a number.
@@ -38,11 +40,12 def isNumber(cad):
38 False : the string is not numeric
40 False : the string is not numeric
39 """
41 """
40 try:
42 try:
41 float( cad )
43 float(cad)
42 return True
44 return True
43 except:
45 except:
44 return False
46 return False
45
47
48
46 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
49 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
47 """
50 """
48 This function determines whether or not a data file falls within the specified date range.
51 This function determines whether or not a data file falls within the specified date range.
@@ -67,16 +70,16 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
67 basicHeaderObj = BasicHeader(LOCALTIME)
70 basicHeaderObj = BasicHeader(LOCALTIME)
68
71
69 try:
72 try:
70 fp = open(filename,'rb')
73 fp = open(filename, 'rb')
71 except IOError:
74 except IOError:
72 print "The file %s can't be opened" %(filename)
75 print "The file %s can't be opened" % (filename)
73 return 0
76 return 0
74
77
75 sts = basicHeaderObj.read(fp)
78 sts = basicHeaderObj.read(fp)
76 fp.close()
79 fp.close()
77
80
78 if not(sts):
81 if not(sts):
79 print "Skipping the file %s because it has not a valid header" %(filename)
82 print "Skipping the file %s because it has not a valid header" % (filename)
80 return 0
83 return 0
81
84
82 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
85 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
@@ -84,6 +87,7 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
84
87
85 return 1
88 return 1
86
89
90
87 def isTimeInRange(thisTime, startTime, endTime):
91 def isTimeInRange(thisTime, startTime, endTime):
88
92
89 if endTime >= startTime:
93 if endTime >= startTime:
@@ -97,6 +101,7 def isTimeInRange(thisTime, startTime, endTime):
97
101
98 return 1
102 return 1
99
103
104
100 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
105 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
101 """
106 """
102 Returns 1 if the data file falls within the specified time range.
107 Returns 1 if the data file falls within the specified time range.
@@ -122,11 +127,10 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
122
127
123 """
128 """
124
129
125
126 try:
130 try:
127 fp = open(filename,'rb')
131 fp = open(filename, 'rb')
128 except IOError:
132 except IOError:
129 print "The file %s can't be opened" %(filename)
133 print "The file %s can't be opened" % (filename)
130 return None
134 return None
131
135
132 firstBasicHeaderObj = BasicHeader(LOCALTIME)
136 firstBasicHeaderObj = BasicHeader(LOCALTIME)
@@ -139,7 +143,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
139 sts = firstBasicHeaderObj.read(fp)
143 sts = firstBasicHeaderObj.read(fp)
140
144
141 if not(sts):
145 if not(sts):
142 print "[Reading] Skipping the file %s because it has not a valid header" %(filename)
146 print "[Reading] Skipping the file %s because it has not a valid header" % (filename)
143 return None
147 return None
144
148
145 if not systemHeaderObj.read(fp):
149 if not systemHeaderObj.read(fp):
@@ -153,10 +157,10 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
153
157
154 filesize = os.path.getsize(filename)
158 filesize = os.path.getsize(filename)
155
159
156 offset = processingHeaderObj.blockSize + 24 #header size
160 offset = processingHeaderObj.blockSize + 24 # header size
157
161
158 if filesize <= offset:
162 if filesize <= offset:
159 print "[Reading] %s: This file has not enough data" %filename
163 print "[Reading] %s: This file has not enough data" % filename
160 return None
164 return None
161
165
162 fp.seek(-offset, 2)
166 fp.seek(-offset, 2)
@@ -172,7 +176,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
172 thisDate = thisDatetime.date()
176 thisDate = thisDatetime.date()
173 thisTime_first_block = thisDatetime.time()
177 thisTime_first_block = thisDatetime.time()
174
178
175 #General case
179 # General case
176 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
180 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
177 #-----------o----------------------------o-----------
181 #-----------o----------------------------o-----------
178 # startTime endTime
182 # startTime endTime
@@ -183,8 +187,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
183
187
184 return thisDatetime
188 return thisDatetime
185
189
186 #If endTime < startTime then endTime belongs to the next day
190 # If endTime < startTime then endTime belongs to the next day
187
188
191
189 #<<<<<<<<<<<o o>>>>>>>>>>>
192 #<<<<<<<<<<<o o>>>>>>>>>>>
190 #-----------o----------------------------o-----------
193 #-----------o----------------------------o-----------
@@ -201,6 +204,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
201
204
202 return thisDatetime
205 return thisDatetime
203
206
207
204 def isFolderInDateRange(folder, startDate=None, endDate=None):
208 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 """
209 """
206 Returns 1 if the data file falls within the specified time range.
210 Returns 1 if the data file falls within the specified time range.
@@ -227,7 +231,7 def isFolderInDateRange(folder, startDate=None, endDate=None):
227 basename = os.path.basename(folder)
231 basename = os.path.basename(folder)
228
232
229 if not isRadarFolder(basename):
233 if not isRadarFolder(basename):
230 print "The folder %s has not the rigth format" %folder
234 print "The folder %s has not the rigth format" % folder
231 return 0
235 return 0
232
236
233 if startDate and endDate:
237 if startDate and endDate:
@@ -241,6 +245,7 def isFolderInDateRange(folder, startDate=None, endDate=None):
241
245
242 return 1
246 return 1
243
247
248
244 def isFileInDateRange(filename, startDate=None, endDate=None):
249 def isFileInDateRange(filename, startDate=None, endDate=None):
245 """
250 """
246 Returns 1 if the data file falls within the specified time range.
251 Returns 1 if the data file falls within the specified time range.
@@ -269,7 +274,7 def isFileInDateRange(filename, startDate=None, endDate=None):
269 basename = os.path.basename(filename)
274 basename = os.path.basename(filename)
270
275
271 if not isRadarFile(basename):
276 if not isRadarFile(basename):
272 print "The filename %s has not the rigth format" %filename
277 print "The filename %s has not the rigth format" % filename
273 return 0
278 return 0
274
279
275 if startDate and endDate:
280 if startDate and endDate:
@@ -283,6 +288,7 def isFileInDateRange(filename, startDate=None, endDate=None):
283
288
284 return 1
289 return 1
285
290
291
286 def getFileFromSet(path, ext, set):
292 def getFileFromSet(path, ext, set):
287 validFilelist = []
293 validFilelist = []
288 fileList = os.listdir(path)
294 fileList = os.listdir(path)
@@ -293,7 +299,7 def getFileFromSet(path, ext, set):
293 for thisFile in fileList:
299 for thisFile in fileList:
294 try:
300 try:
295 year = int(thisFile[1:5])
301 year = int(thisFile[1:5])
296 doy = int(thisFile[5:8])
302 doy = int(thisFile[5:8])
297 except:
303 except:
298 continue
304 continue
299
305
@@ -302,21 +308,23 def getFileFromSet(path, ext, set):
302
308
303 validFilelist.append(thisFile)
309 validFilelist.append(thisFile)
304
310
305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
311 myfile = fnmatch.filter(
312 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
306
313
307 if len(myfile)!= 0:
314 if len(myfile) != 0:
308 return myfile[0]
315 return myfile[0]
309 else:
316 else:
310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
317 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
311 print 'the filename %s does not exist'%filename
318 print 'the filename %s does not exist' % filename
312 print '...going to the last file: '
319 print '...going to the last file: '
313
320
314 if validFilelist:
321 if validFilelist:
315 validFilelist = sorted( validFilelist, key=str.lower )
322 validFilelist = sorted(validFilelist, key=str.lower)
316 return validFilelist[-1]
323 return validFilelist[-1]
317
324
318 return None
325 return None
319
326
327
320 def getlastFileFromPath(path, ext):
328 def getlastFileFromPath(path, ext):
321 """
329 """
322 Cleans fileList, keeping only entries that match the "PYYYYDDDSSS.ext" format
330 Cleans fileList, keeping only entries that match the "PYYYYDDDSSS.ext" format
@@ -354,11 +362,12 def getlastFileFromPath(path, ext):
354 validFilelist.append(thisFile)
362 validFilelist.append(thisFile)
355
363
356 if validFilelist:
364 if validFilelist:
357 validFilelist = sorted( validFilelist, key=str.lower )
365 validFilelist = sorted(validFilelist, key=str.lower)
358 return validFilelist[-1]
366 return validFilelist[-1]
359
367
360 return None
368 return None
361
369
370
362 def checkForRealPath(path, foldercounter, year, doy, set, ext):
371 def checkForRealPath(path, foldercounter, year, doy, set, ext):
363 """
372 """
364 Because Linux is case sensitive, checkForRealPath finds the correct name of a path,
373 Because Linux is case sensitive, checkForRealPath finds the correct name of a path,
@@ -386,28 +395,32 def checkForRealPath(path, foldercounter, year, doy, set, ext):
386 find_flag = False
395 find_flag = False
387 filename = None
396 filename = None
388
397
389 prefixDirList = [None,'d','D']
398 prefixDirList = [None, 'd', 'D']
390 if ext.lower() == ".r": #voltage
399 if ext.lower() == ".r": # voltage
391 prefixFileList = ['d','D']
400 prefixFileList = ['d', 'D']
392 elif ext.lower() == ".pdata": #spectra
401 elif ext.lower() == ".pdata": # spectra
393 prefixFileList = ['p','P']
402 prefixFileList = ['p', 'P']
394 else:
403 else:
395 return None, filename
404 return None, filename
396
405
397 #sweep over the possible combinations
406 # sweep over the possible combinations
398 for prefixDir in prefixDirList:
407 for prefixDir in prefixDirList:
399 thispath = path
408 thispath = path
400 if prefixDir != None:
409 if prefixDir != None:
401 #build the directory name xYYYYDDD (x=d or x=D)
410 # build the directory name xYYYYDDD (x=d or x=D)
402 if foldercounter == 0:
411 if foldercounter == 0:
403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
412 thispath = os.path.join(path, "%s%04d%03d" %
413 (prefixDir, year, doy))
404 else:
414 else:
405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
415 thispath = os.path.join(path, "%s%04d%03d_%02d" % (
406 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
416 prefixDir, year, doy, foldercounter))
407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
417 for prefixFile in prefixFileList: # sweep over the two possible combinations of "D"
408 fullfilename = os.path.join( thispath, filename ) #build the full path
418 # build the file name xYYYYDDDSSS.ext
409
419 filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext)
410 if os.path.exists( fullfilename ): #check that it exists
420 fullfilename = os.path.join(
421 thispath, filename) # build the full path
422
423 if os.path.exists(fullfilename): # check that it exists
411 find_flag = True
424 find_flag = True
412 break
425 break
413 if find_flag:
426 if find_flag:
@@ -418,6 +431,7 def checkForRealPath(path, foldercounter, year, doy, set, ext):
418
431
419 return fullfilename, filename
432 return fullfilename, filename
420
433
434
421 def isRadarFolder(folder):
435 def isRadarFolder(folder):
422 try:
436 try:
423 year = int(folder[1:5])
437 year = int(folder[1:5])
@@ -427,15 +441,17 def isRadarFolder(folder):
427
441
428 return 1
442 return 1
429
443
444
430 def isRadarFile(file):
445 def isRadarFile(file):
431 try:
446 try:
432 year = int(file[1:5])
447 year = int(file[1:5])
433 doy = int(file[5:8])
448 doy = int(file[5:8])
434 set = int(file[8:11])
449 set = int(file[8:11])
435 except:
450 except:
436 return 0
451 return 0
452
453 return 1
437
454
438 return 1
439
455
440 def getDateFromRadarFile(file):
456 def getDateFromRadarFile(file):
441 try:
457 try:
@@ -445,9 +461,10 def getDateFromRadarFile(file):
445 except:
461 except:
446 return None
462 return None
447
463
448 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
464 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
449 return thisDate
465 return thisDate
450
466
467
451 def getDateFromRadarFolder(folder):
468 def getDateFromRadarFolder(folder):
452 try:
469 try:
453 year = int(folder[1:5])
470 year = int(folder[1:5])
@@ -455,9 +472,10 def getDateFromRadarFolder(folder):
455 except:
472 except:
456 return None
473 return None
457
474
458 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
475 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
459 return thisDate
476 return thisDate
460
477
478
461 class JRODataIO:
479 class JRODataIO:
462
480
463 c = 3E8
481 c = 3E8
@@ -540,6 +558,7 class JRODataIO:
540 def getAllowedArgs(self):
558 def getAllowedArgs(self):
541 return inspect.getargspec(self.run).args
559 return inspect.getargspec(self.run).args
542
560
561
543 class JRODataReader(JRODataIO):
562 class JRODataReader(JRODataIO):
544
563
545 online = 0
564 online = 0
@@ -548,11 +567,11 class JRODataReader(JRODataIO):
548
567
549 nReadBlocks = 0
568 nReadBlocks = 0
550
569
551 delay = 10 #number of seconds waiting a new file
570 delay = 10 # number of seconds waiting a new file
552
571
553 nTries = 3 #quantity tries
572 nTries = 3 # quantity tries
554
573
555 nFiles = 3 #number of files for searching
574 nFiles = 3 # number of files for searching
556
575
557 path = None
576 path = None
558
577
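With the class defaults above (delay = 10 s, nTries = 3, nFiles = 3), the online waiting loop further down sleeps up to delay * nTries seconds for the first missing file (later candidates are checked once each) and probes nFiles + 1 candidate names before moving to the next folder. A rough worst-case estimate, assuming no file ever shows up:

delay, nTries, nFiles = 10, 3, 3          # class defaults shown above
max_wait_per_file = delay * nTries        # 30 seconds of sleeping for the first candidate
candidates_per_folder = nFiles + 1        # 4 file names are tried per folder
print max_wait_per_file, candidates_per_folder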
@@ -572,14 +591,13 class JRODataReader(JRODataIO):
572
591
573 txIndex = None
592 txIndex = None
574
593
575 #Added--------------------
594 # Added--------------------
576
595
577 selBlocksize = None
596 selBlocksize = None
578
597
579 selBlocktime = None
598 selBlocktime = None
580
599
581 def __init__(self):
600 def __init__(self):
582
583 """
601 """
584 This class is used to find data files
602 This class is used to find data files
585
603
@@ -590,7 +608,6 class JRODataReader(JRODataIO):
590 """
608 """
591 pass
609 pass
592
610
593
594 def createObjByDefault(self):
611 def createObjByDefault(self):
595 """
612 """
596
613
@@ -605,8 +622,8 class JRODataReader(JRODataIO):
605 path,
622 path,
606 startDate=None,
623 startDate=None,
607 endDate=None,
624 endDate=None,
608 startTime=datetime.time(0,0,0),
625 startTime=datetime.time(0, 0, 0),
609 endTime=datetime.time(23,59,59),
626 endTime=datetime.time(23, 59, 59),
610 set=None,
627 set=None,
611 expLabel='',
628 expLabel='',
612 ext='.r',
629 ext='.r',
@@ -619,22 +636,23 class JRODataReader(JRODataIO):
619
636
620 pathList = []
637 pathList = []
621
638
622 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
639 dateList, pathList = self.findDatafiles(
640 path, startDate, endDate, expLabel, ext, walk, include_path=True)
623
641
624 if dateList == []:
642 if dateList == []:
625 return [], []
643 return [], []
626
644
627 if len(dateList) > 1:
645 if len(dateList) > 1:
628 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
646 print "[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList))
629 else:
647 else:
630 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
648 print "[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0])
631
649
632 filenameList = []
650 filenameList = []
633 datetimeList = []
651 datetimeList = []
634
652
635 for thisPath in pathList:
653 for thisPath in pathList:
636
654
637 fileList = glob.glob1(thisPath, "*%s" %ext)
655 fileList = glob.glob1(thisPath, "*%s" % ext)
638 fileList.sort()
656 fileList.sort()
639
657
640 skippedFileList = []
658 skippedFileList = []
@@ -644,19 +662,21 class JRODataReader(JRODataIO):
644 if skip == 0:
662 if skip == 0:
645 skippedFileList = []
663 skippedFileList = []
646 else:
664 else:
647 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
665 skippedFileList = fileList[cursor *
666 skip: cursor * skip + skip]
648
667
649 else:
668 else:
650 skippedFileList = fileList
669 skippedFileList = fileList
651
670
652 for file in skippedFileList:
671 for file in skippedFileList:
653
672
654 filename = os.path.join(thisPath,file)
673 filename = os.path.join(thisPath, file)
655
674
656 if not isFileInDateRange(filename, startDate, endDate):
675 if not isFileInDateRange(filename, startDate, endDate):
657 continue
676 continue
658
677
659 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
678 thisDatetime = isFileInTimeRange(
679 filename, startDate, endDate, startTime, endTime)
660
680
661 if not(thisDatetime):
681 if not(thisDatetime):
662 continue
682 continue
@@ -665,10 +685,10 class JRODataReader(JRODataIO):
665 datetimeList.append(thisDatetime)
685 datetimeList.append(thisDatetime)
666
686
667 if not(filenameList):
687 if not(filenameList):
668 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
688 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path)
669 return [], []
689 return [], []
670
690
671 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
691 print "[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime)
672 print
692 print
673
693
674 # for i in range(len(filenameList)):
694 # for i in range(len(filenameList)):
@@ -679,8 +699,7 class JRODataReader(JRODataIO):
679
699
680 return pathList, filenameList
700 return pathList, filenameList
681
701
682 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
702 def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True, set=None):
683
684 """
703 """
685 Searches for the most recent file in the most recent folder (whether or not it is determined by startDateTime) and
704 Searches for the most recent file in the most recent folder (whether or not it is determined by startDateTime) and
686 returns the file that was found, along with other data.
705 returns the file that was found, along with other data.
@@ -712,9 +731,9 class JRODataReader(JRODataIO):
712 fullpath = path
731 fullpath = path
713 foldercounter = 0
732 foldercounter = 0
714 else:
733 else:
715 #Filtra solo los directorios
734 # Filtra solo los directorios
716 for thisPath in os.listdir(path):
735 for thisPath in os.listdir(path):
717 if not os.path.isdir(os.path.join(path,thisPath)):
736 if not os.path.isdir(os.path.join(path, thisPath)):
718 continue
737 continue
719 if not isRadarFolder(thisPath):
738 if not isRadarFolder(thisPath):
720 continue
739 continue
@@ -724,14 +743,14 class JRODataReader(JRODataIO):
724 if not(dirList):
743 if not(dirList):
725 return None, None, None, None, None, None
744 return None, None, None, None, None, None
726
745
727 dirList = sorted( dirList, key=str.lower )
746 dirList = sorted(dirList, key=str.lower)
728
747
729 doypath = dirList[-1]
748 doypath = dirList[-1]
730 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
749 foldercounter = int(doypath.split('_')[1]) if len(
750 doypath.split('_')) > 1 else 0
731 fullpath = os.path.join(path, doypath, expLabel)
751 fullpath = os.path.join(path, doypath, expLabel)
732
752
733
753 print "[Reading] %s folder was found: " % (fullpath)
734 print "[Reading] %s folder was found: " %(fullpath )
735
754
736 if set == None:
755 if set == None:
737 filename = getlastFileFromPath(fullpath, ext)
756 filename = getlastFileFromPath(fullpath, ext)
@@ -741,14 +760,14 class JRODataReader(JRODataIO):
741 if not(filename):
760 if not(filename):
742 return None, None, None, None, None, None
761 return None, None, None, None, None, None
743
762
744 print "[Reading] %s file was found" %(filename)
763 print "[Reading] %s file was found" % (filename)
745
764
746 if not(self.__verifyFile(os.path.join(fullpath, filename))):
765 if not(self.__verifyFile(os.path.join(fullpath, filename))):
747 return None, None, None, None, None, None
766 return None, None, None, None, None, None
748
767
749 year = int( filename[1:5] )
768 year = int(filename[1:5])
750 doy = int( filename[5:8] )
769 doy = int(filename[5:8])
751 set = int( filename[8:11] )
770 set = int(filename[8:11])
752
771
753 return fullpath, foldercounter, filename, year, doy, set
772 return fullpath, foldercounter, filename, year, doy, set
754
773
@@ -769,7 +788,7 class JRODataReader(JRODataIO):
769 continue
788 continue
770
789
771 fileSize = os.path.getsize(filename)
790 fileSize = os.path.getsize(filename)
772 fp = open(filename,'rb')
791 fp = open(filename, 'rb')
773 break
792 break
774
793
775 self.flagIsNewFile = 1
794 self.flagIsNewFile = 1
@@ -813,29 +832,32 class JRODataReader(JRODataIO):
813 self.set = 0
832 self.set = 0
814 self.foldercounter += 1
833 self.foldercounter += 1
815
834
816 #busca el 1er file disponible
835 # busca el 1er file disponible
817 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
836 fullfilename, filename = checkForRealPath(
837 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
818 if fullfilename:
838 if fullfilename:
819 if self.__verifyFile(fullfilename, False):
839 if self.__verifyFile(fullfilename, False):
820 fileOk_flag = True
840 fileOk_flag = True
821
841
822 #si no encuentra un file entonces espera y vuelve a buscar
842 # si no encuentra un file entonces espera y vuelve a buscar
823 if not(fileOk_flag):
843 if not(fileOk_flag):
824 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
844 # busco en los siguientes self.nFiles+1 files posibles
845 for nFiles in range(self.nFiles + 1):
825
846
826 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
847 if firstTime_flag: # si es la 1era vez entonces hace el for self.nTries veces
827 tries = self.nTries
848 tries = self.nTries
828 else:
849 else:
829 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
850 tries = 1 # si no es la 1era vez entonces solo lo hace una vez
830
851
831 for nTries in range( tries ):
852 for nTries in range(tries):
832 if firstTime_flag:
853 if firstTime_flag:
833 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
854 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1)
834 sleep( self.delay )
855 sleep(self.delay)
835 else:
856 else:
836 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
857 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
837
858
838 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
859 fullfilename, filename = checkForRealPath(
860 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
839 if fullfilename:
861 if fullfilename:
840 if self.__verifyFile(fullfilename):
862 if self.__verifyFile(fullfilename):
841 fileOk_flag = True
863 fileOk_flag = True
@@ -849,16 +871,18 class JRODataReader(JRODataIO):
849 print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
871 print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
850 self.set += 1
872 self.set += 1
851
873
852 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
874 # si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
875 if nFiles == (self.nFiles - 1):
853 self.set = 0
876 self.set = 0
854 self.doy += 1
877 self.doy += 1
855 self.foldercounter = 0
878 self.foldercounter = 0
856
879
857 if fileOk_flag:
880 if fileOk_flag:
858 self.fileSize = os.path.getsize( fullfilename )
881 self.fileSize = os.path.getsize(fullfilename)
859 self.filename = fullfilename
882 self.filename = fullfilename
860 self.flagIsNewFile = 1
883 self.flagIsNewFile = 1
861 if self.fp != None: self.fp.close()
884 if self.fp != None:
885 self.fp.close()
862 self.fp = open(fullfilename, 'rb')
886 self.fp = open(fullfilename, 'rb')
863 self.flagNoMoreFiles = 0
887 self.flagNoMoreFiles = 0
864 # print '[Reading] Setting the file: %s' % fullfilename
888 # print '[Reading] Setting the file: %s' % fullfilename
@@ -908,48 +932,47 class JRODataReader(JRODataIO):
908
932
909 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
933 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
910
934
911 for nTries in range( self.nTries ):
935 for nTries in range(self.nTries):
912
936
913 self.fp.close()
937 self.fp.close()
914 self.fp = open( self.filename, 'rb' )
938 self.fp = open(self.filename, 'rb')
915 self.fp.seek( currentPointer )
939 self.fp.seek(currentPointer)
916
940
917 self.fileSize = os.path.getsize( self.filename )
941 self.fileSize = os.path.getsize(self.filename)
918 currentSize = self.fileSize - currentPointer
942 currentSize = self.fileSize - currentPointer
919
943
920 if ( currentSize >= neededSize ):
944 if (currentSize >= neededSize):
921 self.basicHeaderObj.read(self.fp)
945 self.basicHeaderObj.read(self.fp)
922 return 1
946 return 1
923
947
924 if self.fileSize == self.fileSizeByHeader:
948 if self.fileSize == self.fileSizeByHeader:
925 # self.flagEoF = True
949 # self.flagEoF = True
926 return 0
950 return 0
927
951
928 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
952 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
929 sleep( self.delay )
953 sleep(self.delay)
930
931
954
932 return 0
955 return 0
933
956
934 def waitDataBlock(self,pointer_location):
957 def waitDataBlock(self, pointer_location):
935
958
936 currentPointer = pointer_location
959 currentPointer = pointer_location
937
960
938 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
961 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
939
962
940 for nTries in range( self.nTries ):
963 for nTries in range(self.nTries):
941 self.fp.close()
964 self.fp.close()
942 self.fp = open( self.filename, 'rb' )
965 self.fp = open(self.filename, 'rb')
943 self.fp.seek( currentPointer )
966 self.fp.seek(currentPointer)
944
967
945 self.fileSize = os.path.getsize( self.filename )
968 self.fileSize = os.path.getsize(self.filename)
946 currentSize = self.fileSize - currentPointer
969 currentSize = self.fileSize - currentPointer
947
970
948 if ( currentSize >= neededSize ):
971 if (currentSize >= neededSize):
949 return 1
972 return 1
950
973
951 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
974 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
952 sleep( self.delay )
975 sleep(self.delay)
953
976
954 return 0
977 return 0
955
978
@@ -961,7 +984,7 class JRODataReader(JRODataIO):
961 csize = self.fileSize - self.fp.tell()
984 csize = self.fileSize - self.fp.tell()
962 blocksize = self.processingHeaderObj.blockSize
985 blocksize = self.processingHeaderObj.blockSize
963
986
964 #salta el primer bloque de datos
987 # salta el primer bloque de datos
965 if csize > self.processingHeaderObj.blockSize:
988 if csize > self.processingHeaderObj.blockSize:
966 self.fp.seek(self.fp.tell() + blocksize)
989 self.fp.seek(self.fp.tell() + blocksize)
967 else:
990 else:
@@ -971,7 +994,7 class JRODataReader(JRODataIO):
971 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
994 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
972 while True:
995 while True:
973
996
974 if self.fp.tell()<self.fileSize:
997 if self.fp.tell() < self.fileSize:
975 self.fp.seek(self.fp.tell() + neededsize)
998 self.fp.seek(self.fp.tell() + neededsize)
976 else:
999 else:
977 self.fp.seek(self.fp.tell() - neededsize)
1000 self.fp.seek(self.fp.tell() - neededsize)
@@ -987,12 +1010,12 class JRODataReader(JRODataIO):
987 self.__isFirstTimeOnline = 0
1010 self.__isFirstTimeOnline = 0
988
1011
989 def __setNewBlock(self):
1012 def __setNewBlock(self):
990 #if self.server is None:
1013 # if self.server is None:
991 if self.fp == None:
1014 if self.fp == None:
992 return 0
1015 return 0
993
1016
994 # if self.online:
1017 # if self.online:
995 # self.__jumpToLastBlock()
1018 # self.__jumpToLastBlock()
996
1019
997 if self.flagIsNewFile:
1020 if self.flagIsNewFile:
998 self.lastUTTime = self.basicHeaderObj.utc
1021 self.lastUTTime = self.basicHeaderObj.utc
@@ -1003,8 +1026,8 class JRODataReader(JRODataIO):
1003 if not(self.setNextFile()):
1026 if not(self.setNextFile()):
1004 return 0
1027 return 0
1005 else:
1028 else:
1006 return 1
1029 return 1
1007 #if self.server is None:
1030 # if self.server is None:
1008 currentSize = self.fileSize - self.fp.tell()
1031 currentSize = self.fileSize - self.fp.tell()
1009 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1032 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1010 if (currentSize >= neededSize):
1033 if (currentSize >= neededSize):
@@ -1018,11 +1041,11 class JRODataReader(JRODataIO):
1018 if self.__waitNewBlock():
1041 if self.__waitNewBlock():
1019 self.lastUTTime = self.basicHeaderObj.utc
1042 self.lastUTTime = self.basicHeaderObj.utc
1020 return 1
1043 return 1
1021 #if self.server is None:
1044 # if self.server is None:
1022 if not(self.setNextFile()):
1045 if not(self.setNextFile()):
1023 return 0
1046 return 0
1024
1047
1025 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1048 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
1026 self.lastUTTime = self.basicHeaderObj.utc
1049 self.lastUTTime = self.basicHeaderObj.utc
1027
1050
1028 self.flagDiscontinuousBlock = 0
1051 self.flagDiscontinuousBlock = 0
@@ -1034,11 +1057,11 class JRODataReader(JRODataIO):
1034
1057
1035 def readNextBlock(self):
1058 def readNextBlock(self):
1036
1059
1037 #Skip block out of startTime and endTime
1060 # Skip block out of startTime and endTime
1038 while True:
1061 while True:
1039 if not(self.__setNewBlock()):
1062 if not(self.__setNewBlock()):
1040 return 0
1063 return 0
1041
1064
1042 if not(self.readBlock()):
1065 if not(self.readBlock()):
1043 return 0
1066 return 0
1044
1067
@@ -1046,17 +1069,17 class JRODataReader(JRODataIO):
1046
1069
1047 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1070 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1048
1071
1049 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1072 print "[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
1050 self.processingHeaderObj.dataBlocksPerFile,
1073 self.processingHeaderObj.dataBlocksPerFile,
1051 self.dataOut.datatime.ctime())
1074 self.dataOut.datatime.ctime())
1052 continue
1075 continue
1053
1076
1054 break
1077 break
1055
1078
1056 if self.verbose:
1079 if self.verbose:
1057 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1080 print "[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
1058 self.processingHeaderObj.dataBlocksPerFile,
1081 self.processingHeaderObj.dataBlocksPerFile,
1059 self.dataOut.datatime.ctime())
1082 self.dataOut.datatime.ctime())
1060 return 1
1083 return 1
1061
1084
1062 def __readFirstHeader(self):
1085 def __readFirstHeader(self):
@@ -1068,25 +1091,28 class JRODataReader(JRODataIO):
1068
1091
1069 self.firstHeaderSize = self.basicHeaderObj.size
1092 self.firstHeaderSize = self.basicHeaderObj.size
1070
1093
1071 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1094 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
1095 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
1072 if datatype == 0:
1096 if datatype == 0:
1073 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1097 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
1074 elif datatype == 1:
1098 elif datatype == 1:
1075 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1099 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
1076 elif datatype == 2:
1100 elif datatype == 2:
1077 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1101 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
1078 elif datatype == 3:
1102 elif datatype == 3:
1079 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1103 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
1080 elif datatype == 4:
1104 elif datatype == 4:
1081 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1105 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
1082 elif datatype == 5:
1106 elif datatype == 5:
1083 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1107 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
1084 else:
1108 else:
1085 raise ValueError, 'Data type was not defined'
1109 raise ValueError, 'Data type was not defined'
1086
1110
1087 self.dtype = datatype_str
1111 self.dtype = datatype_str
1088 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1112 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1089 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1113 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
1114 self.firstHeaderSize + self.basicHeaderSize * \
1115 (self.processingHeaderObj.dataBlocksPerFile - 1)
1090 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1116 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1091 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1117 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1092 self.getBlockDimension()
1118 self.getBlockDimension()
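The datatype index decoded above selects how each complex sample is laid out on disk; index 1, for instance, means interleaved little-endian 16-bit integers for the real and imaginary parts. A small sketch of that dtype with made-up samples:

import numpy

# dtype used when datatype == 1: (real, imag) pairs of little-endian int16
dtype_i2 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
print dtype_i2.itemsize                     # 4 bytes per complex sample

raw = numpy.array([(100, -3), (7, 42)], dtype=dtype_i2)   # made-up samples
data = raw['real'] + 1j * raw['imag']                      # combine into complex values
print data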
@@ -1113,25 +1139,25 class JRODataReader(JRODataIO):
1113 radarControllerHeaderObj = RadarControllerHeader()
1139 radarControllerHeaderObj = RadarControllerHeader()
1114 processingHeaderObj = ProcessingHeader()
1140 processingHeaderObj = ProcessingHeader()
1115
1141
1116 if not( basicHeaderObj.read(fp) ):
1142 if not(basicHeaderObj.read(fp)):
1117 fp.close()
1143 fp.close()
1118 return False
1144 return False
1119
1145
1120 if not( systemHeaderObj.read(fp) ):
1146 if not(systemHeaderObj.read(fp)):
1121 fp.close()
1147 fp.close()
1122 return False
1148 return False
1123
1149
1124 if not( radarControllerHeaderObj.read(fp) ):
1150 if not(radarControllerHeaderObj.read(fp)):
1125 fp.close()
1151 fp.close()
1126 return False
1152 return False
1127
1153
1128 if not( processingHeaderObj.read(fp) ):
1154 if not(processingHeaderObj.read(fp)):
1129 fp.close()
1155 fp.close()
1130 return False
1156 return False
1131
1157
1132 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1158 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1133 else:
1159 else:
1134 msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename
1160 msg = "[Reading] Skipping the file %s due to it hasn't enough data" % filename
1135
1161
1136 fp.close()
1162 fp.close()
1137
1163
@@ -1161,7 +1187,7 class JRODataReader(JRODataIO):
1161 if not os.path.isdir(single_path):
1187 if not os.path.isdir(single_path):
1162 continue
1188 continue
1163
1189
1164 fileList = glob.glob1(single_path, "*"+ext)
1190 fileList = glob.glob1(single_path, "*" + ext)
1165
1191
1166 if not fileList:
1192 if not fileList:
1167 continue
1193 continue
@@ -1199,7 +1225,7 class JRODataReader(JRODataIO):
1199
1225
1200 for thisPath in os.listdir(single_path):
1226 for thisPath in os.listdir(single_path):
1201
1227
1202 if not os.path.isdir(os.path.join(single_path,thisPath)):
1228 if not os.path.isdir(os.path.join(single_path, thisPath)):
1203 continue
1229 continue
1204
1230
1205 if not isRadarFolder(thisPath):
1231 if not isRadarFolder(thisPath):
@@ -1218,7 +1244,7 class JRODataReader(JRODataIO):
1218 for thisDir in dirList:
1244 for thisDir in dirList:
1219
1245
1220 datapath = os.path.join(single_path, thisDir, expLabel)
1246 datapath = os.path.join(single_path, thisDir, expLabel)
1221 fileList = glob.glob1(datapath, "*"+ext)
1247 fileList = glob.glob1(datapath, "*" + ext)
1222
1248
1223 if not fileList:
1249 if not fileList:
1224 continue
1250 continue
@@ -1238,10 +1264,10 class JRODataReader(JRODataIO):
1238 pattern_path = multi_path[0]
1264 pattern_path = multi_path[0]
1239
1265
1240 if path_empty:
1266 if path_empty:
1241 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1267 print "[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate)
1242 else:
1268 else:
1243 if not dateList:
1269 if not dateList:
1244 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
1270 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path)
1245
1271
1246 if include_path:
1272 if include_path:
1247 return dateList, pathList
1273 return dateList, pathList
@@ -1249,28 +1275,31 class JRODataReader(JRODataIO):
1249 return dateList
1275 return dateList
1250
1276
1251 def setup(self,
1277 def setup(self,
1252 path=None,
1278 path=None,
1253 startDate=None,
1279 startDate=None,
1254 endDate=None,
1280 endDate=None,
1255 startTime=datetime.time(0,0,0),
1281 startTime=datetime.time(0, 0, 0),
1256 endTime=datetime.time(23,59,59),
1282 endTime=datetime.time(23, 59, 59),
1257 set=None,
1283 set=None,
1258 expLabel = "",
1284 expLabel="",
1259 ext = None,
1285 ext=None,
1260 online = False,
1286 online=False,
1261 delay = 60,
1287 delay=60,
1262 walk = True,
1288 walk=True,
1263 getblock = False,
1289 getblock=False,
1264 nTxs = 1,
1290 nTxs=1,
1265 realtime=False,
1291 realtime=False,
1266 blocksize=None,
1292 blocksize=None,
1267 blocktime=None,
1293 blocktime=None,
1268 skip=None,
1294 skip=None,
1269 cursor=None,
1295 cursor=None,
1270 warnings=True,
1296 warnings=True,
1271 verbose=True,
1297 verbose=True,
1272 server=None,
1298 server=None,
1273 **kwargs):
1299 format=None,
1300 oneDDict=None,
1301 twoDDict=None,
1302 ind2DList=None):
1274 if server is not None:
1303 if server is not None:
1275 if 'tcp://' in server:
1304 if 'tcp://' in server:
1276 address = server
1305 address = server
@@ -1282,7 +1311,7 class JRODataReader(JRODataIO):
1282 self.receiver.connect(self.server)
1311 self.receiver.connect(self.server)
1283 time.sleep(0.5)
1312 time.sleep(0.5)
1284 print '[Starting] ReceiverData from {}'.format(self.server)
1313 print '[Starting] ReceiverData from {}'.format(self.server)
1285 else:
1314 else:
1286 self.server = None
1315 self.server = None
1287 if path == None:
1316 if path == None:
1288 raise ValueError, "[Reading] The path is not valid"
1317 raise ValueError, "[Reading] The path is not valid"
@@ -1293,32 +1322,33 class JRODataReader(JRODataIO):
1293 if online:
1322 if online:
1294 print "[Reading] Searching files in online mode..."
1323 print "[Reading] Searching files in online mode..."
1295
1324
1296 for nTries in range( self.nTries ):
1325 for nTries in range(self.nTries):
1297 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1326 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(
1327 path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1298
1328
1299 if fullpath:
1329 if fullpath:
1300 break
1330 break
1301
1331
1302 print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1332 print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries + 1)
1303 sleep( self.delay )
1333 sleep(self.delay)
1304
1334
1305 if not(fullpath):
1335 if not(fullpath):
1306 print "[Reading] There 'isn't any valid file in %s" % path
1336 print "[Reading] There 'isn't any valid file in %s" % path
1307 return
1337 return
1308
1338
1309 self.year = year
1339 self.year = year
1310 self.doy = doy
1340 self.doy = doy
1311 self.set = set - 1
1341 self.set = set - 1
1312 self.path = path
1342 self.path = path
1313 self.foldercounter = foldercounter
1343 self.foldercounter = foldercounter
1314 last_set = None
1344 last_set = None
1315 else:
1345 else:
1316 print "[Reading] Searching files in offline mode ..."
1346 print "[Reading] Searching files in offline mode ..."
1317 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1347 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1318 startTime=startTime, endTime=endTime,
1348 startTime=startTime, endTime=endTime,
1319 set=set, expLabel=expLabel, ext=ext,
1349 set=set, expLabel=expLabel, ext=ext,
1320 walk=walk, cursor=cursor,
1350 walk=walk, cursor=cursor,
1321 skip=skip)
1351 skip=skip)
1322
1352
1323 if not(pathList):
1353 if not(pathList):
1324 self.fileIndex = -1
1354 self.fileIndex = -1
@@ -1343,7 +1373,7 class JRODataReader(JRODataIO):
1343 self.startTime = startTime
1373 self.startTime = startTime
1344 self.endTime = endTime
1374 self.endTime = endTime
1345
1375
1346 #Added-----------------
1376 # Added-----------------
1347 self.selBlocksize = blocksize
1377 self.selBlocksize = blocksize
1348 self.selBlocktime = blocktime
1378 self.selBlocktime = blocktime
1349
1379
@@ -1352,10 +1382,10 class JRODataReader(JRODataIO):
1352 self.warnings = warnings
1382 self.warnings = warnings
1353
1383
1354 if not(self.setNextFile()):
1384 if not(self.setNextFile()):
1355 if (startDate!=None) and (endDate!=None):
1385 if (startDate != None) and (endDate != None):
1356 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1386 print "[Reading] No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime())
1357 elif startDate != None:
1387 elif startDate != None:
1358 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1388 print "[Reading] No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime())
1359 else:
1389 else:
1360 print "[Reading] No files"
1390 print "[Reading] No files"
1361
1391
@@ -1367,12 +1397,14 class JRODataReader(JRODataIO):
1367 # self.getBasicHeader()
1397 # self.getBasicHeader()
1368
1398
1369 if last_set != None:
1399 if last_set != None:
1370 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1400 self.dataOut.last_block = last_set * \
1401 self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1371 return
1402 return
1372
1403
1373 def getBasicHeader(self):
1404 def getBasicHeader(self):
1374
1405
1375 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1406 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1407 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1376
1408
1377 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1409 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1378
1410
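getBasicHeader above stamps each profile by adding the millisecond field and the profile offset to the block's epoch time. With purely illustrative numbers:

utc = 1500000000        # basicHeaderObj.utc (seconds since epoch), made up
miliSecond = 250        # basicHeaderObj.miliSecond, made up
profileIndex = 10
ippSeconds = 0.0004     # radarControllerHeaderObj.ippSeconds, made up

utctime = utc + miliSecond / 1000. + profileIndex * ippSeconds
print utctime           # 1500000000.254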
@@ -1384,11 +1416,10 class JRODataReader(JRODataIO):
1384
1416
1385 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1417 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1386
1418
1387 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1419 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1388
1420
1389 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1421 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1390
1422
1391
1392 def getFirstHeader(self):
1423 def getFirstHeader(self):
1393
1424
1394 raise NotImplementedError
1425 raise NotImplementedError
@@ -1411,18 +1442,19 class JRODataReader(JRODataIO):
1411
1442
1412 def printReadBlocks(self):
1443 def printReadBlocks(self):
1413
1444
1414 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1445 print "[Reading] Number of read blocks per file %04d" % self.nReadBlocks
1415
1446
1416 def printTotalBlocks(self):
1447 def printTotalBlocks(self):
1417
1448
1418 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1449 print "[Reading] Number of read blocks %04d" % self.nTotalBlocks
1419
1450
1420 def printNumberOfBlock(self):
1451 def printNumberOfBlock(self):
1452 'SPAM!'
1421
1453
1422 if self.flagIsNewBlock:
1454 # if self.flagIsNewBlock:
1423 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1455 # print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1424 self.processingHeaderObj.dataBlocksPerFile,
1456 # self.processingHeaderObj.dataBlocksPerFile,
1425 self.dataOut.datatime.ctime())
1457 # self.dataOut.datatime.ctime())
1426
1458
1427 def printInfo(self):
1459 def printInfo(self):
1428
1460
@@ -1440,25 +1472,29 class JRODataReader(JRODataIO):
1440 path=None,
1472 path=None,
1441 startDate=None,
1473 startDate=None,
1442 endDate=None,
1474 endDate=None,
1443 startTime=datetime.time(0,0,0),
1475 startTime=datetime.time(0, 0, 0),
1444 endTime=datetime.time(23,59,59),
1476 endTime=datetime.time(23, 59, 59),
1445 set=None,
1477 set=None,
1446 expLabel = "",
1478 expLabel="",
1447 ext = None,
1479 ext=None,
1448 online = False,
1480 online=False,
1449 delay = 60,
1481 delay=60,
1450 walk = True,
1482 walk=True,
1451 getblock = False,
1483 getblock=False,
1452 nTxs = 1,
1484 nTxs=1,
1453 realtime=False,
1485 realtime=False,
1454 blocksize=None,
1486 blocksize=None,
1455 blocktime=None,
1487 blocktime=None,
1456 queue=None,
1457 skip=None,
1488 skip=None,
1458 cursor=None,
1489 cursor=None,
1459 warnings=True,
1490 warnings=True,
1460 server=None,
1491 server=None,
1461 verbose=True, **kwargs):
1492 verbose=True,
1493 format=None,
1494 oneDDict=None,
1495 twoDDict=None,
1496 ind2DList=None, **kwargs):
1497
1462 if not(self.isConfig):
1498 if not(self.isConfig):
1463 self.setup(path=path,
1499 self.setup(path=path,
1464 startDate=startDate,
1500 startDate=startDate,
@@ -1480,13 +1516,18 class JRODataReader(JRODataIO):
1480 cursor=cursor,
1516 cursor=cursor,
1481 warnings=warnings,
1517 warnings=warnings,
1482 server=server,
1518 server=server,
1483 verbose=verbose)
1519 verbose=verbose,
1520 format=format,
1521 oneDDict=oneDDict,
1522 twoDDict=twoDDict,
1523 ind2DList=ind2DList)
1484 self.isConfig = True
1524 self.isConfig = True
1485 if server is None:
1525 if server is None:
1486 self.getData()
1526 self.getData()
1487 else:
1527 else:
1488 self.getFromServer()
1528 self.getFromServer()
1489
1529
1530
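run() above follows a configure-once pattern: the first call forwards its keyword arguments to setup() and sets isConfig, and every later call only fetches data. A self-contained toy that mirrors just that pattern (TinyReader is illustrative, not part of the library):

class TinyReader(object):

    isConfig = False

    def setup(self, path=None, online=False):
        self.path = path
        self.online = online

    def getData(self):
        print "[Reading] from %s (online=%s)" % (self.path, self.online)

    def run(self, **kwargs):
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True
        self.getData()

reader = TinyReader()
reader.run(path='/data/experiment', online=False)   # configures, then reads
reader.run()                                         # reuses the configuration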
1490 class JRODataWriter(JRODataIO):
1531 class JRODataWriter(JRODataIO):
1491
1532
1492 """
1533 """
@@ -1511,23 +1552,18 class JRODataWriter(JRODataIO):
1511 def __init__(self, dataOut=None):
1552 def __init__(self, dataOut=None):
1512 raise NotImplementedError
1553 raise NotImplementedError
1513
1554
1514
1515 def hasAllDataInBuffer(self):
1555 def hasAllDataInBuffer(self):
1516 raise NotImplementedError
1556 raise NotImplementedError
1517
1557
1518
1519 def setBlockDimension(self):
1558 def setBlockDimension(self):
1520 raise NotImplementedError
1559 raise NotImplementedError
1521
1560
1522
1523 def writeBlock(self):
1561 def writeBlock(self):
1524 raise NotImplementedError
1562 raise NotImplementedError
1525
1563
1526
1527 def putData(self):
1564 def putData(self):
1528 raise NotImplementedError
1565 raise NotImplementedError
1529
1566
1530
1531 def getProcessFlags(self):
1567 def getProcessFlags(self):
1532
1568
1533 processFlags = 0
1569 processFlags = 0
@@ -1563,12 +1599,12 class JRODataWriter(JRODataIO):
1563
1599
1564 def setBasicHeader(self):
1600 def setBasicHeader(self):
1565
1601
1566 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1602 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1567 self.basicHeaderObj.version = self.versionFile
1603 self.basicHeaderObj.version = self.versionFile
1568 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1604 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1569
1605
1570 utc = numpy.floor(self.dataOut.utctime)
1606 utc = numpy.floor(self.dataOut.utctime)
1571 milisecond = (self.dataOut.utctime - utc)* 1000.0
1607 milisecond = (self.dataOut.utctime - utc) * 1000.0
1572
1608
1573 self.basicHeaderObj.utc = utc
1609 self.basicHeaderObj.utc = utc
1574 self.basicHeaderObj.miliSecond = milisecond
1610 self.basicHeaderObj.miliSecond = milisecond
@@ -1606,7 +1642,8 class JRODataWriter(JRODataIO):
1606
1642
1607 # CALCULAR PARAMETROS
1643 # CALCULAR PARAMETROS
1608
1644
1609 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1645 sizeLongHeader = self.systemHeaderObj.size + \
1646 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1610 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1647 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1611
1648
1612 self.basicHeaderObj.write(self.fp)
1649 self.basicHeaderObj.write(self.fp)
@@ -1632,12 +1669,11 class JRODataWriter(JRODataIO):
1632 self.basicHeaderObj.write(self.fp)
1669 self.basicHeaderObj.write(self.fp)
1633 return 1
1670 return 1
1634
1671
1635 if not( self.setNextFile() ):
1672 if not(self.setNextFile()):
1636 return 0
1673 return 0
1637
1674
1638 return 1
1675 return 1
1639
1676
1640
1641 def writeNextBlock(self):
1677 def writeNextBlock(self):
1642 """
1678 """
1643 Selects the next block of data and writes it to a file
1679 Selects the next block of data and writes it to a file
@@ -1646,13 +1682,13 class JRODataWriter(JRODataIO):
1646 0 : if the data block could not be written
1682 0 : if the data block could not be written
1647 1 : if the data block was written
1683 1 : if the data block was written
1648 """
1684 """
1649 if not( self.__setNewBlock() ):
1685 if not(self.__setNewBlock()):
1650 return 0
1686 return 0
1651
1687
1652 self.writeBlock()
1688 self.writeBlock()
1653
1689
1654 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1690 print "[Writing] Block No. %d/%d" % (self.blockIndex,
1655 self.processingHeaderObj.dataBlocksPerFile)
1691 self.processingHeaderObj.dataBlocksPerFile)
1656
1692
1657 return 1
1693 return 1
1658
1694
@@ -1677,46 +1713,48 class JRODataWriter(JRODataIO):
1677 if self.fp != None:
1713 if self.fp != None:
1678 self.fp.close()
1714 self.fp.close()
1679
1715
1680 timeTuple = time.localtime( self.dataOut.utctime)
1716 timeTuple = time.localtime(self.dataOut.utctime)
1681 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1717 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1682
1718
1683 fullpath = os.path.join( path, subfolder )
1719 fullpath = os.path.join(path, subfolder)
1684 setFile = self.setFile
1720 setFile = self.setFile
1685
1721
1686 if not( os.path.exists(fullpath) ):
1722 if not(os.path.exists(fullpath)):
1687 os.mkdir(fullpath)
1723 os.mkdir(fullpath)
1688 setFile = -1 #inicializo mi contador de seteo
1724 setFile = -1 # inicializo mi contador de seteo
1689 else:
1725 else:
1690 filesList = os.listdir( fullpath )
1726 filesList = os.listdir(fullpath)
1691 if len( filesList ) > 0:
1727 if len(filesList) > 0:
1692 filesList = sorted( filesList, key=str.lower )
1728 filesList = sorted(filesList, key=str.lower)
1693 filen = filesList[-1]
1729 filen = filesList[-1]
1694 # el filename debera tener el siguiente formato
1730 # el filename debera tener el siguiente formato
1695 # 0 1234 567 89A BCDE (hex)
1731 # 0 1234 567 89A BCDE (hex)
1696 # x YYYY DDD SSS .ext
1732 # x YYYY DDD SSS .ext
1697 if isNumber( filen[8:11] ):
1733 if isNumber(filen[8:11]):
1698 setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1734 # inicializo mi contador de seteo al seteo del ultimo file
1735 setFile = int(filen[8:11])
1699 else:
1736 else:
1700 setFile = -1
1737 setFile = -1
1701 else:
1738 else:
1702 setFile = -1 #inicializo mi contador de seteo
1739 setFile = -1 # inicializo mi contador de seteo
1703
1740
1704 setFile += 1
1741 setFile += 1
1705
1742
1706 #If this is a new day it resets some values
1743 # If this is a new day it resets some values
1707 if self.dataOut.datatime.date() > self.fileDate:
1744 if self.dataOut.datatime.date() > self.fileDate:
1708 setFile = 0
1745 setFile = 0
1709 self.nTotalBlocks = 0
1746 self.nTotalBlocks = 0
1710
1747
1711 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1748 filen = '%s%4.4d%3.3d%3.3d%s' % (
1749 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1712
1750
1713 filename = os.path.join( path, subfolder, filen )
1751 filename = os.path.join(path, subfolder, filen)
1714
1752
1715 fp = open( filename,'wb' )
1753 fp = open(filename, 'wb')
1716
1754
1717 self.blockIndex = 0
1755 self.blockIndex = 0
1718
1756
1719 #guardando atributos
1757 # guardando atributos
1720 self.filename = filename
1758 self.filename = filename
1721 self.subfolder = subfolder
1759 self.subfolder = subfolder
1722 self.fp = fp
1760 self.fp = fp
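setNextFile above takes the next set number from the last file already present in the day folder (characters 8 to 11 of the xYYYYDDDSSS.ext name) and resets the counter on a new day. A worked example with a hypothetical directory listing:

# Hypothetical files already written for day 2017/123
filesList = sorted(['D2017123000.r', 'D2017123001.r', 'D2017123007.r'], key=str.lower)
filen = filesList[-1]                 # 'D2017123007.r'

setFile = int(filen[8:11])            # 7, set number of the last file
setFile += 1                          # the next file gets set 008

filen = '%s%4.4d%3.3d%3.3d%s' % ('d', 2017, 123, setFile, '.r')
print filen                           # d2017123008.r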
@@ -1726,7 +1764,7 class JRODataWriter(JRODataIO):
1726
1764
1727 self.setFirstHeader()
1765 self.setFirstHeader()
1728
1766
1729 print '[Writing] Opening file: %s'%self.filename
1767 print '[Writing] Opening file: %s' % self.filename
1730
1768
1731 self.__writeFirstHeader()
1769 self.__writeFirstHeader()
1732
1770
@@ -1771,7 +1809,7 class JRODataWriter(JRODataIO):
1771
1809
1772 self.dataOut = dataOut
1810 self.dataOut = dataOut
1773 self.fileDate = self.dataOut.datatime.date()
1811 self.fileDate = self.dataOut.datatime.date()
1774 #By default
1812 # By default
1775 self.dtype = self.dataOut.dtype
1813 self.dtype = self.dataOut.dtype
1776
1814
1777 if datatype is not None:
1815 if datatype is not None:
@@ -1789,7 +1827,8 class JRODataWriter(JRODataIO):
1789
1827
1790 if not(self.isConfig):
1828 if not(self.isConfig):
1791
1829
1792 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1830 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1831 set=set, ext=ext, datatype=datatype, **kwargs)
1793 self.isConfig = True
1832 self.isConfig = True
1794
1833
1795 self.putData()
1834 self.putData()
@@ -178,8 +178,8 class ParamReader(ProcessingUnit):
178 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
178 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
179 print
179 print
180
180
181 for i in range(len(filenameList)):
181 # for i in range(len(filenameList)):
182 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
182 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
183
183
184 self.filenameList = filenameList
184 self.filenameList = filenameList
185 self.datetimeList = datetimeList
185 self.datetimeList = datetimeList
@@ -11,4 +11,5 from jroproc_amisr import *
11 from jroproc_correlation import *
11 from jroproc_correlation import *
12 from jroproc_parameters import *
12 from jroproc_parameters import *
13 from jroproc_spectra_lags import *
13 from jroproc_spectra_lags import *
14 from jroproc_spectra_acf import *
\ No newline at end of file
14 from jroproc_spectra_acf import *
15 from bltrproc_parameters import *
@@ -113,6 +113,8 class Data(object):
113 self.__heights = []
113 self.__heights = []
114 self.__all_heights = set()
114 self.__all_heights = set()
115 for plot in self.plottypes:
115 for plot in self.plottypes:
116 if 'snr' in plot:
117 plot = 'snr'
116 self.data[plot] = {}
118 self.data[plot] = {}
117
119
118 def shape(self, key):
120 def shape(self, key):
@@ -138,8 +140,9 class Data(object):
138 self.parameters = getattr(dataOut, 'parameters', [])
140 self.parameters = getattr(dataOut, 'parameters', [])
139 self.pairs = dataOut.pairsList
141 self.pairs = dataOut.pairsList
140 self.channels = dataOut.channelList
142 self.channels = dataOut.channelList
141 self.xrange = (dataOut.getFreqRange(1)/1000. , dataOut.getAcfRange(1) , dataOut.getVelRange(1))
142 self.interval = dataOut.getTimeInterval()
143 self.interval = dataOut.getTimeInterval()
144 if 'spc' in self.plottypes or 'cspc' in self.plottypes:
145 self.xrange = (dataOut.getFreqRange(1)/1000. , dataOut.getAcfRange(1) , dataOut.getVelRange(1))
143 self.__heights.append(dataOut.heightList)
146 self.__heights.append(dataOut.heightList)
144 self.__all_heights.update(dataOut.heightList)
147 self.__all_heights.update(dataOut.heightList)
145 self.__times.append(tm)
148 self.__times.append(tm)
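The change above folds every snr-flavoured plot type into a single 'snr' key of self.data and only computes xrange when spectra ('spc' or 'cspc') are requested. A minimal sketch of the key normalisation with illustrative plot type names:

plottypes = ['snr_db', 'dop', 'spc']   # illustrative plot types
data = {}
for plot in plottypes:
    if 'snr' in plot:
        plot = 'snr'
    data[plot] = {}

print sorted(data.keys())              # ['dop', 'snr', 'spc']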