Fix merge
jespinoza
r1107:0e8d1b4cd150 merge
@@ -0,0 +1,343 @@
1 '''
2 Created on Sep 10, 2017
3
4 @author: Juan C. Espinoza
5 '''
6
7
8 import os
9 import sys
10 import time
11 import glob
12 import datetime
13
14 import numpy
15
16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 from schainpy.model.data.jrodata import Parameters
18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 from schainpy.utils import log
20
21 FILE_HEADER_STRUCTURE = numpy.dtype([
22 ('year', 'f'),
23 ('doy', 'f'),
24 ('nint', 'f'),
25 ('navg', 'f'),
26 ('fh', 'f'),
27 ('dh', 'f'),
28 ('nheights', 'f'),
29 ('ipp', 'f')
30 ])
31
32 REC_HEADER_STRUCTURE = numpy.dtype([
33 ('magic', 'f'),
34 ('hours', 'f'),
35 ('interval', 'f'),
36 ('h0', 'f'),
37 ('nheights', 'f'),
38 ('snr1', 'f'),
39 ('snr2', 'f'),
40 ('snr', 'f'),
41 ])
42
43 DATA_STRUCTURE = numpy.dtype([
44 ('range', '<u4'),
45 ('status', '<u4'),
46 ('zonal', '<f4'),
47 ('meridional', '<f4'),
48 ('vertical', '<f4'),
49 ('zonal_a', '<f4'),
50 ('meridional_a', '<f4'),
51 ('corrected_fading', '<f4'), # seconds
52 ('uncorrected_fading', '<f4'), # seconds
53 ('time_diff', '<f4'),
54 ('major_axis', '<f4'),
55 ('axial_ratio', '<f4'),
56 ('orientation', '<f4'),
57 ('sea_power', '<u4'),
58 ('sea_algorithm', '<u4')
59 ])
60
61
62 class JULIAParamReader(JRODataReader, ProcessingUnit):
63 '''
64 Julia data (eej, spf, 150km) *.dat files
65 '''
66
67 ext = '.dat'
68
69 def __init__(self, **kwargs):
70
71 ProcessingUnit.__init__(self, **kwargs)
72
73 self.dataOut = Parameters()
74 self.counter_records = 0
75 self.flagNoMoreFiles = 0
76 self.isConfig = False
77 self.filename = None
78 self.clockpulse = 0.15
79 self.kd = 213.6
80
81 def setup(self,
82 path=None,
83 startDate=None,
84 endDate=None,
85 ext=None,
86 startTime=datetime.time(0, 0, 0),
87 endTime=datetime.time(23, 59, 59),
88 timezone=0,
89 format=None,
90 **kwargs):
91
92 self.path = path
93 self.startDate = startDate
94 self.endDate = endDate
95 self.startTime = startTime
96 self.endTime = endTime
97 self.datatime = datetime.datetime(1900, 1, 1)
98 self.format = format
99
100 if self.path is None:
101 raise ValueError("The path is not valid")
102
103 if ext is None:
104 ext = self.ext
105
106 self.search_files(self.path, startDate, endDate, ext)
107 self.timezone = timezone
108 self.fileIndex = 0
109
110 if not self.fileList:
111 log.warning('There are no files matching these dates in the folder: {}'.format(
112 path), self.name)
113
114 self.setNextFile()
115
116 def search_files(self, path, startDate, endDate, ext):
117 '''
118 Search for Julia rawdata files in the given path
119 Create a list of files to process, restricted to [startDate, endDate]
120
121 Input:
122 path - Path where the Julia rawdata files are located
123 startDate - Select files from this date onwards
124 endDate - Select files up to this date
125 ext - Extension of the files to read
126 '''
127
128 log.success('Searching files in {} '.format(path), self.name)
129 fileList0 = glob.glob1(path, '{}*{}'.format(self.format.upper(), ext))
130 fileList0.sort()
131
132 self.fileList = []
133 self.dateFileList = []
134
135 for thisFile in fileList0:
136 year = thisFile[2:4]
137 if not isNumber(year):
138 continue
139
140 month = thisFile[4:6]
141 if not isNumber(month):
142 continue
143
144 day = thisFile[6:8]
145 if not isNumber(day):
146 continue
147
148 year, month, day = int(year), int(month), int(day)
149 dateFile = datetime.date(year+2000, month, day)
150
151 if (startDate > dateFile) or (endDate < dateFile):
152 continue
153
154 self.fileList.append(thisFile)
155 self.dateFileList.append(dateFile)
156
157 return
158
159 def setNextFile(self):
160
161 file_id = self.fileIndex
162
163 if file_id == len(self.fileList):
164 log.success('No more files in the folder', self.name)
165 self.flagNoMoreFiles = 1
166 return 0
167
168 log.success('Opening {}'.format(self.fileList[file_id]), self.name)
169 filename = os.path.join(self.path, self.fileList[file_id])
170
171 dirname, name = os.path.split(filename)
172 self.siteFile = name.split('.')[0]
173 if self.filename is not None:
174 self.fp.close()
175 self.filename = filename
176 self.fp = open(self.filename, 'rb')
177
178 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
179 yy = self.header_file['year'] - 1900 * (self.header_file['year'] > 3000)  # the year field may be stored with a +1900 offset
180 self.year = int(yy + 1900 * (yy < 1000))  # or as a short year; normalize to four digits
181 self.doy = int(self.header_file['doy'])
182 self.dH = round(self.header_file['dh'], 2)
183 self.ipp = round(self.header_file['ipp'], 2)
184 self.sizeOfFile = os.path.getsize(self.filename)
185 self.counter_records = 0
186 self.flagIsNewFile = 0
187 self.fileIndex += 1
188
189 return 1
190
191 def readNextBlock(self):
192
193 while True:
194 if not self.readBlock():
195 self.flagIsNewFile = 1
196 if not self.setNextFile():
197 return 0
198
199 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
200 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
201 log.warning(
202 'Reading Record No. {} -> {} [Skipping]'.format(
203 self.counter_records,
204 self.datatime.ctime()),
205 self.name)
206 continue
207 break
208
209 log.log('Reading Record No. {} -> {}'.format(
210 self.counter_records,
211 self.datatime.ctime()), self.name)
212
213 return 1
214
215 def readBlock(self):
216
217 pointer = self.fp.tell()
218 heights, dt = self.readHeader()
219 self.fp.seek(pointer)
220 buffer_h = []
221 buffer_d = []
222 while True:
223 pointer = self.fp.tell()
224 if pointer == self.sizeOfFile:
225 return 0
226 heights, datatime = self.readHeader()
227 if dt == datatime:
228 buffer_h.append(heights)
229 buffer_d.append(self.readData(len(heights)))
230 continue
231 self.fp.seek(pointer)
232 break
233
234 if dt.date() > self.datatime.date():
235 self.flagDiscontinuousBlock = 1
236 self.datatime = dt
237 self.time = (dt - datetime.datetime(1970, 1, 1)).total_seconds() + time.timezone
238 self.heights = numpy.concatenate(buffer_h)
239 self.buffer = numpy.zeros((5, len(self.heights))) + numpy.nan
240 self.buffer[0, :] = numpy.concatenate([buf[0] for buf in buffer_d])
241 self.buffer[1, :] = numpy.concatenate([buf[1] for buf in buffer_d])
242 self.buffer[2, :] = numpy.concatenate([buf[2] for buf in buffer_d])
243 self.buffer[3, :] = numpy.concatenate([buf[3] for buf in buffer_d])
244 self.buffer[4, :] = numpy.concatenate([buf[4] for buf in buffer_d])
245
246 self.counter_records += 1
247
248 return 1
249
250 def readHeader(self):
251 '''
252 Parse recordHeader
253 '''
254
255 self.header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
256 self.interval = self.header_rec['interval']
257 if self.header_rec['magic'] == 888.:
258 self.header_rec['h0'] = round(self.header_rec['h0'], 2)
259 nheights = int(self.header_rec['nheights'])
260 hours = float(self.header_rec['hours'][0])
261 heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
263 return heights, datatime
264 else:
265 return None, None  # invalid record header (magic != 888); keep the (heights, datatime) arity expected by callers
266
267 def readData(self, N):
268 '''
269 Parse data
270 '''
271
272 buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
273
274 pow0 = buffer[:, 0]
275 pow1 = buffer[:, 1]
276 acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
277 acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
278 dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
279
280 ### SNR
281 sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
282 sno10 = numpy.log10(sno)
283 # dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
284
285 ### Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
287 sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
288
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag, acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)  # vertical drift from the phase of the lag-one ACFs
290 dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
291 err = numpy.where(dvzo <= 0.1)
292 dvzo[err] = 0.1
293
294 #Zonal Drifts
295 dt = self.header_file['nint']*self.ipp / 1.5E5
296 coh = numpy.sqrt(numpy.abs(dccf))
297 err = numpy.where(coh >= 1.0)
298 coh[err] = numpy.sqrt(0.99999)
299
300 err = numpy.where(coh <= 0.1)
301 coh[err] = numpy.sqrt(0.1)
302
303 vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)  # zonal drift from the cross-correlation phase
304 dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
305
306 err = numpy.where(dvxo <= 0.1)
307 dvxo[err] = 0.1
308
309 return vzo, dvzo, vxo, dvxo, sno10
310
311 def set_output(self):
312 '''
313 Storing data from databuffer to dataOut object
314 '''
315
316 self.dataOut.data_SNR = self.buffer[4].reshape(1, -1)
317 self.dataOut.heightList = self.heights
318 self.dataOut.data_param = self.buffer[0:4, :]
319 self.dataOut.utctimeInit = self.time
320 self.dataOut.utctime = self.time
321 self.dataOut.useLocalTime = True
322 self.dataOut.paramInterval = self.interval
323 self.dataOut.timezone = self.timezone
324 self.dataOut.sizeOfFile = self.sizeOfFile
325 self.dataOut.flagNoData = False
326 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
327
328 def getData(self):
329 '''
330 Read the next block of data and store it in the dataOut object
331 '''
332 if self.flagNoMoreFiles:
333 self.dataOut.flagNoData = True
334 log.success('No file left to process', self.name)
335 return 0
336
337 if not self.readNextBlock():
338 self.dataOut.flagNoData = True
339 return 0
340
341 self.set_output()
342
343 return 1
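
For reference, here is a minimal sketch of how the reader defined above can be driven on its own, outside a full schainpy controller script. The module path schainpy.model.io.julIO_param, the data directory and the format value are illustrative assumptions; setup() and getData() are the methods shown in this diff.

# Hedged usage sketch for JULIAParamReader; all concrete values are illustrative.
import datetime

from schainpy.model.io.julIO_param import JULIAParamReader  # assumed module path

reader = JULIAParamReader()
reader.setup(
    path='/data/julia/eej',                # hypothetical directory containing *.dat files
    startDate=datetime.date(2017, 9, 10),
    endDate=datetime.date(2017, 9, 11),
    startTime=datetime.time(0, 0, 0),
    endTime=datetime.time(23, 59, 59),
    format='eej',                          # data subset prefix (eej, spf, 150km per the docstring)
)

while reader.getData():                    # reads one record block per call
    print('{}: {} heights'.format(reader.dataOut.utctime, len(reader.dataOut.heightList)))

In a real workflow the same parameters would normally be supplied through a controller script; the direct form above only mirrors the reader's own API.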
@@ -1,6 +1,9 @@
 ## CHANGELOG:
 
 ### 2.3
+* Added support for Madrigal formats (reading/writing).
+* Added support for reading BLTR parameters (*.sswma).
+* Added support for reading Julia format (*.dat).
 * Added high order function `MPProject` for multiprocessing scripts.
 * Added two new Processing Units `PublishData` and `ReceiverData` for receiving and sending dataOut through multiple ways (tcp, ipc, inproc).
 * Added a new graphics Processing Unit `PlotterReceiver`. It is decoupled from normal processing sequence with support for data generated by multiprocessing scripts.
@@ -58,6 +58,7 @@ class PlotData(Operation, Process):
 
 Operation.__init__(self, plot=True, **kwargs)
 Process.__init__(self)
+
 self.kwargs['code'] = self.CODE
 self.mp = False
 self.data = None
@@ -94,7 +95,7 @@ class PlotData(Operation, Process):
 self.height = kwargs.get('height', None)
 self.colorbar = kwargs.get('colorbar', True)
 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
-self.titles = ['' for __ in range(16)]
+self.titles = kwargs.get('titles', [])
 self.polar = False
 
 def __fmtTime(self, x, pos):
@@ -434,7 +435,7 @@ class PlotData(Operation, Process):
 fig.tight_layout()
 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
 self.getDateTime(self.max_time).strftime('%Y/%m/%d')))
-# fig.canvas.draw()
+fig.canvas.draw()
 
 if self.save and self.data.ended:
 channels = range(self.nrows)
@@ -447,7 +448,8 @@ class PlotData(Operation, Process):
 '{}{}_{}.png'.format(
 self.CODE,
 label,
-self.getDateTime(self.saveTime).strftime('%y%m%d_%H%M%S')
+self.getDateTime(self.saveTime).strftime(
+'%y%m%d_%H%M%S'),
 )
 )
 log.log('Saving figure: {}'.format(figname), self.name)
@@ -895,6 +897,7 @@ class PlotParamData(PlotRTIData):
 self.nplots += 1
 
 self.ylabel = 'Height [Km]'
+if not self.titles:
 self.titles = self.data.parameters \
 if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)]
 if self.showSNR:
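
Taken together with the earlier PlotData hunk, this change gives user-supplied titles precedence: a titles keyword passed to the plot operation is kept, otherwise PlotParamData falls back to self.data.parameters, and finally to generated 'Param N' labels. A small self-contained illustration of that precedence (plain Python, independent of schainpy; names are illustrative):

def resolve_titles(user_titles, parameters, nrows):
    # Mirrors the fallback order introduced by this change (illustration only).
    if user_titles:      # an explicit titles=... keyword wins
        return user_titles
    if parameters:       # otherwise use the parameter names carried by the data
        return parameters
    return ['Param {}'.format(x) for x in range(nrows)]  # last-resort labels

print(resolve_titles(['Zonal drift', 'Vertical drift'], ['param0'], 2))  # user titles win
print(resolve_titles([], ['Zonal', 'Meridional'], 2))                    # parameter names
print(resolve_titles([], [], 3))                                         # Param 0, Param 1, Param 2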
@@ -18,3 +18,4 @@ from jroIO_madrigal import *
 from bltrIO_param import *
 from jroIO_bltr import *
 from jroIO_mira35c import *
+from julIO_param import *
\ No newline at end of file
@@ -138,12 +138,11 @@ class Data(object):
 return self.data[key][self.__times[0]].shape
 return (0,)
 
-def update(self, dataOut):
+def update(self, dataOut, tm):
 '''
 Update data object with new dataOut
 '''
 
-tm = dataOut.utctime
 if tm in self.__times:
 return
 
@@ -599,7 +598,7 @@ class PlotterReceiver(ProcessingUnit, Process):
 self.data.setup()
 self.dates.append(dt)
 
-self.data.update(dataOut)
+self.data.update(dataOut, tm)
 
 if dataOut.finished is True:
 self.connections -= 1
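
The last two hunks change the buffering contract: Data.update no longer derives the time key from dataOut.utctime itself; PlotterReceiver now chooses tm and passes it in, while the duplicate check against the stored times still prevents re-inserting the same key. A toy stand-in for the new call pattern (illustrative only, not the actual schainpy classes):

class ToyData(object):
    # Minimal stand-in for Data: the caller now supplies the time key.
    def __init__(self):
        self.times = []

    def update(self, dataOut, tm):
        if tm in self.times:    # duplicate keys are still ignored
            return
        self.times.append(tm)

buf = ToyData()
buf.update(object(), 1505001600)   # tm chosen by the caller, e.g. dataOut.utctime
buf.update(object(), 1505001600)   # same key, so the update is skipped
print(len(buf.times))              # 1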