Merge branch 'v2.3' of http://jro-dev.igp.gob.pe/rhodecode/schain into v2.3
José Chávez -
r1109:354077bea65d merge
@@ -0,0 +1,343
1 '''
2 Created on Sep 10, 2017
3
4 @author: Juan C. Espinoza
5 '''
6
7
8 import os
9 import sys
10 import time
11 import glob
12 import datetime
13
14 import numpy
15
16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 from schainpy.model.data.jrodata import Parameters
18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 from schainpy.utils import log
20
21 FILE_HEADER_STRUCTURE = numpy.dtype([
22 ('year', 'f'),
23 ('doy', 'f'),
24 ('nint', 'f'),
25 ('navg', 'f'),
26 ('fh', 'f'),
27 ('dh', 'f'),
28 ('nheights', 'f'),
29 ('ipp', 'f')
30 ])
31
32 REC_HEADER_STRUCTURE = numpy.dtype([
33 ('magic', 'f'),
34 ('hours', 'f'),
35 ('interval', 'f'),
36 ('h0', 'f'),
37 ('nheights', 'f'),
38 ('snr1', 'f'),
39 ('snr2', 'f'),
40 ('snr', 'f'),
41 ])
42
43 DATA_STRUCTURE = numpy.dtype([
44 ('range', '<u4'),
45 ('status', '<u4'),
46 ('zonal', '<f4'),
47 ('meridional', '<f4'),
48 ('vertical', '<f4'),
49 ('zonal_a', '<f4'),
50 ('meridional_a', '<f4'),
51 ('corrected_fading', '<f4'), # seconds
52 ('uncorrected_fading', '<f4'), # seconds
53 ('time_diff', '<f4'),
54 ('major_axis', '<f4'),
55 ('axial_ratio', '<f4'),
56 ('orientation', '<f4'),
57 ('sea_power', '<u4'),
58 ('sea_algorithm', '<u4')
59 ])
60
61
62 class JULIAParamReader(JRODataReader, ProcessingUnit):
63 '''
64 Julia data (eej, spf, 150km) *.dat files
65 '''
66
67 ext = '.dat'
68
69 def __init__(self, **kwargs):
70
71 ProcessingUnit.__init__(self, **kwargs)
72
73 self.dataOut = Parameters()
74 self.counter_records = 0
75 self.flagNoMoreFiles = 0
76 self.isConfig = False
77 self.filename = None
78 self.clockpulse = 0.15
79 self.kd = 213.6
80
81 def setup(self,
82 path=None,
83 startDate=None,
84 endDate=None,
85 ext=None,
86 startTime=datetime.time(0, 0, 0),
87 endTime=datetime.time(23, 59, 59),
88 timezone=0,
89 format=None,
90 **kwargs):
91
92 self.path = path
93 self.startDate = startDate
94 self.endDate = endDate
95 self.startTime = startTime
96 self.endTime = endTime
97 self.datatime = datetime.datetime(1900, 1, 1)
98 self.format = format
99
100 if self.path is None:
101 raise ValueError("The path is not valid")
102
103 if ext is None:
104 ext = self.ext
105
106 self.search_files(self.path, startDate, endDate, ext)
107 self.timezone = timezone
108 self.fileIndex = 0
109
110 if not self.fileList:
111 log.warning('There are no files matching the given dates in the folder: {}'.format(
112 path), self.name)
113
114 self.setNextFile()
115
116 def search_files(self, path, startDate, endDate, ext):
117 '''
118 Searching for Julia data files in path
119 Creating a list of files to process included in [startDate, endDate]
120
121 Input:
122 path - Path to find Julia data files
123 startDate - Select files from this date
124 endDate - Select files until this date
125 ext - Extension of the file to read
126 '''
127
128 log.success('Searching files in {} '.format(path), self.name)
129 fileList0 = glob.glob1(path, '{}*{}'.format(self.format.upper(), ext))
130 fileList0.sort()
131
132 self.fileList = []
133 self.dateFileList = []
134
135 for thisFile in fileList0:
136 year = thisFile[2:4]
137 if not isNumber(year):
138 continue
139
140 month = thisFile[4:6]
141 if not isNumber(month):
142 continue
143
144 day = thisFile[6:8]
145 if not isNumber(day):
146 continue
147
148 year, month, day = int(year), int(month), int(day)
149 dateFile = datetime.date(year+2000, month, day)
150
151 if (startDate > dateFile) or (endDate < dateFile):
152 continue
153
154 self.fileList.append(thisFile)
155 self.dateFileList.append(dateFile)
156
157 return
158
159 def setNextFile(self):
160
161 file_id = self.fileIndex
162
163 if file_id == len(self.fileList):
164 log.success('No more files in the folder', self.name)
165 self.flagNoMoreFiles = 1
166 return 0
167
168 log.success('Opening {}'.format(self.fileList[file_id]), self.name)
169 filename = os.path.join(self.path, self.fileList[file_id])
170
171 dirname, name = os.path.split(filename)
172 self.siteFile = name.split('.')[0]
173 if self.filename is not None:
174 self.fp.close()
175 self.filename = filename
176 self.fp = open(self.filename, 'rb')
177
178 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
179 yy = self.header_file['year'] - 1900 * (self.header_file['year'] > 3000)
180 self.year = int(yy + 1900 * (yy < 1000))
181 self.doy = int(self.header_file['doy'])
182 self.dH = round(self.header_file['dh'], 2)
183 self.ipp = round(self.header_file['ipp'], 2)
184 self.sizeOfFile = os.path.getsize(self.filename)
185 self.counter_records = 0
186 self.flagIsNewFile = 0
187 self.fileIndex += 1
188
189 return 1
190
191 def readNextBlock(self):
192
193 while True:
194 if not self.readBlock():
195 self.flagIsNewFile = 1
196 if not self.setNextFile():
197 return 0
198
199 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
200 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
201 log.warning(
202 'Reading Record No. {} -> {} [Skipping]'.format(
203 self.counter_records,
204 self.datatime.ctime()),
205 self.name)
206 continue
207 break
208
209 log.log('Reading Record No. {} -> {}'.format(
210 self.counter_records,
211 self.datatime.ctime()), self.name)
212
213 return 1
214
215 def readBlock(self):
216
217 pointer = self.fp.tell()
218 heights, dt = self.readHeader()
219 self.fp.seek(pointer)
220 buffer_h = []
221 buffer_d = []
222 while True:
223 pointer = self.fp.tell()
224 if pointer == self.sizeOfFile:
225 return 0
226 heights, datatime = self.readHeader()
227 if dt == datatime:
228 buffer_h.append(heights)
229 buffer_d.append(self.readData(len(heights)))
230 continue
231 self.fp.seek(pointer)
232 break
233
234 if dt.date() > self.datatime.date():
235 self.flagDiscontinuousBlock = 1
236 self.datatime = dt
237 self.time = (dt - datetime.datetime(1970, 1, 1)).total_seconds() + time.timezone
238 self.heights = numpy.concatenate(buffer_h)
239 self.buffer = numpy.zeros((5, len(self.heights))) + numpy.nan
240 self.buffer[0, :] = numpy.concatenate([buf[0] for buf in buffer_d])
241 self.buffer[1, :] = numpy.concatenate([buf[1] for buf in buffer_d])
242 self.buffer[2, :] = numpy.concatenate([buf[2] for buf in buffer_d])
243 self.buffer[3, :] = numpy.concatenate([buf[3] for buf in buffer_d])
244 self.buffer[4, :] = numpy.concatenate([buf[4] for buf in buffer_d])
245
246 self.counter_records += 1
247
248 return 1
249
250 def readHeader(self):
251 '''
252 Parse recordHeader
253 '''
254
255 self.header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
256 self.interval = self.header_rec['interval']
257 if self.header_rec['magic'] == 888.:
258 self.header_rec['h0'] = round(self.header_rec['h0'], 2)
259 nheights = int(self.header_rec['nheights'])
260 hours = float(self.header_rec['hours'][0])
261 heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
263 return heights, datatime
264 else:
265 return False
266
267 def readData(self, N):
268 '''
269 Parse data
270 '''
271
272 buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
273
274 pow0 = buffer[:, 0]
275 pow1 = buffer[:, 1]
276 acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
277 acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
278 dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
279
280 ### SNR
281 sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
282 sno10 = numpy.log10(sno)
283 # dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
284
285 ### Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
287 sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
288
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag,acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)
290 dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
291 err = numpy.where(dvzo <= 0.1)
292 dvzo[err] = 0.1
293
294 #Zonal Drifts
295 dt = self.header_file['nint']*self.ipp / 1.5E5
296 coh = numpy.sqrt(numpy.abs(dccf))
297 err = numpy.where(coh >= 1.0)
298 coh[err] = numpy.sqrt(0.99999)
299
300 err = numpy.where(coh <= 0.1)
301 coh[err] = numpy.sqrt(0.1)
302
303 vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)
304 dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
305
306 err = numpy.where(dvxo <= 0.1)
307 dvxo[err] = 0.1
308
309 return vzo, dvzo, vxo, dvxo, sno10
310
311 def set_output(self):
312 '''
313 Storing data from databuffer to dataOut object
314 '''
315
316 self.dataOut.data_SNR = self.buffer[4].reshape(1, -1)
317 self.dataOut.heightList = self.heights
318 self.dataOut.data_param = self.buffer[0:4,]
319 self.dataOut.utctimeInit = self.time
320 self.dataOut.utctime = self.time
321 self.dataOut.useLocalTime = True
322 self.dataOut.paramInterval = self.interval
323 self.dataOut.timezone = self.timezone
324 self.dataOut.sizeOfFile = self.sizeOfFile
325 self.dataOut.flagNoData = False
326 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
327
328 def getData(self):
329 '''
330 Read the next data block and store it in the dataOut object
331 '''
332 if self.flagNoMoreFiles:
333 self.dataOut.flagNoData = True
334 log.success('No files left to process', self.name)
335 return 0
336
337 if not self.readNextBlock():
338 self.dataOut.flagNoData = True
339 return 0
340
341 self.set_output()
342
343 return 1
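
For reference, here is a minimal sketch of driving the new reader directly, using only the setup() and getData() methods defined in the file above. The module path follows the "from julIO_param import *" line added to the io package's __init__.py further down; the folder, dates and format prefix are placeholders:

    import datetime
    from schainpy.model.io.julIO_param import JULIAParamReader

    reader = JULIAParamReader()
    reader.setup(
        path='/data/julia',                    # placeholder folder holding the *.dat files
        startDate=datetime.date(2017, 9, 10),  # placeholder date range
        endDate=datetime.date(2017, 9, 11),
        startTime=datetime.time(0, 0, 0),
        endTime=datetime.time(23, 59, 59),
        format='eej',                          # placeholder prefix used to glob the files
    )

    while reader.getData():                    # fills reader.dataOut one record block at a time
        print('{} -> {}'.format(reader.dataOut.utctime, reader.dataOut.data_param.shape))
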
@@ -1,6 +1,9
1 1 ## CHANGELOG:
2 2
3 3 ### 2.3
4 * Added support for Madrigal formats (reading/writing).
5 * Added support for reading BLTR parameters (*.sswma).
6 * Added support for reading Julia format (*.dat).
4 7 * Added higher-order function `MPProject` for multiprocessing scripts.
5 8 * Added two new Processing Units `PublishData` and `ReceiverData` for sending and receiving dataOut over multiple transports (tcp, ipc, inproc).
6 9 * Added a new graphics Processing Unit `PlotterReceiver`. It is decoupled from the normal processing sequence and supports data generated by multiprocessing scripts.
@@ -50,14 +50,15 class PlotData(Operation, Process):
50 50 __missing = 1E30
51 51
52 52 __attrs__ = ['show', 'save', 'xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax',
53 'zlimits', 'xlabel', 'ylabel', 'cb_label', 'title', 'titles', 'colorbar',
54 'bgcolor', 'width', 'height', 'localtime', 'oneFigure', 'showprofile']
53 'zlimits', 'xlabel', 'ylabel', 'xaxis','cb_label', 'title',
54 'colorbar', 'bgcolor', 'width', 'height', 'localtime', 'oneFigure',
55 'showprofile', 'decimation']
55 56
56 57 def __init__(self, **kwargs):
57 58
58 59 Operation.__init__(self, plot=True, **kwargs)
59 60 Process.__init__(self)
60 self.contador = 0
61
61 62 self.kwargs['code'] = self.CODE
62 63 self.mp = False
63 64 self.data = None
@@ -87,14 +88,14 class PlotData(Operation, Process):
87 88 self.ymin = kwargs.get('ymin', None)
88 89 self.ymax = kwargs.get('ymax', None)
89 90 self.xlabel = kwargs.get('xlabel', None)
90 self.__MAXNUMY = kwargs.get('decimation', 300)
91 self.__MAXNUMY = kwargs.get('decimation', 200)
91 92 self.showSNR = kwargs.get('showSNR', False)
92 93 self.oneFigure = kwargs.get('oneFigure', True)
93 94 self.width = kwargs.get('width', None)
94 95 self.height = kwargs.get('height', None)
95 96 self.colorbar = kwargs.get('colorbar', True)
96 97 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
97 self.titles = ['' for __ in range(16)]
98 self.titles = kwargs.get('titles', [])
98 99 self.polar = False
99 100
100 101 def __fmtTime(self, x, pos):
@@ -359,7 +360,7 class PlotData(Operation, Process):
359 360 if self.xaxis is 'time':
360 361 dt = self.getDateTime(self.max_time)
361 362 xmax = (dt.replace(hour=int(self.xmax), minute=59, second=59) -
362 datetime.datetime(1970, 1, 1)).total_seconds()
363 datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=1)).total_seconds()
363 364 if self.data.localtime:
364 365 xmax += time.timezone
365 366 else:
@@ -369,9 +370,8 class PlotData(Operation, Process):
369 370 ymax = self.ymax if self.ymax else numpy.nanmax(self.y)
370 371
371 372 Y = numpy.array([10, 20, 50, 100, 200, 500, 1000, 2000])
372 i = 1 if numpy.where(ymax < Y)[
373 0][0] < 0 else numpy.where(ymax < Y)[0][0]
374 ystep = Y[i - 1] / 5
373 i = 1 if numpy.where(ymax-ymin < Y)[0][0] < 0 else numpy.where(ymax-ymin < Y)[0][0]
374 ystep = Y[i] / 5
375 375
376 376 for n, ax in enumerate(self.axes):
377 377 if ax.firsttime:
@@ -429,15 +429,15 class PlotData(Operation, Process):
429 429 if self.nrows == 0 or self.nplots == 0:
430 430 log.warning('No data', self.name)
431 431 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
432 fig.canvas.manager.set_window_title(self.CODE)
432 433 continue
433 434
434 435 fig.tight_layout()
435 436 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
436 437 self.getDateTime(self.max_time).strftime('%Y/%m/%d')))
437 # fig.canvas.draw()
438 fig.canvas.draw()
438 439
439 if self.save: # and self.data.ended:
440 self.contador += 1
440 if self.save and self.data.ended:
441 441 channels = range(self.nrows)
442 442 if self.oneFigure:
443 443 label = ''
@@ -445,12 +445,11 class PlotData(Operation, Process):
445 445 label = '_{}'.format(channels[n])
446 446 figname = os.path.join(
447 447 self.save,
448 '{}{}_{}{}.png'.format(
448 '{}{}_{}.png'.format(
449 449 self.CODE,
450 450 label,
451 451 self.getDateTime(self.saveTime).strftime(
452 '%y%m%d_%H%M%S'),
453 str(self.contador),
452 '%y%m%d_%H%M%S'),
454 453 )
455 454 )
456 455 log.log('Saving figure: {}'.format(figname), self.name)
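
With the counter suffix gone, saved figures are keyed only by plot code, channel label and timestamp. A quick illustration of the resulting file name, with assumed values:

    import datetime
    # assumed plot code, channel label and save time, for illustration only
    '{}{}_{}.png'.format('param', '_0', datetime.datetime(2017, 9, 10, 12, 0, 0).strftime('%y%m%d_%H%M%S'))
    # -> 'param_0_170910_120000.png'
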
@@ -898,10 +897,11 class PlotParamData(PlotRTIData):
898 897 self.nplots += 1
899 898
900 899 self.ylabel = 'Height [Km]'
901 self.titles = self.data.parameters \
902 if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)]
903 if self.showSNR:
904 self.titles.append('SNR')
900 if not self.titles:
901 self.titles = self.data.parameters \
902 if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)]
903 if self.showSNR:
904 self.titles.append('SNR')
905 905
906 906 def plot(self):
907 907 self.data.normalize_heights()
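
Because 'titles' is now listed in __attrs__ (first hunk above) and no longer unconditionally rebuilt here, a caller-supplied list survives. A small sketch of the precedence, with placeholder values:

    # placeholder keyword arguments for the parameters plot
    kwargs = dict(titles=['Zonal drift', 'Vertical drift'])
    # a non-empty 'titles' list is used as-is; an empty one falls back to
    # self.data.parameters or the generated 'Param N' defaults
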
@@ -18,3 +18,4 from jroIO_madrigal import *
18 18 from bltrIO_param import *
19 19 from jroIO_bltr import *
20 20 from jroIO_mira35c import *
21 from julIO_param import *
\ No newline at end of file
@@ -138,12 +138,11 class Data(object):
138 138 return self.data[key][self.__times[0]].shape
139 139 return (0,)
140 140
141 def update(self, dataOut):
141 def update(self, dataOut, tm):
142 142 '''
143 143 Update data object with new dataOut
144 144 '''
145
146 tm = dataOut.utctime
145
147 146 if tm in self.__times:
148 147 return
149 148
@@ -578,7 +577,7 class PlotterReceiver(ProcessingUnit, Process):
578 577 while True:
579 578 dataOut = self.receiver.recv_pyobj()
580 579 if not dataOut.flagNoData:
581 if dataOut.type == 'Parameters':
580 if dataOut.type == 'Parameters':
582 581 tm = dataOut.utctimeInit
583 582 else:
584 583 tm = dataOut.utctime
@@ -599,7 +598,7 class PlotterReceiver(ProcessingUnit, Process):
599 598 self.data.setup()
600 599 self.dates.append(dt)
601 600
602 self.data.update(dataOut)
601 self.data.update(dataOut, tm)
603 602
604 603 if dataOut.finished is True:
605 604 self.connections -= 1
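
Taken together, these last hunks make the receiver pick the timestamp before updating the shared buffer. A minimal restatement of the new call pattern, with names as in the hunks above:

    # Parameters-type outputs are stamped with utctimeInit, everything else with utctime
    tm = dataOut.utctimeInit if dataOut.type == 'Parameters' else dataOut.utctime
    self.data.update(dataOut, tm)
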