##// END OF EJS Templates
Fix h5py Dataset value attribute deprecation
jespinoza -
r1360:d1c8bd7ba7f9
parent child
Show More
@@ -1,358 +1,357
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
def ll2xy(lat1, lon1, lat2, lon2):
    '''
    Project point (lat2, lon2) onto a local x/y plane (km) centered at
    (lat1, lon1), using the haversine distance and the initial bearing.
    Returns (x_east, y_north) in km.
    '''

    deg2rad = 0.017453292519943295  # pi / 180
    dlat = (lat2 - lat1) * deg2rad
    dlon = (lon2 - lon1) * deg2rad
    # haversine term
    hav = 0.5 - numpy.cos(dlat) / 2 \
        + numpy.cos(lat1 * deg2rad) * numpy.cos(lat2 * deg2rad) * (1 - numpy.cos(dlon)) / 2
    dist = 12742 * numpy.arcsin(numpy.sqrt(hav))  # 12742 km = 2 * 6371 (Earth diameter)
    # initial bearing from point 1 to point 2
    bearing = numpy.arctan2(
        numpy.sin(dlon) * numpy.cos(lat2 * deg2rad),
        numpy.cos(lat1 * deg2rad) * numpy.sin(lat2 * deg2rad)
        - numpy.sin(lat1 * deg2rad) * numpy.cos(lat2 * deg2rad) * numpy.cos(dlon))
    angle = numpy.pi / 2 - bearing  # compass bearing -> math angle
    return dist * numpy.cos(angle), dist * numpy.sin(angle)
22 22
23 23
def km2deg(km):
    '''
    Convert an arc length along the Earth's surface (km) into the
    angle it subtends at the Earth's center, in degrees.
    '''

    radians = km / EARTH_RADIUS
    return numpy.rad2deg(radians)
30 30
31 31
32 32
class SpectralMomentsPlot(SpectraPlot):
    '''
    Plot for Spectral Moments

    No methods are overridden; only the data key (CODE) and the display
    defaults differ from SpectraPlot.
    '''
    CODE = 'spc_moments'  # key identifying this product in the data buffer
    colormap = 'jet'
    plot_type = 'pcolor'
40 40
41 41
class SnrPlot(RTIPlot):
    '''
    Plot for SNR Data
    '''

    CODE = 'snr'  # key identifying this product in the data buffer
    colormap = 'jet'

    def update(self, dataOut):
        '''Extract SNR from dataOut, converted to dB.

        Returns a (data, meta) tuple; meta is always empty here.
        '''

        data = {
            'snr': 10*numpy.log10(dataOut.data_snr)
        }

        return data, {}
57 57
class DopplerPlot(RTIPlot):
    '''
    Plot for DOPPLER Data (1st moment)
    '''

    CODE = 'dop'  # key identifying this product in the data buffer
    colormap = 'jet'

    def update(self, dataOut):
        '''Extract the Doppler product from dataOut, converted to dB.

        NOTE(review): 10*log10 is applied to data_dop; a first moment can
        be negative, which would yield NaN here — confirm data_dop is
        strictly positive at this point.
        '''

        data = {
            'dop': 10*numpy.log10(dataOut.data_dop)
        }

        return data, {}
73 73
class PowerPlot(RTIPlot):
    '''
    Plot for Power Data (0 moment)
    '''

    CODE = 'pow'  # key identifying this product in the data buffer
    colormap = 'jet'

    def update(self, dataOut):
        '''Extract total power from dataOut, converted to dB.

        Returns a (data, meta) tuple; meta is always empty here.
        '''

        data = {
            'pow': 10*numpy.log10(dataOut.data_pow)
        }

        return data, {}
89 89
class SpectralWidthPlot(RTIPlot):
    '''
    Plot for Spectral Width Data (2nd moment)
    '''

    CODE = 'width'  # key identifying this product in the data buffer
    colormap = 'jet'

    def update(self, dataOut):
        '''Extract spectral width from dataOut (linear units, no dB scaling).

        Returns a (data, meta) tuple; meta is always empty here.
        '''

        data = {
            'width': dataOut.data_width
        }

        return data, {}
105 105
class SkyMapPlot(Plot):
    '''
    Plot for meteors detection data
    '''

    CODE = 'param'

    def setup(self):
        '''Configure a single square polar axis for the sky map.'''

        self.ncols = 1
        self.nrows = 1
        self.width = 7.2
        self.height = 7.2
        self.nplots = 1
        self.xlabel = 'Zonal Zenith Angle (deg)'
        self.ylabel = 'Meridional Zenith Angle (deg)'
        self.polar = True
        self.ymin = -180
        self.ymax = 180
        self.colorbar = False

    def plot(self):
        '''Scatter valid meteor detections on the polar axis.

        Rows of the concatenated parameter array with a non-zero value in
        the last column are discarded as errored detections.
        '''

        arrayParameters = numpy.concatenate(self.data['param'])
        error = arrayParameters[:, -1]  # last column: error flag, 0 == valid
        indValid = numpy.where(error == 0)[0]
        finalMeteor = arrayParameters[indValid, :]
        # NOTE(review): columns 3 and 4 are used as azimuth/zenith — confirm
        # against the producer of 'param'.
        finalAzimuth = finalMeteor[:, 3]
        finalZenith = finalMeteor[:, 4]

        x = finalAzimuth * numpy.pi / 180  # polar axes take angles in radians
        y = finalZenith

        ax = self.axes[0]

        if ax.firsttime:
            # keep the line artist so later calls can just update its data
            ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
        else:
            ax.plot.set_data(x, y)

        dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
        dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
        title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
                                                                                    dt2,
                                                                                    len(x))
        self.titles[0] = title
152 152
153 153
class GenericRTIPlot(Plot):
    '''
    Plot for data_xxxx object

    Draws one RTI (range-time-intensity) panel per row of the 'param'
    data buffer; the attributes to plot are taken from self.attr_data.
    '''

    CODE = 'param'
    colormap = 'viridis'
    plot_type = 'pcolorbuffer'

    def setup(self):
        '''Configure one column of stacked panels, one per parameter row.'''
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = self.data.shape('param')[0]
        self.nplots = self.nrows
        self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})

        if not self.xlabel:
            self.xlabel = 'Time'

        self.ylabel = 'Height [km]'
        if not self.titles:
            self.titles = ['Param {}'.format(x) for x in range(self.nrows)]

    def update(self, dataOut):
        '''Stack the configured dataOut attributes into the 'param' buffer.'''

        data = {
            'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
        }

        meta = {}

        return data, meta

    def plot(self):
        '''Render each parameter row as a pcolormesh panel.'''
        # self.data.normalize_heights()
        self.x = self.data.times
        self.y = self.data.yrange
        self.z = self.data['param']

        # mask NaN/inf so color scaling ignores them
        self.z = numpy.ma.masked_invalid(self.z)

        if self.decimation is None:
            x, y, z = self.fill_gaps(self.x, self.y, self.z)
        else:
            x, y, z = self.fill_gaps(*self.decimate())

        for n, ax in enumerate(self.axes):

            # fall back to data extremes when no explicit limits were given
            self.zmax = self.zmax if self.zmax is not None else numpy.max(
                self.z[n])
            self.zmin = self.zmin if self.zmin is not None else numpy.min(
                self.z[n])

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]

                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                # NOTE(review): ax.collections.remove(...) is deprecated in
                # newer Matplotlib; ax.plt.remove() is the supported way.
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )
226 225
227 226
class PolarMapPlot(Plot):
    '''
    Plot for weather radar

    Mode 'E' draws a PPI-style map in lon/lat; any other mode draws an
    RHI-style range/height panel.
    '''

    CODE = 'param'
    colormap = 'seismic'

    def setup(self):
        '''Configure panels and lon/lat limits from the data metadata.'''
        self.ncols = 1
        self.nrows = 1
        self.width = 9
        self.height = 8
        self.mode = self.data.meta['mode']
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))
        if self.mode == 'E':
            self.xlabel = 'Longitude'
            self.ylabel = 'Latitude'
        else:
            self.xlabel = 'Range (km)'
            self.ylabel = 'Height (km)'
        self.bgcolor = 'white'
        self.cb_labels = self.data.meta['units']
        self.lat = self.data.meta['latitude']
        self.lon = self.data.meta['longitude']
        # convert the user-supplied km limits into degrees around the radar site
        self.xmin, self.xmax = float(
            km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
        self.ymin, self.ymax = float(
            km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
        # self.polar = True

    def plot(self):
        '''Render each channel; in mode 'E' overlay maps, basins and range rings.'''

        for n, ax in enumerate(self.axes):
            data = self.data['param'][self.channels[n]]

            zeniths = numpy.linspace(
                0, self.data.meta['max_range'], data.shape[1])
            if self.mode == 'E':
                # PPI: project (range, azimuth) to lon/lat degrees
                azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
                    theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
                x = km2deg(x) + self.lon
                y = km2deg(y) + self.lat
            else:
                # RHI: plain polar-to-cartesian in km
                azimuths = numpy.radians(self.data.yrange)
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta), r*numpy.sin(theta)
                self.y = zeniths

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                # NOTE(review): ax.collections.remove(...) is deprecated in
                # newer Matplotlib; ax.plt.remove() is the supported way.
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])

            if self.mode == 'A':
                continue

            # plot district names
            # NOTE(review): hard-coded path; the file handle is never closed,
            # and label.decode('utf8') implies the lines are bytes — confirm.
            f = open('/data/workspace/schain_scripts/distrito.csv')
            for line in f:
                label, lon, lat = [s.strip() for s in line.split(',') if s]
                lat = float(lat)
                lon = float(lon)
                # ax.plot(lon, lat, '.b', ms=2)
                ax.text(lon, lat, label.decode('utf8'), ha='center',
                        va='bottom', size='8', color='black')

            # plot limites
            # NOTE(review): Polygon is not imported in this module — this
            # branch would raise NameError when reached; needs
            # matplotlib.patches.Polygon.
            limites = []
            tmp = []
            for line in open('/data/workspace/schain_scripts/lima.csv'):
                if '#' in line:
                    if tmp:
                        limites.append(tmp)
                    tmp = []
                    continue
                values = line.strip().split(',')
                tmp.append((float(values[0]), float(values[1])))
            for points in limites:
                ax.add_patch(
                    Polygon(points, ec='k', fc='none', ls='--', lw=0.5))

            # plot Cuencas
            for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
                f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
                values = [line.strip().split(',') for line in f]
                points = [(float(s[0]), float(s[1])) for s in values]
                ax.add_patch(Polygon(points, ec='b', fc='none'))

            # plot grid
            for r in (15, 30, 45, 60):
                ax.add_artist(plt.Circle((self.lon, self.lat),
                                         km2deg(r), color='0.6', fill=False, lw=0.2))
                ax.text(
                    self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
                    self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
                    '{}km'.format(r),
                    ha='center', va='bottom', size='8', color='0.6', weight='heavy')

        if self.mode == 'E':
            title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
            label = 'E{:02d}'.format(int(self.data.meta['elevation']))
        else:
            title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
            label = 'A{:02d}'.format(int(self.data.meta['azimuth']))

        self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
        self.titles = ['{} {}'.format(
            self.data.parameters[x], title) for x in self.channels]
358 357
@@ -1,462 +1,453
1 1 import os
2 2 import sys
3 3 import glob
4 import fnmatch
5 import datetime
6 import time
7 import re
8 import h5py
9 4 import numpy
10 5
11 import pylab as plb
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar, exp
14 6
# Speed of light approximation used throughout this reader.
# (A duplicate assignment of 299792458 previously preceded this line and
# was immediately overridden; 3e8 is the effective value.)
SPEED_OF_LIGHT = 3e8
17 9
18 10 from .utils import folder_in_range
19 11
20 12 import schainpy.admin
21 13 from schainpy.model.data.jrodata import Spectra
22 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
14 from schainpy.model.proc.jroproc_base import ProcessingUnit
23 15 from schainpy.utils import log
24 from schainpy.model.io.jroIO_base import JRODataReader
16
25 17
def pol2cart(rho, phi):
    '''Convert polar coordinates (rho, phi in radians) to Cartesian (x, y).'''
    return rho * numpy.cos(phi), rho * numpy.sin(phi)
30 22
# On-disk layout of the BLTR .fdt file header: four little-endian uint32
# fields plus a 32-byte site name = 48 bytes total.
FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
    ('FileMgcNumber', '<u4'), # 0x23020100
    ('nFDTdataRecors', '<u4'),
    ('OffsetStartHeader', '<u4'),
    ('RadarUnitId', '<u4'),
    ('SiteName', 'S32'), # Null terminated
])
39 31
class FileHeaderBLTR():
    '''
    48-byte BLTR .fdt file header (see FILE_STRUCTURE).

    The header is read from the file object *fo* on construction.
    '''

    def __init__(self, fo):

        self.fo = fo
        self.size = 48  # header size in bytes: 4 * uint32 + 32-byte name
        self.read()

    def read(self):
        '''Parse one FILE_STRUCTURE record from the current file position.'''

        header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
        self.FileMgcNumber = hex(header['FileMgcNumber'][0])  # stored as hex string
        self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
        self.RadarUnitId = int(header['RadarUnitId'][0])
        self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
        self.SiteName = header['SiteName'][0]

    def write(self, fp):
        '''Serialize this header to *fp* in FILE_STRUCTURE layout.

        Returns 1 on success (binary write via ndarray.tofile).
        '''

        # Bug fix: the tuple must follow FILE_STRUCTURE field order. The
        # previous version omitted OffsetStartHeader, appended self.size
        # (not a header field), and passed FileMgcNumber as a hex string
        # into a '<u4' slot.
        headerTuple = (int(self.FileMgcNumber, 16),
                       self.nFDTdataRecors,
                       self.OffsetStartHeader,
                       self.RadarUnitId,
                       self.SiteName)

        header = numpy.array(headerTuple, FILE_STRUCTURE)
        header.tofile(fp)  # binary write, no separator

        return 1
84 76
85 77 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
86 78 ('RecMgcNumber', '<u4'), # 0x23030001
87 79 ('RecCounter', '<u4'), # Record counter(0,1, ...)
88 80 # Offset to start of next record form start of this record
89 81 ('Off2StartNxtRec', '<u4'),
90 82 # Offset to start of data from start of this record
91 83 ('Off2StartData', '<u4'),
92 84 # Epoch time stamp of start of acquisition (seconds)
93 85 ('nUtime', '<i4'),
94 86 # Millisecond component of time stamp (0,...,999)
95 87 ('nMilisec', '<u4'),
96 88 # Experiment tag name (null terminated)
97 89 ('ExpTagName', 'S32'),
98 90 # Experiment comment (null terminated)
99 91 ('ExpComment', 'S32'),
100 92 # Site latitude (from GPS) in degrees (positive implies North)
101 93 ('SiteLatDegrees', '<f4'),
102 94 # Site longitude (from GPS) in degrees (positive implies East)
103 95 ('SiteLongDegrees', '<f4'),
104 96 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
105 97 ('RTCgpsStatus', '<u4'),
106 98 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
107 99 ('ReceiveFrec', '<u4'), # Receive frequency
108 100 # First local oscillator frequency (Hz)
109 101 ('FirstOsciFrec', '<u4'),
110 102 # (0="O", 1="E", 2="linear 1", 3="linear2")
111 103 ('Polarisation', '<u4'),
112 104 # Receiver filter settings (0,1,2,3)
113 105 ('ReceiverFiltSett', '<u4'),
114 106 # Number of modes in use (1 or 2)
115 107 ('nModesInUse', '<u4'),
116 108 # Dual Mode index number for these data (0 or 1)
117 109 ('DualModeIndex', '<u4'),
118 110 # Dual Mode range correction for these data (m)
119 111 ('DualModeRange', '<u4'),
120 112 # Number of digital channels acquired (2*N)
121 113 ('nDigChannels', '<u4'),
122 114 # Sampling resolution (meters)
123 115 ('SampResolution', '<u4'),
124 116 # Number of range gates sampled
125 117 ('nHeights', '<u4'),
126 118 # Start range of sampling (meters)
127 119 ('StartRangeSamp', '<u4'),
128 120 ('PRFhz', '<u4'), # PRF (Hz)
129 121 ('nCohInt', '<u4'), # Integrations
130 122 # Number of data points transformed
131 123 ('nProfiles', '<u4'),
132 124 # Number of receive beams stored in file (1 or N)
133 125 ('nChannels', '<u4'),
134 126 ('nIncohInt', '<u4'), # Number of spectral averages
135 127 # FFT windowing index (0 = no window)
136 128 ('FFTwindowingInd', '<u4'),
137 129 # Beam steer angle (azimuth) in degrees (clockwise from true North)
138 130 ('BeamAngleAzim', '<f4'),
139 131 # Beam steer angle (zenith) in degrees (0=> vertical)
140 132 ('BeamAngleZen', '<f4'),
141 133 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
142 134 ('AntennaCoord0', '<f4'),
143 135 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
144 136 ('AntennaAngl0', '<f4'),
145 137 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
146 138 ('AntennaCoord1', '<f4'),
147 139 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
148 140 ('AntennaAngl1', '<f4'),
149 141 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
150 142 ('AntennaCoord2', '<f4'),
151 143 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
152 144 ('AntennaAngl2', '<f4'),
153 145 # Receiver phase calibration (degrees) - N values
154 146 ('RecPhaseCalibr0', '<f4'),
155 147 # Receiver phase calibration (degrees) - N values
156 148 ('RecPhaseCalibr1', '<f4'),
157 149 # Receiver phase calibration (degrees) - N values
158 150 ('RecPhaseCalibr2', '<f4'),
159 151 # Receiver amplitude calibration (ratio relative to receiver one) - N values
160 152 ('RecAmpCalibr0', '<f4'),
161 153 # Receiver amplitude calibration (ratio relative to receiver one) - N values
162 154 ('RecAmpCalibr1', '<f4'),
163 155 # Receiver amplitude calibration (ratio relative to receiver one) - N values
164 156 ('RecAmpCalibr2', '<f4'),
165 157 # Receiver gains in dB - N values
166 158 ('ReceiverGaindB0', '<i4'),
167 159 # Receiver gains in dB - N values
168 160 ('ReceiverGaindB1', '<i4'),
169 161 # Receiver gains in dB - N values
170 162 ('ReceiverGaindB2', '<i4'),
171 163 ])
172 164
173 165
class RecordHeaderBLTR():
    '''
    Record header of a BLTR .fdt file (RECORD_STRUCTURE, 180 + 20*N bytes).

    read(block) seeks to record *block*, unpacks every header field into
    attributes and returns 1 when the record offset is consistent with
    the stored record counter, 0 otherwise.
    '''

    def __init__(self, fo):

        self.fo = fo
        self.OffsetStartHeader = 48    # records start after the 48-byte file header
        self.Off2StartNxtRec = 811248  # default record stride; updated by read()

    def read(self, block):
        '''Read record header number *block* from the file.'''
        OffRHeader = self.OffsetStartHeader + block * self.Off2StartNxtRec
        self.fo.seek(OffRHeader, os.SEEK_SET)
        header = numpy.fromfile(self.fo, RECORD_STRUCTURE, 1)
        self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
        self.RecCounter = int(header['RecCounter'][0])
        self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
        self.Off2StartData = int(header['Off2StartData'][0])
        self.nUtime = header['nUtime'][0]
        self.nMilisec = header['nMilisec'][0]
        self.ExpTagName = '' # str(header['ExpTagName'][0])
        self.ExpComment = '' # str(header['ExpComment'][0])
        self.SiteLatDegrees = header['SiteLatDegrees'][0]
        self.SiteLongDegrees = header['SiteLongDegrees'][0]
        self.RTCgpsStatus = header['RTCgpsStatus'][0]
        self.TransmitFrec = header['TransmitFrec'][0]
        self.ReceiveFrec = header['ReceiveFrec'][0]
        self.FirstOsciFrec = header['FirstOsciFrec'][0]
        self.Polarisation = header['Polarisation'][0]
        self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
        self.nModesInUse = header['nModesInUse'][0]
        self.DualModeIndex = header['DualModeIndex'][0]
        self.DualModeRange = header['DualModeRange'][0]
        self.nDigChannels = header['nDigChannels'][0]
        self.SampResolution = header['SampResolution'][0]
        self.nHeights = header['nHeights'][0]
        self.StartRangeSamp = header['StartRangeSamp'][0]
        self.PRFhz = header['PRFhz'][0]
        self.nCohInt = header['nCohInt'][0]
        self.nProfiles = header['nProfiles'][0]
        self.nChannels = header['nChannels'][0]
        self.nIncohInt = header['nIncohInt'][0]
        self.FFTwindowingInd = header['FFTwindowingInd'][0]
        self.BeamAngleAzim = header['BeamAngleAzim'][0]
        self.BeamAngleZen = header['BeamAngleZen'][0]
        self.AntennaCoord0 = header['AntennaCoord0'][0]
        self.AntennaAngl0 = header['AntennaAngl0'][0]
        self.AntennaCoord1 = header['AntennaCoord1'][0]
        self.AntennaAngl1 = header['AntennaAngl1'][0]
        self.AntennaCoord2 = header['AntennaCoord2'][0]
        self.AntennaAngl2 = header['AntennaAngl2'][0]
        self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
        self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
        self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
        self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
        self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
        self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
        self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
        self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
        self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
        self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)  # unambiguous range per pulse
        self.RHsize = 180 + 20 * self.nChannels
        self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
        # expected offset of this record according to its own counter
        endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec

        if OffRHeader > endFp:
            # Bug fix: the original message interpolated an undefined name
            # `fp`, which raised NameError whenever this branch fired.
            sys.stderr.write(
                "Warning %s: Size value read from System Header is lower than it has to be\n" % self.fo)
            return 0

        if OffRHeader < endFp:
            sys.stderr.write(
                "Warning %s: Size value read from System Header size is greater than it has to be\n" % self.fo)
            return 0

        return 1
249 241
250 242
class BLTRSpectraReader (ProcessingUnit):
    '''
    Reader for BLTR .fdt spectra files.

    Walks the files matched by path/startDate/endDate, reads one record
    per call and fills self.dataOut (a Spectra object) with the auto- and
    cross-spectra plus the antenna geometry.
    '''

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.ext = ".fdt"
        self.optchar = "P"
        self.fpFile = None
        self.fp = None
        self.BlockCounter = 0
        self.fileSizeByHeader = None
        self.filenameList = []
        self.fileSelector = 0
        self.Off2StartNxtRec = 0
        self.RecCounter = 0
        self.flagNoMoreFiles = 0
        self.data_spc = None
        self.data_cspc = None
        self.path = None
        self.OffsetStartHeader = 0
        self.Off2StartData = 0
        self.ipp = 0
        self.nFDTdataRecors = 0
        self.blocksize = 0
        self.dataOut = Spectra()
        self.dataOut.flagNoData = False

    def search_files(self):
        '''
        Function that indicates the number of .fdt files that exist in the folder to be read.
        It also creates an organized list with the names of the files to read.
        '''

        files = glob.glob(os.path.join(self.path, '*{}'.format(self.ext)))
        files = sorted(files)
        for f in files:
            filename = f.split('/')[-1]
            if folder_in_range(filename.split('.')[1], self.startDate, self.endDate, '%Y%m%d'):
                self.filenameList.append(f)

    def run(self, **kwargs):
        '''
        This method will be the one that will initiate the data entry, will be called constantly.
        You should first verify that your Setup () is set up and then continue to acquire
        the data to be processed with getData ().
        '''
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=None,
              endTime=None,
              walk=True,
              code=None,
              online=False,
              mode=None,
              **kwargs):
        '''Store the search parameters, find the files and open the first one.'''

        self.isConfig = True

        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.walk = walk
        self.mode = int(mode)
        self.search_files()
        if self.filenameList:
            self.readFile()

    def getData(self):
        '''
        Before starting this function, you should check that there is still an unread file,
        If there are still blocks to read or if the data block is empty.

        You should call the file "read".

        '''

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            raise schainpy.admin.SchainError('No more files')

        self.readBlock()

    def readFile(self):
        '''
        You must indicate if you are reading in Online or Offline mode and load the
        The parameters for this file reading mode.

        Then you must do 2 actions:

        1. Get the BLTR FileHeader.
        2. Start reading the first block.
        '''

        if self.fileSelector < len(self.filenameList):
            log.success('Opening file: {}'.format(self.filenameList[self.fileSelector]), self.name)
            # Bug fix: .fdt files are binary and are consumed with
            # numpy.fromfile, so they must be opened in 'rb' mode (text
            # mode breaks binary reads under Python 3).
            self.fp = open(self.filenameList[self.fileSelector], 'rb')
            self.fheader = FileHeaderBLTR(self.fp)
            self.rheader = RecordHeaderBLTR(self.fp)
            self.nFDTdataRecors = self.fheader.nFDTdataRecors
            self.fileSelector += 1
            self.BlockCounter = 0
            return 1
        else:
            self.flagNoMoreFiles = True
            self.dataOut.flagNoData = True
            return 0

    def readBlock(self):
        '''
        It should be checked if the block has data, if it is not passed to the next file.

        Then the following is done:

        1. Read the RecordHeader
        2. Fill the buffer with the current block number.

        '''

        if self.BlockCounter == self.nFDTdataRecors:
            if not self.readFile():
                return

        # mode selects which record of each dual-mode pair is read
        if self.mode == 1:
            self.rheader.read(self.BlockCounter+1)
        elif self.mode == 0:
            self.rheader.read(self.BlockCounter)

        self.RecCounter = self.rheader.RecCounter
        self.OffsetStartHeader = self.rheader.OffsetStartHeader
        self.Off2StartNxtRec = self.rheader.Off2StartNxtRec
        self.Off2StartData = self.rheader.Off2StartData
        self.nProfiles = self.rheader.nProfiles
        self.nChannels = self.rheader.nChannels
        self.nHeights = self.rheader.nHeights
        self.frequency = self.rheader.TransmitFrec
        self.DualModeIndex = self.rheader.DualModeIndex
        self.pairsList = [(0, 1), (0, 2), (1, 2)]
        self.dataOut.pairsList = self.pairsList
        self.nRdPairs = len(self.dataOut.pairsList)
        self.dataOut.nRdPairs = self.nRdPairs
        self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
        self.dataOut.channelList = range(self.nChannels)
        self.dataOut.nProfiles=self.rheader.nProfiles
        self.dataOut.nIncohInt=self.rheader.nIncohInt
        self.dataOut.nCohInt=self.rheader.nCohInt
        self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
        self.dataOut.PRF=self.rheader.PRFhz
        self.dataOut.nFFTPoints=self.rheader.nProfiles
        self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
        self.dataOut.timeZone = 0
        self.dataOut.useLocalTime = False
        self.dataOut.nmodes = 2
        log.log('Reading block {} - {}'.format(self.BlockCounter, self.dataOut.datatime), self.name)
        OffDATA = self.OffsetStartHeader + self.RecCounter * \
            self.Off2StartNxtRec + self.Off2StartData
        self.fp.seek(OffDATA, os.SEEK_SET)

        self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
        self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
        self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
        self.data_block = numpy.transpose(self.data_block, (1,2,0))
        copy = self.data_block.copy()
        spc = copy * numpy.conjugate(copy)
        self.data_spc = numpy.absolute(spc)  # absolute value (magnitude)

        cspc = self.data_block.copy()
        self.data_cspc = self.data_block.copy()

        for i in range(self.nRdPairs):

            chan_index0 = self.dataOut.pairsList[i][0]
            chan_index1 = self.dataOut.pairsList[i][1]

            self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :])

        '''Getting Eij and Nij'''
        (AntennaX0, AntennaY0) = pol2cart(
            self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180)
        (AntennaX1, AntennaY1) = pol2cart(
            self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180)
        (AntennaX2, AntennaY2) = pol2cart(
            self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180)

        # east/north baselines between the three antennas
        E01 = AntennaX0 - AntennaX1
        N01 = AntennaY0 - AntennaY1

        E02 = AntennaX0 - AntennaX2
        N02 = AntennaY0 - AntennaY2

        E12 = AntennaX1 - AntennaX2
        N12 = AntennaY1 - AntennaY2

        self.ChanDist = numpy.array(
            [[E01, N01], [E02, N02], [E12, N12]])

        self.dataOut.ChanDist = self.ChanDist

        # advance two records: dual-mode files interleave both modes,
        # and the mode branch above picks one record of each pair
        self.BlockCounter += 2
        self.dataOut.data_spc = self.data_spc
        self.dataOut.data_cspc =self.data_cspc
@@ -1,627 +1,627
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
class HDFReader(Reader, ProcessingUnit):
    """Processing unit to read HDF5 format files.

    This unit reads HDF5 files created with `HDFWriter`; by default the file
    contains two groups, Data and Metadata, and every variable found is set
    as a `dataOut` attribute.
    It is possible to read any HDF5 file by giving its structure in the
    `description` parameter; extra values can be added to the metadata with
    the `extras` parameter.

    Parameters:
    -----------
    path : str
        Path where files are located.
    startDate : date
        Start date of the files
    endDate : list
        End date of the files
    startTime : time
        Start time of the files
    endTime : time
        End time of the files
    description : dict, optional
        Dictionary with the description of the HDF5 file
    extras : dict, optional
        Dictionary with extra metadata to be be added to `dataOut`

    Examples
    --------

    desc = {
        'Data': {
            'data_output': ['u', 'v', 'w'],
            'utctime': 'timestamps',
        } ,
        'Metadata': {
            'heightList': 'heights'
        }
    }

    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    extras = {
        'timeZone': 300
    }

    reader = project.addReadUnit(
        name='HDFReader',
        path='/path/to/files',
        startDate='2019/01/01',
        endDate='2019/01/31',
        startTime='00:00:00',
        endTime='23:59:59',
        # description=json.dumps(desc),
        # extras=json.dumps(extras),
        )

    """

    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']

    def __init__(self):
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()
        self.ext = ".hdf5"
        self.optchar = "D"
        self.meta = {}
        self.data = {}
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.description = {}
        self.extras = {}
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"

    def setup(self, **kwargs):
        """Resolve the list of files to read (online or offline) and open the first one."""

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt)
                try:
                    fullpath = next(fullpath)
                # Fix: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
                # catch Exception so Ctrl-C still works while the generator's
                # StopIteration (and any search error) keeps meaning "no file yet".
                except Exception:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not(fullpath):
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            # Filename layout assumed: X YYYY DDD SSS .ext (optchar, year, doy, set)
            pathname, filename = os.path.split(fullpath)
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return

    def readFirstHeader(self):
        '''Read metadata and data'''

        self.__readMetadata()
        self.__readData()
        self.__setBlockList()

        # Recreate dataOut as the concrete type stored in the file, if any
        if 'type' in self.meta:
            self.dataOut = eval(self.meta['type'])()

        for attr in self.meta:
            setattr(self.dataOut, attr, self.meta[attr])

        self.blockIndex = 0

        return

    def __setBlockList(self):
        '''
        Selects the data within the times defined

        self.fp
        self.startTime
        self.endTime
        self.blockList
        self.blocksPerFile

        '''

        startTime = self.startTime
        endTime = self.endTime

        thisUtcTime = self.data['utctime']
        # Assumes at least two samples per file; minimum gap is taken as the interval
        self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])

        thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)
        return

    def __readMetadata(self):
        '''
        Reads Metadata
        '''

        meta = {}

        if self.description:
            for key, value in self.description['Metadata'].items():
                meta[key] = self.fp[value][()]
        else:
            grp = self.fp['Metadata']
            for name in grp:
                meta[name] = grp[name][()]

        if self.extras:
            for key, value in self.extras.items():
                meta[key] = value
        self.meta = meta

        return

    def __readData(self):
        '''Read the Data group (or the datasets named in `description`) into self.data.'''

        data = {}

        if self.description:
            for key, value in self.description['Data'].items():
                if isinstance(value, str):
                    if isinstance(self.fp[value], h5py.Dataset):
                        data[key] = self.fp[value][()]
                    elif isinstance(self.fp[value], h5py.Group):
                        array = []
                        for ch in self.fp[value]:
                            array.append(self.fp[value][ch][()])
                        data[key] = numpy.array(array)
                elif isinstance(value, list):
                    array = []
                    for ch in value:
                        array.append(self.fp[ch][()])
                    data[key] = numpy.array(array)
        else:
            grp = self.fp['Data']
            for name in grp:
                if isinstance(grp[name], h5py.Dataset):
                    array = grp[name][()]
                elif isinstance(grp[name], h5py.Group):
                    array = []
                    for ch in grp[name]:
                        array.append(grp[name][ch][()])
                    array = numpy.array(array)
                else:
                    log.warning('Unknown type: {}'.format(name))

                if name in self.description:
                    key = self.description[name]
                else:
                    key = name
                data[key] = array

        self.data = data
        return

    def getData(self):
        """Copy the current block's values into dataOut and advance the block index."""

        for attr in self.data:
            if self.data[attr].ndim == 1:
                setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
            else:
                setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])

        self.dataOut.flagNoData = False
        self.blockIndex += 1

        log.log("Block No. {}/{} -> {}".format(
            self.blockIndex,
            self.blocksPerFile,
            self.dataOut.datatime.ctime()), self.name)

        return

    def run(self, **kwargs):
        """Entry point: configure on first call, roll files as blocks are exhausted."""

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        if self.blockIndex == self.blocksPerFile:
            self.setNextFile()

        self.getData()

        return
283 283
@MPDecorator
class HDFWriter(Operation):
    """Operation to write HDF5 files.

    The HDF5 file contains by default two groups Data and Metadata where
    you can save any `dataOut` attribute specified by `dataList` and `metadataList`
    parameters, data attributes are normaly time dependent where the metadata
    are not.
    It is possible to customize the structure of the HDF5 file with the
    optional description parameter see the examples.

    Parameters:
    -----------
    path : str
        Path where files will be saved.
    blocksPerFile : int
        Number of blocks per file
    metadataList : list
        List of the dataOut attributes that will be saved as metadata
    dataList : int
        List of the dataOut attributes that will be saved as data
    setType : bool
        If True the name of the files corresponds to the timestamp of the data
    description : dict, optional
        Dictionary with the desired description of the HDF5 file

    Examples
    --------

    desc = {
        'data_output': {'winds': ['z', 'w', 'v']},
        'utctime': 'timestamps',
        'heightList': 'heights'
        }
    desc = {
        'data_output': ['z', 'w', 'v'],
        'utctime': 'timestamps',
        'heightList': 'heights'
        }
    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    writer = proc_unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value='/path/to/file')
    writer.addParameter(name='blocksPerFile', value='32')
    writer.addParameter(name='metadataList', value='heightList,timeZone')
    writer.addParameter(name='dataList',value='data_output,utctime')
    # writer.addParameter(name='description',value=json.dumps(desc))

    """

    ext = ".hdf5"
    optchar = "D"
    filename = None
    path = None
    setFile = None
    fp = None
    firsttime = True
    #Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    #Data Arrays
    dataList = None
    metadataList = None
    currentDay = None
    lastTime = None

    def __init__(self):

        Operation.__init__(self)
        return

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
        """Store configuration and build the dataset descriptors (dsList) from dataOut."""
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = [s.strip() for s in dataList]
        self.setType = setType
        self.description = description

        if self.metadataList is None:
            self.metadataList = self.dataOut.metadata_list

        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            if hasattr(self.dataOut, self.dataList[i]):
                dataAux = getattr(self.dataOut, self.dataList[i])
                dsDict['variable'] = self.dataList[i]
            else:
                # Fix: the placeholder was never formatted; report the missing
                # attribute name (matches the style of writeMetadata's warning)
                log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
                continue

            if dataAux is None:
                continue
            # Fix: numpy.float was a deprecated (now removed) alias of builtin
            # float; numpy.floating covers all numpy float scalar types
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]
                dsDict['dtype'] = dataAux.dtype

            dsList.append(dsDict)

        self.dsList = dsList
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        """Return True when the current data belongs to a new file (day change or >3h gap)."""
        currentTime = self.dataOut.utctime
        timeTuple = time.localtime(currentTime)
        dataDay = timeTuple.tm_yday

        if self.lastTime is None:
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # Roll the file if the day changed or the gap between samples exceeds 3 hours
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            # NOTE(review): lastTime is intentionally not refreshed on a day
            # change in the original logic — confirm before altering
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
            dataList=None, setType=None, description=None):
        """Entry point: configure and open the first file on first call, then write a block."""

        self.dataOut = dataOut
        if not(self.isConfig):
            # Fix: avoid mutable default arguments ([], {}) in the signature;
            # None sentinels are converted here, preserving the old behavior
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList,
                       dataList=[] if dataList is None else dataList,
                       setType=setType,
                       description={} if description is None else description)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        """Create the next output file (Dyyyydddsss.hdf5) and write its metadata/datasets."""

        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # The filename must have the following layout:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # resume the set counter from the last existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1  # initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext)
        else:
            # set number encodes the time of day (minutes since midnight)
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext)

        self.filename = os.path.join(path, subfolder, file)

        # Setting HDF5 File
        self.fp = h5py.File(self.filename, 'w')
        # write metadata
        self.writeMetadata(self.fp)
        # Write data
        self.writeData(self.fp)

    def getLabel(self, name, x=None):
        """Map a dataOut attribute name to its HDF5 label using `description`.

        With x=None return the group/dataset label for `name`; with an integer
        x return the label of the x-th channel dataset.
        """

        if x is None:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], str):
                    return data[name]
                elif isinstance(data[name], list):
                    return None
                elif isinstance(data[name], dict):
                    # a dict maps {label: [channel labels]}; the (single) key is the label
                    for key, value in data[name].items():
                        return key
            return name
        else:
            if 'Metadata' in self.description:
                meta = self.description['Metadata']
            else:
                meta = self.description
            if name in meta:
                if isinstance(meta[name], list):
                    return meta[name][x]
                elif isinstance(meta[name], dict):
                    for key, value in meta[name].items():
                        return value[x]
            if 'cspc' in name:
                return 'pair{:02d}'.format(x)
            else:
                return 'channel{:02d}'.format(x)

    def writeMetadata(self, fp):
        """Write every metadataList attribute as a dataset in the Metadata group."""

        if self.description:
            if 'Metadata' in self.description:
                grp = fp.create_group('Metadata')
            else:
                grp = fp
        else:
            grp = fp.create_group('Metadata')

        for i in range(len(self.metadataList)):
            if not hasattr(self.dataOut, self.metadataList[i]):
                log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                continue
            value = getattr(self.dataOut, self.metadataList[i])
            # store booleans as 0/1 for HDF5 compatibility
            if isinstance(value, bool):
                if value is True:
                    value = 1
                else:
                    value = 0
            grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
        return

    def writeData(self, fp):
        """Create (empty, blocksPerFile-long) datasets for every dataList variable."""

        if self.description:
            if 'Data' in self.description:
                grp = fp.create_group('Data')
            else:
                grp = fp
        else:
            grp = fp.create_group('Data')

        dtsets = []
        data = []

        for dsInfo in self.dsList:
            if dsInfo['nDim'] == 0:
                # scalar per block -> 1-D dataset
                ds = grp.create_dataset(
                    self.getLabel(dsInfo['variable']),
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                data.append((dsInfo['variable'], -1))
            else:
                # one dataset per leading-axis element (channel/pair)
                label = self.getLabel(dsInfo['variable'])
                if label is not None:
                    sgrp = grp.create_group(label)
                else:
                    sgrp = grp
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        self.getLabel(dsInfo['variable'], i),
                        (self.blocksPerFile, ) + dsInfo['shape'][1:],
                        chunks=True,
                        dtype=dsInfo['dtype'])
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        """Write the current dataOut block into the open file, rolling files as needed."""

        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)

        return

    def closeFile(self):
        """Shrink datasets to the written length and close the file handle."""

        if self.blockIndex != self.blocksPerFile:
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        if self.fp:
            self.fp.flush()
            self.fp.close()

    def close(self):

        self.closeFile()
@@ -1,402 +1,399
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 import copy
9 8 import datetime
10 9 import time
11 from time import gmtime
12 10
13 from numpy import transpose
14
15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
16 12 from schainpy.model.data.jrodata import Parameters
17 13
18 14
class BLTRParametersProc(ProcessingUnit):
    '''
    Processing unit for BLTR parameters data (winds)

    Inputs:
        self.dataOut.nmodes - Number of operation modes
        self.dataOut.nchannels - Number of channels
        self.dataOut.nranges - Number of ranges

        self.dataOut.data_snr - SNR array
        self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
        self.dataOut.height - Height array (km)
        self.dataOut.time - Time array (seconds)

        self.dataOut.fileIndex -Index of the file currently read
        self.dataOut.lat - Latitude coordinate of BLTR location

        self.dataOut.doy - Experiment doy (number of the day in the current year)
        self.dataOut.month - Experiment month
        self.dataOut.day - Experiment day
        self.dataOut.year - Experiment year
    '''

    def __init__(self):
        '''
        Inputs: None
        '''
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()

    def setup(self, mode):
        '''
        Store the selected operation mode on the output container.
        '''
        self.dataOut.mode = mode

    def run(self, mode, snr_threshold=None):
        '''
        Inputs:
            mode = High resolution (0) or Low resolution (1) data
            snr_threshold = snr filter value
        '''

        # One-time configuration of the selected mode
        if not self.isConfig:
            self.setup(mode)
            self.isConfig = True

        if self.dataIn.type == 'Parameters':
            self.dataOut.copy(self.dataIn)

        # Keep only the slices that belong to the requested mode
        self.dataOut.data_param = self.dataOut.data[mode]
        self.dataOut.heightList = self.dataOut.height[0]
        self.dataOut.data_snr = self.dataOut.data_snr[mode]

        # Channel-averaged SNR in dB, published with a leading singleton axis
        snr_avg = numpy.average(self.dataOut.data_snr, axis=0)
        snr_avg_db = 10 * numpy.log10(snr_avg)
        self.dataOut.data_snr_avg_db = snr_avg_db.reshape(1, *snr_avg_db.shape)

        # Optionally blank the three velocity components where SNR is too low
        if snr_threshold is not None:
            for component in range(3):
                self.dataOut.data_param[component][snr_avg_db <= snr_threshold] = numpy.nan
77 74
78 75 # TODO
79 76
class OutliersFilter(Operation):
    # Outlier rejection for BLTR wind components using a smooth/median
    # residual threshold, plus helper filters and a time/height averaging
    # routine.

    def __init__(self):
        '''
        '''
        Operation.__init__(self)

    def run(self, svalue2, method, factor, filter, npoints=9):
        '''
        Inputs:
            svalue - string to select array velocity
            svalue2 - string to choose axis filtering
            method - 0 for SMOOTH or 1 for MEDIAN
            factor - number used to set threshold
            filter - 1 for data filtering using the standard deviation criteria else 0
            npoints - number of points for mask filter
        '''

        # NOTE(review): `svalue` is documented above and used below but is NOT
        # a parameter of this method and is not defined anywhere in this scope;
        # `output_array` and `m` below are likewise undefined. This method
        # appears to have lost part of its code in a refactor and will raise
        # NameError if called — confirm the intended interface before fixing.
        print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor))

        yaxis = self.dataOut.heightList
        xaxis = numpy.array([[self.dataOut.utctime]])

        # Zonal
        value_temp = self.dataOut.data_output[0]

        # Zonal
        # NOTE(review): comment says "Zonal" but index 1 is presumably the
        # meridional component; each assignment overwrites the previous one,
        # so only the vertical array below is actually filtered.
        value_temp = self.dataOut.data_output[1]

        # Vertical
        value_temp = numpy.transpose(self.dataOut.data_output[2])

        htemp = yaxis
        std = value_temp
        for h in range(len(htemp)):
            nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
            minvalid = npoints

            # only if valid values greater than the minimum required (10%)
            if nvalues_valid > minvalid:

                if method == 0:
                    # SMOOTH: residual against a running-mean of the series
                    w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)

                if method == 1:
                    # MEDIAN: residual against a running-median of the series
                    w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)

                # sample standard deviation of the finite residuals
                dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)

                # reject samples whose residual exceeds factor * sigma
                threshold = dw*factor
                value_temp[numpy.where(w > threshold),h] = numpy.nan
                value_temp[numpy.where(w < -1*threshold),h] = numpy.nan

        # At the end
        if svalue2 == 'inHeight':
            value_temp = numpy.transpose(value_temp)
            # NOTE(review): `output_array` and `m` are undefined here — see note above
            output_array[:,m] = value_temp

        if svalue == 'zonal':
            self.dataOut.data_output[0] = output_array

        elif svalue == 'meridional':
            self.dataOut.data_output[1] = output_array

        elif svalue == 'vertical':
            self.dataOut.data_output[2] = output_array

        return self.dataOut.data_output

    def Median(self,input,width):
        '''
        Running-median filter (NaN-aware); edges are copied from the input.

        Inputs:
            input - Velocity array
            width - Number of points for mask filter

        '''

        # only odd window widths are processed; even widths return None implicitly
        if numpy.mod(width,2) == 1:
            pc = int((width - 1) / 2)
            cont = 0
            output = []

            for i in range(len(input)):
                if i >= pc and i < len(input) - pc:
                    new2 = input[i-pc:i+pc+1]
                    # median over the finite values of the window only
                    temp = numpy.where(numpy.isfinite(new2))
                    new = new2[temp]
                    value = numpy.median(new)
                    output.append(value)

            output = numpy.array(output)
            # pad both edges with the raw input so the length is preserved
            output = numpy.hstack((input[0:pc],output))
            output = numpy.hstack((output,input[-pc:len(input)]))

            return output

    def Smooth(self,input,width,edge_truncate = None):
        '''
        Boxcar (moving-average) filter with optional edge truncation and
        NaN masking: windows containing NaNs are restored to NaN afterwards.

        Inputs:
            input - Velocity array
            width - Number of points for mask filter
            edge_truncate - 1 for truncate the convolution product else

        '''

        # force an odd effective window width
        if numpy.mod(width,2) == 0:
            real_width = width + 1
            nzeros = width / 2
        else:
            real_width = width
            nzeros = (width - 1) / 2

        half_width = int(real_width)/2
        length = len(input)

        gate = numpy.ones(real_width,dtype='float')
        norm_of_gate = numpy.sum(gate)

        # replace NaNs with 0 for the convolution, remembering where they were
        nan_process = 0
        nan_id = numpy.where(numpy.isnan(input))
        if len(nan_id[0]) > 0:
            nan_process = 1
            pb = numpy.zeros(len(input))
            pb[nan_id] = 1.
            input[nan_id] = 0.

        if edge_truncate == True:
            output = numpy.convolve(input/norm_of_gate,gate,mode='same')
        elif edge_truncate == False or edge_truncate == None:
            output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
            output = numpy.hstack((input[0:half_width],output))
            output = numpy.hstack((output,input[len(input)-half_width:len(input)]))

        if nan_process:
            # mark any output sample whose window touched a NaN as NaN again
            pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
            pb = numpy.hstack((numpy.zeros(half_width),pb))
            pb = numpy.hstack((pb,numpy.zeros(half_width)))
            output[numpy.where(pb > 0.9999)] = numpy.nan
            input[nan_id] = numpy.nan   # NOTE: the caller's array is mutated in place
        return output

    def Average(self,aver=0,nhaver=1):
        '''
        Consensus averaging of the zonal/meridional/vertical winds over a
        time period (taver[aver] hours) and optionally over nhaver heights.

        Inputs:
            aver - Indicates the time period over which is averaged or consensus data
            nhaver - Indicates the decimation factor in heights

        '''
        nhpoints = 48

        # site latitudes used to pick the height cutoff hcm
        lat_piura = -5.17
        lat_huancayo = -12.04
        lat_porcuya = -5.8

        if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12

        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12


        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
            hcm = 5.#2

        # minimum valid-data fraction and available averaging periods (hours)
        pdata = 0.2
        taver = [1,2,3,4,6,8,12,24]
        t0 = 0
        tf = 24
        ntime =(tf-t0)/taver[aver]
        ti = numpy.arange(ntime)
        tf = numpy.arange(ntime) + taver[aver]


        old_height = self.dataOut.heightList

        if nhaver > 1:
            # build the decimated height grid (one per mode)
            num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
            deltha = 0.05*nhaver
            minhvalid = pdata*nhaver
            for im in range(self.dataOut.nmodes):
                new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.


        data_fHeigths_List = []
        data_fZonal_List = []
        data_fMeridional_List = []
        data_fVertical_List = []
        startDTList = []


        for i in range(ntime):
            height = old_height

            # period limits in local time (UTC-5)
            start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
            stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)


            limit_sec1 = time.mktime(start.timetuple())
            limit_sec2 = time.mktime(stop.timetuple())

            # indices of samples falling inside [limit_sec1, limit_sec2)
            t1 = numpy.where(self.f_timesec >= limit_sec1)
            t2 = numpy.where(self.f_timesec < limit_sec2)
            time_select = []
            for val_sec in t1[0]:
                if val_sec in t2[0]:
                    time_select.append(val_sec)


            time_select = numpy.array(time_select,dtype = 'int')
            minvalid = numpy.ceil(pdata*nhpoints)

            zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if nhaver > 1:
                new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if len(time_select) > minvalid:
                time_average = self.f_timesec[time_select]

                for im in range(self.dataOut.nmodes):

                    # nan-mean of each component over the selected times,
                    # requiring at least `minvalid` finite samples
                    for ih in range(self.dataOut.nranges):
                        if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
                            zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
                            mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
                            ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))

                    if nhaver > 1:
                        # additional height decimation of the time averages
                        for ih in range(num_hei):
                            hvalid = numpy.arange(nhaver) + nhaver*ih

                            if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
                                new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
                                new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
                                new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
                if nhaver > 1:
                    zon_aver = new_zon_aver
                    mer_aver = new_mer_aver
                    ver_aver = new_ver_aver
                    height = new_height


                tstart = time_average[0]
                tend = time_average[-1]
                startTime = time.gmtime(tstart)

                year = startTime.tm_year
                month = startTime.tm_mon
                day = startTime.tm_mday
                hour = startTime.tm_hour
                minute = startTime.tm_min
                second = startTime.tm_sec

                startDTList.append(datetime.datetime(year,month,day,hour,minute,second))


                # concatenate the per-mode height/velocity arrays, keeping
                # only heights below/above hcm depending on the mode
                o_height = numpy.array([])
                o_zon_aver = numpy.array([])
                o_mer_aver = numpy.array([])
                o_ver_aver = numpy.array([])
                if self.dataOut.nmodes > 1:
                    for im in range(self.dataOut.nmodes):

                        if im == 0:
                            h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                        else:
                            h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))


                        ht = h_select[0]

                        o_height = numpy.hstack((o_height,height[im,ht]))
                        o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                        o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                        o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


                else:
                    h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                    ht = h_select[0]
                    # NOTE(review): `im` here is the leftover loop variable from
                    # the averaging loop above (0 when nmodes == 1) — confirm
                    o_height = numpy.hstack((o_height,height[im,ht]))
                    o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                    o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                    o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


        return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401 398
402 399
General Comments 0
You need to be logged in to leave comments. Login now