@@ -1,358 +1,357 @@
import os
import datetime
import numpy

from schainpy.model.graphics.jroplot_base import Plot, plt
from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
from schainpy.utils import log

EARTH_RADIUS = 6.3710e3

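# ll2xy() converts a (lat2, lon2) point into local east/north offsets in km
# relative to (lat1, lon1): `a` and `r` compute the haversine great-circle
# distance (12742 km = 2 * EARTH_RADIUS), `theta` is the initial bearing from
# point 1 to point 2, and the final rotation turns that bearing into math
# convention so the returned pair is (x_east_km, y_north_km).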
def ll2xy(lat1, lon1, lat2, lon2):

    p = 0.017453292519943295
    a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
        numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
    r = 12742 * numpy.arcsin(numpy.sqrt(a))
    theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
                          * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
    theta = -theta + numpy.pi/2
    return r*numpy.cos(theta), r*numpy.sin(theta)


def km2deg(km):
    '''
    Convert distance in km to degrees
    '''

    return numpy.rad2deg(km/EARTH_RADIUS)
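    # e.g. km2deg(111.19) ~= 1.0, since one degree of arc on a 6371 km sphere
    # spans roughly 111.19 km (EARTH_RADIUS * pi / 180).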


class SpectralMomentsPlot(SpectraPlot):
    '''
    Plot for Spectral Moments
    '''
    CODE = 'spc_moments'
    colormap = 'jet'
    plot_type = 'pcolor'


class SnrPlot(RTIPlot):
    '''
    Plot for SNR Data
    '''

    CODE = 'snr'
    colormap = 'jet'

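    # update() appears to follow the Plot-subclass contract used throughout
    # this module: return a (data, metadata) pair with the data keyed by CODE.
    # Here the SNR is converted from linear scale to dB (10*log10).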
    def update(self, dataOut):

        data = {
            'snr': 10*numpy.log10(dataOut.data_snr)
        }

        return data, {}


class DopplerPlot(RTIPlot):
    '''
    Plot for DOPPLER Data (1st moment)
    '''

    CODE = 'dop'
    colormap = 'jet'

    def update(self, dataOut):

        data = {
            'dop': 10*numpy.log10(dataOut.data_dop)
        }

        return data, {}


class PowerPlot(RTIPlot):
    '''
    Plot for Power Data (0th moment)
    '''

    CODE = 'pow'
    colormap = 'jet'

    def update(self, dataOut):

        data = {
            'pow': 10*numpy.log10(dataOut.data_pow)
        }

        return data, {}


class SpectralWidthPlot(RTIPlot):
    '''
    Plot for Spectral Width Data (2nd moment)
    '''

    CODE = 'width'
    colormap = 'jet'

    def update(self, dataOut):

        data = {
            'width': dataOut.data_width
        }

        return data, {}


class SkyMapPlot(Plot):
    '''
    Plot for meteor detection data
    '''

    CODE = 'param'

    def setup(self):

        self.ncols = 1
        self.nrows = 1
        self.width = 7.2
        self.height = 7.2
        self.nplots = 1
        self.xlabel = 'Zonal Zenith Angle (deg)'
        self.ylabel = 'Meridional Zenith Angle (deg)'
        self.polar = True
        self.ymin = -180
        self.ymax = 180
        self.colorbar = False

    def plot(self):

        arrayParameters = numpy.concatenate(self.data['param'])
        error = arrayParameters[:, -1]
        indValid = numpy.where(error == 0)[0]
        finalMeteor = arrayParameters[indValid, :]
        finalAzimuth = finalMeteor[:, 3]
        finalZenith = finalMeteor[:, 4]

        x = finalAzimuth * numpy.pi / 180
        y = finalZenith

        ax = self.axes[0]

        if ax.firsttime:
            ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
        else:
            ax.plot.set_data(x, y)

        dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
        dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
        title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
                                                                                     dt2,
                                                                                     len(x))
        self.titles[0] = title


class GenericRTIPlot(Plot):
    '''
    Plot for data_xxxx object
    '''

    CODE = 'param'
    colormap = 'viridis'
    plot_type = 'pcolorbuffer'

    def setup(self):
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = self.data.shape('param')[0]
        self.nplots = self.nrows
        self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})

        if not self.xlabel:
            self.xlabel = 'Time'

        self.ylabel = 'Height [km]'
        if not self.titles:
-            self.titles = self.data.parameters \
-                if self.data.parameters else ['Param {}'.format(x) for x in range(self.nrows)]
+            self.titles = ['Param {}'.format(x) for x in range(self.nrows)]

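    # Note: self.attr_data is not defined in this class; it is assumed to be
    # supplied by the Plot base class / plot configuration and to name the
    # dataOut attributes that are stacked into the 'param' array below.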
    def update(self, dataOut):

        data = {
            'param': numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
        }

        meta = {}

        return data, meta

    def plot(self):
        # self.data.normalize_heights()
        self.x = self.data.times
        self.y = self.data.yrange
        self.z = self.data['param']

        self.z = numpy.ma.masked_invalid(self.z)

        if self.decimation is None:
            x, y, z = self.fill_gaps(self.x, self.y, self.z)
        else:
            x, y, z = self.fill_gaps(*self.decimate())

        for n, ax in enumerate(self.axes):

            self.zmax = self.zmax if self.zmax is not None else numpy.max(
                self.z[n])
            self.zmin = self.zmin if self.zmin is not None else numpy.min(
                self.z[n])

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]

                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )


class PolarMapPlot(Plot):
    '''
    Plot for weather radar data
    '''

    CODE = 'param'
    colormap = 'seismic'

    def setup(self):
        self.ncols = 1
        self.nrows = 1
        self.width = 9
        self.height = 8
        self.mode = self.data.meta['mode']
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))
        if self.mode == 'E':
            self.xlabel = 'Longitude'
            self.ylabel = 'Latitude'
        else:
            self.xlabel = 'Range (km)'
            self.ylabel = 'Height (km)'
        self.bgcolor = 'white'
        self.cb_labels = self.data.meta['units']
        self.lat = self.data.meta['latitude']
        self.lon = self.data.meta['longitude']
        self.xmin, self.xmax = float(
            km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
        self.ymin, self.ymax = float(
            km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
        # self.polar = True

    def plot(self):

        for n, ax in enumerate(self.axes):
            data = self.data['param'][self.channels[n]]

            zeniths = numpy.linspace(
                0, self.data.meta['max_range'], data.shape[1])
            if self.mode == 'E':
                azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
                    theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
                x = km2deg(x) + self.lon
                y = km2deg(y) + self.lat
            else:
                azimuths = numpy.radians(self.data.yrange)
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta), r*numpy.sin(theta)
                self.y = zeniths

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])

            if self.mode == 'A':
                continue

            # plot district names
            f = open('/data/workspace/schain_scripts/distrito.csv')
            for line in f:
                label, lon, lat = [s.strip() for s in line.split(',') if s]
                lat = float(lat)
                lon = float(lon)
                # ax.plot(lon, lat, '.b', ms=2)
                ax.text(lon, lat, label.decode('utf8'), ha='center',
                        va='bottom', size='8', color='black')

            # plot district boundaries (limites)
            limites = []
            tmp = []
            for line in open('/data/workspace/schain_scripts/lima.csv'):
                if '#' in line:
                    if tmp:
                        limites.append(tmp)
                    tmp = []
                    continue
                values = line.strip().split(',')
                tmp.append((float(values[0]), float(values[1])))
            for points in limites:
                ax.add_patch(
                    Polygon(points, ec='k', fc='none', ls='--', lw=0.5))

            # plot river basins (cuencas)
            for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
                f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
                values = [line.strip().split(',') for line in f]
                points = [(float(s[0]), float(s[1])) for s in values]
                ax.add_patch(Polygon(points, ec='b', fc='none'))

            # plot range-ring grid
            for r in (15, 30, 45, 60):
                ax.add_artist(plt.Circle((self.lon, self.lat),
                                         km2deg(r), color='0.6', fill=False, lw=0.2))
                ax.text(
                    self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
                    self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
                    '{}km'.format(r),
                    ha='center', va='bottom', size='8', color='0.6', weight='heavy')

        if self.mode == 'E':
            title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
            label = 'E{:02d}'.format(int(self.data.meta['elevation']))
        else:
            title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
            label = 'A{:02d}'.format(int(self.data.meta['azimuth']))

        self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
        self.titles = ['{} {}'.format(
            self.data.parameters[x], title) for x in self.channels]

@@ -1,462 +1,453 @@
import os
import sys
import glob
-import fnmatch
-import datetime
-import time
-import re
-import h5py
import numpy

-import pylab as plb
-from scipy.optimize import curve_fit
-from scipy import asarray as ar, exp

SPEED_OF_LIGHT = 299792458
SPEED_OF_LIGHT = 3e8
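# The second assignment overrides the exact value, so the 3e8 m/s
# approximation is what RecordHeaderBLTR.read() actually uses below.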

from .utils import folder_in_range

import schainpy.admin
from schainpy.model.data.jrodata import Spectra
from schainpy.model.proc.jroproc_base import ProcessingUnit
from schainpy.utils import log
-from schainpy.model.io.jroIO_base import JRODataReader

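# pol2cart() converts polar coordinates (radius, angle in radians) to
# Cartesian (x, y); it is used further down to turn each antenna's
# range/bearing pair into east/north coordinates.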
def pol2cart(rho, phi):
    x = rho * numpy.cos(phi)
    y = rho * numpy.sin(phi)
    return (x, y)

FILE_STRUCTURE = numpy.dtype([  # HEADER 48 bytes
    ('FileMgcNumber', '<u4'),  # 0x23020100
    ('nFDTdataRecors', '<u4'),
    ('OffsetStartHeader', '<u4'),
    ('RadarUnitId', '<u4'),
    ('SiteName', 'S32'),  # Null terminated
])
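# Layout check: four '<u4' fields (16 bytes) plus the 32-byte SiteName give
# the 48-byte file header that FileHeaderBLTR.size assumes.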


class FileHeaderBLTR():

    def __init__(self, fo):

        self.fo = fo
        self.size = 48
        self.read()

    def read(self):

        header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
        self.FileMgcNumber = hex(header['FileMgcNumber'][0])
        self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
        self.RadarUnitId = int(header['RadarUnitId'][0])
        self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
        self.SiteName = header['SiteName'][0]

    def write(self, fp):

        headerTuple = (self.FileMgcNumber,
                       self.nFDTdataRecors,
                       self.RadarUnitId,
                       self.SiteName,
                       self.size)

        header = numpy.array(headerTuple, FILE_STRUCTURE)
        header.tofile(fp)
        ''' ndarray.tofile(fid, sep, format) writes the array to a file as text or binary (binary by default).

        fid : file or str
            An open file object, or a string containing a filename.

        sep : str
            Separator between array items for text output. If "" (empty), a binary file is written,
            equivalent to file.write(a.tobytes()).

        format : str
            Format string for text file output. Each entry in the array is formatted to text by
            first converting it to the closest Python type, and then using "format" % item.
        '''

        return 1


RECORD_STRUCTURE = numpy.dtype([  # RECORD HEADER 180+20N bytes
    ('RecMgcNumber', '<u4'),  # 0x23030001
    ('RecCounter', '<u4'),  # Record counter (0, 1, ...)
    # Offset to start of next record from start of this record
    ('Off2StartNxtRec', '<u4'),
    # Offset to start of data from start of this record
    ('Off2StartData', '<u4'),
    # Epoch time stamp of start of acquisition (seconds)
    ('nUtime', '<i4'),
    # Millisecond component of time stamp (0,...,999)
    ('nMilisec', '<u4'),
    # Experiment tag name (null terminated)
    ('ExpTagName', 'S32'),
    # Experiment comment (null terminated)
    ('ExpComment', 'S32'),
    # Site latitude (from GPS) in degrees (positive implies North)
    ('SiteLatDegrees', '<f4'),
    # Site longitude (from GPS) in degrees (positive implies East)
    ('SiteLongDegrees', '<f4'),
    # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
    ('RTCgpsStatus', '<u4'),
    ('TransmitFrec', '<u4'),  # Transmit frequency (Hz)
    ('ReceiveFrec', '<u4'),  # Receive frequency
    # First local oscillator frequency (Hz)
    ('FirstOsciFrec', '<u4'),
    # (0="O", 1="E", 2="linear 1", 3="linear 2")
    ('Polarisation', '<u4'),
    # Receiver filter settings (0,1,2,3)
    ('ReceiverFiltSett', '<u4'),
    # Number of modes in use (1 or 2)
    ('nModesInUse', '<u4'),
    # Dual Mode index number for these data (0 or 1)
    ('DualModeIndex', '<u4'),
    # Dual Mode range correction for these data (m)
    ('DualModeRange', '<u4'),
    # Number of digital channels acquired (2*N)
    ('nDigChannels', '<u4'),
    # Sampling resolution (meters)
    ('SampResolution', '<u4'),
    # Number of range gates sampled
    ('nHeights', '<u4'),
    # Start range of sampling (meters)
    ('StartRangeSamp', '<u4'),
    ('PRFhz', '<u4'),  # PRF (Hz)
    ('nCohInt', '<u4'),  # Integrations
    # Number of data points transformed
    ('nProfiles', '<u4'),
    # Number of receive beams stored in file (1 or N)
    ('nChannels', '<u4'),
    ('nIncohInt', '<u4'),  # Number of spectral averages
    # FFT windowing index (0 = no window)
    ('FFTwindowingInd', '<u4'),
    # Beam steer angle (azimuth) in degrees (clockwise from true North)
    ('BeamAngleAzim', '<f4'),
    # Beam steer angle (zenith) in degrees (0 => vertical)
    ('BeamAngleZen', '<f4'),
    # Antenna coordinates (Range (meters), Bearing (degrees)) - N pairs
    ('AntennaCoord0', '<f4'),
    ('AntennaAngl0', '<f4'),
    ('AntennaCoord1', '<f4'),
    ('AntennaAngl1', '<f4'),
    ('AntennaCoord2', '<f4'),
    ('AntennaAngl2', '<f4'),
    # Receiver phase calibration (degrees) - N values
    ('RecPhaseCalibr0', '<f4'),
    ('RecPhaseCalibr1', '<f4'),
    ('RecPhaseCalibr2', '<f4'),
    # Receiver amplitude calibration (ratio relative to receiver one) - N values
    ('RecAmpCalibr0', '<f4'),
    ('RecAmpCalibr1', '<f4'),
    ('RecAmpCalibr2', '<f4'),
    # Receiver gains in dB - N values
    ('ReceiverGaindB0', '<i4'),
    ('ReceiverGaindB1', '<i4'),
    ('ReceiverGaindB2', '<i4'),
])
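# The record header is nominally 180 + 20*N bytes: 180 bytes of fixed fields
# plus 20 bytes per receiver (antenna coordinate/angle, phase and amplitude
# calibration, gain). This dtype unrolls those per-receiver fields for the
# 3-channel case (240 bytes), consistent with RHsize = 180 + 20 * nChannels
# computed in RecordHeaderBLTR.read().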


class RecordHeaderBLTR():

    def __init__(self, fo):

        self.fo = fo
        self.OffsetStartHeader = 48
        self.Off2StartNxtRec = 811248

    def read(self, block):
        OffRHeader = self.OffsetStartHeader + block * self.Off2StartNxtRec
        self.fo.seek(OffRHeader, os.SEEK_SET)
        header = numpy.fromfile(self.fo, RECORD_STRUCTURE, 1)
        self.RecMgcNumber = hex(header['RecMgcNumber'][0])  # 0x23030001
        self.RecCounter = int(header['RecCounter'][0])
        self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
        self.Off2StartData = int(header['Off2StartData'][0])
        self.nUtime = header['nUtime'][0]
        self.nMilisec = header['nMilisec'][0]
        self.ExpTagName = ''  # str(header['ExpTagName'][0])
        self.ExpComment = ''  # str(header['ExpComment'][0])
        self.SiteLatDegrees = header['SiteLatDegrees'][0]
        self.SiteLongDegrees = header['SiteLongDegrees'][0]
        self.RTCgpsStatus = header['RTCgpsStatus'][0]
        self.TransmitFrec = header['TransmitFrec'][0]
        self.ReceiveFrec = header['ReceiveFrec'][0]
        self.FirstOsciFrec = header['FirstOsciFrec'][0]
        self.Polarisation = header['Polarisation'][0]
        self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
        self.nModesInUse = header['nModesInUse'][0]
        self.DualModeIndex = header['DualModeIndex'][0]
        self.DualModeRange = header['DualModeRange'][0]
        self.nDigChannels = header['nDigChannels'][0]
        self.SampResolution = header['SampResolution'][0]
        self.nHeights = header['nHeights'][0]
        self.StartRangeSamp = header['StartRangeSamp'][0]
        self.PRFhz = header['PRFhz'][0]
        self.nCohInt = header['nCohInt'][0]
        self.nProfiles = header['nProfiles'][0]
        self.nChannels = header['nChannels'][0]
        self.nIncohInt = header['nIncohInt'][0]
        self.FFTwindowingInd = header['FFTwindowingInd'][0]
        self.BeamAngleAzim = header['BeamAngleAzim'][0]
        self.BeamAngleZen = header['BeamAngleZen'][0]
        self.AntennaCoord0 = header['AntennaCoord0'][0]
        self.AntennaAngl0 = header['AntennaAngl0'][0]
        self.AntennaCoord1 = header['AntennaCoord1'][0]
        self.AntennaAngl1 = header['AntennaAngl1'][0]
        self.AntennaCoord2 = header['AntennaCoord2'][0]
        self.AntennaAngl2 = header['AntennaAngl2'][0]
        self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
        self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
        self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
        self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
        self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
        self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
        self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
        self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
        self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
        self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
        self.RHsize = 180 + 20 * self.nChannels
        self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
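        # ipp is half of c/PRF, i.e. the inter-pulse period expressed as a
        # range in meters; Datasize assumes complex64 samples (2 float32
        # components x 4 bytes) per profile, channel and height.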
        endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec

        if OffRHeader > endFp:
            sys.stderr.write(
                "Warning %s: Size value read from System Header is lower than it has to be\n" % self.fo.name)
            return 0

        if OffRHeader < endFp:
            sys.stderr.write(
                "Warning %s: Size value read from System Header is greater than it has to be\n" % self.fo.name)
            return 0

        return 1


class BLTRSpectraReader(ProcessingUnit):

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.ext = ".fdt"
        self.optchar = "P"
        self.fpFile = None
        self.fp = None
        self.BlockCounter = 0
        self.fileSizeByHeader = None
        self.filenameList = []
        self.fileSelector = 0
        self.Off2StartNxtRec = 0
        self.RecCounter = 0
        self.flagNoMoreFiles = 0
        self.data_spc = None
        self.data_cspc = None
        self.path = None
        self.OffsetStartHeader = 0
        self.Off2StartData = 0
        self.ipp = 0
        self.nFDTdataRecors = 0
        self.blocksize = 0
        self.dataOut = Spectra()
        self.dataOut.flagNoData = False

    def search_files(self):
        '''
        Search the data path for .fdt files, keep those whose date falls within
        the requested range and build a sorted list of filenames to read.
        '''

        files = glob.glob(os.path.join(self.path, '*{}'.format(self.ext)))
        files = sorted(files)
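        # Assumption: file names look like NAME.YYYYMMDD.fdt, so the token
        # after the first dot is parsed as a %Y%m%d date for range filtering.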
        for f in files:
            filename = f.split('/')[-1]
            if folder_in_range(filename.split('.')[1], self.startDate, self.endDate, '%Y%m%d'):
                self.filenameList.append(f)

    def run(self, **kwargs):
        '''
        Entry point of the unit; it is called on every iteration. It runs
        setup() once and then acquires the next block of data with getData().
        '''
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=None,
              endTime=None,
              walk=True,
              code=None,
              online=False,
              mode=None,
              **kwargs):

        self.isConfig = True

        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.walk = walk
        self.mode = int(mode)
        self.search_files()
        if self.filenameList:
            self.readFile()

    def getData(self):
        '''
        Check whether there are files and blocks left to read; if not, flag
        the output as empty and raise. Otherwise read the next data block.
        '''

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            raise schainpy.admin.SchainError('No more files')

        self.readBlock()

    def readFile(self):
        '''
        Open the next file in the list and read its BLTR file header, then
        reset the block counter so reading starts at the first record.
        '''

        if self.fileSelector < len(self.filenameList):
            log.success('Opening file: {}'.format(self.filenameList[self.fileSelector]), self.name)
            self.fp = open(self.filenameList[self.fileSelector])
            self.fheader = FileHeaderBLTR(self.fp)
            self.rheader = RecordHeaderBLTR(self.fp)
            self.nFDTdataRecors = self.fheader.nFDTdataRecors
            self.fileSelector += 1
            self.BlockCounter = 0
            return 1
        else:
            self.flagNoMoreFiles = True
            self.dataOut.flagNoData = True
            return 0

    def readBlock(self):
        '''
        If all records of the current file have been read, move on to the next
        file. Then:

        1. Read the record header of the current block.
        2. Fill the spectra buffers from that block.
        '''

        if self.BlockCounter == self.nFDTdataRecors:
            if not self.readFile():
                return

        if self.mode == 1:
            self.rheader.read(self.BlockCounter+1)
        elif self.mode == 0:
            self.rheader.read(self.BlockCounter)

        self.RecCounter = self.rheader.RecCounter
        self.OffsetStartHeader = self.rheader.OffsetStartHeader
        self.Off2StartNxtRec = self.rheader.Off2StartNxtRec
        self.Off2StartData = self.rheader.Off2StartData
        self.nProfiles = self.rheader.nProfiles
        self.nChannels = self.rheader.nChannels
        self.nHeights = self.rheader.nHeights
        self.frequency = self.rheader.TransmitFrec
        self.DualModeIndex = self.rheader.DualModeIndex
        self.pairsList = [(0, 1), (0, 2), (1, 2)]
        self.dataOut.pairsList = self.pairsList
        self.nRdPairs = len(self.dataOut.pairsList)
        self.dataOut.nRdPairs = self.nRdPairs
        self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
        self.dataOut.channelList = range(self.nChannels)
        self.dataOut.nProfiles = self.rheader.nProfiles
        self.dataOut.nIncohInt = self.rheader.nIncohInt
        self.dataOut.nCohInt = self.rheader.nCohInt
        self.dataOut.ippSeconds = 1/float(self.rheader.PRFhz)
        self.dataOut.PRF = self.rheader.PRFhz
        self.dataOut.nFFTPoints = self.rheader.nProfiles
        self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
        self.dataOut.timeZone = 0
        self.dataOut.useLocalTime = False
        self.dataOut.nmodes = 2
        log.log('Reading block {} - {}'.format(self.BlockCounter, self.dataOut.datatime), self.name)
        OffDATA = self.OffsetStartHeader + self.RecCounter * \
            self.Off2StartNxtRec + self.Off2StartData
        self.fp.seek(OffDATA, os.SEEK_SET)

        self.data_fft = numpy.fromfile(self.fp, [('complex', '<c8')], self.nProfiles*self.nChannels*self.nHeights)
        self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
        self.data_block = numpy.reshape(self.data_fft, (self.nHeights, self.nChannels, self.nProfiles))
        self.data_block = numpy.transpose(self.data_block, (1, 2, 0))
423 | copy = self.data_block.copy() |
|
415 | copy = self.data_block.copy() | |
424 | spc = copy * numpy.conjugate(copy) |
|
416 | spc = copy * numpy.conjugate(copy) | |
425 | self.data_spc = numpy.absolute(spc) # absolute value (magnitude)
|
417 | self.data_spc = numpy.absolute(spc) # absolute value (magnitude) | |
426 | self.dataOut.data_spc = self.data_spc |
|
|||
427 |
|
418 | |||
428 | cspc = self.data_block.copy() |
|
419 | cspc = self.data_block.copy() | |
429 | self.data_cspc = self.data_block.copy() |
|
420 | self.data_cspc = self.data_block.copy() | |
430 |
|
421 | |||
431 | for i in range(self.nRdPairs): |
|
422 | for i in range(self.nRdPairs): | |
432 |
|
423 | |||
433 | chan_index0 = self.dataOut.pairsList[i][0] |
|
424 | chan_index0 = self.dataOut.pairsList[i][0] | |
434 | chan_index1 = self.dataOut.pairsList[i][1] |
|
425 | chan_index1 = self.dataOut.pairsList[i][1] | |
435 |
|
426 | |||
436 | self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :]) |
|
427 | self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :]) | |
437 |
|
428 | |||
438 | '''Getting Eij and Nij''' |
|
429 | '''Getting Eij and Nij''' | |
439 | (AntennaX0, AntennaY0) = pol2cart( |
|
430 | (AntennaX0, AntennaY0) = pol2cart( | |
440 | self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180) |
|
431 | self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180) | |
441 | (AntennaX1, AntennaY1) = pol2cart( |
|
432 | (AntennaX1, AntennaY1) = pol2cart( | |
442 | self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180) |
|
433 | self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180) | |
443 | (AntennaX2, AntennaY2) = pol2cart( |
|
434 | (AntennaX2, AntennaY2) = pol2cart( | |
444 | self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180) |
|
435 | self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180) | |
445 |
|
436 | |||
446 | E01 = AntennaX0 - AntennaX1 |
|
437 | E01 = AntennaX0 - AntennaX1 | |
447 | N01 = AntennaY0 - AntennaY1 |
|
438 | N01 = AntennaY0 - AntennaY1 | |
448 |
|
439 | |||
449 | E02 = AntennaX0 - AntennaX2 |
|
440 | E02 = AntennaX0 - AntennaX2 | |
450 | N02 = AntennaY0 - AntennaY2 |
|
441 | N02 = AntennaY0 - AntennaY2 | |
451 |
|
442 | |||
452 | E12 = AntennaX1 - AntennaX2 |
|
443 | E12 = AntennaX1 - AntennaX2 | |
453 | N12 = AntennaY1 - AntennaY2 |
|
444 | N12 = AntennaY1 - AntennaY2 | |
454 |
|
445 | |||
455 | self.ChanDist = numpy.array( |
|
446 | self.ChanDist = numpy.array( | |
456 | [[E01, N01], [E02, N02], [E12, N12]]) |
|
447 | [[E01, N01], [E02, N02], [E12, N12]]) | |
457 |
|
448 | |||
458 | self.dataOut.ChanDist = self.ChanDist |
|
449 | self.dataOut.ChanDist = self.ChanDist | |
459 |
|
450 | |||
460 | self.BlockCounter += 2 |
|
451 | self.BlockCounter += 2 | |
461 | self.dataOut.data_spc = self.data_spc |
|
452 | self.dataOut.data_spc = self.data_spc | |
462 | self.dataOut.data_cspc =self.data_cspc |
|
453 | self.dataOut.data_cspc =self.data_cspc |
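The block above reshapes the raw FFT block into (channels, profiles, heights), forms the auto-spectra as |X|^2 and the cross-spectra X_i*conj(X_j) for the three channel pairs, and derives the east/north antenna baselines (Eij, Nij) from the polar antenna coordinates. The following standalone sketch reproduces those two steps with synthetic values; the `pol2cart` helper and the antenna coordinates are assumptions for illustration (the real helper is defined elsewhere in this module).

import numpy

def pol2cart(rho, phi):
    # assumed helper: polar (radius, angle in radians) -> cartesian (x, y)
    return rho * numpy.cos(phi), rho * numpy.sin(phi)

# synthetic FFT block shaped like data_block: (nChannels, nProfiles, nHeights)
nChannels, nProfiles, nHeights = 3, 128, 100
data_block = (numpy.random.randn(nChannels, nProfiles, nHeights) +
              1j * numpy.random.randn(nChannels, nProfiles, nHeights))

# auto-spectra: magnitude of X * conj(X) per channel
data_spc = numpy.absolute(data_block * numpy.conjugate(data_block))

# cross-spectra: X_i * conj(X_j) for every pair in pairsList
pairsList = [(0, 1), (0, 2), (1, 2)]
data_cspc = numpy.array([data_block[i] * numpy.conjugate(data_block[j])
                         for i, j in pairsList])

# east/north baselines from hypothetical (radius, angle-in-degrees) antenna coordinates
coords = [(10.0, 0.0), (10.0, 120.0), (10.0, 240.0)]
xy = [pol2cart(r, a * numpy.pi / 180) for r, a in coords]
ChanDist = numpy.array([[xy[i][0] - xy[j][0], xy[i][1] - xy[j][1]]
                        for i, j in pairsList])   # [[E01, N01], [E02, N02], [E12, N12]]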
@@ -1,627 +1,627 | |||||
1 | import os |
|
1 | import os | |
2 | import time |
|
2 | import time | |
3 | import datetime |
|
3 | import datetime | |
4 |
|
4 | |||
5 | import numpy |
|
5 | import numpy | |
6 | import h5py |
|
6 | import h5py | |
7 |
|
7 | |||
8 | import schainpy.admin |
|
8 | import schainpy.admin | |
9 | from schainpy.model.data.jrodata import * |
|
9 | from schainpy.model.data.jrodata import * | |
10 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator |
|
10 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator | |
11 | from schainpy.model.io.jroIO_base import * |
|
11 | from schainpy.model.io.jroIO_base import * | |
12 | from schainpy.utils import log |
|
12 | from schainpy.utils import log | |
13 |
|
13 | |||
14 |
|
14 | |||
15 | class HDFReader(Reader, ProcessingUnit): |
|
15 | class HDFReader(Reader, ProcessingUnit): | |
16 | """Processing unit to read HDF5 format files |
|
16 | """Processing unit to read HDF5 format files | |
17 |
|
17 | |||
18 | This unit reads HDF5 files created with the `HDFWriter` operation. By default |
|
18 | This unit reads HDF5 files created with the `HDFWriter` operation. By default |
19 | the file contains two groups, Data and Metadata, and every variable found there |
|
19 | the file contains two groups, Data and Metadata, and every variable found there |
20 | is loaded as a `dataOut` attribute. |
|
20 | is loaded as a `dataOut` attribute. |
21 | Any other HDF5 file can also be read by giving its structure in the `description` |
|
21 | Any other HDF5 file can also be read by giving its structure in the `description` |
22 | parameter; extra values can be added to the metadata with the `extras` parameter. |
|
22 | parameter; extra values can be added to the metadata with the `extras` parameter. |
23 |
|
23 | |||
24 | Parameters: |
|
24 | Parameters: | |
25 | ----------- |
|
25 | ----------- | |
26 | path : str |
|
26 | path : str | |
27 | Path where files are located. |
|
27 | Path where files are located. | |
28 | startDate : date |
|
28 | startDate : date | |
29 | Start date of the files |
|
29 | Start date of the files | |
30 | endDate : date |
|
30 | endDate : date |
31 | End date of the files |
|
31 | End date of the files | |
32 | startTime : time |
|
32 | startTime : time | |
33 | Start time of the files |
|
33 | Start time of the files | |
34 | endTime : time |
|
34 | endTime : time | |
35 | End time of the files |
|
35 | End time of the files | |
36 | description : dict, optional |
|
36 | description : dict, optional | |
37 | Dictionary with the description of the HDF5 file |
|
37 | Dictionary with the description of the HDF5 file | |
38 | extras : dict, optional |
|
38 | extras : dict, optional | |
39 | Dictionary with extra metadata to be added to `dataOut` |
|
39 | Dictionary with extra metadata to be added to `dataOut` |
40 |
|
40 | |||
41 | Examples |
|
41 | Examples | |
42 | -------- |
|
42 | -------- | |
43 |
|
43 | |||
44 | desc = { |
|
44 | desc = { | |
45 | 'Data': { |
|
45 | 'Data': { | |
46 | 'data_output': ['u', 'v', 'w'], |
|
46 | 'data_output': ['u', 'v', 'w'], | |
47 | 'utctime': 'timestamps', |
|
47 | 'utctime': 'timestamps', | |
48 | } , |
|
48 | } , | |
49 | 'Metadata': { |
|
49 | 'Metadata': { | |
50 | 'heightList': 'heights' |
|
50 | 'heightList': 'heights' | |
51 | } |
|
51 | } | |
52 | } |
|
52 | } | |
53 |
|
53 | |||
54 | desc = { |
|
54 | desc = { | |
55 | 'Data': { |
|
55 | 'Data': { | |
56 | 'data_output': 'winds', |
|
56 | 'data_output': 'winds', | |
57 | 'utctime': 'timestamps' |
|
57 | 'utctime': 'timestamps' | |
58 | }, |
|
58 | }, | |
59 | 'Metadata': { |
|
59 | 'Metadata': { | |
60 | 'heightList': 'heights' |
|
60 | 'heightList': 'heights' | |
61 | } |
|
61 | } | |
62 | } |
|
62 | } | |
63 |
|
63 | |||
64 | extras = { |
|
64 | extras = { | |
65 | 'timeZone': 300 |
|
65 | 'timeZone': 300 | |
66 | } |
|
66 | } | |
67 |
|
67 | |||
68 | reader = project.addReadUnit( |
|
68 | reader = project.addReadUnit( | |
69 | name='HDFReader', |
|
69 | name='HDFReader', | |
70 | path='/path/to/files', |
|
70 | path='/path/to/files', | |
71 | startDate='2019/01/01', |
|
71 | startDate='2019/01/01', | |
72 | endDate='2019/01/31', |
|
72 | endDate='2019/01/31', | |
73 | startTime='00:00:00', |
|
73 | startTime='00:00:00', | |
74 | endTime='23:59:59', |
|
74 | endTime='23:59:59', | |
75 | # description=json.dumps(desc), |
|
75 | # description=json.dumps(desc), | |
76 | # extras=json.dumps(extras), |
|
76 | # extras=json.dumps(extras), | |
77 | ) |
|
77 | ) | |
78 |
|
78 | |||
79 | """ |
|
79 | """ | |
80 |
|
80 | |||
81 | __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras'] |
|
81 | __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras'] | |
82 |
|
82 | |||
83 | def __init__(self): |
|
83 | def __init__(self): | |
84 | ProcessingUnit.__init__(self) |
|
84 | ProcessingUnit.__init__(self) | |
85 | self.dataOut = Parameters() |
|
85 | self.dataOut = Parameters() | |
86 | self.ext = ".hdf5" |
|
86 | self.ext = ".hdf5" | |
87 | self.optchar = "D" |
|
87 | self.optchar = "D" | |
88 | self.meta = {} |
|
88 | self.meta = {} | |
89 | self.data = {} |
|
89 | self.data = {} | |
90 | self.open_file = h5py.File |
|
90 | self.open_file = h5py.File | |
91 | self.open_mode = 'r' |
|
91 | self.open_mode = 'r' | |
92 | self.description = {} |
|
92 | self.description = {} | |
93 | self.extras = {} |
|
93 | self.extras = {} | |
94 | self.filefmt = "*%Y%j***" |
|
94 | self.filefmt = "*%Y%j***" | |
95 | self.folderfmt = "*%Y%j" |
|
95 | self.folderfmt = "*%Y%j" | |
96 |
|
96 | |||
97 | def setup(self, **kwargs): |
|
97 | def setup(self, **kwargs): | |
98 |
|
98 | |||
99 | self.set_kwargs(**kwargs) |
|
99 | self.set_kwargs(**kwargs) | |
100 | if not self.ext.startswith('.'): |
|
100 | if not self.ext.startswith('.'): | |
101 | self.ext = '.{}'.format(self.ext) |
|
101 | self.ext = '.{}'.format(self.ext) | |
102 |
|
102 | |||
103 | if self.online: |
|
103 | if self.online: | |
104 | log.log("Searching files in online mode...", self.name) |
|
104 | log.log("Searching files in online mode...", self.name) | |
105 |
|
105 | |||
106 | for nTries in range(self.nTries): |
|
106 | for nTries in range(self.nTries): | |
107 | fullpath = self.searchFilesOnLine(self.path, self.startDate, |
|
107 | fullpath = self.searchFilesOnLine(self.path, self.startDate, | |
108 | self.endDate, self.expLabel, self.ext, self.walk, |
|
108 | self.endDate, self.expLabel, self.ext, self.walk, | |
109 | self.filefmt, self.folderfmt) |
|
109 | self.filefmt, self.folderfmt) | |
110 | try: |
|
110 | try: | |
111 | fullpath = next(fullpath) |
|
111 | fullpath = next(fullpath) | |
112 | except: |
|
112 | except: | |
113 | fullpath = None |
|
113 | fullpath = None | |
114 |
|
114 | |||
115 | if fullpath: |
|
115 | if fullpath: | |
116 | break |
|
116 | break | |
117 |
|
117 | |||
118 | log.warning( |
|
118 | log.warning( | |
119 | 'Waiting {} sec for a valid file in {}: try {} ...'.format( |
|
119 | 'Waiting {} sec for a valid file in {}: try {} ...'.format( | |
120 | self.delay, self.path, nTries + 1), |
|
120 | self.delay, self.path, nTries + 1), | |
121 | self.name) |
|
121 | self.name) | |
122 | time.sleep(self.delay) |
|
122 | time.sleep(self.delay) | |
123 |
|
123 | |||
124 | if not(fullpath): |
|
124 | if not(fullpath): | |
125 | raise schainpy.admin.SchainError( |
|
125 | raise schainpy.admin.SchainError( | |
126 | 'There isn\'t any valid file in {}'.format(self.path)) |
|
126 | 'There isn\'t any valid file in {}'.format(self.path)) | |
127 |
|
127 | |||
128 | pathname, filename = os.path.split(fullpath) |
|
128 | pathname, filename = os.path.split(fullpath) | |
129 | self.year = int(filename[1:5]) |
|
129 | self.year = int(filename[1:5]) | |
130 | self.doy = int(filename[5:8]) |
|
130 | self.doy = int(filename[5:8]) | |
131 | self.set = int(filename[8:11]) - 1 |
|
131 | self.set = int(filename[8:11]) - 1 | |
132 | else: |
|
132 | else: | |
133 | log.log("Searching files in {}".format(self.path), self.name) |
|
133 | log.log("Searching files in {}".format(self.path), self.name) | |
134 | self.filenameList = self.searchFilesOffLine(self.path, self.startDate, |
|
134 | self.filenameList = self.searchFilesOffLine(self.path, self.startDate, | |
135 | self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) |
|
135 | self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) | |
136 |
|
136 | |||
137 | self.setNextFile() |
|
137 | self.setNextFile() | |
138 |
|
138 | |||
139 | return |
|
139 | return | |
140 |
|
140 | |||
141 | def readFirstHeader(self): |
|
141 | def readFirstHeader(self): | |
142 | '''Read metadata and data''' |
|
142 | '''Read metadata and data''' | |
143 |
|
143 | |||
144 | self.__readMetadata() |
|
144 | self.__readMetadata() | |
145 | self.__readData() |
|
145 | self.__readData() | |
146 | self.__setBlockList() |
|
146 | self.__setBlockList() | |
147 |
|
147 | |||
148 | if 'type' in self.meta: |
|
148 | if 'type' in self.meta: | |
149 | self.dataOut = eval(self.meta['type'])() |
|
149 | self.dataOut = eval(self.meta['type'])() | |
150 |
|
150 | |||
151 | for attr in self.meta: |
|
151 | for attr in self.meta: | |
152 | setattr(self.dataOut, attr, self.meta[attr]) |
|
152 | setattr(self.dataOut, attr, self.meta[attr]) | |
153 |
|
153 | |||
154 | self.blockIndex = 0 |
|
154 | self.blockIndex = 0 | |
155 |
|
155 | |||
156 | return |
|
156 | return | |
157 |
|
157 | |||
158 | def __setBlockList(self): |
|
158 | def __setBlockList(self): | |
159 | ''' |
|
159 | ''' | |
160 | Selects the data blocks that fall within the configured time range |
|
160 | Selects the data blocks that fall within the configured time range |
161 |
|
161 | |||
162 | self.fp |
|
162 | self.fp | |
163 | self.startTime |
|
163 | self.startTime | |
164 | self.endTime |
|
164 | self.endTime | |
165 | self.blockList |
|
165 | self.blockList | |
166 | self.blocksPerFile |
|
166 | self.blocksPerFile | |
167 |
|
167 | |||
168 | ''' |
|
168 | ''' | |
169 |
|
169 | |||
170 | startTime = self.startTime |
|
170 | startTime = self.startTime | |
171 | endTime = self.endTime |
|
171 | endTime = self.endTime | |
172 |
|
172 | |||
173 | thisUtcTime = self.data['utctime'] |
|
173 | thisUtcTime = self.data['utctime'] | |
174 | self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1]) |
|
174 | self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1]) | |
175 |
|
175 | |||
176 | thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0]) |
|
176 | thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0]) | |
177 |
|
177 | |||
178 | thisDate = thisDatetime.date() |
|
178 | thisDate = thisDatetime.date() | |
179 | thisTime = thisDatetime.time() |
|
179 | thisTime = thisDatetime.time() | |
180 |
|
180 | |||
181 | startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds() |
|
181 | startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds() | |
182 | endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds() |
|
182 | endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds() | |
183 |
|
183 | |||
184 | ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0] |
|
184 | ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0] | |
185 |
|
185 | |||
186 | self.blockList = ind |
|
186 | self.blockList = ind | |
187 | self.blocksPerFile = len(ind) |
|
187 | self.blocksPerFile = len(ind) | |
188 | return |
|
188 | return | |
189 |
|
189 | |||
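Summarising __setBlockList above: the configured startTime/endTime (times of day) are anchored to the date of the first block, converted to epoch seconds, and only the blocks whose utctime falls in that window are kept. A minimal sketch of the same selection, using hypothetical timestamps:

import datetime
import numpy

utctime = numpy.array([1546300800.0 + 60 * k for k in range(10)])   # hypothetical block times
startTime, endTime = datetime.time(0, 2, 0), datetime.time(0, 7, 0)

day = datetime.datetime.utcfromtimestamp(utctime[0]).date()
epoch = datetime.datetime(1970, 1, 1)
startUtc = (datetime.datetime.combine(day, startTime) - epoch).total_seconds()
endUtc = (datetime.datetime.combine(day, endTime) - epoch).total_seconds()

blockList = numpy.where(numpy.logical_and(utctime >= startUtc, utctime < endUtc))[0]
blocksPerFile = len(blockList)   # only these blocks are delivered by getData()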
190 | def __readMetadata(self): |
|
190 | def __readMetadata(self): | |
191 | ''' |
|
191 | ''' | |
192 | Reads Metadata |
|
192 | Reads Metadata | |
193 | ''' |
|
193 | ''' | |
194 |
|
194 | |||
195 | meta = {} |
|
195 | meta = {} | |
196 |
|
196 | |||
197 | if self.description: |
|
197 | if self.description: | |
198 | for key, value in self.description['Metadata'].items(): |
|
198 | for key, value in self.description['Metadata'].items(): | |
199 | meta[key] = self.fp[value] |
|
199 | meta[key] = self.fp[value][()] | |
200 | else: |
|
200 | else: | |
201 | grp = self.fp['Metadata'] |
|
201 | grp = self.fp['Metadata'] | |
202 | for name in grp: |
|
202 | for name in grp: | |
203 | meta[name] = grp[name] |
|
203 | meta[name] = grp[name][()] | |
204 |
|
204 | |||
205 | if self.extras: |
|
205 | if self.extras: | |
206 | for key, value in self.extras.items(): |
|
206 | for key, value in self.extras.items(): | |
207 | meta[key] = value |
|
207 | meta[key] = value | |
208 | self.meta = meta |
|
208 | self.meta = meta | |
209 |
|
209 | |||
210 | return |
|
210 | return | |
211 |
|
211 | |||
212 | def __readData(self): |
|
212 | def __readData(self): | |
213 |
|
213 | |||
214 | data = {} |
|
214 | data = {} | |
215 |
|
215 | |||
216 | if self.description: |
|
216 | if self.description: | |
217 | for key, value in self.description['Data'].items(): |
|
217 | for key, value in self.description['Data'].items(): | |
218 | if isinstance(value, str): |
|
218 | if isinstance(value, str): | |
219 | if isinstance(self.fp[value], h5py.Dataset): |
|
219 | if isinstance(self.fp[value], h5py.Dataset): | |
220 | data[key] = self.fp[value] |
|
220 | data[key] = self.fp[value][()] | |
221 | elif isinstance(self.fp[value], h5py.Group): |
|
221 | elif isinstance(self.fp[value], h5py.Group): | |
222 | array = [] |
|
222 | array = [] | |
223 | for ch in self.fp[value]: |
|
223 | for ch in self.fp[value]: | |
224 | array.append(self.fp[value][ch]) |
|
224 | array.append(self.fp[value][ch][()]) | |
225 | data[key] = numpy.array(array) |
|
225 | data[key] = numpy.array(array) | |
226 | elif isinstance(value, list): |
|
226 | elif isinstance(value, list): | |
227 | array = [] |
|
227 | array = [] | |
228 | for ch in value: |
|
228 | for ch in value: | |
229 | array.append(self.fp[ch]) |
|
229 | array.append(self.fp[ch][()]) | |
230 | data[key] = numpy.array(array) |
|
230 | data[key] = numpy.array(array) | |
231 | else: |
|
231 | else: | |
232 | grp = self.fp['Data'] |
|
232 | grp = self.fp['Data'] | |
233 | for name in grp: |
|
233 | for name in grp: | |
234 | if isinstance(grp[name], h5py.Dataset): |
|
234 | if isinstance(grp[name], h5py.Dataset): | |
235 | array = grp[name] |
|
235 | array = grp[name][()] | |
236 | elif isinstance(grp[name], h5py.Group): |
|
236 | elif isinstance(grp[name], h5py.Group): | |
237 | array = [] |
|
237 | array = [] | |
238 | for ch in grp[name]: |
|
238 | for ch in grp[name]: | |
239 | array.append(grp[name][ch]) |
|
239 | array.append(grp[name][ch][()]) | |
240 | array = numpy.array(array) |
|
240 | array = numpy.array(array) | |
241 | else: |
|
241 | else: | |
242 | log.warning('Unknown type: {}'.format(name)) |
|
242 | log.warning('Unknown type: {}'.format(name)) | |
243 |
|
243 | |||
244 | if name in self.description: |
|
244 | if name in self.description: | |
245 | key = self.description[name] |
|
245 | key = self.description[name] | |
246 | else: |
|
246 | else: | |
247 | key = name |
|
247 | key = name | |
248 | data[key] = array |
|
248 | data[key] = array | |
249 |
|
249 | |||
250 | self.data = data |
|
250 | self.data = data | |
251 | return |
|
251 | return | |
252 |
|
252 | |||
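For reference, __readData above accepts three shapes of `description['Data']` entry: a string naming a dataset, a string naming a group (whose children are stacked into one array), and a list of dataset names. The sketch below shows the same dispatch outside the class; the file and dataset names are hypothetical.

import numpy
import h5py

def read_entry(fp, value):
    '''Resolve one description['Data'] value against an open h5py file.'''
    if isinstance(value, str):
        if isinstance(fp[value], h5py.Dataset):
            return fp[value][()]                              # single dataset
        elif isinstance(fp[value], h5py.Group):
            return numpy.array([fp[value][ch][()]             # group: stack its children
                                for ch in fp[value]])
    elif isinstance(value, list):
        return numpy.array([fp[ch][()] for ch in value])      # explicit list of datasets

# usage (hypothetical names):
# with h5py.File('D2019001000.hdf5', 'r') as fp:
#     winds = read_entry(fp, 'winds')        # a group holding u/v/w datasets
#     utime = read_entry(fp, 'timestamps')   # a plain dataset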
253 | def getData(self): |
|
253 | def getData(self): | |
254 |
|
254 | |||
255 | for attr in self.data: |
|
255 | for attr in self.data: | |
256 | if self.data[attr].ndim == 1: |
|
256 | if self.data[attr].ndim == 1: | |
257 | setattr(self.dataOut, attr, self.data[attr][self.blockIndex]) |
|
257 | setattr(self.dataOut, attr, self.data[attr][self.blockIndex]) | |
258 | else: |
|
258 | else: | |
259 | setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex]) |
|
259 | setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex]) | |
260 |
|
260 | |||
261 | self.dataOut.flagNoData = False |
|
261 | self.dataOut.flagNoData = False | |
262 | self.blockIndex += 1 |
|
262 | self.blockIndex += 1 | |
263 |
|
263 | |||
264 | log.log("Block No. {}/{} -> {}".format( |
|
264 | log.log("Block No. {}/{} -> {}".format( | |
265 | self.blockIndex, |
|
265 | self.blockIndex, | |
266 | self.blocksPerFile, |
|
266 | self.blocksPerFile, | |
267 | self.dataOut.datatime.ctime()), self.name) |
|
267 | self.dataOut.datatime.ctime()), self.name) | |
268 |
|
268 | |||
269 | return |
|
269 | return | |
270 |
|
270 | |||
271 | def run(self, **kwargs): |
|
271 | def run(self, **kwargs): | |
272 |
|
272 | |||
273 | if not(self.isConfig): |
|
273 | if not(self.isConfig): | |
274 | self.setup(**kwargs) |
|
274 | self.setup(**kwargs) | |
275 | self.isConfig = True |
|
275 | self.isConfig = True | |
276 |
|
276 | |||
277 | if self.blockIndex == self.blocksPerFile: |
|
277 | if self.blockIndex == self.blocksPerFile: | |
278 | self.setNextFile() |
|
278 | self.setNextFile() | |
279 |
|
279 | |||
280 | self.getData() |
|
280 | self.getData() | |
281 |
|
281 | |||
282 | return |
|
282 | return | |
283 |
|
283 | |||
284 | @MPDecorator |
|
284 | @MPDecorator | |
285 | class HDFWriter(Operation): |
|
285 | class HDFWriter(Operation): | |
286 | """Operation to write HDF5 files. |
|
286 | """Operation to write HDF5 files. | |
287 |
|
287 | |||
288 | The HDF5 file contains by default two groups Data and Metadata where |
|
288 | The HDF5 file contains by default two groups Data and Metadata where | |
289 | you can save any `dataOut` attribute specified by `dataList` and `metadataList` |
|
289 | you can save any `dataOut` attribute specified by `dataList` and `metadataList` | |
290 | parameters; data attributes are normally time dependent, whereas the metadata |
|
290 | parameters; data attributes are normally time dependent, whereas the metadata |
291 | attributes are not. |
|
291 | attributes are not. |
292 | It is possible to customize the structure of the HDF5 file with the |
|
292 | It is possible to customize the structure of the HDF5 file with the | |
293 | optional `description` parameter; see the examples below. |
|
293 | optional `description` parameter; see the examples below. |
294 |
|
294 | |||
295 | Parameters: |
|
295 | Parameters: | |
296 | ----------- |
|
296 | ----------- | |
297 | path : str |
|
297 | path : str | |
298 | Path where files will be saved. |
|
298 | Path where files will be saved. | |
299 | blocksPerFile : int |
|
299 | blocksPerFile : int | |
300 | Number of blocks per file |
|
300 | Number of blocks per file | |
301 | metadataList : list |
|
301 | metadataList : list | |
302 | List of the dataOut attributes that will be saved as metadata |
|
302 | List of the dataOut attributes that will be saved as metadata | |
303 | dataList : list |
|
303 | dataList : list |
304 | List of the dataOut attributes that will be saved as data |
|
304 | List of the dataOut attributes that will be saved as data | |
305 | setType : optional |
|
305 | setType : optional |
306 | If given, the file set number encodes the timestamp (hour and minute) of the data |
|
306 | If given, the file set number encodes the timestamp (hour and minute) of the data |
307 | description : dict, optional |
|
307 | description : dict, optional | |
308 | Dictionary with the desired description of the HDF5 file |
|
308 | Dictionary with the desired description of the HDF5 file | |
309 |
|
309 | |||
310 | Examples |
|
310 | Examples | |
311 | -------- |
|
311 | -------- | |
312 |
|
312 | |||
313 | desc = { |
|
313 | desc = { | |
314 | 'data_output': {'winds': ['z', 'w', 'v']}, |
|
314 | 'data_output': {'winds': ['z', 'w', 'v']}, | |
315 | 'utctime': 'timestamps', |
|
315 | 'utctime': 'timestamps', | |
316 | 'heightList': 'heights' |
|
316 | 'heightList': 'heights' | |
317 | } |
|
317 | } | |
318 | desc = { |
|
318 | desc = { | |
319 | 'data_output': ['z', 'w', 'v'], |
|
319 | 'data_output': ['z', 'w', 'v'], | |
320 | 'utctime': 'timestamps', |
|
320 | 'utctime': 'timestamps', | |
321 | 'heightList': 'heights' |
|
321 | 'heightList': 'heights' | |
322 | } |
|
322 | } | |
323 | desc = { |
|
323 | desc = { | |
324 | 'Data': { |
|
324 | 'Data': { | |
325 | 'data_output': 'winds', |
|
325 | 'data_output': 'winds', | |
326 | 'utctime': 'timestamps' |
|
326 | 'utctime': 'timestamps' | |
327 | }, |
|
327 | }, | |
328 | 'Metadata': { |
|
328 | 'Metadata': { | |
329 | 'heightList': 'heights' |
|
329 | 'heightList': 'heights' | |
330 | } |
|
330 | } | |
331 | } |
|
331 | } | |
332 |
|
332 | |||
333 | writer = proc_unit.addOperation(name='HDFWriter') |
|
333 | writer = proc_unit.addOperation(name='HDFWriter') | |
334 | writer.addParameter(name='path', value='/path/to/file') |
|
334 | writer.addParameter(name='path', value='/path/to/file') | |
335 | writer.addParameter(name='blocksPerFile', value='32') |
|
335 | writer.addParameter(name='blocksPerFile', value='32') | |
336 | writer.addParameter(name='metadataList', value='heightList,timeZone') |
|
336 | writer.addParameter(name='metadataList', value='heightList,timeZone') | |
337 | writer.addParameter(name='dataList',value='data_output,utctime') |
|
337 | writer.addParameter(name='dataList',value='data_output,utctime') | |
338 | # writer.addParameter(name='description',value=json.dumps(desc)) |
|
338 | # writer.addParameter(name='description',value=json.dumps(desc)) | |
339 |
|
339 | |||
340 | """ |
|
340 | """ | |
341 |
|
341 | |||
342 | ext = ".hdf5" |
|
342 | ext = ".hdf5" | |
343 | optchar = "D" |
|
343 | optchar = "D" | |
344 | filename = None |
|
344 | filename = None | |
345 | path = None |
|
345 | path = None | |
346 | setFile = None |
|
346 | setFile = None | |
347 | fp = None |
|
347 | fp = None | |
348 | firsttime = True |
|
348 | firsttime = True | |
349 | #Configurations |
|
349 | #Configurations | |
350 | blocksPerFile = None |
|
350 | blocksPerFile = None | |
351 | blockIndex = None |
|
351 | blockIndex = None | |
352 | dataOut = None |
|
352 | dataOut = None | |
353 | #Data Arrays |
|
353 | #Data Arrays | |
354 | dataList = None |
|
354 | dataList = None | |
355 | metadataList = None |
|
355 | metadataList = None | |
356 | currentDay = None |
|
356 | currentDay = None | |
357 | lastTime = None |
|
357 | lastTime = None | |
358 |
|
358 | |||
359 | def __init__(self): |
|
359 | def __init__(self): | |
360 |
|
360 | |||
361 | Operation.__init__(self) |
|
361 | Operation.__init__(self) | |
362 | return |
|
362 | return | |
363 |
|
363 | |||
364 | def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None): |
|
364 | def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None): | |
365 | self.path = path |
|
365 | self.path = path | |
366 | self.blocksPerFile = blocksPerFile |
|
366 | self.blocksPerFile = blocksPerFile | |
367 | self.metadataList = metadataList |
|
367 | self.metadataList = metadataList | |
368 | self.dataList = [s.strip() for s in dataList] |
|
368 | self.dataList = [s.strip() for s in dataList] | |
369 | self.setType = setType |
|
369 | self.setType = setType | |
370 | self.description = description |
|
370 | self.description = description | |
371 |
|
371 | |||
372 | if self.metadataList is None: |
|
372 | if self.metadataList is None: | |
373 | self.metadataList = self.dataOut.metadata_list |
|
373 | self.metadataList = self.dataOut.metadata_list | |
374 |
|
374 | |||
375 | tableList = [] |
|
375 | tableList = [] | |
376 | dsList = [] |
|
376 | dsList = [] | |
377 |
|
377 | |||
378 | for i in range(len(self.dataList)): |
|
378 | for i in range(len(self.dataList)): | |
379 | dsDict = {} |
|
379 | dsDict = {} | |
380 | if hasattr(self.dataOut, self.dataList[i]): |
|
380 | if hasattr(self.dataOut, self.dataList[i]): | |
381 | dataAux = getattr(self.dataOut, self.dataList[i]) |
|
381 | dataAux = getattr(self.dataOut, self.dataList[i]) | |
382 | dsDict['variable'] = self.dataList[i] |
|
382 | dsDict['variable'] = self.dataList[i] | |
383 | else: |
|
383 | else: | |
384 | log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name) |
|
384 | log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name) |
385 | continue |
|
385 | continue | |
386 |
|
386 | |||
387 | if dataAux is None: |
|
387 | if dataAux is None: | |
388 | continue |
|
388 | continue | |
389 | elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)): |
|
389 | elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)): | |
390 | dsDict['nDim'] = 0 |
|
390 | dsDict['nDim'] = 0 | |
391 | else: |
|
391 | else: | |
392 | dsDict['nDim'] = len(dataAux.shape) |
|
392 | dsDict['nDim'] = len(dataAux.shape) | |
393 | dsDict['shape'] = dataAux.shape |
|
393 | dsDict['shape'] = dataAux.shape | |
394 | dsDict['dsNumber'] = dataAux.shape[0] |
|
394 | dsDict['dsNumber'] = dataAux.shape[0] | |
395 | dsDict['dtype'] = dataAux.dtype |
|
395 | dsDict['dtype'] = dataAux.dtype | |
396 |
|
396 | |||
397 | dsList.append(dsDict) |
|
397 | dsList.append(dsDict) | |
398 |
|
398 | |||
399 | self.dsList = dsList |
|
399 | self.dsList = dsList | |
400 | self.currentDay = self.dataOut.datatime.date() |
|
400 | self.currentDay = self.dataOut.datatime.date() | |
401 |
|
401 | |||
402 | def timeFlag(self): |
|
402 | def timeFlag(self): | |
403 | currentTime = self.dataOut.utctime |
|
403 | currentTime = self.dataOut.utctime | |
404 | timeTuple = time.localtime(currentTime) |
|
404 | timeTuple = time.localtime(currentTime) | |
405 | dataDay = timeTuple.tm_yday |
|
405 | dataDay = timeTuple.tm_yday | |
406 |
|
406 | |||
407 | if self.lastTime is None: |
|
407 | if self.lastTime is None: | |
408 | self.lastTime = currentTime |
|
408 | self.lastTime = currentTime | |
409 | self.currentDay = dataDay |
|
409 | self.currentDay = dataDay | |
410 | return False |
|
410 | return False | |
411 |
|
411 | |||
412 | timeDiff = currentTime - self.lastTime |
|
412 | timeDiff = currentTime - self.lastTime | |
413 |
|
413 | |||
414 | #If the day changed or the gap between consecutive samples exceeds the limit |
|
414 | #If the day changed or the gap between consecutive samples exceeds the limit |
415 | if dataDay != self.currentDay: |
|
415 | if dataDay != self.currentDay: | |
416 | self.currentDay = dataDay |
|
416 | self.currentDay = dataDay | |
417 | return True |
|
417 | return True | |
418 | elif timeDiff > 3*60*60: |
|
418 | elif timeDiff > 3*60*60: | |
419 | self.lastTime = currentTime |
|
419 | self.lastTime = currentTime | |
420 | return True |
|
420 | return True | |
421 | else: |
|
421 | else: | |
422 | self.lastTime = currentTime |
|
422 | self.lastTime = currentTime | |
423 | return False |
|
423 | return False | |
424 |
|
424 | |||
425 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, |
|
425 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, | |
426 | dataList=[], setType=None, description={}): |
|
426 | dataList=[], setType=None, description={}): | |
427 |
|
427 | |||
428 | self.dataOut = dataOut |
|
428 | self.dataOut = dataOut | |
429 | if not(self.isConfig): |
|
429 | if not(self.isConfig): | |
430 | self.setup(path=path, blocksPerFile=blocksPerFile, |
|
430 | self.setup(path=path, blocksPerFile=blocksPerFile, | |
431 | metadataList=metadataList, dataList=dataList, |
|
431 | metadataList=metadataList, dataList=dataList, | |
432 | setType=setType, description=description) |
|
432 | setType=setType, description=description) | |
433 |
|
433 | |||
434 | self.isConfig = True |
|
434 | self.isConfig = True | |
435 | self.setNextFile() |
|
435 | self.setNextFile() | |
436 |
|
436 | |||
437 | self.putData() |
|
437 | self.putData() | |
438 | return |
|
438 | return | |
439 |
|
439 | |||
440 | def setNextFile(self): |
|
440 | def setNextFile(self): | |
441 |
|
441 | |||
442 | ext = self.ext |
|
442 | ext = self.ext | |
443 | path = self.path |
|
443 | path = self.path | |
444 | setFile = self.setFile |
|
444 | setFile = self.setFile | |
445 |
|
445 | |||
446 | timeTuple = time.localtime(self.dataOut.utctime) |
|
446 | timeTuple = time.localtime(self.dataOut.utctime) | |
447 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) |
|
447 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
448 | fullpath = os.path.join(path, subfolder) |
|
448 | fullpath = os.path.join(path, subfolder) | |
449 |
|
449 | |||
450 | if os.path.exists(fullpath): |
|
450 | if os.path.exists(fullpath): | |
451 | filesList = os.listdir(fullpath) |
|
451 | filesList = os.listdir(fullpath) | |
452 | filesList = [k for k in filesList if k.startswith(self.optchar)] |
|
452 | filesList = [k for k in filesList if k.startswith(self.optchar)] | |
453 | if len( filesList ) > 0: |
|
453 | if len( filesList ) > 0: | |
454 | filesList = sorted(filesList, key=str.lower) |
|
454 | filesList = sorted(filesList, key=str.lower) | |
455 | filen = filesList[-1] |
|
455 | filen = filesList[-1] | |
456 | # the filename must have the following format |
|
456 | # the filename must have the following format |
457 | # 0 1234 567 89A BCDE (hex) |
|
457 | # 0 1234 567 89A BCDE (hex) | |
458 | # x YYYY DDD SSS .ext |
|
458 | # x YYYY DDD SSS .ext | |
459 | if isNumber(filen[8:11]): |
|
459 | if isNumber(filen[8:11]): | |
460 | setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file |
|
460 | setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file |
461 | else: |
|
461 | else: | |
462 | setFile = -1 |
|
462 | setFile = -1 | |
463 | else: |
|
463 | else: | |
464 | setFile = -1 #initialize the set counter |
|
464 | setFile = -1 #initialize the set counter |
465 | else: |
|
465 | else: | |
466 | os.makedirs(fullpath) |
|
466 | os.makedirs(fullpath) | |
467 | setFile = -1 #initialize the set counter |
|
467 | setFile = -1 #initialize the set counter |
468 |
|
468 | |||
469 | if self.setType is None: |
|
469 | if self.setType is None: | |
470 | setFile += 1 |
|
470 | setFile += 1 | |
471 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, |
|
471 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, | |
472 | timeTuple.tm_year, |
|
472 | timeTuple.tm_year, | |
473 | timeTuple.tm_yday, |
|
473 | timeTuple.tm_yday, | |
474 | setFile, |
|
474 | setFile, | |
475 | ext ) |
|
475 | ext ) | |
476 | else: |
|
476 | else: | |
477 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min |
|
477 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min | |
478 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, |
|
478 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, | |
479 | timeTuple.tm_year, |
|
479 | timeTuple.tm_year, | |
480 | timeTuple.tm_yday, |
|
480 | timeTuple.tm_yday, | |
481 | setFile, |
|
481 | setFile, | |
482 | ext ) |
|
482 | ext ) | |
483 |
|
483 | |||
484 | self.filename = os.path.join( path, subfolder, file ) |
|
484 | self.filename = os.path.join( path, subfolder, file ) | |
485 |
|
485 | |||
486 | #Setting HDF5 File |
|
486 | #Setting HDF5 File | |
487 | self.fp = h5py.File(self.filename, 'w') |
|
487 | self.fp = h5py.File(self.filename, 'w') | |
488 | #write metadata |
|
488 | #write metadata | |
489 | self.writeMetadata(self.fp) |
|
489 | self.writeMetadata(self.fp) | |
490 | #Write data |
|
490 | #Write data | |
491 | self.writeData(self.fp) |
|
491 | self.writeData(self.fp) | |
492 |
|
492 | |||
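setNextFile above names each output file from the local time of the current block: a 'd<year><doy>' subfolder and a 'D<year><doy><set>.hdf5' file, where the set number is either a per-day sequence or, when setType is given, hour*60 + minute. A short sketch of the same naming convention, with a hypothetical timestamp:

import time

optchar, ext = 'D', '.hdf5'
utctime = 1546351445.0                                   # hypothetical block timestamp
tt = time.localtime(utctime)

subfolder = 'd%4.4d%3.3d' % (tt.tm_year, tt.tm_yday)     # e.g. d2019001

# default (setType is None): 3-digit sequential set number per day
setFile = 0
name_seq = '%s%4.4d%3.3d%03d%s' % (optchar, tt.tm_year, tt.tm_yday, setFile, ext)

# setType given: 4-digit set number encoding hour*60 + minute
setFile = tt.tm_hour * 60 + tt.tm_min
name_time = '%s%4.4d%3.3d%04d%s' % (optchar, tt.tm_year, tt.tm_yday, setFile, ext)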
493 | def getLabel(self, name, x=None): |
|
493 | def getLabel(self, name, x=None): | |
494 |
|
494 | |||
495 | if x is None: |
|
495 | if x is None: | |
496 | if 'Data' in self.description: |
|
496 | if 'Data' in self.description: | |
497 | data = self.description['Data'] |
|
497 | data = self.description['Data'] | |
498 | if 'Metadata' in self.description: |
|
498 | if 'Metadata' in self.description: | |
499 | data.update(self.description['Metadata']) |
|
499 | data.update(self.description['Metadata']) | |
500 | else: |
|
500 | else: | |
501 | data = self.description |
|
501 | data = self.description | |
502 | if name in data: |
|
502 | if name in data: | |
503 | if isinstance(data[name], str): |
|
503 | if isinstance(data[name], str): | |
504 | return data[name] |
|
504 | return data[name] | |
505 | elif isinstance(data[name], list): |
|
505 | elif isinstance(data[name], list): | |
506 | return None |
|
506 | return None | |
507 | elif isinstance(data[name], dict): |
|
507 | elif isinstance(data[name], dict): | |
508 | for key, value in data[name].items(): |
|
508 | for key, value in data[name].items(): | |
509 | return key |
|
509 | return key | |
510 | return name |
|
510 | return name | |
511 | else: |
|
511 | else: | |
512 | if 'Metadata' in self.description: |
|
512 | if 'Metadata' in self.description: | |
513 | meta = self.description['Metadata'] |
|
513 | meta = self.description['Metadata'] | |
514 | else: |
|
514 | else: | |
515 | meta = self.description |
|
515 | meta = self.description | |
516 | if name in meta: |
|
516 | if name in meta: | |
517 | if isinstance(meta[name], list): |
|
517 | if isinstance(meta[name], list): | |
518 | return meta[name][x] |
|
518 | return meta[name][x] | |
519 | elif isinstance(meta[name], dict): |
|
519 | elif isinstance(meta[name], dict): | |
520 | for key, value in meta[name].items(): |
|
520 | for key, value in meta[name].items(): | |
521 | return value[x] |
|
521 | return value[x] | |
522 | if 'cspc' in name: |
|
522 | if 'cspc' in name: | |
523 | return 'pair{:02d}'.format(x) |
|
523 | return 'pair{:02d}'.format(x) | |
524 | else: |
|
524 | else: | |
525 | return 'channel{:02d}'.format(x) |
|
525 | return 'channel{:02d}'.format(x) | |
526 |
|
526 | |||
527 | def writeMetadata(self, fp): |
|
527 | def writeMetadata(self, fp): | |
528 |
|
528 | |||
529 | if self.description: |
|
529 | if self.description: | |
530 | if 'Metadata' in self.description: |
|
530 | if 'Metadata' in self.description: | |
531 | grp = fp.create_group('Metadata') |
|
531 | grp = fp.create_group('Metadata') | |
532 | else: |
|
532 | else: | |
533 | grp = fp |
|
533 | grp = fp | |
534 | else: |
|
534 | else: | |
535 | grp = fp.create_group('Metadata') |
|
535 | grp = fp.create_group('Metadata') | |
536 |
|
536 | |||
537 | for i in range(len(self.metadataList)): |
|
537 | for i in range(len(self.metadataList)): | |
538 | if not hasattr(self.dataOut, self.metadataList[i]): |
|
538 | if not hasattr(self.dataOut, self.metadataList[i]): | |
539 | log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name) |
|
539 | log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name) | |
540 | continue |
|
540 | continue | |
541 | value = getattr(self.dataOut, self.metadataList[i]) |
|
541 | value = getattr(self.dataOut, self.metadataList[i]) | |
542 | if isinstance(value, bool): |
|
542 | if isinstance(value, bool): | |
543 | if value is True: |
|
543 | if value is True: | |
544 | value = 1 |
|
544 | value = 1 | |
545 | else: |
|
545 | else: | |
546 | value = 0 |
|
546 | value = 0 | |
547 | grp.create_dataset(self.getLabel(self.metadataList[i]), data=value) |
|
547 | grp.create_dataset(self.getLabel(self.metadataList[i]), data=value) | |
548 | return |
|
548 | return | |
549 |
|
549 | |||
550 | def writeData(self, fp): |
|
550 | def writeData(self, fp): | |
551 |
|
551 | |||
552 | if self.description: |
|
552 | if self.description: | |
553 | if 'Data' in self.description: |
|
553 | if 'Data' in self.description: | |
554 | grp = fp.create_group('Data') |
|
554 | grp = fp.create_group('Data') | |
555 | else: |
|
555 | else: | |
556 | grp = fp |
|
556 | grp = fp | |
557 | else: |
|
557 | else: | |
558 | grp = fp.create_group('Data') |
|
558 | grp = fp.create_group('Data') | |
559 |
|
559 | |||
560 | dtsets = [] |
|
560 | dtsets = [] | |
561 | data = [] |
|
561 | data = [] | |
562 |
|
562 | |||
563 | for dsInfo in self.dsList: |
|
563 | for dsInfo in self.dsList: | |
564 | if dsInfo['nDim'] == 0: |
|
564 | if dsInfo['nDim'] == 0: | |
565 | ds = grp.create_dataset( |
|
565 | ds = grp.create_dataset( | |
566 | self.getLabel(dsInfo['variable']), |
|
566 | self.getLabel(dsInfo['variable']), | |
567 | (self.blocksPerFile, ), |
|
567 | (self.blocksPerFile, ), | |
568 | chunks=True, |
|
568 | chunks=True, | |
569 | dtype=numpy.float64) |
|
569 | dtype=numpy.float64) | |
570 | dtsets.append(ds) |
|
570 | dtsets.append(ds) | |
571 | data.append((dsInfo['variable'], -1)) |
|
571 | data.append((dsInfo['variable'], -1)) | |
572 | else: |
|
572 | else: | |
573 | label = self.getLabel(dsInfo['variable']) |
|
573 | label = self.getLabel(dsInfo['variable']) | |
574 | if label is not None: |
|
574 | if label is not None: | |
575 | sgrp = grp.create_group(label) |
|
575 | sgrp = grp.create_group(label) | |
576 | else: |
|
576 | else: | |
577 | sgrp = grp |
|
577 | sgrp = grp | |
578 | for i in range(dsInfo['dsNumber']): |
|
578 | for i in range(dsInfo['dsNumber']): | |
579 | ds = sgrp.create_dataset( |
|
579 | ds = sgrp.create_dataset( | |
580 | self.getLabel(dsInfo['variable'], i), |
|
580 | self.getLabel(dsInfo['variable'], i), | |
581 | (self.blocksPerFile, ) + dsInfo['shape'][1:], |
|
581 | (self.blocksPerFile, ) + dsInfo['shape'][1:], | |
582 | chunks=True, |
|
582 | chunks=True, | |
583 | dtype=dsInfo['dtype']) |
|
583 | dtype=dsInfo['dtype']) | |
584 | dtsets.append(ds) |
|
584 | dtsets.append(ds) | |
585 | data.append((dsInfo['variable'], i)) |
|
585 | data.append((dsInfo['variable'], i)) | |
586 | fp.flush() |
|
586 | fp.flush() | |
587 |
|
587 | |||
588 | log.log('Creating file: {}'.format(fp.filename), self.name) |
|
588 | log.log('Creating file: {}'.format(fp.filename), self.name) | |
589 |
|
589 | |||
590 | self.ds = dtsets |
|
590 | self.ds = dtsets | |
591 | self.data = data |
|
591 | self.data = data | |
592 | self.firsttime = True |
|
592 | self.firsttime = True | |
593 | self.blockIndex = 0 |
|
593 | self.blockIndex = 0 | |
594 | return |
|
594 | return | |
595 |
|
595 | |||
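writeData above pre-creates one dataset of length blocksPerFile for every 0-D variable and one sub-group with a dataset per channel for N-D variables; putData then fills row blockIndex of each dataset. The following h5py sketch, with a hypothetical file name and sizes, produces the same Data/Metadata layout:

import numpy
import h5py

blocksPerFile, nchannels, nheights = 10, 3, 100

with h5py.File('example.hdf5', 'w') as fp:                 # hypothetical file name
    meta = fp.create_group('Metadata')
    meta.create_dataset('heightList', data=numpy.arange(nheights) * 0.15)

    data = fp.create_group('Data')
    # 0-D (one value per block) variable -> a single dataset of length blocksPerFile
    data.create_dataset('utctime', (blocksPerFile,), chunks=True, dtype=numpy.float64)
    # N-D variable -> a sub-group with one dataset per channel
    sgrp = data.create_group('spc')
    for ch in range(nchannels):
        sgrp.create_dataset('channel{:02d}'.format(ch),
                            (blocksPerFile, nheights), chunks=True,
                            dtype=numpy.float32)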
596 | def putData(self): |
|
596 | def putData(self): | |
597 |
|
597 | |||
598 | if (self.blockIndex == self.blocksPerFile) or self.timeFlag(): |
|
598 | if (self.blockIndex == self.blocksPerFile) or self.timeFlag(): | |
599 | self.closeFile() |
|
599 | self.closeFile() | |
600 | self.setNextFile() |
|
600 | self.setNextFile() | |
601 |
|
601 | |||
602 | for i, ds in enumerate(self.ds): |
|
602 | for i, ds in enumerate(self.ds): | |
603 | attr, ch = self.data[i] |
|
603 | attr, ch = self.data[i] | |
604 | if ch == -1: |
|
604 | if ch == -1: | |
605 | ds[self.blockIndex] = getattr(self.dataOut, attr) |
|
605 | ds[self.blockIndex] = getattr(self.dataOut, attr) | |
606 | else: |
|
606 | else: | |
607 | ds[self.blockIndex] = getattr(self.dataOut, attr)[ch] |
|
607 | ds[self.blockIndex] = getattr(self.dataOut, attr)[ch] | |
608 |
|
608 | |||
609 | self.fp.flush() |
|
609 | self.fp.flush() | |
610 | self.blockIndex += 1 |
|
610 | self.blockIndex += 1 | |
611 | log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name) |
|
611 | log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name) | |
612 |
|
612 | |||
613 | return |
|
613 | return | |
614 |
|
614 | |||
615 | def closeFile(self): |
|
615 | def closeFile(self): | |
616 |
|
616 | |||
617 | if self.blockIndex != self.blocksPerFile: |
|
617 | if self.blockIndex != self.blocksPerFile: | |
618 | for ds in self.ds: |
|
618 | for ds in self.ds: | |
619 | ds.resize(self.blockIndex, axis=0) |
|
619 | ds.resize(self.blockIndex, axis=0) | |
620 |
|
620 | |||
621 | if self.fp: |
|
621 | if self.fp: | |
622 | self.fp.flush() |
|
622 | self.fp.flush() | |
623 | self.fp.close() |
|
623 | self.fp.close() | |
624 |
|
624 | |||
625 | def close(self): |
|
625 | def close(self): | |
626 |
|
626 | |||
627 | self.closeFile() |
|
627 | self.closeFile() |
@@ -1,402 +1,399 | |||||
1 | ''' |
|
1 | ''' | |
2 | Created on Oct 24, 2016 |
|
2 | Created on Oct 24, 2016 | |
3 |
|
3 | |||
4 | @author: roj- LouVD |
|
4 | @author: roj- LouVD | |
5 | ''' |
|
5 | ''' | |
6 |
|
6 | |||
7 | import numpy |
|
7 | import numpy | |
8 | import copy |
|
|||
9 | import datetime |
|
8 | import datetime | |
10 | import time |
|
9 | import time | |
11 | from time import gmtime |
|
|||
12 |
|
10 | |||
13 | from numpy import transpose |
|
11 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
14 |
|
||||
15 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator |
|
|||
16 | from schainpy.model.data.jrodata import Parameters |
|
12 | from schainpy.model.data.jrodata import Parameters | |
17 |
|
13 | |||
18 |
|
14 | |||
19 | class BLTRParametersProc(ProcessingUnit): |
|
15 | class BLTRParametersProc(ProcessingUnit): | |
20 | ''' |
|
16 | ''' | |
21 | Processing unit for BLTR parameters data (winds) |
|
17 | Processing unit for BLTR parameters data (winds) | |
22 |
|
18 | |||
23 | Inputs: |
|
19 | Inputs: | |
24 | self.dataOut.nmodes - Number of operation modes |
|
20 | self.dataOut.nmodes - Number of operation modes | |
25 | self.dataOut.nchannels - Number of channels |
|
21 | self.dataOut.nchannels - Number of channels | |
26 | self.dataOut.nranges - Number of ranges |
|
22 | self.dataOut.nranges - Number of ranges | |
27 |
|
23 | |||
28 | self.dataOut.data_snr - SNR array |
|
24 | self.dataOut.data_snr - SNR array | |
29 | self.dataOut.data_output - Zonal, Vertical and Meridional velocity array |
|
25 | self.dataOut.data_output - Zonal, Vertical and Meridional velocity array | |
30 | self.dataOut.height - Height array (km) |
|
26 | self.dataOut.height - Height array (km) | |
31 | self.dataOut.time - Time array (seconds) |
|
27 | self.dataOut.time - Time array (seconds) | |
32 |
|
28 | |||
33 | self.dataOut.fileIndex - Index of the file currently read |
|
29 | self.dataOut.fileIndex - Index of the file currently read |
34 | self.dataOut.lat - Latitude coordinate of BLTR location |
|
30 | self.dataOut.lat - Latitude coordinate of BLTR location | |
35 |
|
31 | |||
36 | self.dataOut.doy - Experiment doy (number of the day in the current year) |
|
32 | self.dataOut.doy - Experiment doy (number of the day in the current year) | |
37 | self.dataOut.month - Experiment month |
|
33 | self.dataOut.month - Experiment month | |
38 | self.dataOut.day - Experiment day |
|
34 | self.dataOut.day - Experiment day | |
39 | self.dataOut.year - Experiment year |
|
35 | self.dataOut.year - Experiment year | |
40 | ''' |
|
36 | ''' | |
41 |
|
37 | |||
42 | def __init__(self): |
|
38 | def __init__(self): | |
43 | ''' |
|
39 | ''' | |
44 | Inputs: None |
|
40 | Inputs: None | |
45 | ''' |
|
41 | ''' | |
46 | ProcessingUnit.__init__(self) |
|
42 | ProcessingUnit.__init__(self) | |
47 | self.dataOut = Parameters() |
|
43 | self.dataOut = Parameters() | |
48 |
|
44 | |||
49 | def setup(self, mode): |
|
45 | def setup(self, mode): | |
50 | ''' |
|
46 | ''' | |
51 | ''' |
|
47 | ''' | |
52 | self.dataOut.mode = mode |
|
48 | self.dataOut.mode = mode | |
53 |
|
49 | |||
54 | def run(self, mode, snr_threshold=None): |
|
50 | def run(self, mode, snr_threshold=None): | |
55 | ''' |
|
51 | ''' | |
56 | Inputs: |
|
52 | Inputs: | |
57 | mode = High resolution (0) or Low resolution (1) data |
|
53 | mode = High resolution (0) or Low resolution (1) data | |
58 | snr_threshold = snr filter value |
|
54 | snr_threshold = snr filter value | |
59 | ''' |
|
55 | ''' | |
60 |
|
56 | |||
61 | if not self.isConfig: |
|
57 | if not self.isConfig: | |
62 | self.setup(mode) |
|
58 | self.setup(mode) | |
63 | self.isConfig = True |
|
59 | self.isConfig = True | |
64 |
|
60 | |||
65 | if self.dataIn.type == 'Parameters': |
|
61 | if self.dataIn.type == 'Parameters': | |
66 | self.dataOut.copy(self.dataIn) |
|
62 | self.dataOut.copy(self.dataIn) | |
67 |
|
63 | |||
68 | self.dataOut.data_param = self.dataOut.data[mode] |
|
64 | self.dataOut.data_param = self.dataOut.data[mode] | |
69 | self.dataOut.heightList = self.dataOut.height[0] |
|
65 | self.dataOut.heightList = self.dataOut.height[0] | |
70 | self.dataOut.data_snr = self.dataOut.data_snr[mode] |
|
66 | self.dataOut.data_snr = self.dataOut.data_snr[mode] | |
|
67 | SNRavg = numpy.average(self.dataOut.data_snr, axis=0) | |||
|
68 | SNRavgdB = 10*numpy.log10(SNRavg) | |||
|
69 | self.dataOut.data_snr_avg_db = SNRavgdB.reshape(1, *SNRavgdB.shape) | |||
71 |
|
70 | |||
72 | if snr_threshold is not None: |
|
71 | if snr_threshold is not None: | |
73 | SNRavg = numpy.average(self.dataOut.data_snr, axis=0) |
|
|||
74 | SNRavgdB = 10*numpy.log10(SNRavg) |
|
|||
75 | for i in range(3): |
|
72 | for i in range(3): | |
76 | self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan |
|
73 | self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan | |
77 |
|
74 | |||
78 | # TODO |
|
75 | # TODO | |
79 |
|
76 | |||
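The snr_threshold branch above averages the linear SNR over channels, converts it to dB and blanks the three wind components wherever the averaged SNR is at or below the threshold. A standalone sketch with synthetic arrays and a hypothetical threshold:

import numpy

nchannels, nheights = 3, 50
data_snr = numpy.random.rand(nchannels, nheights)          # hypothetical linear SNR
data_param = numpy.random.randn(3, nheights)               # zonal, meridional, vertical

SNRavg = numpy.average(data_snr, axis=0)
SNRavgdB = 10 * numpy.log10(SNRavg)

snr_threshold = -10.0                                       # dB, hypothetical
for i in range(3):
    data_param[i][SNRavgdB <= snr_threshold] = numpy.nan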
80 | class OutliersFilter(Operation): |
|
77 | class OutliersFilter(Operation): | |
81 |
|
78 | |||
82 | def __init__(self): |
|
79 | def __init__(self): | |
83 | ''' |
|
80 | ''' | |
84 | ''' |
|
81 | ''' | |
85 | Operation.__init__(self) |
|
82 | Operation.__init__(self) | |
86 |
|
83 | |||
87 | def run(self, svalue, svalue2, method, factor, filter, npoints=9): |
|
84 | def run(self, svalue, svalue2, method, factor, filter, npoints=9): |
88 | ''' |
|
85 | ''' | |
89 | Inputs: |
|
86 | Inputs: | |
90 | svalue - string selecting the velocity array ('zonal', 'meridional' or 'vertical') |
|
87 | svalue - string selecting the velocity array ('zonal', 'meridional' or 'vertical') |
91 | svalue2 - string choosing the filtering axis (e.g. 'inHeight') |
|
88 | svalue2 - string choosing the filtering axis (e.g. 'inHeight') |
92 | method - 0 for SMOOTH or 1 for MEDIAN |
|
89 | method - 0 for SMOOTH or 1 for MEDIAN | |
93 | factor - number used to set threshold |
|
90 | factor - number used to set threshold | |
94 | filter - 1 to filter the data using the standard-deviation criterion, else 0 |
|
91 | filter - 1 to filter the data using the standard-deviation criterion, else 0 |
95 | npoints - number of points for mask filter |
|
92 | npoints - number of points for mask filter | |
96 | ''' |
|
93 | ''' | |
97 |
|
94 | |||
98 | print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor)) |
|
95 | print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor)) |
99 |
|
96 | |||
100 |
|
97 | |||
101 | yaxis = self.dataOut.heightList |
|
98 | yaxis = self.dataOut.heightList | |
102 | xaxis = numpy.array([[self.dataOut.utctime]]) |
|
99 | xaxis = numpy.array([[self.dataOut.utctime]]) | |
103 |
|
100 | |||
104 | # Zonal |
|
101 | # Zonal | |
105 | value_temp = self.dataOut.data_output[0] |
|
102 | value_temp = self.dataOut.data_output[0] | |
106 |
|
103 | |||
107 | # Meridional |
|
104 | # Meridional |
108 | value_temp = self.dataOut.data_output[1] |
|
105 | value_temp = self.dataOut.data_output[1] | |
109 |
|
106 | |||
110 | # Vertical |
|
107 | # Vertical | |
111 | value_temp = numpy.transpose(self.dataOut.data_output[2]) |
|
108 | value_temp = numpy.transpose(self.dataOut.data_output[2]) | |
112 |
|
109 | |||
113 | htemp = yaxis |
|
110 | htemp = yaxis | |
114 | std = value_temp |
|
111 | std = value_temp | |
115 | for h in range(len(htemp)): |
|
112 | for h in range(len(htemp)): | |
116 | nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0]) |
|
113 | nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0]) | |
117 | minvalid = npoints |
|
114 | minvalid = npoints | |
118 |
|
115 | |||
119 | #only if the number of valid values is greater than the minimum required (10%) |
|
116 | #only if the number of valid values is greater than the minimum required (10%) |
120 | if nvalues_valid > minvalid: |
|
117 | if nvalues_valid > minvalid: | |
121 |
|
118 | |||
122 | if method == 0: |
|
119 | if method == 0: | |
123 | #SMOOTH |
|
120 | #SMOOTH | |
124 | w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1) |
|
121 | w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1) | |
125 |
|
122 | |||
126 |
|
123 | |||
127 | if method == 1: |
|
124 | if method == 1: | |
128 | #MEDIAN |
|
125 | #MEDIAN | |
129 | w = value_temp[h] - self.Median(input=value_temp[h], width = npoints) |
|
126 | w = value_temp[h] - self.Median(input=value_temp[h], width = npoints) | |
130 |
|
127 | |||
131 | dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1) |
|
128 | dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1) | |
132 |
|
129 | |||
133 | threshold = dw*factor |
|
130 | threshold = dw*factor | |
134 | value_temp[numpy.where(w > threshold),h] = numpy.nan |
|
131 | value_temp[numpy.where(w > threshold),h] = numpy.nan | |
135 | value_temp[numpy.where(w < -1*threshold),h] = numpy.nan |
|
132 | value_temp[numpy.where(w < -1*threshold),h] = numpy.nan | |
136 |
|
133 | |||
137 |
|
134 | |||
138 | #At the end |
|
135 | #At the end | |
139 | if svalue2 == 'inHeight': |
|
136 | if svalue2 == 'inHeight': | |
140 | value_temp = numpy.transpose(value_temp) |
|
137 | value_temp = numpy.transpose(value_temp) | |
141 | output_array[:,m] = value_temp |
|
138 | output_array[:,m] = value_temp | |
142 |
|
139 | |||
143 | if svalue == 'zonal': |
|
140 | if svalue == 'zonal': | |
144 | self.dataOut.data_output[0] = output_array |
|
141 | self.dataOut.data_output[0] = output_array | |
145 |
|
142 | |||
146 | elif svalue == 'meridional': |
|
143 | elif svalue == 'meridional': | |
147 | self.dataOut.data_output[1] = output_array |
|
144 | self.dataOut.data_output[1] = output_array | |
148 |
|
145 | |||
149 | elif svalue == 'vertical': |
|
146 | elif svalue == 'vertical': | |
150 | self.dataOut.data_output[2] = output_array |
|
147 | self.dataOut.data_output[2] = output_array | |
151 |
|
148 | |||
152 | return self.dataOut.data_output |
|
149 | return self.dataOut.data_output | |
153 |
|
150 | |||
154 |
|
151 | |||
155 | def Median(self,input,width): |
|
152 | def Median(self,input,width): | |
156 | ''' |
|
153 | ''' | |
157 | Inputs: |
|
154 | Inputs: | |
158 | input - Velocity array |
|
155 | input - Velocity array | |
159 | width - Number of points for mask filter |
|
156 | width - Number of points for mask filter | |
160 |
|
157 | |||
161 | ''' |
|
158 | ''' | |
162 |
|
159 | |||
163 | if numpy.mod(width,2) == 1: |
|
160 | if numpy.mod(width,2) == 1: | |
164 | pc = int((width - 1) / 2) |
|
161 | pc = int((width - 1) / 2) | |
165 | cont = 0 |
|
162 | cont = 0 | |
166 | output = [] |
|
163 | output = [] | |
167 |
|
164 | |||
168 | for i in range(len(input)): |
|
165 | for i in range(len(input)): | |
169 | if i >= pc and i < len(input) - pc: |
|
166 | if i >= pc and i < len(input) - pc: | |
170 | new2 = input[i-pc:i+pc+1] |
|
167 | new2 = input[i-pc:i+pc+1] | |
171 | temp = numpy.where(numpy.isfinite(new2)) |
|
168 | temp = numpy.where(numpy.isfinite(new2)) | |
172 | new = new2[temp] |
|
169 | new = new2[temp] | |
173 | value = numpy.median(new) |
|
170 | value = numpy.median(new) | |
174 | output.append(value) |
|
171 | output.append(value) | |
175 |
|
172 | |||
176 | output = numpy.array(output) |
|
173 | output = numpy.array(output) | |
177 | output = numpy.hstack((input[0:pc],output)) |
|
174 | output = numpy.hstack((input[0:pc],output)) | |
178 | output = numpy.hstack((output,input[-pc:len(input)])) |
|
175 | output = numpy.hstack((output,input[-pc:len(input)])) | |
179 |
|
176 | |||
180 | return output |
|
177 | return output | |
181 |
|
178 | |||
182 | def Smooth(self,input,width,edge_truncate = None): |
|
179 | def Smooth(self,input,width,edge_truncate = None): | |
183 | ''' |
|
180 | ''' | |
184 | Inputs: |
|
181 | Inputs: | |
185 | input - Velocity array |
|
182 | input - Velocity array | |
186 | width - Number of points for mask filter |
|
183 | width - Number of points for mask filter | |
187 | edge_truncate - 1 to truncate the convolution product at the edges, otherwise 0 or None |
|
184 | edge_truncate - 1 to truncate the convolution product at the edges, otherwise 0 or None |
188 |
|
185 | |||
189 | ''' |
|
186 | ''' | |
190 |
|
187 | |||
191 | if numpy.mod(width,2) == 0: |
|
188 | if numpy.mod(width,2) == 0: | |
192 | real_width = width + 1 |
|
189 | real_width = width + 1 | |
193 | nzeros = width / 2 |
|
190 | nzeros = width / 2 | |
194 | else: |
|
191 | else: | |
195 | real_width = width |
|
192 | real_width = width | |
196 | nzeros = (width - 1) / 2 |
|
193 | nzeros = (width - 1) / 2 | |
197 |
|
194 | |||
198 | half_width = int(real_width)/2 |
|
195 | half_width = int(real_width)/2 | |
199 | length = len(input) |
|
196 | length = len(input) | |
200 |
|
197 | |||
201 | gate = numpy.ones(real_width,dtype='float') |
|
198 | gate = numpy.ones(real_width,dtype='float') | |
202 | norm_of_gate = numpy.sum(gate) |
|
199 | norm_of_gate = numpy.sum(gate) | |
203 |
|
200 | |||
204 | nan_process = 0 |
|
201 | nan_process = 0 | |
205 | nan_id = numpy.where(numpy.isnan(input)) |
|
202 | nan_id = numpy.where(numpy.isnan(input)) | |
206 | if len(nan_id[0]) > 0: |
|
203 | if len(nan_id[0]) > 0: | |
207 | nan_process = 1 |
|
204 | nan_process = 1 | |
208 | pb = numpy.zeros(len(input)) |
|
205 | pb = numpy.zeros(len(input)) | |
209 | pb[nan_id] = 1. |
|
206 | pb[nan_id] = 1. | |
210 | input[nan_id] = 0. |
|
207 | input[nan_id] = 0. | |
211 |
|
208 | |||
212 | if edge_truncate == True: |
|
209 | if edge_truncate == True: | |
213 | output = numpy.convolve(input/norm_of_gate,gate,mode='same') |
|
210 | output = numpy.convolve(input/norm_of_gate,gate,mode='same') | |
214 | elif edge_truncate == False or edge_truncate == None: |
|
211 | elif edge_truncate == False or edge_truncate == None: | |
215 | output = numpy.convolve(input/norm_of_gate,gate,mode='valid') |
|
212 | output = numpy.convolve(input/norm_of_gate,gate,mode='valid') | |
216 | output = numpy.hstack((input[0:half_width],output)) |
|
213 | output = numpy.hstack((input[0:half_width],output)) | |
217 | output = numpy.hstack((output,input[len(input)-half_width:len(input)])) |
|
214 | output = numpy.hstack((output,input[len(input)-half_width:len(input)])) | |
218 |
|
215 | |||
219 | if nan_process: |
|
216 | if nan_process: | |
220 | pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid') |
|
217 | pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid') | |
221 | pb = numpy.hstack((numpy.zeros(half_width),pb)) |
|
218 | pb = numpy.hstack((numpy.zeros(half_width),pb)) | |
222 | pb = numpy.hstack((pb,numpy.zeros(half_width))) |
|
219 | pb = numpy.hstack((pb,numpy.zeros(half_width))) | |
223 | output[numpy.where(pb > 0.9999)] = numpy.nan |
|
220 | output[numpy.where(pb > 0.9999)] = numpy.nan | |
224 | input[nan_id] = numpy.nan |
|
221 | input[nan_id] = numpy.nan | |
225 | return output |
|
222 | return output | |
226 |
|
223 | |||
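Smooth applies a flat (boxcar) gate with numpy.convolve, zero-filling NaNs beforehand and using the auxiliary pb array to restore NaN where a window held essentially no valid data. A common alternative is to renormalize each window by its count of finite samples; the sketch below follows that variant, under that assumption, with illustrative names that are not part of this class.

import numpy

def boxcar_smooth(values, width):
    # Flat (boxcar) smoothing that renormalizes by the number of finite
    # samples in each window; windows with no finite samples yield NaN.
    width = width + 1 if width % 2 == 0 else width   # force an odd width
    gate = numpy.ones(width) / width
    finite = numpy.isfinite(values)
    filled = numpy.where(finite, values, 0.0)
    num = numpy.convolve(filled, gate, mode='same')
    den = numpy.convolve(finite.astype(float), gate, mode='same')
    return num / numpy.where(den > 0, den, numpy.nan)
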
    def Average(self,aver=0,nhaver=1):
        '''
        Inputs:
        aver - index into the taver list selecting the averaging (consensus) period in hours
        nhaver - decimation factor in heights
        '''
        nhpoints = 48

        lat_piura = -5.17
        lat_huancayo = -12.04
        lat_porcuya = -5.8

        if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12

        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12

        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
            hcm = 5.  # 2

        pdata = 0.2
        taver = [1,2,3,4,6,8,12,24]
        t0 = 0
        tf = 24
        ntime = (tf-t0) // taver[aver]
        ti = numpy.arange(ntime)
        tf = numpy.arange(ntime) + taver[aver]

        old_height = self.dataOut.heightList

        if nhaver > 1:
            num_hei = len(self.dataOut.heightList) // nhaver // self.dataOut.nmodes
            deltha = 0.05*nhaver
            minhvalid = pdata*nhaver
            for im in range(self.dataOut.nmodes):
                new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.

        data_fHeigths_List = []
        data_fZonal_List = []
        data_fMeridional_List = []
        data_fVertical_List = []
        startDTList = []

        for i in range(ntime):
            height = old_height

            start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
            stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)

            limit_sec1 = time.mktime(start.timetuple())
            limit_sec2 = time.mktime(stop.timetuple())

            t1 = numpy.where(self.f_timesec >= limit_sec1)
            t2 = numpy.where(self.f_timesec < limit_sec2)
            time_select = []
            for val_sec in t1[0]:
                if val_sec in t2[0]:
                    time_select.append(val_sec)

            time_select = numpy.array(time_select,dtype = 'int')
            minvalid = numpy.ceil(pdata*nhpoints)

            zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if nhaver > 1:
                new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if len(time_select) > minvalid:
                time_average = self.f_timesec[time_select]

                for im in range(self.dataOut.nmodes):

                    for ih in range(self.dataOut.nranges):
                        if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
                            zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
                            mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
                            ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))

                    if nhaver > 1:
                        for ih in range(num_hei):
                            hvalid = numpy.arange(nhaver) + nhaver*ih

                            if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
                                new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
                                new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
                                new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
                if nhaver > 1:
                    zon_aver = new_zon_aver
                    mer_aver = new_mer_aver
                    ver_aver = new_ver_aver
                    height = new_height

                tstart = time_average[0]
                tend = time_average[-1]
                startTime = time.gmtime(tstart)

                year = startTime.tm_year
                month = startTime.tm_mon
                day = startTime.tm_mday
                hour = startTime.tm_hour
                minute = startTime.tm_min
                second = startTime.tm_sec

                startDTList.append(datetime.datetime(year,month,day,hour,minute,second))

                o_height = numpy.array([])
                o_zon_aver = numpy.array([])
                o_mer_aver = numpy.array([])
                o_ver_aver = numpy.array([])
                if self.dataOut.nmodes > 1:
                    for im in range(self.dataOut.nmodes):

                        if im == 0:
                            h_select = numpy.where((height[0,:] >= 0) & (height[0,:] <= hcm) & numpy.isfinite(height[0,:]))
                        else:
                            h_select = numpy.where((height[1,:] > hcm) & (height[1,:] < 20) & numpy.isfinite(height[1,:]))

                        ht = h_select[0]

                        o_height = numpy.hstack((o_height,height[im,ht]))
                        o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                        o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                        o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)

                else:
                    h_select = numpy.where((height[0,:] <= hcm) & numpy.isfinite(height[0,:]))
                    ht = h_select[0]
                    o_height = numpy.hstack((o_height,height[im,ht]))
                    o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                    o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                    o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)

        return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List

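Two pieces of Average lend themselves to compact sketches: the per-window record selection, which the method builds by intersecting two numpy.where results with a Python membership loop, and the consensus rule that accepts an average only when at least minvalid finite samples contribute. The helpers below are illustrative equivalents under the assumption that timestamps are epoch seconds; they are not part of the class API.

import numpy

def select_window(timesec, sec1, sec2):
    # Indices of records whose epoch-second timestamp lies in [sec1, sec2),
    # equivalent to intersecting the two numpy.where results above.
    return numpy.where((timesec >= sec1) & (timesec < sec2))[0]

def consensus_mean(values, minvalid):
    # NaN-aware mean that is reported only when enough finite samples back it.
    nfinite = numpy.sum(numpy.isfinite(values))
    return numpy.nansum(values) / nfinite if nfinite >= minvalid else numpy.nan
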