Update2 for EW-Drifts
Percy Condor
r1383:3e971ac8dea1

The requested changes are too big and the content was truncated; only part of the diff is shown below.

@@ -129,7 +129,7 class SchainConfigure():
129 129 __SCHAIN_SENDER_EMAIL = "MAILSERVER_ACCOUNT"
130 130 __SCHAIN_SENDER_PASS = "MAILSERVER_PASSWORD"
131 131
132 def __init__(self, initFile = None):
132 def __init__(self, initFile=None):
133 133
134 134 # Set configuration file
135 135 if (initFile == None):
@@ -251,7 +251,7 class SchainNotify:
251 251 Written by "Miguel Urco":mailto:miguel.urco@jro.igp.gob.pe Dec. 1, 2015
252 252 """
253 253
254 #constants
254 # constants
255 255
256 256 def __init__(self):
257 257 """__init__ initializes SchainNotify by getting some basic information from SchainDB and SchainSite.
@@ -275,7 +275,7 class SchainNotify:
275 275 self.__emailFromAddress = confObj.getSenderEmail()
276 276 self.__emailPass = confObj.getSenderPass()
277 277 self.__emailToAddress = confObj.getAdminEmail()
278 self.__emailServer = confObj.getEmailServer()
278 self.__emailServer = confObj.getEmailServer()
279 279
280 280 def sendEmail(self, email_from, email_to, subject='Error running ...', message="", subtitle="", filename="", html_format=True):
281 281
@@ -297,7 +297,7 class SchainNotify:
297 297 msg.preamble = 'SChainPy'
298 298
299 299 if html_format:
300 message = "<h1> %s </h1>" %subject + "<h3>" + subtitle.replace("\n", "</h3><h3>\n") + "</h3>" + message.replace("\n", "<br>\n")
300 message = "<h1> %s </h1>" % subject + "<h3>" + subtitle.replace("\n", "</h3><h3>\n") + "</h3>" + message.replace("\n", "<br>\n")
301 301 message = "<html>\n" + message + '</html>'
302 302
303 303 # This is the textual part:
@@ -310,8 +310,8 class SchainNotify:
310 310
311 311 if filename and os.path.isfile(filename):
312 312 # This is the binary part(The Attachment):
313 part = MIMEApplication(open(filename,"rb").read())
314 part.add_header('Content-Disposition',
313 part = MIMEApplication(open(filename, "rb").read())
314 part.add_header('Content-Disposition',
315 315 'attachment',
316 316 filename=os.path.basename(filename))
317 317 msg.attach(part)
@@ -342,7 +342,7 class SchainNotify:
342 342
343 343 return 1
344 344
345 def sendAlert(self, message, subject = "", subtitle="", filename=""):
345 def sendAlert(self, message, subject="", subtitle="", filename=""):
346 346 """sendAlert sends an email with the given message and optional title.
347 347
348 348 Inputs: message (string), and optional title (string)
@@ -357,14 +357,14 class SchainNotify:
357 357 if not self.__emailToAddress:
358 358 return 0
359 359
360 print("***** Sending alert to %s *****" %self.__emailToAddress)
360 print("***** Sending alert to %s *****" % self.__emailToAddress)
361 361 # set up message
362 362
363 sent=self.sendEmail(email_from=self.__emailFromAddress,
363 sent = self.sendEmail(email_from=self.__emailFromAddress,
364 364 email_to=self.__emailToAddress,
365 365 subject=subject,
366 366 message=message,
367 subtitle=subtitle,
367 subtitle=subtitle,
368 368 filename=filename)
369 369
370 370 if not sent:
@@ -372,7 +372,7 class SchainNotify:
372 372
373 373 return 1
374 374
375 def notify(self, email, message, subject = "", subtitle="", filename=""):
375 def notify(self, email, message, subject="", subtitle="", filename=""):
376 376 """notify sends an email with the given message and title to email.
377 377
378 378 Inputs: email (string), message (string), and subject (string)
@@ -392,7 +392,7 class SchainNotify:
392 392 email_to=email,
393 393 subject=subject,
394 394 message=message,
395 subtitle=subtitle,
395 subtitle=subtitle,
396 396 filename=filename
397 397 )
398 398
@@ -502,4 +502,4 if __name__ == '__main__':
502 502
503 503 test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify')
504 504
505 print('Hopefully message sent - check.') No newline at end of file
505 print('Hopefully message sent - check.')
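
For readers unfamiliar with the email.mime API used in sendEmail above, here is a minimal, self-contained sketch of the same pattern (HTML body plus optional binary attachment). It is not the module's code; the server, credentials and addresses are placeholders.

import os
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication

def send_report(server, sender, password, recipient, subject, body, filename=None):
    msg = MIMEMultipart()
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = recipient
    # HTML part, mirroring the html_format branch of sendEmail above
    html = "<html>\n<h1> %s </h1>" % subject + body.replace("\n", "<br>\n") + "</html>"
    msg.attach(MIMEText(html, 'html'))
    if filename and os.path.isfile(filename):
        # binary attachment, as in the MIMEApplication block above
        with open(filename, 'rb') as f:
            part = MIMEApplication(f.read())
        part.add_header('Content-Disposition', 'attachment',
                        filename=os.path.basename(filename))
        msg.attach(part)
    with smtplib.SMTP(server) as smtp:
        smtp.starttls()
        smtp.login(sender, password)
        smtp.sendmail(sender, [recipient], msg.as_string())
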
@@ -161,7 +161,7 def search(nextcommand):
161 161 except Exception as e:
162 162 log.error('Module `{}` does not exists'.format(nextcommand), '')
163 163 allModules = getAll()
164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1] > 80]
165 165 log.success('Possible modules are: {}'.format(', '.join(similar)), '')
166 166
167 167 def runschain(nextcommand):
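
The suggestion logic in search() above relies on fuzzy string matching; as a hedged sketch (assuming the fuzzywuzzy package, whose process.extract returns (candidate, score) tuples), the same idea in isolation looks like this. The module names in the usage comment are made up.

from fuzzywuzzy import process

def suggest_modules(name, all_modules, limit=12, min_score=80):
    # keep only candidates whose similarity score exceeds the threshold
    matches = process.extract(name, all_modules, limit=limit)
    return [candidate for candidate, score in matches if score > min_score]

# e.g. suggest_modules('VoltagReader', ['VoltageReader', 'SpectraReader'])
# -> ['VoltageReader']
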
@@ -203,7 +203,7 class ProcUnitConf(ConfBase):
203 203
204 204 def removeOperation(self, id):
205 205
206 i = [1 if x.id==id else 0 for x in self.operations]
206 i = [1 if x.id == id else 0 for x in self.operations]
207 207 self.operations.pop(i.index(1))
208 208
209 209 def getOperation(self, id):
@@ -293,12 +293,12 class ReadUnitConf(ProcUnitConf):
293 293 if name == None:
294 294 if 'Reader' in datatype:
295 295 name = datatype
296 datatype = name.replace('Reader','')
296 datatype = name.replace('Reader', '')
297 297 else:
298 298 name = '{}Reader'.format(datatype)
299 299 if datatype == None:
300 300 if 'Reader' in name:
301 datatype = name.replace('Reader','')
301 datatype = name.replace('Reader', '')
302 302 else:
303 303 datatype = name
304 304 name = '{}Reader'.format(name)
@@ -572,7 +572,7 class Project(Process):
572 572 if '#_start_#' in msg:
573 573 procs += 1
574 574 elif '#_end_#' in msg:
575 procs -=1
575 procs -= 1
576 576 else:
577 577 err_msg = msg
578 578
@@ -617,7 +617,7 class Project(Process):
617 617 subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
618 618
619 619 a = Alarm(
620 modes=self.alarm,
620 modes=self.alarm,
621 621 email=self.email,
622 622 message=message,
623 623 subject=subject,
@@ -656,4 +656,4 class Project(Process):
656 656 self.runProcs()
657 657 log.success('{} Done (Time: {:4.2f}s)'.format(
658 658 self.name,
659 time.time()-self.start_time), '')
659 time.time() - self.start_time), '')
@@ -12,49 +12,49 import datetime
12 12 SPEED_OF_LIGHT = 299792458
13 13 SPEED_OF_LIGHT = 3e8
14 14
15 FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
16 ('FileMgcNumber','<u4'), #0x23020100
17 ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more)
18 ('RadarUnitId','<u4'),
19 ('SiteName','<s32'), #Null terminated
15 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
16 ('FileMgcNumber', '<u4'), # 0x23020100
17 ('nFDTdataRecors', '<u4'), # No Of FDT data records in this file (0 or more)
18 ('RadarUnitId', '<u4'),
19 ('SiteName', '<s32'), # Null terminated
20 20 ])
21 21
22 RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes
23 ('RecMgcNumber','<u4'), #0x23030001
24 ('RecCounter','<u4'), #Record counter(0,1, ...)
25 ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record
26 ('Off2StartData','<u4'), #Offset to start of data from start of this record
27 ('EpTimeStamp','<i4'), #Epoch time stamp of start of acquisition (seconds)
28 ('msCompTimeStamp','<u4'), #Millisecond component of time stamp (0,...,999)
29 ('ExpTagName','<s32'), #Experiment tag name (null terminated)
30 ('ExpComment','<s32'), #Experiment comment (null terminated)
31 ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North)
32 ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East)
33 ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
34 ('TransmitFrec','<u4'), #Transmit frequency (Hz)
35 ('ReceiveFrec','<u4'), #Receive frequency
36 ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz)
37 ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2")
38 ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3)
39 ('nModesInUse','<u4'), #Number of modes in use (1 or 2)
40 ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1)
41 ('DualModeRange','<u4'), #Dual Mode range correction for these data (m)
42 ('nDigChannels','<u4'), #Number of digital channels acquired (2*N)
43 ('SampResolution','<u4'), #Sampling resolution (meters)
44 ('nRangeGatesSamp','<u4'), #Number of range gates sampled
45 ('StartRangeSamp','<u4'), #Start range of sampling (meters)
46 ('PRFhz','<u4'), #PRF (Hz)
47 ('Integrations','<u4'), #Integrations
48 ('nDataPointsTrsf','<u4'), #Number of data points transformed
49 ('nReceiveBeams','<u4'), #Number of receive beams stored in file (1 or N)
50 ('nSpectAverages','<u4'), #Number of spectral averages
51 ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window)
52 ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North)
53 ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical)
54 ('AntennaCoord','<f24'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
55 ('RecPhaseCalibr','<f12'), #Receiver phase calibration (degrees) - N values
56 ('RecAmpCalibr','<f12'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
57 ('ReceiverGaindB','<u12'), #Receiver gains in dB - N values
22 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
23 ('RecMgcNumber', '<u4'), # 0x23030001
24 ('RecCounter', '<u4'), # Record counter(0,1, ...)
25 ('Off2StartNxtRec', '<u4'), # Offset to start of next record form start of this record
26 ('Off2StartData', '<u4'), # Offset to start of data from start of this record
27 ('EpTimeStamp', '<i4'), # Epoch time stamp of start of acquisition (seconds)
28 ('msCompTimeStamp', '<u4'), # Millisecond component of time stamp (0,...,999)
29 ('ExpTagName', '<s32'), # Experiment tag name (null terminated)
30 ('ExpComment', '<s32'), # Experiment comment (null terminated)
31 ('SiteLatDegrees', '<f4'), # Site latitude (from GPS) in degrees (positive implies North)
32 ('SiteLongDegrees', '<f4'), # Site longitude (from GPS) in degrees (positive implies East)
33 ('RTCgpsStatus', '<u4'), # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
34 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
35 ('ReceiveFrec', '<u4'), # Receive frequency
36 ('FirstOsciFrec', '<u4'), # First local oscillator frequency (Hz)
37 ('Polarisation', '<u4'), # (0="O", 1="E", 2="linear 1", 3="linear2")
38 ('ReceiverFiltSett', '<u4'), # Receiver filter settings (0,1,2,3)
39 ('nModesInUse', '<u4'), # Number of modes in use (1 or 2)
40 ('DualModeIndex', '<u4'), # Dual Mode index number for these data (0 or 1)
41 ('DualModeRange', '<u4'), # Dual Mode range correction for these data (m)
42 ('nDigChannels', '<u4'), # Number of digital channels acquired (2*N)
43 ('SampResolution', '<u4'), # Sampling resolution (meters)
44 ('nRangeGatesSamp', '<u4'), # Number of range gates sampled
45 ('StartRangeSamp', '<u4'), # Start range of sampling (meters)
46 ('PRFhz', '<u4'), # PRF (Hz)
47 ('Integrations', '<u4'), # Integrations
48 ('nDataPointsTrsf', '<u4'), # Number of data points transformed
49 ('nReceiveBeams', '<u4'), # Number of receive beams stored in file (1 or N)
50 ('nSpectAverages', '<u4'), # Number of spectral averages
51 ('FFTwindowingInd', '<u4'), # FFT windowing index (0 = no window)
52 ('BeamAngleAzim', '<f4'), # Beam steer angle (azimuth) in degrees (clockwise from true North)
53 ('BeamAngleZen', '<f4'), # Beam steer angle (zenith) in degrees (0=> vertical)
54 ('AntennaCoord', '<f24'), # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
55 ('RecPhaseCalibr', '<f12'), # Receiver phase calibration (degrees) - N values
56 ('RecAmpCalibr', '<f12'), # Receiver amplitude calibration (ratio relative to receiver one) - N values
57 ('ReceiverGaindB', '<u12'), # Receiver gains in dB - N values
58 58 ])
59 59
60 60
@@ -82,39 +82,39 class Header(object):
82 82 keyList.sort()
83 83
84 84 for key in keyList:
85 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
85 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
86 86
87 87 if "size" not in keyList:
88 88 attr = getattr(self, "size")
89 89
90 90 if attr:
91 message += "%s = %s" %("size", attr) + "\n"
91 message += "%s = %s" % ("size", attr) + "\n"
92 92
93 93 print(message)
94 94
95 95 class FileHeader(Header):
96 96
97 FileMgcNumber= None
98 nFDTdataRecors=None #No Of FDT data records in this file (0 or more)
99 RadarUnitId= None
100 SiteName= None
97 FileMgcNumber = None
98 nFDTdataRecors = None # No Of FDT data records in this file (0 or more)
99 RadarUnitId = None
100 SiteName = None
101 101
102 #__LOCALTIME = None
102 # __LOCALTIME = None
103 103
104 104 def __init__(self, useLocalTime=True):
105 105
106 self.FileMgcNumber= 0 #0x23020100
107 self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more)
108 self.RadarUnitId= 0
109 self.SiteName= ""
106 self.FileMgcNumber = 0 # 0x23020100
107 self.nFDTdataRecors = 0 # No Of FDT data records in this file (0 or more)
108 self.RadarUnitId = 0
109 self.SiteName = ""
110 110 self.size = 48
111 111
112 #self.useLocalTime = useLocalTime
112 # self.useLocalTime = useLocalTime
113 113
114 114 def read(self, fp):
115 115
116 116 try:
117 header = numpy.fromfile(fp, FILE_STRUCTURE,1)
117 header = numpy.fromfile(fp, FILE_STRUCTURE, 1)
118 118 ''' numpy.fromfile(file, dtype, count, sep='')
119 119 file : file or str
120 120 Open file object or filename.
@@ -139,13 +139,13 class FileHeader(Header):
139 139 print(eBasicHeader)
140 140 return 0
141 141
142 self.FileMgcNumber= byte(header['FileMgcNumber'][0])
143 self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more)
144 self.RadarUnitId= int(header['RadarUnitId'][0])
145 self.SiteName= char(header['SiteName'][0])
142 self.FileMgcNumber = byte(header['FileMgcNumber'][0])
143 self.nFDTdataRecors = int(header['nFDTdataRecors'][0]) # No Of FDT data records in this file (0 or more)
144 self.RadarUnitId = int(header['RadarUnitId'][0])
145 self.SiteName = char(header['SiteName'][0])
146 146
147 147
148 if self.size <48:
148 if self.size < 48:
149 149 return 0
150 150
151 151 return 1
@@ -182,40 +182,40 class FileHeader(Header):
182 182
183 183 class RecordHeader(Header):
184 184
185 RecMgcNumber=None #0x23030001
186 RecCounter= None
187 Off2StartNxtRec= None
188 EpTimeStamp= None
189 msCompTimeStamp= None
190 ExpTagName= None
191 ExpComment=None
192 SiteLatDegrees=None
193 SiteLongDegrees= None
194 RTCgpsStatus= None
195 TransmitFrec= None
196 ReceiveFrec= None
197 FirstOsciFrec= None
198 Polarisation= None
199 ReceiverFiltSett= None
200 nModesInUse= None
201 DualModeIndex= None
202 DualModeRange= None
203 nDigChannels= None
204 SampResolution= None
205 nRangeGatesSamp= None
206 StartRangeSamp= None
207 PRFhz= None
208 Integrations= None
209 nDataPointsTrsf= None
210 nReceiveBeams= None
211 nSpectAverages= None
212 FFTwindowingInd= None
213 BeamAngleAzim= None
214 BeamAngleZen= None
215 AntennaCoord= None
216 RecPhaseCalibr= None
217 RecAmpCalibr= None
218 ReceiverGaindB= None
185 RecMgcNumber = None # 0x23030001
186 RecCounter = None
187 Off2StartNxtRec = None
188 EpTimeStamp = None
189 msCompTimeStamp = None
190 ExpTagName = None
191 ExpComment = None
192 SiteLatDegrees = None
193 SiteLongDegrees = None
194 RTCgpsStatus = None
195 TransmitFrec = None
196 ReceiveFrec = None
197 FirstOsciFrec = None
198 Polarisation = None
199 ReceiverFiltSett = None
200 nModesInUse = None
201 DualModeIndex = None
202 DualModeRange = None
203 nDigChannels = None
204 SampResolution = None
205 nRangeGatesSamp = None
206 StartRangeSamp = None
207 PRFhz = None
208 Integrations = None
209 nDataPointsTrsf = None
210 nReceiveBeams = None
211 nSpectAverages = None
212 FFTwindowingInd = None
213 BeamAngleAzim = None
214 BeamAngleZen = None
215 AntennaCoord = None
216 RecPhaseCalibr = None
217 RecAmpCalibr = None
218 ReceiverGaindB = None
219 219
220 220 '''size = None
221 221 nSamples = None
@@ -224,20 +224,20 class RecordHeader(Header):
224 224 adcResolution = None
225 225 pciDioBusWidth = None'''
226 226
227 def __init__(self, RecMgcNumber=None, RecCounter= 0, Off2StartNxtRec= 0,
228 EpTimeStamp= 0, msCompTimeStamp= 0, ExpTagName= None,
229 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees= 0,
230 RTCgpsStatus= 0, TransmitFrec= 0, ReceiveFrec= 0,
231 FirstOsciFrec= 0, Polarisation= 0, ReceiverFiltSett= 0,
232 nModesInUse= 0, DualModeIndex= 0, DualModeRange= 0,
233 nDigChannels= 0, SampResolution= 0, nRangeGatesSamp= 0,
234 StartRangeSamp= 0, PRFhz= 0, Integrations= 0,
235 nDataPointsTrsf= 0, nReceiveBeams= 0, nSpectAverages= 0,
236 FFTwindowingInd= 0, BeamAngleAzim= 0, BeamAngleZen= 0,
237 AntennaCoord= 0, RecPhaseCalibr= 0, RecAmpCalibr= 0,
238 ReceiverGaindB= 0):
227 def __init__(self, RecMgcNumber=None, RecCounter=0, Off2StartNxtRec=0,
228 EpTimeStamp=0, msCompTimeStamp=0, ExpTagName=None,
229 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees=0,
230 RTCgpsStatus=0, TransmitFrec=0, ReceiveFrec=0,
231 FirstOsciFrec=0, Polarisation=0, ReceiverFiltSett=0,
232 nModesInUse=0, DualModeIndex=0, DualModeRange=0,
233 nDigChannels=0, SampResolution=0, nRangeGatesSamp=0,
234 StartRangeSamp=0, PRFhz=0, Integrations=0,
235 nDataPointsTrsf=0, nReceiveBeams=0, nSpectAverages=0,
236 FFTwindowingInd=0, BeamAngleAzim=0, BeamAngleZen=0,
237 AntennaCoord=0, RecPhaseCalibr=0, RecAmpCalibr=0,
238 ReceiverGaindB=0):
239 239
240 self.RecMgcNumber = RecMgcNumber #0x23030001
240 self.RecMgcNumber = RecMgcNumber # 0x23030001
241 241 self.RecCounter = RecCounter
242 242 self.Off2StartNxtRec = Off2StartNxtRec
243 243 self.EpTimeStamp = EpTimeStamp
@@ -275,15 +275,15 class RecordHeader(Header):
275 275
276 276 def read(self, fp):
277 277
278 startFp = fp.tell() #The method tell() returns the current position of the file read/write pointer within the file.
278 startFp = fp.tell() # The method tell() returns the current position of the file read/write pointer within the file.
279 279
280 280 try:
281 header = numpy.fromfile(fp,RECORD_STRUCTURE,1)
281 header = numpy.fromfile(fp, RECORD_STRUCTURE, 1)
282 282 except Exception as e:
283 283 print("System Header: " + e)
284 284 return 0
285 285
286 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001
286 self.RecMgcNumber = header['RecMgcNumber'][0] # 0x23030001
287 287 self.RecCounter = header['RecCounter'][0]
288 288 self.Off2StartNxtRec = header['Off2StartNxtRec'][0]
289 289 self.EpTimeStamp = header['EpTimeStamp'][0]
@@ -318,16 +318,16 class RecordHeader(Header):
318 318 self.RecAmpCalibr = header['RecAmpCalibr'][0]
319 319 self.ReceiverGaindB = header['ReceiverGaindB'][0]
320 320
321 Self.size = 180+20*3
321 Self.size = 180 + 20 * 3
322 322
323 323 endFp = self.size + startFp
324 324
325 325 if fp.tell() > endFp:
326 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
326 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
327 327 return 0
328 328
329 329 if fp.tell() < endFp:
330 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
330 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" % fp.name)
331 331 return 0
332 332
333 333 return 1
@@ -335,38 +335,38 class RecordHeader(Header):
335 335 def write(self, fp):
336 336
337 337 headerTuple = (self.RecMgcNumber,
338 self.RecCounter,
339 self.Off2StartNxtRec,
340 self.EpTimeStamp,
341 self.msCompTimeStamp,
342 self.ExpTagName,
343 self.ExpComment,
344 self.SiteLatDegrees,
345 self.SiteLongDegrees,
346 self.RTCgpsStatus,
347 self.TransmitFrec,
348 self.ReceiveFrec,
349 self.FirstOsciFrec,
350 self.Polarisation,
351 self.ReceiverFiltSett,
352 self.nModesInUse,
353 self.DualModeIndex,
354 self.DualModeRange,
338 self.RecCounter,
339 self.Off2StartNxtRec,
340 self.EpTimeStamp,
341 self.msCompTimeStamp,
342 self.ExpTagName,
343 self.ExpComment,
344 self.SiteLatDegrees,
345 self.SiteLongDegrees,
346 self.RTCgpsStatus,
347 self.TransmitFrec,
348 self.ReceiveFrec,
349 self.FirstOsciFrec,
350 self.Polarisation,
351 self.ReceiverFiltSett,
352 self.nModesInUse,
353 self.DualModeIndex,
354 self.DualModeRange,
355 355 self.nDigChannels,
356 self.SampResolution,
357 self.nRangeGatesSamp,
358 self.StartRangeSamp,
359 self.PRFhz,
360 self.Integrations,
361 self.nDataPointsTrsf,
362 self.nReceiveBeams,
363 self.nSpectAverages,
364 self.FFTwindowingInd,
365 self.BeamAngleAzim,
366 self.BeamAngleZen,
367 self.AntennaCoord,
368 self.RecPhaseCalibr,
369 self.RecAmpCalibr,
356 self.SampResolution,
357 self.nRangeGatesSamp,
358 self.StartRangeSamp,
359 self.PRFhz,
360 self.Integrations,
361 self.nDataPointsTrsf,
362 self.nReceiveBeams,
363 self.nSpectAverages,
364 self.FFTwindowingInd,
365 self.BeamAngleAzim,
366 self.BeamAngleZen,
367 self.AntennaCoord,
368 self.RecPhaseCalibr,
369 self.RecAmpCalibr,
370 370 self.ReceiverGaindB)
371 371
372 372 # self.size,self.nSamples,
@@ -375,7 +375,7 class RecordHeader(Header):
375 375 # self.adcResolution,
376 376 # self.pciDioBusWidth
377 377
378 header = numpy.array(headerTuple,RECORD_STRUCTURE)
378 header = numpy.array(headerTuple, RECORD_STRUCTURE)
379 379 header.tofile(fp)
380 380
381 381 return 1
@@ -394,11 +394,11 def get_dtype_index(numpy_dtype):
394 394
395 395 def get_numpy_dtype(index):
396 396
397 #dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
397 # dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
398 398
399 399 return NUMPY_DTYPE_LIST[index]
400 400
401 401
402 402 def get_dtype_width(index):
403 403
404 return DTYPE_WIDTH[index] No newline at end of file
404 return DTYPE_WIDTH[index]
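
FileHeader.read above pulls a fixed-size binary header into a numpy structured array with a single numpy.fromfile call. The snippet below is an illustrative, simplified version of that pattern, not the file's actual dtype: numpy spells fixed-length byte strings as 'S32', whereas the '<s32', '<f24' and '<u12' codes in the source are kept as written there.

import numpy

HEADER_DTYPE = numpy.dtype([
    ('FileMgcNumber', '<u4'),    # expected magic number, e.g. 0x23020100
    ('nFDTdataRecors', '<u4'),   # number of FDT data records in the file
    ('RadarUnitId', '<u4'),
    ('SiteName', 'S32'),         # null-terminated site name
])

def read_file_header(fp):
    # read exactly one header record from the current file position
    rec = numpy.fromfile(fp, HEADER_DTYPE, 1)
    if rec.size == 0:
        return None
    return {name: rec[name][0] for name in HEADER_DTYPE.names}
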
@@ -1,3 +1,3
1 1 from .jrodata import *
2 2 from .jroheaderIO import *
3 from .jroamisr import * No newline at end of file
3 from .jroamisr import *
@@ -15,41 +15,41 class AMISR:
15 15 self.utctime = None
16 16 self.type = "AMISR"
17 17
18 #propiedades para compatibilidad con Voltages
19 self.timeZone = 0#timezone like jroheader, difference in minutes between UTC and localtime
20 self.dstFlag = 0#self.dataIn.dstFlag
21 self.errorCount = 0#self.dataIn.errorCount
22 self.useLocalTime = True#self.dataIn.useLocalTime
18 # propiedades para compatibilidad con Voltages
19 self.timeZone = 0 # timezone like jroheader, difference in minutes between UTC and localtime
20 self.dstFlag = 0 # self.dataIn.dstFlag
21 self.errorCount = 0 # self.dataIn.errorCount
22 self.useLocalTime = True # self.dataIn.useLocalTime
23 23
24 self.radarControllerHeaderObj = None#self.dataIn.radarControllerHeaderObj.copy()
25 self.systemHeaderObj = None#self.dataIn.systemHeaderObj.copy()
26 self.channelList = [0]#self.dataIn.channelList esto solo aplica para el caso de AMISR
27 self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
24 self.radarControllerHeaderObj = None # self.dataIn.radarControllerHeaderObj.copy()
25 self.systemHeaderObj = None # self.dataIn.systemHeaderObj.copy()
26 self.channelList = [0] # self.dataIn.channelList esto solo aplica para el caso de AMISR
27 self.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
28 28
29 self.flagDiscontinuousBlock = None#self.dataIn.flagDiscontinuousBlock
30 #self.utctime = #self.firstdatatime
31 self.flagDecodeData = None#self.dataIn.flagDecodeData #asumo q la data esta decodificada
32 self.flagDeflipData = None#self.dataIn.flagDeflipData #asumo q la data esta sin flip
29 self.flagDiscontinuousBlock = None # self.dataIn.flagDiscontinuousBlock
30 # self.utctime = #self.firstdatatime
31 self.flagDecodeData = None # self.dataIn.flagDecodeData #asumo q la data esta decodificada
32 self.flagDeflipData = None # self.dataIn.flagDeflipData #asumo q la data esta sin flip
33 33
34 self.nCohInt = 1#self.dataIn.nCohInt
34 self.nCohInt = 1 # self.dataIn.nCohInt
35 35 self.nIncohInt = 1
36 self.ippSeconds = None#self.dataIn.ippSeconds, segun el filename/Setup/Tufile
37 self.windowOfFilter = None#self.dataIn.windowOfFilter
36 self.ippSeconds = None # self.dataIn.ippSeconds, segun el filename/Setup/Tufile
37 self.windowOfFilter = None # self.dataIn.windowOfFilter
38 38
39 self.timeInterval = None#self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
40 self.frequency = None#self.dataIn.frequency
41 self.realtime = 0#self.dataIn.realtime
39 self.timeInterval = None # self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
40 self.frequency = None # self.dataIn.frequency
41 self.realtime = 0 # self.dataIn.realtime
42 42
43 #actualizar en la lectura de datos
44 self.heightList = None#self.dataIn.heightList
45 self.nProfiles = None#Number of samples or nFFTPoints
43 # actualizar en la lectura de datos
44 self.heightList = None # self.dataIn.heightList
45 self.nProfiles = None # Number of samples or nFFTPoints
46 46 self.nRecords = None
47 47 self.nBeams = None
48 self.nBaud = None#self.dataIn.nBaud
49 self.nCode = None#self.dataIn.nCode
50 self.code = None#self.dataIn.code
48 self.nBaud = None # self.dataIn.nBaud
49 self.nCode = None # self.dataIn.nCode
50 self.code = None # self.dataIn.code
51 51
52 #consideracion para los Beams
52 # consideracion para los Beams
53 53 self.beamCodeDict = None
54 54 self.beamRangeDict = None
55 55 self.beamcode = None
@@ -246,7 +246,7 class JROData(GenericData):
246 246
247 247 def getFmaxTimeResponse(self):
248 248
249 period = (10**-6) * self.getDeltaH() / (0.15)
249 period = (10 ** -6) * self.getDeltaH() / (0.15)
250 250
251 251 PRF = 1. / (period * self.nCohInt)
252 252
@@ -338,10 +338,10 class JROData(GenericData):
338 338
339 339 class Voltage(JROData):
340 340
341 dataPP_POW = None
342 dataPP_DOP = None
341 dataPP_POW = None
342 dataPP_DOP = None
343 343 dataPP_WIDTH = None
344 dataPP_SNR = None
344 dataPP_SNR = None
345 345
346 346 def __init__(self):
347 347 '''
@@ -370,7 +370,7 class Voltage(JROData):
370 370 self.flagShiftFFT = False
371 371 self.flagDataAsBlock = False # Asumo que la data es leida perfil a perfil
372 372 self.profileIndex = 0
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
374 374 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
375 375
376 376 def getNoisebyHildebrand(self, channel=None):
@@ -458,8 +458,8 class Spectra(JROData):
458 458 self.ippFactor = 1
459 459 self.beacon_heiIndexList = []
460 460 self.noise_estimation = None
461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp', 'nIncohInt', 'nFFTPoints', 'nProfiles']
463 463
464 464 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
465 465 """
@@ -498,14 +498,14 class Spectra(JROData):
498 498 def getAcfRange(self, extrapoints=0):
499 499
500 500 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
502 502
503 503 return freqrange
504 504
505 505 def getFreqRange(self, extrapoints=0):
506 506
507 507 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
509 509
510 510 return freqrange
511 511
@@ -515,7 +515,7 class Spectra(JROData):
515 515 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
516 516
517 517 if self.nmodes:
518 return velrange/self.nmodes
518 return velrange / self.nmodes
519 519 else:
520 520 return velrange
521 521
@@ -535,8 +535,8 class Spectra(JROData):
535 535 pwcode = 1
536 536
537 537 if self.flagDecodeData:
538 pwcode = numpy.sum(self.code[0]**2)
539 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
538 pwcode = numpy.sum(self.code[0] ** 2)
539 # normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
540 540 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
541 541
542 542 return normFactor
@@ -562,7 +562,7 class Spectra(JROData):
562 562
563 563 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
564 564 if self.nmodes:
565 return self.nmodes*timeInterval
565 return self.nmodes * timeInterval
566 566 else:
567 567 return timeInterval
568 568
@@ -634,7 +634,7 class SpectraHeis(Spectra):
634 634 def normFactor(self):
635 635 pwcode = 1
636 636 if self.flagDecodeData:
637 pwcode = numpy.sum(self.code[0]**2)
637 pwcode = numpy.sum(self.code[0] ** 2)
638 638
639 639 normFactor = self.nIncohInt * self.nCohInt * pwcode
640 640
@@ -765,7 +765,7 class Correlation(JROData):
765 765 xx = numpy.zeros([4, 4])
766 766
767 767 for fil in range(4):
768 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
768 xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
769 769
770 770 xx_inv = numpy.linalg.inv(xx)
771 771 xx_aux = xx_inv[0, :]
@@ -997,13 +997,13 class PlotterData(object):
997 997
998 998 meta = {}
999 999 meta['xrange'] = []
1000 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1000 dy = int(len(self.yrange) / self.MAXNUMY) + 1
1001 1001 tmp = self.data[tm][self.key]
1002 1002 shape = tmp.shape
1003 1003 if len(shape) == 2:
1004 1004 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1005 1005 elif len(shape) == 3:
1006 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1006 dx = int(self.data[tm][self.key].shape[1] / self.MAXNUMX) + 1
1007 1007 data = self.roundFloats(
1008 1008 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1009 1009 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
@@ -516,7 +516,7 class RadarControllerHeader(Header):
516 516 for j in range(len(code_selected) - 1, -1, -1):
517 517 if code_selected[j] == 1:
518 518 tempx[i] = tempx[i] + \
519 2**(len(code_selected) - 1 - j)
519 2 ** (len(code_selected) - 1 - j)
520 520 start = start + 32
521 521 end = end + 32
522 522
@@ -903,4 +903,4 def get_procflag_dtype(index):
903 903
904 904 def get_dtype_width(index):
905 905
906 return DTYPE_WIDTH[index] No newline at end of file
906 return DTYPE_WIDTH[index]
@@ -52,13 +52,13 EARTH_RADIUS = 6.3710e3
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
55 a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 theta = -theta + numpy.pi/2
61 return r*numpy.cos(theta), r*numpy.sin(theta)
58 theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
59 * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
60 theta = -theta + numpy.pi / 2
61 return r * numpy.cos(theta), r * numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
@@ -66,7 +66,7 def km2deg(km):
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 return numpy.rad2deg(km/EARTH_RADIUS)
69 return numpy.rad2deg(km / EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
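
As a usage note for the ll2xy helper changed above: it returns the (east, north) offset in kilometres of (lat2, lon2) relative to (lat1, lon1), using the haversine distance (12742 km is the Earth's diameter) and the initial bearing from arctan2. A quick, hypothetical example; the coordinates are placeholders, not real site values.

x_km, y_km = ll2xy(-12.00, -76.90, -11.95, -76.87)
print('east offset: %.2f km, north offset: %.2f km' % (x_km, y_km))
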
@@ -385,7 +385,7 class Plot(Operation):
385 385 xmax = self.xmax
386 386 else:
387 387 xmin = self.tmin
388 xmax = self.tmin + self.xrange*60*60
388 xmax = self.tmin + self.xrange * 60 * 60
389 389 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
390 390 ax.xaxis.set_major_locator(LinearLocator(9))
391 391 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
@@ -393,10 +393,10 class Plot(Operation):
393 393 ax.set_facecolor(self.bgcolor)
394 394 if self.xscale:
395 395 ax.xaxis.set_major_formatter(FuncFormatter(
396 lambda x, pos: '{0:g}'.format(x*self.xscale)))
396 lambda x, pos: '{0:g}'.format(x * self.xscale)))
397 397 if self.yscale:
398 398 ax.yaxis.set_major_formatter(FuncFormatter(
399 lambda x, pos: '{0:g}'.format(x*self.yscale)))
399 lambda x, pos: '{0:g}'.format(x * self.yscale)))
400 400 if self.xlabel is not None:
401 401 ax.set_xlabel(self.xlabel)
402 402 if self.ylabel is not None:
@@ -447,7 +447,7 class Plot(Operation):
447 447 Reset axes for redraw plots
448 448 '''
449 449
450 for ax in self.axes+self.pf_axes+self.cb_axes:
450 for ax in self.axes + self.pf_axes + self.cb_axes:
451 451 ax.clear()
452 452 ax.firsttime = True
453 453 if hasattr(ax, 'cbar') and ax.cbar:
@@ -510,7 +510,7 class Plot(Operation):
510 510 figname = os.path.join(
511 511 self.save,
512 512 self.save_code,
513 '{}_{}.png'.format(
513 '{}_{}.png'.format(
514 514 self.save_code,
515 515 self.getDateTime(self.data.max_time).strftime(
516 516 '%Y%m%d_%H%M%S'
@@ -649,10 +649,10 class Plot(Operation):
649 649
650 650 tm = getattr(dataOut, self.attr_time)
651 651
652 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
652 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange * 60 * 60:
653 653 self.save_time = tm
654 654 self.__plot()
655 self.tmin += self.xrange*60*60
655 self.tmin += self.xrange * 60 * 60
656 656 self.data.setup()
657 657 self.clear_figures()
658 658
@@ -666,9 +666,9 class Plot(Operation):
666 666 if self.xmin is None:
667 667 self.tmin = tm
668 668 self.xmin = dt.hour
669 minutes = (self.xmin-int(self.xmin)) * 60
669 minutes = (self.xmin - int(self.xmin)) * 60
670 670 seconds = (minutes - int(minutes)) * 60
671 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
671 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
672 672 datetime.datetime(1970, 1, 1)).total_seconds()
673 673 if self.localtime:
674 674 self.tmin += time.timezone
@@ -679,7 +679,7 class Plot(Operation):
679 679 if self.throttle == 0:
680 680 self.__plot()
681 681 else:
682 self.__throttle_plot(self.__plot)#, coerce=coerce)
682 self.__throttle_plot(self.__plot) # , coerce=coerce)
683 683
684 684 def close(self):
685 685
@@ -32,8 +32,8 class CorrelationPlot(Plot):
32 32
33 33 def getSubplots(self):
34 34
35 ncol = int(numpy.sqrt(self.nplots)+0.9)
36 nrow = int(self.nplots*1./ncol + 0.9)
35 ncol = int(numpy.sqrt(self.nplots) + 0.9)
36 nrow = int(self.nplots * 1. / ncol + 0.9)
37 37
38 38 return nrow, ncol
39 39
@@ -50,10 +50,10 class CorrelationPlot(Plot):
50 50 colspan = 2
51 51 self.__nsubplots = 2
52 52
53 self.createFigure(id = id,
54 wintitle = wintitle,
55 widthplot = self.WIDTH + self.WIDTHPROF,
56 heightplot = self.HEIGHT + self.HEIGHTPROF,
53 self.createFigure(id=id,
54 wintitle=wintitle,
55 widthplot=self.WIDTH + self.WIDTHPROF,
56 heightplot=self.HEIGHT + self.HEIGHTPROF,
57 57 show=show)
58 58
59 59 nrow, ncol = self.getSubplots()
@@ -65,10 +65,10 class CorrelationPlot(Plot):
65 65 if counter >= self.nplots:
66 66 break
67 67
68 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
68 self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan, colspan, 1)
69 69
70 70 if showprofile:
71 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
71 self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan + colspan, 1, 1)
72 72
73 73 counter += 1
74 74
@@ -98,7 +98,7 class CorrelationPlot(Plot):
98 98 return None
99 99
100 100 if realtime:
101 if not(isRealtime(utcdatatime = dataOut.utctime)):
101 if not(isRealtime(utcdatatime=dataOut.utctime)):
102 102 print('Skipping this plot function')
103 103 return
104 104
@@ -116,16 +116,16 class CorrelationPlot(Plot):
116 116 x = dataOut.getLagTRange(1)
117 117 y = dataOut.heightList
118 118
119 z = copy.copy(dataOut.data_corr[:,:,0,:])
119 z = copy.copy(dataOut.data_corr[:, :, 0, :])
120 120 for i in range(dataOut.data_corr.shape[0]):
121 z[i,:,:] = z[i,:,:]/factor[i,:]
121 z[i, :, :] = z[i, :, :] / factor[i, :]
122 122 zdB = numpy.abs(z)
123 123
124 124 avg = numpy.average(z, axis=1)
125 125 # avg = numpy.nanmean(z, axis=1)
126 126 # noise = dataOut.noise/factor
127 127
128 #thisDatetime = dataOut.datatime
128 # thisDatetime = dataOut.datatime
129 129 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
130 130 title = wintitle + " Correlation"
131 131 xlabel = "Lag T (s)"
@@ -158,10 +158,10 class CorrelationPlot(Plot):
158 158 self.setWinTitle(title)
159 159
160 160 for i in range(self.nplots):
161 str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S"))
162 title = "Channel %d and %d: : %s" %(dataOut.pairsList[i][0],dataOut.pairsList[i][1] , str_datetime)
163 axes = self.axesList[i*self.__nsubplots]
164 axes.pcolor(x, y, zdB[i,:,:],
161 str_datetime = '%s %s' % (thisDatetime.strftime("%Y/%m/%d"), thisDatetime.strftime("%H:%M:%S"))
162 title = "Channel %d and %d: : %s" % (dataOut.pairsList[i][0], dataOut.pairsList[i][1] , str_datetime)
163 axes = self.axesList[i * self.__nsubplots]
164 axes.pcolor(x, y, zdB[i, :, :],
165 165 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
166 166 xlabel=xlabel, ylabel=ylabel, title=title,
167 167 ticksize=9, cblabel='')
@@ -184,4 +184,4 class CorrelationPlot(Plot):
184 184 save=save,
185 185 ftp=ftp,
186 186 wr_period=wr_period,
187 thisDatetime=thisDatetime) No newline at end of file
187 thisDatetime=thisDatetime)
@@ -31,7 +31,7 class SpectraHeisPlot(Plot):
31 31
32 32 data = {}
33 33 meta = {}
34 spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
34 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
35 35 data['spc_heis'] = spc
36 36
37 37 return data, meta
@@ -40,12 +40,12 class SpectraHeisPlot(Plot):
40 40
41 41 c = 3E8
42 42 deltaHeight = self.data.yrange[1] - self.data.yrange[0]
43 x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
43 x = numpy.arange(-1 * len(self.data.yrange) / 2., len(self.data.yrange) / 2.) * (c / (2 * deltaHeight * len(self.data.yrange) * 1000))
44 44 self.y = self.data[-1]['spc_heis']
45 45 self.titles = []
46 46
47 47 for n, ax in enumerate(self.axes):
48 ychannel = self.y[n,:]
48 ychannel = self.y[n, :]
49 49 if ax.firsttime:
50 50 self.xmin = min(x) if self.xmin is None else self.xmin
51 51 self.xmax = max(x) if self.xmax is None else self.xmax
@@ -78,7 +78,7 class RTIHeisPlot(Plot):
78 78 data = {}
79 79 meta = {}
80 80 spc = dataOut.data_spc / dataOut.normFactor
81 spc = 10*numpy.log10(numpy.average(spc, axis=1))
81 spc = 10 * numpy.log10(numpy.average(spc, axis=1))
82 82 data['rti_heis'] = spc
83 83
84 84 return data, meta
@@ -12,13 +12,13 EARTH_RADIUS = 6.3710e3
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
15 a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 theta = -theta + numpy.pi/2
21 return r*numpy.cos(theta), r*numpy.sin(theta)
18 theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
19 * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
20 theta = -theta + numpy.pi / 2
21 return r * numpy.cos(theta), r * numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
@@ -26,7 +26,7 def km2deg(km):
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 return numpy.rad2deg(km/EARTH_RADIUS)
29 return numpy.rad2deg(km / EARTH_RADIUS)
30 30
31 31
32 32
@@ -50,7 +50,7 class SnrPlot(RTIPlot):
50 50 def update(self, dataOut):
51 51
52 52 data = {
53 'snr': 10*numpy.log10(dataOut.data_snr)
53 'snr': 10 * numpy.log10(dataOut.data_snr)
54 54 }
55 55
56 56 return data, {}
@@ -66,7 +66,7 class DopplerPlot(RTIPlot):
66 66 def update(self, dataOut):
67 67
68 68 data = {
69 'dop': 10*numpy.log10(dataOut.data_dop)
69 'dop': 10 * numpy.log10(dataOut.data_dop)
70 70 }
71 71
72 72 return data, {}
@@ -82,7 +82,7 class PowerPlot(RTIPlot):
82 82 def update(self, dataOut):
83 83
84 84 data = {
85 'pow': 10*numpy.log10(dataOut.data_pow)
85 'pow': 10 * numpy.log10(dataOut.data_pow)
86 86 }
87 87
88 88 return data, {}
@@ -269,22 +269,22 class PolarMapPlot(Plot):
269 269 zeniths = numpy.linspace(
270 270 0, self.data.meta['max_range'], data.shape[1])
271 271 if self.mode == 'E':
272 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
272 azimuths = -numpy.radians(self.data.yrange) + numpy.pi / 2
273 273 r, theta = numpy.meshgrid(zeniths, azimuths)
274 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
274 x, y = r * numpy.cos(theta) * numpy.cos(numpy.radians(self.data.meta['elevation'])), r * numpy.sin(
275 theta) * numpy.cos(numpy.radians(self.data.meta['elevation']))
276 276 x = km2deg(x) + self.lon
277 277 y = km2deg(y) + self.lat
278 278 else:
279 279 azimuths = numpy.radians(self.data.yrange)
280 280 r, theta = numpy.meshgrid(zeniths, azimuths)
281 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
281 x, y = r * numpy.cos(theta), r * numpy.sin(theta)
282 282 self.y = zeniths
283 283
284 284 if ax.firsttime:
285 285 if self.zlimits is not None:
286 286 self.zmin, self.zmax = self.zlimits[n]
287 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
287 ax.plt = ax.pcolormesh(# r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 288 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 289 vmin=self.zmin,
290 290 vmax=self.zmax,
@@ -293,7 +293,7 class PolarMapPlot(Plot):
293 293 if self.zlimits is not None:
294 294 self.zmin, self.zmax = self.zlimits[n]
295 295 ax.collections.remove(ax.collections[0])
296 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
296 ax.plt = ax.pcolormesh(# r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 297 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 298 vmin=self.zmin,
299 299 vmax=self.zmax,
@@ -339,8 +339,8 class PolarMapPlot(Plot):
339 339 ax.add_artist(plt.Circle((self.lon, self.lat),
340 340 km2deg(r), color='0.6', fill=False, lw=0.2))
341 341 ax.text(
342 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
342 self.lon + (km2deg(r)) * numpy.cos(60 * numpy.pi / 180),
343 self.lat + (km2deg(r)) * numpy.sin(60 * numpy.pi / 180),
344 344 '{}km'.format(r),
345 345 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 346
@@ -17,7 +17,7 class SpectraPlot(Plot):
17 17 Plot for Spectra data
18 18 '''
19 19
20 CODE = 'spc'
20 CODE = 'spc_moments'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
@@ -39,11 +39,11 class SpectraPlot(Plot):
39 39
40 40 data = {}
41 41 meta = {}
42 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
42 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
43 43 data['spc'] = spc
44 44 data['rti'] = dataOut.getPower()
45 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
46 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
45 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
46 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
47 47 if self.CODE == 'spc_moments':
48 48 data['moments'] = dataOut.moments
49 49
@@ -71,11 +71,12 class SpectraPlot(Plot):
71 71
72 72 data = self.data[-1]
73 73 z = data['spc']
74
74 #self.CODE = 'spc_moments'
75 75 for n, ax in enumerate(self.axes):
76 76 noise = data['noise'][n]
77 print(n,self.CODE)
77 78 if self.CODE == 'spc_moments':
78 mean = data['moments'][n, 1]
79 mean = data['moments'][n,1]
79 80 if ax.firsttime:
80 81 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
81 82 self.xmin = self.xmin if self.xmin else -self.xmax
@@ -132,7 +133,7 class CrossSpectraPlot(Plot):
132 133
133 134 spc = dataOut.data_spc
134 135 cspc = dataOut.data_cspc
135 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
136 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
136 137 meta['pairs'] = dataOut.pairsList
137 138
138 139 tmp = []
@@ -170,8 +171,8 class CrossSpectraPlot(Plot):
170 171
171 172 for n in range(len(self.data.pairs)):
172 173 pair = self.data.pairs[n]
173 coh = cspc[n*2]
174 phase = cspc[n*2+1]
174 coh = cspc[n * 2]
175 phase = cspc[n * 2 + 1]
175 176 ax = self.axes[2 * n]
176 177 if ax.firsttime:
177 178 ax.plt = ax.pcolormesh(x, y, coh.T,
@@ -222,7 +223,7 class RTIPlot(Plot):
222 223 data = {}
223 224 meta = {}
224 225 data['rti'] = dataOut.getPower()
225 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
226 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
226 227
227 228 return data, meta
228 229
@@ -279,7 +280,7 class CoherencePlot(RTIPlot):
279 280 self.nplots = len(self.data.pairs)
280 281 self.ylabel = 'Range [km]'
281 282 self.xlabel = 'Time'
282 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
283 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
283 284 if self.CODE == 'coh':
284 285 self.cb_label = ''
285 286 self.titles = [
@@ -338,7 +339,7 class NoisePlot(Plot):
338 339
339 340 data = {}
340 341 meta = {}
341 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
342 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor).reshape(dataOut.nChannels, 1)
342 343 meta['yrange'] = numpy.array([])
343 344
344 345 return data, meta
@@ -395,8 +396,8 class PowerProfilePlot(Plot):
395 396
396 397 x = self.data[-1][self.CODE]
397 398
398 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
399 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
399 if self.xmin is None: self.xmin = numpy.nanmin(x) * 0.9
400 if self.xmax is None: self.xmax = numpy.nanmax(x) * 1.1
400 401
401 402 if self.axes[0].firsttime:
402 403 for ch in self.data.channels:
@@ -428,9 +429,9 class SpectraCutPlot(Plot):
428 429
429 430 data = {}
430 431 meta = {}
431 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
432 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
432 433 data['spc'] = spc
433 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
434 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
434 435
435 436 return data, meta
436 437
@@ -453,7 +454,7 class SpectraCutPlot(Plot):
453 454 if self.height_index:
454 455 index = numpy.array(self.height_index)
455 456 else:
456 index = numpy.arange(0, len(y), int((len(y))/9))
457 index = numpy.arange(0, len(y), int((len(y)) / 9))
457 458
458 459 for n, ax in enumerate(self.axes):
459 460 if ax.firsttime:
@@ -479,7 +480,7 class BeaconPhase(Plot):
479 480
480 481 def __init__(self):
481 482 Plot.__init__(self)
482 self.timerange = 24*60*60
483 self.timerange = 24 * 60 * 60
483 484 self.isConfig = False
484 485 self.__nsubplots = 1
485 486 self.counter_imagwr = 0
@@ -520,25 +521,25 class BeaconPhase(Plot):
520 521 colspan = 6
521 522 self.__nsubplots = 2
522 523
523 self.createFigure(id = id,
524 wintitle = wintitle,
525 widthplot = self.WIDTH+self.WIDTHPROF,
526 heightplot = self.HEIGHT+self.HEIGHTPROF,
524 self.createFigure(id=id,
525 wintitle=wintitle,
526 widthplot=self.WIDTH + self.WIDTHPROF,
527 heightplot=self.HEIGHT + self.HEIGHTPROF,
527 528 show=show)
528 529
529 530 nrow, ncol = self.getSubplots()
530 531
531 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
532 self.addAxes(nrow, ncol * ncolspan, 0, 0, colspan, 1)
532 533
533 534 def save_phase(self, filename_phase):
534 f = open(filename_phase,'w+')
535 f = open(filename_phase, 'w+')
535 536 f.write('\n\n')
536 537 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
537 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
538 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n')
538 539 f.close()
539 540
540 541 def save_data(self, filename_phase, data, data_datetime):
541 f=open(filename_phase,'a')
542 f = open(filename_phase, 'a')
542 543 timetuple_data = data_datetime.timetuple()
543 544 day = str(timetuple_data.tm_mday)
544 545 month = str(timetuple_data.tm_mon)
@@ -546,7 +547,7 class BeaconPhase(Plot):
546 547 hour = str(timetuple_data.tm_hour)
547 548 minute = str(timetuple_data.tm_min)
548 549 second = str(timetuple_data.tm_sec)
549 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
550 f.write(day + ' ' + month + ' ' + year + ' ' + hour + ' ' + minute + ' ' + second + ' ' + str(data[0]) + ' ' + str(data[1]) + ' ' + str(data[2]) + ' ' + str(data[3]) + '\n')
550 551 f.close()
551 552
552 553 def plot(self):
@@ -571,7 +572,7 class BeaconPhase(Plot):
571 572 pairsIndexList = []
572 573 for pair in pairsList:
573 574 if pair not in dataOut.pairsList:
574 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
575 raise ValueError("Pair %s is not in dataOut.pairsList" % (pair))
575 576 pairsIndexList.append(dataOut.pairsList.index(pair))
576 577
577 578 if pairsIndexList == []:
@@ -592,28 +593,28 class BeaconPhase(Plot):
592 593 hmin_index = hmin_list[0]
593 594
594 595 if hmax_list.any():
595 hmax_index = hmax_list[-1]+1
596 hmax_index = hmax_list[-1] + 1
596 597
597 598 x = dataOut.getTimeRange()
598 599
599 600 thisDatetime = dataOut.datatime
600 601
601 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
602 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
602 603 xlabel = "Local Time"
603 604 ylabel = "Phase (degrees)"
604 605
605 606 update_figfile = False
606 607
607 608 nplots = len(pairsIndexList)
608 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
609 # phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
609 610 phase_beacon = numpy.zeros(len(pairsIndexList))
610 611 for i in range(nplots):
611 612 pair = dataOut.pairsList[pairsIndexList[i]]
612 613 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
613 614 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
614 615 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
615 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
616 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
616 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
617 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real) * 180 / numpy.pi
617 618
618 619 if dataOut.beacon_heiIndexList:
619 620 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
@@ -651,31 +652,31 class BeaconPhase(Plot):
651 652
652 653 update_figfile = True
653 654
654 #open file beacon phase
655 path = '%s%03d' %(self.PREFIX, self.id)
656 beacon_file = os.path.join(path,'%s.txt'%self.name)
657 self.filename_phase = os.path.join(figpath,beacon_file)
658 #self.save_phase(self.filename_phase)
655 # open file beacon phase
656 path = '%s%03d' % (self.PREFIX, self.id)
657 beacon_file = os.path.join(path, '%s.txt' % self.name)
658 self.filename_phase = os.path.join(figpath, beacon_file)
659 # self.save_phase(self.filename_phase)
659 660
660 661
661 #store data beacon phase
662 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
662 # store data beacon phase
663 # self.save_data(self.filename_phase, phase_beacon, thisDatetime)
663 664
664 665 self.setWinTitle(title)
665 666
666 667
667 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
668 title = "Phase Plot %s" % (thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
668 669
669 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
670 legendlabels = ["Pair (%d,%d)" % (pair[0], pair[1]) for pair in dataOut.pairsList]
670 671
671 672 axes = self.axesList[0]
672 673
673 674 self.xdata = numpy.hstack((self.xdata, x[0:1]))
674 675
675 if len(self.ydata)==0:
676 self.ydata = phase_beacon.reshape(-1,1)
676 if len(self.ydata) == 0:
677 self.ydata = phase_beacon.reshape(-1, 1)
677 678 else:
678 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
679 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1, 1)))
679 680
680 681
681 682 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
@@ -699,4 +700,4 class BeaconPhase(Plot):
699 700 thisDatetime=thisDatetime,
700 701 update_figfile=update_figfile)
701 702
702 return dataOut No newline at end of file
703 return dataOut
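
The core of BeaconPhase.plot above is the complex-coherence estimate: the averaged cross-spectrum of a channel pair is normalised by the two averaged auto-spectra, and the beacon phase is the angle of that ratio in degrees. A self-contained sketch of just that step (array names are illustrative, not the class attributes):

import numpy

def pair_phase(cspc_pair, spc_a, spc_b):
    ccf = numpy.average(cspc_pair, axis=0)        # averaged cross-spectrum
    powa = numpy.average(spc_a, axis=0)           # averaged auto-spectrum, channel a
    powb = numpy.average(spc_b, axis=0)           # averaged auto-spectrum, channel b
    coherence = ccf / numpy.sqrt(powa * powb)     # complex coherence per height
    return numpy.arctan2(coherence.imag, coherence.real) * 180 / numpy.pi
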
@@ -54,9 +54,9 class ScopePlot(Plot):
54 54
55 55 def plot_iq(self, x, y, channelIndexList, thisDatetime, wintitle):
56 56
57 yreal = y[channelIndexList,:].real
58 yimag = y[channelIndexList,:].imag
59 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y"))
57 yreal = y[channelIndexList, :].real
58 yimag = y[channelIndexList, :].imag
59 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y"))
60 60 self.xlabel = "Range (Km)"
61 61 self.ylabel = "Intensity - IQ"
62 62
@@ -65,32 +65,32 class ScopePlot(Plot):
65 65
66 66 self.titles[0] = title
67 67
68 for i,ax in enumerate(self.axes):
69 title = "Channel %d" %(i)
68 for i, ax in enumerate(self.axes):
69 title = "Channel %d" % (i)
70 70 if ax.firsttime:
71 71 self.xmin = min(x)
72 72 self.xmax = max(x)
73 ax.plt_r = ax.plot(x, yreal[i,:], color='b')[0]
74 ax.plt_i = ax.plot(x, yimag[i,:], color='r')[0]
73 ax.plt_r = ax.plot(x, yreal[i, :], color='b')[0]
74 ax.plt_i = ax.plot(x, yimag[i, :], color='r')[0]
75 75 else:
76 ax.plt_r.set_data(x, yreal[i,:])
77 ax.plt_i.set_data(x, yimag[i,:])
76 ax.plt_r.set_data(x, yreal[i, :])
77 ax.plt_i.set_data(x, yimag[i, :])
78 78
79 79 def plot_power(self, x, y, channelIndexList, thisDatetime, wintitle):
80 y = y[channelIndexList,:] * numpy.conjugate(y[channelIndexList,:])
80 y = y[channelIndexList, :] * numpy.conjugate(y[channelIndexList, :])
81 81 yreal = y.real
82 yreal = 10*numpy.log10(yreal)
82 yreal = 10 * numpy.log10(yreal)
83 83 self.y = yreal
84 title = wintitle + " Power: %s" %(thisDatetime.strftime("%d-%b-%Y"))
84 title = wintitle + " Power: %s" % (thisDatetime.strftime("%d-%b-%Y"))
85 85 self.xlabel = "Range (Km)"
86 86 self.ylabel = "Intensity [dB]"
87 87
88 88
89 89 self.titles[0] = title
90 90
91 for i,ax in enumerate(self.axes):
92 title = "Channel %d" %(i)
93 ychannel = yreal[i,:]
91 for i, ax in enumerate(self.axes):
92 title = "Channel %d" % (i)
93 ychannel = yreal[i, :]
94 94
95 95 if ax.firsttime:
96 96 self.xmin = min(x)
@@ -102,66 +102,66 class ScopePlot(Plot):
102 102 def plot_weatherpower(self, x, y, channelIndexList, thisDatetime, wintitle):
103 103
104 104
105 y = y[channelIndexList,:]
106 yreal = y.real
107 yreal = 10*numpy.log10(yreal)
105 y = y[channelIndexList, :]
106 yreal = y.real
107 yreal = 10 * numpy.log10(yreal)
108 108 self.y = yreal
109 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
109 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
110 110 self.xlabel = "Range (Km)"
111 111 self.ylabel = "Intensity"
112 self.xmin = min(x)
113 self.xmax = max(x)
112 self.xmin = min(x)
113 self.xmax = max(x)
114 114
115 self.titles[0] =title
116 for i,ax in enumerate(self.axes):
117 title = "Channel %d" %(i)
115 self.titles[0] = title
116 for i, ax in enumerate(self.axes):
117 title = "Channel %d" % (i)
118 118
119 ychannel = yreal[i,:]
119 ychannel = yreal[i, :]
120 120
121 121 if ax.firsttime:
122 122 ax.plt_r = ax.plot(x, ychannel)[0]
123 123 else:
124 #pass
124 # pass
125 125 ax.plt_r.set_data(x, ychannel)
126 126
127 127 def plot_weathervelocity(self, x, y, channelIndexList, thisDatetime, wintitle):
128 128
129 x = x[channelIndexList,:]
130 yreal = y
129 x = x[channelIndexList, :]
130 yreal = y
131 131 self.y = yreal
132 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
132 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
133 133 self.xlabel = "Velocity (m/s)"
134 134 self.ylabel = "Range (Km)"
135 self.xmin = numpy.min(x)
136 self.xmax = numpy.max(x)
137 self.titles[0] =title
138 for i,ax in enumerate(self.axes):
139 title = "Channel %d" %(i)
140 xchannel = x[i,:]
135 self.xmin = numpy.min(x)
136 self.xmax = numpy.max(x)
137 self.titles[0] = title
138 for i, ax in enumerate(self.axes):
139 title = "Channel %d" % (i)
140 xchannel = x[i, :]
141 141 if ax.firsttime:
142 142 ax.plt_r = ax.plot(xchannel, yreal)[0]
143 143 else:
144 #pass
144 # pass
145 145 ax.plt_r.set_data(xchannel, yreal)
146 146
147 147 def plot_weatherspecwidth(self, x, y, channelIndexList, thisDatetime, wintitle):
148 148
149 x = x[channelIndexList,:]
150 yreal = y
149 x = x[channelIndexList, :]
150 yreal = y
151 151 self.y = yreal
152 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
152 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
153 153 self.xlabel = "width "
154 154 self.ylabel = "Range (Km)"
155 self.xmin = numpy.min(x)
156 self.xmax = numpy.max(x)
157 self.titles[0] =title
158 for i,ax in enumerate(self.axes):
159 title = "Channel %d" %(i)
160 xchannel = x[i,:]
155 self.xmin = numpy.min(x)
156 self.xmax = numpy.max(x)
157 self.titles[0] = title
158 for i, ax in enumerate(self.axes):
159 title = "Channel %d" % (i)
160 xchannel = x[i, :]
161 161 if ax.firsttime:
162 162 ax.plt_r = ax.plot(xchannel, yreal)[0]
163 163 else:
164 #pass
164 # pass
165 165 ax.plt_r.set_data(xchannel, yreal)
166 166
167 167 def plot(self):
@@ -178,11 +178,11 class ScopePlot(Plot):
178 178
179 179 for i in range(self.data.nProfiles):
180 180
181 wintitle1 = " [Profile = %d] " %i
182 if self.CODE =="scope":
181 wintitle1 = " [Profile = %d] " % i
182 if self.CODE == "scope":
183 183 if self.type == "power":
184 184 self.plot_power(self.data.yrange,
185 scope[:,i,:],
185 scope[:, i, :],
186 186 channels,
187 187 thisDatetime,
188 188 wintitle1
@@ -190,42 +190,42 class ScopePlot(Plot):
190 190
191 191 if self.type == "iq":
192 192 self.plot_iq(self.data.yrange,
193 scope[:,i,:],
193 scope[:, i, :],
194 194 channels,
195 195 thisDatetime,
196 196 wintitle1
197 197 )
198 if self.CODE=="pp_power":
198 if self.CODE == "pp_power":
199 199 self.plot_weatherpower(self.data.yrange,
200 scope[:,i,:],
200 scope[:, i, :],
201 201 channels,
202 202 thisDatetime,
203 203 wintitle
204 204 )
205 if self.CODE=="pp_signal":
205 if self.CODE == "pp_signal":
206 206 self.plot_weatherpower(self.data.yrange,
207 scope[:,i,:],
207 scope[:, i, :],
208 208 channels,
209 209 thisDatetime,
210 210 wintitle
211 211 )
212 if self.CODE=="pp_velocity":
213 self.plot_weathervelocity(scope[:,i,:],
212 if self.CODE == "pp_velocity":
213 self.plot_weathervelocity(scope[:, i, :],
214 214 self.data.yrange,
215 215 channels,
216 216 thisDatetime,
217 217 wintitle
218 218 )
219 if self.CODE=="pp_spcwidth":
220 self.plot_weatherspecwidth(scope[:,i,:],
219 if self.CODE == "pp_spcwidth":
220 self.plot_weatherspecwidth(scope[:, i, :],
221 221 self.data.yrange,
222 222 channels,
223 223 thisDatetime,
224 224 wintitle
225 225 )
226 226 else:
227 wintitle = " [Profile = %d] " %self.data.profileIndex
228 if self.CODE== "scope":
227 wintitle = " [Profile = %d] " % self.data.profileIndex
228 if self.CODE == "scope":
229 229 if self.type == "power":
230 230 self.plot_power(self.data.yrange,
231 231 scope,
@@ -241,28 +241,28 class ScopePlot(Plot):
241 241 thisDatetime,
242 242 wintitle
243 243 )
244 if self.CODE=="pp_power":
244 if self.CODE == "pp_power":
245 245 self.plot_weatherpower(self.data.yrange,
246 246 scope,
247 247 channels,
248 248 thisDatetime,
249 249 wintitle
250 250 )
251 if self.CODE=="pp_signal":
251 if self.CODE == "pp_signal":
252 252 self.plot_weatherpower(self.data.yrange,
253 253 scope,
254 254 channels,
255 255 thisDatetime,
256 256 wintitle
257 257 )
258 if self.CODE=="pp_velocity":
258 if self.CODE == "pp_velocity":
259 259 self.plot_weathervelocity(scope,
260 260 self.data.yrange,
261 261 channels,
262 262 thisDatetime,
263 263 wintitle
264 264 )
265 if self.CODE=="pp_specwidth":
265 if self.CODE == "pp_specwidth":
266 266 self.plot_weatherspecwidth(scope,
267 267 self.data.yrange,
268 268 channels,
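
Reviewer note: the string compared against self.CODE differs between the two dispatch branches, "pp_spcwidth" in the per-profile (block) branch above and "pp_specwidth" in this single-profile branch, so for any given CODE value one of the two spectral-width paths can never run. The block branch also builds wintitle1 from the profile counter but passes the unmodified wintitle to the pp_* calls. Both behaviours predate this whitespace-only update; flagging them here in case they are unintended.
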
@@ -1,23 +1,23
1 1 '''
2 2 @author: roj-idl71
3 3 '''
4 #USED IN jroplot_spectra.py
5 RTI_CODE = 0 #Range time intensity (RTI).
6 SPEC_CODE = 1 #Spectra (and Cross-spectra) information.
7 CROSS_CODE = 2 #Cross-Correlation information.
8 COH_CODE = 3 #Coherence map.
9 BASE_CODE = 4 #Base lines graphic.
10 ROW_CODE = 5 #Row Spectra.
11 TOTAL_CODE = 6 #Total Power.
12 DRIFT_CODE = 7 #Drifts graphics.
13 HEIGHT_CODE = 8 #Height profile.
14 PHASE_CODE = 9 #Signal Phase.
4 # USED IN jroplot_spectra.py
5 RTI_CODE = 0 # Range time intensity (RTI).
6 SPEC_CODE = 1 # Spectra (and Cross-spectra) information.
7 CROSS_CODE = 2 # Cross-Correlation information.
8 COH_CODE = 3 # Coherence map.
9 BASE_CODE = 4 # Base lines graphic.
10 ROW_CODE = 5 # Row Spectra.
11 TOTAL_CODE = 6 # Total Power.
12 DRIFT_CODE = 7 # Drifts graphics.
13 HEIGHT_CODE = 8 # Height profile.
14 PHASE_CODE = 9 # Signal Phase.
15 15
16 16 POWER_CODE = 16
17 17 NOISE_CODE = 17
18 18 BEACON_CODE = 18
19 19
20 #USED IN jroplot_parameters.py
20 # USED IN jroplot_parameters.py
21 21 WIND_CODE = 22
22 22 MSKYMAP_CODE = 23
23 23 MPHASE_CODE = 24
@@ -24,9 +24,9 except:
24 24 from time import sleep
25 25
26 26 from schainpy.model.data.jrodata import Spectra
27 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 # from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
28 28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
29 # from schainpy.model.io.jroIO_bltr import BLTRReader
30 30 from numpy import imag, shape, NaN
31 31
32 32
@@ -225,26 +225,26 SPARprc = header['SPARprc'][0]
225 225
226 226 SRVI_STRUCTURE = numpy.dtype([
227 227 ('frame_cnt', '<u4'),
228 ('time_t', '<u4'), #
229 ('tpow', '<f4'), #
230 ('npw1', '<f4'), #
231 ('npw2', '<f4'), #
232 ('cpw1', '<f4'), #
233 ('pcw2', '<f4'), #
234 ('ps_err', '<u4'), #
235 ('te_err', '<u4'), #
236 ('rc_err', '<u4'), #
237 ('grs1', '<u4'), #
238 ('grs2', '<u4'), #
239 ('azipos', '<f4'), #
240 ('azivel', '<f4'), #
241 ('elvpos', '<f4'), #
242 ('elvvel', '<f4'), #
228 ('time_t', '<u4'), #
229 ('tpow', '<f4'), #
230 ('npw1', '<f4'), #
231 ('npw2', '<f4'), #
232 ('cpw1', '<f4'), #
233 ('pcw2', '<f4'), #
234 ('ps_err', '<u4'), #
235 ('te_err', '<u4'), #
236 ('rc_err', '<u4'), #
237 ('grs1', '<u4'), #
238 ('grs2', '<u4'), #
239 ('azipos', '<f4'), #
240 ('azivel', '<f4'), #
241 ('elvpos', '<f4'), #
242 ('elvvel', '<f4'), #
243 243 ('northAngle', '<f4'),
244 ('microsec', '<u4'), #
244 ('microsec', '<u4'), #
245 245 ('azisetvel', '<f4'), #
246 246 ('elvsetpos', '<f4'), #
247 ('RadarConst', '<f4'), ]) #
247 ('RadarConst', '<f4'), ]) #
248 248
249 249 JUMP_STRUCTURE = numpy.dtype([
250 250 ('jump', '<u140'),
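
The realigned trailing comments above do not alter SRVI_STRUCTURE itself; the dtype is what numpy.fromfile uses further down to decode one record. A reduced, self-contained sketch of that pattern with a two-field stand-in layout (illustrative only, not the real record):

    import numpy

    # stand-in for SRVI_STRUCTURE: one uint32 counter and one float32 power
    mini_dtype = numpy.dtype([('frame_cnt', '<u4'), ('tpow', '<f4')])

    raw = numpy.array([(7, 1.5)], dtype=mini_dtype).tobytes()  # fake on-disk record
    header = numpy.frombuffer(raw, dtype=mini_dtype, count=1)  # same dtype/count idea as fromfile

    print(header['frame_cnt'][0], header['tpow'][0])           # 7 1.5
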
@@ -289,34 +289,34 RadarConst5 = RadarConst
289 289 # RecCounter=0
290 290 # Off2StartNxtRec=811248
291 291 # print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec
292 #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
293 #startFp.seek(OffRHeader, os.SEEK_SET)
292 # OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
293 # startFp.seek(OffRHeader, os.SEEK_SET)
294 294 print('debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec)
295 295 print('Posicion del bloque: ', OffRHeader)
296 296
297 297 header = numpy.fromfile(startFp, SRVI_STRUCTURE, 1)
298 298
299 299 self.frame_cnt = header['frame_cnt'][0]
300 self.time_t = header['frame_cnt'][0] #
301 self.tpow = header['frame_cnt'][0] #
302 self.npw1 = header['frame_cnt'][0] #
303 self.npw2 = header['frame_cnt'][0] #
304 self.cpw1 = header['frame_cnt'][0] #
305 self.pcw2 = header['frame_cnt'][0] #
306 self.ps_err = header['frame_cnt'][0] #
307 self.te_err = header['frame_cnt'][0] #
308 self.rc_err = header['frame_cnt'][0] #
309 self.grs1 = header['frame_cnt'][0] #
310 self.grs2 = header['frame_cnt'][0] #
311 self.azipos = header['frame_cnt'][0] #
312 self.azivel = header['frame_cnt'][0] #
313 self.elvpos = header['frame_cnt'][0] #
314 self.elvvel = header['frame_cnt'][0] #
315 self.northAngle = header['frame_cnt'][0] #
316 self.microsec = header['frame_cnt'][0] #
317 self.azisetvel = header['frame_cnt'][0] #
318 self.elvsetpos = header['frame_cnt'][0] #
319 self.RadarConst = header['frame_cnt'][0] #
300 self.time_t = header['frame_cnt'][0] #
301 self.tpow = header['frame_cnt'][0] #
302 self.npw1 = header['frame_cnt'][0] #
303 self.npw2 = header['frame_cnt'][0] #
304 self.cpw1 = header['frame_cnt'][0] #
305 self.pcw2 = header['frame_cnt'][0] #
306 self.ps_err = header['frame_cnt'][0] #
307 self.te_err = header['frame_cnt'][0] #
308 self.rc_err = header['frame_cnt'][0] #
309 self.grs1 = header['frame_cnt'][0] #
310 self.grs2 = header['frame_cnt'][0] #
311 self.azipos = header['frame_cnt'][0] #
312 self.azivel = header['frame_cnt'][0] #
313 self.elvpos = header['frame_cnt'][0] #
314 self.elvvel = header['frame_cnt'][0] #
315 self.northAngle = header['frame_cnt'][0] #
316 self.microsec = header['frame_cnt'][0] #
317 self.azisetvel = header['frame_cnt'][0] #
318 self.elvsetpos = header['frame_cnt'][0] #
319 self.RadarConst = header['frame_cnt'][0] #
320 320
321 321
322 322 self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
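
Reviewer note: in this hunk every attribute from time_t down to RadarConst is filled from header['frame_cnt'][0] in both the old and the new lines, so they all receive the frame counter rather than their own columns; this update only realigns the trailing comments. If that is unintended, reading each field by its own name is the likely fix; a sketch under that assumption, meant to replace the assignments inside the method:

    # sketch only: assumes the intent is one attribute per SRVI_STRUCTURE field
    for name in ('time_t', 'tpow', 'npw1', 'npw2', 'cpw1', 'pcw2',
                 'ps_err', 'te_err', 'rc_err', 'grs1', 'grs2',
                 'azipos', 'azivel', 'elvpos', 'elvvel', 'northAngle',
                 'microsec', 'azisetvel', 'elvsetpos', 'RadarConst'):
        setattr(self, name, header[name][0])
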
@@ -328,4 +328,4 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
328 328
329 329 print('==============================================')
330 330
331 print('==============================================') No newline at end of file
331 print('==============================================')
@@ -21,4 +21,4 from .jroIO_mira35c import *
21 21 from .julIO_param import *
22 22
23 23 from .pxIO_param import *
24 from .jroIO_simulator import * No newline at end of file
24 from .jroIO_simulator import *
@@ -104,7 +104,7 class BLTRParamReader(Reader, ProcessingUnit):
104 104 self.isConfig = False
105 105 self.filename = None
106 106 self.status_value = 0
107 self.datatime = datetime.datetime(1900,1,1)
107 self.datatime = datetime.datetime(1900, 1, 1)
108 108 self.filefmt = "*********%Y%m%d******"
109 109
110 110 def setup(self, **kwargs):
@@ -119,7 +119,7 class BLTRParamReader(Reader, ProcessingUnit):
119 119
120 120 for nTries in range(self.nTries):
121 121 fullpath = self.searchFilesOnLine(self.path, self.startDate,
122 self.endDate, self.expLabel, self.ext, self.walk,
122 self.endDate, self.expLabel, self.ext, self.walk,
123 123 self.filefmt, self.folderfmt)
124 124 try:
125 125 fullpath = next(fullpath)
@@ -138,7 +138,7 class BLTRParamReader(Reader, ProcessingUnit):
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 self.delay, self.path, nTries + 1),
141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
@@ -148,7 +148,7 class BLTRParamReader(Reader, ProcessingUnit):
148 148 self.readFirstHeader()
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153 self.setNextFile()
154 154
@@ -258,7 +258,7 class BLTRParamReader(Reader, ProcessingUnit):
258 258 self.rx_gains = self.header_rec['rx_gains']
259 259 self.time = self.header_rec['time'][0]
260 260 dt = datetime.datetime.utcfromtimestamp(self.time)
261 if dt.date()>self.datatime.date():
261 if dt.date() > self.datatime.date():
262 262 self.flagDiscontinuousBlock = 1
263 263 self.datatime = dt
264 264
@@ -352,4 +352,4 class BLTRParamReader(Reader, ProcessingUnit):
352 352
353 353 self.getData()
354 354
355 return No newline at end of file
355 return
@@ -181,8 +181,8 class RecordHeaderBLTR():
181 181 self.Off2StartData = int(header['Off2StartData'][0])
182 182 self.nUtime = header['nUtime'][0]
183 183 self.nMilisec = header['nMilisec'][0]
184 self.ExpTagName = '' # str(header['ExpTagName'][0])
185 self.ExpComment = '' # str(header['ExpComment'][0])
184 self.ExpTagName = '' # str(header['ExpTagName'][0])
185 self.ExpComment = '' # str(header['ExpComment'][0])
186 186 self.SiteLatDegrees = header['SiteLatDegrees'][0]
187 187 self.SiteLongDegrees = header['SiteLongDegrees'][0]
188 188 self.RTCgpsStatus = header['RTCgpsStatus'][0]
@@ -293,7 +293,7 class BLTRSpectraReader (ProcessingUnit):
293 293
294 294 self.getData()
295 295
296 def setup(self,
296 def setup(self,
297 297 path=None,
298 298 startDate=None,
299 299 endDate=None,
@@ -374,7 +374,7 class BLTRSpectraReader (ProcessingUnit):
374 374 return
375 375
376 376 if self.mode == 1:
377 self.rheader.read(self.BlockCounter+1)
377 self.rheader.read(self.BlockCounter + 1)
378 378 elif self.mode == 0:
379 379 self.rheader.read(self.BlockCounter)
380 380
@@ -393,13 +393,13 class BLTRSpectraReader (ProcessingUnit):
393 393 self.dataOut.nRdPairs = self.nRdPairs
394 394 self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
395 395 self.dataOut.channelList = range(self.nChannels)
396 self.dataOut.nProfiles=self.rheader.nProfiles
397 self.dataOut.nIncohInt=self.rheader.nIncohInt
398 self.dataOut.nCohInt=self.rheader.nCohInt
399 self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
400 self.dataOut.PRF=self.rheader.PRFhz
401 self.dataOut.nFFTPoints=self.rheader.nProfiles
402 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
396 self.dataOut.nProfiles = self.rheader.nProfiles
397 self.dataOut.nIncohInt = self.rheader.nIncohInt
398 self.dataOut.nCohInt = self.rheader.nCohInt
399 self.dataOut.ippSeconds = 1 / float(self.rheader.PRFhz)
400 self.dataOut.PRF = self.rheader.PRFhz
401 self.dataOut.nFFTPoints = self.rheader.nProfiles
402 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec / 1000.
403 403 self.dataOut.timeZone = 0
404 404 self.dataOut.useLocalTime = False
405 405 self.dataOut.nmodes = 2
@@ -408,10 +408,10 class BLTRSpectraReader (ProcessingUnit):
408 408 self.Off2StartNxtRec + self.Off2StartData
409 409 self.fp.seek(OffDATA, os.SEEK_SET)
410 410
411 self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
411 self.data_fft = numpy.fromfile(self.fp, [('complex', '<c8')], self.nProfiles * self.nChannels * self.nHeights)
412 412 self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
413 self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
414 self.data_block = numpy.transpose(self.data_block, (1,2,0))
413 self.data_block = numpy.reshape(self.data_fft, (self.nHeights, self.nChannels, self.nProfiles))
414 self.data_block = numpy.transpose(self.data_block, (1, 2, 0))
415 415 copy = self.data_block.copy()
416 416 spc = copy * numpy.conjugate(copy)
417 417 self.data_spc = numpy.absolute(spc) # valor absoluto o magnitud
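
The added spaces in the reshape/transpose calls do not change the ordering: the flat FFT vector is reshaped to (heights, channels, profiles) and then transposed to (channels, profiles, heights) before the conjugate product forms the power spectrum. A shape-only check with synthetic sizes:

    import numpy

    nHeights, nChannels, nProfiles = 4, 2, 8
    data_fft = numpy.arange(nHeights * nChannels * nProfiles, dtype='complex64')

    block = numpy.reshape(data_fft, (nHeights, nChannels, nProfiles))
    block = numpy.transpose(block, (1, 2, 0))              # -> (channels, profiles, heights)
    spc = numpy.absolute(block * numpy.conjugate(block))   # real, non-negative power

    print(block.shape, spc.shape)                          # (2, 8, 4) (2, 8, 4)
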
@@ -450,4 +450,4 class BLTRSpectraReader (ProcessingUnit):
450 450
451 451 self.BlockCounter += 2
452 452 self.dataOut.data_spc = self.data_spc
453 self.dataOut.data_cspc =self.data_cspc
453 self.dataOut.data_cspc = self.data_cspc
@@ -23,36 +23,36 except:
23 23 class RadacHeader():
24 24 def __init__(self, fp):
25 25 header = 'Raw11/Data/RadacHeader'
26 self.beamCodeByPulse = fp.get(header+'/BeamCode')
26 self.beamCodeByPulse = fp.get(header + '/BeamCode')
27 27 self.beamCode = fp.get('Raw11/Data/Beamcodes')
28 self.code = fp.get(header+'/Code')
29 self.frameCount = fp.get(header+'/FrameCount')
30 self.modeGroup = fp.get(header+'/ModeGroup')
31 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
32 self.pulseCount = fp.get(header+'/PulseCount')
33 self.radacTime = fp.get(header+'/RadacTime')
34 self.timeCount = fp.get(header+'/TimeCount')
35 self.timeStatus = fp.get(header+'/TimeStatus')
36
37 self.nrecords = self.pulseCount.shape[0] #nblocks
38 self.npulses = self.pulseCount.shape[1] #nprofile
39 self.nsamples = self.nsamplesPulse[0,0] #ngates
28 self.code = fp.get(header + '/Code')
29 self.frameCount = fp.get(header + '/FrameCount')
30 self.modeGroup = fp.get(header + '/ModeGroup')
31 self.nsamplesPulse = fp.get(header + '/NSamplesPulse')
32 self.pulseCount = fp.get(header + '/PulseCount')
33 self.radacTime = fp.get(header + '/RadacTime')
34 self.timeCount = fp.get(header + '/TimeCount')
35 self.timeStatus = fp.get(header + '/TimeStatus')
36
37 self.nrecords = self.pulseCount.shape[0] # nblocks
38 self.npulses = self.pulseCount.shape[1] # nprofile
39 self.nsamples = self.nsamplesPulse[0, 0] # ngates
40 40 self.nbeams = self.beamCode.shape[1]
41 41
42 42
43 43 def getIndexRangeToPulse(self, idrecord=0):
44 #indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
45 #startPulseCountId = indexToZero[0][0]
46 #endPulseCountId = startPulseCountId - 1
47 #range1 = numpy.arange(startPulseCountId,self.npulses,1)
48 #range2 = numpy.arange(0,startPulseCountId,1)
49 #return range1, range2
44 # indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
45 # startPulseCountId = indexToZero[0][0]
46 # endPulseCountId = startPulseCountId - 1
47 # range1 = numpy.arange(startPulseCountId,self.npulses,1)
48 # range2 = numpy.arange(0,startPulseCountId,1)
49 # return range1, range2
50 50 zero = 0
51 npulse = max(self.pulseCount[0,:]+1)-1
52 looking_index = numpy.where(self.pulseCount.value[idrecord,:]==npulse)[0]
51 npulse = max(self.pulseCount[0, :] + 1) - 1
52 looking_index = numpy.where(self.pulseCount.value[idrecord, :] == npulse)[0]
53 53 getLastIndex = looking_index[-1]
54 index_data = numpy.arange(0,getLastIndex+1,1)
55 index_buffer = numpy.arange(getLastIndex+1,self.npulses,1)
54 index_data = numpy.arange(0, getLastIndex + 1, 1)
55 index_buffer = numpy.arange(getLastIndex + 1, self.npulses, 1)
56 56 return index_data, index_buffer
57 57
58 58 class AMISRReader(ProcessingUnit):
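
The spacing changes in getIndexRangeToPulse keep its logic intact: the last index at which pulseCount reaches its maximum value splits a record into the pulses used directly (index_data) and the tail carried into the buffer for the next record (index_buffer). A small standalone illustration with an invented pulse-counter row:

    import numpy

    npulses = 10
    pulse_count = numpy.array([5, 6, 7, 8, 9, 0, 1, 2, 3, 4])  # toy counter that wraps mid-record

    npulse = max(pulse_count + 1) - 1                  # highest counter value seen (9)
    last = numpy.where(pulse_count == npulse)[0][-1]   # last position where it occurs (index 4)

    index_data = numpy.arange(0, last + 1)             # completed in this record -> [0 1 2 3 4]
    index_buffer = numpy.arange(last + 1, npulses)     # deferred to the next record -> [5 6 7 8 9]
    print(index_data, index_buffer)
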
@@ -91,7 +91,7 class AMISRReader(ProcessingUnit):
91 91 self.index_amisr_buffer = None
92 92 self.beamCodeByFrame = None
93 93 self.radacTimeByFrame = None
94 #atributos originales tal y como esta en el archivo de datos
94 # atributos originales tal y como esta en el archivo de datos
95 95 self.beamCodesFromFile = None
96 96 self.radacTimeFromFile = None
97 97 self.rangeFromFile = None
@@ -101,7 +101,7 class AMISRReader(ProcessingUnit):
101 101 self.beamCodeDict = {}
102 102 self.beamRangeDict = {}
103 103
104 #experiment cgf file
104 # experiment cgf file
105 105 self.npulsesint_fromfile = None
106 106 self.recordsperfile_fromfile = None
107 107 self.nbeamcodes_fromfile = None
@@ -131,7 +131,7 class AMISRReader(ProcessingUnit):
131 131
132 132 return dataObj
133 133
134 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
134 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
135 135 self.path = path
136 136 self.startDate = startDate
137 137 self.endDate = endDate
@@ -144,7 +144,7 class AMISRReader(ProcessingUnit):
144 144 self.status = 1
145 145 else:
146 146 self.status = 0
147 print('Path:%s does not exists'%self.path)
147 print('Path:%s does not exists' % self.path)
148 148
149 149 return
150 150
@@ -153,14 +153,14 class AMISRReader(ProcessingUnit):
153 153 year = int(amisr_dirname_format[0:4])
154 154 month = int(amisr_dirname_format[4:6])
155 155 dom = int(amisr_dirname_format[6:8])
156 thisDate = datetime.date(year,month,dom)
156 thisDate = datetime.date(year, month, dom)
157 157
158 if (thisDate>=self.startDate and thisDate <= self.endDate):
158 if (thisDate >= self.startDate and thisDate <= self.endDate):
159 159 return amisr_dirname_format
160 160 except:
161 161 return None
162 162
163 def __findDataForDates(self,online=False):
163 def __findDataForDates(self, online=False):
164 164
165 165
166 166
@@ -168,13 +168,13 class AMISRReader(ProcessingUnit):
168 168 return None
169 169
170 170 pat = '\d+.\d+'
171 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
172 dirnameList = [x for x in dirnameList if x!=None]
171 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
172 dirnameList = [x for x in dirnameList if x != None]
173 173 dirnameList = [x.string for x in dirnameList]
174 174 if not(online):
175 175 dirnameList = [self.__selDates(x) for x in dirnameList]
176 dirnameList = [x for x in dirnameList if x!=None]
177 if len(dirnameList)>0:
176 dirnameList = [x for x in dirnameList if x != None]
177 if len(dirnameList) > 0:
178 178 self.status = 1
179 179 self.dirnameList = dirnameList
180 180 self.dirnameList.sort()
@@ -183,34 +183,34 class AMISRReader(ProcessingUnit):
183 183 return None
184 184
185 185 def __getTimeFromData(self):
186 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
186 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
188 188
189 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
189 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
190 190 print('........................................')
191 191 filter_filenameList = []
192 192 self.filenameList.sort()
193 for i in range(len(self.filenameList)-1):
193 for i in range(len(self.filenameList) - 1):
194 194 filename = self.filenameList[i]
195 fp = h5py.File(filename,'r')
195 fp = h5py.File(filename, 'r')
196 196 time_str = fp.get('Time/RadacTimeString')
197 197
198 198 startDateTimeStr_File = time_str[0][0].split('.')[0]
199 199 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
200 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
200 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
201 201
202 202 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
203 203 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
204 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
204 endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
205 205
206 206 fp.close()
207 207
208 208 if self.timezone == 'lt':
209 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
210 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
209 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
210 endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
211 211
212 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
213 #self.filenameList.remove(filename)
212 if (endDateTime_File >= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
213 # self.filenameList.remove(filename)
214 214 filter_filenameList.append(filename)
215 215
216 216 filter_filenameList.sort()
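
Two behaviours in __getTimeFromData survive this whitespace pass and may be worth a second look: the loop runs over range(len(self.filenameList) - 1), so the last file is never time-checked, and the 'lt' case shifts both bounds by a fixed 300 minutes (UTC-5). The strptime-plus-rebuild step can also be written directly with datetime.strptime; a minimal sketch of the equivalent parse on an invented timestamp:

    import datetime

    time_str = '2020-01-15 14:30:00.123'               # made-up RadacTimeString entry
    stamp = time_str.split('.')[0]                      # drop fractional seconds, as the reader does
    parsed = datetime.datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S')

    local = parsed - datetime.timedelta(minutes=300)    # same UTC-5 shift used when timezone == 'lt'
    print(parsed, local)
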
@@ -218,7 +218,7 class AMISRReader(ProcessingUnit):
218 218 return 1
219 219
220 220 def __filterByGlob1(self, dirName):
221 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
221 filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
222 222 filterDict = {}
223 223 filterDict.setdefault(dirName)
224 224 filterDict[dirName] = filter_files
@@ -233,21 +233,21 class AMISRReader(ProcessingUnit):
233 233
234 234
235 235 def __selectDataForTimes(self, online=False):
236 #aun no esta implementado el filtro for tiempo
236 # aun no esta implementado el filtro for tiempo
237 237 if not(self.status):
238 238 return None
239 239
240 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
240 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
241 241
242 242 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
243 243
244 244 self.__getFilenameList(fileListInKeys, dirList)
245 245 if not(online):
246 #filtro por tiempo
246 # filtro por tiempo
247 247 if not(self.all):
248 248 self.__getTimeFromData()
249 249
250 if len(self.filenameList)>0:
250 if len(self.filenameList) > 0:
251 251 self.status = 1
252 252 self.filenameList.sort()
253 253 else:
@@ -255,7 +255,7 class AMISRReader(ProcessingUnit):
255 255 return None
256 256
257 257 else:
258 #get the last file - 1
258 # get the last file - 1
259 259 self.filenameList = [self.filenameList[-2]]
260 260
261 261 new_dirnameList = []
@@ -291,8 +291,8 class AMISRReader(ProcessingUnit):
291 291 path,
292 292 startDate,
293 293 endDate,
294 startTime=datetime.time(0,0,0),
295 endTime=datetime.time(23,59,59),
294 startTime=datetime.time(0, 0, 0),
295 endTime=datetime.time(23, 59, 59),
296 296 walk=True):
297 297
298 298 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -304,7 +304,7 class AMISRReader(ProcessingUnit):
304 304 self.__selectDataForTimes()
305 305
306 306 for i in range(len(self.filenameList)):
307 print("%s" %(self.filenameList[i]))
307 print("%s" % (self.filenameList[i]))
308 308
309 309 return
310 310
@@ -320,7 +320,7 class AMISRReader(ProcessingUnit):
320 320
321 321 filename = self.filenameList[idFile]
322 322
323 amisrFilePointer = h5py.File(filename,'r')
323 amisrFilePointer = h5py.File(filename, 'r')
324 324
325 325 break
326 326
@@ -330,7 +330,7 class AMISRReader(ProcessingUnit):
330 330
331 331 self.amisrFilePointer = amisrFilePointer
332 332
333 print("Setting the file: %s"%self.filename)
333 print("Setting the file: %s" % self.filename)
334 334
335 335 return 1
336 336
@@ -341,47 +341,47 class AMISRReader(ProcessingUnit):
341 341 self.__selectDataForTimes(online=True)
342 342 filename = self.filenameList[0]
343 343 while self.__filename_online == filename:
344 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
344 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
345 345 sleep(self.__waitForNewFile)
346 346 self.__selectDataForTimes(online=True)
347 347 filename = self.filenameList[0]
348 348
349 349 self.__filename_online = filename
350 350
351 self.amisrFilePointer = h5py.File(filename,'r')
351 self.amisrFilePointer = h5py.File(filename, 'r')
352 352 self.flagIsNewFile = 1
353 353 self.filename = filename
354 print("Setting the file: %s"%self.filename)
354 print("Setting the file: %s" % self.filename)
355 355 return 1
356 356
357 357
358 358 def __readHeader(self):
359 359 self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
360 360
361 #update values from experiment cfg file
361 # update values from experiment cfg file
362 362 if self.radacHeaderObj.nrecords == self.recordsperfile_fromfile:
363 363 self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
364 364 self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
365 365 self.radacHeaderObj.npulses = self.npulsesint_fromfile
366 366 self.radacHeaderObj.nsamples = self.ngates_fromfile
367 367
368 #looking index list for data
369 start_index = self.radacHeaderObj.pulseCount[0,:][0]
368 # looking index list for data
369 start_index = self.radacHeaderObj.pulseCount[0, :][0]
370 370 end_index = self.radacHeaderObj.npulses
371 371 range4data = list(range(start_index, end_index))
372 372 self.index4_schain_datablock = numpy.array(range4data)
373 373
374 374 buffer_start_index = 0
375 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
375 buffer_end_index = self.radacHeaderObj.pulseCount[0, :][0]
376 376 range4buffer = list(range(buffer_start_index, buffer_end_index))
377 377 self.index4_buffer = numpy.array(range4buffer)
378 378
379 379 self.linear_pulseCount = numpy.array(range4data + range4buffer)
380 self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0,:]+1)
380 self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0, :] + 1)
381 381
382 #get tuning frequency
383 frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
384 self.frequency_h5file = frequency_h5file_dataset[0,0]
382 # get tuning frequency
383 frequency_h5file_dataset = self.amisrFilePointer.get('Rx' + '/TuningFrequency')
384 self.frequency_h5file = frequency_h5file_dataset[0, 0]
385 385
386 386 self.flagIsNewFile = 1
387 387
@@ -391,20 +391,20 class AMISRReader(ProcessingUnit):
391 391
392 392 beamCodeMap = self.amisrFilePointer.get('Setup/BeamcodeMap')
393 393
394 for i in range(len(self.radacHeaderObj.beamCode[0,:])):
394 for i in range(len(self.radacHeaderObj.beamCode[0, :])):
395 395 self.beamCodeDict.setdefault(i)
396 396 self.beamRangeDict.setdefault(i)
397 beamcodeValue = self.radacHeaderObj.beamCode[0,i]
398 beamcodeIndex = numpy.where(beamCodeMap[:,0] == beamcodeValue)[0][0]
397 beamcodeValue = self.radacHeaderObj.beamCode[0, i]
398 beamcodeIndex = numpy.where(beamCodeMap[:, 0] == beamcodeValue)[0][0]
399 399 x = beamCodeMap[beamcodeIndex][1]
400 400 y = beamCodeMap[beamcodeIndex][2]
401 401 z = beamCodeMap[beamcodeIndex][3]
402 402 self.beamCodeDict[i] = [beamcodeValue, x, y, z]
403 403
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0, :]
405 405
406 406 for i in range(len(list(self.beamCodeDict.values()))):
407 xx = numpy.where(just4record0==list(self.beamCodeDict.values())[i][0])
407 xx = numpy.where(just4record0 == list(self.beamCodeDict.values())[i][0])
408 408 indexPulseByBeam = self.linear_pulseCount[xx[0]]
409 409 self.beamRangeDict[i] = indexPulseByBeam
410 410
@@ -414,22 +414,22 class AMISRReader(ProcessingUnit):
414 414
415 415 experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')
416 416
417 expFinder = glob.glob1(experimentCfgPath,'*.exp')
418 if len(expFinder)== 0:
417 expFinder = glob.glob1(experimentCfgPath, '*.exp')
418 if len(expFinder) == 0:
419 419 self.status = 0
420 420 return None
421 421
422 experimentFilename = os.path.join(experimentCfgPath,expFinder[0])
422 experimentFilename = os.path.join(experimentCfgPath, expFinder[0])
423 423
424 424 f = open(experimentFilename)
425 425 lines = f.readlines()
426 426 f.close()
427 427
428 parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
428 parmsList = ['npulsesint*', 'recordsperfile*', 'nbeamcodes*', 'ngates*']
429 429 filterList = [fnmatch.filter(lines, x) for x in parmsList]
430 430
431 431
432 values = [re.sub(r'\D',"",x[0]) for x in filterList]
432 values = [re.sub(r'\D', "", x[0]) for x in filterList]
433 433
434 434 self.npulsesint_fromfile = int(values[0])
435 435 self.recordsperfile_fromfile = int(values[1])
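
The .exp parsing keeps its approach after the spacing fixes: fnmatch.filter picks the matching lines and re.sub(r'\D', "", ...) strips everything but the digits. A standalone sketch of the same extraction on invented config lines:

    import fnmatch
    import re

    lines = ['npulsesint=128\n', 'recordsperfile=60\n', 'ngates=1000\n']  # invented .exp content
    parms = ['npulsesint*', 'recordsperfile*', 'ngates*']

    filtered = [fnmatch.filter(lines, p) for p in parms]
    values = [re.sub(r'\D', '', f[0]) for f in filtered]
    print(values)   # ['128', '60', '1000']
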
@@ -439,12 +439,12 class AMISRReader(ProcessingUnit):
439 439 tufileFinder = fnmatch.filter(lines, 'tufile=*')
440 440 tufile = tufileFinder[0].split('=')[1].split('\n')[0]
441 441 tufile = tufile.split('\r')[0]
442 tufilename = os.path.join(experimentCfgPath,tufile)
442 tufilename = os.path.join(experimentCfgPath, tufile)
443 443
444 444 f = open(tufilename)
445 445 lines = f.readlines()
446 446 f.close()
447 self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6
447 self.ippSeconds_fromfile = float(lines[1].split()[2]) / 1E6
448 448
449 449
450 450 self.status = 1
@@ -459,7 +459,7 class AMISRReader(ProcessingUnit):
459 459 self.buffer_radactime = numpy.zeros_like(self.radacTimeByFrame)
460 460
461 461
462 def __setNextFile(self,online=False):
462 def __setNextFile(self, online=False):
463 463
464 464 if not(online):
465 465 newFile = self.__setNextFileOffline()
@@ -479,11 +479,11 class AMISRReader(ProcessingUnit):
479 479 self.readDataBlock()
480 480
481 481
482 def setup(self,path=None,
483 startDate=None,
484 endDate=None,
485 startTime=datetime.time(0,0,0),
486 endTime=datetime.time(23,59,59),
482 def setup(self, path=None,
483 startDate=None,
484 endDate=None,
485 startTime=datetime.time(0, 0, 0),
486 endTime=datetime.time(23, 59, 59),
487 487 walk=True,
488 488 timezone='ut',
489 489 all=0,
@@ -493,13 +493,13 class AMISRReader(ProcessingUnit):
493 493 self.all = all
494 494 self.online = online
495 495 if not(online):
496 #Busqueda de archivos offline
496 # Busqueda de archivos offline
497 497 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
498 498 else:
499 499 self.searchFilesOnLine(path, walk)
500 500
501 501 if not(self.filenameList):
502 print("There is no files into the folder: %s"%(path))
502 print("There is no files into the folder: %s" % (path))
503 503
504 504 sys.exit(-1)
505 505
@@ -511,22 +511,22 class AMISRReader(ProcessingUnit):
511 511
512 512 # first_beamcode = self.radacHeaderObj.beamCodeByPulse[0,0]
513 513 # index = numpy.where(self.radacHeaderObj.beamCodeByPulse[0,:]!=first_beamcode)[0][0]
514 self.profileIndex_offset = self.radacHeaderObj.pulseCount[0,:][0]
514 self.profileIndex_offset = self.radacHeaderObj.pulseCount[0, :][0]
515 515 self.profileIndex = self.profileIndex_offset
516 516
517 517 def readRanges(self):
518 518 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
519 519
520 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
520 self.rangeFromFile = numpy.reshape(dataset.value, (-1))
521 521 return self.rangeFromFile
522 522
523 523
524 def readRadacTime(self,idrecord, range1, range2):
524 def readRadacTime(self, idrecord, range1, range2):
525 525 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
526 526
527 527 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
528 #radacTimeByFrame = dataset[idrecord - 1,range1]
529 #radacTimeByFrame = dataset[idrecord,range2]
528 # radacTimeByFrame = dataset[idrecord - 1,range1]
529 # radacTimeByFrame = dataset[idrecord,range2]
530 530
531 531 return radacTimeByFrame
532 532
@@ -535,8 +535,8 class AMISRReader(ProcessingUnit):
535 535 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
536 536 self.beamCodesFromFile = dataset.value
537 537
538 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
539 #beamcodeByFrame[range2] = dataset[idrecord, range2]
538 # beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
539 # beamcodeByFrame[range2] = dataset[idrecord, range2]
540 540 beamcodeByFrame[range1] = dataset[idrecord, range1]
541 541 beamcodeByFrame[range2] = dataset[idrecord, range2]
542 542
@@ -544,7 +544,7 class AMISRReader(ProcessingUnit):
544 544
545 545
546 546 def __setDataByFrame(self):
547 ndata = 2 # porque es complejo
547 ndata = 2 # porque es complejo
548 548 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
549 549 return dataByFrame
550 550
@@ -553,31 +553,31 class AMISRReader(ProcessingUnit):
553 553 return dataset
554 554
555 555 def __setDataBlock(self,):
556 real = self.dataByFrame[:,:,0] #asumo que 0 es real
557 imag = self.dataByFrame[:,:,1] #asumo que 1 es imaginario
558 datablock = real + imag*1j #armo el complejo
556 real = self.dataByFrame[:, :, 0] # asumo que 0 es real
557 imag = self.dataByFrame[:, :, 1] # asumo que 1 es imaginario
558 datablock = real + imag * 1j # armo el complejo
559 559 return datablock
560 560
561 def readSamples_version1(self,idrecord):
562 #estas tres primeras lineas solo se deben ejecutar una vez
561 def readSamples_version1(self, idrecord):
562 # estas tres primeras lineas solo se deben ejecutar una vez
563 563 if self.flagIsNewFile:
564 #reading dataset
564 # reading dataset
565 565 self.dataset = self.__readDataSet()
566 566 self.flagIsNewFile = 0
567 567
568 568 if idrecord == 0:
569 self.dataByFrame[self.index4_schain_datablock, : ,:] = self.dataset[0, self.index_amisr_sample,:,:]
569 self.dataByFrame[self.index4_schain_datablock, : , :] = self.dataset[0, self.index_amisr_sample, :, :]
570 570 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[0, self.index_amisr_sample]
571 571 datablock = self.__setDataBlock()
572 572 if len(self.index_amisr_buffer) > 0:
573 self.buffer = self.dataset[0, self.index_amisr_buffer,:,:]
573 self.buffer = self.dataset[0, self.index_amisr_buffer, :, :]
574 574 self.buffer_radactime = self.radacHeaderObj.radacTime[0, self.index_amisr_buffer]
575 575
576 576 return datablock
577 577 if len(self.index_amisr_buffer) > 0:
578 self.dataByFrame[self.index4_buffer,:,:] = self.buffer.copy()
578 self.dataByFrame[self.index4_buffer, :, :] = self.buffer.copy()
579 579 self.radacTimeByFrame[self.index4_buffer] = self.buffer_radactime.copy()
580 self.dataByFrame[self.index4_schain_datablock,:,:] = self.dataset[idrecord, self.index_amisr_sample,:,:]
580 self.dataByFrame[self.index4_schain_datablock, :, :] = self.dataset[idrecord, self.index_amisr_sample, :, :]
581 581 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_sample]
582 582 datablock = self.__setDataBlock()
583 583 if len(self.index_amisr_buffer) > 0:
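
__setDataBlock (comment spacing only changed here) assembles the complex voltages from the last axis of dataByFrame, index 0 as the real part and index 1 as the imaginary part, as the Spanish comments state. A minimal standalone illustration of that assembly:

    import numpy

    npulses, nsamples = 2, 3
    data_by_frame = numpy.zeros((npulses, nsamples, 2))
    data_by_frame[:, :, 0] = 1.0    # real part placeholder
    data_by_frame[:, :, 1] = -2.0   # imaginary part placeholder

    datablock = data_by_frame[:, :, 0] + data_by_frame[:, :, 1] * 1j
    print(datablock.dtype, datablock[0, 0])   # complex128 (1-2j)
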
@@ -587,14 +587,14 class AMISRReader(ProcessingUnit):
587 587 return datablock
588 588
589 589
590 def readSamples(self,idrecord):
590 def readSamples(self, idrecord):
591 591 if self.flagIsNewFile:
592 592 self.dataByFrame = self.__setDataByFrame()
593 593 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
594 594
595 #reading ranges
595 # reading ranges
596 596 self.readRanges()
597 #reading dataset
597 # reading dataset
598 598 self.dataset = self.__readDataSet()
599 599
600 600 self.flagIsNewFile = 0
@@ -607,8 +607,8 class AMISRReader(ProcessingUnit):
607 607 def readDataBlock(self):
608 608
609 609 self.datablock = self.readSamples_version1(self.idrecord_count)
610 #self.datablock = self.readSamples(self.idrecord_count)
611 #print 'record:', self.idrecord_count
610 # self.datablock = self.readSamples(self.idrecord_count)
611 # print 'record:', self.idrecord_count
612 612
613 613 self.idrecord_count += 1
614 614 self.profileIndex = 0
@@ -626,7 +626,7 class AMISRReader(ProcessingUnit):
626 626 pass
627 627
628 628 def __hasNotDataInBuffer(self):
629 #self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
629 # self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
630 630 if self.profileIndex >= self.radacHeaderObj.npulses:
631 631 return 1
632 632 return 0
@@ -637,7 +637,7 class AMISRReader(ProcessingUnit):
637 637
638 638 def setObjProperties(self):
639 639
640 self.dataOut.heightList = self.rangeFromFile/1000.0 #km
640 self.dataOut.heightList = self.rangeFromFile / 1000.0 # km
641 641 self.dataOut.nProfiles = self.radacHeaderObj.npulses
642 642 self.dataOut.nRecords = self.radacHeaderObj.nrecords
643 643 self.dataOut.nBeams = self.radacHeaderObj.nbeams
@@ -653,9 +653,9 class AMISRReader(ProcessingUnit):
653 653 self.dataOut.beamRangeDict = self.beamRangeDict
654 654
655 655 if self.timezone == 'lt':
656 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
656 self.dataOut.timeZone = time.timezone / 60. # get the timezone in minutes
657 657 else:
658 self.dataOut.timeZone = 0 #by default time is UTC
658 self.dataOut.timeZone = 0 # by default time is UTC
659 659
660 660 def getData(self):
661 661
@@ -667,11 +667,11 class AMISRReader(ProcessingUnit):
667 667 self.readNextBlock()
668 668
669 669
670 if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
670 if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
671 671 self.dataOut.flagNoData = True
672 672 return 0
673 673
674 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
674 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex, :], (1, -1))
675 675
676 676 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
677 677 self.dataOut.profileIndex = self.profileIndex
@@ -688,4 +688,4 class AMISRReader(ProcessingUnit):
688 688 self.setObjProperties()
689 689 self.isConfig = True
690 690
691 self.getData() No newline at end of file
691 self.getData()
@@ -194,7 +194,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 #<<<<<<<<<<<o o>>>>>>>>>>>
197 # <<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
@@ -420,8 +420,8 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
423 d = DT_DIRECTIVES[fmt[x:x + 2]]
424 fmt = fmt.replace(fmt[x:x + 2], s[x:x + d])
425 425 return fmt
426 426
427 427 class Reader(object):
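
parse_format, untouched apart from spacing, turns a template such as '%Y%m%d' into the concrete text found at the same positions of s by substituting each directive with a slice of its rendered width. A self-contained sketch of the same idea; the width table below is a guessed subset, the real DT_DIRECTIVES lives elsewhere in the module:

    # assumed widths for the illustration only
    DT_DIRECTIVES = {'%Y': 4, '%m': 2, '%d': 2}

    def parse_format(s, fmt):
        for _ in range(fmt.count('%')):
            x = fmt.index('%')
            d = DT_DIRECTIVES[fmt[x:x + 2]]
            fmt = fmt.replace(fmt[x:x + 2], s[x:x + d])
        return fmt

    print(parse_format('20200115', '%Y%m%d'))   # '20200115'
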
@@ -518,7 +518,7 class Reader(object):
518 518 continue
519 519 return
520 520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
@@ -548,7 +548,7 class Reader(object):
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 expLabel, ext, walk,
551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
@@ -566,7 +566,7 class Reader(object):
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 expLabel, ext, walk,
569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
@@ -633,7 +633,7 class Reader(object):
633 633 nextFile = True
634 634 nextDay = False
635 635
636 for nFiles in range(self.nFiles+1):
636 for nFiles in range(self.nFiles + 1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
@@ -674,7 +674,7 class Reader(object):
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 self.fileIndex +=1
677 self.fileIndex += 1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
@@ -803,7 +803,7 class JRODataReader(Reader):
803 803 if prefixDir != None:
804 804 # formo el nombre del directorio xYYYYDDD (x=d o x=D)
805 805 if foldercounter == 0:
806 thispath = os.path.join(self.path, "%s%04d%03d" %
806 thispath = os.path.join(self.path, "%s%04d%03d" %
807 807 (prefixDir, self.year, self.doy))
808 808 else:
809 809 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
@@ -931,7 +931,7 class JRODataReader(Reader):
931 931 self.processingHeaderObj.read(self.fp)
932 932 self.firstHeaderSize = self.basicHeaderObj.size
933 933
934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 935 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 936 if datatype == 0:
937 937 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -949,7 +949,7 class JRODataReader(Reader):
949 949 raise ValueError('Data type was not defined')
950 950
951 951 self.dtype = datatype_str
952 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
952 # self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 953 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 954 self.firstHeaderSize + self.basicHeaderSize * \
955 955 (self.processingHeaderObj.dataBlocksPerFile - 1)
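
The reindented continuation lines keep the expected-size formula as it was: dataBlocksPerFile * blockSize for the data, plus the first (long) header, plus one basic header for each remaining block. A quick numeric check with invented sizes:

    data_blocks_per_file = 10
    block_size = 4096        # bytes per data block (invented)
    first_header_size = 512  # long header before the first block (invented)
    basic_header_size = 24   # short header before each subsequent block (invented)

    expected = (data_blocks_per_file * block_size
                + first_header_size
                + basic_header_size * (data_blocks_per_file - 1))
    print(expected)          # 41688
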
@@ -985,8 +985,8 class JRODataReader(Reader):
985 985 flag = False
986 986 if not self.online:
987 987 dt1 = basicHeaderObj.datatime
988 pos = self.fileSize-processingHeaderObj.blockSize-24
989 if pos<0:
988 pos = self.fileSize - processingHeaderObj.blockSize - 24
989 if pos < 0:
990 990 flag = False
991 991 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 992 else:
@@ -1131,7 +1131,7 class JRODataReader(Reader):
1131 1131
1132 1132 for nTries in range(self.nTries):
1133 1133 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1135 1135 self.filefmt, self.folderfmt)
1136 1136
1137 1137 try:
@@ -1144,7 +1144,7 class JRODataReader(Reader):
1144 1144
1145 1145 log.warning(
1146 1146 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 self.delay, self.path, nTries + 1),
1147 self.delay, self.path, nTries + 1),
1148 1148 self.name)
1149 1149 time.sleep(self.delay)
1150 1150
@@ -1158,7 +1158,7 class JRODataReader(Reader):
1158 1158 self.set = int(filename[8:11]) - 1
1159 1159 else:
1160 1160 log.log("Searching files in {}".format(self.path), self.name)
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1162 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163 1163
1164 1164 self.setNextFile()
@@ -1558,7 +1558,7 class printInfo(Operation):
1558 1558 Operation.__init__(self)
1559 1559 self.__printInfo = True
1560 1560
1561 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1561 def run(self, dataOut, headers=['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1562 if self.__printInfo == False:
1563 1563 return
1564 1564
@@ -43,18 +43,18 class DigitalRFReader(ProcessingUnit):
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 self.dataOut = Voltage()
47 self.__printInfo = True
46 self.dataOut = Voltage()
47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 self.__codeType = 0
51 self.__ippKm = None
52 self.__nCode = None
53 self.__nBaud = None
54 self.__code = None
55 self.dtype = None
56 self.oldAverage = None
57 self.path = None
50 self.__codeType = 0
51 self.__ippKm = None
52 self.__nCode = None
53 self.__nBaud = None
54 self.__code = None
55 self.dtype = None
56 self.oldAverage = None
57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
@@ -97,9 +97,9 class DigitalRFReader(ProcessingUnit):
97 97 nChannels=len(
98 98 self.__channelList),
99 99 adcResolution=14)
100 self.dataOut.type = "Voltage"
100 self.dataOut.type = "Voltage"
101 101
102 self.dataOut.data = None
102 self.dataOut.data = None
103 103
104 104 self.dataOut.dtype = self.dtype
105 105
@@ -107,32 +107,32 class DigitalRFReader(ProcessingUnit):
107 107
108 108 # self.dataOut.nHeights = 0
109 109
110 self.dataOut.nProfiles = int(nProfiles)
110 self.dataOut.nProfiles = int(nProfiles)
111 111
112 self.dataOut.heightList = self.__firstHeigth + \
112 self.dataOut.heightList = self.__firstHeigth + \
113 113 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 114 self.__deltaHeigth
115 115
116 116 self.dataOut.channelList = list(range(self.__num_subchannels))
117 117
118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
119 119
120 120 # self.dataOut.channelIndexList = None
121 121
122 self.dataOut.flagNoData = True
122 self.dataOut.flagNoData = True
123 123
124 124 self.dataOut.flagDataAsBlock = False
125 125 # Set to TRUE if the data is discontinuous
126 126 self.dataOut.flagDiscontinuousBlock = False
127 127
128 self.dataOut.utctime = None
128 self.dataOut.utctime = None
129 129
130 130 # timezone like jroheader, difference in minutes between UTC and localtime
131 self.dataOut.timeZone = self.__timezone / 60
131 self.dataOut.timeZone = self.__timezone / 60
132 132
133 self.dataOut.dstFlag = 0
133 self.dataOut.dstFlag = 0
134 134
135 self.dataOut.errorCount = 0
135 self.dataOut.errorCount = 0
136 136
137 137 try:
138 138 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
@@ -145,9 +145,9 class DigitalRFReader(ProcessingUnit):
145 145 # asumo que la data esta sin flip
146 146 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 147
148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 149
150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 151 except:
152 152 pass
153 153
@@ -156,9 +156,9 class DigitalRFReader(ProcessingUnit):
156 156 # Time interval between profiles
157 157 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 158
159 self.dataOut.frequency = self.__frequency
159 self.dataOut.frequency = self.__frequency
160 160
161 self.dataOut.realtime = self.__online
161 self.dataOut.realtime = self.__online
162 162
163 163 def findDatafiles(self, path, startDate=None, endDate=None):
164 164
@@ -171,46 +171,46 class DigitalRFReader(ProcessingUnit):
171 171 except:
172 172 digitalReadObj = digital_rf.DigitalRFReader(path)
173 173
174 channelNameList = digitalReadObj.get_channels()
174 channelNameList = digitalReadObj.get_channels()
175 175
176 176 if not channelNameList:
177 177 return []
178 178
179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 180
181 sample_rate = metadata_dict['sample_rate'][0]
181 sample_rate = metadata_dict['sample_rate'][0]
182 182
183 183 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 184
185 185 try:
186 timezone = this_metadata_file['timezone'].value
186 timezone = this_metadata_file['timezone'].value
187 187 except:
188 timezone = 0
188 timezone = 0
189 189
190 190 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 191 channelNameList[0]) / sample_rate - timezone
192 192
193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 195
196 196 if not startDate:
197 startDate = startDatetime.date()
197 startDate = startDatetime.date()
198 198
199 199 if not endDate:
200 endDate = endDatatime.date()
200 endDate = endDatatime.date()
201 201
202 dateList = []
202 dateList = []
203 203
204 thisDatetime = startDatetime
204 thisDatetime = startDatetime
205 205
206 206 while(thisDatetime <= endDatatime):
207 207
208 thisDate = thisDatetime.date()
208 thisDate = thisDatetime.date()
209 209
210 if thisDate < startDate:
210 if thisDate < startDate:
211 211 continue
212 212
213 if thisDate > endDate:
213 if thisDate > endDate:
214 214 break
215 215
216 216 dateList.append(thisDate)
@@ -250,10 +250,10 class DigitalRFReader(ProcessingUnit):
250 250 online
251 251 delay
252 252 '''
253 self.path = path
254 self.nCohInt = nCohInt
253 self.path = path
254 self.nCohInt = nCohInt
255 255 self.flagDecodeData = flagDecodeData
256 self.i = 0
256 self.i = 0
257 257 if not os.path.isdir(path):
258 258 raise ValueError("[Reading] Directory %s does not exist" % path)
259 259
@@ -263,7 +263,7 class DigitalRFReader(ProcessingUnit):
263 263 except:
264 264 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 265
266 channelNameList = self.digitalReadObj.get_channels()
266 channelNameList = self.digitalReadObj.get_channels()
267 267
268 268 if not channelNameList:
269 269 raise ValueError("[Reading] Directory %s does not have any files" % path)
@@ -273,27 +273,27 class DigitalRFReader(ProcessingUnit):
273 273
274 274 ########## Reading metadata ######################
275 275
276 top_properties = self.digitalReadObj.get_properties(
276 top_properties = self.digitalReadObj.get_properties(
277 277 channelNameList[channelList[0]])
278 278
279 self.__num_subchannels = top_properties['num_subchannels']
280 self.__sample_rate = 1.0 * \
279 self.__num_subchannels = top_properties['num_subchannels']
280 self.__sample_rate = 1.0 * \
281 281 top_properties['sample_rate_numerator'] / \
282 282 top_properties['sample_rate_denominator']
283 283 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
285 285
286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 287 channelNameList[channelList[0]])
288 metadata_bounds = this_metadata_file.get_bounds()
288 metadata_bounds = this_metadata_file.get_bounds()
289 289 self.fixed_metadata_dict = this_metadata_file.read(
290 290 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 291
292 292 try:
293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 294 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 297 except:
298 298 pass
299 299
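
On the '# why 0.15?' comment carried over in this hunk: 1e6 / sample_rate is the sample period in microseconds, and 0.15 km per microsecond is c/2, the range covered per microsecond of round-trip delay, so __deltaHeigth is the range-gate spacing in kilometres. A quick numeric check assuming a 1 MHz sample rate:

    SPEED_OF_LIGHT_KM_S = 3.0e5   # km/s, the usual radar approximation
    sample_rate = 1.0e6           # Hz, assumed for the example

    sample_period_us = 1e6 / sample_rate                    # 1.0 microsecond
    delta_height_km = sample_period_us * 0.15                # expression used by the reader
    same_thing = SPEED_OF_LIGHT_KM_S / (2 * sample_rate)     # c / (2 * fs), also in km

    print(delta_height_km, same_thing)                       # 0.15 0.15
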
@@ -311,15 +311,15 class DigitalRFReader(ProcessingUnit):
311 311 self.__firstHeigth = 0
312 312
313 313 try:
314 codeType = self.__radarControllerHeader['codeType']
314 codeType = self.__radarControllerHeader['codeType']
315 315 except:
316 codeType = 0
316 codeType = 0
317 317
318 318 try:
319 319 if codeType:
320 320 nCode = self.__radarControllerHeader['nCode']
321 321 nBaud = self.__radarControllerHeader['nBaud']
322 code = self.__radarControllerHeader['code']
322 code = self.__radarControllerHeader['code']
323 323 except:
324 324 pass
325 325
@@ -330,18 +330,18 class DigitalRFReader(ProcessingUnit):
330 330 except:
331 331 ippKm = None
332 332 ####################################################
333 self.__ippKm = ippKm
333 self.__ippKm = ippKm
334 334 startUTCSecond = None
335 endUTCSecond = None
335 endUTCSecond = None
336 336
337 337 if startDate:
338 startDatetime = datetime.datetime.combine(startDate, startTime)
338 startDatetime = datetime.datetime.combine(startDate, startTime)
339 339 startUTCSecond = (
340 340 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 341
342 342 if endDate:
343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 endUTCSecond = (endDatetime - datetime.datetime(1970,
343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 345 1, 1)).total_seconds() + self.__timezone
346 346
347 347 start_index, end_index = self.digitalReadObj.get_bounds(
@@ -350,50 +350,50 class DigitalRFReader(ProcessingUnit):
350 350 if not startUTCSecond:
351 351 startUTCSecond = start_index / self.__sample_rate
352 352
353 if start_index > startUTCSecond * self.__sample_rate:
353 if start_index > startUTCSecond * self.__sample_rate:
354 354 startUTCSecond = start_index / self.__sample_rate
355 355
356 356 if not endUTCSecond:
357 endUTCSecond = end_index / self.__sample_rate
357 endUTCSecond = end_index / self.__sample_rate
358 358
359 if end_index < endUTCSecond * self.__sample_rate:
360 endUTCSecond = end_index / self.__sample_rate
359 if end_index < endUTCSecond * self.__sample_rate:
360 endUTCSecond = end_index / self.__sample_rate
361 361 if not nSamples:
362 362 if not ippKm:
363 363 raise ValueError("[Reading] nSamples or ippKm should be defined")
364 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 channelBoundList = []
364 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 channelBoundList = []
366 366 channelNameListFiltered = []
367 367
368 368 for thisIndexChannel in channelList:
369 thisChannelName = channelNameList[thisIndexChannel]
369 thisChannelName = channelNameList[thisIndexChannel]
370 370 start_index, end_index = self.digitalReadObj.get_bounds(
371 371 thisChannelName)
372 372 channelBoundList.append((start_index, end_index))
373 373 channelNameListFiltered.append(thisChannelName)
374 374
375 375 self.profileIndex = 0
376 self.i = 0
377 self.__delay = delay
378
379 self.__codeType = codeType
380 self.__nCode = nCode
381 self.__nBaud = nBaud
382 self.__code = code
383
384 self.__datapath = path
385 self.__online = online
386 self.__channelList = channelList
387 self.__channelNameList = channelNameListFiltered
376 self.i = 0
377 self.__delay = delay
378
379 self.__codeType = codeType
380 self.__nCode = nCode
381 self.__nBaud = nBaud
382 self.__code = code
383
384 self.__datapath = path
385 self.__online = online
386 self.__channelList = channelList
387 self.__channelNameList = channelNameListFiltered
388 388 self.__channelBoundList = channelBoundList
389 self.__nSamples = nSamples
390 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
391 self.__nChannels = len(self.__channelList)
389 self.__nSamples = nSamples
390 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
391 self.__nChannels = len(self.__channelList)
392 392
393 self.__startUTCSecond = startUTCSecond
394 self.__endUTCSecond = endUTCSecond
393 self.__startUTCSecond = startUTCSecond
394 self.__endUTCSecond = endUTCSecond
395 395
396 self.__timeInterval = 1.0 * self.__samples_to_read / \
396 self.__timeInterval = 1.0 * self.__samples_to_read / \
397 397 self.__sample_rate # Time interval
398 398
399 399 if online:
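An aside on the sample-count arithmetic in the hunk above: 0.15 km is the range covered per microsecond (c/2), so 1e6 * 0.15 / sample_rate is the height step per sample in km, and dividing ippKm by that step gives the samples per inter-pulse period. A minimal sketch with illustrative numbers (a 1 MHz sample rate and a 60 km IPP, neither taken from this commit):

# Illustrative values only; the real ones come from the radar controller header.
sample_rate = 1.0e6                            # complex samples per second (Hz)
ippKm = 60.0                                   # inter-pulse period expressed as range (km)

deltaH_km = 1e6 * 0.15 / sample_rate           # km of range spanned by one sample
nSamples = int(ippKm / deltaH_km)              # range gates per IPP -> 400
timeInterval = 1.0 * nSamples / sample_rate    # seconds covered by one block read -> 4e-4

print(nSamples, timeInterval)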
@@ -403,7 +403,7 class DigitalRFReader(ProcessingUnit):
403 403 # por que en el otro metodo lo primero q se hace es sumar samplestoread
404 404 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
405 405
406 self.__data_buffer = numpy.zeros(
406 self.__data_buffer = numpy.zeros(
407 407 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
408 408
409 409 self.__setFileHeader()
@@ -420,8 +420,8 class DigitalRFReader(ProcessingUnit):
420 420 datetime.datetime.utcfromtimestamp(
421 421 endUTCSecond - self.__timezone)
422 422 ))
423 self.oldAverage = None
424 self.count = 0
423 self.oldAverage = None
424 self.count = 0
425 425 self.executionTime = 0
426 426
427 427 def __reload(self):
@@ -438,13 +438,13 class DigitalRFReader(ProcessingUnit):
438 438 except:
439 439 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
440 440
441 start_index, end_index = self.digitalReadObj.get_bounds(
441 start_index, end_index = self.digitalReadObj.get_bounds(
442 442 self.__channelNameList[self.__channelList[0]])
443 443
444 if start_index > self.__startUTCSecond * self.__sample_rate:
444 if start_index > self.__startUTCSecond * self.__sample_rate:
445 445 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
446 446
447 if end_index > self.__endUTCSecond * self.__sample_rate:
447 if end_index > self.__endUTCSecond * self.__sample_rate:
448 448 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
449 449 print()
450 450 print("[Reading] New timerange found [%s, %s] " % (
@@ -459,14 +459,14 class DigitalRFReader(ProcessingUnit):
459 459 return False
460 460
461 461 def timeit(self, toExecute):
462 t0 = time.time()
462 t0 = time.time()
463 463 toExecute()
464 self.executionTime = time.time() - t0
464 self.executionTime = time.time() - t0
465 465 if self.oldAverage is None:
466 466 self.oldAverage = self.executionTime
467 self.oldAverage = (self.executionTime + self.count *
467 self.oldAverage = (self.executionTime + self.count *
468 468 self.oldAverage) / (self.count + 1.0)
469 self.count = self.count + 1.0
469 self.count = self.count + 1.0
470 470 return
471 471
472 472 def __readNextBlock(self, seconds=30, volt_scale=1):
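The timeit helper in the hunk above keeps a cumulative mean of execution times using the incremental-average update mean_new = (x + n * mean_old) / (n + 1). A self-contained sketch of the same bookkeeping, written with an explicit else branch and illustrative names rather than the class attributes:

import time

class RunningMeanTimer:
    """Keep an incremental mean of wall-clock execution times."""

    def __init__(self):
        self.count = 0.0
        self.average = None

    def timeit(self, toExecute):
        t0 = time.time()
        toExecute()
        elapsed = time.time() - t0
        if self.average is None:
            self.average = elapsed
        else:
            # mean_new = (x + n * mean_old) / (n + 1)
            self.average = (elapsed + self.count * self.average) / (self.count + 1.0)
        self.count += 1.0
        return elapsed

timer = RunningMeanTimer()
timer.timeit(lambda: sum(range(100000)))
print(timer.average)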
@@ -475,7 +475,7 class DigitalRFReader(ProcessingUnit):
475 475
476 476 # Set the next data
477 477 self.__flagDiscontinuousBlock = False
478 self.__thisUnixSample += self.__samples_to_read
478 self.__thisUnixSample += self.__samples_to_read
479 479
480 480 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
481 481 print ("[Reading] There are no more data into selected time-range")
@@ -496,14 +496,14 class DigitalRFReader(ProcessingUnit):
496 496 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
497 497 for indexSubchannel in range(self.__num_subchannels):
498 498 try:
499 t0 = time()
499 t0 = time()
500 500 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 501 self.__samples_to_read,
502 502 thisChannelName, sub_channel=indexSubchannel)
503 self.executionTime = time() - t0
503 self.executionTime = time() - t0
504 504 if self.oldAverage is None:
505 505 self.oldAverage = self.executionTime
506 self.oldAverage = (
506 self.oldAverage = (
507 507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 508 self.count = self.count + 1.0
509 509
@@ -521,11 +521,11 class DigitalRFReader(ProcessingUnit):
521 521 break
522 522
523 523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
524 indexChannel+=1
524 indexChannel += 1
525 525
526 dataOk = True
526 dataOk = True
527 527
528 self.__utctime = self.__thisUnixSample / self.__sample_rate
528 self.__utctime = self.__thisUnixSample / self.__sample_rate
529 529
530 530 if not dataOk:
531 531 return False
@@ -534,7 +534,7 class DigitalRFReader(ProcessingUnit):
534 534 self.__samples_to_read,
535 535 self.__timeInterval))
536 536
537 self.__bufferIndex = 0
537 self.__bufferIndex = 0
538 538
539 539 return True
540 540
@@ -557,16 +557,16 class DigitalRFReader(ProcessingUnit):
557 557 self.flagDiscontinuousBlock
558 558 self.flagIsNewBlock
559 559 '''
560 #print("getdata")
560 # print("getdata")
561 561 err_counter = 0
562 562 self.dataOut.flagNoData = True
563 563
564 564 if self.__isBufferEmpty():
565 #print("hi")
565 # print("hi")
566 566 self.__flagDiscontinuousBlock = False
567 567
568 568 while True:
569 #print ("q ha pasado")
569 # print ("q ha pasado")
570 570 if self.__readNextBlock():
571 571 break
572 572 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
@@ -589,14 +589,14 class DigitalRFReader(ProcessingUnit):
589 589 print('[Reading] waiting %d seconds to read a new block' % seconds)
590 590 time.sleep(seconds)
591 591
592 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 self.dataOut.flagNoData = False
592 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 self.dataOut.flagNoData = False
595 595 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
596 self.dataOut.profileIndex = self.profileIndex
596 self.dataOut.profileIndex = self.profileIndex
597 597
598 598 self.__bufferIndex += self.__nSamples
599 self.profileIndex += 1
599 self.profileIndex += 1
600 600
601 601 if self.profileIndex == self.dataOut.nProfiles:
602 602 self.profileIndex = 0
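Once a block is in memory, getData above serves one profile per call by walking a buffer index through __data_buffer and timestamping each slice from the block's first sample. A standalone sketch of that slicing (array sizes and the start index are made-up values):

import numpy

sample_rate = 1.0e6                                                 # Hz
nSamples = 4                                                        # samples handed out per profile
buffer = numpy.arange(24, dtype=numpy.complex128).reshape(2, 12)    # 2 channels x 12 samples
firstUnixSample = 1600000000 * int(sample_rate)                     # hypothetical index of the block's first sample

bufferIndex = 0
while bufferIndex + nSamples <= buffer.shape[1]:
    profile = buffer[:, bufferIndex:bufferIndex + nSamples]         # one profile, all channels
    utctime = (firstUnixSample + bufferIndex) / sample_rate         # seconds since the epoch
    print(utctime, profile[0])
    bufferIndex += nSamples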
@@ -627,7 +627,7 class DigitalRFReader(ProcessingUnit):
627 627
628 628 if not self.isConfig:
629 629 self.setup(**kwargs)
630 #self.i = self.i+1
630 # self.i = self.i+1
631 631 self.getData(seconds=self.__delay)
632 632
633 633 return
@@ -644,25 +644,25 class DigitalRFWriter(Operation):
644 644 '''
645 645 Operation.__init__(self, **kwargs)
646 646 self.metadata_dict = {}
647 self.dataOut = None
648 self.dtype = None
649 self.oldAverage = 0
647 self.dataOut = None
648 self.dtype = None
649 self.oldAverage = 0
650 650
651 651 def setHeader(self):
652 652
653 self.metadata_dict['frequency'] = self.dataOut.frequency
654 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 self.metadata_dict['heightList'] = self.dataOut.heightList
658 self.metadata_dict['channelList'] = self.dataOut.channelList
653 self.metadata_dict['frequency'] = self.dataOut.frequency
654 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 self.metadata_dict['heightList'] = self.dataOut.heightList
658 self.metadata_dict['channelList'] = self.dataOut.channelList
659 659 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
660 660 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
661 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 self.metadata_dict['type'] = self.dataOut.type
665 self.metadata_dict['flagDataAsBlock']= getattr(
661 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 self.metadata_dict['type'] = self.dataOut.type
665 self.metadata_dict['flagDataAsBlock'] = getattr(
666 666 self.dataOut, 'flagDataAsBlock', None) # chequear
667 667
668 668 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
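setHeader above stores the numpy dtype as a pickled blob in the metadata dictionary; the reader at the top of this diff rebuilds it with pickle.loads. A tiny round-trip sketch (the dtype shown is an example, not necessarily the one this writer produces):

import pickle
import numpy

dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])   # example complex-as-struct dtype
metadata_dict = {'dtype': pickle.dumps(dtype)}             # what the writer stores

restored = pickle.loads(metadata_dict['dtype'])            # what the reader recovers
assert restored == dtype
print(restored)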
@@ -672,13 +672,13 class DigitalRFWriter(Operation):
672 672 dataOut: Input data will also be outputa data
673 673 '''
674 674 self.setHeader()
675 self.__ippSeconds = dataOut.ippSeconds
676 self.__deltaH = dataOut.getDeltaH()
675 self.__ippSeconds = dataOut.ippSeconds
676 self.__deltaH = dataOut.getDeltaH()
677 677 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
678 self.__dtype = dataOut.dtype
678 self.__dtype = dataOut.dtype
679 679 if len(dataOut.dtype) == 2:
680 680 self.__dtype = dataOut.dtype[0]
681 self.__nSamples = dataOut.systemHeaderObj.nSamples
681 self.__nSamples = dataOut.systemHeaderObj.nSamples
682 682 self.__nProfiles = dataOut.nProfiles
683 683
684 684 if self.dataOut.type != 'Voltage':
@@ -689,44 +689,44 class DigitalRFWriter(Operation):
689 689 self.arr_data = numpy.ones((self.__nSamples, len(
690 690 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
691 691
692 file_cadence_millisecs = 1000
692 file_cadence_millisecs = 1000
693 693
694 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 sample_rate_numerator = int(sample_rate_fraction.numerator)
694 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 sample_rate_numerator = int(sample_rate_fraction.numerator)
696 696 sample_rate_denominator = int(sample_rate_fraction.denominator)
697 start_global_index = dataOut.utctime * self.__sample_rate
697 start_global_index = dataOut.utctime * self.__sample_rate
698 698
699 uuid = 'prueba'
699 uuid = 'prueba'
700 700 compression_level = 0
701 checksum = False
702 is_complex = True
703 num_subchannels = len(dataOut.channelList)
704 is_continuous = True
705 marching_periods = False
701 checksum = False
702 is_complex = True
703 num_subchannels = len(dataOut.channelList)
704 is_continuous = True
705 marching_periods = False
706 706
707 707 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
708 708 fileCadence, start_global_index,
709 709 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
710 710 is_complex, num_subchannels, is_continuous, marching_periods)
711 metadata_dir = os.path.join(path, 'metadata')
711 metadata_dir = os.path.join(path, 'metadata')
712 712 os.system('mkdir %s' % (metadata_dir))
713 713 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
714 714 sample_rate_numerator, sample_rate_denominator,
715 715 metadataFile)
716 self.isConfig = True
716 self.isConfig = True
717 717 self.currentSample = 0
718 self.oldAverage = 0
719 self.count = 0
718 self.oldAverage = 0
719 self.count = 0
720 720 return
721 721
722 722 def writeMetadata(self):
723 start_idx = self.__sample_rate * self.dataOut.utctime
723 start_idx = self.__sample_rate * self.dataOut.utctime
724 724
725 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
725 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
726 726 )
727 727 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
728 728 )
729 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
729 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
730 730 )
731 731 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
732 732 return
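digital_rf takes the sample rate as an exact rational and addresses samples by their index since the Unix epoch, which is what the Fraction(...).limit_denominator() call and start_global_index in the setup above provide. A sketch of that conversion with illustrative numbers:

from fractions import Fraction

deltaH = 1.5                              # km between samples (illustrative)
sample_rate = 1e6 * 0.15 / deltaH         # -> 1e5 Hz
utctime = 1577836800                      # example start time, seconds since the epoch

rate = Fraction(sample_rate).limit_denominator()
sample_rate_numerator = int(rate.numerator)          # 100000
sample_rate_denominator = int(rate.denominator)      # 1

# First global sample index: the start time expressed in samples since the epoch.
start_global_index = int(utctime * sample_rate)

print(sample_rate_numerator, sample_rate_denominator, start_global_index)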
@@ -734,12 +734,12 class DigitalRFWriter(Operation):
734 734 def timeit(self, toExecute):
735 735 t0 = time()
736 736 toExecute()
737 self.executionTime = time() - t0
737 self.executionTime = time() - t0
738 738 if self.oldAverage is None:
739 739 self.oldAverage = self.executionTime
740 self.oldAverage = (self.executionTime + self.count *
740 self.oldAverage = (self.executionTime + self.count *
741 741 self.oldAverage) / (self.count + 1.0)
742 self.count = self.count + 1.0
742 self.count = self.count + 1.0
743 743 return
744 744
745 745 def writeData(self):
@@ -747,9 +747,9 class DigitalRFWriter(Operation):
747 747 raise 'Digital RF cannot be used with this data type'
748 748 for channel in self.dataOut.channelList:
749 749 for i in range(self.dataOut.nFFTPoints):
750 self.arr_data[1][channel * self.dataOut.nFFTPoints +
750 self.arr_data[1][channel * self.dataOut.nFFTPoints +
751 751 i]['r'] = self.dataOut.data[channel][i].real
752 self.arr_data[1][channel * self.dataOut.nFFTPoints +
752 self.arr_data[1][channel * self.dataOut.nFFTPoints +
753 753 i]['i'] = self.dataOut.data[channel][i].imag
754 754 else:
755 755 for i in range(self.dataOut.systemHeaderObj.nSamples):
@@ -777,12 +777,12 class DigitalRFWriter(Operation):
777 777
778 778 self.writeData()
779 779
780 ## self.currentSample += 1
780 # # self.currentSample += 1
781 781 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
782 782 # self.writeMetadata()
783 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
783 # # if self.currentSample == self.__nProfiles: self.currentSample = 0
784 784
785 return dataOut# en la version 2.7 no aparece este return
785 return dataOut # en la version 2.7 no aparece este return
786 786
787 787 def close(self):
788 788 print('[Writing] - Closing files ')
@@ -25,23 +25,23 from schainpy.utils import log
25 25
26 26
27 27 class PyFits(object):
28 name=None
29 format=None
30 array =None
31 data =None
32 thdulist=None
33 prihdr=None
34 hdu=None
28 name = None
29 format = None
30 array = None
31 data = None
32 thdulist = None
33 prihdr = None
34 hdu = None
35 35
36 36 def __init__(self):
37 37
38 38 pass
39 39
40 def setColF(self,name,format,array):
41 self.name=name
42 self.format=format
43 self.array=array
44 a1=numpy.array([self.array],dtype=numpy.float32)
40 def setColF(self, name, format, array):
41 self.name = name
42 self.format = format
43 self.array = array
44 a1 = numpy.array([self.array], dtype=numpy.float32)
45 45 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
46 46 return self.col1
47 47
@@ -54,35 +54,35 class PyFits(object):
54 54 # return self.col2
55 55
56 56
57 def writeData(self,name,format,data):
58 self.name=name
59 self.format=format
60 self.data=data
61 a2=numpy.array([self.data],dtype=numpy.float32)
57 def writeData(self, name, format, data):
58 self.name = name
59 self.format = format
60 self.data = data
61 a2 = numpy.array([self.data], dtype=numpy.float32)
62 62 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
63 63 return self.col2
64 64
65 def cFImage(self,idblock,year,month,day,hour,minute,second):
66 self.hdu= pyfits.PrimaryHDU(idblock)
67 self.hdu.header.set("Year",year)
68 self.hdu.header.set("Month",month)
69 self.hdu.header.set("Day",day)
70 self.hdu.header.set("Hour",hour)
71 self.hdu.header.set("Minute",minute)
72 self.hdu.header.set("Second",second)
65 def cFImage(self, idblock, year, month, day, hour, minute, second):
66 self.hdu = pyfits.PrimaryHDU(idblock)
67 self.hdu.header.set("Year", year)
68 self.hdu.header.set("Month", month)
69 self.hdu.header.set("Day", day)
70 self.hdu.header.set("Hour", hour)
71 self.hdu.header.set("Minute", minute)
72 self.hdu.header.set("Second", second)
73 73 return self.hdu
74 74
75 75
76 def Ctable(self,colList):
77 self.cols=pyfits.ColDefs(colList)
76 def Ctable(self, colList):
77 self.cols = pyfits.ColDefs(colList)
78 78 self.tbhdu = pyfits.new_table(self.cols)
79 79 return self.tbhdu
80 80
81 81
82 def CFile(self,hdu,tbhdu):
83 self.thdulist=pyfits.HDUList([hdu,tbhdu])
82 def CFile(self, hdu, tbhdu):
83 self.thdulist = pyfits.HDUList([hdu, tbhdu])
84 84
85 def wFile(self,filename):
85 def wFile(self, filename):
86 86 if os.path.isfile(filename):
87 87 os.remove(filename)
88 88 self.thdulist.writeto(filename)
@@ -154,7 +154,7 class FitsWriter(Operation):
154 154 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
155 155 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
156 156 header_data.header['NCHANNELS'] = dataOut.nChannels
157 #header_data.header['HEIGHTS'] = dataOut.heightList
157 # header_data.header['HEIGHTS'] = dataOut.heightList
158 158 header_data.header['NHEIGHTS'] = dataOut.nHeights
159 159
160 160 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
@@ -165,7 +165,7 class FitsWriter(Operation):
165 165
166 166 header_data.writeto(self.filename)
167 167
168 self.addExtension(dataOut.heightList,'HEIGHTLIST')
168 self.addExtension(dataOut.heightList, 'HEIGHTLIST')
169 169
170 170
171 171 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
@@ -182,7 +182,7 class FitsWriter(Operation):
182 182 def addExtension(self, data, tagname):
183 183 self.open()
184 184 extension = pyfits.ImageHDU(data=data, name=tagname)
185 #extension.header['TAG'] = tagname
185 # extension.header['TAG'] = tagname
186 186 self.fitsObj.append(extension)
187 187 self.write()
188 188
@@ -207,25 +207,25 class FitsWriter(Operation):
207 207 ext = self.ext
208 208 path = self.path
209 209
210 timeTuple = time.localtime( self.dataOut.utctime)
211 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
210 timeTuple = time.localtime(self.dataOut.utctime)
211 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
212 212
213 fullpath = os.path.join( path, subfolder )
214 if not( os.path.exists(fullpath) ):
213 fullpath = os.path.join(path, subfolder)
214 if not(os.path.exists(fullpath)):
215 215 os.mkdir(fullpath)
216 self.setFile = -1 #inicializo mi contador de seteo
216 self.setFile = -1 # inicializo mi contador de seteo
217 217 else:
218 filesList = os.listdir( fullpath )
219 if len( filesList ) > 0:
220 filesList = sorted( filesList, key=str.lower )
218 filesList = os.listdir(fullpath)
219 if len(filesList) > 0:
220 filesList = sorted(filesList, key=str.lower)
221 221 filen = filesList[-1]
222 222
223 if isNumber( filen[8:11] ):
224 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
223 if isNumber(filen[8:11]):
224 self.setFile = int(filen[8:11]) # inicializo mi contador de seteo al seteo del ultimo file
225 225 else:
226 226 self.setFile = -1
227 227 else:
228 self.setFile = -1 #inicializo mi contador de seteo
228 self.setFile = -1 # inicializo mi contador de seteo
229 229
230 230 setFile = self.setFile
231 231 setFile += 1
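A sketch of the per-day output layout that setNextFile above relies on: files go into a 'dYYYYDDD' subfolder and the set counter resumes from the newest file already present. The path and the character positions of the set number are illustrative assumptions, not the writer's exact naming pattern:

import os
import time

utctime = 1589245260.0                                               # example timestamp
timeTuple = time.localtime(utctime)
subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)   # one folder per local day-of-year

fullpath = os.path.join('/tmp/fits_out', subfolder)                  # hypothetical output path
if not os.path.exists(fullpath):
    setFile = -1                                                     # empty day: next file is set 000
else:
    filesList = sorted(os.listdir(fullpath), key=str.lower)
    # Resume from the set encoded in the newest file name (positions assumed here).
    setFile = int(filesList[-1][8:11]) if filesList else -1

setFile += 1
print(subfolder, setFile)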
@@ -234,16 +234,16 class FitsWriter(Operation):
234 234 timeTuple.tm_year,
235 235 timeTuple.tm_yday,
236 236 setFile,
237 ext )
237 ext)
238 238
239 filename = os.path.join( path, subfolder, thisFile )
239 filename = os.path.join(path, subfolder, thisFile)
240 240
241 241 self.blockIndex = 0
242 242 self.filename = filename
243 243 self.setFile = setFile
244 244 self.flagIsNewFile = 1
245 245
246 print('Writing the file: %s'%self.filename)
246 print('Writing the file: %s' % self.filename)
247 247
248 248 self.setFitsHeader(self.dataOut, self.metadatafile)
249 249
@@ -262,13 +262,13 class FitsWriter(Operation):
262 262 if self.blockIndex < self.dataBlocksPerFile:
263 263 return 1
264 264
265 if not( self.setNextFile() ):
265 if not(self.setNextFile()):
266 266 return 0
267 267
268 268 return 1
269 269
270 270 def writeNextBlock(self):
271 if not( self.__setNewBlock() ):
271 if not(self.__setNewBlock()):
272 272 return 0
273 273 self.writeBlock()
274 274 return 1
@@ -301,8 +301,8 class FitsReader(ProcessingUnit):
301 301 data = None
302 302 data_header_dict = None
303 303
304 def __init__(self):#, **kwargs):
305 ProcessingUnit.__init__(self)#, **kwargs)
304 def __init__(self): # , **kwargs):
305 ProcessingUnit.__init__(self) # , **kwargs)
306 306 self.isConfig = False
307 307 self.ext = '.fits'
308 308 self.setFile = 0
@@ -317,7 +317,7 class FitsReader(ProcessingUnit):
317 317 self.nReadBlocks = 0
318 318 self.nTotalBlocks = 0
319 319 self.dataOut = self.createObjByDefault()
320 self.maxTimeStep = 10# deberia ser definido por el usuario usando el metodo setup()
320 self.maxTimeStep = 10 # deberia ser definido por el usuario usando el metodo setup()
321 321 self.blockIndex = 1
322 322
323 323 def createObjByDefault(self):
@@ -328,14 +328,14 class FitsReader(ProcessingUnit):
328 328
329 329 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
330 330 try:
331 fitsObj = pyfits.open(filename,'readonly')
331 fitsObj = pyfits.open(filename, 'readonly')
332 332 except:
333 print("File %s can't be opened" %(filename))
333 print("File %s can't be opened" % (filename))
334 334 return None
335 335
336 336 header = fitsObj[0].header
337 337 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
338 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
338 utc = time.mktime(struct_time) - time.timezone # TIMEZONE debe ser un parametro del header FITS
339 339
340 340 ltc = utc
341 341 if useLocalTime:
@@ -367,7 +367,7 class FitsReader(ProcessingUnit):
367 367 # continue
368 368
369 369 fileSize = os.path.getsize(filename)
370 fitsObj = pyfits.open(filename,'readonly')
370 fitsObj = pyfits.open(filename, 'readonly')
371 371 break
372 372
373 373 self.flagIsNewFile = 1
@@ -376,7 +376,7 class FitsReader(ProcessingUnit):
376 376 self.fileSize = fileSize
377 377 self.fitsObj = fitsObj
378 378 self.blockIndex = 0
379 print("Setting the file: %s"%self.filename)
379 print("Setting the file: %s" % self.filename)
380 380
381 381 return 1
382 382
@@ -459,8 +459,8 class FitsReader(ProcessingUnit):
459 459 path,
460 460 startDate,
461 461 endDate,
462 startTime=datetime.time(0,0,0),
463 endTime=datetime.time(23,59,59),
462 startTime=datetime.time(0, 0, 0),
463 endTime=datetime.time(23, 59, 59),
464 464 set=None,
465 465 expLabel='',
466 466 ext='.fits',
@@ -474,7 +474,7 class FitsReader(ProcessingUnit):
474 474 else:
475 475 dirList = []
476 476 for thisPath in os.listdir(path):
477 if not os.path.isdir(os.path.join(path,thisPath)):
477 if not os.path.isdir(os.path.join(path, thisPath)):
478 478 continue
479 479 if not isRadarFolder(thisPath):
480 480 continue
@@ -490,20 +490,20 class FitsReader(ProcessingUnit):
490 490 year = thisDate.timetuple().tm_year
491 491 doy = thisDate.timetuple().tm_yday
492 492
493 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
493 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')
494 494 if len(matchlist) == 0:
495 495 thisDate += datetime.timedelta(1)
496 496 continue
497 497 for match in matchlist:
498 pathList.append(os.path.join(path,match,expLabel))
498 pathList.append(os.path.join(path, match, expLabel))
499 499
500 500 thisDate += datetime.timedelta(1)
501 501
502 502 if pathList == []:
503 print("Any folder was found for the date range: %s-%s" %(startDate, endDate))
503 print("Any folder was found for the date range: %s-%s" % (startDate, endDate))
504 504 return None, None
505 505
506 print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate))
506 print("%d folder(s) was(were) found for the date range: %s - %s" % (len(pathList), startDate, endDate))
507 507
508 508 filenameList = []
509 509 datetimeList = []
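The path search above keeps only day folders whose name embeds the year and day-of-year it is looking for, via an fnmatch pattern of the form '?YYYYDDD*'. A small sketch with made-up folder names:

import fnmatch
import datetime

dirList = ['d2020134', 'd2020135', 'p2020135_test', 'notes']   # made-up folder names
thisDate = datetime.date(2020, 5, 14)                          # day-of-year 135

year = thisDate.timetuple().tm_year
doy = thisDate.timetuple().tm_yday
matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')

print(matchlist)   # -> ['d2020135', 'p2020135_test']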
@@ -512,12 +512,12 class FitsReader(ProcessingUnit):
512 512
513 513 thisPath = pathList[i]
514 514
515 fileList = glob.glob1(thisPath, "*%s" %ext)
515 fileList = glob.glob1(thisPath, "*%s" % ext)
516 516 fileList.sort()
517 517
518 518 for thisFile in fileList:
519 519
520 filename = os.path.join(thisPath,thisFile)
520 filename = os.path.join(thisPath, thisFile)
521 521 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
522 522
523 523 if not(thisDatetime):
@@ -527,14 +527,14 class FitsReader(ProcessingUnit):
527 527 datetimeList.append(thisDatetime)
528 528
529 529 if not(filenameList):
530 print("Any file was found for the time range %s - %s" %(startTime, endTime))
530 print("Any file was found for the time range %s - %s" % (startTime, endTime))
531 531 return None, None
532 532
533 print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime))
533 print("%d file(s) was(were) found for the time range: %s - %s" % (len(filenameList), startTime, endTime))
534 534 print()
535 535
536 536 for i in range(len(filenameList)):
537 print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()))
537 print("%s -> [%s]" % (filenameList[i], datetimeList[i].ctime()))
538 538
539 539 self.filenameList = filenameList
540 540 self.datetimeList = datetimeList
@@ -544,14 +544,14 class FitsReader(ProcessingUnit):
544 544 def setup(self, path=None,
545 545 startDate=None,
546 546 endDate=None,
547 startTime=datetime.time(0,0,0),
548 endTime=datetime.time(23,59,59),
547 startTime=datetime.time(0, 0, 0),
548 endTime=datetime.time(23, 59, 59),
549 549 set=0,
550 expLabel = "",
551 ext = None,
552 online = False,
553 delay = 60,
554 walk = True):
550 expLabel="",
551 ext=None,
552 online=False,
553 delay=60,
554 walk=True):
555 555
556 556 if path == None:
557 557 raise ValueError("The path is not valid")
@@ -567,9 +567,9 class FitsReader(ProcessingUnit):
567 567 walk=walk)
568 568
569 569 if not(pathList):
570 print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
571 datetime.datetime.combine(startDate,startTime).ctime(),
572 datetime.datetime.combine(endDate,endTime).ctime()))
570 print("No *%s files into the folder %s \nfor the range: %s - %s" % (ext, path,
571 datetime.datetime.combine(startDate, startTime).ctime(),
572 datetime.datetime.combine(endDate, endTime).ctime()))
573 573
574 574 sys.exit(-1)
575 575
@@ -583,10 +583,10 class FitsReader(ProcessingUnit):
583 583 self.ext = ext
584 584
585 585 if not(self.setNextFile()):
586 if (startDate!=None) and (endDate!=None):
587 print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
586 if (startDate != None) and (endDate != None):
587 print("No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
588 588 elif startDate != None:
589 print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()))
589 print("No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
590 590 else:
591 591 print("No files")
592 592
@@ -627,21 +627,21 class FitsReader(ProcessingUnit):
627 627
628 628 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
629 629
630 for nTries in range( self.nTries ):
630 for nTries in range(self.nTries):
631 631
632 632 self.fp.close()
633 self.fp = open( self.filename, 'rb' )
634 self.fp.seek( currentPointer )
633 self.fp = open(self.filename, 'rb')
634 self.fp.seek(currentPointer)
635 635
636 self.fileSize = os.path.getsize( self.filename )
636 self.fileSize = os.path.getsize(self.filename)
637 637 currentSize = self.fileSize - currentPointer
638 638
639 if ( currentSize >= neededSize ):
639 if (currentSize >= neededSize):
640 640 self.__rdBasicHeader()
641 641 return 1
642 642
643 print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1))
644 sleep( self.delay )
643 print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
644 sleep(self.delay)
645 645
646 646
647 647 return 0
@@ -737,11 +737,11 class SpectraHeisWriter(Operation):
737 737 doypath = None
738 738 subfolder = None
739 739
740 def __init__(self):#, **kwargs):
741 Operation.__init__(self)#, **kwargs)
740 def __init__(self): # , **kwargs):
741 Operation.__init__(self) # , **kwargs)
742 742 self.wrObj = PyFits()
743 743 # self.dataOut = dataOut
744 self.nTotalBlocks=0
744 self.nTotalBlocks = 0
745 745 # self.set = None
746 746 self.setFile = None
747 747 self.idblock = 0
@@ -764,7 +764,7 class SpectraHeisWriter(Operation):
764 764 False : no es un string numerico
765 765 """
766 766 try:
767 float( str )
767 float(str)
768 768 return True
769 769 except:
770 770 return False
@@ -779,28 +779,28 class SpectraHeisWriter(Operation):
779 779 self.dataOut = dataOut
780 780
781 781 def putData(self):
782 name= time.localtime( self.dataOut.utctime)
783 ext=".fits"
782 name = time.localtime(self.dataOut.utctime)
783 ext = ".fits"
784 784
785 785 if self.doypath == None:
786 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
787 self.doypath = os.path.join( self.wrpath, self.subfolder )
786 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year, name.tm_yday, time.mktime(datetime.datetime.now().timetuple()))
787 self.doypath = os.path.join(self.wrpath, self.subfolder)
788 788 os.mkdir(self.doypath)
789 789
790 790 if self.setFile == None:
791 791 # self.set = self.dataOut.set
792 792 self.setFile = 0
793 793 # if self.set != self.dataOut.set:
794 ## self.set = self.dataOut.set
794 # # self.set = self.dataOut.set
795 795 # self.setFile = 0
796 796
797 #make the filename
798 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
797 # make the filename
798 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)
799 799
800 filename = os.path.join(self.wrpath,self.subfolder, thisFile)
800 filename = os.path.join(self.wrpath, self.subfolder, thisFile)
801 801
802 idblock = numpy.array([self.idblock],dtype="int64")
803 header=self.wrObj.cFImage(idblock=idblock,
802 idblock = numpy.array([self.idblock], dtype="int64")
803 header = self.wrObj.cFImage(idblock=idblock,
804 804 year=time.gmtime(self.dataOut.utctime).tm_year,
805 805 month=time.gmtime(self.dataOut.utctime).tm_mon,
806 806 day=time.gmtime(self.dataOut.utctime).tm_mday,
@@ -808,32 +808,32 class SpectraHeisWriter(Operation):
808 808 minute=time.gmtime(self.dataOut.utctime).tm_min,
809 809 second=time.gmtime(self.dataOut.utctime).tm_sec)
810 810
811 c=3E8
811 c = 3E8
812 812 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
813 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
813 freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) * (c / (2 * deltaHeight * 1000))
814 814
815 815 colList = []
816 816
817 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
817 colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints) + 'E', array=freq)
818 818
819 819 colList.append(colFreq)
820 820
821 nchannel=self.dataOut.nChannels
821 nchannel = self.dataOut.nChannels
822 822
823 823 for i in range(nchannel):
824 col = self.wrObj.writeData(name="PCh"+str(i+1),
825 format=str(self.dataOut.nFFTPoints)+'E',
826 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
824 col = self.wrObj.writeData(name="PCh" + str(i + 1),
825 format=str(self.dataOut.nFFTPoints) + 'E',
826 data=10 * numpy.log10(self.dataOut.data_spc[i, :]))
827 827
828 828 colList.append(col)
829 829
830 data=self.wrObj.Ctable(colList=colList)
830 data = self.wrObj.Ctable(colList=colList)
831 831
832 self.wrObj.CFile(header,data)
832 self.wrObj.CFile(header, data)
833 833
834 834 self.wrObj.wFile(filename)
835 835
836 #update the setFile
836 # update the setFile
837 837 self.setFile += 1
838 838 self.idblock += 1
839 839
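putData above writes each channel's spectrum as 10*log10(power) and builds a frequency axis scaled by c/(2*deltaHeight*1000), i.e. the sample rate implied by the height step. A numpy-only sketch of those two computations with an illustrative 8-point spectrum:

import numpy

data_spc = numpy.array([1.0, 2.0, 4.0, 8.0, 4.0, 2.0, 1.0, 0.5])   # linear power, one channel
power_db = 10 * numpy.log10(data_spc)                              # same dB conversion as writeData

c = 3e8
deltaHeight = 1.5                                   # km between heights (illustrative)
nPoints = data_spc.size
# Axis built the same way as in putData: bins from -N/2 to N/2-1, scaled by the
# sample rate c/(2*deltaHeight*1000) implied by the height spacing.
freq = numpy.arange(-nPoints / 2., nPoints / 2.) * (c / (2 * deltaHeight * 1000))

print(freq[0], freq[-1], power_db[:3])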
@@ -847,4 +847,4 class SpectraHeisWriter(Operation):
847 847 self.isConfig = True
848 848
849 849 self.putData()
850 return dataOut No newline at end of file
850 return dataOut

@@ -4,8 +4,8 Created on Jul 3, 2014
4 4 @author: roj-com0419
5 5 '''
6 6
7 import os,sys
8 import time,datetime
7 import os, sys
8 import time, datetime
9 9 import h5py
10 10 import numpy
11 11 import fnmatch
@@ -30,7 +30,7 def isNumber(str):
30 30 False : no es un string numerico
31 31 """
32 32 try:
33 float( str )
33 float(str)
34 34 return True
35 35 except:
36 36 return False
@@ -48,7 +48,7 def getFileFromSet(path, ext, set=None):
48 48
49 49 for thisFile in fileList:
50 50 try:
51 number= int(thisFile[6:16])
51 number = int(thisFile[6:16])
52 52
53 53 # year = int(thisFile[1:5])
54 54 # doy = int(thisFile[5:8])
@@ -63,31 +63,31 def getFileFromSet(path, ext, set=None):
63 63 if len(validFilelist) < 1:
64 64 return None
65 65
66 validFilelist = sorted( validFilelist, key=str.lower )
66 validFilelist = sorted(validFilelist, key=str.lower)
67 67
68 68 if set == None:
69 69 return validFilelist[-1]
70 70
71 print("set =" ,set)
71 print("set =" , set)
72 72 for thisFile in validFilelist:
73 73 if set <= int(thisFile[6:16]):
74 print(thisFile,int(thisFile[6:16]))
74 print(thisFile, int(thisFile[6:16]))
75 75 return thisFile
76 76
77 77 return validFilelist[-1]
78 78
79 myfile = fnmatch.filter(validFilelist,'*%10d*'%(set))
80 #myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
79 myfile = fnmatch.filter(validFilelist, '*%10d*' % (set))
80 # myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
81 81
82 if len(myfile)!= 0:
82 if len(myfile) != 0:
83 83 return myfile[0]
84 84 else:
85 filename = '*%10.10d%s'%(set,ext.lower())
86 print('the filename %s does not exist'%filename)
85 filename = '*%10.10d%s' % (set, ext.lower())
86 print('the filename %s does not exist' % filename)
87 87 print('...going to the last file: ')
88 88
89 89 if validFilelist:
90 validFilelist = sorted( validFilelist, key=str.lower )
90 validFilelist = sorted(validFilelist, key=str.lower)
91 91 return validFilelist[-1]
92 92
93 93 return None
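getFileFromSet above expects a ten-digit set number at character positions 6..15 of each file name and returns the first file whose set is at least the requested one, falling back to the newest file. A sketch of that selection with made-up names:

fileList = ['sweep-0000001000.hdf5', 'sweep-0000001010.hdf5', 'sweep-0000001020.hdf5']
wanted_set = 1010

validFilelist = sorted(fileList, key=str.lower)
# First file whose embedded set is >= the requested set, else the newest file.
chosen = next((f for f in validFilelist if int(f[6:16]) >= wanted_set),
              validFilelist[-1])

print(chosen)   # -> sweep-0000001010.hdf5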
@@ -113,7 +113,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
113 113 for thisFile in fileList:
114 114
115 115 try:
116 number= int(thisFile[6:16])
116 number = int(thisFile[6:16])
117 117 except:
118 118 print("There is a file or folder with different format")
119 119 if not isNumber(number):
@@ -127,7 +127,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
127 127 # if not isNumber(doy):
128 128 # continue
129 129
130 number= int(number)
130 number = int(number)
131 131 # year = int(year)
132 132 # doy = int(doy)
133 133
@@ -139,7 +139,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
139 139
140 140
141 141 if validFilelist:
142 validFilelist = sorted( validFilelist, key=str.lower )
142 validFilelist = sorted(validFilelist, key=str.lower)
143 143 return validFilelist[-1]
144 144
145 145 return None
@@ -150,16 +150,16 class HFReader(ProcessingUnit):
150 150 '''
151 151 classdocs
152 152 '''
153 path = None
154 startDate= None
155 endDate = None
156 startTime= None
157 endTime = None
158 walk = None
153 path = None
154 startDate = None
155 endDate = None
156 startTime = None
157 endTime = None
158 walk = None
159 159 isConfig = False
160 dataOut=None
160 dataOut = None
161 161 nTries = 3
162 ext = ".hdf5"
162 ext = ".hdf5"
163 163
164 164 def __init__(self, **kwargs):
165 165 '''
@@ -167,32 +167,32 class HFReader(ProcessingUnit):
167 167 '''
168 168 ProcessingUnit.__init__(self, **kwargs)
169 169
170 self.isConfig =False
170 self.isConfig = False
171 171
172 172 self.datablock = None
173 173
174 self.filename_current=None
174 self.filename_current = None
175 175
176 176 self.utc = 0
177 177
178 self.ext='.hdf5'
178 self.ext = '.hdf5'
179 179
180 180 self.flagIsNewFile = 1
181 181
182 182 #-------------------------------------------------
183 self.fileIndex=None
183 self.fileIndex = None
184 184
185 self.profileIndex_offset=None
185 self.profileIndex_offset = None
186 186
187 self.filenameList=[]
187 self.filenameList = []
188 188
189 self.hfFilePointer= None
189 self.hfFilePointer = None
190 190
191 191 self.filename_online = None
192 192
193 self.status=True
193 self.status = True
194 194
195 self.flagNoMoreFiles= False
195 self.flagNoMoreFiles = False
196 196
197 197 self.__waitForNewFile = 20
198 198
@@ -222,7 +222,7 class HFReader(ProcessingUnit):
222 222 Return:
223 223 None
224 224 """
225 pts2read =self.nChannels*self.nHeights*self.nProfiles
225 pts2read = self.nChannels * self.nHeights * self.nProfiles
226 226 self.blocksize = pts2read
227 227
228 228 def __readHeader(self):
@@ -230,20 +230,20 class HFReader(ProcessingUnit):
230 230 self.nProfiles = 100
231 231 self.nHeights = 1000
232 232 self.nChannels = 2
233 self.__firstHeigth=0
234 self.__nSamples=1000
235 self.__deltaHeigth=1.5
236 self.__sample_rate=1e5
237 #self.__frequency=2.72e6
238 #self.__frequency=3.64e6
239 self.__frequency=None
233 self.__firstHeigth = 0
234 self.__nSamples = 1000
235 self.__deltaHeigth = 1.5
236 self.__sample_rate = 1e5
237 # self.__frequency=2.72e6
238 # self.__frequency=3.64e6
239 self.__frequency = None
240 240 self.__online = False
241 self.filename_next_set=None
241 self.filename_next_set = None
242 242
243 #print "Frequency of Operation:", self.__frequency
243 # print "Frequency of Operation:", self.__frequency
244 244
245 245
246 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
246 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
247 247 self.path = path
248 248 self.startDate = startDate
249 249 self.endDate = endDate
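The header values earlier in this hunk are hard-coded (1000 gates of 1.5 km at a 100 kHz sample rate, 100 profiles per file); the sketch below shows what they imply for the height axis and the per-profile duration used later in this file. The constants are copied from the hunk; the variable names are illustrative:

import numpy

nSamples = 1000        # range gates per profile
deltaHeight = 1.5      # km between gates
firstHeight = 0.0      # km
sample_rate = 1e5      # Hz

heightList = firstHeight + numpy.arange(nSamples, dtype=float) * deltaHeight   # 0.0 .. 1498.5 km
ippSeconds = 1.0 * nSamples / sample_rate                                      # 0.01 s per profile

print(heightList[:4], ippSeconds)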
@@ -253,43 +253,43 class HFReader(ProcessingUnit):
253 253
254 254 def __checkPath(self):
255 255 if os.path.exists(self.path):
256 self.status=1
256 self.status = 1
257 257 else:
258 self.status=0
259 print('Path %s does not exits'%self.path)
258 self.status = 0
259 print('Path %s does not exits' % self.path)
260 260 return
261 261 return
262 262
263 263 def __selDates(self, hf_dirname_format):
264 264 try:
265 dir_hf_filename= self.path+"/"+hf_dirname_format
266 fp= h5py.File(dir_hf_filename,'r')
267 hipoc=fp['t'].value
265 dir_hf_filename = self.path + "/" + hf_dirname_format
266 fp = h5py.File(dir_hf_filename, 'r')
267 hipoc = fp['t'].value
268 268 fp.close()
269 date_time=datetime.datetime.utcfromtimestamp(hipoc)
270 year =int(date_time[0:4])
271 month=int(date_time[5:7])
272 dom =int(date_time[8:10])
273 thisDate= datetime.date(year,month,dom)
274 if (thisDate>=self.startDate and thisDate <= self.endDate):
269 date_time = datetime.datetime.utcfromtimestamp(hipoc)
270 year = int(date_time[0:4])
271 month = int(date_time[5:7])
272 dom = int(date_time[8:10])
273 thisDate = datetime.date(year, month, dom)
274 if (thisDate >= self.startDate and thisDate <= self.endDate):
275 275 return hf_dirname_format
276 276 except:
277 277 return None
278 278
279 def __findDataForDates(self,online=False):
279 def __findDataForDates(self, online=False):
280 280 if not(self.status):
281 281 return None
282 282
283 283 pat = '\d+.\d+'
284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
285 dirnameList = [x for x in dirnameList if x!=None]
284 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
285 dirnameList = [x for x in dirnameList if x != None]
286 286 dirnameList = [x.string for x in dirnameList]
287 287 if not(online):
288 288
289 289 dirnameList = [self.__selDates(x) for x in dirnameList]
290 dirnameList = [x for x in dirnameList if x!=None]
290 dirnameList = [x for x in dirnameList if x != None]
291 291
292 if len(dirnameList)>0:
292 if len(dirnameList) > 0:
293 293 self.status = 1
294 294 self.dirnameList = dirnameList
295 295 self.dirnameList.sort()
@@ -299,40 +299,40 class HFReader(ProcessingUnit):
299 299 return None
300 300
301 301 def __getTimeFromData(self):
302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
304 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
302 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
304 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
305 305 print('........................................')
306 filter_filenameList=[]
306 filter_filenameList = []
307 307 self.filenameList.sort()
308 for i in range(len(self.filenameList)-1):
309 filename=self.filenameList[i]
310 dir_hf_filename= filename
311 fp= h5py.File(dir_hf_filename,'r')
312 hipoc=fp['t'].value
313 hipoc=hipoc+self.timezone
314 date_time=datetime.datetime.utcfromtimestamp(hipoc)
308 for i in range(len(self.filenameList) - 1):
309 filename = self.filenameList[i]
310 dir_hf_filename = filename
311 fp = h5py.File(dir_hf_filename, 'r')
312 hipoc = fp['t'].value
313 hipoc = hipoc + self.timezone
314 date_time = datetime.datetime.utcfromtimestamp(hipoc)
315 315 fp.close()
316 year =int(date_time[0:4])
317 month=int(date_time[5:7])
318 dom =int(date_time[8:10])
319 hour =int(date_time[11:13])
320 min =int(date_time[14:16])
321 sec =int(date_time[17:19])
322 this_time=datetime.datetime(year,month,dom,hour,min,sec)
323 if (this_time>=startDateTime_Reader and this_time <= endDateTime_Reader):
316 year = int(date_time[0:4])
317 month = int(date_time[5:7])
318 dom = int(date_time[8:10])
319 hour = int(date_time[11:13])
320 min = int(date_time[14:16])
321 sec = int(date_time[17:19])
322 this_time = datetime.datetime(year, month, dom, hour, min, sec)
323 if (this_time >= startDateTime_Reader and this_time <= endDateTime_Reader):
324 324 filter_filenameList.append(filename)
325 325 filter_filenameList.sort()
326 326 self.filenameList = filter_filenameList
327 327 return 1
328 328
329 329 def __getFilenameList(self):
330 #print "hola"
331 #print self.dirnameList
332 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
333 self.filenameList= dirList
334 #print self.filenameList
335 #print "pase",len(self.filenameList)
330 # print "hola"
331 # print self.dirnameList
332 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
333 self.filenameList = dirList
334 # print self.filenameList
335 # print "pase",len(self.filenameList)
336 336
337 337 def __selectDataForTimes(self, online=False):
338 338
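__getTimeFromData above reads each file's epoch from its 't' dataset, applies the timezone offset, and then picks the date fields apart positionally, which presupposes a string timestamp; below is a compact sketch of the same check done with datetime comparisons. The 't' dataset layout and the helper name are assumptions for illustration, not the module's API:

import datetime
import h5py

def file_in_range(path, start_dt, end_dt, timezone=0):
    """Return True if the epoch stored in an HF file falls inside [start_dt, end_dt]."""
    with h5py.File(path, 'r') as fp:
        epoch = float(fp['t'][()]) + timezone        # seconds since the Unix epoch
    file_dt = datetime.datetime.utcfromtimestamp(epoch)
    return start_dt <= file_dt <= end_dt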
@@ -344,70 +344,70 class HFReader(ProcessingUnit):
344 344 if not(online):
345 345 if not(self.all):
346 346 self.__getTimeFromData()
347 if len(self.filenameList)>0:
348 self.status=1
347 if len(self.filenameList) > 0:
348 self.status = 1
349 349 self.filenameList.sort()
350 350 else:
351 self.status=0
351 self.status = 0
352 352 return None
353 353 else:
354 354 if self.set != None:
355 355
356 filename=getFileFromSet(self.path,self.ext,self.set)
356 filename = getFileFromSet(self.path, self.ext, self.set)
357 357
358 if self.flag_nextfile==True:
359 self.dirnameList=[filename]
360 fullfilename=self.path+"/"+filename
361 self.filenameList=[fullfilename]
362 self.filename_next_set=int(filename[6:16])+10
358 if self.flag_nextfile == True:
359 self.dirnameList = [filename]
360 fullfilename = self.path + "/" + filename
361 self.filenameList = [fullfilename]
362 self.filename_next_set = int(filename[6:16]) + 10
363 363
364 self.flag_nextfile=False
364 self.flag_nextfile = False
365 365 else:
366 366 print(filename)
367 367 print("PRIMERA CONDICION")
368 #if self.filename_next_set== int(filename[6:16]):
368 # if self.filename_next_set== int(filename[6:16]):
369 369 print("TODO BIEN")
370 370
371 371 if filename == None:
372 372 raise ValueError("corregir")
373 373
374 self.dirnameList=[filename]
375 fullfilename=self.path+"/"+filename
376 self.filenameList=[fullfilename]
377 self.filename_next_set=int(filename[6:16])+10
378 print("Setting next file",self.filename_next_set)
379 self.set=int(filename[6:16])
374 self.dirnameList = [filename]
375 fullfilename = self.path + "/" + filename
376 self.filenameList = [fullfilename]
377 self.filename_next_set = int(filename[6:16]) + 10
378 print("Setting next file", self.filename_next_set)
379 self.set = int(filename[6:16])
380 380 if True:
381 381 pass
382 382 else:
383 383 print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO")
384 384
385 385 else:
386 filename =getlastFileFromPath(self.path,self.ext)
386 filename = getlastFileFromPath(self.path, self.ext)
387 387
388 if self.flag_nextfile==True:
389 self.dirnameList=[filename]
390 fullfilename=self.path+"/"+filename
391 self.filenameList=[self.filenameList[-1]]
392 self.filename_next_set=int(filename[6:16])+10
388 if self.flag_nextfile == True:
389 self.dirnameList = [filename]
390 fullfilename = self.path + "/" + filename
391 self.filenameList = [self.filenameList[-1]]
392 self.filename_next_set = int(filename[6:16]) + 10
393 393
394 self.flag_nextfile=False
394 self.flag_nextfile = False
395 395 else:
396 filename=getFileFromSet(self.path,self.ext,self.set)
396 filename = getFileFromSet(self.path, self.ext, self.set)
397 397 print(filename)
398 398 print("PRIMERA CONDICION")
399 #if self.filename_next_set== int(filename[6:16]):
399 # if self.filename_next_set== int(filename[6:16]):
400 400 print("TODO BIEN")
401 401
402 402 if filename == None:
403 403 raise ValueError("corregir")
404 404
405 self.dirnameList=[filename]
406 fullfilename=self.path+"/"+filename
407 self.filenameList=[fullfilename]
408 self.filename_next_set=int(filename[6:16])+10
409 print("Setting next file",self.filename_next_set)
410 self.set=int(filename[6:16])
405 self.dirnameList = [filename]
406 fullfilename = self.path + "/" + filename
407 self.filenameList = [fullfilename]
408 self.filename_next_set = int(filename[6:16]) + 10
409 print("Setting next file", self.filename_next_set)
410 self.set = int(filename[6:16])
411 411 if True:
412 412 pass
413 413 else:
@@ -420,8 +420,8 class HFReader(ProcessingUnit):
420 420 startDate,
421 421 endDate,
422 422 ext,
423 startTime=datetime.time(0,0,0),
424 endTime=datetime.time(23,59,59),
423 startTime=datetime.time(0, 0, 0),
424 endTime=datetime.time(23, 59, 59),
425 425 walk=True):
426 426
427 427 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -429,18 +429,18 class HFReader(ProcessingUnit):
429 429 self.__checkPath()
430 430
431 431 self.__findDataForDates()
432 #print self.dirnameList
432 # print self.dirnameList
433 433
434 434 self.__selectDataForTimes()
435 435
436 436 for i in range(len(self.filenameList)):
437 print("%s"% (self.filenameList[i]))
437 print("%s" % (self.filenameList[i]))
438 438
439 439 return
440 440
441 441 def searchFilesOnLine(self,
442 442 path,
443 expLabel= "",
443 expLabel="",
444 444 ext=None,
445 445 startDate=None,
446 446 endDate=None,
@@ -451,27 +451,27 class HFReader(ProcessingUnit):
451 451 startDate = datetime.datetime.utcnow().date()
452 452 endDate = datetime.datetime.utcnow().date()
453 453
454 self.__setParameters(path=path,startDate=startDate,endDate=endDate,walk=walk)
454 self.__setParameters(path=path, startDate=startDate, endDate=endDate, walk=walk)
455 455
456 456 self.__checkPath()
457 457
458 fullpath=path
459 print("%s folder was found: " %(fullpath ))
458 fullpath = path
459 print("%s folder was found: " % (fullpath))
460 460
461 461 if set == None:
462 self.set=None
463 filename =getlastFileFromPath(fullpath,ext)
464 startDate= datetime.datetime.utcnow().date
465 endDate= datetime.datetime.utcnow().date()
462 self.set = None
463 filename = getlastFileFromPath(fullpath, ext)
464 startDate = datetime.datetime.utcnow().date
465 endDate = datetime.datetime.utcnow().date()
466 466 #
467 467 else:
468 filename= getFileFromSet(fullpath,ext,set)
469 startDate=None
470 endDate=None
468 filename = getFileFromSet(fullpath, ext, set)
469 startDate = None
470 endDate = None
471 471 #
472 472 if not (filename):
473 return None,None,None,None,None
474 #print "%s file was found" %(filename)
473 return None, None, None, None, None
474 # print "%s file was found" %(filename)
475 475
476 476 #
477 477 # dir_hf_filename= self.path+"/"+filename
@@ -485,20 +485,20 class HFReader(ProcessingUnit):
485 485 # dom =int(date_time[8:10])
486 486 # set= int(filename[4:10])
487 487 # self.set=set-1
488 #self.dirnameList=[filename]
489 filenameList= fullpath+"/"+filename
490 self.dirnameList=[filename]
491 self.filenameList=[filenameList]
492 self.flag_nextfile=True
493
494 #self.__findDataForDates(online=True)
495 #self.dirnameList=[self.dirnameList[-1]]
496 #print self.dirnameList
497 #self.__selectDataForTimes(online=True)
498 #return fullpath,filename,year,month,dom,set
488 # self.dirnameList=[filename]
489 filenameList = fullpath + "/" + filename
490 self.dirnameList = [filename]
491 self.filenameList = [filenameList]
492 self.flag_nextfile = True
493
494 # self.__findDataForDates(online=True)
495 # self.dirnameList=[self.dirnameList[-1]]
496 # print self.dirnameList
497 # self.__selectDataForTimes(online=True)
498 # return fullpath,filename,year,month,dom,set
499 499 return
500 500
501 def __setNextFile(self,online=False):
501 def __setNextFile(self, online=False):
502 502 """
503 503 """
504 504 if not(online):
@@ -513,7 +513,7 class HFReader(ProcessingUnit):
513 513 def __setNextFileOffline(self):
514 514 """
515 515 """
516 idFile= self.fileIndex
516 idFile = self.fileIndex
517 517 while(True):
518 518 idFile += 1
519 519 if not (idFile < len(self.filenameList)):
@@ -521,10 +521,10 class HFReader(ProcessingUnit):
521 521 print("No more Files")
522 522 return 0
523 523 filename = self.filenameList[idFile]
524 hfFilePointer =h5py.File(filename,'r')
524 hfFilePointer = h5py.File(filename, 'r')
525 525
526 epoc=hfFilePointer['t'].value
527 #this_time=datetime.datetime(year,month,dom,hour,min,sec)
526 epoc = hfFilePointer['t'].value
527 # this_time=datetime.datetime(year,month,dom,hour,min,sec)
528 528 break
529 529
530 530 self.flagIsNewFile = 1
@@ -533,70 +533,70 class HFReader(ProcessingUnit):
533 533
534 534 self.hfFilePointer = hfFilePointer
535 535 hfFilePointer.close()
536 self.__t0=epoc
537 print("Setting the file: %s"%self.filename)
536 self.__t0 = epoc
537 print("Setting the file: %s" % self.filename)
538 538
539 539 return 1
540 540
541 541 def __setNextFileOnline(self):
542 542 """
543 543 """
544 print("SOY NONE",self.set)
545 if self.set==None:
544 print("SOY NONE", self.set)
545 if self.set == None:
546 546 pass
547 547 else:
548 self.set +=10
548 self.set += 10
549 549
550 filename = self.filenameList[0]#fullfilename
550 filename = self.filenameList[0] # fullfilename
551 551 if self.filename_online != None:
552 552 self.__selectDataForTimes(online=True)
553 553 filename = self.filenameList[0]
554 554 while self.filename_online == filename:
555 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
555 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
556 556 time.sleep(self.__waitForNewFile)
557 #self.__findDataForDates(online=True)
558 self.set=self.filename_next_set
557 # self.__findDataForDates(online=True)
558 self.set = self.filename_next_set
559 559 self.__selectDataForTimes(online=True)
560 560 filename = self.filenameList[0]
561 sizeoffile=os.path.getsize(filename)
562
563 #print filename
564 sizeoffile=os.path.getsize(filename)
565 if sizeoffile<1670240:
566 print("%s is not the rigth size"%filename)
567 delay=50
568 print('waiting %d seconds for delay...'%(delay))
561 sizeoffile = os.path.getsize(filename)
562
563 # print filename
564 sizeoffile = os.path.getsize(filename)
565 if sizeoffile < 1670240:
566 print("%s is not the rigth size" % filename)
567 delay = 50
568 print('waiting %d seconds for delay...' % (delay))
569 569 time.sleep(delay)
570 sizeoffile=os.path.getsize(filename)
571 if sizeoffile<1670240:
572 delay=50
573 print('waiting %d more seconds for delay...'%(delay))
570 sizeoffile = os.path.getsize(filename)
571 if sizeoffile < 1670240:
572 delay = 50
573 print('waiting %d more seconds for delay...' % (delay))
574 574 time.sleep(delay)
575 575
576 sizeoffile=os.path.getsize(filename)
577 if sizeoffile<1670240:
578 delay=50
579 print('waiting %d more seconds for delay...'%(delay))
576 sizeoffile = os.path.getsize(filename)
577 if sizeoffile < 1670240:
578 delay = 50
579 print('waiting %d more seconds for delay...' % (delay))
580 580 time.sleep(delay)
581 581
582 582 try:
583 hfFilePointer=h5py.File(filename,'r')
583 hfFilePointer = h5py.File(filename, 'r')
584 584
585 585 except:
586 print("Error reading file %s"%filename)
586 print("Error reading file %s" % filename)
587 587
588 self.filename_online=filename
589 epoc=hfFilePointer['t'].value
588 self.filename_online = filename
589 epoc = hfFilePointer['t'].value
590 590
591 self.hfFilePointer=hfFilePointer
591 self.hfFilePointer = hfFilePointer
592 592 hfFilePointer.close()
593 self.__t0=epoc
593 self.__t0 = epoc
594 594
595 595
596 596 self.flagIsNewFile = 1
597 597 self.filename = filename
598 598
599 print("Setting the file: %s"%self.filename)
599 print("Setting the file: %s" % self.filename)
600 600 return 1
601 601
602 602 def __getExpParameters(self):
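In online mode the hunk above refuses to open a file until it has grown to the expected block size (1670240 bytes here), sleeping and re-checking a few times. A generic sketch of that polling; the function name and defaults are illustrative:

import os
import time

def wait_for_complete_file(filename, expected_bytes=1670240, delay=50, tries=3):
    """Poll a file's size until it reaches the expected size or the retries run out."""
    for _ in range(tries):
        if os.path.getsize(filename) >= expected_bytes:
            return True
        print('waiting %d seconds for %s to finish writing...' % (delay, filename))
        time.sleep(delay)
    return os.path.getsize(filename) >= expected_bytes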
@@ -604,46 +604,46 class HFReader(ProcessingUnit):
604 604 return None
605 605
606 606 def setup(self,
607 path = None,
608 startDate = None,
609 endDate = None,
610 startTime = datetime.time(0,0,0),
611 endTime = datetime.time(23,59,59),
612 set = None,
613 expLabel = "",
614 ext = None,
607 path=None,
608 startDate=None,
609 endDate=None,
610 startTime=datetime.time(0, 0, 0),
611 endTime=datetime.time(23, 59, 59),
612 set=None,
613 expLabel="",
614 ext=None,
615 615 all=0,
616 616 timezone=0,
617 online = False,
618 delay = 60,
619 walk = True):
617 online=False,
618 delay=60,
619 walk=True):
620 620 '''
621 621 In this method we should set all initial parameters.
622 622
623 623 '''
624 if path==None:
624 if path == None:
625 625 raise ValueError("The path is not valid")
626 626
627 if ext==None:
627 if ext == None:
628 628 ext = self.ext
629 629
630 self.timezone= timezone
631 self.online= online
632 self.all=all
633 #if set==None:
630 self.timezone = timezone
631 self.online = online
632 self.all = all
633 # if set==None:
634 634
635 #print set
635 # print set
636 636 if not(online):
637 637 print("Searching files in offline mode...")
638 638
639 639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
640 640 else:
641 641 print("Searching files in online mode...")
642 self.searchFilesOnLine(path, walk,ext,set=set)
643 if set==None:
642 self.searchFilesOnLine(path, walk, ext, set=set)
643 if set == None:
644 644 pass
645 645 else:
646 self.set=set-10
646 self.set = set - 10
647 647
648 648 # for nTries in range(self.nTries):
649 649 #
@@ -659,7 +659,7 class HFReader(ProcessingUnit):
659 659
660 660
661 661 if not(self.filenameList):
662 print("There is no files into the folder: %s"%(path))
662 print("There is no files into the folder: %s" % (path))
663 663 sys.exit(-1)
664 664
665 665 self.__getExpParameters()
@@ -674,9 +674,9 class HFReader(ProcessingUnit):
674 674 self.__setLocalVariables()
675 675
676 676 self.__setHeaderDO()
677 #self.profileIndex_offset= 0
677 # self.profileIndex_offset= 0
678 678
679 #self.profileIndex = self.profileIndex_offset
679 # self.profileIndex = self.profileIndex_offset
680 680
681 681 self.isConfig = True
682 682
@@ -686,7 +686,7 class HFReader(ProcessingUnit):
686 686
687 687 def __setLocalVariables(self):
688 688
689 self.datablock = numpy.zeros((self.nChannels, self.nHeights,self.nProfiles), dtype = numpy.complex)
689 self.datablock = numpy.zeros((self.nChannels, self.nHeights, self.nProfiles), dtype=numpy.complex)
690 690 #
691 691
692 692
@@ -703,34 +703,34 class HFReader(ProcessingUnit):
703 703
704 704
705 705 #---------------------------------------------------------
706 self.dataOut.systemHeaderObj.nProfiles=100
707 self.dataOut.systemHeaderObj.nSamples=1000
706 self.dataOut.systemHeaderObj.nProfiles = 100
707 self.dataOut.systemHeaderObj.nSamples = 1000
708 708
709 709
710 SAMPLING_STRUCTURE=[('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 self.dataOut.radarControllerHeaderObj.samplingWindow=numpy.zeros((1,),SAMPLING_STRUCTURE)
712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0']=0
713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh']=1.5
714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']=1000
715 self.dataOut.radarControllerHeaderObj.nHeights=int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
710 SAMPLING_STRUCTURE = [('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 self.dataOut.radarControllerHeaderObj.samplingWindow = numpy.zeros((1,), SAMPLING_STRUCTURE)
712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0'] = 0
713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh'] = 1.5
714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'] = 1000
715 self.dataOut.radarControllerHeaderObj.nHeights = int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
716 716 self.dataOut.radarControllerHeaderObj.firstHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['h0']
717 717 self.dataOut.radarControllerHeaderObj.deltaHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['dh']
718 718 self.dataOut.radarControllerHeaderObj.samplesWin = self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']
719 719
720 self.dataOut.radarControllerHeaderObj.nWindows=1
721 self.dataOut.radarControllerHeaderObj.codetype=0
722 self.dataOut.radarControllerHeaderObj.numTaus=0
723 #self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
720 self.dataOut.radarControllerHeaderObj.nWindows = 1
721 self.dataOut.radarControllerHeaderObj.codetype = 0
722 self.dataOut.radarControllerHeaderObj.numTaus = 0
723 # self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
724 724
725 725
726 #self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 #self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 #self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
726 # self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 # self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 # self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
729 729
730 self.dataOut.radarControllerHeaderObj.code_size=0
731 self.dataOut.nBaud=0
732 self.dataOut.nCode=0
733 self.dataOut.nPairs=0
730 self.dataOut.radarControllerHeaderObj.code_size = 0
731 self.dataOut.nBaud = 0
732 self.dataOut.nCode = 0
733 self.dataOut.nPairs = 0
734 734
735 735
736 736 #---------------------------------------------------------
@@ -739,19 +739,19 class HFReader(ProcessingUnit):
739 739
740 740 self.dataOut.data = None
741 741
742 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
742 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
743 743
744 744 self.dataOut.nProfiles = 1
745 745
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype=numpy.float) * self.__deltaHeigth
747 747
748 748 self.dataOut.channelList = list(range(self.nChannels))
749 749
750 #self.dataOut.channelIndexList = None
750 # self.dataOut.channelIndexList = None
751 751
752 752 self.dataOut.flagNoData = True
753 753
754 #Set to TRUE if the data is discontinuous
754 # Set to TRUE if the data is discontinuous
755 755 self.dataOut.flagDiscontinuousBlock = False
756 756
757 757 self.dataOut.utctime = None
@@ -766,16 +766,16 class HFReader(ProcessingUnit):
766 766
767 767 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
768 768
769 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
769 self.dataOut.flagDecodeData = False # asumo que la data esta decodificada
770 770
771 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
771 self.dataOut.flagDeflipData = False # asumo que la data esta sin flip
772 772
773 773 self.dataOut.flagShiftFFT = False
774 774
775 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
775 self.dataOut.ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
776 776
777 #Time interval between profiles
778 #self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
777 # Time interval between profiles
778 # self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
779 779
780 780
781 781 self.dataOut.frequency = self.__frequency
@@ -800,7 +800,7 class HFReader(ProcessingUnit):
800 800
801 801 def __setNewBlock(self):
802 802
803 if self.hfFilePointer==None:
803 if self.hfFilePointer == None:
804 804 return 0
805 805
806 806 if self.flagIsNewFile:
@@ -816,17 +816,17 class HFReader(ProcessingUnit):
816 816
817 817
818 818 def readBlock(self):
819 fp=h5py.File(self.filename,'r')
820 #Puntero que apunta al archivo hdf5
821 ch0=(fp['ch0']).value #Primer canal (100,1000)--(perfiles,alturas)
822 ch1=(fp['ch1']).value #Segundo canal (100,1000)--(perfiles,alturas)
819 fp = h5py.File(self.filename, 'r')
820 # Puntero que apunta al archivo hdf5
821 ch0 = (fp['ch0']).value # Primer canal (100,1000)--(perfiles,alturas)
822 ch1 = (fp['ch1']).value # Segundo canal (100,1000)--(perfiles,alturas)
823 823 fp.close()
824 ch0= ch0.swapaxes(0,1) #Primer canal (100,1000)--(alturas,perfiles)
825 ch1= ch1.swapaxes(0,1) #Segundo canal (100,1000)--(alturas,perfiles)
826 self.datablock = numpy.array([ch0,ch1])
827 self.flagIsNewFile=0
824 ch0 = ch0.swapaxes(0, 1) # Primer canal (100,1000)--(alturas,perfiles)
825 ch1 = ch1.swapaxes(0, 1) # Segundo canal (100,1000)--(alturas,perfiles)
826 self.datablock = numpy.array([ch0, ch1])
827 self.flagIsNewFile = 0
828 828
829 self.profileIndex=0
829 self.profileIndex = 0
830 830
831 831 return 1
832 832
@@ -837,16 +837,16 class HFReader(ProcessingUnit):
837 837
838 838 if self.__hasNotDataInBuffer():
839 839 if not(self.readNextBlock()):
840 self.dataOut.flagNodata=True
840 self.dataOut.flagNodata = True
841 841 return 0
842 842
843 843 ##############################
844 844 ##############################
845 self.dataOut.data = self.datablock[:,:,self.profileIndex]
846 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds*self.profileIndex
847 self.dataOut.profileIndex= self.profileIndex
848 self.dataOut.flagNoData=False
849 self.profileIndex +=1
845 self.dataOut.data = self.datablock[:, :, self.profileIndex]
846 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds * self.profileIndex
847 self.dataOut.profileIndex = self.profileIndex
848 self.dataOut.flagNoData = False
849 self.profileIndex += 1
850 850
851 851 return self.dataOut.data
852 852
@@ -859,4 +859,4 class HFReader(ProcessingUnit):
859 859 if not self.isConfig:
860 860 self.setup(**kwargs)
861 861 self.isConfig = True
862 self.getData() No newline at end of file
862 self.getData()
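
A minimal sketch, not part of this changeset, of the block/profile indexing that the HFReader hunks above implement: each HDF5 file holds ch0/ch1 shaped (profiles, heights), readBlock() swaps axes and stacks the channels, and getData() serves one profile per call. The shapes, t0 and ippSeconds below are assumed values, not read from a real file.

import numpy

n_profiles, n_heights = 100, 1000
ch0 = numpy.zeros((n_profiles, n_heights), dtype=numpy.complex64)
ch1 = numpy.zeros((n_profiles, n_heights), dtype=numpy.complex64)

# readBlock(): swap to (heights, profiles) and stack the two channels
datablock = numpy.array([ch0.swapaxes(0, 1), ch1.swapaxes(0, 1)])

# getData(): serve one profile per call and derive its timestamp
t0 = 1436875200.0        # epoch seconds of the first profile (assumed)
ipp_seconds = 0.01       # time between profiles (assumed)
for profile_index in range(n_profiles):
    profile = datablock[:, :, profile_index]      # (channels, heights)
    utctime = t0 + ipp_seconds * profile_index
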
@@ -77,20 +77,20 class AMISRReader(ProcessingUnit):
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 #Is really necessary create the output object in the initializer
80 # Is really necessary create the output object in the initializer
81 81 self.dataOut = Voltage()
82 82
83 def setup(self,path=None,
84 startDate=None,
85 endDate=None,
86 startTime=None,
83 def setup(self, path=None,
84 startDate=None,
85 endDate=None,
86 startTime=None,
87 87 endTime=None,
88 88 walk=True,
89 89 timezone='ut',
90 90 all=0,
91 code = None,
92 nCode = 0,
93 nBaud = 0,
91 code=None,
92 nCode=0,
93 nBaud=0,
94 94 online=False):
95 95
96 96 self.timezone = timezone
@@ -103,15 +103,15 class AMISRReader(ProcessingUnit):
103 103
104 104
105 105
106 #self.findFiles()
106 # self.findFiles()
107 107 if not(online):
108 #Busqueda de archivos offline
108 # Busqueda de archivos offline
109 109 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
110 110 else:
111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
111 self.searchFilesOnLine(path, startDate, endDate, startTime, endTime, walk)
112 112
113 113 if not(self.filenameList):
114 print("There is no files into the folder: %s"%(path))
114 print("There is no files into the folder: %s" % (path))
115 115
116 116 sys.exit(-1)
117 117
@@ -127,43 +127,43 class AMISRReader(ProcessingUnit):
127 127 pass
128 128
129 129
130 def readAMISRHeader(self,fp):
130 def readAMISRHeader(self, fp):
131 131 header = 'Raw11/Data/RadacHeader'
132 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
134 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
135 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
136 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
137 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
138 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
139 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE ANDE CALCULATE THE REST WITH IPP*nindexprofile
140 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
141 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
132 self.beamCodeByPulse = fp.get(header + '/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
134 # self.code = fp.get(header+'/Code') # NOT USE FOR THIS
135 self.frameCount = fp.get(header + '/FrameCount') # NOT USE FOR THIS
136 self.modeGroup = fp.get(header + '/ModeGroup') # NOT USE FOR THIS
137 self.nsamplesPulse = fp.get(header + '/NSamplesPulse') # TO GET NSA OR USING DATA FOR THAT
138 self.pulseCount = fp.get(header + '/PulseCount') # NOT USE FOR THIS
139 self.radacTime = fp.get(header + '/RadacTime') # 1st TIME ON FILE ANDE CALCULATE THE REST WITH IPP*nindexprofile
140 self.timeCount = fp.get(header + '/TimeCount') # NOT USE FOR THIS
141 self.timeStatus = fp.get(header + '/TimeStatus') # NOT USE FOR THIS
142 142 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
143 self.frequency = fp.get('Rx/Frequency')
143 self.frequency = fp.get('Rx/Frequency')
144 144 txAus = fp.get('Raw11/Data/Pulsewidth')
145 145
146 146
147 self.nblocks = self.pulseCount.shape[0] #nblocks
147 self.nblocks = self.pulseCount.shape[0] # nblocks
148 148
149 self.nprofiles = self.pulseCount.shape[1] #nprofile
150 self.nsa = self.nsamplesPulse[0,0] #ngates
149 self.nprofiles = self.pulseCount.shape[1] # nprofile
150 self.nsa = self.nsamplesPulse[0, 0] # ngates
151 151 self.nchannels = self.beamCode.shape[1]
152 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
153 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
152 self.ippSeconds = (self.radacTime[0][1] - self.radacTime[0][0]) # Ipp in seconds
153 # self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
155 155
156 #filling radar controller header parameters
157 self.__ippKm = self.ippSeconds *.15*1e6 # in km
158 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
156 # filling radar controller header parameters
157 self.__ippKm = self.ippSeconds * .15 * 1e6 # in km
158 self.__txA = (txAus.value) * .15 # (ipp[us]*.15km/1us) in km
159 159 self.__txB = 0
160 nWindows=1
160 nWindows = 1
161 161 self.__nSamples = self.nsa
162 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
162 self.__firstHeight = self.rangeFromFile[0][0] / 1000 # in km
163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0]) / 1000
164 164
165 #for now until understand why the code saved is different (code included even though code not in tuf file)
166 #self.__codeType = 0
165 # for now until understand why the code saved is different (code included even though code not in tuf file)
166 # self.__codeType = 0
167 167 # self.__nCode = None
168 168 # self.__nBaud = None
169 169 self.__code = self.code
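
A minimal sketch, not part of this changeset, of the unit handling in readAMISRHeader() above: RadacTime is in seconds, Samples/Range in metres, Pulsewidth in microseconds, and 0.15 km per microsecond is c/2, the radar range per unit of echo delay. All numbers below are illustrative.

import numpy

radacTime = numpy.array([[0.000, 0.010]])      # first two pulse timestamps (s)
rangeFromFile = numpy.array([[450.0, 900.0]])  # first two range gates (m)
txAus = 20.0                                   # pulse width (us)

ippSeconds = radacTime[0][1] - radacTime[0][0]        # inter-pulse period (s)
ippKm = ippSeconds * 1e6 * .15                        # same IPP expressed in km
txA_km = txAus * .15                                  # transmitted pulse length in km
firstHeight_km = rangeFromFile[0][0] / 1000
deltaHeight_km = (rangeFromFile[0][1] - rangeFromFile[0][0]) / 1000
waitForNewFile = 10 * 100 * ippSeconds                # nblocks * nprofiles * ipp (example)
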
@@ -172,11 +172,11 class AMISRReader(ProcessingUnit):
172 172 self.__codeType = 1
173 173 self.__nCode = self.nCode
174 174 self.__nBaud = self.nBaud
175 #self.__code = 0
175 # self.__code = 0
176 176
177 #filling system header parameters
177 # filling system header parameters
178 178 self.__nSamples = self.nsa
179 self.newProfiles = self.nprofiles/self.nchannels
179 self.newProfiles = self.nprofiles / self.nchannels
180 180 self.__channelList = list(range(self.nchannels))
181 181
182 182 self.__frequency = self.frequency[0][0]
@@ -187,7 +187,7 class AMISRReader(ProcessingUnit):
187 187
188 188 pass
189 189
190 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
190 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
191 191 self.path = path
192 192 self.startDate = startDate
193 193 self.endDate = endDate
@@ -200,7 +200,7 class AMISRReader(ProcessingUnit):
200 200 self.status = 1
201 201 else:
202 202 self.status = 0
203 print('Path:%s does not exists'%self.path)
203 print('Path:%s does not exists' % self.path)
204 204
205 205 return
206 206
@@ -210,27 +210,27 class AMISRReader(ProcessingUnit):
210 210 year = int(amisr_dirname_format[0:4])
211 211 month = int(amisr_dirname_format[4:6])
212 212 dom = int(amisr_dirname_format[6:8])
213 thisDate = datetime.date(year,month,dom)
213 thisDate = datetime.date(year, month, dom)
214 214
215 if (thisDate>=self.startDate and thisDate <= self.endDate):
215 if (thisDate >= self.startDate and thisDate <= self.endDate):
216 216 return amisr_dirname_format
217 217 except:
218 218 return None
219 219
220 220
221 def __findDataForDates(self,online=False):
221 def __findDataForDates(self, online=False):
222 222
223 223 if not(self.status):
224 224 return None
225 225
226 226 pat = '\d+.\d+'
227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 dirnameList = [x for x in dirnameList if x!=None]
227 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
228 dirnameList = [x for x in dirnameList if x != None]
229 229 dirnameList = [x.string for x in dirnameList]
230 230 if not(online):
231 231 dirnameList = [self.__selDates(x) for x in dirnameList]
232 dirnameList = [x for x in dirnameList if x!=None]
233 if len(dirnameList)>0:
232 dirnameList = [x for x in dirnameList if x != None]
233 if len(dirnameList) > 0:
234 234 self.status = 1
235 235 self.dirnameList = dirnameList
236 236 self.dirnameList.sort()
@@ -239,38 +239,38 class AMISRReader(ProcessingUnit):
239 239 return None
240 240
241 241 def __getTimeFromData(self):
242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
242 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
244 244
245 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
245 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
246 246 print('........................................')
247 247 filter_filenameList = []
248 248 self.filenameList.sort()
249 #for i in range(len(self.filenameList)-1):
249 # for i in range(len(self.filenameList)-1):
250 250 for i in range(len(self.filenameList)):
251 251 filename = self.filenameList[i]
252 fp = h5py.File(filename,'r')
252 fp = h5py.File(filename, 'r')
253 253 time_str = fp.get('Time/RadacTimeString')
254 254
255 255 startDateTimeStr_File = time_str[0][0].split('.')[0]
256 256 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
257 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
257 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
258 258
259 259 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
260 260 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
261 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
261 endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
262 262
263 263 fp.close()
264 264
265 265 if self.timezone == 'lt':
266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
268 268
269 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
270 #self.filenameList.remove(filename)
269 if (endDateTime_File >= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
270 # self.filenameList.remove(filename)
271 271 filter_filenameList.append(filename)
272 272
273 if (endDateTime_File>=endDateTime_Reader):
273 if (endDateTime_File >= endDateTime_Reader):
274 274 break
275 275
276 276
@@ -279,7 +279,7 class AMISRReader(ProcessingUnit):
279 279 return 1
280 280
281 281 def __filterByGlob1(self, dirName):
282 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
282 filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
283 283 filter_files.sort()
284 284 filterDict = {}
285 285 filterDict.setdefault(dirName)
@@ -295,21 +295,21 class AMISRReader(ProcessingUnit):
295 295
296 296
297 297 def __selectDataForTimes(self, online=False):
298 #aun no esta implementado el filtro for tiempo
298 # aun no esta implementado el filtro for tiempo
299 299 if not(self.status):
300 300 return None
301 301
302 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
302 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
303 303
304 304 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
305 305
306 306 self.__getFilenameList(fileListInKeys, dirList)
307 307 if not(online):
308 #filtro por tiempo
308 # filtro por tiempo
309 309 if not(self.all):
310 310 self.__getTimeFromData()
311 311
312 if len(self.filenameList)>0:
312 if len(self.filenameList) > 0:
313 313 self.status = 1
314 314 self.filenameList.sort()
315 315 else:
@@ -317,7 +317,7 class AMISRReader(ProcessingUnit):
317 317 return None
318 318
319 319 else:
320 #get the last file - 1
320 # get the last file - 1
321 321 self.filenameList = [self.filenameList[-2]]
322 322
323 323 new_dirnameList = []
@@ -329,14 +329,14 class AMISRReader(ProcessingUnit):
329 329 self.dirnameList = new_dirnameList
330 330 return 1
331 331
332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
333 endTime=datetime.time(23,59,59),walk=True):
332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0, 0, 0),
333 endTime=datetime.time(23, 59, 59), walk=True):
334 334
335 if endDate ==None:
335 if endDate == None:
336 336 startDate = datetime.datetime.utcnow().date()
337 337 endDate = datetime.datetime.utcnow().date()
338 338
339 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
339 self.__setParameters(path=path, startDate=startDate, endDate=endDate, startTime=startTime, endTime=endTime, walk=walk)
340 340
341 341 self.__checkPath()
342 342
@@ -353,8 +353,8 class AMISRReader(ProcessingUnit):
353 353 path,
354 354 startDate,
355 355 endDate,
356 startTime=datetime.time(0,0,0),
357 endTime=datetime.time(23,59,59),
356 startTime=datetime.time(0, 0, 0),
357 endTime=datetime.time(23, 59, 59),
358 358 walk=True):
359 359
360 360 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -366,7 +366,7 class AMISRReader(ProcessingUnit):
366 366 self.__selectDataForTimes()
367 367
368 368 for i in range(len(self.filenameList)):
369 print("%s" %(self.filenameList[i]))
369 print("%s" % (self.filenameList[i]))
370 370
371 371 return
372 372
@@ -382,7 +382,7 class AMISRReader(ProcessingUnit):
382 382
383 383 filename = self.filenameList[idFile]
384 384
385 amisrFilePointer = h5py.File(filename,'r')
385 amisrFilePointer = h5py.File(filename, 'r')
386 386
387 387 break
388 388
@@ -392,7 +392,7 class AMISRReader(ProcessingUnit):
392 392
393 393 self.amisrFilePointer = amisrFilePointer
394 394
395 print("Setting the file: %s"%self.filename)
395 print("Setting the file: %s" % self.filename)
396 396
397 397 return 1
398 398
@@ -404,7 +404,7 class AMISRReader(ProcessingUnit):
404 404 filename = self.filenameList[0]
405 405 wait = 0
406 406 while self.__filename_online == filename:
407 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
407 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
408 408 if wait == 5:
409 409 return 0
410 410 sleep(self.__waitForNewFile)
@@ -414,40 +414,40 class AMISRReader(ProcessingUnit):
414 414
415 415 self.__filename_online = filename
416 416
417 self.amisrFilePointer = h5py.File(filename,'r')
417 self.amisrFilePointer = h5py.File(filename, 'r')
418 418 self.flagIsNewFile = 1
419 419 self.filename = filename
420 print("Setting the file: %s"%self.filename)
420 print("Setting the file: %s" % self.filename)
421 421 return 1
422 422
423 423
424 424 def readData(self):
425 425 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
426 re = buffer[:,:,:,0]
427 im = buffer[:,:,:,1]
428 dataset = re + im*1j
426 re = buffer[:, :, :, 0]
427 im = buffer[:, :, :, 1]
428 dataset = re + im * 1j
429 429 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
430 timeset = self.radacTime[:,0]
431 return dataset,timeset
430 timeset = self.radacTime[:, 0]
431 return dataset, timeset
432 432
433 433 def reshapeData(self):
434 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 channels = self.beamCodeByPulse[0,:]
434 # self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 channels = self.beamCodeByPulse[0, :]
436 436 nchan = self.nchannels
437 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
437 # self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
438 438 nblocks = self.nblocks
439 439 nsamples = self.nsa
440 440
441 #Dimensions : nChannels, nProfiles, nSamples
441 # Dimensions : nChannels, nProfiles, nSamples
442 442 new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
443 443 ############################################
444 444
445 445 for thisChannel in range(nchan):
446 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
446 new_block[:, thisChannel, :, :] = self.dataset[:, numpy.where(channels == self.beamCode[0][thisChannel])[0], :]
447 447
448 448
449 new_block = numpy.transpose(new_block, (1,0,2,3))
450 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
449 new_block = numpy.transpose(new_block, (1, 0, 2, 3))
450 new_block = numpy.reshape(new_block, (nchan, -1, nsamples))
451 451
452 452 return new_block
453 453
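
A minimal sketch, not part of this changeset, of the beam de-interleaving done in reshapeData() above: profiles arrive interleaved by beam code and each beam becomes one output channel. Shapes and beam codes are made up.

import numpy

nblocks, nprofiles, nsamples, nchan = 2, 8, 4, 2
beamCode = numpy.array([[101, 102]])                        # one code per channel
channels = numpy.tile(beamCode[0], nprofiles // nchan)      # beam code of every profile
dataset = numpy.arange(nblocks * nprofiles * nsamples,
                       dtype="float32").reshape(nblocks, nprofiles, nsamples).astype("complex64")

newProfiles = nprofiles // nchan                            # profiles per channel
new_block = numpy.empty((nblocks, nchan, newProfiles, nsamples), dtype="complex64")
for thisChannel in range(nchan):
    new_block[:, thisChannel, :, :] = dataset[:, numpy.where(channels == beamCode[0][thisChannel])[0], :]

new_block = numpy.transpose(new_block, (1, 0, 2, 3))        # (nchan, nblocks, newProfiles, nsamples)
new_block = numpy.reshape(new_block, (nchan, -1, nsamples))
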
@@ -457,7 +457,7 class AMISRReader(ProcessingUnit):
457 457
458 458 def fillJROHeader(self):
459 459
460 #fill radar controller header
460 # fill radar controller header
461 461 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
462 462 txA=self.__txA,
463 463 txB=0,
@@ -467,12 +467,12 class AMISRReader(ProcessingUnit):
467 467 deltaHeight=self.__deltaHeight,
468 468 codeType=self.__codeType,
469 469 nCode=self.__nCode, nBaud=self.__nBaud,
470 code = self.__code,
470 code=self.__code,
471 471 fClock=1)
472 472
473 473
474 474
475 #fill system header
475 # fill system header
476 476 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
477 477 nProfiles=self.newProfiles,
478 478 nChannels=len(self.__channelList),
@@ -483,17 +483,17 class AMISRReader(ProcessingUnit):
483 483
484 484 self.dataOut.data = None
485 485
486 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
486 self.dataOut.dtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
487 487
488 488 # self.dataOut.nChannels = 0
489 489
490 490 # self.dataOut.nHeights = 0
491 491
492 self.dataOut.nProfiles = self.newProfiles*self.nblocks
492 self.dataOut.nProfiles = self.newProfiles * self.nblocks
493 493
494 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
496 self.dataOut.heightList = ranges/1000.0 #km
494 # self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 ranges = numpy.reshape(self.rangeFromFile.value, (-1))
496 self.dataOut.heightList = ranges / 1000.0 # km
497 497
498 498
499 499 self.dataOut.channelList = self.__channelList
@@ -504,16 +504,16 class AMISRReader(ProcessingUnit):
504 504
505 505 self.dataOut.flagNoData = True
506 506
507 #Set to TRUE if the data is discontinuous
507 # Set to TRUE if the data is discontinuous
508 508 self.dataOut.flagDiscontinuousBlock = False
509 509
510 510 self.dataOut.utctime = None
511 511
512 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
512 # self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
513 513 if self.timezone == 'lt':
514 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
514 self.dataOut.timeZone = time.timezone / 60. # get the timezone in minutes
515 515 else:
516 self.dataOut.timeZone = 0 #by default time is UTC
516 self.dataOut.timeZone = 0 # by default time is UTC
517 517
518 518 self.dataOut.dstFlag = 0
519 519
@@ -521,23 +521,23 class AMISRReader(ProcessingUnit):
521 521
522 522 self.dataOut.nCohInt = 1
523 523
524 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
524 self.dataOut.flagDecodeData = False # asumo que la data esta decodificada
525 525
526 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
526 self.dataOut.flagDeflipData = False # asumo que la data esta sin flip
527 527
528 528 self.dataOut.flagShiftFFT = False
529 529
530 530 self.dataOut.ippSeconds = self.ippSeconds
531 531
532 #Time interval between profiles
533 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
532 # Time interval between profiles
533 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
534 534
535 535 self.dataOut.frequency = self.__frequency
536 536
537 537 self.dataOut.realtime = self.online
538 538 pass
539 539
540 def readNextFile(self,online=False):
540 def readNextFile(self, online=False):
541 541
542 542 if not(online):
543 543 newFile = self.__setNextFileOffline()
@@ -547,25 +547,25 class AMISRReader(ProcessingUnit):
547 547 if not(newFile):
548 548 return 0
549 549
550 #if self.__firstFile:
550 # if self.__firstFile:
551 551 self.readAMISRHeader(self.amisrFilePointer)
552 552 self.createBuffers()
553 553 self.fillJROHeader()
554 #self.__firstFile = False
554 # self.__firstFile = False
555 555
556 556
557 557
558 self.dataset,self.timeset = self.readData()
558 self.dataset, self.timeset = self.readData()
559 559
560 if self.endDate!=None:
561 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
560 if self.endDate != None:
561 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
562 562 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
563 563 startDateTimeStr_File = time_str[0][0].split('.')[0]
564 564 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
565 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
565 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
566 566 if self.timezone == 'lt':
567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
568 if (startDateTime_File>endDateTime_Reader):
567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
568 if (startDateTime_File > endDateTime_Reader):
569 569 return 0
570 570
571 571 self.jrodataset = self.reshapeData()
@@ -576,7 +576,7 class AMISRReader(ProcessingUnit):
576 576
577 577
578 578 def __hasNotDataInBuffer(self):
579 if self.profileIndex >= (self.newProfiles*self.nblocks):
579 if self.profileIndex >= (self.newProfiles * self.nblocks):
580 580 return 1
581 581 return 0
582 582
@@ -592,20 +592,20 class AMISRReader(ProcessingUnit):
592 592 return 0
593 593
594 594
595 if self.dataset is None: # setear esta condicion cuando no hayan datos por leers
595 if self.dataset is None: # setear esta condicion cuando no hayan datos por leers
596 596 self.dataOut.flagNoData = True
597 597 return 0
598 598
599 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
599 # self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
600 600
601 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
601 self.dataOut.data = self.jrodataset[:, self.profileIndex, :]
602 602
603 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
604 #verificar basic header de jro data y ver si es compatible con este valor
605 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
603 # self.dataOut.utctime = self.jrotimeset[self.profileIndex]
604 # verificar basic header de jro data y ver si es compatible con este valor
605 # self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
606 606 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
607 indexblock = self.profileIndex/self.newProfiles
608 #print indexblock, indexprof
607 indexblock = self.profileIndex / self.newProfiles
608 # print indexblock, indexprof
609 609 self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
610 610 self.dataOut.profileIndex = self.profileIndex
611 611 self.dataOut.flagNoData = False
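
A minimal sketch, not part of this changeset, of how getData() above maps the running profileIndex to a block and an in-block profile for the timestamp. Note that under Python 3 the "/" operator is true division, so the "//" used here is what keeps indexblock usable as an array index; all values are assumed.

import numpy

nblocks, newProfiles, nchannels = 3, 50, 4
ippSeconds = 0.0005
timeset = numpy.arange(nblocks) * newProfiles * ippSeconds * nchannels   # block start times (s)

profileIndex = 120
indexprof = profileIndex % newProfiles        # profile within its block
indexblock = profileIndex // newProfiles      # block that holds this profile
utctime = timeset[indexblock] + indexprof * ippSeconds * nchannels
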
@@ -91,7 +91,7 class MADReader(Reader, ProcessingUnit):
91 91 self.flagNoMoreFiles = 0
92 92 self.filename = None
93 93 self.intervals = set()
94 self.datatime = datetime.datetime(1900,1,1)
94 self.datatime = datetime.datetime(1900, 1, 1)
95 95 self.format = None
96 96 self.filefmt = "***%Y%m%d*******"
97 97
@@ -125,7 +125,7 class MADReader(Reader, ProcessingUnit):
125 125
126 126 for nTries in range(self.nTries):
127 127 fullpath = self.searchFilesOnLine(self.path, self.startDate,
128 self.endDate, self.expLabel, self.ext, self.walk,
128 self.endDate, self.expLabel, self.ext, self.walk,
129 129 self.filefmt, self.folderfmt)
130 130
131 131 try:
@@ -138,7 +138,7 class MADReader(Reader, ProcessingUnit):
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 self.delay, self.path, nTries + 1),
141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
@@ -148,7 +148,7 class MADReader(Reader, ProcessingUnit):
148 148
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153
154 154 self.setNextFile()
@@ -212,7 +212,7 class MADReader(Reader, ProcessingUnit):
212 212 if self.ext == '.txt':
213 213 self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
214 214 self.nrecords = self.data.shape[0]
215 self.ranges = numpy.unique(self.data[:,self.parameters.index(self.independentParam.lower())])
215 self.ranges = numpy.unique(self.data[:, self.parameters.index(self.independentParam.lower())])
216 216 self.counter_records = 0
217 217 elif self.ext == '.hdf5':
218 218 self.data = self.fp['Data']
@@ -268,14 +268,14 class MADReader(Reader, ProcessingUnit):
268 268 if self.counter_records == self.nrecords:
269 269 break
270 270 continue
271 self.intervals.add((datatime-self.datatime).seconds)
271 self.intervals.add((datatime - self.datatime).seconds)
272 272 break
273 273 elif self.ext == '.hdf5':
274 274 datatime = datetime.datetime.utcfromtimestamp(
275 275 self.times[self.counter_records])
276 dum = self.data['Table Layout'][self.data['Table Layout']['recno']==self.counter_records]
277 self.intervals.add((datatime-self.datatime).seconds)
278 if datatime.date()>self.datatime.date():
276 dum = self.data['Table Layout'][self.data['Table Layout']['recno'] == self.counter_records]
277 self.intervals.add((datatime - self.datatime).seconds)
278 if datatime.date() > self.datatime.date():
279 279 self.flagDiscontinuousBlock = 1
280 280 self.datatime = datatime
281 281 self.counter_records += 1
@@ -299,11 +299,11 class MADReader(Reader, ProcessingUnit):
299 299 if self.ext == '.txt':
300 300 x = self.parameters.index(param.lower())
301 301 y = self.parameters.index(self.independentParam.lower())
302 ranges = self.buffer[:,y]
303 #if self.ranges.size == ranges.size:
302 ranges = self.buffer[:, y]
303 # if self.ranges.size == ranges.size:
304 304 # continue
305 305 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
306 dummy[index] = self.buffer[:,x]
306 dummy[index] = self.buffer[:, x]
307 307 else:
308 308 ranges = self.buffer[self.independentParam.lower()]
309 309 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
@@ -311,7 +311,7 class MADReader(Reader, ProcessingUnit):
311 311
312 312 if isinstance(value, str):
313 313 if value not in self.independentParam:
314 setattr(self.dataOut, value, dummy.reshape(1,-1))
314 setattr(self.dataOut, value, dummy.reshape(1, -1))
315 315 elif isinstance(value, list):
316 316 self.output[value[0]][value[1]] = dummy
317 317 parameters[value[1]] = param
@@ -382,7 +382,7 Inputs:
382 382 format hdf5, cedar
383 383 blocks number of blocks per file'''
384 384
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict', 'metadata', 'format', 'blocks']
386 386 missing = -32767
387 387
388 388 def __init__(self):
@@ -438,7 +438,7 Inputs:
438 438 Create new cedar file object
439 439 '''
440 440
441 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
441 self.mnemonic = MNEMONICS[self.kinst] # TODO get mnemonic from madrigal
442 442 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
443 443
444 444 filename = '{}{}{}'.format(self.mnemonic,
@@ -499,7 +499,7 Inputs:
499 499 if 'db' in value.lower():
500 500 tmp = getattr(self.dataOut, value.replace('_db', ''))
501 501 SNRavg = numpy.average(tmp, axis=0)
502 tmp = 10*numpy.log10(SNRavg)
502 tmp = 10 * numpy.log10(SNRavg)
503 503 else:
504 504 tmp = getattr(self.dataOut, value)
505 505 out[key] = tmp.flatten()[:len(heights)]
@@ -521,14 +521,14 Inputs:
521 521 startTime.hour,
522 522 startTime.minute,
523 523 startTime.second,
524 startTime.microsecond/10000,
524 startTime.microsecond / 10000,
525 525 endTime.year,
526 526 endTime.month,
527 527 endTime.day,
528 528 endTime.hour,
529 529 endTime.minute,
530 530 endTime.second,
531 endTime.microsecond/10000,
531 endTime.microsecond / 10000,
532 532 list(self.oneDDict.keys()),
533 533 list(self.twoDDict.keys()),
534 534 len(index),
@@ -592,4 +592,4 Inputs:
592 592 def close(self):
593 593
594 594 if self.counter > 0:
595 self.setHeader() No newline at end of file
595 self.setHeader()
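
A minimal sketch, not part of this changeset, of how the MADReader hunk above scatters each record's values onto the full height grid with numpy.in1d; the heights and values below are made up.

import numpy

ranges = numpy.array([100., 105., 110., 115., 120.])   # all heights seen in the file
rec_ranges = numpy.array([105., 115.])                 # heights present in this record
rec_values = numpy.array([1.5, 2.5])

dummy = numpy.full(ranges.size, numpy.nan)             # missing heights keep the fill value
index = numpy.where(numpy.in1d(ranges, rec_ranges))[0]
dummy[index] = rec_values                              # -> [nan, 1.5, nan, 2.5, nan]
row = dummy.reshape(1, -1)                             # shape handed to the dataOut attribute
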
@@ -12,14 +12,14 import cmath
12 12
13 13 class matoffReader(ProcessingUnit):
14 14
15 index=None
16 list=None
17 firsttime=True
18 utccounter=None
19 utcfiletime=None
20 utcmatcounter=0
21 utcfirst=None
22 utclist=None
15 index = None
16 list = None
17 firsttime = True
18 utccounter = None
19 utcfiletime = None
20 utcmatcounter = 0
21 utcfirst = None
22 utclist = None
23 23
24 24 def __init__(self):
25 25 self.dataOut = Spectra()
@@ -28,12 +28,12 class matoffReader(ProcessingUnit):
28 28
29 29 def __setHeader(self, datastuff):
30 30
31 self.dataOut.pairsList=[(0,1)]
32 self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this!
31 self.dataOut.pairsList = [(0, 1)]
32 self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) # this!
34 34 self.dataOut.nIncohInt = 20
35 self.dataOut.nCohInt = 1 #this!
36 self.dataOut.ippSeconds = 0.004 #this!
35 self.dataOut.nCohInt = 1 # this!
36 self.dataOut.ippSeconds = 0.004 # this!
37 37 self.dataOut.nFFTPoints = len(np.array(datastuff.get('vel')).flatten())
38 38 self.dataOut.timeZone = 0
39 39 self.dataOut.heightList = np.array(datastuff.get('hts')).flatten()
@@ -41,21 +41,21 class matoffReader(ProcessingUnit):
41 41 def __readFile(self, currentfile):
42 42 print("Reading from this file:" + currentfile)
43 43
44 #filesplit=currentfile.split("\\")
45 filesplit=currentfile.split("/")
46 newsplit=filesplit[-2]
47 newnewsplit=newsplit.split(".")
48 newnewsplit=[int(i) for i in newnewsplit]
49 gooblist=datetime.datetime(newnewsplit[0],newnewsplit[1],newnewsplit[2],newnewsplit[3],newnewsplit[4],newnewsplit[5])
50 self.utcfirst=(gooblist-datetime.datetime(1970,1,1)).total_seconds()
44 # filesplit=currentfile.split("\\")
45 filesplit = currentfile.split("/")
46 newsplit = filesplit[-2]
47 newnewsplit = newsplit.split(".")
48 newnewsplit = [int(i) for i in newnewsplit]
49 gooblist = datetime.datetime(newnewsplit[0], newnewsplit[1], newnewsplit[2], newnewsplit[3], newnewsplit[4], newnewsplit[5])
50 self.utcfirst = (gooblist - datetime.datetime(1970, 1, 1)).total_seconds()
51 51
52 52
53 newsplit=filesplit[-1]
54 newnewsplit=newsplit.split(".")
55 goobnum=newnewsplit[0]
56 goobnum=int(goobnum)
53 newsplit = filesplit[-1]
54 newnewsplit = newsplit.split(".")
55 goobnum = newnewsplit[0]
56 goobnum = int(goobnum)
57 57
58 self.utcfirst=self.utcfirst+goobnum*2
58 self.utcfirst = self.utcfirst + goobnum * 2
59 59 # if (currentfile[43:]=='0.mat'):
60 60 # self.utcmatcounter=0
61 61 # self.utcfirst=self.utclist[self.index]
@@ -66,26 +66,26 class matoffReader(ProcessingUnit):
66 66 # print self.utcmatcounter
67 67 print(self.utcfirst)
68 68 try:
69 datastuff=sio.loadmat(currentfile)
69 datastuff = sio.loadmat(currentfile)
70 70 except:
71 71 return None, None
72 72
73 dataphase=datastuff.get('phase')
74 data3=datastuff.get('doppler0')
75 data4=datastuff.get('doppler1')
76 data3= np.array(data3)
73 dataphase = datastuff.get('phase')
74 data3 = datastuff.get('doppler0')
75 data4 = datastuff.get('doppler1')
76 data3 = np.array(data3)
77 77 data4 = np.array(data4)
78 datacoh=datastuff.get('coherence2')
78 datacoh = datastuff.get('coherence2')
79 79
80 datacohphase=datacoh*np.exp(-dataphase*1j)
80 datacohphase = datacoh * np.exp(-dataphase * 1j)
81 81 # data31 = np.fliplr(data3)
82 82 # data41 = np.fliplr(data4)
83 83
84 data31 = data3.reshape((1,data3.shape[0],data3.shape[1]))
85 data41 = data4.reshape((1,data4.shape[0],data4.shape[1]))
86 datacohphase1 = datacohphase.reshape((1,datacoh.shape[0],datacoh.shape[1]))
84 data31 = data3.reshape((1, data3.shape[0], data3.shape[1]))
85 data41 = data4.reshape((1, data4.shape[0], data4.shape[1]))
86 datacohphase1 = datacohphase.reshape((1, datacoh.shape[0], datacoh.shape[1]))
87 87
88 datastack = np.vstack((data31,data41))
88 datastack = np.vstack((data31, data41))
89 89
90 90 self.__setHeader(datastuff)
91 91
@@ -94,46 +94,46 class matoffReader(ProcessingUnit):
94 94
95 95 return spc, cspc
96 96
97 def __findFiles(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
97 def __findFiles(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
98 98
99 99 if startDate == None:
100 startDate = datetime.date(1970,1,1)
100 startDate = datetime.date(1970, 1, 1)
101 101
102 102 if endDate == None:
103 endDate = datetime.date(2050,1,1)
103 endDate = datetime.date(2050, 1, 1)
104 104
105 startsearch1=datetime.datetime.combine(startDate,startTime)
106 startsearch2=(startsearch1-datetime.datetime(1970,1,1)).total_seconds()
107 endsearch1=datetime.datetime.combine(endDate,endTime)
108 endsearch2=(endsearch1-datetime.datetime(1970,1,1)).total_seconds()
105 startsearch1 = datetime.datetime.combine(startDate, startTime)
106 startsearch2 = (startsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
107 endsearch1 = datetime.datetime.combine(endDate, endTime)
108 endsearch2 = (endsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
109 109
110 110 dirList = listdir(path)
111 111 dirList = sorted(dirList)
112 112
113 dirListFiltered=[]
114 fileListFiltered=[]
115 utclist=[]
113 dirListFiltered = []
114 fileListFiltered = []
115 utclist = []
116 116
117 117 if not dirList:
118 118 print("No directories found")
119 119 return []
120 120
121 #if self.online:
121 # if self.online:
122 122 # dirList= [dirList[-1]]
123 123
124 124 if self.online:
125 125 currentdate = datetime.datetime.now()
126 strsplit1=currentdate.strftime('%Y.%m.%d')
127 dirList = fnmatch.filter(dirList,strsplit1+'*')
126 strsplit1 = currentdate.strftime('%Y.%m.%d')
127 dirList = fnmatch.filter(dirList, strsplit1 + '*')
128 128
129 129 for thisDir in dirList:
130 130 if not os.path.isdir(os.path.join(path, thisDir)):
131 131 continue
132 132
133 strsplit=thisDir.split('.')
134 timeints=[int(i) for i in strsplit]
135 timelist=datetime.datetime(timeints[0],timeints[1],timeints[2],timeints[3],timeints[4],timeints[5])
136 utctime=(timelist-datetime.datetime(1970,1,1)).total_seconds()
133 strsplit = thisDir.split('.')
134 timeints = [int(i) for i in strsplit]
135 timelist = datetime.datetime(timeints[0], timeints[1], timeints[2], timeints[3], timeints[4], timeints[5])
136 utctime = (timelist - datetime.datetime(1970, 1, 1)).total_seconds()
137 137
138 138 if not self.online:
139 139 if (utctime > endsearch2):
@@ -159,7 +159,7 class matoffReader(ProcessingUnit):
159 159 continue
160 160
161 161 for k in range(len(fileList)):
162 thisFile = str(k)+'.mat'
162 thisFile = str(k) + '.mat'
163 163
164 164 if not os.path.isfile(os.path.join(pathFile, thisFile)):
165 165 continue
@@ -168,7 +168,7 class matoffReader(ProcessingUnit):
168 168
169 169 return fileListFiltered
170 170
171 def __getNextOnlineFile(self, seconds = 40):
171 def __getNextOnlineFile(self, seconds=40):
172 172
173 173 filename = self.__getNextOfflineFile()
174 174
@@ -188,7 +188,7 class matoffReader(ProcessingUnit):
188 188 if nTries > 3:
189 189 break
190 190
191 print("Waiting %d seconds ..." %seconds)
191 print("Waiting %d seconds ..." % seconds)
192 192 time.sleep(40)
193 193
194 194 if not (len(filelist) > ncurrentfiles):
@@ -204,7 +204,7 class matoffReader(ProcessingUnit):
204 204 if self.index >= len(self.fileList):
205 205 return None
206 206
207 filename=self.fileList[self.index]
207 filename = self.fileList[self.index]
208 208 self.index += 1
209 209 return filename
210 210
@@ -216,12 +216,12 class matoffReader(ProcessingUnit):
216 216 filename = self.__getNextOfflineFile()
217 217 return filename
218 218
219 def setup(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
219 def setup(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
220 220
221 221 fileList = self.__findFiles(path, startDate, endDate, startTime, endTime)
222 222
223 223 if self.online:
224 self.index = len(fileList) -1
224 self.index = len(fileList) - 1
225 225 else:
226 226 self.index = 0
227 227
@@ -229,24 +229,24 class matoffReader(ProcessingUnit):
229 229
230 230 print("fin setup")
231 231
232 def run(self,path=None,startDate=None, endDate=None,
233 startTime=datetime.time(0,0,0),
234 endTime=datetime.time(23,59,59),
235 walk=True,timezone='ut',
236 all=0,online=False,ext=None,**kwargs):
237
238 self.path=path
239 self.ext=ext
240 self.startDate=startDate
241 self.endDate=endDate
242 self.startTime=startTime
243 self.endTime=endTime
232 def run(self, path=None, startDate=None, endDate=None,
233 startTime=datetime.time(0, 0, 0),
234 endTime=datetime.time(23, 59, 59),
235 walk=True, timezone='ut',
236 all=0, online=False, ext=None, **kwargs):
237
238 self.path = path
239 self.ext = ext
240 self.startDate = startDate
241 self.endDate = endDate
242 self.startTime = startTime
243 self.endTime = endTime
244 244 self.online = online
245 245 self.dataOut.flagNoData = True
246 246
247 if (self.firsttime==True):
247 if (self.firsttime == True):
248 248 self.setup(path, startDate, endDate, startTime, endTime)
249 self.firsttime=False
249 self.firsttime = False
250 250
251 251
252 252 if not self.fileList:
@@ -262,7 +262,7 class matoffReader(ProcessingUnit):
262 262
263 263 spc, cspc = self.__readFile(currentfile)
264 264
265 if spc!=None:
265 if spc != None:
266 266
267 267 self.dataOut.data_spc = spc
268 268 self.dataOut.data_cspc = cspc
@@ -270,4 +270,4 class matoffReader(ProcessingUnit):
270 270 self.dataOut.flagNoData = False
271 271
272 272 return 1
273 No newline at end of file
273
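
A minimal sketch, not part of this changeset, of the timestamp reconstruction in matoffReader.__readFile() above: the directory name encodes the start time as "YYYY.MM.DD.HH.MM.SS" and each N.mat file inside it is taken to begin N*2 seconds later. The directory name and file number below are made up.

import datetime

dirname = "2015.07.14.12.00.00"
filenum = 3                                   # taken from a file named "3.mat"

parts = [int(p) for p in dirname.split(".")]
start = datetime.datetime(*parts)
utcfirst = (start - datetime.datetime(1970, 1, 1)).total_seconds() + filenum * 2
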
@@ -23,9 +23,9 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jrodata import Spectra
26 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
26 # from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 #from schainpy.model.io.jroIO_bltr import BLTRReader
28 # from schainpy.model.io.jroIO_bltr import BLTRReader
29 29 from numpy import imag, shape, NaN, empty
30 30
31 31
@@ -315,7 +315,7 SRVI_HEADER = numpy.dtype([
315 315
316 316
317 317 class SRVIHeader(Header):
318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
319 319
320 320 self.SignatureSRVI1 = SignatureSRVI1
321 321 self.SizeOfDataBlock1 = SizeOfDataBlock1
@@ -338,34 +338,34 class SRVIHeader(Header):
338 338
339 339 SRVI_STRUCTURE = numpy.dtype([
340 340 ('frame_cnt', '<u4'),
341 ('time_t', '<u4'), #
342 ('tpow', '<f4'), #
343 ('npw1', '<f4'), #
344 ('npw2', '<f4'), #
345 ('cpw1', '<f4'), #
346 ('pcw2', '<f4'), #
347 ('ps_err', '<u4'), #
348 ('te_err', '<u4'), #
349 ('rc_err', '<u4'), #
350 ('grs1', '<u4'), #
351 ('grs2', '<u4'), #
352 ('azipos', '<f4'), #
353 ('azivel', '<f4'), #
354 ('elvpos', '<f4'), #
355 ('elvvel', '<f4'), #
341 ('time_t', '<u4'), #
342 ('tpow', '<f4'), #
343 ('npw1', '<f4'), #
344 ('npw2', '<f4'), #
345 ('cpw1', '<f4'), #
346 ('pcw2', '<f4'), #
347 ('ps_err', '<u4'), #
348 ('te_err', '<u4'), #
349 ('rc_err', '<u4'), #
350 ('grs1', '<u4'), #
351 ('grs2', '<u4'), #
352 ('azipos', '<f4'), #
353 ('azivel', '<f4'), #
354 ('elvpos', '<f4'), #
355 ('elvvel', '<f4'), #
356 356 ('northAngle', '<f4'),
357 ('microsec', '<u4'), #
357 ('microsec', '<u4'), #
358 358 ('azisetvel', '<f4'), #
359 359 ('elvsetpos', '<f4'), #
360 ('RadarConst', '<f4'), ]) #
360 ('RadarConst', '<f4'), ]) #
361 361
362 362
363 363 class RecordHeader(Header):
364 364
365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
369 369
370 370 self.frame_cnt = frame_cnt
371 371 self.dwell = time_t
@@ -396,44 +396,44 class RecordHeader(Header):
396 396
397 397 # startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
398 398
399 #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 #startFp.seek(OffRHeader, os.SEEK_SET)
399 # OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 # startFp.seek(OffRHeader, os.SEEK_SET)
401 401
402 402 # print 'Posicion del bloque: ',OffRHeader
403 403
404 404 header = numpy.fromfile(fp, SRVI_STRUCTURE, 1)
405 405
406 406 self.frame_cnt = header['frame_cnt'][0]
407 self.time_t = header['time_t'][0] #
408 self.tpow = header['tpow'][0] #
409 self.npw1 = header['npw1'][0] #
410 self.npw2 = header['npw2'][0] #
411 self.cpw1 = header['cpw1'][0] #
412 self.pcw2 = header['pcw2'][0] #
413 self.ps_err = header['ps_err'][0] #
414 self.te_err = header['te_err'][0] #
415 self.rc_err = header['rc_err'][0] #
416 self.grs1 = header['grs1'][0] #
417 self.grs2 = header['grs2'][0] #
418 self.azipos = header['azipos'][0] #
419 self.azivel = header['azivel'][0] #
420 self.elvpos = header['elvpos'][0] #
421 self.elvvel = header['elvvel'][0] #
422 self.northAngle = header['northAngle'][0] #
423 self.microsec = header['microsec'][0] #
424 self.azisetvel = header['azisetvel'][0] #
425 self.elvsetpos = header['elvsetpos'][0] #
426 self.RadarConst = header['RadarConst'][0] #
407 self.time_t = header['time_t'][0] #
408 self.tpow = header['tpow'][0] #
409 self.npw1 = header['npw1'][0] #
410 self.npw2 = header['npw2'][0] #
411 self.cpw1 = header['cpw1'][0] #
412 self.pcw2 = header['pcw2'][0] #
413 self.ps_err = header['ps_err'][0] #
414 self.te_err = header['te_err'][0] #
415 self.rc_err = header['rc_err'][0] #
416 self.grs1 = header['grs1'][0] #
417 self.grs2 = header['grs2'][0] #
418 self.azipos = header['azipos'][0] #
419 self.azivel = header['azivel'][0] #
420 self.elvpos = header['elvpos'][0] #
421 self.elvvel = header['elvvel'][0] #
422 self.northAngle = header['northAngle'][0] #
423 self.microsec = header['microsec'][0] #
424 self.azisetvel = header['azisetvel'][0] #
425 self.elvsetpos = header['elvsetpos'][0] #
426 self.RadarConst = header['RadarConst'][0] #
427 427 # 84
428 428
429 429 # print 'Pointer fp RECheader', fp.tell()
430 430
431 #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
431 # self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
432 432
433 #self.RHsize = 180+20*self.nChannels
434 #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
433 # self.RHsize = 180+20*self.nChannels
434 # self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
435 435 # print 'Datasize',self.Datasize
436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
436 # endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
437 437
438 438 print('==============================================')
439 439
@@ -626,11 +626,11 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
626 626
627 627 self.Num_inCoh = self.fheader.PPARavc
628 628 self.dataOut.PRF = self.fheader.PPARprf
629 self.dataOut.frequency = 34.85 * 10**9
629 self.dataOut.frequency = 34.85 * 10 ** 9
630 630 self.Lambda = SPEED_OF_LIGHT / self.dataOut.frequency
631 631 self.dataOut.ippSeconds = 1. / float(self.dataOut.PRF)
632 632
633 pulse_width = self.fheader.PPARpdr * 10**-9
633 pulse_width = self.fheader.PPARpdr * 10 ** -9
634 634 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
635 635
636 636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins, 2))
@@ -790,11 +790,11 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
790 790 # print 'SHAPE', self.dataOut_spc.shape
791 791 # For nyquist correction:
792 792 # fix = 20 # ~3m/s
793 #shift = self.Num_Bins/2 + fix
794 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
793 # shift = self.Num_Bins/2 + fix
794 # self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
795 795
796 796 '''Block Reading, the Block Data is received and Reshape is used to give it
797 797 shape.
798 798 '''
799 799
800 self.PointerReader = self.fp.tell() No newline at end of file
800 self.PointerReader = self.fp.tell()
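
A minimal sketch, not part of this changeset, of the timing and range constants derived for the MIRA35c reader above; the PRF and pulse width are illustrative, not read from a file header.

SPEED_OF_LIGHT = 299792458.0          # m/s

PRF = 5000.0                          # PPARprf, pulse repetition frequency (Hz), assumed
pulse_width = 200 * 10 ** -9          # PPARpdr is given in nanoseconds, assumed

ippSeconds = 1. / PRF                               # time between pulses
deltaHeight = 0.5 * SPEED_OF_LIGHT * pulse_width    # range-gate spacing (about 30 m here)
frequency = 34.85 * 10 ** 9                         # Ka-band carrier used above (Hz)
Lambda = SPEED_OF_LIGHT / frequency                 # wavelength (about 8.6 mm)
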
@@ -105,7 +105,7 class HDFReader(Reader, ProcessingUnit):
105 105
106 106 for nTries in range(self.nTries):
107 107 fullpath = self.searchFilesOnLine(self.path, self.startDate,
108 self.endDate, self.expLabel, self.ext, self.walk,
108 self.endDate, self.expLabel, self.ext, self.walk,
109 109 self.filefmt, self.folderfmt)
110 110 try:
111 111 fullpath = next(fullpath)
@@ -117,7 +117,7 class HDFReader(Reader, ProcessingUnit):
117 117
118 118 log.warning(
119 119 'Waiting {} sec for a valid file in {}: try {} ...'.format(
120 self.delay, self.path, nTries + 1),
120 self.delay, self.path, nTries + 1),
121 121 self.name)
122 122 time.sleep(self.delay)
123 123
@@ -131,7 +131,7 class HDFReader(Reader, ProcessingUnit):
131 131 self.set = int(filename[8:11]) - 1
132 132 else:
133 133 log.log("Searching files in {}".format(self.path), self.name)
134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 135 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
136 136
137 137 self.setNextFile()
@@ -346,11 +346,11 class HDFWriter(Operation):
346 346 setFile = None
347 347 fp = None
348 348 firsttime = True
349 #Configurations
349 # Configurations
350 350 blocksPerFile = None
351 351 blockIndex = None
352 352 dataOut = None
353 #Data Arrays
353 # Data Arrays
354 354 dataList = None
355 355 metadataList = None
356 356 currentDay = None
@@ -411,11 +411,11 class HDFWriter(Operation):
411 411
412 412 timeDiff = currentTime - self.lastTime
413 413
414 #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
414 # Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
415 415 if dataDay != self.currentDay:
416 416 self.currentDay = dataDay
417 417 return True
418 elif timeDiff > 3*60*60:
418 elif timeDiff > 3 * 60 * 60:
419 419 self.lastTime = currentTime
420 420 return True
421 421 else:
@@ -427,7 +427,7 class HDFWriter(Operation):
427 427
428 428 self.dataOut = dataOut
429 429 if not(self.isConfig):
430 self.setup(path=path, blocksPerFile=blocksPerFile,
430 self.setup(path=path, blocksPerFile=blocksPerFile,
431 431 metadataList=metadataList, dataList=dataList,
432 432 setType=setType, description=description)
433 433
@@ -444,27 +444,27 class HDFWriter(Operation):
444 444 setFile = self.setFile
445 445
446 446 timeTuple = time.localtime(self.dataOut.utctime)
447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
448 448 fullpath = os.path.join(path, subfolder)
449 449
450 450 if os.path.exists(fullpath):
451 451 filesList = os.listdir(fullpath)
452 452 filesList = [k for k in filesList if k.startswith(self.optchar)]
453 if len( filesList ) > 0:
453 if len(filesList) > 0:
454 454 filesList = sorted(filesList, key=str.lower)
455 455 filen = filesList[-1]
456 456 # el filename debera tener el siguiente formato
457 457 # 0 1234 567 89A BCDE (hex)
458 458 # x YYYY DDD SSS .ext
459 459 if isNumber(filen[8:11]):
460 setFile = int(filen[8:11]) #inicializo mi contador de seteo al seteo del ultimo file
460 setFile = int(filen[8:11]) # inicializo mi contador de seteo al seteo del ultimo file
461 461 else:
462 462 setFile = -1
463 463 else:
464 setFile = -1 #inicializo mi contador de seteo
464 setFile = -1 # inicializo mi contador de seteo
465 465 else:
466 466 os.makedirs(fullpath)
467 setFile = -1 #inicializo mi contador de seteo
467 setFile = -1 # inicializo mi contador de seteo
468 468
469 469 if self.setType is None:
470 470 setFile += 1
@@ -472,22 +472,22 class HDFWriter(Operation):
472 472 timeTuple.tm_year,
473 473 timeTuple.tm_yday,
474 474 setFile,
475 ext )
475 ext)
476 476 else:
477 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
477 setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
478 478 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
479 479 timeTuple.tm_year,
480 480 timeTuple.tm_yday,
481 481 setFile,
482 ext )
482 ext)
483 483
484 self.filename = os.path.join( path, subfolder, file )
484 self.filename = os.path.join(path, subfolder, file)
485 485
486 #Setting HDF5 File
486 # Setting HDF5 File
487 487 self.fp = h5py.File(self.filename, 'w')
488 #write metadata
488 # write metadata
489 489 self.writeMetadata(self.fp)
490 #Write data
490 # Write data
491 491 self.writeData(self.fp)
492 492
493 493 def getLabel(self, name, x=None):
@@ -563,9 +563,9 class HDFWriter(Operation):
563 563 for dsInfo in self.dsList:
564 564 if dsInfo['nDim'] == 0:
565 565 ds = grp.create_dataset(
566 self.getLabel(dsInfo['variable']),
567 (self.blocksPerFile, ),
568 chunks=True,
566 self.getLabel(dsInfo['variable']),
567 (self.blocksPerFile,),
568 chunks=True,
569 569 dtype=numpy.float64)
570 570 dtsets.append(ds)
571 571 data.append((dsInfo['variable'], -1))
@@ -577,8 +577,8 class HDFWriter(Operation):
577 577 sgrp = grp
578 578 for i in range(dsInfo['dsNumber']):
579 579 ds = sgrp.create_dataset(
580 self.getLabel(dsInfo['variable'], i),
581 (self.blocksPerFile, ) + dsInfo['shape'][1:],
580 self.getLabel(dsInfo['variable'], i),
581 (self.blocksPerFile,) + dsInfo['shape'][1:],
582 582 chunks=True,
583 583 dtype=dsInfo['dtype'])
584 584 dtsets.append(ds)
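
Review note: beyond re-indentation, the pattern in both hunks is that every requested variable gets an HDF5 dataset whose leading axis has length blocksPerFile, so each processed block is written to one index along that axis. A minimal h5py sketch of the same layout; the file name, group name and shapes below are made up for illustration:

    import numpy
    import h5py

    blocks_per_file, n_heights = 100, 64

    with h5py.File('example_block_layout.hdf5', 'w') as fp:
        grp = fp.create_group('Data')
        # scalar-per-block variable -> shape (blocksPerFile,)
        ds0 = grp.create_dataset('utctime', (blocks_per_file,), chunks=True, dtype=numpy.float64)
        # array-per-block variable -> shape (blocksPerFile,) + data.shape[1:]
        ds1 = grp.create_dataset('power', (blocks_per_file, n_heights), chunks=True, dtype=numpy.float32)
        ds0[0] = 1.6e9                       # write block 0
        ds1[0] = numpy.zeros(n_heights)
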
@@ -1,47 +1,47
1 import numpy,math,random,time
1 import numpy, math, random, time
2 2 #---------------1 Heredamos JRODatareader
3 3 from schainpy.model.io.jroIO_base import *
4 4 #---------------2 Heredamos las propiedades de ProcessingUnit
5 from schainpy.model.proc.jroproc_base import ProcessingUnit,Operation,MPDecorator
5 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
6 6 #---------------3 Importaremos las clases BascicHeader, SystemHeader, RadarControlHeader, ProcessingHeader
7 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader,SystemHeader,RadarControllerHeader, ProcessingHeader
7 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
8 8 #---------------4 Importaremos el objeto Voltge
9 9 from schainpy.model.data.jrodata import Voltage
10 10
11 11 class SimulatorReader(JRODataReader, ProcessingUnit):
12 incIntFactor = 1
13 nFFTPoints = 0
14 FixPP_IncInt = 1
15 FixRCP_IPP = 1000
16 FixPP_CohInt = 1
17 Tau_0 = 250
18 AcqH0_0 = 70
19 H0 = AcqH0_0
20 AcqDH_0 = 1.25
21 DH0 = AcqDH_0
22 Bauds = 32
23 BaudWidth = None
24 FixRCP_TXA = 40
25 FixRCP_TXB = 70
26 fAngle = 2.0*math.pi*(1/16)
27 DC_level = 500
28 stdev = 8
29 Num_Codes = 2
30 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
31 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
32 #Dyn_snCode = numpy.array([Num_Codes,Bauds])
33 Dyn_snCode = None
34 Samples = 200
35 channels = 2
36 pulses = None
37 Reference = None
38 pulse_size = None
39 prof_gen = None
40 Fdoppler = 100
41 Hdoppler = 36
42 Adoppler = 300
43 frequency = 9345
44 nTotalReadFiles = 1000
12 incIntFactor = 1
13 nFFTPoints = 0
14 FixPP_IncInt = 1
15 FixRCP_IPP = 1000
16 FixPP_CohInt = 1
17 Tau_0 = 250
18 AcqH0_0 = 70
19 H0 = AcqH0_0
20 AcqDH_0 = 1.25
21 DH0 = AcqDH_0
22 Bauds = 32
23 BaudWidth = None
24 FixRCP_TXA = 40
25 FixRCP_TXB = 70
26 fAngle = 2.0 * math.pi * (1 / 16)
27 DC_level = 500
28 stdev = 8
29 Num_Codes = 2
30 # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
31 # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
32 # Dyn_snCode = numpy.array([Num_Codes,Bauds])
33 Dyn_snCode = None
34 Samples = 200
35 channels = 2
36 pulses = None
37 Reference = None
38 pulse_size = None
39 prof_gen = None
40 Fdoppler = 100
41 Hdoppler = 36
42 Adoppler = 300
43 frequency = 9345
44 nTotalReadFiles = 1000
45 45
46 46 def __init__(self):
47 47 """
@@ -56,19 +56,19 class SimulatorReader(JRODataReader, ProcessingUnit):
56 56 ProcessingUnit.__init__(self)
57 57 print(" [ START ] init - Metodo Simulator Reader")
58 58
59 self.isConfig = False
60 self.basicHeaderObj = BasicHeader(LOCALTIME)
61 self.systemHeaderObj = SystemHeader()
62 self.radarControllerHeaderObj = RadarControllerHeader()
63 self.processingHeaderObj = ProcessingHeader()
64 self.profileIndex = 2**32-1
65 self.dataOut = Voltage()
66 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
67 code0 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1])
68 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
69 code1 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1])
70 #self.Dyn_snCode = numpy.array([code0,code1])
71 self.Dyn_snCode = None
59 self.isConfig = False
60 self.basicHeaderObj = BasicHeader(LOCALTIME)
61 self.systemHeaderObj = SystemHeader()
62 self.radarControllerHeaderObj = RadarControllerHeader()
63 self.processingHeaderObj = ProcessingHeader()
64 self.profileIndex = 2 ** 32 - 1
65 self.dataOut = Voltage()
66 # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
67 code0 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, 1])
68 # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
69 code1 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1])
70 # self.Dyn_snCode = numpy.array([code0,code1])
71 self.Dyn_snCode = None
72 72
73 73 def set_kwargs(self, **kwargs):
74 74 for key, value in kwargs.items():
@@ -76,13 +76,13 class SimulatorReader(JRODataReader, ProcessingUnit):
76 76
77 77 def __hasNotDataInBuffer(self):
78 78
79 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock* self.nTxs:
80 if self.nReadBlocks>0:
81 tmp = self.dataOut.utctime
82 tmp_utc = int(self.dataOut.utctime)
83 tmp_milisecond = int((tmp-tmp_utc)*1000)
84 self.basicHeaderObj.utc = tmp_utc
85 self.basicHeaderObj.miliSecond= tmp_milisecond
79 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
80 if self.nReadBlocks > 0:
81 tmp = self.dataOut.utctime
82 tmp_utc = int(self.dataOut.utctime)
83 tmp_milisecond = int((tmp - tmp_utc) * 1000)
84 self.basicHeaderObj.utc = tmp_utc
85 self.basicHeaderObj.miliSecond = tmp_milisecond
86 86 return 1
87 87 return 0
88 88
@@ -90,14 +90,14 class SimulatorReader(JRODataReader, ProcessingUnit):
90 90 """Set the next file to be readed open it and parse de file header"""
91 91
92 92 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
93 self.nReadFiles=self.nReadFiles+1
93 self.nReadFiles = self.nReadFiles + 1
94 94 if self.nReadFiles > self.nTotalReadFiles:
95 self.flagNoMoreFiles=1
95 self.flagNoMoreFiles = 1
96 96 raise schainpy.admin.SchainWarning('No more files to read')
97 97
98 print('------------------- [Opening file] ------------------------------',self.nReadFiles)
99 self.nReadBlocks = 0
100 #if self.nReadBlocks==0:
98 print('------------------- [Opening file] ------------------------------', self.nReadFiles)
99 self.nReadBlocks = 0
100 # if self.nReadBlocks==0:
101 101 # self.readFirstHeader()
102 102
103 103 def __setNewBlock(self):
@@ -113,43 +113,43 class SimulatorReader(JRODataReader, ProcessingUnit):
113 113 self.getBasicHeader()
114 114 break
115 115 if self.verbose:
116 print("[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
116 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
117 117 self.processingHeaderObj.dataBlocksPerFile,
118 self.dataOut.datatime.ctime()) )
118 self.dataOut.datatime.ctime()))
119 119 return 1
120 120
121 121 def getFirstHeader(self):
122 122 self.getBasicHeader()
123 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
124 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
123 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
124 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
125 125 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
126 self.dataOut.dtype = self.dtype
126 self.dataOut.dtype = self.dtype
127 127
128 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
129 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
128 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
129 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
130 130 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
131 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
131 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
132 132 # asumo q la data no esta decodificada
133 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
133 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
134 134 # asumo q la data no esta sin flip
135 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
136 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
137 self.dataOut.frequency = self.frequency
135 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
136 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
137 self.dataOut.frequency = self.frequency
138 138
139 139 def getBasicHeader(self):
140 140 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
141 141 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
142 142
143 143 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
144 self.dataOut.timeZone = self.basicHeaderObj.timeZone
145 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
146 self.dataOut.errorCount = self.basicHeaderObj.errorCount
147 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
148 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
144 self.dataOut.timeZone = self.basicHeaderObj.timeZone
145 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
146 self.dataOut.errorCount = self.basicHeaderObj.errorCount
147 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
148 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
149 149
150 150 def readFirstHeader(self):
151 151
152 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
152 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
153 153 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
154 154 if datatype == 0:
155 155 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -169,53 +169,53 class SimulatorReader(JRODataReader, ProcessingUnit):
169 169 self.dtype = datatype_str
170 170
171 171
172 def set_RCH(self, expType=2, nTx=1,ipp=None, txA=0, txB=0,
172 def set_RCH(self, expType=2, nTx=1, ipp=None, txA=0, txB=0,
173 173 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
174 174 numTaus=0, line6Function=0, line5Function=0, fClock=None,
175 175 prePulseBefore=0, prePulseAfter=0,
176 176 codeType=0, nCode=0, nBaud=0, code=None,
177 flip1=0, flip2=0,Taus=0):
178 self.radarControllerHeaderObj.expType = expType
179 self.radarControllerHeaderObj.nTx = nTx
180 self.radarControllerHeaderObj.ipp = float(ipp)
181 self.radarControllerHeaderObj.txA = float(txA)
182 self.radarControllerHeaderObj.txB = float(txB)
183 self.radarControllerHeaderObj.rangeIpp = b'A\n'#ipp
184 self.radarControllerHeaderObj.rangeTxA = b''
185 self.radarControllerHeaderObj.rangeTxB = b''
186
187 self.radarControllerHeaderObj.nHeights = int(nHeights)
188 self.radarControllerHeaderObj.firstHeight = numpy.array([firstHeight])
189 self.radarControllerHeaderObj.deltaHeight = numpy.array([deltaHeight])
190 self.radarControllerHeaderObj.samplesWin = numpy.array([nHeights])
191
192
193 self.radarControllerHeaderObj.nWindows = nWindows
194 self.radarControllerHeaderObj.numTaus = numTaus
195 self.radarControllerHeaderObj.codeType = codeType
177 flip1=0, flip2=0, Taus=0):
178 self.radarControllerHeaderObj.expType = expType
179 self.radarControllerHeaderObj.nTx = nTx
180 self.radarControllerHeaderObj.ipp = float(ipp)
181 self.radarControllerHeaderObj.txA = float(txA)
182 self.radarControllerHeaderObj.txB = float(txB)
183 self.radarControllerHeaderObj.rangeIpp = b'A\n' # ipp
184 self.radarControllerHeaderObj.rangeTxA = b''
185 self.radarControllerHeaderObj.rangeTxB = b''
186
187 self.radarControllerHeaderObj.nHeights = int(nHeights)
188 self.radarControllerHeaderObj.firstHeight = numpy.array([firstHeight])
189 self.radarControllerHeaderObj.deltaHeight = numpy.array([deltaHeight])
190 self.radarControllerHeaderObj.samplesWin = numpy.array([nHeights])
191
192
193 self.radarControllerHeaderObj.nWindows = nWindows
194 self.radarControllerHeaderObj.numTaus = numTaus
195 self.radarControllerHeaderObj.codeType = codeType
196 196 self.radarControllerHeaderObj.line6Function = line6Function
197 197 self.radarControllerHeaderObj.line5Function = line5Function
198 #self.radarControllerHeaderObj.fClock = fClock
199 self.radarControllerHeaderObj.prePulseBefore= prePulseBefore
198 # self.radarControllerHeaderObj.fClock = fClock
199 self.radarControllerHeaderObj.prePulseBefore = prePulseBefore
200 200 self.radarControllerHeaderObj.prePulseAfter = prePulseAfter
201 201
202 self.radarControllerHeaderObj.flip1 = flip1
203 self.radarControllerHeaderObj.flip2 = flip2
202 self.radarControllerHeaderObj.flip1 = flip1
203 self.radarControllerHeaderObj.flip2 = flip2
204 204
205 self.radarControllerHeaderObj.code_size = 0
206 if self.radarControllerHeaderObj.codeType != 0:
207 self.radarControllerHeaderObj.nCode = nCode
208 self.radarControllerHeaderObj.nBaud = nBaud
209 self.radarControllerHeaderObj.code = code
210 self.radarControllerHeaderObj.code_size = int(numpy.ceil(nBaud / 32.)) * nCode * 4
205 self.radarControllerHeaderObj.code_size = 0
206 if self.radarControllerHeaderObj.codeType != 0:
207 self.radarControllerHeaderObj.nCode = nCode
208 self.radarControllerHeaderObj.nBaud = nBaud
209 self.radarControllerHeaderObj.code = code
210 self.radarControllerHeaderObj.code_size = int(numpy.ceil(nBaud / 32.)) * nCode * 4
211 211
212 212 if fClock is None and deltaHeight is not None:
213 213 self.fClock = 0.15 / (deltaHeight * 1e-6)
214 self.radarControllerHeaderObj.fClock = self.fClock
215 if numTaus==0:
216 self.radarControllerHeaderObj.Taus = numpy.array(0,'<f4')
214 self.radarControllerHeaderObj.fClock = self.fClock
215 if numTaus == 0:
216 self.radarControllerHeaderObj.Taus = numpy.array(0, '<f4')
217 217 else:
218 self.radarControllerHeaderObj.Taus = numpy.array(Taus,'<f4')
218 self.radarControllerHeaderObj.Taus = numpy.array(Taus, '<f4')
219 219
220 220 def set_PH(self, dtype=0, blockSize=0, profilesPerBlock=0,
221 221 dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
@@ -224,177 +224,177 class SimulatorReader(JRODataReader, ProcessingUnit):
224 224 code=0, nBaud=None, shif_fft=False, flag_dc=False,
225 225 flag_cspc=False, flag_decode=False, flag_deflip=False):
226 226
227 self.processingHeaderObj.dtype = dtype
228 self.processingHeaderObj.profilesPerBlock = profilesPerBlock
227 self.processingHeaderObj.dtype = dtype
228 self.processingHeaderObj.profilesPerBlock = profilesPerBlock
229 229 self.processingHeaderObj.dataBlocksPerFile = dataBlocksPerFile
230 self.processingHeaderObj.nWindows = nWindows
231 self.processingHeaderObj.processFlags = processFlags
232 self.processingHeaderObj.nCohInt = nCohInt
233 self.processingHeaderObj.nIncohInt = nIncohInt
234 self.processingHeaderObj.totalSpectra = totalSpectra
235
236 self.processingHeaderObj.nHeights = int(nHeights)
237 self.processingHeaderObj.firstHeight = firstHeight#numpy.array([firstHeight])#firstHeight
238 self.processingHeaderObj.deltaHeight = deltaHeight#numpy.array([deltaHeight])#deltaHeight
239 self.processingHeaderObj.samplesWin = nHeights#numpy.array([nHeights])#nHeights
240
241 def set_BH(self, utc = 0, miliSecond = 0, timeZone = 0):
242 self.basicHeaderObj.utc = utc
243 self.basicHeaderObj.miliSecond = miliSecond
244 self.basicHeaderObj.timeZone = timeZone
230 self.processingHeaderObj.nWindows = nWindows
231 self.processingHeaderObj.processFlags = processFlags
232 self.processingHeaderObj.nCohInt = nCohInt
233 self.processingHeaderObj.nIncohInt = nIncohInt
234 self.processingHeaderObj.totalSpectra = totalSpectra
235
236 self.processingHeaderObj.nHeights = int(nHeights)
237 self.processingHeaderObj.firstHeight = firstHeight # numpy.array([firstHeight])#firstHeight
238 self.processingHeaderObj.deltaHeight = deltaHeight # numpy.array([deltaHeight])#deltaHeight
239 self.processingHeaderObj.samplesWin = nHeights # numpy.array([nHeights])#nHeights
240
241 def set_BH(self, utc=0, miliSecond=0, timeZone=0):
242 self.basicHeaderObj.utc = utc
243 self.basicHeaderObj.miliSecond = miliSecond
244 self.basicHeaderObj.timeZone = timeZone
245 245
246 246 def set_SH(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=32):
247 #self.systemHeaderObj.size = size
248 self.systemHeaderObj.nSamples = nSamples
249 self.systemHeaderObj.nProfiles = nProfiles
250 self.systemHeaderObj.nChannels = nChannels
251 self.systemHeaderObj.adcResolution = adcResolution
247 # self.systemHeaderObj.size = size
248 self.systemHeaderObj.nSamples = nSamples
249 self.systemHeaderObj.nProfiles = nProfiles
250 self.systemHeaderObj.nChannels = nChannels
251 self.systemHeaderObj.adcResolution = adcResolution
252 252 self.systemHeaderObj.pciDioBusWidth = pciDioBusWidth
253 253
254 254 def init_acquisition(self):
255 255
256 256 if self.nFFTPoints != 0:
257 self.incIntFactor = m_nProfilesperBlock/self.nFFTPoints
257 self.incIntFactor = m_nProfilesperBlock / self.nFFTPoints
258 258 if (self.FixPP_IncInt > self.incIntFactor):
259 self.incIntFactor = self.FixPP_IncInt/ self.incIntFactor
260 elif(self.FixPP_IncInt< self.incIntFactor):
259 self.incIntFactor = self.FixPP_IncInt / self.incIntFactor
260 elif(self.FixPP_IncInt < self.incIntFactor):
261 261 print("False alert...")
262 262
263 ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
263 ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
264 264
265 self.timeperblock =int(((self.FixRCP_IPP
266 *ProfilesperBlock
267 *self.FixPP_CohInt
268 *self.incIntFactor)
269 /150.0)
270 *0.9
271 +0.5)
265 self.timeperblock = int(((self.FixRCP_IPP
266 * ProfilesperBlock
267 * self.FixPP_CohInt
268 * self.incIntFactor)
269 / 150.0)
270 * 0.9
271 + 0.5)
272 272 # para cada canal
273 self.profiles = ProfilesperBlock*self.FixPP_CohInt
274 self.profiles = ProfilesperBlock
275 self.Reference = int((self.Tau_0-self.AcqH0_0)/(self.AcqDH_0)+0.5)
276 self.BaudWidth = int((self.FixRCP_TXA/self.AcqDH_0)/self.Bauds + 0.5 )
273 self.profiles = ProfilesperBlock * self.FixPP_CohInt
274 self.profiles = ProfilesperBlock
275 self.Reference = int((self.Tau_0 - self.AcqH0_0) / (self.AcqDH_0) + 0.5)
276 self.BaudWidth = int((self.FixRCP_TXA / self.AcqDH_0) / self.Bauds + 0.5)
277 277
278 if (self.BaudWidth==0):
279 self.BaudWidth=1
278 if (self.BaudWidth == 0):
279 self.BaudWidth = 1
280 280
281 def init_pulse(self,Num_Codes=Num_Codes,Bauds=Bauds,BaudWidth=BaudWidth,Dyn_snCode=Dyn_snCode):
281 def init_pulse(self, Num_Codes=Num_Codes, Bauds=Bauds, BaudWidth=BaudWidth, Dyn_snCode=Dyn_snCode):
282 282
283 Num_Codes = Num_Codes
284 Bauds = Bauds
285 BaudWidth = BaudWidth
286 Dyn_snCode = Dyn_snCode
283 Num_Codes = Num_Codes
284 Bauds = Bauds
285 BaudWidth = BaudWidth
286 Dyn_snCode = Dyn_snCode
287 287
288 288 if Dyn_snCode:
289 289 print("EXISTE")
290 290 else:
291 291 print("No existe")
292 292
293 if Dyn_snCode: # if Bauds:
294 pulses = list(range(0,Num_Codes))
295 num_codes = Num_Codes
293 if Dyn_snCode: # if Bauds:
294 pulses = list(range(0, Num_Codes))
295 num_codes = Num_Codes
296 296 for i in range(num_codes):
297 pulse_size = Bauds*BaudWidth
298 pulses[i] = numpy.zeros(pulse_size)
297 pulse_size = Bauds * BaudWidth
298 pulses[i] = numpy.zeros(pulse_size)
299 299 for j in range(Bauds):
300 300 for k in range(BaudWidth):
301 pulses[i][j*BaudWidth+k] = int(Dyn_snCode[i][j]*600)
301 pulses[i][j * BaudWidth + k] = int(Dyn_snCode[i][j] * 600)
302 302 else:
303 303 print("sin code")
304 pulses = list(range(1))
305 if self.AcqDH_0>0.149:
306 pulse_size = int(self.FixRCP_TXB/0.15+0.5)
304 pulses = list(range(1))
305 if self.AcqDH_0 > 0.149:
306 pulse_size = int(self.FixRCP_TXB / 0.15 + 0.5)
307 307 else:
308 pulse_size = int((self.FixRCP_TXB/self.AcqDH_0)+0.5) #0.0375
309 pulses[0] = numpy.ones(pulse_size)
310 pulses = 600*pulses[0]
311
312 return pulses,pulse_size
313
314 def jro_GenerateBlockOfData(self,Samples=Samples,DC_level= DC_level,stdev=stdev,
315 Reference= Reference,pulses= pulses,
316 Num_Codes= Num_Codes,pulse_size=pulse_size,
317 prof_gen= prof_gen,H0 = H0,DH0=DH0,
318 Adoppler=Adoppler,Fdoppler= Fdoppler,Hdoppler=Hdoppler):
319 Samples = Samples
320 DC_level = DC_level
321 stdev = stdev
322 m_nR = Reference
323 pulses = pulses
324 num_codes = Num_Codes
325 ps = pulse_size
326 prof_gen = prof_gen
327 channels = self.channels
328 H0 = H0
329 DH0 = DH0
330 ippSec = self.radarControllerHeaderObj.ippSeconds
331 Fdoppler = self.Fdoppler
332 Hdoppler = self.Hdoppler
333 Adoppler = self.Adoppler
334
335 self.datablock = numpy.zeros([channels,prof_gen,Samples],dtype= numpy.complex64)
308 pulse_size = int((self.FixRCP_TXB / self.AcqDH_0) + 0.5) # 0.0375
309 pulses[0] = numpy.ones(pulse_size)
310 pulses = 600 * pulses[0]
311
312 return pulses, pulse_size
313
314 def jro_GenerateBlockOfData(self, Samples=Samples, DC_level=DC_level, stdev=stdev,
315 Reference=Reference, pulses=pulses,
316 Num_Codes=Num_Codes, pulse_size=pulse_size,
317 prof_gen=prof_gen, H0=H0, DH0=DH0,
318 Adoppler=Adoppler, Fdoppler=Fdoppler, Hdoppler=Hdoppler):
319 Samples = Samples
320 DC_level = DC_level
321 stdev = stdev
322 m_nR = Reference
323 pulses = pulses
324 num_codes = Num_Codes
325 ps = pulse_size
326 prof_gen = prof_gen
327 channels = self.channels
328 H0 = H0
329 DH0 = DH0
330 ippSec = self.radarControllerHeaderObj.ippSeconds
331 Fdoppler = self.Fdoppler
332 Hdoppler = self.Hdoppler
333 Adoppler = self.Adoppler
334
335 self.datablock = numpy.zeros([channels, prof_gen, Samples], dtype=numpy.complex64)
336 336 for i in range(channels):
337 337 for k in range(prof_gen):
338 338 #-----------------------NOISE---------------
339 Noise_r = numpy.random.normal(DC_level,stdev,Samples)
340 Noise_i = numpy.random.normal(DC_level,stdev,Samples)
341 Noise = numpy.zeros(Samples,dtype=complex)
339 Noise_r = numpy.random.normal(DC_level, stdev, Samples)
340 Noise_i = numpy.random.normal(DC_level, stdev, Samples)
341 Noise = numpy.zeros(Samples, dtype=complex)
342 342 Noise.real = Noise_r
343 343 Noise.imag = Noise_i
344 344 #-----------------------PULSOS--------------
345 Pulso = numpy.zeros(pulse_size,dtype=complex)
346 Pulso.real = pulses[k%num_codes]
347 Pulso.imag = pulses[k%num_codes]
345 Pulso = numpy.zeros(pulse_size, dtype=complex)
346 Pulso.real = pulses[k % num_codes]
347 Pulso.imag = pulses[k % num_codes]
348 348 #--------------------- PULSES+NOISE----------
349 InBuffer = numpy.zeros(Samples,dtype=complex)
350 InBuffer[m_nR:m_nR+ps] = Pulso
351 InBuffer = InBuffer+Noise
349 InBuffer = numpy.zeros(Samples, dtype=complex)
350 InBuffer[m_nR:m_nR + ps] = Pulso
351 InBuffer = InBuffer + Noise
352 352 #--------------------- ANGLE -------------------------------
353 InBuffer.real[m_nR:m_nR+ps] = InBuffer.real[m_nR:m_nR+ps]*(math.cos( self.fAngle)*5)
354 InBuffer.imag[m_nR:m_nR+ps] = InBuffer.imag[m_nR:m_nR+ps]*(math.sin( self.fAngle)*5)
355 InBuffer=InBuffer
356 self.datablock[i][k]= InBuffer
353 InBuffer.real[m_nR:m_nR + ps] = InBuffer.real[m_nR:m_nR + ps] * (math.cos(self.fAngle) * 5)
354 InBuffer.imag[m_nR:m_nR + ps] = InBuffer.imag[m_nR:m_nR + ps] * (math.sin(self.fAngle) * 5)
355 InBuffer = InBuffer
356 self.datablock[i][k] = InBuffer
357 357
358 358 #----------------DOPPLER SIGNAL...............................................
359 time_vec = numpy.linspace(0,(prof_gen-1)*ippSec,int(prof_gen))+self.nReadBlocks*ippSec*prof_gen+(self.nReadFiles-1)*ippSec*prof_gen
360 fd = Fdoppler #+(600.0/120)*self.nReadBlocks
361 d_signal = Adoppler*numpy.array(numpy.exp(1.0j*2.0*math.pi*fd*time_vec),dtype=numpy.complex64)
359 time_vec = numpy.linspace(0, (prof_gen - 1) * ippSec, int(prof_gen)) + self.nReadBlocks * ippSec * prof_gen + (self.nReadFiles - 1) * ippSec * prof_gen
360 fd = Fdoppler # +(600.0/120)*self.nReadBlocks
361 d_signal = Adoppler * numpy.array(numpy.exp(1.0j * 2.0 * math.pi * fd * time_vec), dtype=numpy.complex64)
362 362 #-------------Senal con ancho espectral--------------------
363 if prof_gen%2==0:
364 min = int(prof_gen/2.0-1.0)
365 max = int(prof_gen/2.0)
363 if prof_gen % 2 == 0:
364 min = int(prof_gen / 2.0 - 1.0)
365 max = int(prof_gen / 2.0)
366 366 else:
367 min = int(prof_gen/2.0)
368 max = int(prof_gen/2.0)
369 specw_sig = numpy.linspace(-min,max,prof_gen)
370 w = 4
371 A = 20
372 specw_sig = specw_sig/w
373 specw_sig = numpy.sinc(specw_sig)
374 specw_sig = A*numpy.array(specw_sig,dtype=numpy.complex64)
367 min = int(prof_gen / 2.0)
368 max = int(prof_gen / 2.0)
369 specw_sig = numpy.linspace(-min, max, prof_gen)
370 w = 4
371 A = 20
372 specw_sig = specw_sig / w
373 specw_sig = numpy.sinc(specw_sig)
374 specw_sig = A * numpy.array(specw_sig, dtype=numpy.complex64)
375 375 #------------------ DATABLOCK + DOPPLER--------------------
376 HD=int(Hdoppler/self.AcqDH_0)
376 HD = int(Hdoppler / self.AcqDH_0)
377 377 for i in range(12):
378 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ d_signal# RESULT
378 self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + d_signal # RESULT
379 379 #------------------ DATABLOCK + DOPPLER*Sinc(x)--------------------
380 HD=int(Hdoppler/self.AcqDH_0)
381 HD=int(HD/2)
380 HD = int(Hdoppler / self.AcqDH_0)
381 HD = int(HD / 2)
382 382 for i in range(12):
383 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ specw_sig*d_signal# RESULT
383 self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + specw_sig * d_signal # RESULT
384 384
385 385 def readBlock(self):
386 386
387 self.jro_GenerateBlockOfData(Samples= self.samples,DC_level=self.DC_level,
388 stdev=self.stdev,Reference= self.Reference,
389 pulses = self.pulses,Num_Codes=self.Num_Codes,
390 pulse_size=self.pulse_size,prof_gen=self.profiles,
391 H0=self.H0,DH0=self.DH0)
387 self.jro_GenerateBlockOfData(Samples=self.samples, DC_level=self.DC_level,
388 stdev=self.stdev, Reference=self.Reference,
389 pulses=self.pulses, Num_Codes=self.Num_Codes,
390 pulse_size=self.pulse_size, prof_gen=self.profiles,
391 H0=self.H0, DH0=self.DH0)
392 392
393 self.profileIndex = 0
394 self.flagIsNewFile = 0
393 self.profileIndex = 0
394 self.flagIsNewFile = 0
395 395 self.flagIsNewBlock = 1
396 self.nTotalBlocks += 1
397 self.nReadBlocks += 1
396 self.nTotalBlocks += 1
397 self.nReadBlocks += 1
398 398
399 399 return 1
400 400
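
Review note: the reformatted block above is the heart of the simulator — per profile it builds complex Gaussian noise plus the coded pulse, then adds a narrow Doppler line A·exp(j·2π·fd·t) at one range gate and a sinc-weighted (spectrally broadened) copy at half that gate index. A stripped-down, self-contained sketch of just the Doppler-line part; array sizes and parameter values are illustrative, not the schainpy defaults:

    import math
    import numpy

    prof_gen, samples = 128, 200          # profiles per block, range gates
    ipp_sec = 1e-3                        # inter-pulse period in seconds
    fd, adoppler, hd = 100.0, 300.0, 36   # Doppler freq [Hz], amplitude, target gate

    datablock = numpy.zeros((1, prof_gen, samples), dtype=numpy.complex64)

    # one complex sample per profile: A * exp(j * 2*pi * fd * t)
    time_vec = numpy.linspace(0, (prof_gen - 1) * ipp_sec, prof_gen)
    d_signal = adoppler * numpy.exp(1j * 2.0 * math.pi * fd * time_vec).astype(numpy.complex64)

    # inject the line into a few consecutive range gates, as the simulator does
    for i in range(12):
        datablock[0, :, hd + i] += d_signal

The spectral-width branch simply multiplies d_signal by a numpy.sinc envelope over the profile axis before the same injection.
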
@@ -404,11 +404,11 class SimulatorReader(JRODataReader, ProcessingUnit):
404 404 self.dataOut.flagNodata = True
405 405 return 0
406 406 self.flagDiscontinuousBlock = 0
407 self.flagIsNewBlock = 0
408 if self.__hasNotDataInBuffer(): # aqui es verdad
409 if not(self.readNextBlock()): # return 1 y por eso el if not salta a getBasic Header
407 self.flagIsNewBlock = 0
408 if self.__hasNotDataInBuffer(): # aqui es verdad
409 if not(self.readNextBlock()): # return 1 y por eso el if not salta a getBasic Header
410 410 return 0
411 self.getFirstHeader() # atributo
411 self.getFirstHeader() # atributo
412 412
413 413 if not self.getByBlock:
414 414 self.dataOut.flagDataAsBlock = False
@@ -423,36 +423,36 class SimulatorReader(JRODataReader, ProcessingUnit):
423 423 return self.dataOut.data
424 424
425 425
426 def setup(self,frequency=49.92e6,incIntFactor= 1, nFFTPoints = 0, FixPP_IncInt=1,FixRCP_IPP=1000,
427 FixPP_CohInt= 1,Tau_0= 250,AcqH0_0 = 70 ,AcqDH_0=1.25, Bauds= 32,
428 FixRCP_TXA = 40, FixRCP_TXB = 50, fAngle = 2.0*math.pi*(1/16),DC_level= 50,
429 stdev= 8,Num_Codes = 1 , Dyn_snCode = None, samples=200,
430 channels=2,Fdoppler=20,Hdoppler=36,Adoppler=500,
431 profilesPerBlock=300,dataBlocksPerFile=120,nTotalReadFiles=10000,
426 def setup(self, frequency=49.92e6, incIntFactor=1, nFFTPoints=0, FixPP_IncInt=1, FixRCP_IPP=1000,
427 FixPP_CohInt=1, Tau_0=250, AcqH0_0=70, AcqDH_0=1.25, Bauds=32,
428 FixRCP_TXA=40, FixRCP_TXB=50, fAngle=2.0 * math.pi * (1 / 16), DC_level=50,
429 stdev=8, Num_Codes=1, Dyn_snCode=None, samples=200,
430 channels=2, Fdoppler=20, Hdoppler=36, Adoppler=500,
431 profilesPerBlock=300, dataBlocksPerFile=120, nTotalReadFiles=10000,
432 432 **kwargs):
433 433
434 434 self.set_kwargs(**kwargs)
435 435 self.nReadBlocks = 0
436 self.nReadFiles = 1
437 print('------------------- [Opening file: ] ------------------------------',self.nReadFiles)
436 self.nReadFiles = 1
437 print('------------------- [Opening file: ] ------------------------------', self.nReadFiles)
438 438
439 tmp = time.time()
440 tmp_utc = int(tmp)
441 tmp_milisecond = int((tmp-tmp_utc)*1000)
442 print(" SETUP -basicHeaderObj.utc",datetime.datetime.utcfromtimestamp(tmp))
439 tmp = time.time()
440 tmp_utc = int(tmp)
441 tmp_milisecond = int((tmp - tmp_utc) * 1000)
442 print(" SETUP -basicHeaderObj.utc", datetime.datetime.utcfromtimestamp(tmp))
443 443 if Dyn_snCode is None:
444 Num_Codes=1
445 Bauds =1
444 Num_Codes = 1
445 Bauds = 1
446 446
447 447
448 448
449 self.set_BH(utc= tmp_utc,miliSecond= tmp_milisecond,timeZone=300 )
450 self.set_RCH( expType=0, nTx=150,ipp=FixRCP_IPP, txA=FixRCP_TXA, txB= FixRCP_TXB,
449 self.set_BH(utc=tmp_utc, miliSecond=tmp_milisecond, timeZone=300)
450 self.set_RCH(expType=0, nTx=150, ipp=FixRCP_IPP, txA=FixRCP_TXA, txB=FixRCP_TXB,
451 451 nWindows=1 , nHeights=samples, firstHeight=AcqH0_0, deltaHeight=AcqDH_0,
452 452 numTaus=1, line6Function=0, line5Function=0, fClock=None,
453 453 prePulseBefore=0, prePulseAfter=0,
454 454 codeType=0, nCode=Num_Codes, nBaud=32, code=Dyn_snCode,
455 flip1=0, flip2=0,Taus=Tau_0)
455 flip1=0, flip2=0, Taus=Tau_0)
456 456
457 457 self.set_PH(dtype=0, blockSize=0, profilesPerBlock=profilesPerBlock,
458 458 dataBlocksPerFile=dataBlocksPerFile, nWindows=1, processFlags=numpy.array([1024]), nCohInt=1,
@@ -465,54 +465,54 class SimulatorReader(JRODataReader, ProcessingUnit):
465 465
466 466 self.readFirstHeader()
467 467
468 self.frequency = frequency
469 self.incIntFactor = incIntFactor
470 self.nFFTPoints = nFFTPoints
471 self.FixPP_IncInt = FixPP_IncInt
472 self.FixRCP_IPP = FixRCP_IPP
473 self.FixPP_CohInt = FixPP_CohInt
474 self.Tau_0 = Tau_0
475 self.AcqH0_0 = AcqH0_0
476 self.H0 = AcqH0_0
477 self.AcqDH_0 = AcqDH_0
478 self.DH0 = AcqDH_0
479 self.Bauds = Bauds
480 self.FixRCP_TXA = FixRCP_TXA
481 self.FixRCP_TXB = FixRCP_TXB
482 self.fAngle = fAngle
483 self.DC_level = DC_level
484 self.stdev = stdev
485 self.Num_Codes = Num_Codes
486 self.Dyn_snCode = Dyn_snCode
487 self.samples = samples
488 self.channels = channels
489 self.profiles = None
490 self.m_nReference = None
491 self.Baudwidth = None
492 self.Fdoppler = Fdoppler
493 self.Hdoppler = Hdoppler
494 self.Adoppler = Adoppler
495 self.nTotalReadFiles = int(nTotalReadFiles)
468 self.frequency = frequency
469 self.incIntFactor = incIntFactor
470 self.nFFTPoints = nFFTPoints
471 self.FixPP_IncInt = FixPP_IncInt
472 self.FixRCP_IPP = FixRCP_IPP
473 self.FixPP_CohInt = FixPP_CohInt
474 self.Tau_0 = Tau_0
475 self.AcqH0_0 = AcqH0_0
476 self.H0 = AcqH0_0
477 self.AcqDH_0 = AcqDH_0
478 self.DH0 = AcqDH_0
479 self.Bauds = Bauds
480 self.FixRCP_TXA = FixRCP_TXA
481 self.FixRCP_TXB = FixRCP_TXB
482 self.fAngle = fAngle
483 self.DC_level = DC_level
484 self.stdev = stdev
485 self.Num_Codes = Num_Codes
486 self.Dyn_snCode = Dyn_snCode
487 self.samples = samples
488 self.channels = channels
489 self.profiles = None
490 self.m_nReference = None
491 self.Baudwidth = None
492 self.Fdoppler = Fdoppler
493 self.Hdoppler = Hdoppler
494 self.Adoppler = Adoppler
495 self.nTotalReadFiles = int(nTotalReadFiles)
496 496
497 497 print("IPP ", self.FixRCP_IPP)
498 print("Tau_0 ",self.Tau_0)
499 print("AcqH0_0",self.AcqH0_0)
500 print("samples,window ",self.samples)
501 print("AcqDH_0",AcqDH_0)
502 print("FixRCP_TXA",self.FixRCP_TXA)
503 print("FixRCP_TXB",self.FixRCP_TXB)
504 print("Dyn_snCode",Dyn_snCode)
498 print("Tau_0 ", self.Tau_0)
499 print("AcqH0_0", self.AcqH0_0)
500 print("samples,window ", self.samples)
501 print("AcqDH_0", AcqDH_0)
502 print("FixRCP_TXA", self.FixRCP_TXA)
503 print("FixRCP_TXB", self.FixRCP_TXB)
504 print("Dyn_snCode", Dyn_snCode)
505 505 print("Fdoppler", Fdoppler)
506 print("Hdoppler",Hdoppler)
507 print("Vdopplermax",Fdoppler*(3.0e8/self.frequency)/2.0)
506 print("Hdoppler", Hdoppler)
507 print("Vdopplermax", Fdoppler * (3.0e8 / self.frequency) / 2.0)
508 508 print("nTotalReadFiles", nTotalReadFiles)
509 509
510 510 self.init_acquisition()
511 self.pulses,self.pulse_size=self.init_pulse(Num_Codes=self.Num_Codes,Bauds=self.Bauds,BaudWidth=self.BaudWidth,Dyn_snCode=Dyn_snCode)
511 self.pulses, self.pulse_size = self.init_pulse(Num_Codes=self.Num_Codes, Bauds=self.Bauds, BaudWidth=self.BaudWidth, Dyn_snCode=Dyn_snCode)
512 512 print(" [ END ] - SETUP metodo")
513 513 return
514 514
515 def run(self,**kwargs): # metodo propio
515 def run(self, **kwargs): # metodo propio
516 516 if not(self.isConfig):
517 517 self.setup(**kwargs)
518 518 self.isConfig = True
@@ -53,7 +53,7 class SpectraReader(JRODataReader, ProcessingUnit):
53 53
54 54 """
55 55
56 def __init__(self):#, **kwargs):
56 def __init__(self): # , **kwargs):
57 57 """
58 58 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
59 59
@@ -121,12 +121,12 class SpectraReader(JRODataReader, ProcessingUnit):
121 121 self.nRdPairs = 0
122 122 self.rdPairList = []
123 123
124 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
126 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
124 for i in range(0, self.processingHeaderObj.totalSpectra * 2, 2):
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i + 1]:
126 self.nRdChannels = self.nRdChannels + 1 # par de canales iguales
127 127 else:
128 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
128 self.nRdPairs = self.nRdPairs + 1 # par de canales diferentes
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i + 1]))
130 130
131 131 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
132 132
@@ -165,38 +165,38 class SpectraReader(JRODataReader, ProcessingUnit):
165 165
166 166 fpointer = self.fp.tell()
167 167
168 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
169 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
168 spc = numpy.fromfile(self.fp, self.dtype[0], self.pts2read_SelfSpectra)
169 spc = spc.reshape((self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # transforma a un arreglo 3D
170 170
171 171 if self.processingHeaderObj.flag_cspc:
172 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
173 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
172 cspc = numpy.fromfile(self.fp, self.dtype, self.pts2read_CrossSpectra)
173 cspc = cspc.reshape((self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # transforma a un arreglo 3D
174 174
175 175 if self.processingHeaderObj.flag_dc:
176 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
177 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
176 dc = numpy.fromfile(self.fp, self.dtype, self.pts2read_DCchannels) # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
177 dc = dc.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights)) # transforma a un arreglo 2D
178 178
179 179 if not self.processingHeaderObj.shif_fft:
180 #desplaza a la derecha en el eje 2 determinadas posiciones
181 shift = int(self.processingHeaderObj.profilesPerBlock/2)
182 spc = numpy.roll( spc, shift , axis=2 )
180 # desplaza a la derecha en el eje 2 determinadas posiciones
181 shift = int(self.processingHeaderObj.profilesPerBlock / 2)
182 spc = numpy.roll(spc, shift, axis=2)
183 183
184 184 if self.processingHeaderObj.flag_cspc:
185 #desplaza a la derecha en el eje 2 determinadas posiciones
186 cspc = numpy.roll( cspc, shift, axis=2 )
185 # desplaza a la derecha en el eje 2 determinadas posiciones
186 cspc = numpy.roll(cspc, shift, axis=2)
187 187
188 #Dimensions : nChannels, nProfiles, nSamples
189 spc = numpy.transpose( spc, (0,2,1) )
188 # Dimensions : nChannels, nProfiles, nSamples
189 spc = numpy.transpose(spc, (0, 2, 1))
190 190 self.data_spc = spc
191 191
192 192 if self.processingHeaderObj.flag_cspc:
193 cspc = numpy.transpose( cspc, (0,2,1) )
194 self.data_cspc = cspc['real'] + cspc['imag']*1j
193 cspc = numpy.transpose(cspc, (0, 2, 1))
194 self.data_cspc = cspc['real'] + cspc['imag'] * 1j
195 195 else:
196 196 self.data_cspc = None
197 197
198 198 if self.processingHeaderObj.flag_dc:
199 self.data_dc = dc['real'] + dc['imag']*1j
199 self.data_dc = dc['real'] + dc['imag'] * 1j
200 200 else:
201 201 self.data_dc = None
202 202
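
Review note: aside from spacing and translated comments, readBlock's flow is unchanged — the raw self-spectra come off disk as (channels, heights, profiles), get shifted with numpy.roll when the file was not written FFT-shifted, and are transposed to the in-memory (channels, profiles, heights) order; cross-spectra additionally combine the structured 'real'/'imag' fields into a complex array. A compact sketch of those steps on synthetic data; the shapes are arbitrary:

    import numpy

    n_channels, n_heights, n_profiles = 2, 100, 64

    # self-spectra as stored on disk: (channels, heights, profiles)
    spc = numpy.arange(n_channels * n_heights * n_profiles, dtype=numpy.float32)
    spc = spc.reshape((n_channels, n_heights, n_profiles))

    # undo the missing FFT shift along the profile axis
    shift = n_profiles // 2
    spc = numpy.roll(spc, shift, axis=2)

    # reorder to (channels, profiles, heights) for the rest of the chain
    spc = numpy.transpose(spc, (0, 2, 1))
    assert spc.shape == (n_channels, n_profiles, n_heights)

    # cross-spectra use a structured dtype and become complex here
    cspc = numpy.zeros((1, n_profiles, n_heights), dtype=[('real', '<f4'), ('imag', '<f4')])
    data_cspc = cspc['real'] + cspc['imag'] * 1j
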
@@ -219,12 +219,12 class SpectraReader(JRODataReader, ProcessingUnit):
219 219 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
220 220 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
221 221 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight
223 223 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
224 224 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
225 self.dataOut.flagShiftFFT = True #Data is always shifted
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip
225 self.dataOut.flagShiftFFT = True # Data is always shifted
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # asumo q la data no esta decodificada
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # asumo q la data esta sin flip
228 228
229 229 def getData(self):
230 230 """
@@ -253,11 +253,11 class SpectraReader(JRODataReader, ProcessingUnit):
253 253
254 254 if self.__hasNotDataInBuffer():
255 255
256 if not( self.readNextBlock() ):
256 if not(self.readNextBlock()):
257 257 self.dataOut.flagNoData = True
258 258 return 0
259 259
260 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
260 # data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
261 261
262 262 if self.data_spc is None:
263 263 self.dataOut.flagNoData = True
@@ -356,20 +356,20 class SpectraWriter(JRODataWriter, Operation):
356 356 Return: None
357 357 """
358 358
359 spc = numpy.transpose( self.data_spc, (0,2,1) )
359 spc = numpy.transpose(self.data_spc, (0, 2, 1))
360 360 if not self.processingHeaderObj.shif_fft:
361 spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
361 spc = numpy.roll(spc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # desplaza a la derecha en el eje 2 determinadas posiciones
362 362 data = spc.reshape((-1))
363 363 data = data.astype(self.dtype[0])
364 364 data.tofile(self.fp)
365 365
366 366 if self.data_cspc is not None:
367 367
368 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
369 data = numpy.zeros( numpy.shape(cspc), self.dtype )
370 #print 'data.shape', self.shape_cspc_Buffer
368 cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
369 data = numpy.zeros(numpy.shape(cspc), self.dtype)
370 # print 'data.shape', self.shape_cspc_Buffer
371 371 if not self.processingHeaderObj.shif_fft:
372 cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
372 cspc = numpy.roll(cspc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # desplaza a la derecha en el eje 2 determinadas posiciones
373 373 data['real'] = cspc.real
374 374 data['imag'] = cspc.imag
375 375 data = data.reshape((-1))
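
Review note: writeBlock is the mirror image of readBlock — transpose back to the on-disk order, re-apply the roll when the spectra are not FFT-shifted, and, for the complex cross-spectra, scatter the real and imaginary parts into a structured array before flattening and writing with tofile. A minimal sketch of the complex-to-structured step; the dtype fields mirror the ones used when reading:

    import numpy

    cspc = numpy.ones((1, 4, 3), dtype=numpy.complex64) * (1 + 2j)

    out = numpy.zeros(cspc.shape, dtype=[('real', '<f4'), ('imag', '<f4')])
    out['real'] = cspc.real
    out['imag'] = cspc.imag

    flat = out.reshape((-1))          # interleaved real/imag pairs, ready for tofile(fp)
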
@@ -378,7 +378,7 class SpectraWriter(JRODataWriter, Operation):
378 378 if self.data_dc is not None:
379 379
380 380 dc = self.data_dc
381 data = numpy.zeros( numpy.shape(dc), self.dtype )
381 data = numpy.zeros(numpy.shape(dc), self.dtype)
382 382 data['real'] = dc.real
383 383 data['imag'] = dc.imag
384 384 data = data.reshape((-1))
@@ -453,15 +453,15 class SpectraWriter(JRODataWriter, Operation):
453 453 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
454 454
455 455 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
456 blocksize = (pts2write_SelfSpectra*dtype_width)
456 blocksize = (pts2write_SelfSpectra * dtype_width)
457 457
458 458 if self.dataOut.data_cspc is not None:
459 459 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
460 blocksize += (pts2write_CrossSpectra*dtype_width*2)
460 blocksize += (pts2write_CrossSpectra * dtype_width * 2)
461 461
462 462 if self.dataOut.data_dc is not None:
463 463 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
464 blocksize += (pts2write_DCchannels*dtype_width*2)
464 blocksize += (pts2write_DCchannels * dtype_width * 2)
465 465
466 466 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
467 467
@@ -485,12 +485,12 class SpectraWriter(JRODataWriter, Operation):
485 485 self.systemHeaderObj.nChannels = self.dataOut.nChannels
486 486 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
487 487
488 self.processingHeaderObj.dtype = 1 # Spectra
488 self.processingHeaderObj.dtype = 1 # Spectra
489 489 self.processingHeaderObj.blockSize = self.__getBlockSize()
490 490 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
491 491 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
492 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
492 self.processingHeaderObj.nWindows = 1 # podria ser 1 o self.dataOut.processingHeaderObj.nWindows
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # Se requiere para determinar el valor de timeInterval
494 494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
495 495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
496 496 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
@@ -524,4 +524,4 class SpectraWriter(JRODataWriter, Operation):
524 524
525 525 self.processingHeaderObj.processFlags = self.getProcessFlags()
526 526
527 self.setBasicHeader() No newline at end of file
527 self.setBasicHeader()
@@ -46,7 +46,7 class USRPReader(ProcessingUnit):
46 46
47 47 def __getCurrentSecond(self):
48 48
49 return self.__thisUnixSample/self.__sample_rate
49 return self.__thisUnixSample / self.__sample_rate
50 50
51 51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 52
@@ -54,9 +54,9 class USRPReader(ProcessingUnit):
54 54 '''
55 55 In this method will be initialized every parameter of dataOut object (header, no data)
56 56 '''
57 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
57 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
58 58
59 nProfiles = 1.0/ippSeconds #Number of profiles in one second
59 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
60 60
61 61 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
62 62 txA=0,
@@ -67,7 +67,7 class USRPReader(ProcessingUnit):
67 67 deltaHeight=self.__deltaHeigth,
68 68 codeType=self.__codeType,
69 69 nCode=self.__nCode, nBaud=self.__nBaud,
70 code = self.__code)
70 code=self.__code)
71 71
72 72 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
73 73 nProfiles=nProfiles,
@@ -78,7 +78,7 class USRPReader(ProcessingUnit):
78 78
79 79 self.dataOut.data = None
80 80
81 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
81 self.dataOut.dtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
82 82
83 83 # self.dataOut.nChannels = 0
84 84
@@ -86,7 +86,7 class USRPReader(ProcessingUnit):
86 86
87 87 self.dataOut.nProfiles = nProfiles
88 88
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype=numpy.float) * self.__deltaHeigth
90 90
91 91 self.dataOut.channelList = self.__channelList
92 92
@@ -96,12 +96,12 class USRPReader(ProcessingUnit):
96 96
97 97 self.dataOut.flagNoData = True
98 98
99 #Set to TRUE if the data is discontinuous
99 # Set to TRUE if the data is discontinuous
100 100 self.dataOut.flagDiscontinuousBlock = False
101 101
102 102 self.dataOut.utctime = None
103 103
104 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
104 self.dataOut.timeZone = self.__timezone / 60 # timezone like jroheader, difference in minutes between UTC and localtime
105 105
106 106 self.dataOut.dstFlag = 0
107 107
@@ -109,16 +109,16 class USRPReader(ProcessingUnit):
109 109
110 110 self.dataOut.nCohInt = 1
111 111
112 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
112 self.dataOut.flagDecodeData = False # asumo que la data esta decodificada
113 113
114 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
114 self.dataOut.flagDeflipData = False # asumo que la data esta sin flip
115 115
116 116 self.dataOut.flagShiftFFT = False
117 117
118 118 self.dataOut.ippSeconds = ippSeconds
119 119
120 #Time interval between profiles
121 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
120 # Time interval between profiles
121 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
122 122
123 123 self.dataOut.frequency = self.__frequency
124 124
@@ -150,7 +150,7 class USRPReader(ProcessingUnit):
150 150 except:
151 151 timezone = 0
152 152
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0]) / sample_rate - timezone
154 154
155 155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
@@ -165,7 +165,7 class USRPReader(ProcessingUnit):
165 165
166 166 thisDatetime = startDatetime
167 167
168 while(thisDatetime<=endDatatime):
168 while(thisDatetime <= endDatatime):
169 169
170 170 thisDate = thisDatetime.date()
171 171
@@ -180,17 +180,17 class USRPReader(ProcessingUnit):
180 180
181 181 return dateList
182 182
183 def setup(self, path = None,
184 startDate = None,
185 endDate = None,
186 startTime = datetime.time(0,0,0),
187 endTime = datetime.time(23,59,59),
188 channelList = None,
189 nSamples = None,
190 ippKm = 60,
191 online = False,
192 delay = 60,
193 buffer_size = 1024,
183 def setup(self, path=None,
184 startDate=None,
185 endDate=None,
186 startTime=datetime.time(0, 0, 0),
187 endTime=datetime.time(23, 59, 59),
188 channelList=None,
189 nSamples=None,
190 ippKm=60,
191 online=False,
192 delay=60,
193 buffer_size=1024,
194 194 **kwargs):
195 195 '''
196 196 In this method we should set all initial parameters.
@@ -209,7 +209,7 class USRPReader(ProcessingUnit):
209 209 '''
210 210
211 211 if not os.path.isdir(path):
212 raise ValueError("[Reading] Directory %s does not exist" %path)
212 raise ValueError("[Reading] Directory %s does not exist" % path)
213 213
214 214 try:
215 215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
@@ -219,7 +219,7 class USRPReader(ProcessingUnit):
219 219 channelNameList = self.digitalReadObj.get_channels()
220 220
221 221 if not channelNameList:
222 raise ValueError("[Reading] Directory %s does not have any files" %path)
222 raise ValueError("[Reading] Directory %s does not have any files" % path)
223 223
224 224 if not channelList:
225 225 channelList = list(range(len(channelNameList)))
@@ -230,7 +230,7 class USRPReader(ProcessingUnit):
230 230
231 231 self.__sample_rate = metadata_dict['sample_rate'][0]
232 232 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
233 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
233 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate
234 234
235 235 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
236 236
@@ -266,8 +266,8 class USRPReader(ProcessingUnit):
266 266
267 267 if not ippKm:
268 268 try:
269 #seconds to km
270 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
269 # seconds to km
270 ippKm = 1e6 * 0.15 * this_metadata_file['ipp'].value
271 271 except:
272 272 ippKm = None
273 273
@@ -277,37 +277,37 class USRPReader(ProcessingUnit):
277 277
278 278 if startDate:
279 279 startDatetime = datetime.datetime.combine(startDate, startTime)
280 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
280 startUTCSecond = (startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
281 281
282 282 if endDate:
283 283 endDatetime = datetime.datetime.combine(endDate, endTime)
284 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
284 endUTCSecond = (endDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
285 285
286 286 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
287 287
288 288 if not startUTCSecond:
289 startUTCSecond = start_index/self.__sample_rate
289 startUTCSecond = start_index / self.__sample_rate
290 290
291 if start_index > startUTCSecond*self.__sample_rate:
292 startUTCSecond = start_index/self.__sample_rate
291 if start_index > startUTCSecond * self.__sample_rate:
292 startUTCSecond = start_index / self.__sample_rate
293 293
294 294 if not endUTCSecond:
295 endUTCSecond = end_index/self.__sample_rate
295 endUTCSecond = end_index / self.__sample_rate
296 296
297 if end_index < endUTCSecond*self.__sample_rate:
298 endUTCSecond = end_index/self.__sample_rate
297 if end_index < endUTCSecond * self.__sample_rate:
298 endUTCSecond = end_index / self.__sample_rate
299 299
300 300 if not nSamples:
301 301 if not ippKm:
302 302 raise ValueError("[Reading] nSamples or ippKm should be defined")
303 303
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
304 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
305 305
306 306 channelBoundList = []
307 307 channelNameListFiltered = []
308 308
309 309 for thisIndexChannel in channelList:
310 thisChannelName = channelNameList[thisIndexChannel]
310 thisChannelName = channelNameList[thisIndexChannel]
311 311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
312 312 channelBoundList.append((start_index, end_index))
313 313 channelNameListFiltered.append(thisChannelName)
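
Review note: two bits of arithmetic reformatted here deserve a worked example — the local start/end datetimes become sample-clock UTC seconds by subtracting the Unix epoch and adding the timezone offset, and when nSamples is not given it is derived from the IPP in kilometres through the 0.15 km-per-microsecond range factor. A sketch with illustrative numbers; the 1 MHz sample rate and 5 h offset are assumptions, while ippKm=60 matches the setup default above:

    import datetime

    timezone = 5 * 60 * 60                      # offset in seconds, as the reader stores it
    start_dt = datetime.datetime(2020, 5, 2, 12, 0, 0)
    start_utc_second = (start_dt - datetime.datetime(1970, 1, 1)).total_seconds() + timezone

    sample_rate = 1e6                           # 1 MHz -> 0.15 km per sample
    ipp_km = 60
    n_samples = int(ipp_km / (1e6 * 0.15 / sample_rate))
    print(start_utc_second, n_samples)          # ..., 400 range gates per IPP
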
@@ -327,31 +327,31 class USRPReader(ProcessingUnit):
327 327 self.__channelNameList = channelNameListFiltered
328 328 self.__channelBoundList = channelBoundList
329 329 self.__nSamples = nSamples
330 self.__samples_to_read = int(buffer_size*nSamples)
330 self.__samples_to_read = int(buffer_size * nSamples)
331 331 self.__nChannels = len(self.__channelList)
332 332
333 333 self.__startUTCSecond = startUTCSecond
334 334 self.__endUTCSecond = endUTCSecond
335 335
336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
336 self.__timeInterval = 1.0 * self.__samples_to_read / self.__sample_rate # Time interval
337 337
338 338 if online:
339 339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
340 340 startUTCSecond = numpy.floor(endUTCSecond)
341 341
342 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
342 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
343 343
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype=numpy.complex)
345 345
346 346 self.__setFileHeader()
347 347 self.isConfig = True
348 348
349 print("[Reading] USRP Data was found from %s to %s " %(
349 print("[Reading] USRP Data was found from %s to %s " % (
350 350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
351 351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
352 352 ))
353 353
354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
354 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
355 355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
356 356 ))
357 357
@@ -375,13 +375,13 class USRPReader(ProcessingUnit):
375 375
376 376 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
377 377
378 if start_index > self.__startUTCSecond*self.__sample_rate:
379 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
378 if start_index > self.__startUTCSecond * self.__sample_rate:
379 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
380 380
381 if end_index > self.__endUTCSecond*self.__sample_rate:
382 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
381 if end_index > self.__endUTCSecond * self.__sample_rate:
382 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
383 383 print()
384 print("[Reading] New timerange found [%s, %s] " %(
384 print("[Reading] New timerange found [%s, %s] " % (
385 385 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
386 386 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
387 387 ))
@@ -390,21 +390,21 class USRPReader(ProcessingUnit):
390 390
391 391 return False
392 392
393 def __readNextBlock(self, seconds=30, volt_scale = 218776):
393 def __readNextBlock(self, seconds=30, volt_scale=218776):
394 394 '''
395 395 '''
396 396
397 #Set the next data
397 # Set the next data
398 398 self.__flagDiscontinuousBlock = False
399 399 self.__thisUnixSample += self.__samples_to_read
400 400
401 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
401 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
402 402 print("[Reading] There are no more data into selected time-range")
403 403
404 404 self.__reload()
405 405
406 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
407 self.__thisUnixSample -= self.__samples_to_read
406 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
407 self.__thisUnixSample -= self.__samples_to_read
408 408 return False
409 409
410 410 indexChannel = 0
@@ -419,30 +419,30 class USRPReader(ProcessingUnit):
419 419 thisChannelName)
420 420
421 421 except IOError as e:
422 #read next profile
422 # read next profile
423 423 self.__flagDiscontinuousBlock = True
424 print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
424 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
425 425 break
426 426
427 427 if result.shape[0] != self.__samples_to_read:
428 428 self.__flagDiscontinuousBlock = True
429 print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
429 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
430 430 result.shape[0],
431 431 self.__samples_to_read))
432 432 break
433 433
434 self.__data_buffer[indexChannel,:] = result*volt_scale
434 self.__data_buffer[indexChannel, :] = result * volt_scale
435 435
436 436 indexChannel += 1
437 437
438 438 dataOk = True
439 439
440 self.__utctime = self.__thisUnixSample/self.__sample_rate
440 self.__utctime = self.__thisUnixSample / self.__sample_rate
441 441
442 442 if not dataOk:
443 443 return False
444 444
445 print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
445 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
446 446 self.__samples_to_read,
447 447 self.__timeInterval))
448 448
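Note: the hunk above only reformats the per-block bookkeeping; the logic is unchanged. As a reading aid, a minimal sketch of that sample-index arithmetic follows, with made-up values (sample rate, block size and time range are assumptions, not the reader's actual state):

# Sketch of the __readNextBlock() index bookkeeping (assumed values).
sample_rate = 1.0e6              # samples per second (assumption)
samples_to_read = 500000         # samples per block (assumption)
start_utc = 1500000000.0         # selected time range in UTC seconds (assumption)
end_utc = start_utc + 60.0

this_unix_sample = int(start_utc * sample_rate)

# Advance one block and check that two more blocks still fit in the range,
# mirroring the guard in __readNextBlock().
this_unix_sample += samples_to_read
if this_unix_sample + 2 * samples_to_read > end_utc * sample_rate:
    this_unix_sample -= samples_to_read      # step back, wait or reload
block_utc = this_unix_sample / sample_rate   # timestamp assigned to the block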
@@ -486,7 +486,7 class USRPReader(ProcessingUnit):
486 486 if self.__readNextBlock():
487 487 break
488 488
489 if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
489 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
490 490 return False
491 491
492 492 if self.__flagDiscontinuousBlock:
@@ -500,11 +500,11 class USRPReader(ProcessingUnit):
500 500 if err_counter > nTries:
501 501 return False
502 502
503 print('[Reading] waiting %d seconds to read a new block' %seconds)
503 print('[Reading] waiting %d seconds to read a new block' % seconds)
504 504 sleep(seconds)
505 505
506 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
507 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
506 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
507 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
508 508 self.dataOut.flagNoData = False
509 509 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
510 510 self.dataOut.profileIndex = self.profileIndex
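For reference, the buffer slicing done above when a block is handed out amounts to the following (a sketch with assumed dimensions, not the actual reader state):

import numpy

n_channels, n_samples_block, n_samples_profile = 2, 12, 4   # assumed sizes
sample_rate = 1.0e6                                          # assumption
this_unix_sample = 1500000000 * int(sample_rate)

data_buffer = numpy.zeros((n_channels, n_samples_block), dtype='complex64')

buffer_index = 0
while buffer_index + n_samples_profile <= n_samples_block:
    # One profile per channel, plus the UTC time of its first sample.
    profile = data_buffer[:, buffer_index:buffer_index + n_samples_profile]
    utctime = (this_unix_sample + buffer_index) / sample_rate
    buffer_index += n_samples_profile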
@@ -599,4 +599,4 if __name__ == '__main__':
599 599 while True:
600 600 readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
601 601 # readObj.printInfo()
602 readObj.printNumberOfBlock()
\ No newline at end of file
602 readObj.printNumberOfBlock()
@@ -22,7 +22,7 class USRPReaderAPI(USRPReader, threading.Thread):
22 22
23 23 # __isBufferEmpty = True
24 24
25 __DATAKEYLIST = ['data','utctime','flagNoData']
25 __DATAKEYLIST = ['data', 'utctime', 'flagNoData']
26 26
27 27 def __init__(self, serializer='msgpack'):
28 28
@@ -71,7 +71,7 class USRPReaderAPI(USRPReader, threading.Thread):
71 71 return None
72 72
73 73 myMetadataSerial = obj2Serial(self.dataOut,
74 serializer = self.__serializer)
74 serializer=self.__serializer)
75 75
76 76 return myMetadataSerial
77 77
@@ -125,8 +125,8 class USRPReaderAPI(USRPReader, threading.Thread):
125 125 print(".", end=' ')
126 126
127 127 self.__mySerial = obj2Serial(self.dataOut,
128 keyList = self.__DATAKEYLIST,
129 serializer = self.__serializer)
128 keyList=self.__DATAKEYLIST,
129 serializer=self.__serializer)
130 130 self.__isBufferEmpty = False
131 131
132 132 # print self.profileIndex
@@ -136,4 +136,4 class USRPReaderAPI(USRPReader, threading.Thread):
136 136
137 137 print("Closing thread")
138 138
139 return
\ No newline at end of file
139 return
@@ -81,7 +81,7 class VoltageReader(JRODataReader, ProcessingUnit):
81 81 self.radarControllerHeaderObj = RadarControllerHeader()
82 82 self.processingHeaderObj = ProcessingHeader()
83 83 self.lastUTTime = 0
84 self.profileIndex = 2**32 - 1
84 self.profileIndex = 2 ** 32 - 1
85 85 self.dataOut = Voltage()
86 86 self.selBlocksize = None
87 87 self.selBlocktime = None
@@ -251,7 +251,7 class VoltageReader(JRODataReader, ProcessingUnit):
251 251
252 252 self.firstHeaderSize = self.basicHeaderObj.size
253 253
254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
255 255 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
256 256 if datatype == 0:
257 257 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -269,7 +269,7 class VoltageReader(JRODataReader, ProcessingUnit):
269 269 raise ValueError('Data type was not defined')
270 270
271 271 self.dtype = datatype_str
272 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
272 # self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
273 273 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
274 274 self.firstHeaderSize + self.basicHeaderSize * \
275 275 (self.processingHeaderObj.dataBlocksPerFile - 1)
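The datatype decoding touched above relies on the DATATYPE process flags being successive powers of two, so the bit position recovers a dtype index. A standalone sketch; the flag values below are hypothetical stand-ins chosen only to make the arithmetic visible, not the real PROCFLAG constants:

import numpy

# Hypothetical flag layout: one bit per data type, lowest bit = char (assumption).
DATATYPE_CHAR = 0x00000001
DATATYPE_SHORT = 0x00000002
DATATYPE_LONG = 0x00000004
DATATYPE_MASK = DATATYPE_CHAR | DATATYPE_SHORT | DATATYPE_LONG

processFlags = DATATYPE_SHORT   # pretend the header declares 16-bit samples
datatype = int(numpy.log2(processFlags & DATATYPE_MASK) - numpy.log2(DATATYPE_CHAR))
# datatype == 1, i.e. the second entry of the reader's complex dtype table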
@@ -448,7 +448,7 class VoltageReader(JRODataReader, ProcessingUnit):
448 448 blockIndex = self.selBlocksize - datasize
449 449 datablock1 = self.datablock[:, :blockIndex, :]
450 450
451 buffer[:, datasize:datasize +
451 buffer[:, datasize:datasize +
452 452 datablock1.shape[1], :] = datablock1
453 453 datasize += datablock1.shape[1]
454 454
@@ -480,7 +480,7 class VoltageWriter(JRODataWriter, Operation):
480 480
481 481 shapeBuffer = None
482 482
483 def __init__(self):#, **kwargs):
483 def __init__(self): # , **kwargs):
484 484 """
485 485 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
486 486
@@ -489,7 +489,7 class VoltageWriter(JRODataWriter, Operation):
489 489
490 490 Return: None
491 491 """
492 Operation.__init__(self)#, **kwargs)
492 Operation.__init__(self) # , **kwargs)
493 493
494 494 self.nTotalBlocks = 0
495 495
@@ -624,7 +624,7 class VoltageWriter(JRODataWriter, Operation):
624 624
625 625 dtype_width = self.getDtypeWidth()
626 626
627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
628 628 self.profilesPerBlock * dtype_width * 2)
629 629
630 630 return blocksize
@@ -673,4 +673,4 class VoltageWriter(JRODataWriter, Operation):
673 673 self.processingHeaderObj.processFlags = self.getProcessFlags()
674 674
675 675 self.setBasicHeader()
676
\ No newline at end of file
676
@@ -25,7 +25,7 FILE_HEADER_STRUCTURE = numpy.dtype([
25 25 ('navg', 'f'),
26 26 ('fh', 'f'),
27 27 ('dh', 'f'),
28 ('nheights', 'f'),
28 ('nheights', 'f'),
29 29 ('ipp', 'f')
30 30 ])
31 31
@@ -82,7 +82,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
82 82 path=None,
83 83 startDate=None,
84 84 endDate=None,
85 ext=None,
85 ext=None,
86 86 startTime=datetime.time(0, 0, 0),
87 87 endTime=datetime.time(23, 59, 59),
88 88 timezone=0,
@@ -146,7 +146,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
146 146 continue
147 147
148 148 year, month, day = int(year), int(month), int(day)
149 dateFile = datetime.date(year+2000, month, day)
149 dateFile = datetime.date(year + 2000, month, day)
150 150
151 151 if (startDate > dateFile) or (endDate < dateFile):
152 152 continue
@@ -259,7 +259,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
259 259 nheights = int(self.header_rec['nheights'])
260 260 hours = float(self.header_rec['hours'][0])
261 261 heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy - 1, hours=hours)
263 263 return heights, datatime
264 264 else:
265 265 return False
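The height grid and record time reconstructed above follow directly from the header fields; a small self-contained sketch with assumed header values:

import datetime
import numpy

nheights, dH, h0 = 100, 5.0, 200.0     # assumed header values (km)
year, doy, hours = 2020, 32, 13.5      # assumed record time

heights = numpy.arange(nheights) * dH + h0
datatime = datetime.datetime(year, 1, 1) + datetime.timedelta(days=doy - 1, hours=hours)
# doy=32, hours=13.5 -> 2020-02-01 13:30:00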
@@ -269,30 +269,30 class JULIAParamReader(JRODataReader, ProcessingUnit):
269 269 Parse data
270 270 '''
271 271
272 buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
272 buffer = numpy.fromfile(self.fp, 'f', 8 * N).reshape(N, 8)
273 273
274 274 pow0 = buffer[:, 0]
275 275 pow1 = buffer[:, 1]
276 acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
277 acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
278 dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
276 acf0 = (buffer[:, 2] + buffer[:, 3] * 1j) / pow0
277 acf1 = (buffer[:, 4] + buffer[:, 5] * 1j) / pow1
278 dccf = (buffer[:, 6] + buffer[:, 7] * 1j) / (pow0 * pow1)
279 279
280 ### SNR
280 # ## SNR
281 281 sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
282 282 sno10 = numpy.log10(sno)
283 283 # dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
284 284
285 ### Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
285 # ## Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0) * numpy.abs(acf1))
287 287 sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
288 288
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag,acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)
290 dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag, acf0.real + acf1.real) * 1.5E5 * 1.5 / (self.ipp * numpy.pi)
290 dvzo = numpy.sqrt(1.0 - sp * sp) * 0.338 * 1.5E5 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * sp * self.ipp)
291 291 err = numpy.where(dvzo <= 0.1)
292 292 dvzo[err] = 0.1
293 293
294 #Zonal Drifts
295 dt = self.header_file['nint']*self.ipp / 1.5E5
294 # Zonal Drifts
295 dt = self.header_file['nint'] * self.ipp / 1.5E5
296 296 coh = numpy.sqrt(numpy.abs(dccf))
297 297 err = numpy.where(coh >= 1.0)
298 298 coh[err] = numpy.sqrt(0.99999)
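The vertical drift estimate in the hunk above is the phase of the summed normalized ACFs of the two beams, scaled to velocity. A self-contained numpy sketch of the same expressions, with made-up inputs (ipp is taken in km and nint/navg are assumed header counts):

import numpy

ipp = 300.0                      # inter-pulse period expressed in km (assumption)
nint, navg = 10, 16              # header integration counts (assumption)

# Fake normalized ACFs for three heights.
acf0 = numpy.array([0.80 + 0.10j, 0.70 - 0.20j, 0.60 + 0.05j])
acf1 = numpy.array([0.75 + 0.12j, 0.68 - 0.18j, 0.58 + 0.02j])

sp = numpy.sqrt(numpy.abs(acf0) * numpy.abs(acf1))
sp[numpy.abs(sp) >= 1.0] = numpy.sqrt(0.9999)

vzo = -numpy.arctan2(acf0.imag + acf1.imag, acf0.real + acf1.real) * 1.5E5 * 1.5 / (ipp * numpy.pi)
dvzo = numpy.sqrt(1.0 - sp * sp) * 0.338 * 1.5E5 / (numpy.sqrt(nint * navg) * sp * ipp)
dvzo[dvzo <= 0.1] = 0.1          # floor the error estimate as the reader does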
@@ -300,8 +300,8 class JULIAParamReader(JRODataReader, ProcessingUnit):
300 300 err = numpy.where(coh <= 0.1)
301 301 coh[err] = numpy.sqrt(0.1)
302 302
303 vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)
304 dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
303 vxo = numpy.arctan2(dccf.imag, dccf.real) * self.header_rec['h0'] * 1.0E3 / (self.kd * dt)
304 dvxo = numpy.sqrt(1.0 - coh * coh) * self.header_rec['h0'] * 1.0E3 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * coh * self.kd * dt)
305 305
306 306 err = numpy.where(dvxo <= 0.1)
307 307 dvxo[err] = 0.1
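Likewise, the zonal drift above comes from the phase of the normalized cross-beam correlation dccf; a sketch with assumed constants (kd, h0, ipp and the integration counts are illustrative only):

import numpy

kd = 0.6                          # antenna separation factor (assumption)
h0 = 200.0                        # first height in km (assumption)
ipp, nint, navg = 300.0, 10, 16   # assumptions

dccf = numpy.array([0.40 + 0.20j, 0.35 - 0.15j, 0.30 + 0.10j])  # fake normalized DCCF
dt = nint * ipp / 1.5E5

coh = numpy.sqrt(numpy.abs(dccf))
coh[coh >= 1.0] = numpy.sqrt(0.99999)   # same clamping as in the reader
coh[coh <= 0.1] = numpy.sqrt(0.1)

vxo = numpy.arctan2(dccf.imag, dccf.real) * h0 * 1.0E3 / (kd * dt)
dvxo = numpy.sqrt(1.0 - coh * coh) * h0 * 1.0E3 / (numpy.sqrt(nint * navg) * coh * kd * dt)
dvxo[dvxo <= 0.1] = 0.1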
@@ -315,7 +315,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
315 315
316 316 self.dataOut.data_snr = self.buffer[4].reshape(1, -1)
317 317 self.dataOut.heightList = self.heights
318 self.dataOut.data_param = self.buffer[0:4,]
318 self.dataOut.data_param = self.buffer[0:4, ]
319 319 self.dataOut.utctimeInit = self.time
320 320 self.dataOut.utctime = self.time
321 321 self.dataOut.useLocalTime = True
@@ -340,4 +340,4 class JULIAParamReader(JRODataReader, ProcessingUnit):
340 340
341 341 self.set_output()
342 342
343 return 1
\ No newline at end of file
343 return 1
@@ -59,7 +59,7 class PXReader(JRODataReader, ProcessingUnit):
59 59 self.endDate = endDate
60 60 self.startTime = startTime
61 61 self.endTime = endTime
62 self.datatime = datetime.datetime(1900,1,1)
62 self.datatime = datetime.datetime(1900, 1, 1)
63 63 self.walk = walk
64 64 self.nTries = kwargs.get('nTries', 10)
65 65 self.online = kwargs.get('online', False)
@@ -65,7 +65,7 class BLTRParametersProc(ProcessingUnit):
65 65 self.dataOut.heightList = self.dataOut.height[0]
66 66 self.dataOut.data_snr = self.dataOut.data_snr[mode]
67 67 SNRavg = numpy.average(self.dataOut.data_snr, axis=0)
68 SNRavgdB = 10*numpy.log10(SNRavg)
68 SNRavgdB = 10 * numpy.log10(SNRavg)
69 69 self.dataOut.data_snr_avg_db = SNRavgdB.reshape(1, *SNRavgdB.shape)
70 70
71 71 # Censoring Data
@@ -114,29 +114,29 class OutliersFilter(Operation):
114 114 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
115 115 minvalid = npoints
116 116
117 #only if valid values greater than the minimum required (10%)
117 # only if valid values greater than the minimum required (10%)
118 118 if nvalues_valid > minvalid:
119 119
120 120 if method == 0:
121 #SMOOTH
121 # SMOOTH
122 122 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
123 123
124 124
125 125 if method == 1:
126 #MEDIAN
127 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
126 # MEDIAN
127 w = value_temp[h] - self.Median(input=value_temp[h], width=npoints)
128 128
129 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
129 dw = numpy.std(w[numpy.where(numpy.isfinite(w))], ddof=1)
130 130
131 threshold = dw*factor
132 value_temp[numpy.where(w > threshold),h] = numpy.nan
133 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
131 threshold = dw * factor
132 value_temp[numpy.where(w > threshold), h] = numpy.nan
133 value_temp[numpy.where(w < -1 * threshold), h] = numpy.nan
134 134
135 135
136 #At the end
136 # At the end
137 137 if svalue2 == 'inHeight':
138 138 value_temp = numpy.transpose(value_temp)
139 output_array[:,m] = value_temp
139 output_array[:, m] = value_temp
140 140
141 141 if svalue == 'zonal':
142 142 self.dataOut.data_output[0] = output_array
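The filter in the hunk above rejects points whose deviation from a smoothed (or median-filtered) profile exceeds factor times the standard deviation of the residual. A compact numpy version of that thresholding step, using a simple running median in place of the class's Smooth/Median helpers and an invented test series:

import numpy

factor = 2.0
series = numpy.array([1.0, 1.1, 0.9, 8.0, 1.05, 0.95, numpy.nan, 1.0])

# Residual against a width-3 nan-aware running median (stand-in for Smooth/Median).
med = numpy.array([numpy.nanmedian(series[max(i - 1, 0):i + 2]) for i in range(len(series))])
w = series - med

dw = numpy.std(w[numpy.isfinite(w)], ddof=1)
threshold = dw * factor

cleaned = series.copy()
cleaned[numpy.where(w > threshold)] = numpy.nan       # the 8.0 spike is flagged
cleaned[numpy.where(w < -1 * threshold)] = numpy.nan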
@@ -150,7 +150,7 class OutliersFilter(Operation):
150 150 return self.dataOut.data_output
151 151
152 152
153 def Median(self,input,width):
153 def Median(self, input, width):
154 154 '''
155 155 Inputs:
156 156 input - Velocity array
@@ -158,26 +158,26 class OutliersFilter(Operation):
158 158
159 159 '''
160 160
161 if numpy.mod(width,2) == 1:
161 if numpy.mod(width, 2) == 1:
162 162 pc = int((width - 1) / 2)
163 163 cont = 0
164 164 output = []
165 165
166 166 for i in range(len(input)):
167 167 if i >= pc and i < len(input) - pc:
168 new2 = input[i-pc:i+pc+1]
168 new2 = input[i - pc:i + pc + 1]
169 169 temp = numpy.where(numpy.isfinite(new2))
170 170 new = new2[temp]
171 171 value = numpy.median(new)
172 172 output.append(value)
173 173
174 174 output = numpy.array(output)
175 output = numpy.hstack((input[0:pc],output))
176 output = numpy.hstack((output,input[-pc:len(input)]))
175 output = numpy.hstack((input[0:pc], output))
176 output = numpy.hstack((output, input[-pc:len(input)]))
177 177
178 178 return output
179 179
180 def Smooth(self,input,width,edge_truncate = None):
180 def Smooth(self, input, width, edge_truncate=None):
181 181 '''
182 182 Inputs:
183 183 input - Velocity array
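For reference, the Median() helper shown above keeps the first and last pc points untouched and replaces the interior with a width-point, finite-value median; an equivalent standalone sketch:

import numpy

def running_median(x, width):
    # Odd window assumed, matching the width % 2 == 1 branch above.
    pc = (width - 1) // 2
    out = []
    for i in range(pc, len(x) - pc):
        window = x[i - pc:i + pc + 1]
        out.append(numpy.median(window[numpy.isfinite(window)]))
    out = numpy.array(out)
    return numpy.hstack((x[:pc], out, x[len(x) - pc:]))

vel = numpy.array([1., 2., 100., 3., 4., 5., 6.])
print(running_median(vel, 3))   # the spike at index 2 is pulled toward its neighbours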
@@ -186,17 +186,17 class OutliersFilter(Operation):
186 186
187 187 '''
188 188
189 if numpy.mod(width,2) == 0:
189 if numpy.mod(width, 2) == 0:
190 190 real_width = width + 1
191 191 nzeros = width / 2
192 192 else:
193 193 real_width = width
194 194 nzeros = (width - 1) / 2
195 195
196 half_width = int(real_width)/2
196 half_width = int(real_width) / 2
197 197 length = len(input)
198 198
199 gate = numpy.ones(real_width,dtype='float')
199 gate = numpy.ones(real_width, dtype='float')
200 200 norm_of_gate = numpy.sum(gate)
201 201
202 202 nan_process = 0
@@ -208,21 +208,21 class OutliersFilter(Operation):
208 208 input[nan_id] = 0.
209 209
210 210 if edge_truncate == True:
211 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
211 output = numpy.convolve(input / norm_of_gate, gate, mode='same')
212 212 elif edge_truncate == False or edge_truncate == None:
213 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
214 output = numpy.hstack((input[0:half_width],output))
215 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
213 output = numpy.convolve(input / norm_of_gate, gate, mode='valid')
214 output = numpy.hstack((input[0:half_width], output))
215 output = numpy.hstack((output, input[len(input) - half_width:len(input)]))
216 216
217 217 if nan_process:
218 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
219 pb = numpy.hstack((numpy.zeros(half_width),pb))
220 pb = numpy.hstack((pb,numpy.zeros(half_width)))
218 pb = numpy.convolve(pb / norm_of_gate, gate, mode='valid')
219 pb = numpy.hstack((numpy.zeros(half_width), pb))
220 pb = numpy.hstack((pb, numpy.zeros(half_width)))
221 221 output[numpy.where(pb > 0.9999)] = numpy.nan
222 222 input[nan_id] = numpy.nan
223 223 return output
224 224
225 def Average(self,aver=0,nhaver=1):
225 def Average(self, aver=0, nhaver=1):
226 226 '''
227 227 Inputs:
228 228 aver - Indicates the time period over which is averaged or consensus data
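The Smooth() method above is a normalized boxcar convolution; NaNs are zeroed before convolving, and a parallel convolution of the NaN mask re-flags windows that were (almost) entirely gaps. A minimal sketch of the same idea, with the normalization folded into the gate:

import numpy

def boxcar_smooth(x, width):
    x = x.astype('float64').copy()
    gate = numpy.ones(width) / width
    nan_id = numpy.isnan(x)
    mask = nan_id.astype('float64')
    x[nan_id] = 0.0

    half = width // 2
    core = numpy.convolve(x, gate, mode='valid')
    out = numpy.hstack((x[:half], core, x[len(x) - half:]))   # edges left untouched

    pb = numpy.convolve(mask, gate, mode='valid')
    pb = numpy.hstack((numpy.zeros(half), pb, numpy.zeros(half)))
    out[pb > 0.9999] = numpy.nan      # window was dominated by NaNs
    return out

print(boxcar_smooth(numpy.array([1., 2., numpy.nan, 4., 5.]), 3))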
@@ -235,27 +235,27 class OutliersFilter(Operation):
235 235 lat_huancayo = -12.04
236 236 lat_porcuya = -5.8
237 237
238 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
238 if '%2.2f' % self.dataOut.lat == '%2.2f' % lat_piura:
239 239 hcm = 3.
240 240 if self.dataOut.year == 2003 :
241 241 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
242 242 nhpoints = 12
243 243
244 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
244 elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_huancayo:
245 245 hcm = 3.
246 246 if self.dataOut.year == 2003 :
247 247 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
248 248 nhpoints = 12
249 249
250 250
251 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
252 hcm = 5.#2
251 elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_porcuya:
252 hcm = 5. # 2
253 253
254 254 pdata = 0.2
255 taver = [1,2,3,4,6,8,12,24]
255 taver = [1, 2, 3, 4, 6, 8, 12, 24]
256 256 t0 = 0
257 257 tf = 24
258 ntime =(tf-t0)/taver[aver]
258 ntime = (tf - t0) / taver[aver]
259 259 ti = numpy.arange(ntime)
260 260 tf = numpy.arange(ntime) + taver[aver]
261 261
@@ -263,11 +263,11 class OutliersFilter(Operation):
263 263 old_height = self.dataOut.heightList
264 264
265 265 if nhaver > 1:
266 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
267 deltha = 0.05*nhaver
268 minhvalid = pdata*nhaver
266 num_hei = len(self.dataOut.heightList) / nhaver / self.dataOut.nmodes
267 deltha = 0.05 * nhaver
268 minhvalid = pdata * nhaver
269 269 for im in range(self.dataOut.nmodes):
270 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
270 new_height = numpy.arange(num_hei) * deltha + self.dataOut.height[im, 0] + deltha / 2.
271 271
272 272
273 273 data_fHeigths_List = []
@@ -280,8 +280,8 class OutliersFilter(Operation):
280 280 for i in range(ntime):
281 281 height = old_height
282 282
283 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
284 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
283 start = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(ti[i])) - datetime.timedelta(hours=5)
284 stop = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(tf[i])) - datetime.timedelta(hours=5)
285 285
286 286
287 287 limit_sec1 = time.mktime(start.timetuple())
@@ -295,17 +295,17 class OutliersFilter(Operation):
295 295 time_select.append(val_sec)
296 296
297 297
298 time_select = numpy.array(time_select,dtype = 'int')
299 minvalid = numpy.ceil(pdata*nhpoints)
298 time_select = numpy.array(time_select, dtype='int')
299 minvalid = numpy.ceil(pdata * nhpoints)
300 300
301 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
302 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
303 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
301 zon_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
302 mer_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
303 ver_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
304 304
305 305 if nhaver > 1:
306 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
307 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
308 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
306 new_zon_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
307 new_mer_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
308 new_ver_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
309 309
310 310 if len(time_select) > minvalid:
311 311 time_average = self.f_timesec[time_select]
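The consensus averaging above only fills a height/mode cell when at least minvalid finite samples fall inside the time window, and then divides a nansum by the finite-sample count. The same pattern in isolation, with invented numbers:

import numpy

pdata = 0.2
nhpoints = 12
minvalid = numpy.ceil(pdata * nhpoints)      # minimum finite samples required

samples = numpy.array([10., 11., numpy.nan, 9.5, numpy.nan, 10.5])   # one cell's series
nfinite = numpy.sum(numpy.isfinite(samples))

if nfinite >= minvalid:
    cell_average = numpy.nansum(samples) / nfinite   # 41.0 / 4 = 10.25
else:
    cell_average = numpy.nan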
@@ -313,27 +313,27 class OutliersFilter(Operation):
313 313 for im in range(self.dataOut.nmodes):
314 314
315 315 for ih in range(self.dataOut.nranges):
316 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
317 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
316 if numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im])) >= minvalid:
317 zon_aver[ih, im] = numpy.nansum(self.f_zon[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im]))
318 318
319 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
320 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
319 if numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im])) >= minvalid:
320 mer_aver[ih, im] = numpy.nansum(self.f_mer[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im]))
321 321
322 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
323 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
322 if numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im])) >= minvalid:
323 ver_aver[ih, im] = numpy.nansum(self.f_ver[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im]))
324 324
325 325 if nhaver > 1:
326 326 for ih in range(num_hei):
327 hvalid = numpy.arange(nhaver) + nhaver*ih
327 hvalid = numpy.arange(nhaver) + nhaver * ih
328 328
329 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
330 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
329 if numpy.sum(numpy.isfinite(zon_aver[hvalid, im])) >= minvalid:
330 new_zon_aver[ih, im] = numpy.nansum(zon_aver[hvalid, im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid, im]))
331 331
332 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
333 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
332 if numpy.sum(numpy.isfinite(mer_aver[hvalid, im])) >= minvalid:
333 new_mer_aver[ih, im] = numpy.nansum(mer_aver[hvalid, im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid, im]))
334 334
335 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
336 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
335 if numpy.sum(numpy.isfinite(ver_aver[hvalid, im])) >= minvalid:
336 new_ver_aver[ih, im] = numpy.nansum(ver_aver[hvalid, im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid, im]))
337 337 if nhaver > 1:
338 338 zon_aver = new_zon_aver
339 339 mer_aver = new_mer_aver
@@ -352,7 +352,7 class OutliersFilter(Operation):
352 352 minute = startTime.tm_min
353 353 second = startTime.tm_sec
354 354
355 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
355 startDTList.append(datetime.datetime(year, month, day, hour, minute, second))
356 356
357 357
358 358 o_height = numpy.array([])
@@ -363,17 +363,17 class OutliersFilter(Operation):
363 363 for im in range(self.dataOut.nmodes):
364 364
365 365 if im == 0:
366 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
366 h_select = numpy.where(numpy.bitwise_and(height[0, :] >= 0, height[0, :] <= hcm, numpy.isfinite(height[0, :])))
367 367 else:
368 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
368 h_select = numpy.where(numpy.bitwise_and(height[1, :] > hcm, height[1, :] < 20, numpy.isfinite(height[1, :])))
369 369
370 370
371 371 ht = h_select[0]
372 372
373 o_height = numpy.hstack((o_height,height[im,ht]))
374 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
375 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
376 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
373 o_height = numpy.hstack((o_height, height[im, ht]))
374 o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
375 o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
376 o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
377 377
378 378 data_fHeigths_List.append(o_height)
379 379 data_fZonal_List.append(o_zon_aver)
@@ -382,12 +382,12 class OutliersFilter(Operation):
382 382
383 383
384 384 else:
385 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
385 h_select = numpy.where(numpy.bitwise_and(height[0, :] <= hcm, numpy.isfinite(height[0, :])))
386 386 ht = h_select[0]
387 o_height = numpy.hstack((o_height,height[im,ht]))
388 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
389 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
390 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
387 o_height = numpy.hstack((o_height, height[im, ht]))
388 o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
389 o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
390 o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
391 391
392 392 data_fHeigths_List.append(o_height)
393 393 data_fZonal_List.append(o_zon_aver)
@@ -24,14 +24,14 class PrintInfoAMISR(Operation):
24 24 def run(self, dataOut):
25 25
26 26 if not self.__isPrinted:
27 print('Number of Records by File: %d'%dataOut.nRecords)
28 print('Number of Pulses: %d'%dataOut.nProfiles)
29 print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
30 print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
31 print('Ipp Seconds: %f'%dataOut.ippSeconds)
32 print('Number of Beams: %d'%dataOut.nBeams)
27 print('Number of Records by File: %d' % dataOut.nRecords)
28 print('Number of Pulses: %d' % dataOut.nProfiles)
29 print('Number of Pulses by Frame: %d' % dataOut.npulseByFrame)
30 print('Number of Samples by Pulse: %d' % len(dataOut.heightList))
31 print('Ipp Seconds: %f' % dataOut.ippSeconds)
32 print('Number of Beams: %d' % dataOut.nBeams)
33 33 print('BeamCodes:')
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f' % (k, v[0], v[1], v[2], v[3]) for k, v in list(dataOut.beamCodeDict.items())]
35 35 for b in beamStrList:
36 36 print(b)
37 37 self.__isPrinted = True
@@ -119,7 +119,7 class ProfileToChannels(Operation):
119 119 if not(self.__isConfig):
120 120 nchannels = len(list(dataOut.beamRangeDict.keys()))
121 121 nsamples = dataOut.nHeights
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype='complex128')
123 123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
124 124 dataOut.beam.azimuthList = [dataOut.beamCodeDict[x][1] for x in range(nchannels)]
125 125 dataOut.beam.zenithList = [dataOut.beamCodeDict[x][2] for x in range(nchannels)]
@@ -127,7 +127,7 class ProfileToChannels(Operation):
127 127
128 128 for i in range(self.buffer.shape[0]):
129 129 if dataOut.profileIndex in dataOut.beamRangeDict[i]:
130 self.buffer[i,:] = dataOut.data
130 self.buffer[i, :] = dataOut.data
131 131 break
132 132
133 133
@@ -140,4 +140,4 class ProfileToChannels(Operation):
140 140 self.__isConfig = False
141 141 dataOut.flagNoData = False
142 142 pass
143
\ No newline at end of file
143
@@ -198,6 +198,6 def MPDecorator(BaseClass):
198 198 def close(self):
199 199
200 200 BaseClass.close(self)
201 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
201 log.success('Done...(Time:{:4.2f} secs)'.format(time.time() - self.start_time), self.name)
202 202
203 203 return MPClass
@@ -30,7 +30,7 class CorrelationProc(ProcessingUnit):
30 30 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
31 31 self.dataOut.channelList = self.dataIn.channelList
32 32 self.dataOut.heightList = self.dataIn.heightList
33 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
34 34 # self.dataOut.nHeights = self.dataIn.nHeights
35 35 # self.dataOut.nChannels = self.dataIn.nChannels
36 36 self.dataOut.nBaud = self.dataIn.nBaud
@@ -39,8 +39,8 class CorrelationProc(ProcessingUnit):
39 39 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
40 40 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
41 41 self.dataOut.utctime = self.firstdatatime
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # asumo q la data esta decodificada
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # asumo q la data esta sin flip
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 # self.dataOut.nIncohInt = 1
46 46 self.dataOut.ippSeconds = self.dataIn.ippSeconds
@@ -56,39 +56,39 class CorrelationProc(ProcessingUnit):
56 56 nChannel = jspectra.shape[0]
57 57
58 58 for i in range(nChannel):
59 jspectra_tmp = jspectra[i,:,:]
60 jspectra_DC = numpy.mean(jspectra_tmp,axis = 0)
59 jspectra_tmp = jspectra[i, :, :]
60 jspectra_DC = numpy.mean(jspectra_tmp, axis=0)
61 61
62 62 jspectra_tmp = jspectra_tmp - jspectra_DC
63 jspectra[i,:,:] = jspectra_tmp
63 jspectra[i, :, :] = jspectra_tmp
64 64
65 65 return jspectra
66 66
67 67
68 def removeNoise(self, mode = 2):
68 def removeNoise(self, mode=2):
69 69 indR = numpy.where(self.dataOut.lagR == 0)[0][0]
70 70 indT = numpy.where(self.dataOut.lagT == 0)[0][0]
71 71
72 jspectra = self.dataOut.data_corr[:,:,indR,:]
72 jspectra = self.dataOut.data_corr[:, :, indR, :]
73 73
74 74 num_chan = jspectra.shape[0]
75 75 num_hei = jspectra.shape[2]
76 76
77 77 freq_dc = indT
78 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
78 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
79 79
80 80 NPot = self.dataOut.getNoise(mode)
81 jspectra[:,freq_dc,:] = jspectra[:,freq_dc,:] - NPot
82 SPot = jspectra[:,freq_dc,:]
81 jspectra[:, freq_dc, :] = jspectra[:, freq_dc, :] - NPot
82 SPot = jspectra[:, freq_dc, :]
83 83 pairsAutoCorr = self.dataOut.getPairsAutoCorr()
84 84 # self.dataOut.signalPotency = SPot
85 85 self.dataOut.noise = NPot
86 self.dataOut.SNR = (SPot/NPot)[pairsAutoCorr]
87 self.dataOut.data_corr[:,:,indR,:] = jspectra
86 self.dataOut.SNR = (SPot / NPot)[pairsAutoCorr]
87 self.dataOut.data_corr[:, :, indR, :] = jspectra
88 88
89 89 return 1
90 90
91 def run(self, lags=None, mode = 'time', pairsList=None, fullBuffer=False, nAvg = 1, removeDC = False, splitCF=False):
91 def run(self, lags=None, mode='time', pairsList=None, fullBuffer=False, nAvg=1, removeDC=False, splitCF=False):
92 92
93 93 self.dataOut.flagNoData = True
94 94
@@ -118,19 +118,19 class CorrelationProc(ProcessingUnit):
118 118 # acf_pairs = numpy.arange(len(ccfList),len(pairsList))
119 119 self.__updateObjFromVoltage()
120 120 #----------------------------------------------------------------------
121 #Creating temporal buffers
121 # Creating temporal buffers
122 122 if fullBuffer:
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype = 'complex')*numpy.nan
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype='complex') * numpy.nan
124 124 elif mode == 'time':
125 125 if lags == None:
126 lags = numpy.arange(-nProfiles+1, nProfiles)
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights),dtype='complex')
126 lags = numpy.arange(-nProfiles + 1, nProfiles)
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights), dtype='complex')
128 128 elif mode == 'height':
129 129 if lags == None:
130 lags = numpy.arange(-nHeights+1, nHeights)
131 tmp = numpy.zeros(len(pairsList), (len(lags), nProfiles),dtype='complex')
130 lags = numpy.arange(-nHeights + 1, nHeights)
131 tmp = numpy.zeros(len(pairsList), (len(lags), nProfiles), dtype='complex')
132 132
133 #For loop
133 # For loop
134 134 for l in range(len(pairsList)):
135 135
136 136 ch0 = pairsList[l][0]
@@ -141,23 +141,23 class CorrelationProc(ProcessingUnit):
141 141
142 142 if idx >= 0:
143 143 if mode == 'time':
144 ccf0 = data_pre[ch0,:nProfiles-idx,:]*numpy.conj(data_pre[ch1,idx:,:]) #time
144 ccf0 = data_pre[ch0, :nProfiles - idx, :] * numpy.conj(data_pre[ch1, idx:, :]) # time
145 145 else:
146 ccf0 = data_pre[ch0,:,nHeights-idx]*numpy.conj(data_pre[ch1,:,idx:]) #heights
146 ccf0 = data_pre[ch0, :, nHeights - idx] * numpy.conj(data_pre[ch1, :, idx:]) # heights
147 147 else:
148 148 if mode == 'time':
149 ccf0 = data_pre[ch0,-idx:,:]*numpy.conj(data_pre[ch1,:nProfiles+idx,:]) #time
149 ccf0 = data_pre[ch0, -idx:, :] * numpy.conj(data_pre[ch1, :nProfiles + idx, :]) # time
150 150 else:
151 ccf0 = data_pre[ch0,:,-idx:]*numpy.conj(data_pre[ch1,:,:nHeights+idx]) #heights
151 ccf0 = data_pre[ch0, :, -idx:] * numpy.conj(data_pre[ch1, :, :nHeights + idx]) # heights
152 152
153 153 if fullBuffer:
154 tmp[l,i,:ccf0.shape[0],:] = ccf0
154 tmp[l, i, :ccf0.shape[0], :] = ccf0
155 155 else:
156 tmp[l,i,:] = numpy.sum(ccf0, axis=0)
156 tmp[l, i, :] = numpy.sum(ccf0, axis=0)
157 157
158 158 #-----------------------------------------------------------------
159 159 if fullBuffer:
160 tmp = numpy.sum(numpy.reshape(tmp,(tmp.shape[0],tmp.shape[1],tmp.shape[2]/nAvg,nAvg,tmp.shape[3])),axis=3)
160 tmp = numpy.sum(numpy.reshape(tmp, (tmp.shape[0], tmp.shape[1], tmp.shape[2] / nAvg, nAvg, tmp.shape[3])), axis=3)
161 161 self.dataOut.nAvg = nAvg
162 162
163 163 self.dataOut.data_cf = tmp
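The time-lag branch above multiplies one channel by the conjugate of the other shifted by idx profiles and sums over profiles; the sign of the lag decides which channel gets shifted. A toy numpy sketch of a single positive lag, with random data standing in for the voltage buffer:

import numpy

nProfiles, nHeights = 8, 4
rng = numpy.random.default_rng(0)
data_pre = (rng.standard_normal((2, nProfiles, nHeights))
            + 1j * rng.standard_normal((2, nProfiles, nHeights)))

ch0, ch1, idx = 0, 1, 2                      # positive time lag of 2 profiles
ccf0 = data_pre[ch0, :nProfiles - idx, :] * numpy.conj(data_pre[ch1, idx:, :])
ccf = numpy.sum(ccf0, axis=0)                # one complex value per height
print(ccf.shape)                             # (4,)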
@@ -166,12 +166,12 class CorrelationProc(ProcessingUnit):
166 166 self.dataOut.pairsList = pairsList
167 167 self.dataOut.nPairs = len(pairsList)
168 168
169 #Se Calcula los factores de Normalizacion
169 # Se Calcula los factores de Normalizacion
170 170 if mode == 'time':
171 delta = self.dataIn.ippSeconds*self.dataIn.nCohInt
171 delta = self.dataIn.ippSeconds * self.dataIn.nCohInt
172 172 else:
173 173 delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
174 self.dataOut.lagRange = numpy.array(lags)*delta
174 self.dataOut.lagRange = numpy.array(lags) * delta
175 175 # self.dataOut.nCohInt = self.dataIn.nCohInt*nAvg
176 176 self.dataOut.flagNoData = False
177 177 # a = self.dataOut.normFactor
@@ -8,9 +8,9 from schainpy.utils import log
8 8
9 9 class SpectraHeisProc(ProcessingUnit):
10 10
11 def __init__(self):#, **kwargs):
11 def __init__(self): # , **kwargs):
12 12
13 ProcessingUnit.__init__(self)#, **kwargs)
13 ProcessingUnit.__init__(self) # , **kwargs)
14 14
15 15 # self.buffer = None
16 16 # self.firstdatatime = None
@@ -24,12 +24,12 class SpectraHeisProc(ProcessingUnit):
24 24 self.dataOut.errorCount = self.dataIn.errorCount
25 25 self.dataOut.useLocalTime = self.dataIn.useLocalTime
26 26
27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy() #
28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy() #
29 29 self.dataOut.channelList = self.dataIn.channelList
30 30 self.dataOut.heightList = self.dataIn.heightList
31 31 # self.dataOut.dtype = self.dataIn.dtype
32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
32 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
33 33 # self.dataOut.nHeights = self.dataIn.nHeights
34 34 # self.dataOut.nChannels = self.dataIn.nChannels
35 35 self.dataOut.nBaud = self.dataIn.nBaud
@@ -45,8 +45,8 class SpectraHeisProc(ProcessingUnit):
45 45 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
46 46 self.dataOut.utctime = self.dataIn.utctime
47 47 # self.dataOut.utctime = self.firstdatatime
48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # asumo q la data esta decodificada
49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # asumo q la data esta sin flip
50 50 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
51 51 self.dataOut.nCohInt = self.dataIn.nCohInt
52 52 self.dataOut.nIncohInt = 1
@@ -78,8 +78,8 class SpectraHeisProc(ProcessingUnit):
78 78 def __getFft(self):
79 79
80 80 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
81 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
81 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / (self.dataOut.nFFTPoints)
83 83 self.dataOut.data_spc = spc
84 84
85 85 def run(self):
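The __getFft() step above is a per-channel FFT along the sample axis, an fftshift, and normalization to power; equivalently, with random stand-in data:

import numpy

nChannels, nFFTPoints = 2, 64
rng = numpy.random.default_rng(1)
data = (rng.standard_normal((nChannels, nFFTPoints))
        + 1j * rng.standard_normal((nChannels, nFFTPoints)))

fft_volt = numpy.fft.fft(data, axis=1)
fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / nFFTPoints   # power spectrum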
@@ -102,7 +102,7 class SpectraHeisProc(ProcessingUnit):
102 102
103 103 return
104 104
105 raise ValueError("The type object %s is not valid"%(self.dataIn.type))
105 raise ValueError("The type object %s is not valid" % (self.dataIn.type))
106 106
107 107
108 108 def selectChannels(self, channelList):
@@ -136,9 +136,9 class SpectraHeisProc(ProcessingUnit):
136 136
137 137 for channelIndex in channelIndexList:
138 138 if channelIndex not in self.dataOut.channelIndexList:
139 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
139 raise ValueError("The value %d in channelIndexList is not valid" % channelIndex)
140 140
141 data_spc = self.dataOut.data_spc[channelIndexList,:]
141 data_spc = self.dataOut.data_spc[channelIndexList, :]
142 142
143 143 self.dataOut.data_spc = data_spc
144 144 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
@@ -151,7 +151,7 class IncohInt4SpectraHeis(Operation):
151 151 isConfig = False
152 152
153 153 __profIndex = 0
154 __withOverapping = False
154 __withOverapping = False
155 155
156 156 __byTime = False
157 157 __initime = None
@@ -164,9 +164,9 class IncohInt4SpectraHeis(Operation):
164 164
165 165 n = None
166 166
167 def __init__(self):#, **kwargs):
167 def __init__(self): # , **kwargs):
168 168
169 Operation.__init__(self)#, **kwargs)
169 Operation.__init__(self) # , **kwargs)
170 170 # self.isConfig = False
171 171
172 172 def setup(self, n=None, timeInterval=None, overlapping=False):
@@ -194,7 +194,7 class IncohInt4SpectraHeis(Operation):
194 194 self.n = n
195 195 self.__byTime = False
196 196 else:
197 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
197 self.__integrationtime = timeInterval # * 60. #if (type(timeInterval)!=integer) -> change this line
198 198 self.n = 9999
199 199 self.__byTime = True
200 200
@@ -219,25 +219,25 class IncohInt4SpectraHeis(Operation):
219 219 self.__profIndex += 1
220 220 return
221 221
222 #Overlapping data
222 # Overlapping data
223 223 nChannels, nHeis = data.shape
224 224 data = numpy.reshape(data, (1, nChannels, nHeis))
225 225
226 #If the buffer is empty then it takes the data value
226 # If the buffer is empty then it takes the data value
227 227 if self.__buffer is None:
228 228 self.__buffer = data
229 229 self.__profIndex += 1
230 230 return
231 231
232 #If the buffer length is lower than n then stakcing the data value
232 # If the buffer length is lower than n then stakcing the data value
233 233 if self.__profIndex < self.n:
234 234 self.__buffer = numpy.vstack((self.__buffer, data))
235 235 self.__profIndex += 1
236 236 return
237 237
238 #If the buffer length is equal to n then replacing the last buffer value with the data value
238 # If the buffer length is equal to n then replacing the last buffer value with the data value
239 239 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
240 self.__buffer[self.n-1] = data
240 self.__buffer[self.n - 1] = data
241 241 self.__profIndex = self.n
242 242 return
243 243
@@ -261,7 +261,7 class IncohInt4SpectraHeis(Operation):
261 261
262 262 return data, n
263 263
264 #Integration with Overlapping
264 # Integration with Overlapping
265 265 data = numpy.sum(self.__buffer, axis=0)
266 266 n = self.__profIndex
267 267
@@ -315,7 +315,7 class IncohInt4SpectraHeis(Operation):
315 315
316 316 avgdatatime = self.__initime
317 317
318 deltatime = datatime -self.__lastdatatime
318 deltatime = datatime - self.__lastdatatime
319 319
320 320 if not self.__withOverapping:
321 321 self.__initime = datatime
@@ -344,4 +344,4 class IncohInt4SpectraHeis(Operation):
344 344 # dataOut.timeInterval = self.__timeInterval*self.n
345 345 dataOut.flagNoData = False
346 346
347 return dataOut
\ No newline at end of file
347 return dataOut
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff