Update2 for EW-Drifts
Percy Condor
r1383:3e971ac8dea1
@@ -129,7 +129,7 class SchainConfigure():
129 129 __SCHAIN_SENDER_EMAIL = "MAILSERVER_ACCOUNT"
130 130 __SCHAIN_SENDER_PASS = "MAILSERVER_PASSWORD"
131 131
132 def __init__(self, initFile = None):
132 def __init__(self, initFile=None):
133 133
134 134 # Set configuration file
135 135 if (initFile == None):
@@ -251,7 +251,7 class SchainNotify:
251 251 Written by "Miguel Urco":mailto:miguel.urco@jro.igp.gob.pe Dec. 1, 2015
252 252 """
253 253
254 #constants
254 # constants
255 255
256 256 def __init__(self):
257 257 """__init__ initializes SchainNotify by getting some basic information from SchainDB and SchainSite.
@@ -275,7 +275,7 class SchainNotify:
275 275 self.__emailFromAddress = confObj.getSenderEmail()
276 276 self.__emailPass = confObj.getSenderPass()
277 277 self.__emailToAddress = confObj.getAdminEmail()
278 self.__emailServer = confObj.getEmailServer()
278 self.__emailServer = confObj.getEmailServer()
279 279
280 280 def sendEmail(self, email_from, email_to, subject='Error running ...', message="", subtitle="", filename="", html_format=True):
281 281
@@ -297,7 +297,7 class SchainNotify:
297 297 msg.preamble = 'SChainPy'
298 298
299 299 if html_format:
300 message = "<h1> %s </h1>" %subject + "<h3>" + subtitle.replace("\n", "</h3><h3>\n") + "</h3>" + message.replace("\n", "<br>\n")
300 message = "<h1> %s </h1>" % subject + "<h3>" + subtitle.replace("\n", "</h3><h3>\n") + "</h3>" + message.replace("\n", "<br>\n")
301 301 message = "<html>\n" + message + '</html>'
302 302
303 303 # This is the textual part:
@@ -310,8 +310,8 class SchainNotify:
310 310
311 311 if filename and os.path.isfile(filename):
312 312 # This is the binary part(The Attachment):
313 part = MIMEApplication(open(filename,"rb").read())
314 part.add_header('Content-Disposition',
313 part = MIMEApplication(open(filename, "rb").read())
314 part.add_header('Content-Disposition',
315 315 'attachment',
316 316 filename=os.path.basename(filename))
317 317 msg.attach(part)
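
For reference, the attachment handling in this hunk follows the standard library's email.mime pattern. A minimal standalone sketch of the same idea (the server, addresses and file path below are placeholder assumptions, not values from this project):

    import os
    import smtplib
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText
    from email.mime.application import MIMEApplication

    def send_report(server, sender, recipient, subject, body, filename=None):
        # Multipart container: plain-text body plus an optional binary attachment
        msg = MIMEMultipart()
        msg['Subject'] = subject
        msg['From'] = sender
        msg['To'] = recipient
        msg.attach(MIMEText(body))

        if filename and os.path.isfile(filename):
            with open(filename, 'rb') as fp:
                part = MIMEApplication(fp.read())
            # Content-Disposition marks the part as a downloadable attachment
            part.add_header('Content-Disposition', 'attachment',
                            filename=os.path.basename(filename))
            msg.attach(part)

        with smtplib.SMTP(server) as smtp:   # server/credentials are assumed to exist
            smtp.send_message(msg)
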
@@ -342,7 +342,7 class SchainNotify:
342 342
343 343 return 1
344 344
345 def sendAlert(self, message, subject = "", subtitle="", filename=""):
345 def sendAlert(self, message, subject="", subtitle="", filename=""):
346 346 """sendAlert sends an email with the given message and optional title.
347 347
348 348 Inputs: message (string), and optional title (string)
@@ -357,14 +357,14 class SchainNotify:
357 357 if not self.__emailToAddress:
358 358 return 0
359 359
360 print("***** Sending alert to %s *****" %self.__emailToAddress)
360 print("***** Sending alert to %s *****" % self.__emailToAddress)
361 361 # set up message
362 362
363 sent=self.sendEmail(email_from=self.__emailFromAddress,
363 sent = self.sendEmail(email_from=self.__emailFromAddress,
364 364 email_to=self.__emailToAddress,
365 365 subject=subject,
366 366 message=message,
367 subtitle=subtitle,
367 subtitle=subtitle,
368 368 filename=filename)
369 369
370 370 if not sent:
@@ -372,7 +372,7 class SchainNotify:
372 372
373 373 return 1
374 374
375 def notify(self, email, message, subject = "", subtitle="", filename=""):
375 def notify(self, email, message, subject="", subtitle="", filename=""):
376 376 """notify sends an email with the given message and title to email.
377 377
378 378 Inputs: email (string), message (string), and subject (string)
@@ -392,7 +392,7 class SchainNotify:
392 392 email_to=email,
393 393 subject=subject,
394 394 message=message,
395 subtitle=subtitle,
395 subtitle=subtitle,
396 396 filename=filename
397 397 )
398 398
@@ -502,4 +502,4 if __name__ == '__main__':
502 502
503 503 test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify')
504 504
505 print('Hopefully message sent - check.') No newline at end of file
505 print('Hopefully message sent - check.')
@@ -161,7 +161,7 def search(nextcommand):
161 161 except Exception as e:
162 162 log.error('Module `{}` does not exists'.format(nextcommand), '')
163 163 allModules = getAll()
164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1] > 80]
165 165 log.success('Possible modules are: {}'.format(', '.join(similar)), '')
166 166
167 167 def runschain(nextcommand):
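
The suggestion logic in this hunk scores candidate module names with fuzzy string matching. A small sketch of the same pattern, assuming the fuzzywuzzy `process` module this code imports and a made-up module list:

    from fuzzywuzzy import process

    all_modules = ['VoltageReader', 'SpectraReader', 'SpectraPlot', 'RTIPlot']  # assumed sample list
    query = 'SpectraRaeder'  # a misspelled module name typed by a user

    # process.extract returns (candidate, score) pairs with scores from 0 to 100;
    # only close matches (score > 80) are suggested
    similar = [name for name, score in process.extract(query, all_modules, limit=12) if score > 80]
    print('Possible modules are: {}'.format(', '.join(similar)))
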
@@ -203,7 +203,7 class ProcUnitConf(ConfBase):
203 203
204 204 def removeOperation(self, id):
205 205
206 i = [1 if x.id==id else 0 for x in self.operations]
206 i = [1 if x.id == id else 0 for x in self.operations]
207 207 self.operations.pop(i.index(1))
208 208
209 209 def getOperation(self, id):
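
removeOperation in this hunk locates the matching operation by building an indicator list and popping the first 1. An equivalent, arguably clearer idiom, sketched with a stand-in Operation class rather than the project's own:

    class Operation:
        def __init__(self, id):
            self.id = id

    def remove_operation(operations, id):
        # Drop the first operation whose id matches
        idx = next(i for i, op in enumerate(operations) if op.id == id)
        operations.pop(idx)

    ops = [Operation(1), Operation(2), Operation(3)]
    remove_operation(ops, 2)
    print([op.id for op in ops])   # [1, 3]
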
@@ -293,12 +293,12 class ReadUnitConf(ProcUnitConf):
293 293 if name == None:
294 294 if 'Reader' in datatype:
295 295 name = datatype
296 datatype = name.replace('Reader','')
296 datatype = name.replace('Reader', '')
297 297 else:
298 298 name = '{}Reader'.format(datatype)
299 299 if datatype == None:
300 300 if 'Reader' in name:
301 datatype = name.replace('Reader','')
301 datatype = name.replace('Reader', '')
302 302 else:
303 303 datatype = name
304 304 name = '{}Reader'.format(name)
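
The branch above fills in whichever of name/datatype was omitted by adding or stripping the 'Reader' suffix. A compact standalone sketch of that rule:

    def resolve_reader(name=None, datatype=None):
        # Whichever argument is missing is derived from the other via the 'Reader' suffix
        if name is None:
            if 'Reader' in datatype:
                name, datatype = datatype, datatype.replace('Reader', '')
            else:
                name = '{}Reader'.format(datatype)
        if datatype is None:
            if 'Reader' in name:
                datatype = name.replace('Reader', '')
            else:
                name, datatype = '{}Reader'.format(name), name
        return name, datatype

    print(resolve_reader(datatype='Voltage'))    # ('VoltageReader', 'Voltage')
    print(resolve_reader(name='SpectraReader'))  # ('SpectraReader', 'Spectra')
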
@@ -572,7 +572,7 class Project(Process):
572 572 if '#_start_#' in msg:
573 573 procs += 1
574 574 elif '#_end_#' in msg:
575 procs -=1
575 procs -= 1
576 576 else:
577 577 err_msg = msg
578 578
@@ -617,7 +617,7 class Project(Process):
617 617 subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
618 618
619 619 a = Alarm(
620 modes=self.alarm,
620 modes=self.alarm,
621 621 email=self.email,
622 622 message=message,
623 623 subject=subject,
@@ -656,4 +656,4 class Project(Process):
656 656 self.runProcs()
657 657 log.success('{} Done (Time: {:4.2f}s)'.format(
658 658 self.name,
659 time.time()-self.start_time), '')
659 time.time() - self.start_time), '')
@@ -12,49 +12,49 import datetime
12 12 SPEED_OF_LIGHT = 299792458
13 13 SPEED_OF_LIGHT = 3e8
14 14
15 FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
16 ('FileMgcNumber','<u4'), #0x23020100
17 ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more)
18 ('RadarUnitId','<u4'),
19 ('SiteName','<s32'), #Null terminated
15 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
16 ('FileMgcNumber', '<u4'), # 0x23020100
17 ('nFDTdataRecors', '<u4'), # No Of FDT data records in this file (0 or more)
18 ('RadarUnitId', '<u4'),
19 ('SiteName', '<s32'), # Null terminated
20 20 ])
21 21
22 RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes
23 ('RecMgcNumber','<u4'), #0x23030001
24 ('RecCounter','<u4'), #Record counter(0,1, ...)
25 ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record
26 ('Off2StartData','<u4'), #Offset to start of data from start of this record
27 ('EpTimeStamp','<i4'), #Epoch time stamp of start of acquisition (seconds)
28 ('msCompTimeStamp','<u4'), #Millisecond component of time stamp (0,...,999)
29 ('ExpTagName','<s32'), #Experiment tag name (null terminated)
30 ('ExpComment','<s32'), #Experiment comment (null terminated)
31 ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North)
32 ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East)
33 ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
34 ('TransmitFrec','<u4'), #Transmit frequency (Hz)
35 ('ReceiveFrec','<u4'), #Receive frequency
36 ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz)
37 ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2")
38 ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3)
39 ('nModesInUse','<u4'), #Number of modes in use (1 or 2)
40 ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1)
41 ('DualModeRange','<u4'), #Dual Mode range correction for these data (m)
42 ('nDigChannels','<u4'), #Number of digital channels acquired (2*N)
43 ('SampResolution','<u4'), #Sampling resolution (meters)
44 ('nRangeGatesSamp','<u4'), #Number of range gates sampled
45 ('StartRangeSamp','<u4'), #Start range of sampling (meters)
46 ('PRFhz','<u4'), #PRF (Hz)
47 ('Integrations','<u4'), #Integrations
48 ('nDataPointsTrsf','<u4'), #Number of data points transformed
49 ('nReceiveBeams','<u4'), #Number of receive beams stored in file (1 or N)
50 ('nSpectAverages','<u4'), #Number of spectral averages
51 ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window)
52 ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North)
53 ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical)
54 ('AntennaCoord','<f24'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
55 ('RecPhaseCalibr','<f12'), #Receiver phase calibration (degrees) - N values
56 ('RecAmpCalibr','<f12'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
57 ('ReceiverGaindB','<u12'), #Receiver gains in dB - N values
22 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
23 ('RecMgcNumber', '<u4'), # 0x23030001
24 ('RecCounter', '<u4'), # Record counter(0,1, ...)
25 ('Off2StartNxtRec', '<u4'), # Offset to start of next record form start of this record
26 ('Off2StartData', '<u4'), # Offset to start of data from start of this record
27 ('EpTimeStamp', '<i4'), # Epoch time stamp of start of acquisition (seconds)
28 ('msCompTimeStamp', '<u4'), # Millisecond component of time stamp (0,...,999)
29 ('ExpTagName', '<s32'), # Experiment tag name (null terminated)
30 ('ExpComment', '<s32'), # Experiment comment (null terminated)
31 ('SiteLatDegrees', '<f4'), # Site latitude (from GPS) in degrees (positive implies North)
32 ('SiteLongDegrees', '<f4'), # Site longitude (from GPS) in degrees (positive implies East)
33 ('RTCgpsStatus', '<u4'), # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
34 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
35 ('ReceiveFrec', '<u4'), # Receive frequency
36 ('FirstOsciFrec', '<u4'), # First local oscillator frequency (Hz)
37 ('Polarisation', '<u4'), # (0="O", 1="E", 2="linear 1", 3="linear2")
38 ('ReceiverFiltSett', '<u4'), # Receiver filter settings (0,1,2,3)
39 ('nModesInUse', '<u4'), # Number of modes in use (1 or 2)
40 ('DualModeIndex', '<u4'), # Dual Mode index number for these data (0 or 1)
41 ('DualModeRange', '<u4'), # Dual Mode range correction for these data (m)
42 ('nDigChannels', '<u4'), # Number of digital channels acquired (2*N)
43 ('SampResolution', '<u4'), # Sampling resolution (meters)
44 ('nRangeGatesSamp', '<u4'), # Number of range gates sampled
45 ('StartRangeSamp', '<u4'), # Start range of sampling (meters)
46 ('PRFhz', '<u4'), # PRF (Hz)
47 ('Integrations', '<u4'), # Integrations
48 ('nDataPointsTrsf', '<u4'), # Number of data points transformed
49 ('nReceiveBeams', '<u4'), # Number of receive beams stored in file (1 or N)
50 ('nSpectAverages', '<u4'), # Number of spectral averages
51 ('FFTwindowingInd', '<u4'), # FFT windowing index (0 = no window)
52 ('BeamAngleAzim', '<f4'), # Beam steer angle (azimuth) in degrees (clockwise from true North)
53 ('BeamAngleZen', '<f4'), # Beam steer angle (zenith) in degrees (0=> vertical)
54 ('AntennaCoord', '<f24'), # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
55 ('RecPhaseCalibr', '<f12'), # Receiver phase calibration (degrees) - N values
56 ('RecAmpCalibr', '<f12'), # Receiver amplitude calibration (ratio relative to receiver one) - N values
57 ('ReceiverGaindB', '<u12'), # Receiver gains in dB - N values
58 58 ])
59 59
60 60
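
FILE_STRUCTURE and RECORD_STRUCTURE above describe fixed binary headers as numpy structured dtypes so a whole header can be parsed in one call. A self-contained sketch of that pattern with a simplified, assumed layout (numpy.frombuffer is used here as the in-memory analogue of the numpy.fromfile call used later in this file):

    import io
    import numpy

    # Simplified stand-in for the 48-byte FDT file header (field names reused, layout assumed)
    HEADER_DTYPE = numpy.dtype([
        ('FileMgcNumber', '<u4'),
        ('nFDTdataRecors', '<u4'),
        ('RadarUnitId', '<u4'),
        ('SiteName', 'S32'),        # null-padded byte string
    ])

    # Build one synthetic header, serialize it, then parse it back
    record = numpy.array([(0x23020100, 5, 1, b'JRO')], dtype=HEADER_DTYPE)
    raw = io.BytesIO(record.tobytes())

    header = numpy.frombuffer(raw.read(), dtype=HEADER_DTYPE, count=1)
    print(hex(int(header['FileMgcNumber'][0])), header['SiteName'][0])   # 0x23020100 b'JRO'
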
@@ -82,39 +82,39 class Header(object):
82 82 keyList.sort()
83 83
84 84 for key in keyList:
85 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
85 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
86 86
87 87 if "size" not in keyList:
88 88 attr = getattr(self, "size")
89 89
90 90 if attr:
91 message += "%s = %s" %("size", attr) + "\n"
91 message += "%s = %s" % ("size", attr) + "\n"
92 92
93 93 print(message)
94 94
95 95 class FileHeader(Header):
96 96
97 FileMgcNumber= None
98 nFDTdataRecors=None #No Of FDT data records in this file (0 or more)
99 RadarUnitId= None
100 SiteName= None
97 FileMgcNumber = None
98 nFDTdataRecors = None # No Of FDT data records in this file (0 or more)
99 RadarUnitId = None
100 SiteName = None
101 101
102 #__LOCALTIME = None
102 # __LOCALTIME = None
103 103
104 104 def __init__(self, useLocalTime=True):
105 105
106 self.FileMgcNumber= 0 #0x23020100
107 self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more)
108 self.RadarUnitId= 0
109 self.SiteName= ""
106 self.FileMgcNumber = 0 # 0x23020100
107 self.nFDTdataRecors = 0 # No Of FDT data records in this file (0 or more)
108 self.RadarUnitId = 0
109 self.SiteName = ""
110 110 self.size = 48
111 111
112 #self.useLocalTime = useLocalTime
112 # self.useLocalTime = useLocalTime
113 113
114 114 def read(self, fp):
115 115
116 116 try:
117 header = numpy.fromfile(fp, FILE_STRUCTURE,1)
117 header = numpy.fromfile(fp, FILE_STRUCTURE, 1)
118 118 ''' numpy.fromfile(file, dtype, count, sep='')
119 119 file : file or str
120 120 Open file object or filename.
@@ -139,13 +139,13 class FileHeader(Header):
139 139 print(eBasicHeader)
140 140 return 0
141 141
142 self.FileMgcNumber= byte(header['FileMgcNumber'][0])
143 self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more)
144 self.RadarUnitId= int(header['RadarUnitId'][0])
145 self.SiteName= char(header['SiteName'][0])
142 self.FileMgcNumber = byte(header['FileMgcNumber'][0])
143 self.nFDTdataRecors = int(header['nFDTdataRecors'][0]) # No Of FDT data records in this file (0 or more)
144 self.RadarUnitId = int(header['RadarUnitId'][0])
145 self.SiteName = char(header['SiteName'][0])
146 146
147 147
148 if self.size <48:
148 if self.size < 48:
149 149 return 0
150 150
151 151 return 1
@@ -182,40 +182,40 class FileHeader(Header):
182 182
183 183 class RecordHeader(Header):
184 184
185 RecMgcNumber=None #0x23030001
186 RecCounter= None
187 Off2StartNxtRec= None
188 EpTimeStamp= None
189 msCompTimeStamp= None
190 ExpTagName= None
191 ExpComment=None
192 SiteLatDegrees=None
193 SiteLongDegrees= None
194 RTCgpsStatus= None
195 TransmitFrec= None
196 ReceiveFrec= None
197 FirstOsciFrec= None
198 Polarisation= None
199 ReceiverFiltSett= None
200 nModesInUse= None
201 DualModeIndex= None
202 DualModeRange= None
203 nDigChannels= None
204 SampResolution= None
205 nRangeGatesSamp= None
206 StartRangeSamp= None
207 PRFhz= None
208 Integrations= None
209 nDataPointsTrsf= None
210 nReceiveBeams= None
211 nSpectAverages= None
212 FFTwindowingInd= None
213 BeamAngleAzim= None
214 BeamAngleZen= None
215 AntennaCoord= None
216 RecPhaseCalibr= None
217 RecAmpCalibr= None
218 ReceiverGaindB= None
185 RecMgcNumber = None # 0x23030001
186 RecCounter = None
187 Off2StartNxtRec = None
188 EpTimeStamp = None
189 msCompTimeStamp = None
190 ExpTagName = None
191 ExpComment = None
192 SiteLatDegrees = None
193 SiteLongDegrees = None
194 RTCgpsStatus = None
195 TransmitFrec = None
196 ReceiveFrec = None
197 FirstOsciFrec = None
198 Polarisation = None
199 ReceiverFiltSett = None
200 nModesInUse = None
201 DualModeIndex = None
202 DualModeRange = None
203 nDigChannels = None
204 SampResolution = None
205 nRangeGatesSamp = None
206 StartRangeSamp = None
207 PRFhz = None
208 Integrations = None
209 nDataPointsTrsf = None
210 nReceiveBeams = None
211 nSpectAverages = None
212 FFTwindowingInd = None
213 BeamAngleAzim = None
214 BeamAngleZen = None
215 AntennaCoord = None
216 RecPhaseCalibr = None
217 RecAmpCalibr = None
218 ReceiverGaindB = None
219 219
220 220 '''size = None
221 221 nSamples = None
@@ -224,20 +224,20 class RecordHeader(Header):
224 224 adcResolution = None
225 225 pciDioBusWidth = None'''
226 226
227 def __init__(self, RecMgcNumber=None, RecCounter= 0, Off2StartNxtRec= 0,
228 EpTimeStamp= 0, msCompTimeStamp= 0, ExpTagName= None,
229 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees= 0,
230 RTCgpsStatus= 0, TransmitFrec= 0, ReceiveFrec= 0,
231 FirstOsciFrec= 0, Polarisation= 0, ReceiverFiltSett= 0,
232 nModesInUse= 0, DualModeIndex= 0, DualModeRange= 0,
233 nDigChannels= 0, SampResolution= 0, nRangeGatesSamp= 0,
234 StartRangeSamp= 0, PRFhz= 0, Integrations= 0,
235 nDataPointsTrsf= 0, nReceiveBeams= 0, nSpectAverages= 0,
236 FFTwindowingInd= 0, BeamAngleAzim= 0, BeamAngleZen= 0,
237 AntennaCoord= 0, RecPhaseCalibr= 0, RecAmpCalibr= 0,
238 ReceiverGaindB= 0):
227 def __init__(self, RecMgcNumber=None, RecCounter=0, Off2StartNxtRec=0,
228 EpTimeStamp=0, msCompTimeStamp=0, ExpTagName=None,
229 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees=0,
230 RTCgpsStatus=0, TransmitFrec=0, ReceiveFrec=0,
231 FirstOsciFrec=0, Polarisation=0, ReceiverFiltSett=0,
232 nModesInUse=0, DualModeIndex=0, DualModeRange=0,
233 nDigChannels=0, SampResolution=0, nRangeGatesSamp=0,
234 StartRangeSamp=0, PRFhz=0, Integrations=0,
235 nDataPointsTrsf=0, nReceiveBeams=0, nSpectAverages=0,
236 FFTwindowingInd=0, BeamAngleAzim=0, BeamAngleZen=0,
237 AntennaCoord=0, RecPhaseCalibr=0, RecAmpCalibr=0,
238 ReceiverGaindB=0):
239 239
240 self.RecMgcNumber = RecMgcNumber #0x23030001
240 self.RecMgcNumber = RecMgcNumber # 0x23030001
241 241 self.RecCounter = RecCounter
242 242 self.Off2StartNxtRec = Off2StartNxtRec
243 243 self.EpTimeStamp = EpTimeStamp
@@ -275,15 +275,15 class RecordHeader(Header):
275 275
276 276 def read(self, fp):
277 277
278 startFp = fp.tell() #The method tell() returns the current position of the file read/write pointer within the file.
278 startFp = fp.tell() # The method tell() returns the current position of the file read/write pointer within the file.
279 279
280 280 try:
281 header = numpy.fromfile(fp,RECORD_STRUCTURE,1)
281 header = numpy.fromfile(fp, RECORD_STRUCTURE, 1)
282 282 except Exception as e:
283 283 print("System Header: " + e)
284 284 return 0
285 285
286 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001
286 self.RecMgcNumber = header['RecMgcNumber'][0] # 0x23030001
287 287 self.RecCounter = header['RecCounter'][0]
288 288 self.Off2StartNxtRec = header['Off2StartNxtRec'][0]
289 289 self.EpTimeStamp = header['EpTimeStamp'][0]
@@ -318,16 +318,16 class RecordHeader(Header):
318 318 self.RecAmpCalibr = header['RecAmpCalibr'][0]
319 319 self.ReceiverGaindB = header['ReceiverGaindB'][0]
320 320
321 Self.size = 180+20*3
321 Self.size = 180 + 20 * 3
322 322
323 323 endFp = self.size + startFp
324 324
325 325 if fp.tell() > endFp:
326 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
326 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
327 327 return 0
328 328
329 329 if fp.tell() < endFp:
330 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
330 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" % fp.name)
331 331 return 0
332 332
333 333 return 1
@@ -335,38 +335,38 class RecordHeader(Header):
335 335 def write(self, fp):
336 336
337 337 headerTuple = (self.RecMgcNumber,
338 self.RecCounter,
339 self.Off2StartNxtRec,
340 self.EpTimeStamp,
341 self.msCompTimeStamp,
342 self.ExpTagName,
343 self.ExpComment,
344 self.SiteLatDegrees,
345 self.SiteLongDegrees,
346 self.RTCgpsStatus,
347 self.TransmitFrec,
348 self.ReceiveFrec,
349 self.FirstOsciFrec,
350 self.Polarisation,
351 self.ReceiverFiltSett,
352 self.nModesInUse,
353 self.DualModeIndex,
354 self.DualModeRange,
338 self.RecCounter,
339 self.Off2StartNxtRec,
340 self.EpTimeStamp,
341 self.msCompTimeStamp,
342 self.ExpTagName,
343 self.ExpComment,
344 self.SiteLatDegrees,
345 self.SiteLongDegrees,
346 self.RTCgpsStatus,
347 self.TransmitFrec,
348 self.ReceiveFrec,
349 self.FirstOsciFrec,
350 self.Polarisation,
351 self.ReceiverFiltSett,
352 self.nModesInUse,
353 self.DualModeIndex,
354 self.DualModeRange,
355 355 self.nDigChannels,
356 self.SampResolution,
357 self.nRangeGatesSamp,
358 self.StartRangeSamp,
359 self.PRFhz,
360 self.Integrations,
361 self.nDataPointsTrsf,
362 self.nReceiveBeams,
363 self.nSpectAverages,
364 self.FFTwindowingInd,
365 self.BeamAngleAzim,
366 self.BeamAngleZen,
367 self.AntennaCoord,
368 self.RecPhaseCalibr,
369 self.RecAmpCalibr,
356 self.SampResolution,
357 self.nRangeGatesSamp,
358 self.StartRangeSamp,
359 self.PRFhz,
360 self.Integrations,
361 self.nDataPointsTrsf,
362 self.nReceiveBeams,
363 self.nSpectAverages,
364 self.FFTwindowingInd,
365 self.BeamAngleAzim,
366 self.BeamAngleZen,
367 self.AntennaCoord,
368 self.RecPhaseCalibr,
369 self.RecAmpCalibr,
370 370 self.ReceiverGaindB)
371 371
372 372 # self.size,self.nSamples,
@@ -375,7 +375,7 class RecordHeader(Header):
375 375 # self.adcResolution,
376 376 # self.pciDioBusWidth
377 377
378 header = numpy.array(headerTuple,RECORD_STRUCTURE)
378 header = numpy.array(headerTuple, RECORD_STRUCTURE)
379 379 header.tofile(fp)
380 380
381 381 return 1
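
write() above packs the header attributes into a tuple and lets numpy serialize them according to RECORD_STRUCTURE. A minimal round-trip sketch with a toy two-field record dtype (field names borrowed, layout assumed):

    import tempfile
    import numpy

    TOY_RECORD = numpy.dtype([('RecCounter', '<u4'), ('PRFhz', '<u4')])

    header = numpy.array((7, 250), dtype=TOY_RECORD)    # one record laid out per TOY_RECORD
    with tempfile.TemporaryFile() as fp:
        header.tofile(fp)                               # write the packed bytes
        fp.flush()
        fp.seek(0)
        back = numpy.fromfile(fp, TOY_RECORD, 1)        # read one record back
        print(back['RecCounter'][0], back['PRFhz'][0])  # 7 250
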
@@ -394,11 +394,11 def get_dtype_index(numpy_dtype):
394 394
395 395 def get_numpy_dtype(index):
396 396
397 #dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
397 # dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
398 398
399 399 return NUMPY_DTYPE_LIST[index]
400 400
401 401
402 402 def get_dtype_width(index):
403 403
404 return DTYPE_WIDTH[index] No newline at end of file
404 return DTYPE_WIDTH[index]
@@ -1,3 +1,3
1 1 from .jrodata import *
2 2 from .jroheaderIO import *
3 from .jroamisr import * No newline at end of file
3 from .jroamisr import *
@@ -15,41 +15,41 class AMISR:
15 15 self.utctime = None
16 16 self.type = "AMISR"
17 17
18 #propiedades para compatibilidad con Voltages
19 self.timeZone = 0#timezone like jroheader, difference in minutes between UTC and localtime
20 self.dstFlag = 0#self.dataIn.dstFlag
21 self.errorCount = 0#self.dataIn.errorCount
22 self.useLocalTime = True#self.dataIn.useLocalTime
18 # propiedades para compatibilidad con Voltages
19 self.timeZone = 0 # timezone like jroheader, difference in minutes between UTC and localtime
20 self.dstFlag = 0 # self.dataIn.dstFlag
21 self.errorCount = 0 # self.dataIn.errorCount
22 self.useLocalTime = True # self.dataIn.useLocalTime
23 23
24 self.radarControllerHeaderObj = None#self.dataIn.radarControllerHeaderObj.copy()
25 self.systemHeaderObj = None#self.dataIn.systemHeaderObj.copy()
26 self.channelList = [0]#self.dataIn.channelList esto solo aplica para el caso de AMISR
27 self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
24 self.radarControllerHeaderObj = None # self.dataIn.radarControllerHeaderObj.copy()
25 self.systemHeaderObj = None # self.dataIn.systemHeaderObj.copy()
26 self.channelList = [0] # self.dataIn.channelList esto solo aplica para el caso de AMISR
27 self.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
28 28
29 self.flagDiscontinuousBlock = None#self.dataIn.flagDiscontinuousBlock
30 #self.utctime = #self.firstdatatime
31 self.flagDecodeData = None#self.dataIn.flagDecodeData #asumo q la data esta decodificada
32 self.flagDeflipData = None#self.dataIn.flagDeflipData #asumo q la data esta sin flip
29 self.flagDiscontinuousBlock = None # self.dataIn.flagDiscontinuousBlock
30 # self.utctime = #self.firstdatatime
31 self.flagDecodeData = None # self.dataIn.flagDecodeData #asumo q la data esta decodificada
32 self.flagDeflipData = None # self.dataIn.flagDeflipData #asumo q la data esta sin flip
33 33
34 self.nCohInt = 1#self.dataIn.nCohInt
34 self.nCohInt = 1 # self.dataIn.nCohInt
35 35 self.nIncohInt = 1
36 self.ippSeconds = None#self.dataIn.ippSeconds, segun el filename/Setup/Tufile
37 self.windowOfFilter = None#self.dataIn.windowOfFilter
36 self.ippSeconds = None # self.dataIn.ippSeconds, segun el filename/Setup/Tufile
37 self.windowOfFilter = None # self.dataIn.windowOfFilter
38 38
39 self.timeInterval = None#self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
40 self.frequency = None#self.dataIn.frequency
41 self.realtime = 0#self.dataIn.realtime
39 self.timeInterval = None # self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
40 self.frequency = None # self.dataIn.frequency
41 self.realtime = 0 # self.dataIn.realtime
42 42
43 #actualizar en la lectura de datos
44 self.heightList = None#self.dataIn.heightList
45 self.nProfiles = None#Number of samples or nFFTPoints
43 # actualizar en la lectura de datos
44 self.heightList = None # self.dataIn.heightList
45 self.nProfiles = None # Number of samples or nFFTPoints
46 46 self.nRecords = None
47 47 self.nBeams = None
48 self.nBaud = None#self.dataIn.nBaud
49 self.nCode = None#self.dataIn.nCode
50 self.code = None#self.dataIn.code
48 self.nBaud = None # self.dataIn.nBaud
49 self.nCode = None # self.dataIn.nCode
50 self.code = None # self.dataIn.code
51 51
52 #consideracion para los Beams
52 # consideracion para los Beams
53 53 self.beamCodeDict = None
54 54 self.beamRangeDict = None
55 55 self.beamcode = None
@@ -246,7 +246,7 class JROData(GenericData):
246 246
247 247 def getFmaxTimeResponse(self):
248 248
249 period = (10**-6) * self.getDeltaH() / (0.15)
249 period = (10 ** -6) * self.getDeltaH() / (0.15)
250 250
251 251 PRF = 1. / (period * self.nCohInt)
252 252
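
getFmaxTimeResponse above turns the range-gate spacing into an effective pulse repetition frequency; the constant 0.15 appears to be c/2 expressed in km per microsecond, so period is the round-trip time per gate. With assumed sample values:

    deltaH = 0.15    # range-gate spacing in km (assumed)
    nCohInt = 2      # coherent integrations (assumed)

    period = (10 ** -6) * deltaH / 0.15   # seconds per gate: 2*deltaH/c = 1 microsecond here
    PRF = 1. / (period * nCohInt)         # effective PRF in Hz
    print(PRF)                            # 500000.0
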
@@ -338,10 +338,10 class JROData(GenericData):
338 338
339 339 class Voltage(JROData):
340 340
341 dataPP_POW = None
342 dataPP_DOP = None
341 dataPP_POW = None
342 dataPP_DOP = None
343 343 dataPP_WIDTH = None
344 dataPP_SNR = None
344 dataPP_SNR = None
345 345
346 346 def __init__(self):
347 347 '''
@@ -370,7 +370,7 class Voltage(JROData):
370 370 self.flagShiftFFT = False
371 371 self.flagDataAsBlock = False # Asumo que la data es leida perfil a perfil
372 372 self.profileIndex = 0
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
374 374 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
375 375
376 376 def getNoisebyHildebrand(self, channel=None):
@@ -458,8 +458,8 class Spectra(JROData):
458 458 self.ippFactor = 1
459 459 self.beacon_heiIndexList = []
460 460 self.noise_estimation = None
461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp', 'nIncohInt', 'nFFTPoints', 'nProfiles']
463 463
464 464 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
465 465 """
@@ -498,14 +498,14 class Spectra(JROData):
498 498 def getAcfRange(self, extrapoints=0):
499 499
500 500 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
502 502
503 503 return freqrange
504 504
505 505 def getFreqRange(self, extrapoints=0):
506 506
507 507 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
509 509
510 510 return freqrange
511 511
@@ -515,7 +515,7 class Spectra(JROData):
515 515 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
516 516
517 517 if self.nmodes:
518 return velrange/self.nmodes
518 return velrange / self.nmodes
519 519 else:
520 520 return velrange
521 521
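
getVelRange above builds a symmetric velocity axis of nFFTPoints bins centred on zero, optionally scaled down by the number of modes. Standalone, with assumed values:

    import numpy

    nFFTPoints = 8
    vmax = 10.0     # assumed maximum unambiguous velocity in m/s

    deltav = vmax / nFFTPoints
    velrange = deltav * (numpy.arange(nFFTPoints) - nFFTPoints / 2.)
    print(velrange)   # 8 bins from -5.0 to 3.75 m/s in 1.25 m/s steps
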
@@ -535,8 +535,8 class Spectra(JROData):
535 535 pwcode = 1
536 536
537 537 if self.flagDecodeData:
538 pwcode = numpy.sum(self.code[0]**2)
539 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
538 pwcode = numpy.sum(self.code[0] ** 2)
539 # normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
540 540 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
541 541
542 542 return normFactor
@@ -562,7 +562,7 class Spectra(JROData):
562 562
563 563 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
564 564 if self.nmodes:
565 return self.nmodes*timeInterval
565 return self.nmodes * timeInterval
566 566 else:
567 567 return timeInterval
568 568
@@ -634,7 +634,7 class SpectraHeis(Spectra):
634 634 def normFactor(self):
635 635 pwcode = 1
636 636 if self.flagDecodeData:
637 pwcode = numpy.sum(self.code[0]**2)
637 pwcode = numpy.sum(self.code[0] ** 2)
638 638
639 639 normFactor = self.nIncohInt * self.nCohInt * pwcode
640 640
@@ -765,7 +765,7 class Correlation(JROData):
765 765 xx = numpy.zeros([4, 4])
766 766
767 767 for fil in range(4):
768 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
768 xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
769 769
770 770 xx_inv = numpy.linalg.inv(xx)
771 771 xx_aux = xx_inv[0, :]
@@ -997,13 +997,13 class PlotterData(object):
997 997
998 998 meta = {}
999 999 meta['xrange'] = []
1000 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1000 dy = int(len(self.yrange) / self.MAXNUMY) + 1
1001 1001 tmp = self.data[tm][self.key]
1002 1002 shape = tmp.shape
1003 1003 if len(shape) == 2:
1004 1004 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1005 1005 elif len(shape) == 3:
1006 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1006 dx = int(self.data[tm][self.key].shape[1] / self.MAXNUMX) + 1
1007 1007 data = self.roundFloats(
1008 1008 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1009 1009 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
@@ -516,7 +516,7 class RadarControllerHeader(Header):
516 516 for j in range(len(code_selected) - 1, -1, -1):
517 517 if code_selected[j] == 1:
518 518 tempx[i] = tempx[i] + \
519 2**(len(code_selected) - 1 - j)
519 2 ** (len(code_selected) - 1 - j)
520 520 start = start + 32
521 521 end = end + 32
522 522
@@ -903,4 +903,4 def get_procflag_dtype(index):
903 903
904 904 def get_dtype_width(index):
905 905
906 return DTYPE_WIDTH[index] No newline at end of file
906 return DTYPE_WIDTH[index]
@@ -52,13 +52,13 EARTH_RADIUS = 6.3710e3
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
55 a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 theta = -theta + numpy.pi/2
61 return r*numpy.cos(theta), r*numpy.sin(theta)
58 theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
59 * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
60 theta = -theta + numpy.pi / 2
61 return r * numpy.cos(theta), r * numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
@@ -66,7 +66,7 def km2deg(km):
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 return numpy.rad2deg(km/EARTH_RADIUS)
69 return numpy.rad2deg(km / EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
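
ll2xy above combines the haversine great-circle distance (p = pi/180 converts degrees to radians, and 12742 km is the Earth diameter, i.e. 2 * EARTH_RADIUS) with the initial bearing, rotated so the result is an (east, north) offset in km. An annotated, self-contained copy with a quick sanity check (coordinates are assumed sample values near Jicamarca):

    import numpy

    def ll2xy(lat1, lon1, lat2, lon2):
        p = 0.017453292519943295                  # pi/180: degrees -> radians
        # Haversine term and great-circle distance (12742 km = 2 * 6371 km)
        a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
            numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
        r = 12742 * numpy.arcsin(numpy.sqrt(a))
        # Initial bearing from point 1 to point 2, then rotate so x is east and y is north
        theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
                              * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
        theta = -theta + numpy.pi / 2
        return r * numpy.cos(theta), r * numpy.sin(theta)

    # One degree of longitude at ~12 degrees south is roughly 109 km of eastward offset
    x, y = ll2xy(-11.95, -76.87, -11.95, -75.87)
    print(round(x, 1), round(y, 1))   # about 108.8 east, close to 0 north
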
@@ -385,7 +385,7 class Plot(Operation):
385 385 xmax = self.xmax
386 386 else:
387 387 xmin = self.tmin
388 xmax = self.tmin + self.xrange*60*60
388 xmax = self.tmin + self.xrange * 60 * 60
389 389 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
390 390 ax.xaxis.set_major_locator(LinearLocator(9))
391 391 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
@@ -393,10 +393,10 class Plot(Operation):
393 393 ax.set_facecolor(self.bgcolor)
394 394 if self.xscale:
395 395 ax.xaxis.set_major_formatter(FuncFormatter(
396 lambda x, pos: '{0:g}'.format(x*self.xscale)))
396 lambda x, pos: '{0:g}'.format(x * self.xscale)))
397 397 if self.yscale:
398 398 ax.yaxis.set_major_formatter(FuncFormatter(
399 lambda x, pos: '{0:g}'.format(x*self.yscale)))
399 lambda x, pos: '{0:g}'.format(x * self.yscale)))
400 400 if self.xlabel is not None:
401 401 ax.set_xlabel(self.xlabel)
402 402 if self.ylabel is not None:
@@ -447,7 +447,7 class Plot(Operation):
447 447 Reset axes for redraw plots
448 448 '''
449 449
450 for ax in self.axes+self.pf_axes+self.cb_axes:
450 for ax in self.axes + self.pf_axes + self.cb_axes:
451 451 ax.clear()
452 452 ax.firsttime = True
453 453 if hasattr(ax, 'cbar') and ax.cbar:
@@ -510,7 +510,7 class Plot(Operation):
510 510 figname = os.path.join(
511 511 self.save,
512 512 self.save_code,
513 '{}_{}.png'.format(
513 '{}_{}.png'.format(
514 514 self.save_code,
515 515 self.getDateTime(self.data.max_time).strftime(
516 516 '%Y%m%d_%H%M%S'
@@ -649,10 +649,10 class Plot(Operation):
649 649
650 650 tm = getattr(dataOut, self.attr_time)
651 651
652 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
652 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange * 60 * 60:
653 653 self.save_time = tm
654 654 self.__plot()
655 self.tmin += self.xrange*60*60
655 self.tmin += self.xrange * 60 * 60
656 656 self.data.setup()
657 657 self.clear_figures()
658 658
@@ -666,9 +666,9 class Plot(Operation):
666 666 if self.xmin is None:
667 667 self.tmin = tm
668 668 self.xmin = dt.hour
669 minutes = (self.xmin-int(self.xmin)) * 60
669 minutes = (self.xmin - int(self.xmin)) * 60
670 670 seconds = (minutes - int(minutes)) * 60
671 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
671 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
672 672 datetime.datetime(1970, 1, 1)).total_seconds()
673 673 if self.localtime:
674 674 self.tmin += time.timezone
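
The xmin handling above converts a fractional start hour into a concrete epoch timestamp by splitting it into whole hours, minutes and seconds. The decomposition on its own, with assumed values:

    import datetime

    xmin = 9.75                                   # assumed plot start, in hours
    dt = datetime.datetime(2024, 1, 15, 12, 30)   # assumed timestamp of the current block

    minutes = (xmin - int(xmin)) * 60
    seconds = (minutes - int(minutes)) * 60
    tmin = (dt.replace(hour=int(xmin), minute=int(minutes), second=int(seconds)) -
            datetime.datetime(1970, 1, 1)).total_seconds()
    print(datetime.datetime.utcfromtimestamp(tmin))   # 2024-01-15 09:45:00
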
@@ -679,7 +679,7 class Plot(Operation):
679 679 if self.throttle == 0:
680 680 self.__plot()
681 681 else:
682 self.__throttle_plot(self.__plot)#, coerce=coerce)
682 self.__throttle_plot(self.__plot) # , coerce=coerce)
683 683
684 684 def close(self):
685 685
@@ -32,8 +32,8 class CorrelationPlot(Plot):
32 32
33 33 def getSubplots(self):
34 34
35 ncol = int(numpy.sqrt(self.nplots)+0.9)
36 nrow = int(self.nplots*1./ncol + 0.9)
35 ncol = int(numpy.sqrt(self.nplots) + 0.9)
36 nrow = int(self.nplots * 1. / ncol + 0.9)
37 37
38 38 return nrow, ncol
39 39
@@ -50,10 +50,10 class CorrelationPlot(Plot):
50 50 colspan = 2
51 51 self.__nsubplots = 2
52 52
53 self.createFigure(id = id,
54 wintitle = wintitle,
55 widthplot = self.WIDTH + self.WIDTHPROF,
56 heightplot = self.HEIGHT + self.HEIGHTPROF,
53 self.createFigure(id=id,
54 wintitle=wintitle,
55 widthplot=self.WIDTH + self.WIDTHPROF,
56 heightplot=self.HEIGHT + self.HEIGHTPROF,
57 57 show=show)
58 58
59 59 nrow, ncol = self.getSubplots()
@@ -65,10 +65,10 class CorrelationPlot(Plot):
65 65 if counter >= self.nplots:
66 66 break
67 67
68 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
68 self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan, colspan, 1)
69 69
70 70 if showprofile:
71 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
71 self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan + colspan, 1, 1)
72 72
73 73 counter += 1
74 74
@@ -98,7 +98,7 class CorrelationPlot(Plot):
98 98 return None
99 99
100 100 if realtime:
101 if not(isRealtime(utcdatatime = dataOut.utctime)):
101 if not(isRealtime(utcdatatime=dataOut.utctime)):
102 102 print('Skipping this plot function')
103 103 return
104 104
@@ -116,16 +116,16 class CorrelationPlot(Plot):
116 116 x = dataOut.getLagTRange(1)
117 117 y = dataOut.heightList
118 118
119 z = copy.copy(dataOut.data_corr[:,:,0,:])
119 z = copy.copy(dataOut.data_corr[:, :, 0, :])
120 120 for i in range(dataOut.data_corr.shape[0]):
121 z[i,:,:] = z[i,:,:]/factor[i,:]
121 z[i, :, :] = z[i, :, :] / factor[i, :]
122 122 zdB = numpy.abs(z)
123 123
124 124 avg = numpy.average(z, axis=1)
125 125 # avg = numpy.nanmean(z, axis=1)
126 126 # noise = dataOut.noise/factor
127 127
128 #thisDatetime = dataOut.datatime
128 # thisDatetime = dataOut.datatime
129 129 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
130 130 title = wintitle + " Correlation"
131 131 xlabel = "Lag T (s)"
@@ -158,10 +158,10 class CorrelationPlot(Plot):
158 158 self.setWinTitle(title)
159 159
160 160 for i in range(self.nplots):
161 str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S"))
162 title = "Channel %d and %d: : %s" %(dataOut.pairsList[i][0],dataOut.pairsList[i][1] , str_datetime)
163 axes = self.axesList[i*self.__nsubplots]
164 axes.pcolor(x, y, zdB[i,:,:],
161 str_datetime = '%s %s' % (thisDatetime.strftime("%Y/%m/%d"), thisDatetime.strftime("%H:%M:%S"))
162 title = "Channel %d and %d: : %s" % (dataOut.pairsList[i][0], dataOut.pairsList[i][1] , str_datetime)
163 axes = self.axesList[i * self.__nsubplots]
164 axes.pcolor(x, y, zdB[i, :, :],
165 165 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
166 166 xlabel=xlabel, ylabel=ylabel, title=title,
167 167 ticksize=9, cblabel='')
@@ -184,4 +184,4 class CorrelationPlot(Plot):
184 184 save=save,
185 185 ftp=ftp,
186 186 wr_period=wr_period,
187 thisDatetime=thisDatetime) No newline at end of file
187 thisDatetime=thisDatetime)
@@ -31,7 +31,7 class SpectraHeisPlot(Plot):
31 31
32 32 data = {}
33 33 meta = {}
34 spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
34 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
35 35 data['spc_heis'] = spc
36 36
37 37 return data, meta
@@ -40,12 +40,12 class SpectraHeisPlot(Plot):
40 40
41 41 c = 3E8
42 42 deltaHeight = self.data.yrange[1] - self.data.yrange[0]
43 x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
43 x = numpy.arange(-1 * len(self.data.yrange) / 2., len(self.data.yrange) / 2.) * (c / (2 * deltaHeight * len(self.data.yrange) * 1000))
44 44 self.y = self.data[-1]['spc_heis']
45 45 self.titles = []
46 46
47 47 for n, ax in enumerate(self.axes):
48 ychannel = self.y[n,:]
48 ychannel = self.y[n, :]
49 49 if ax.firsttime:
50 50 self.xmin = min(x) if self.xmin is None else self.xmin
51 51 self.xmax = max(x) if self.xmax is None else self.xmax
@@ -78,7 +78,7 class RTIHeisPlot(Plot):
78 78 data = {}
79 79 meta = {}
80 80 spc = dataOut.data_spc / dataOut.normFactor
81 spc = 10*numpy.log10(numpy.average(spc, axis=1))
81 spc = 10 * numpy.log10(numpy.average(spc, axis=1))
82 82 data['rti_heis'] = spc
83 83
84 84 return data, meta
@@ -12,13 +12,13 EARTH_RADIUS = 6.3710e3
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
15 a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 theta = -theta + numpy.pi/2
21 return r*numpy.cos(theta), r*numpy.sin(theta)
18 theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
19 * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
20 theta = -theta + numpy.pi / 2
21 return r * numpy.cos(theta), r * numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
@@ -26,7 +26,7 def km2deg(km):
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 return numpy.rad2deg(km/EARTH_RADIUS)
29 return numpy.rad2deg(km / EARTH_RADIUS)
30 30
31 31
32 32
@@ -50,7 +50,7 class SnrPlot(RTIPlot):
50 50 def update(self, dataOut):
51 51
52 52 data = {
53 'snr': 10*numpy.log10(dataOut.data_snr)
53 'snr': 10 * numpy.log10(dataOut.data_snr)
54 54 }
55 55
56 56 return data, {}
@@ -66,7 +66,7 class DopplerPlot(RTIPlot):
66 66 def update(self, dataOut):
67 67
68 68 data = {
69 'dop': 10*numpy.log10(dataOut.data_dop)
69 'dop': 10 * numpy.log10(dataOut.data_dop)
70 70 }
71 71
72 72 return data, {}
@@ -82,7 +82,7 class PowerPlot(RTIPlot):
82 82 def update(self, dataOut):
83 83
84 84 data = {
85 'pow': 10*numpy.log10(dataOut.data_pow)
85 'pow': 10 * numpy.log10(dataOut.data_pow)
86 86 }
87 87
88 88 return data, {}
@@ -269,22 +269,22 class PolarMapPlot(Plot):
269 269 zeniths = numpy.linspace(
270 270 0, self.data.meta['max_range'], data.shape[1])
271 271 if self.mode == 'E':
272 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
272 azimuths = -numpy.radians(self.data.yrange) + numpy.pi / 2
273 273 r, theta = numpy.meshgrid(zeniths, azimuths)
274 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
274 x, y = r * numpy.cos(theta) * numpy.cos(numpy.radians(self.data.meta['elevation'])), r * numpy.sin(
275 theta) * numpy.cos(numpy.radians(self.data.meta['elevation']))
276 276 x = km2deg(x) + self.lon
277 277 y = km2deg(y) + self.lat
278 278 else:
279 279 azimuths = numpy.radians(self.data.yrange)
280 280 r, theta = numpy.meshgrid(zeniths, azimuths)
281 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
281 x, y = r * numpy.cos(theta), r * numpy.sin(theta)
282 282 self.y = zeniths
283 283
284 284 if ax.firsttime:
285 285 if self.zlimits is not None:
286 286 self.zmin, self.zmax = self.zlimits[n]
287 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
287 ax.plt = ax.pcolormesh(# r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 288 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 289 vmin=self.zmin,
290 290 vmax=self.zmax,
@@ -293,7 +293,7 class PolarMapPlot(Plot):
293 293 if self.zlimits is not None:
294 294 self.zmin, self.zmax = self.zlimits[n]
295 295 ax.collections.remove(ax.collections[0])
296 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
296 ax.plt = ax.pcolormesh(# r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 297 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 298 vmin=self.zmin,
299 299 vmax=self.zmax,
@@ -339,8 +339,8 class PolarMapPlot(Plot):
339 339 ax.add_artist(plt.Circle((self.lon, self.lat),
340 340 km2deg(r), color='0.6', fill=False, lw=0.2))
341 341 ax.text(
342 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
342 self.lon + (km2deg(r)) * numpy.cos(60 * numpy.pi / 180),
343 self.lat + (km2deg(r)) * numpy.sin(60 * numpy.pi / 180),
344 344 '{}km'.format(r),
345 345 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 346
@@ -17,7 +17,7 class SpectraPlot(Plot):
17 17 Plot for Spectra data
18 18 '''
19 19
20 CODE = 'spc'
20 CODE = 'spc_moments'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
@@ -39,11 +39,11 class SpectraPlot(Plot):
39 39
40 40 data = {}
41 41 meta = {}
42 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
42 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
43 43 data['spc'] = spc
44 44 data['rti'] = dataOut.getPower()
45 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
46 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
45 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
46 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
47 47 if self.CODE == 'spc_moments':
48 48 data['moments'] = dataOut.moments
49 49
@@ -71,11 +71,12 class SpectraPlot(Plot):
71 71
72 72 data = self.data[-1]
73 73 z = data['spc']
74
74 #self.CODE = 'spc_moments'
75 75 for n, ax in enumerate(self.axes):
76 76 noise = data['noise'][n]
77 print(n,self.CODE)
77 78 if self.CODE == 'spc_moments':
78 mean = data['moments'][n, 1]
79 mean = data['moments'][n,1]
79 80 if ax.firsttime:
80 81 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
81 82 self.xmin = self.xmin if self.xmin else -self.xmax
@@ -132,7 +133,7 class CrossSpectraPlot(Plot):
132 133
133 134 spc = dataOut.data_spc
134 135 cspc = dataOut.data_cspc
135 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
136 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
136 137 meta['pairs'] = dataOut.pairsList
137 138
138 139 tmp = []
@@ -170,8 +171,8 class CrossSpectraPlot(Plot):
170 171
171 172 for n in range(len(self.data.pairs)):
172 173 pair = self.data.pairs[n]
173 coh = cspc[n*2]
174 phase = cspc[n*2+1]
174 coh = cspc[n * 2]
175 phase = cspc[n * 2 + 1]
175 176 ax = self.axes[2 * n]
176 177 if ax.firsttime:
177 178 ax.plt = ax.pcolormesh(x, y, coh.T,
@@ -222,7 +223,7 class RTIPlot(Plot):
222 223 data = {}
223 224 meta = {}
224 225 data['rti'] = dataOut.getPower()
225 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
226 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
226 227
227 228 return data, meta
228 229
@@ -279,7 +280,7 class CoherencePlot(RTIPlot):
279 280 self.nplots = len(self.data.pairs)
280 281 self.ylabel = 'Range [km]'
281 282 self.xlabel = 'Time'
282 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
283 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
283 284 if self.CODE == 'coh':
284 285 self.cb_label = ''
285 286 self.titles = [
@@ -338,7 +339,7 class NoisePlot(Plot):
338 339
339 340 data = {}
340 341 meta = {}
341 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
342 data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor).reshape(dataOut.nChannels, 1)
342 343 meta['yrange'] = numpy.array([])
343 344
344 345 return data, meta
@@ -395,8 +396,8 class PowerProfilePlot(Plot):
395 396
396 397 x = self.data[-1][self.CODE]
397 398
398 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
399 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
399 if self.xmin is None: self.xmin = numpy.nanmin(x) * 0.9
400 if self.xmax is None: self.xmax = numpy.nanmax(x) * 1.1
400 401
401 402 if self.axes[0].firsttime:
402 403 for ch in self.data.channels:
@@ -428,9 +429,9 class SpectraCutPlot(Plot):
428 429
429 430 data = {}
430 431 meta = {}
431 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
432 spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
432 433 data['spc'] = spc
433 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
434 meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
434 435
435 436 return data, meta
436 437
@@ -453,7 +454,7 class SpectraCutPlot(Plot):
453 454 if self.height_index:
454 455 index = numpy.array(self.height_index)
455 456 else:
456 index = numpy.arange(0, len(y), int((len(y))/9))
457 index = numpy.arange(0, len(y), int((len(y)) / 9))
457 458
458 459 for n, ax in enumerate(self.axes):
459 460 if ax.firsttime:
@@ -479,7 +480,7 class BeaconPhase(Plot):
479 480
480 481 def __init__(self):
481 482 Plot.__init__(self)
482 self.timerange = 24*60*60
483 self.timerange = 24 * 60 * 60
483 484 self.isConfig = False
484 485 self.__nsubplots = 1
485 486 self.counter_imagwr = 0
@@ -520,25 +521,25 class BeaconPhase(Plot):
520 521 colspan = 6
521 522 self.__nsubplots = 2
522 523
523 self.createFigure(id = id,
524 wintitle = wintitle,
525 widthplot = self.WIDTH+self.WIDTHPROF,
526 heightplot = self.HEIGHT+self.HEIGHTPROF,
524 self.createFigure(id=id,
525 wintitle=wintitle,
526 widthplot=self.WIDTH + self.WIDTHPROF,
527 heightplot=self.HEIGHT + self.HEIGHTPROF,
527 528 show=show)
528 529
529 530 nrow, ncol = self.getSubplots()
530 531
531 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
532 self.addAxes(nrow, ncol * ncolspan, 0, 0, colspan, 1)
532 533
533 534 def save_phase(self, filename_phase):
534 f = open(filename_phase,'w+')
535 f = open(filename_phase, 'w+')
535 536 f.write('\n\n')
536 537 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
537 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
538 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n')
538 539 f.close()
539 540
540 541 def save_data(self, filename_phase, data, data_datetime):
541 f=open(filename_phase,'a')
542 f = open(filename_phase, 'a')
542 543 timetuple_data = data_datetime.timetuple()
543 544 day = str(timetuple_data.tm_mday)
544 545 month = str(timetuple_data.tm_mon)
@@ -546,7 +547,7 class BeaconPhase(Plot):
546 547 hour = str(timetuple_data.tm_hour)
547 548 minute = str(timetuple_data.tm_min)
548 549 second = str(timetuple_data.tm_sec)
549 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
550 f.write(day + ' ' + month + ' ' + year + ' ' + hour + ' ' + minute + ' ' + second + ' ' + str(data[0]) + ' ' + str(data[1]) + ' ' + str(data[2]) + ' ' + str(data[3]) + '\n')
550 551 f.close()
551 552
552 553 def plot(self):
@@ -571,7 +572,7 class BeaconPhase(Plot):
571 572 pairsIndexList = []
572 573 for pair in pairsList:
573 574 if pair not in dataOut.pairsList:
574 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
575 raise ValueError("Pair %s is not in dataOut.pairsList" % (pair))
575 576 pairsIndexList.append(dataOut.pairsList.index(pair))
576 577
577 578 if pairsIndexList == []:
@@ -592,28 +593,28 class BeaconPhase(Plot):
592 593 hmin_index = hmin_list[0]
593 594
594 595 if hmax_list.any():
595 hmax_index = hmax_list[-1]+1
596 hmax_index = hmax_list[-1] + 1
596 597
597 598 x = dataOut.getTimeRange()
598 599
599 600 thisDatetime = dataOut.datatime
600 601
601 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
602 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
602 603 xlabel = "Local Time"
603 604 ylabel = "Phase (degrees)"
604 605
605 606 update_figfile = False
606 607
607 608 nplots = len(pairsIndexList)
608 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
609 # phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
609 610 phase_beacon = numpy.zeros(len(pairsIndexList))
610 611 for i in range(nplots):
611 612 pair = dataOut.pairsList[pairsIndexList[i]]
612 613 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
613 614 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
614 615 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
615 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
616 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
616 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
617 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real) * 180 / numpy.pi
617 618
618 619 if dataOut.beacon_heiIndexList:
619 620 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
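
The loop above estimates, for each antenna pair, the averaged complex coherence ccf / sqrt(powa * powb) and takes its angle as the beacon phase in degrees. A toy version with synthetic spectra (all values assumed):

    import numpy

    powa = numpy.array([4.0, 4.0, 4.0])    # auto-spectrum, channel a
    powb = numpy.array([9.0, 9.0, 9.0])    # auto-spectrum, channel b
    ccf = 6.0 * numpy.exp(1j * numpy.deg2rad(30.0)) * numpy.ones(3)   # cross-spectrum, 30 deg phase

    avgcoherence = ccf / numpy.sqrt(powa * powb)
    phase = numpy.arctan2(avgcoherence.imag, avgcoherence.real) * 180 / numpy.pi
    print(numpy.abs(avgcoherence), phase)   # coherence magnitude 1.0, phase 30.0 degrees per bin
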
@@ -651,31 +652,31 class BeaconPhase(Plot):
651 652
652 653 update_figfile = True
653 654
654 #open file beacon phase
655 path = '%s%03d' %(self.PREFIX, self.id)
656 beacon_file = os.path.join(path,'%s.txt'%self.name)
657 self.filename_phase = os.path.join(figpath,beacon_file)
658 #self.save_phase(self.filename_phase)
655 # open file beacon phase
656 path = '%s%03d' % (self.PREFIX, self.id)
657 beacon_file = os.path.join(path, '%s.txt' % self.name)
658 self.filename_phase = os.path.join(figpath, beacon_file)
659 # self.save_phase(self.filename_phase)
659 660
660 661
661 #store data beacon phase
662 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
662 # store data beacon phase
663 # self.save_data(self.filename_phase, phase_beacon, thisDatetime)
663 664
664 665 self.setWinTitle(title)
665 666
666 667
667 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
668 title = "Phase Plot %s" % (thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
668 669
669 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
670 legendlabels = ["Pair (%d,%d)" % (pair[0], pair[1]) for pair in dataOut.pairsList]
670 671
671 672 axes = self.axesList[0]
672 673
673 674 self.xdata = numpy.hstack((self.xdata, x[0:1]))
674 675
675 if len(self.ydata)==0:
676 self.ydata = phase_beacon.reshape(-1,1)
676 if len(self.ydata) == 0:
677 self.ydata = phase_beacon.reshape(-1, 1)
677 678 else:
678 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
679 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1, 1)))
679 680
680 681
681 682 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
@@ -699,4 +700,4 class BeaconPhase(Plot):
699 700 thisDatetime=thisDatetime,
700 701 update_figfile=update_figfile)
701 702
702 return dataOut No newline at end of file
703 return dataOut
@@ -54,9 +54,9 class ScopePlot(Plot):
54 54
55 55 def plot_iq(self, x, y, channelIndexList, thisDatetime, wintitle):
56 56
57 yreal = y[channelIndexList,:].real
58 yimag = y[channelIndexList,:].imag
59 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y"))
57 yreal = y[channelIndexList, :].real
58 yimag = y[channelIndexList, :].imag
59 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y"))
60 60 self.xlabel = "Range (Km)"
61 61 self.ylabel = "Intensity - IQ"
62 62
@@ -65,32 +65,32 class ScopePlot(Plot):
65 65
66 66 self.titles[0] = title
67 67
68 for i,ax in enumerate(self.axes):
69 title = "Channel %d" %(i)
68 for i, ax in enumerate(self.axes):
69 title = "Channel %d" % (i)
70 70 if ax.firsttime:
71 71 self.xmin = min(x)
72 72 self.xmax = max(x)
73 ax.plt_r = ax.plot(x, yreal[i,:], color='b')[0]
74 ax.plt_i = ax.plot(x, yimag[i,:], color='r')[0]
73 ax.plt_r = ax.plot(x, yreal[i, :], color='b')[0]
74 ax.plt_i = ax.plot(x, yimag[i, :], color='r')[0]
75 75 else:
76 ax.plt_r.set_data(x, yreal[i,:])
77 ax.plt_i.set_data(x, yimag[i,:])
76 ax.plt_r.set_data(x, yreal[i, :])
77 ax.plt_i.set_data(x, yimag[i, :])
78 78
79 79 def plot_power(self, x, y, channelIndexList, thisDatetime, wintitle):
80 y = y[channelIndexList,:] * numpy.conjugate(y[channelIndexList,:])
80 y = y[channelIndexList, :] * numpy.conjugate(y[channelIndexList, :])
81 81 yreal = y.real
82 yreal = 10*numpy.log10(yreal)
82 yreal = 10 * numpy.log10(yreal)
83 83 self.y = yreal
84 title = wintitle + " Power: %s" %(thisDatetime.strftime("%d-%b-%Y"))
84 title = wintitle + " Power: %s" % (thisDatetime.strftime("%d-%b-%Y"))
85 85 self.xlabel = "Range (Km)"
86 86 self.ylabel = "Intensity [dB]"
87 87
88 88
89 89 self.titles[0] = title
90 90
91 for i,ax in enumerate(self.axes):
92 title = "Channel %d" %(i)
93 ychannel = yreal[i,:]
91 for i, ax in enumerate(self.axes):
92 title = "Channel %d" % (i)
93 ychannel = yreal[i, :]
94 94
95 95 if ax.firsttime:
96 96 self.xmin = min(x)
@@ -102,66 +102,66 class ScopePlot(Plot):
102 102 def plot_weatherpower(self, x, y, channelIndexList, thisDatetime, wintitle):
103 103
104 104
105 y = y[channelIndexList,:]
106 yreal = y.real
107 yreal = 10*numpy.log10(yreal)
105 y = y[channelIndexList, :]
106 yreal = y.real
107 yreal = 10 * numpy.log10(yreal)
108 108 self.y = yreal
109 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
109 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
110 110 self.xlabel = "Range (Km)"
111 111 self.ylabel = "Intensity"
112 self.xmin = min(x)
113 self.xmax = max(x)
112 self.xmin = min(x)
113 self.xmax = max(x)
114 114
115 self.titles[0] =title
116 for i,ax in enumerate(self.axes):
117 title = "Channel %d" %(i)
115 self.titles[0] = title
116 for i, ax in enumerate(self.axes):
117 title = "Channel %d" % (i)
118 118
119 ychannel = yreal[i,:]
119 ychannel = yreal[i, :]
120 120
121 121 if ax.firsttime:
122 122 ax.plt_r = ax.plot(x, ychannel)[0]
123 123 else:
124 #pass
124 # pass
125 125 ax.plt_r.set_data(x, ychannel)
126 126
127 127 def plot_weathervelocity(self, x, y, channelIndexList, thisDatetime, wintitle):
128 128
129 x = x[channelIndexList,:]
130 yreal = y
129 x = x[channelIndexList, :]
130 yreal = y
131 131 self.y = yreal
132 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
132 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
133 133 self.xlabel = "Velocity (m/s)"
134 134 self.ylabel = "Range (Km)"
135 self.xmin = numpy.min(x)
136 self.xmax = numpy.max(x)
137 self.titles[0] =title
138 for i,ax in enumerate(self.axes):
139 title = "Channel %d" %(i)
140 xchannel = x[i,:]
135 self.xmin = numpy.min(x)
136 self.xmax = numpy.max(x)
137 self.titles[0] = title
138 for i, ax in enumerate(self.axes):
139 title = "Channel %d" % (i)
140 xchannel = x[i, :]
141 141 if ax.firsttime:
142 142 ax.plt_r = ax.plot(xchannel, yreal)[0]
143 143 else:
144 #pass
144 # pass
145 145 ax.plt_r.set_data(xchannel, yreal)
146 146
147 147 def plot_weatherspecwidth(self, x, y, channelIndexList, thisDatetime, wintitle):
148 148
149 x = x[channelIndexList,:]
150 yreal = y
149 x = x[channelIndexList, :]
150 yreal = y
151 151 self.y = yreal
152 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
152 title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
153 153 self.xlabel = "width "
154 154 self.ylabel = "Range (Km)"
155 self.xmin = numpy.min(x)
156 self.xmax = numpy.max(x)
157 self.titles[0] =title
158 for i,ax in enumerate(self.axes):
159 title = "Channel %d" %(i)
160 xchannel = x[i,:]
155 self.xmin = numpy.min(x)
156 self.xmax = numpy.max(x)
157 self.titles[0] = title
158 for i, ax in enumerate(self.axes):
159 title = "Channel %d" % (i)
160 xchannel = x[i, :]
161 161 if ax.firsttime:
162 162 ax.plt_r = ax.plot(xchannel, yreal)[0]
163 163 else:
164 #pass
164 # pass
165 165 ax.plt_r.set_data(xchannel, yreal)
166 166
167 167 def plot(self):
@@ -178,11 +178,11 class ScopePlot(Plot):
178 178
179 179 for i in range(self.data.nProfiles):
180 180
181 wintitle1 = " [Profile = %d] " %i
182 if self.CODE =="scope":
181 wintitle1 = " [Profile = %d] " % i
182 if self.CODE == "scope":
183 183 if self.type == "power":
184 184 self.plot_power(self.data.yrange,
185 scope[:,i,:],
185 scope[:, i, :],
186 186 channels,
187 187 thisDatetime,
188 188 wintitle1
@@ -190,42 +190,42 class ScopePlot(Plot):
190 190
191 191 if self.type == "iq":
192 192 self.plot_iq(self.data.yrange,
193 scope[:,i,:],
193 scope[:, i, :],
194 194 channels,
195 195 thisDatetime,
196 196 wintitle1
197 197 )
198 if self.CODE=="pp_power":
198 if self.CODE == "pp_power":
199 199 self.plot_weatherpower(self.data.yrange,
200 scope[:,i,:],
200 scope[:, i, :],
201 201 channels,
202 202 thisDatetime,
203 203 wintitle
204 204 )
205 if self.CODE=="pp_signal":
205 if self.CODE == "pp_signal":
206 206 self.plot_weatherpower(self.data.yrange,
207 scope[:,i,:],
207 scope[:, i, :],
208 208 channels,
209 209 thisDatetime,
210 210 wintitle
211 211 )
212 if self.CODE=="pp_velocity":
213 self.plot_weathervelocity(scope[:,i,:],
212 if self.CODE == "pp_velocity":
213 self.plot_weathervelocity(scope[:, i, :],
214 214 self.data.yrange,
215 215 channels,
216 216 thisDatetime,
217 217 wintitle
218 218 )
219 if self.CODE=="pp_spcwidth":
220 self.plot_weatherspecwidth(scope[:,i,:],
219 if self.CODE == "pp_spcwidth":
220 self.plot_weatherspecwidth(scope[:, i, :],
221 221 self.data.yrange,
222 222 channels,
223 223 thisDatetime,
224 224 wintitle
225 225 )
226 226 else:
227 wintitle = " [Profile = %d] " %self.data.profileIndex
228 if self.CODE== "scope":
227 wintitle = " [Profile = %d] " % self.data.profileIndex
228 if self.CODE == "scope":
229 229 if self.type == "power":
230 230 self.plot_power(self.data.yrange,
231 231 scope,
@@ -241,28 +241,28 class ScopePlot(Plot):
241 241 thisDatetime,
242 242 wintitle
243 243 )
244 if self.CODE=="pp_power":
244 if self.CODE == "pp_power":
245 245 self.plot_weatherpower(self.data.yrange,
246 246 scope,
247 247 channels,
248 248 thisDatetime,
249 249 wintitle
250 250 )
251 if self.CODE=="pp_signal":
251 if self.CODE == "pp_signal":
252 252 self.plot_weatherpower(self.data.yrange,
253 253 scope,
254 254 channels,
255 255 thisDatetime,
256 256 wintitle
257 257 )
258 if self.CODE=="pp_velocity":
258 if self.CODE == "pp_velocity":
259 259 self.plot_weathervelocity(scope,
260 260 self.data.yrange,
261 261 channels,
262 262 thisDatetime,
263 263 wintitle
264 264 )
265 if self.CODE=="pp_specwidth":
265 if self.CODE == "pp_specwidth":
266 266 self.plot_weatherspecwidth(scope,
267 267 self.data.yrange,
268 268 channels,
@@ -1,23 +1,23
1 1 '''
2 2 @author: roj-idl71
3 3 '''
4 #USED IN jroplot_spectra.py
5 RTI_CODE = 0 #Range time intensity (RTI).
6 SPEC_CODE = 1 #Spectra (and Cross-spectra) information.
7 CROSS_CODE = 2 #Cross-Correlation information.
8 COH_CODE = 3 #Coherence map.
9 BASE_CODE = 4 #Base lines graphic.
10 ROW_CODE = 5 #Row Spectra.
11 TOTAL_CODE = 6 #Total Power.
12 DRIFT_CODE = 7 #Drifts graphics.
13 HEIGHT_CODE = 8 #Height profile.
14 PHASE_CODE = 9 #Signal Phase.
4 # USED IN jroplot_spectra.py
5 RTI_CODE = 0 # Range time intensity (RTI).
6 SPEC_CODE = 1 # Spectra (and Cross-spectra) information.
7 CROSS_CODE = 2 # Cross-Correlation information.
8 COH_CODE = 3 # Coherence map.
9 BASE_CODE = 4 # Base lines graphic.
10 ROW_CODE = 5 # Row Spectra.
11 TOTAL_CODE = 6 # Total Power.
12 DRIFT_CODE = 7 # Drifts graphics.
13 HEIGHT_CODE = 8 # Height profile.
14 PHASE_CODE = 9 # Signal Phase.
15 15
16 16 POWER_CODE = 16
17 17 NOISE_CODE = 17
18 18 BEACON_CODE = 18
19 19
20 #USED IN jroplot_parameters.py
20 # USED IN jroplot_parameters.py
21 21 WIND_CODE = 22
22 22 MSKYMAP_CODE = 23
23 23 MPHASE_CODE = 24
@@ -24,9 +24,9 except:
24 24 from time import sleep
25 25
26 26 from schainpy.model.data.jrodata import Spectra
27 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 # from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
28 28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
29 # from schainpy.model.io.jroIO_bltr import BLTRReader
30 30 from numpy import imag, shape, NaN
31 31
32 32
@@ -225,26 +225,26 SPARprc = header['SPARprc'][0]
225 225
226 226 SRVI_STRUCTURE = numpy.dtype([
227 227 ('frame_cnt', '<u4'),
228 ('time_t', '<u4'), #
229 ('tpow', '<f4'), #
230 ('npw1', '<f4'), #
231 ('npw2', '<f4'), #
232 ('cpw1', '<f4'), #
233 ('pcw2', '<f4'), #
234 ('ps_err', '<u4'), #
235 ('te_err', '<u4'), #
236 ('rc_err', '<u4'), #
237 ('grs1', '<u4'), #
238 ('grs2', '<u4'), #
239 ('azipos', '<f4'), #
240 ('azivel', '<f4'), #
241 ('elvpos', '<f4'), #
242 ('elvvel', '<f4'), #
228 ('time_t', '<u4'), #
229 ('tpow', '<f4'), #
230 ('npw1', '<f4'), #
231 ('npw2', '<f4'), #
232 ('cpw1', '<f4'), #
233 ('pcw2', '<f4'), #
234 ('ps_err', '<u4'), #
235 ('te_err', '<u4'), #
236 ('rc_err', '<u4'), #
237 ('grs1', '<u4'), #
238 ('grs2', '<u4'), #
239 ('azipos', '<f4'), #
240 ('azivel', '<f4'), #
241 ('elvpos', '<f4'), #
242 ('elvvel', '<f4'), #
243 243 ('northAngle', '<f4'),
244 ('microsec', '<u4'), #
244 ('microsec', '<u4'), #
245 245 ('azisetvel', '<f4'), #
246 246 ('elvsetpos', '<f4'), #
247 ('RadarConst', '<f4'), ]) #
247 ('RadarConst', '<f4'), ]) #
248 248
249 249 JUMP_STRUCTURE = numpy.dtype([
250 250 ('jump', '<u140'),
@@ -289,34 +289,34 RadarConst5 = RadarConst
289 289 # RecCounter=0
290 290 # Off2StartNxtRec=811248
291 291 # print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec
292 #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
293 #startFp.seek(OffRHeader, os.SEEK_SET)
292 # OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
293 # startFp.seek(OffRHeader, os.SEEK_SET)
294 294 print('debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec)
295 295 print('Posicion del bloque: ', OffRHeader)
296 296
297 297 header = numpy.fromfile(startFp, SRVI_STRUCTURE, 1)
298 298
299 299 self.frame_cnt = header['frame_cnt'][0]
300 self.time_t = header['frame_cnt'][0] #
301 self.tpow = header['frame_cnt'][0] #
302 self.npw1 = header['frame_cnt'][0] #
303 self.npw2 = header['frame_cnt'][0] #
304 self.cpw1 = header['frame_cnt'][0] #
305 self.pcw2 = header['frame_cnt'][0] #
306 self.ps_err = header['frame_cnt'][0] #
307 self.te_err = header['frame_cnt'][0] #
308 self.rc_err = header['frame_cnt'][0] #
309 self.grs1 = header['frame_cnt'][0] #
310 self.grs2 = header['frame_cnt'][0] #
311 self.azipos = header['frame_cnt'][0] #
312 self.azivel = header['frame_cnt'][0] #
313 self.elvpos = header['frame_cnt'][0] #
314 self.elvvel = header['frame_cnt'][0] #
315 self.northAngle = header['frame_cnt'][0] #
316 self.microsec = header['frame_cnt'][0] #
317 self.azisetvel = header['frame_cnt'][0] #
318 self.elvsetpos = header['frame_cnt'][0] #
319 self.RadarConst = header['frame_cnt'][0] #
300 self.time_t = header['frame_cnt'][0] #
301 self.tpow = header['frame_cnt'][0] #
302 self.npw1 = header['frame_cnt'][0] #
303 self.npw2 = header['frame_cnt'][0] #
304 self.cpw1 = header['frame_cnt'][0] #
305 self.pcw2 = header['frame_cnt'][0] #
306 self.ps_err = header['frame_cnt'][0] #
307 self.te_err = header['frame_cnt'][0] #
308 self.rc_err = header['frame_cnt'][0] #
309 self.grs1 = header['frame_cnt'][0] #
310 self.grs2 = header['frame_cnt'][0] #
311 self.azipos = header['frame_cnt'][0] #
312 self.azivel = header['frame_cnt'][0] #
313 self.elvpos = header['frame_cnt'][0] #
314 self.elvvel = header['frame_cnt'][0] #
315 self.northAngle = header['frame_cnt'][0] #
316 self.microsec = header['frame_cnt'][0] #
317 self.azisetvel = header['frame_cnt'][0] #
318 self.elvsetpos = header['frame_cnt'][0] #
319 self.RadarConst = header['frame_cnt'][0] #
320 320
321 321
322 322 self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
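The record above is read through numpy's structured-dtype interface; every attribute is then copied from header['frame_cnt'], although each one presumably corresponds to its own field of SRVI_STRUCTURE. A minimal sketch of reading one record and unpacking it by field name (the file name and the truncated dtype are hypothetical):

import numpy

SRVI_STRUCTURE = numpy.dtype([('frame_cnt', '<u4'),
                              ('time_t', '<u4'),
                              ('tpow', '<f4'),
                              ('npw1', '<f4')])      # truncated version of the full record

with open('srvi_block.bin', 'rb') as fp:             # hypothetical file
    header = numpy.fromfile(fp, SRVI_STRUCTURE, 1)   # one record, laid out field by field

frame_cnt = header['frame_cnt'][0]
time_t = header['time_t'][0]                         # each attribute from its own field
tpow = header['tpow'][0]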
@@ -328,4 +328,4 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
328 328
329 329 print('==============================================')
330 330
331 print('==============================================')
\ No newline at end of file
331 print('==============================================')
@@ -21,4 +21,4 from .jroIO_mira35c import *
21 21 from .julIO_param import *
22 22
23 23 from .pxIO_param import *
24 from .jroIO_simulator import *
\ No newline at end of file
24 from .jroIO_simulator import *
@@ -104,7 +104,7 class BLTRParamReader(Reader, ProcessingUnit):
104 104 self.isConfig = False
105 105 self.filename = None
106 106 self.status_value = 0
107 self.datatime = datetime.datetime(1900,1,1)
107 self.datatime = datetime.datetime(1900, 1, 1)
108 108 self.filefmt = "*********%Y%m%d******"
109 109
110 110 def setup(self, **kwargs):
@@ -119,7 +119,7 class BLTRParamReader(Reader, ProcessingUnit):
119 119
120 120 for nTries in range(self.nTries):
121 121 fullpath = self.searchFilesOnLine(self.path, self.startDate,
122 self.endDate, self.expLabel, self.ext, self.walk,
122 self.endDate, self.expLabel, self.ext, self.walk,
123 123 self.filefmt, self.folderfmt)
124 124 try:
125 125 fullpath = next(fullpath)
@@ -138,7 +138,7 class BLTRParamReader(Reader, ProcessingUnit):
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 self.delay, self.path, nTries + 1),
141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
@@ -148,7 +148,7 class BLTRParamReader(Reader, ProcessingUnit):
148 148 self.readFirstHeader()
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153 self.setNextFile()
154 154
@@ -258,7 +258,7 class BLTRParamReader(Reader, ProcessingUnit):
258 258 self.rx_gains = self.header_rec['rx_gains']
259 259 self.time = self.header_rec['time'][0]
260 260 dt = datetime.datetime.utcfromtimestamp(self.time)
261 if dt.date()>self.datatime.date():
261 if dt.date() > self.datatime.date():
262 262 self.flagDiscontinuousBlock = 1
263 263 self.datatime = dt
264 264
@@ -352,4 +352,4 class BLTRParamReader(Reader, ProcessingUnit):
352 352
353 353 self.getData()
354 354
355 return
\ No newline at end of file
355 return
@@ -181,8 +181,8 class RecordHeaderBLTR():
181 181 self.Off2StartData = int(header['Off2StartData'][0])
182 182 self.nUtime = header['nUtime'][0]
183 183 self.nMilisec = header['nMilisec'][0]
184 self.ExpTagName = '' # str(header['ExpTagName'][0])
185 self.ExpComment = '' # str(header['ExpComment'][0])
184 self.ExpTagName = '' # str(header['ExpTagName'][0])
185 self.ExpComment = '' # str(header['ExpComment'][0])
186 186 self.SiteLatDegrees = header['SiteLatDegrees'][0]
187 187 self.SiteLongDegrees = header['SiteLongDegrees'][0]
188 188 self.RTCgpsStatus = header['RTCgpsStatus'][0]
@@ -293,7 +293,7 class BLTRSpectraReader (ProcessingUnit):
293 293
294 294 self.getData()
295 295
296 def setup(self,
296 def setup(self,
297 297 path=None,
298 298 startDate=None,
299 299 endDate=None,
@@ -374,7 +374,7 class BLTRSpectraReader (ProcessingUnit):
374 374 return
375 375
376 376 if self.mode == 1:
377 self.rheader.read(self.BlockCounter+1)
377 self.rheader.read(self.BlockCounter + 1)
378 378 elif self.mode == 0:
379 379 self.rheader.read(self.BlockCounter)
380 380
@@ -393,13 +393,13 class BLTRSpectraReader (ProcessingUnit):
393 393 self.dataOut.nRdPairs = self.nRdPairs
394 394 self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
395 395 self.dataOut.channelList = range(self.nChannels)
396 self.dataOut.nProfiles=self.rheader.nProfiles
397 self.dataOut.nIncohInt=self.rheader.nIncohInt
398 self.dataOut.nCohInt=self.rheader.nCohInt
399 self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
400 self.dataOut.PRF=self.rheader.PRFhz
401 self.dataOut.nFFTPoints=self.rheader.nProfiles
402 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
396 self.dataOut.nProfiles = self.rheader.nProfiles
397 self.dataOut.nIncohInt = self.rheader.nIncohInt
398 self.dataOut.nCohInt = self.rheader.nCohInt
399 self.dataOut.ippSeconds = 1 / float(self.rheader.PRFhz)
400 self.dataOut.PRF = self.rheader.PRFhz
401 self.dataOut.nFFTPoints = self.rheader.nProfiles
402 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec / 1000.
403 403 self.dataOut.timeZone = 0
404 404 self.dataOut.useLocalTime = False
405 405 self.dataOut.nmodes = 2
@@ -408,10 +408,10 class BLTRSpectraReader (ProcessingUnit):
408 408 self.Off2StartNxtRec + self.Off2StartData
409 409 self.fp.seek(OffDATA, os.SEEK_SET)
410 410
411 self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
411 self.data_fft = numpy.fromfile(self.fp, [('complex', '<c8')], self.nProfiles * self.nChannels * self.nHeights)
412 412 self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
413 self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
414 self.data_block = numpy.transpose(self.data_block, (1,2,0))
413 self.data_block = numpy.reshape(self.data_fft, (self.nHeights, self.nChannels, self.nProfiles))
414 self.data_block = numpy.transpose(self.data_block, (1, 2, 0))
415 415 copy = self.data_block.copy()
416 416 spc = copy * numpy.conjugate(copy)
417 417 self.data_spc = numpy.absolute(spc) # valor absoluto o magnitud
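A minimal sketch of the reshape/transpose step above, assuming the file stores the FFT samples in (nHeights, nChannels, nProfiles) order as the reshape implies:

import numpy

nHeights, nChannels, nProfiles = 4, 2, 8
data_fft = (numpy.arange(nHeights * nChannels * nProfiles) * (1 + 1j)).astype('complex')

data_block = numpy.reshape(data_fft, (nHeights, nChannels, nProfiles))
data_block = numpy.transpose(data_block, (1, 2, 0))              # -> (nChannels, nProfiles, nHeights)

spc = numpy.absolute(data_block * numpy.conjugate(data_block))   # self-spectra, as data_spc
print(data_block.shape, spc.shape)                               # (2, 8, 4) (2, 8, 4)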
@@ -450,4 +450,4 class BLTRSpectraReader (ProcessingUnit):
450 450
451 451 self.BlockCounter += 2
452 452 self.dataOut.data_spc = self.data_spc
453 self.dataOut.data_cspc =self.data_cspc
453 self.dataOut.data_cspc = self.data_cspc
@@ -23,36 +23,36 except:
23 23 class RadacHeader():
24 24 def __init__(self, fp):
25 25 header = 'Raw11/Data/RadacHeader'
26 self.beamCodeByPulse = fp.get(header+'/BeamCode')
26 self.beamCodeByPulse = fp.get(header + '/BeamCode')
27 27 self.beamCode = fp.get('Raw11/Data/Beamcodes')
28 self.code = fp.get(header+'/Code')
29 self.frameCount = fp.get(header+'/FrameCount')
30 self.modeGroup = fp.get(header+'/ModeGroup')
31 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
32 self.pulseCount = fp.get(header+'/PulseCount')
33 self.radacTime = fp.get(header+'/RadacTime')
34 self.timeCount = fp.get(header+'/TimeCount')
35 self.timeStatus = fp.get(header+'/TimeStatus')
36
37 self.nrecords = self.pulseCount.shape[0] #nblocks
38 self.npulses = self.pulseCount.shape[1] #nprofile
39 self.nsamples = self.nsamplesPulse[0,0] #ngates
28 self.code = fp.get(header + '/Code')
29 self.frameCount = fp.get(header + '/FrameCount')
30 self.modeGroup = fp.get(header + '/ModeGroup')
31 self.nsamplesPulse = fp.get(header + '/NSamplesPulse')
32 self.pulseCount = fp.get(header + '/PulseCount')
33 self.radacTime = fp.get(header + '/RadacTime')
34 self.timeCount = fp.get(header + '/TimeCount')
35 self.timeStatus = fp.get(header + '/TimeStatus')
36
37 self.nrecords = self.pulseCount.shape[0] # nblocks
38 self.npulses = self.pulseCount.shape[1] # nprofile
39 self.nsamples = self.nsamplesPulse[0, 0] # ngates
40 40 self.nbeams = self.beamCode.shape[1]
41 41
42 42
43 43 def getIndexRangeToPulse(self, idrecord=0):
44 #indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
45 #startPulseCountId = indexToZero[0][0]
46 #endPulseCountId = startPulseCountId - 1
47 #range1 = numpy.arange(startPulseCountId,self.npulses,1)
48 #range2 = numpy.arange(0,startPulseCountId,1)
49 #return range1, range2
44 # indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
45 # startPulseCountId = indexToZero[0][0]
46 # endPulseCountId = startPulseCountId - 1
47 # range1 = numpy.arange(startPulseCountId,self.npulses,1)
48 # range2 = numpy.arange(0,startPulseCountId,1)
49 # return range1, range2
50 50 zero = 0
51 npulse = max(self.pulseCount[0,:]+1)-1
52 looking_index = numpy.where(self.pulseCount.value[idrecord,:]==npulse)[0]
51 npulse = max(self.pulseCount[0, :] + 1) - 1
52 looking_index = numpy.where(self.pulseCount.value[idrecord, :] == npulse)[0]
53 53 getLastIndex = looking_index[-1]
54 index_data = numpy.arange(0,getLastIndex+1,1)
55 index_buffer = numpy.arange(getLastIndex+1,self.npulses,1)
54 index_data = numpy.arange(0, getLastIndex + 1, 1)
55 index_buffer = numpy.arange(getLastIndex + 1, self.npulses, 1)
56 56 return index_data, index_buffer
57 57
58 58 class AMISRReader(ProcessingUnit):
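getIndexRangeToPulse above splits a record's pulses at the last occurrence of the highest pulse count, so the leading part feeds the current datablock and the tail is buffered for the next one. A small sketch with a hypothetical pulseCount row:

import numpy

npulses = 10
pulseCount = numpy.array([6, 7, 8, 9, 0, 1, 2, 3, 4, 5])      # hypothetical record row

npulse = max(pulseCount + 1) - 1                               # highest pulse id (9)
getLastIndex = numpy.where(pulseCount == npulse)[0][-1]        # index 3
index_data = numpy.arange(0, getLastIndex + 1, 1)              # [0..3] -> current datablock
index_buffer = numpy.arange(getLastIndex + 1, npulses, 1)      # [4..9] -> buffered
print(index_data, index_buffer)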
@@ -91,7 +91,7 class AMISRReader(ProcessingUnit):
91 91 self.index_amisr_buffer = None
92 92 self.beamCodeByFrame = None
93 93 self.radacTimeByFrame = None
94 #atributos originales tal y como esta en el archivo de datos
94 # atributos originales tal y como esta en el archivo de datos
95 95 self.beamCodesFromFile = None
96 96 self.radacTimeFromFile = None
97 97 self.rangeFromFile = None
@@ -101,7 +101,7 class AMISRReader(ProcessingUnit):
101 101 self.beamCodeDict = {}
102 102 self.beamRangeDict = {}
103 103
104 #experiment cgf file
104 # experiment cgf file
105 105 self.npulsesint_fromfile = None
106 106 self.recordsperfile_fromfile = None
107 107 self.nbeamcodes_fromfile = None
@@ -131,7 +131,7 class AMISRReader(ProcessingUnit):
131 131
132 132 return dataObj
133 133
134 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
134 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
135 135 self.path = path
136 136 self.startDate = startDate
137 137 self.endDate = endDate
@@ -144,7 +144,7 class AMISRReader(ProcessingUnit):
144 144 self.status = 1
145 145 else:
146 146 self.status = 0
147 print('Path:%s does not exists'%self.path)
147 print('Path:%s does not exists' % self.path)
148 148
149 149 return
150 150
@@ -153,14 +153,14 class AMISRReader(ProcessingUnit):
153 153 year = int(amisr_dirname_format[0:4])
154 154 month = int(amisr_dirname_format[4:6])
155 155 dom = int(amisr_dirname_format[6:8])
156 thisDate = datetime.date(year,month,dom)
156 thisDate = datetime.date(year, month, dom)
157 157
158 if (thisDate>=self.startDate and thisDate <= self.endDate):
158 if (thisDate >= self.startDate and thisDate <= self.endDate):
159 159 return amisr_dirname_format
160 160 except:
161 161 return None
162 162
163 def __findDataForDates(self,online=False):
163 def __findDataForDates(self, online=False):
164 164
165 165
166 166
@@ -168,13 +168,13 class AMISRReader(ProcessingUnit):
168 168 return None
169 169
170 170 pat = '\d+.\d+'
171 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
172 dirnameList = [x for x in dirnameList if x!=None]
171 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
172 dirnameList = [x for x in dirnameList if x != None]
173 173 dirnameList = [x.string for x in dirnameList]
174 174 if not(online):
175 175 dirnameList = [self.__selDates(x) for x in dirnameList]
176 dirnameList = [x for x in dirnameList if x!=None]
177 if len(dirnameList)>0:
176 dirnameList = [x for x in dirnameList if x != None]
177 if len(dirnameList) > 0:
178 178 self.status = 1
179 179 self.dirnameList = dirnameList
180 180 self.dirnameList.sort()
@@ -183,34 +183,34 class AMISRReader(ProcessingUnit):
183 183 return None
184 184
185 185 def __getTimeFromData(self):
186 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
186 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
188 188
189 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
189 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
190 190 print('........................................')
191 191 filter_filenameList = []
192 192 self.filenameList.sort()
193 for i in range(len(self.filenameList)-1):
193 for i in range(len(self.filenameList) - 1):
194 194 filename = self.filenameList[i]
195 fp = h5py.File(filename,'r')
195 fp = h5py.File(filename, 'r')
196 196 time_str = fp.get('Time/RadacTimeString')
197 197
198 198 startDateTimeStr_File = time_str[0][0].split('.')[0]
199 199 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
200 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
200 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
201 201
202 202 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
203 203 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
204 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
204 endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
205 205
206 206 fp.close()
207 207
208 208 if self.timezone == 'lt':
209 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
210 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
209 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
210 endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
211 211
212 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
213 #self.filenameList.remove(filename)
212 if (endDateTime_File >= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
213 # self.filenameList.remove(filename)
214 214 filter_filenameList.append(filename)
215 215
216 216 filter_filenameList.sort()
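The filter above rebuilds datetimes from the HDF5 time strings field by field with time.strptime. A shorter equivalent sketch (the sample string is made up):

import datetime
import time

time_str = '2020-01-15 07:30:00.123'
junk = time.strptime(time_str.split('.')[0], '%Y-%m-%d %H:%M:%S')
dt = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday,
                       junk.tm_hour, junk.tm_min, junk.tm_sec)

# the same result in a single call
dt2 = datetime.datetime.strptime(time_str.split('.')[0], '%Y-%m-%d %H:%M:%S')
assert dt == dt2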
@@ -218,7 +218,7 class AMISRReader(ProcessingUnit):
218 218 return 1
219 219
220 220 def __filterByGlob1(self, dirName):
221 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
221 filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
222 222 filterDict = {}
223 223 filterDict.setdefault(dirName)
224 224 filterDict[dirName] = filter_files
@@ -233,21 +233,21 class AMISRReader(ProcessingUnit):
233 233
234 234
235 235 def __selectDataForTimes(self, online=False):
236 #aun no esta implementado el filtro for tiempo
236 # aun no esta implementado el filtro for tiempo
237 237 if not(self.status):
238 238 return None
239 239
240 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
240 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
241 241
242 242 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
243 243
244 244 self.__getFilenameList(fileListInKeys, dirList)
245 245 if not(online):
246 #filtro por tiempo
246 # filtro por tiempo
247 247 if not(self.all):
248 248 self.__getTimeFromData()
249 249
250 if len(self.filenameList)>0:
250 if len(self.filenameList) > 0:
251 251 self.status = 1
252 252 self.filenameList.sort()
253 253 else:
@@ -255,7 +255,7 class AMISRReader(ProcessingUnit):
255 255 return None
256 256
257 257 else:
258 #get the last file - 1
258 # get the last file - 1
259 259 self.filenameList = [self.filenameList[-2]]
260 260
261 261 new_dirnameList = []
@@ -291,8 +291,8 class AMISRReader(ProcessingUnit):
291 291 path,
292 292 startDate,
293 293 endDate,
294 startTime=datetime.time(0,0,0),
295 endTime=datetime.time(23,59,59),
294 startTime=datetime.time(0, 0, 0),
295 endTime=datetime.time(23, 59, 59),
296 296 walk=True):
297 297
298 298 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -304,7 +304,7 class AMISRReader(ProcessingUnit):
304 304 self.__selectDataForTimes()
305 305
306 306 for i in range(len(self.filenameList)):
307 print("%s" %(self.filenameList[i]))
307 print("%s" % (self.filenameList[i]))
308 308
309 309 return
310 310
@@ -320,7 +320,7 class AMISRReader(ProcessingUnit):
320 320
321 321 filename = self.filenameList[idFile]
322 322
323 amisrFilePointer = h5py.File(filename,'r')
323 amisrFilePointer = h5py.File(filename, 'r')
324 324
325 325 break
326 326
@@ -330,7 +330,7 class AMISRReader(ProcessingUnit):
330 330
331 331 self.amisrFilePointer = amisrFilePointer
332 332
333 print("Setting the file: %s"%self.filename)
333 print("Setting the file: %s" % self.filename)
334 334
335 335 return 1
336 336
@@ -341,47 +341,47 class AMISRReader(ProcessingUnit):
341 341 self.__selectDataForTimes(online=True)
342 342 filename = self.filenameList[0]
343 343 while self.__filename_online == filename:
344 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
344 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
345 345 sleep(self.__waitForNewFile)
346 346 self.__selectDataForTimes(online=True)
347 347 filename = self.filenameList[0]
348 348
349 349 self.__filename_online = filename
350 350
351 self.amisrFilePointer = h5py.File(filename,'r')
351 self.amisrFilePointer = h5py.File(filename, 'r')
352 352 self.flagIsNewFile = 1
353 353 self.filename = filename
354 print("Setting the file: %s"%self.filename)
354 print("Setting the file: %s" % self.filename)
355 355 return 1
356 356
357 357
358 358 def __readHeader(self):
359 359 self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
360 360
361 #update values from experiment cfg file
361 # update values from experiment cfg file
362 362 if self.radacHeaderObj.nrecords == self.recordsperfile_fromfile:
363 363 self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
364 364 self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
365 365 self.radacHeaderObj.npulses = self.npulsesint_fromfile
366 366 self.radacHeaderObj.nsamples = self.ngates_fromfile
367 367
368 #looking index list for data
369 start_index = self.radacHeaderObj.pulseCount[0,:][0]
368 # looking index list for data
369 start_index = self.radacHeaderObj.pulseCount[0, :][0]
370 370 end_index = self.radacHeaderObj.npulses
371 371 range4data = list(range(start_index, end_index))
372 372 self.index4_schain_datablock = numpy.array(range4data)
373 373
374 374 buffer_start_index = 0
375 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
375 buffer_end_index = self.radacHeaderObj.pulseCount[0, :][0]
376 376 range4buffer = list(range(buffer_start_index, buffer_end_index))
377 377 self.index4_buffer = numpy.array(range4buffer)
378 378
379 379 self.linear_pulseCount = numpy.array(range4data + range4buffer)
380 self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0,:]+1)
380 self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0, :] + 1)
381 381
382 #get tuning frequency
383 frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
384 self.frequency_h5file = frequency_h5file_dataset[0,0]
382 # get tuning frequency
383 frequency_h5file_dataset = self.amisrFilePointer.get('Rx' + '/TuningFrequency')
384 self.frequency_h5file = frequency_h5file_dataset[0, 0]
385 385
386 386 self.flagIsNewFile = 1
387 387
@@ -391,20 +391,20 class AMISRReader(ProcessingUnit):
391 391
392 392 beamCodeMap = self.amisrFilePointer.get('Setup/BeamcodeMap')
393 393
394 for i in range(len(self.radacHeaderObj.beamCode[0,:])):
394 for i in range(len(self.radacHeaderObj.beamCode[0, :])):
395 395 self.beamCodeDict.setdefault(i)
396 396 self.beamRangeDict.setdefault(i)
397 beamcodeValue = self.radacHeaderObj.beamCode[0,i]
398 beamcodeIndex = numpy.where(beamCodeMap[:,0] == beamcodeValue)[0][0]
397 beamcodeValue = self.radacHeaderObj.beamCode[0, i]
398 beamcodeIndex = numpy.where(beamCodeMap[:, 0] == beamcodeValue)[0][0]
399 399 x = beamCodeMap[beamcodeIndex][1]
400 400 y = beamCodeMap[beamcodeIndex][2]
401 401 z = beamCodeMap[beamcodeIndex][3]
402 402 self.beamCodeDict[i] = [beamcodeValue, x, y, z]
403 403
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0, :]
405 405
406 406 for i in range(len(list(self.beamCodeDict.values()))):
407 xx = numpy.where(just4record0==list(self.beamCodeDict.values())[i][0])
407 xx = numpy.where(just4record0 == list(self.beamCodeDict.values())[i][0])
408 408 indexPulseByBeam = self.linear_pulseCount[xx[0]]
409 409 self.beamRangeDict[i] = indexPulseByBeam
410 410
@@ -414,22 +414,22 class AMISRReader(ProcessingUnit):
414 414
415 415 experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')
416 416
417 expFinder = glob.glob1(experimentCfgPath,'*.exp')
418 if len(expFinder)== 0:
417 expFinder = glob.glob1(experimentCfgPath, '*.exp')
418 if len(expFinder) == 0:
419 419 self.status = 0
420 420 return None
421 421
422 experimentFilename = os.path.join(experimentCfgPath,expFinder[0])
422 experimentFilename = os.path.join(experimentCfgPath, expFinder[0])
423 423
424 424 f = open(experimentFilename)
425 425 lines = f.readlines()
426 426 f.close()
427 427
428 parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
428 parmsList = ['npulsesint*', 'recordsperfile*', 'nbeamcodes*', 'ngates*']
429 429 filterList = [fnmatch.filter(lines, x) for x in parmsList]
430 430
431 431
432 values = [re.sub(r'\D',"",x[0]) for x in filterList]
432 values = [re.sub(r'\D', "", x[0]) for x in filterList]
433 433
434 434 self.npulsesint_fromfile = int(values[0])
435 435 self.recordsperfile_fromfile = int(values[1])
@@ -439,12 +439,12 class AMISRReader(ProcessingUnit):
439 439 tufileFinder = fnmatch.filter(lines, 'tufile=*')
440 440 tufile = tufileFinder[0].split('=')[1].split('\n')[0]
441 441 tufile = tufile.split('\r')[0]
442 tufilename = os.path.join(experimentCfgPath,tufile)
442 tufilename = os.path.join(experimentCfgPath, tufile)
443 443
444 444 f = open(tufilename)
445 445 lines = f.readlines()
446 446 f.close()
447 self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6
447 self.ippSeconds_fromfile = float(lines[1].split()[2]) / 1E6
448 448
449 449
450 450 self.status = 1
@@ -459,7 +459,7 class AMISRReader(ProcessingUnit):
459 459 self.buffer_radactime = numpy.zeros_like(self.radacTimeByFrame)
460 460
461 461
462 def __setNextFile(self,online=False):
462 def __setNextFile(self, online=False):
463 463
464 464 if not(online):
465 465 newFile = self.__setNextFileOffline()
@@ -479,11 +479,11 class AMISRReader(ProcessingUnit):
479 479 self.readDataBlock()
480 480
481 481
482 def setup(self,path=None,
483 startDate=None,
484 endDate=None,
485 startTime=datetime.time(0,0,0),
486 endTime=datetime.time(23,59,59),
482 def setup(self, path=None,
483 startDate=None,
484 endDate=None,
485 startTime=datetime.time(0, 0, 0),
486 endTime=datetime.time(23, 59, 59),
487 487 walk=True,
488 488 timezone='ut',
489 489 all=0,
@@ -493,13 +493,13 class AMISRReader(ProcessingUnit):
493 493 self.all = all
494 494 self.online = online
495 495 if not(online):
496 #Busqueda de archivos offline
496 # Busqueda de archivos offline
497 497 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
498 498 else:
499 499 self.searchFilesOnLine(path, walk)
500 500
501 501 if not(self.filenameList):
502 print("There is no files into the folder: %s"%(path))
502 print("There is no files into the folder: %s" % (path))
503 503
504 504 sys.exit(-1)
505 505
@@ -511,22 +511,22 class AMISRReader(ProcessingUnit):
511 511
512 512 # first_beamcode = self.radacHeaderObj.beamCodeByPulse[0,0]
513 513 # index = numpy.where(self.radacHeaderObj.beamCodeByPulse[0,:]!=first_beamcode)[0][0]
514 self.profileIndex_offset = self.radacHeaderObj.pulseCount[0,:][0]
514 self.profileIndex_offset = self.radacHeaderObj.pulseCount[0, :][0]
515 515 self.profileIndex = self.profileIndex_offset
516 516
517 517 def readRanges(self):
518 518 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
519 519
520 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
520 self.rangeFromFile = numpy.reshape(dataset.value, (-1))
521 521 return self.rangeFromFile
522 522
523 523
524 def readRadacTime(self,idrecord, range1, range2):
524 def readRadacTime(self, idrecord, range1, range2):
525 525 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
526 526
527 527 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
528 #radacTimeByFrame = dataset[idrecord - 1,range1]
529 #radacTimeByFrame = dataset[idrecord,range2]
528 # radacTimeByFrame = dataset[idrecord - 1,range1]
529 # radacTimeByFrame = dataset[idrecord,range2]
530 530
531 531 return radacTimeByFrame
532 532
@@ -535,8 +535,8 class AMISRReader(ProcessingUnit):
535 535 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
536 536 self.beamCodesFromFile = dataset.value
537 537
538 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
539 #beamcodeByFrame[range2] = dataset[idrecord, range2]
538 # beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
539 # beamcodeByFrame[range2] = dataset[idrecord, range2]
540 540 beamcodeByFrame[range1] = dataset[idrecord, range1]
541 541 beamcodeByFrame[range2] = dataset[idrecord, range2]
542 542
@@ -544,7 +544,7 class AMISRReader(ProcessingUnit):
544 544
545 545
546 546 def __setDataByFrame(self):
547 ndata = 2 # porque es complejo
547 ndata = 2 # porque es complejo
548 548 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
549 549 return dataByFrame
550 550
@@ -553,31 +553,31 class AMISRReader(ProcessingUnit):
553 553 return dataset
554 554
555 555 def __setDataBlock(self,):
556 real = self.dataByFrame[:,:,0] #asumo que 0 es real
557 imag = self.dataByFrame[:,:,1] #asumo que 1 es imaginario
558 datablock = real + imag*1j #armo el complejo
556 real = self.dataByFrame[:, :, 0] # asumo que 0 es real
557 imag = self.dataByFrame[:, :, 1] # asumo que 1 es imaginario
558 datablock = real + imag * 1j # armo el complejo
559 559 return datablock
560 560
561 def readSamples_version1(self,idrecord):
562 #estas tres primeras lineas solo se deben ejecutar una vez
561 def readSamples_version1(self, idrecord):
562 # estas tres primeras lineas solo se deben ejecutar una vez
563 563 if self.flagIsNewFile:
564 #reading dataset
564 # reading dataset
565 565 self.dataset = self.__readDataSet()
566 566 self.flagIsNewFile = 0
567 567
568 568 if idrecord == 0:
569 self.dataByFrame[self.index4_schain_datablock, : ,:] = self.dataset[0, self.index_amisr_sample,:,:]
569 self.dataByFrame[self.index4_schain_datablock, : , :] = self.dataset[0, self.index_amisr_sample, :, :]
570 570 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[0, self.index_amisr_sample]
571 571 datablock = self.__setDataBlock()
572 572 if len(self.index_amisr_buffer) > 0:
573 self.buffer = self.dataset[0, self.index_amisr_buffer,:,:]
573 self.buffer = self.dataset[0, self.index_amisr_buffer, :, :]
574 574 self.buffer_radactime = self.radacHeaderObj.radacTime[0, self.index_amisr_buffer]
575 575
576 576 return datablock
577 577 if len(self.index_amisr_buffer) > 0:
578 self.dataByFrame[self.index4_buffer,:,:] = self.buffer.copy()
578 self.dataByFrame[self.index4_buffer, :, :] = self.buffer.copy()
579 579 self.radacTimeByFrame[self.index4_buffer] = self.buffer_radactime.copy()
580 self.dataByFrame[self.index4_schain_datablock,:,:] = self.dataset[idrecord, self.index_amisr_sample,:,:]
580 self.dataByFrame[self.index4_schain_datablock, :, :] = self.dataset[idrecord, self.index_amisr_sample, :, :]
581 581 self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_sample]
582 582 datablock = self.__setDataBlock()
583 583 if len(self.index_amisr_buffer) > 0:
@@ -587,14 +587,14 class AMISRReader(ProcessingUnit):
587 587 return datablock
588 588
589 589
590 def readSamples(self,idrecord):
590 def readSamples(self, idrecord):
591 591 if self.flagIsNewFile:
592 592 self.dataByFrame = self.__setDataByFrame()
593 593 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
594 594
595 #reading ranges
595 # reading ranges
596 596 self.readRanges()
597 #reading dataset
597 # reading dataset
598 598 self.dataset = self.__readDataSet()
599 599
600 600 self.flagIsNewFile = 0
@@ -607,8 +607,8 class AMISRReader(ProcessingUnit):
607 607 def readDataBlock(self):
608 608
609 609 self.datablock = self.readSamples_version1(self.idrecord_count)
610 #self.datablock = self.readSamples(self.idrecord_count)
611 #print 'record:', self.idrecord_count
610 # self.datablock = self.readSamples(self.idrecord_count)
611 # print 'record:', self.idrecord_count
612 612
613 613 self.idrecord_count += 1
614 614 self.profileIndex = 0
@@ -626,7 +626,7 class AMISRReader(ProcessingUnit):
626 626 pass
627 627
628 628 def __hasNotDataInBuffer(self):
629 #self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
629 # self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
630 630 if self.profileIndex >= self.radacHeaderObj.npulses:
631 631 return 1
632 632 return 0
@@ -637,7 +637,7 class AMISRReader(ProcessingUnit):
637 637
638 638 def setObjProperties(self):
639 639
640 self.dataOut.heightList = self.rangeFromFile/1000.0 #km
640 self.dataOut.heightList = self.rangeFromFile / 1000.0 # km
641 641 self.dataOut.nProfiles = self.radacHeaderObj.npulses
642 642 self.dataOut.nRecords = self.radacHeaderObj.nrecords
643 643 self.dataOut.nBeams = self.radacHeaderObj.nbeams
@@ -653,9 +653,9 class AMISRReader(ProcessingUnit):
653 653 self.dataOut.beamRangeDict = self.beamRangeDict
654 654
655 655 if self.timezone == 'lt':
656 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
656 self.dataOut.timeZone = time.timezone / 60. # get the timezone in minutes
657 657 else:
658 self.dataOut.timeZone = 0 #by default time is UTC
658 self.dataOut.timeZone = 0 # by default time is UTC
659 659
660 660 def getData(self):
661 661
@@ -667,11 +667,11 class AMISRReader(ProcessingUnit):
667 667 self.readNextBlock()
668 668
669 669
670 if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
670 if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
671 671 self.dataOut.flagNoData = True
672 672 return 0
673 673
674 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
674 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex, :], (1, -1))
675 675
676 676 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
677 677 self.dataOut.profileIndex = self.profileIndex
@@ -688,4 +688,4 class AMISRReader(ProcessingUnit):
688 688 self.setObjProperties()
689 689 self.isConfig = True
690 690
691 self.getData()
\ No newline at end of file
691 self.getData()
@@ -194,7 +194,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 #<<<<<<<<<<<o o>>>>>>>>>>>
197 # <<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
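The diagram above describes the wrap-around case in which the time window crosses midnight. A minimal sketch of that check, assuming startTime and endTime are datetime.time values:

import datetime

def in_time_range(t, startTime, endTime):
    if startTime <= endTime:                   # window contained in a single day
        return startTime <= t <= endTime
    return t >= startTime or t <= endTime      # endTime belongs to the next day

print(in_time_range(datetime.time(1, 0), datetime.time(22, 0), datetime.time(6, 0)))  # True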
@@ -420,8 +420,8 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
423 d = DT_DIRECTIVES[fmt[x:x + 2]]
424 fmt = fmt.replace(fmt[x:x + 2], s[x:x + d])
425 425 return fmt
426 426
427 427 class Reader(object):
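parse_format above rewrites each %-directive with the fixed-width slice it occupies in the sample string. A rough usage sketch, assuming DT_DIRECTIVES maps each directive to its width in characters:

DT_DIRECTIVES = {'%Y': 4, '%m': 2, '%d': 2}        # assumed widths

def parse_format(s, fmt):
    for i in range(fmt.count('%')):
        x = fmt.index('%')
        d = DT_DIRECTIVES[fmt[x:x + 2]]
        fmt = fmt.replace(fmt[x:x + 2], s[x:x + d])
    return fmt

print(parse_format('20201231_010203.r', '%Y%m%d'))  # -> '20201231'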
@@ -518,7 +518,7 class Reader(object):
518 518 continue
519 519 return
520 520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
@@ -548,7 +548,7 class Reader(object):
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 expLabel, ext, walk,
551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
@@ -566,7 +566,7 class Reader(object):
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 expLabel, ext, walk,
569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
@@ -633,7 +633,7 class Reader(object):
633 633 nextFile = True
634 634 nextDay = False
635 635
636 for nFiles in range(self.nFiles+1):
636 for nFiles in range(self.nFiles + 1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
@@ -674,7 +674,7 class Reader(object):
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 self.fileIndex +=1
677 self.fileIndex += 1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
@@ -803,7 +803,7 class JRODataReader(Reader):
803 803 if prefixDir != None:
804 804 # formo el nombre del directorio xYYYYDDD (x=d o x=D)
805 805 if foldercounter == 0:
806 thispath = os.path.join(self.path, "%s%04d%03d" %
806 thispath = os.path.join(self.path, "%s%04d%03d" %
807 807 (prefixDir, self.year, self.doy))
808 808 else:
809 809 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
@@ -931,7 +931,7 class JRODataReader(Reader):
931 931 self.processingHeaderObj.read(self.fp)
932 932 self.firstHeaderSize = self.basicHeaderObj.size
933 933
934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 935 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 936 if datatype == 0:
937 937 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -949,7 +949,7 class JRODataReader(Reader):
949 949 raise ValueError('Data type was not defined')
950 950
951 951 self.dtype = datatype_str
952 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
952 # self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 953 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 954 self.firstHeaderSize + self.basicHeaderSize * \
955 955 (self.processingHeaderObj.dataBlocksPerFile - 1)
@@ -985,8 +985,8 class JRODataReader(Reader):
985 985 flag = False
986 986 if not self.online:
987 987 dt1 = basicHeaderObj.datatime
988 pos = self.fileSize-processingHeaderObj.blockSize-24
989 if pos<0:
988 pos = self.fileSize - processingHeaderObj.blockSize - 24
989 if pos < 0:
990 990 flag = False
991 991 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 992 else:
@@ -1131,7 +1131,7 class JRODataReader(Reader):
1131 1131
1132 1132 for nTries in range(self.nTries):
1133 1133 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1135 1135 self.filefmt, self.folderfmt)
1136 1136
1137 1137 try:
@@ -1144,7 +1144,7 class JRODataReader(Reader):
1144 1144
1145 1145 log.warning(
1146 1146 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 self.delay, self.path, nTries + 1),
1147 self.delay, self.path, nTries + 1),
1148 1148 self.name)
1149 1149 time.sleep(self.delay)
1150 1150
@@ -1158,7 +1158,7 class JRODataReader(Reader):
1158 1158 self.set = int(filename[8:11]) - 1
1159 1159 else:
1160 1160 log.log("Searching files in {}".format(self.path), self.name)
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1162 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163 1163
1164 1164 self.setNextFile()
@@ -1558,7 +1558,7 class printInfo(Operation):
1558 1558 Operation.__init__(self)
1559 1559 self.__printInfo = True
1560 1560
1561 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1561 def run(self, dataOut, headers=['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1562 if self.__printInfo == False:
1563 1563 return
1564 1564
@@ -43,18 +43,18 class DigitalRFReader(ProcessingUnit):
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 self.dataOut = Voltage()
47 self.__printInfo = True
46 self.dataOut = Voltage()
47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 self.__codeType = 0
51 self.__ippKm = None
52 self.__nCode = None
53 self.__nBaud = None
54 self.__code = None
55 self.dtype = None
56 self.oldAverage = None
57 self.path = None
50 self.__codeType = 0
51 self.__ippKm = None
52 self.__nCode = None
53 self.__nBaud = None
54 self.__code = None
55 self.dtype = None
56 self.oldAverage = None
57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
@@ -97,9 +97,9 class DigitalRFReader(ProcessingUnit):
97 97 nChannels=len(
98 98 self.__channelList),
99 99 adcResolution=14)
100 self.dataOut.type = "Voltage"
100 self.dataOut.type = "Voltage"
101 101
102 self.dataOut.data = None
102 self.dataOut.data = None
103 103
104 104 self.dataOut.dtype = self.dtype
105 105
@@ -107,32 +107,32 class DigitalRFReader(ProcessingUnit):
107 107
108 108 # self.dataOut.nHeights = 0
109 109
110 self.dataOut.nProfiles = int(nProfiles)
110 self.dataOut.nProfiles = int(nProfiles)
111 111
112 self.dataOut.heightList = self.__firstHeigth + \
112 self.dataOut.heightList = self.__firstHeigth + \
113 113 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 114 self.__deltaHeigth
115 115
116 116 self.dataOut.channelList = list(range(self.__num_subchannels))
117 117
118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
119 119
120 120 # self.dataOut.channelIndexList = None
121 121
122 self.dataOut.flagNoData = True
122 self.dataOut.flagNoData = True
123 123
124 124 self.dataOut.flagDataAsBlock = False
125 125 # Set to TRUE if the data is discontinuous
126 126 self.dataOut.flagDiscontinuousBlock = False
127 127
128 self.dataOut.utctime = None
128 self.dataOut.utctime = None
129 129
130 130 # timezone like jroheader, difference in minutes between UTC and localtime
131 self.dataOut.timeZone = self.__timezone / 60
131 self.dataOut.timeZone = self.__timezone / 60
132 132
133 self.dataOut.dstFlag = 0
133 self.dataOut.dstFlag = 0
134 134
135 self.dataOut.errorCount = 0
135 self.dataOut.errorCount = 0
136 136
137 137 try:
138 138 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
@@ -145,9 +145,9 class DigitalRFReader(ProcessingUnit):
145 145 # asumo que la data esta sin flip
146 146 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 147
148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 149
150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 151 except:
152 152 pass
153 153
@@ -156,9 +156,9 class DigitalRFReader(ProcessingUnit):
156 156 # Time interval between profiles
157 157 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 158
159 self.dataOut.frequency = self.__frequency
159 self.dataOut.frequency = self.__frequency
160 160
161 self.dataOut.realtime = self.__online
161 self.dataOut.realtime = self.__online
162 162
163 163 def findDatafiles(self, path, startDate=None, endDate=None):
164 164
@@ -171,46 +171,46 class DigitalRFReader(ProcessingUnit):
171 171 except:
172 172 digitalReadObj = digital_rf.DigitalRFReader(path)
173 173
174 channelNameList = digitalReadObj.get_channels()
174 channelNameList = digitalReadObj.get_channels()
175 175
176 176 if not channelNameList:
177 177 return []
178 178
179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 180
181 sample_rate = metadata_dict['sample_rate'][0]
181 sample_rate = metadata_dict['sample_rate'][0]
182 182
183 183 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 184
185 185 try:
186 timezone = this_metadata_file['timezone'].value
186 timezone = this_metadata_file['timezone'].value
187 187 except:
188 timezone = 0
188 timezone = 0
189 189
190 190 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 191 channelNameList[0]) / sample_rate - timezone
192 192
193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 195
196 196 if not startDate:
197 startDate = startDatetime.date()
197 startDate = startDatetime.date()
198 198
199 199 if not endDate:
200 endDate = endDatatime.date()
200 endDate = endDatatime.date()
201 201
202 dateList = []
202 dateList = []
203 203
204 thisDatetime = startDatetime
204 thisDatetime = startDatetime
205 205
206 206 while(thisDatetime <= endDatatime):
207 207
208 thisDate = thisDatetime.date()
208 thisDate = thisDatetime.date()
209 209
210 if thisDate < startDate:
210 if thisDate < startDate:
211 211 continue
212 212
213 if thisDate > endDate:
213 if thisDate > endDate:
214 214 break
215 215
216 216 dateList.append(thisDate)
@@ -250,10 +250,10 class DigitalRFReader(ProcessingUnit):
250 250 online
251 251 delay
252 252 '''
253 self.path = path
254 self.nCohInt = nCohInt
253 self.path = path
254 self.nCohInt = nCohInt
255 255 self.flagDecodeData = flagDecodeData
256 self.i = 0
256 self.i = 0
257 257 if not os.path.isdir(path):
258 258 raise ValueError("[Reading] Directory %s does not exist" % path)
259 259
@@ -263,7 +263,7 class DigitalRFReader(ProcessingUnit):
263 263 except:
264 264 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 265
266 channelNameList = self.digitalReadObj.get_channels()
266 channelNameList = self.digitalReadObj.get_channels()
267 267
268 268 if not channelNameList:
269 269 raise ValueError("[Reading] Directory %s does not have any files" % path)
@@ -273,27 +273,27 class DigitalRFReader(ProcessingUnit):
273 273
274 274 ########## Reading metadata ######################
275 275
276 top_properties = self.digitalReadObj.get_properties(
276 top_properties = self.digitalReadObj.get_properties(
277 277 channelNameList[channelList[0]])
278 278
279 self.__num_subchannels = top_properties['num_subchannels']
280 self.__sample_rate = 1.0 * \
279 self.__num_subchannels = top_properties['num_subchannels']
280 self.__sample_rate = 1.0 * \
281 281 top_properties['sample_rate_numerator'] / \
282 282 top_properties['sample_rate_denominator']
283 283 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
285 285
286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 287 channelNameList[channelList[0]])
288 metadata_bounds = this_metadata_file.get_bounds()
288 metadata_bounds = this_metadata_file.get_bounds()
289 289 self.fixed_metadata_dict = this_metadata_file.read(
290 290 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 291
292 292 try:
293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 294 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 297 except:
298 298 pass
299 299
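On the 'why 0.15?' question above: 0.15 km per microsecond is c/2, so 1e6 * 0.15 / sample_rate gives the range step per sample in km, and setup() later derives nSamples from it as int(ippKm / (1e6 * 0.15 / sample_rate)). A small numeric sketch with hypothetical values:

sample_rate = 1.0e6                              # samples per second
ippKm = 60.0                                     # inter-pulse period expressed in km

delta_height_km = 1e6 * 0.15 / sample_rate       # range step per sample, here 0.15 km
nSamples = int(ippKm / delta_height_km)          # range gates per IPP
print(delta_height_km, nSamples)                 # 0.15 400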
@@ -311,15 +311,15 class DigitalRFReader(ProcessingUnit):
311 311 self.__firstHeigth = 0
312 312
313 313 try:
314 codeType = self.__radarControllerHeader['codeType']
314 codeType = self.__radarControllerHeader['codeType']
315 315 except:
316 codeType = 0
316 codeType = 0
317 317
318 318 try:
319 319 if codeType:
320 320 nCode = self.__radarControllerHeader['nCode']
321 321 nBaud = self.__radarControllerHeader['nBaud']
322 code = self.__radarControllerHeader['code']
322 code = self.__radarControllerHeader['code']
323 323 except:
324 324 pass
325 325
@@ -330,18 +330,18 class DigitalRFReader(ProcessingUnit):
330 330 except:
331 331 ippKm = None
332 332 ####################################################
333 self.__ippKm = ippKm
333 self.__ippKm = ippKm
334 334 startUTCSecond = None
335 endUTCSecond = None
335 endUTCSecond = None
336 336
337 337 if startDate:
338 startDatetime = datetime.datetime.combine(startDate, startTime)
338 startDatetime = datetime.datetime.combine(startDate, startTime)
339 339 startUTCSecond = (
340 340 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 341
342 342 if endDate:
343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 endUTCSecond = (endDatetime - datetime.datetime(1970,
343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 345 1, 1)).total_seconds() + self.__timezone
346 346
347 347 start_index, end_index = self.digitalReadObj.get_bounds(
@@ -350,50 +350,50 class DigitalRFReader(ProcessingUnit):
350 350 if not startUTCSecond:
351 351 startUTCSecond = start_index / self.__sample_rate
352 352
353 if start_index > startUTCSecond * self.__sample_rate:
353 if start_index > startUTCSecond * self.__sample_rate:
354 354 startUTCSecond = start_index / self.__sample_rate
355 355
356 356 if not endUTCSecond:
357 endUTCSecond = end_index / self.__sample_rate
357 endUTCSecond = end_index / self.__sample_rate
358 358
359 if end_index < endUTCSecond * self.__sample_rate:
360 endUTCSecond = end_index / self.__sample_rate
359 if end_index < endUTCSecond * self.__sample_rate:
360 endUTCSecond = end_index / self.__sample_rate
361 361 if not nSamples:
362 362 if not ippKm:
363 363 raise ValueError("[Reading] nSamples or ippKm should be defined")
364 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 channelBoundList = []
364 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 channelBoundList = []
366 366 channelNameListFiltered = []
367 367
368 368 for thisIndexChannel in channelList:
369 thisChannelName = channelNameList[thisIndexChannel]
369 thisChannelName = channelNameList[thisIndexChannel]
370 370 start_index, end_index = self.digitalReadObj.get_bounds(
371 371 thisChannelName)
372 372 channelBoundList.append((start_index, end_index))
373 373 channelNameListFiltered.append(thisChannelName)
374 374
375 375 self.profileIndex = 0
376 self.i = 0
377 self.__delay = delay
378
379 self.__codeType = codeType
380 self.__nCode = nCode
381 self.__nBaud = nBaud
382 self.__code = code
383
384 self.__datapath = path
385 self.__online = online
386 self.__channelList = channelList
387 self.__channelNameList = channelNameListFiltered
376 self.i = 0
377 self.__delay = delay
378
379 self.__codeType = codeType
380 self.__nCode = nCode
381 self.__nBaud = nBaud
382 self.__code = code
383
384 self.__datapath = path
385 self.__online = online
386 self.__channelList = channelList
387 self.__channelNameList = channelNameListFiltered
388 388 self.__channelBoundList = channelBoundList
389 self.__nSamples = nSamples
390 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
391 self.__nChannels = len(self.__channelList)
389 self.__nSamples = nSamples
390 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
391 self.__nChannels = len(self.__channelList)
392 392
393 self.__startUTCSecond = startUTCSecond
394 self.__endUTCSecond = endUTCSecond
393 self.__startUTCSecond = startUTCSecond
394 self.__endUTCSecond = endUTCSecond
395 395
396 self.__timeInterval = 1.0 * self.__samples_to_read / \
396 self.__timeInterval = 1.0 * self.__samples_to_read / \
397 397 self.__sample_rate # Time interval
398 398
399 399 if online:
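For context, the nSamples fallback in the hunk above converts the inter-pulse period from kilometres to samples: 1e6 * 0.15 / sample_rate is the range spanned by one sample in km (0.15 km per microsecond of sampling, i.e. c/2). A minimal worked sketch, assuming sample_rate = 1 MHz and ippKm = 60:

    sample_rate = 1.0e6                          # samples per second (assumed)
    ippKm = 60.0                                 # inter-pulse period in km (assumed)
    km_per_sample = 1e6 * 0.15 / sample_rate     # 0.15 km of range per sample at 1 MHz
    nSamples = int(ippKm / km_per_sample)        # -> 400 samples per IPP
    timeInterval = 1.0 * nSamples / sample_rate  # -> 0.0004 s per block, as computed above
    print(nSamples, timeInterval)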
@@ -403,7 +403,7 class DigitalRFReader(ProcessingUnit):
403 403 # por que en el otro metodo lo primero q se hace es sumar samplestoread
404 404 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
405 405
406 self.__data_buffer = numpy.zeros(
406 self.__data_buffer = numpy.zeros(
407 407 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
408 408
409 409 self.__setFileHeader()
@@ -420,8 +420,8 class DigitalRFReader(ProcessingUnit):
420 420 datetime.datetime.utcfromtimestamp(
421 421 endUTCSecond - self.__timezone)
422 422 ))
423 self.oldAverage = None
424 self.count = 0
423 self.oldAverage = None
424 self.count = 0
425 425 self.executionTime = 0
426 426
427 427 def __reload(self):
@@ -438,13 +438,13 class DigitalRFReader(ProcessingUnit):
438 438 except:
439 439 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
440 440
441 start_index, end_index = self.digitalReadObj.get_bounds(
441 start_index, end_index = self.digitalReadObj.get_bounds(
442 442 self.__channelNameList[self.__channelList[0]])
443 443
444 if start_index > self.__startUTCSecond * self.__sample_rate:
444 if start_index > self.__startUTCSecond * self.__sample_rate:
445 445 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
446 446
447 if end_index > self.__endUTCSecond * self.__sample_rate:
447 if end_index > self.__endUTCSecond * self.__sample_rate:
448 448 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
449 449 print()
450 450 print("[Reading] New timerange found [%s, %s] " % (
@@ -459,14 +459,14 class DigitalRFReader(ProcessingUnit):
459 459 return False
460 460
461 461 def timeit(self, toExecute):
462 t0 = time.time()
462 t0 = time.time()
463 463 toExecute()
464 self.executionTime = time.time() - t0
464 self.executionTime = time.time() - t0
465 465 if self.oldAverage is None:
466 466 self.oldAverage = self.executionTime
467 self.oldAverage = (self.executionTime + self.count *
467 self.oldAverage = (self.executionTime + self.count *
468 468 self.oldAverage) / (self.count + 1.0)
469 self.count = self.count + 1.0
469 self.count = self.count + 1.0
470 470 return
471 471
472 472 def __readNextBlock(self, seconds=30, volt_scale=1):
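As an aside, timeit() above maintains an incremental mean of the execution time, avg_new = (t + n * avg_old) / (n + 1). A self-contained sketch of the same update rule, with a throwaway workload:

    import time

    def timed_average(func, runs=5):
        # incremental mean of execution time, same update rule as timeit() above
        old_average, count = None, 0.0
        for _ in range(runs):
            t0 = time.time()
            func()
            elapsed = time.time() - t0
            if old_average is None:
                old_average = elapsed
            old_average = (elapsed + count * old_average) / (count + 1.0)
            count += 1.0
        return old_average

    print(timed_average(lambda: sum(range(100000))))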
@@ -475,7 +475,7 class DigitalRFReader(ProcessingUnit):
475 475
476 476 # Set the next data
477 477 self.__flagDiscontinuousBlock = False
478 self.__thisUnixSample += self.__samples_to_read
478 self.__thisUnixSample += self.__samples_to_read
479 479
480 480 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
481 481 print ("[Reading] There are no more data into selected time-range")
@@ -496,14 +496,14 class DigitalRFReader(ProcessingUnit):
496 496 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
497 497 for indexSubchannel in range(self.__num_subchannels):
498 498 try:
499 t0 = time()
499 t0 = time()
500 500 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 501 self.__samples_to_read,
502 502 thisChannelName, sub_channel=indexSubchannel)
503 self.executionTime = time() - t0
503 self.executionTime = time() - t0
504 504 if self.oldAverage is None:
505 505 self.oldAverage = self.executionTime
506 self.oldAverage = (
506 self.oldAverage = (
507 507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 508 self.count = self.count + 1.0
509 509
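For readers unfamiliar with digital_rf, a hedged sketch of the read pattern used above; the path and channel name are hypothetical, and the calls (DigitalRFReader, get_bounds, read_vector_c81d) are the same ones the hunks above rely on:

    import digital_rf

    reader = digital_rf.DigitalRFReader('/data/drf')      # hypothetical data path
    first_sample, last_sample = reader.get_bounds('ch0')  # bounds in sample units
    # read one block of complex samples, as __readNextBlock() does above
    block = reader.read_vector_c81d(first_sample, 4096, 'ch0', sub_channel=0)
    print(block.shape, block.dtype)                       # expected: (4096,) complex64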
@@ -521,11 +521,11 class DigitalRFReader(ProcessingUnit):
521 521 break
522 522
523 523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
524 indexChannel+=1
524 indexChannel += 1
525 525
526 dataOk = True
526 dataOk = True
527 527
528 self.__utctime = self.__thisUnixSample / self.__sample_rate
528 self.__utctime = self.__thisUnixSample / self.__sample_rate
529 529
530 530 if not dataOk:
531 531 return False
@@ -534,7 +534,7 class DigitalRFReader(ProcessingUnit):
534 534 self.__samples_to_read,
535 535 self.__timeInterval))
536 536
537 self.__bufferIndex = 0
537 self.__bufferIndex = 0
538 538
539 539 return True
540 540
@@ -557,16 +557,16 class DigitalRFReader(ProcessingUnit):
557 557 self.flagDiscontinuousBlock
558 558 self.flagIsNewBlock
559 559 '''
560 #print("getdata")
560 # print("getdata")
561 561 err_counter = 0
562 562 self.dataOut.flagNoData = True
563 563
564 564 if self.__isBufferEmpty():
565 #print("hi")
565 # print("hi")
566 566 self.__flagDiscontinuousBlock = False
567 567
568 568 while True:
569 #print ("q ha pasado")
569 # print ("q ha pasado")
570 570 if self.__readNextBlock():
571 571 break
572 572 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
@@ -589,14 +589,14 class DigitalRFReader(ProcessingUnit):
589 589 print('[Reading] waiting %d seconds to read a new block' % seconds)
590 590 time.sleep(seconds)
591 591
592 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 self.dataOut.flagNoData = False
592 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 self.dataOut.flagNoData = False
595 595 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
596 self.dataOut.profileIndex = self.profileIndex
596 self.dataOut.profileIndex = self.profileIndex
597 597
598 598 self.__bufferIndex += self.__nSamples
599 self.profileIndex += 1
599 self.profileIndex += 1
600 600
601 601 if self.profileIndex == self.dataOut.nProfiles:
602 602 self.profileIndex = 0
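A minimal sketch, with made-up sizes, of the slicing done in getData() above: the buffer holds samples_to_read samples per subchannel and is handed out in profile-sized pieces of nSamples (complex64 is used here in place of the deprecated numpy.complex):

    import numpy

    n_subchannels, n_samples, samples_to_read = 2, 100, 400   # assumed sizes
    data_buffer = numpy.zeros((n_subchannels, samples_to_read), dtype=numpy.complex64)

    buffer_index = 0
    while buffer_index + n_samples <= samples_to_read:
        profile = data_buffer[:, buffer_index:buffer_index + n_samples]  # one profile
        buffer_index += n_samples
        # ... hand `profile` downstream, as getData() does with dataOut.data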
@@ -627,7 +627,7 class DigitalRFReader(ProcessingUnit):
627 627
628 628 if not self.isConfig:
629 629 self.setup(**kwargs)
630 #self.i = self.i+1
630 # self.i = self.i+1
631 631 self.getData(seconds=self.__delay)
632 632
633 633 return
@@ -644,25 +644,25 class DigitalRFWriter(Operation):
644 644 '''
645 645 Operation.__init__(self, **kwargs)
646 646 self.metadata_dict = {}
647 self.dataOut = None
648 self.dtype = None
649 self.oldAverage = 0
647 self.dataOut = None
648 self.dtype = None
649 self.oldAverage = 0
650 650
651 651 def setHeader(self):
652 652
653 self.metadata_dict['frequency'] = self.dataOut.frequency
654 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 self.metadata_dict['heightList'] = self.dataOut.heightList
658 self.metadata_dict['channelList'] = self.dataOut.channelList
653 self.metadata_dict['frequency'] = self.dataOut.frequency
654 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 self.metadata_dict['heightList'] = self.dataOut.heightList
658 self.metadata_dict['channelList'] = self.dataOut.channelList
659 659 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
660 660 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
661 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 self.metadata_dict['type'] = self.dataOut.type
665 self.metadata_dict['flagDataAsBlock']= getattr(
661 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 self.metadata_dict['type'] = self.dataOut.type
665 self.metadata_dict['flagDataAsBlock'] = getattr(
666 666 self.dataOut, 'flagDataAsBlock', None) # chequear
667 667
668 668 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
@@ -672,13 +672,13 class DigitalRFWriter(Operation):
672 672 dataOut: Input data will also be outputa data
673 673 '''
674 674 self.setHeader()
675 self.__ippSeconds = dataOut.ippSeconds
676 self.__deltaH = dataOut.getDeltaH()
675 self.__ippSeconds = dataOut.ippSeconds
676 self.__deltaH = dataOut.getDeltaH()
677 677 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
678 self.__dtype = dataOut.dtype
678 self.__dtype = dataOut.dtype
679 679 if len(dataOut.dtype) == 2:
680 680 self.__dtype = dataOut.dtype[0]
681 self.__nSamples = dataOut.systemHeaderObj.nSamples
681 self.__nSamples = dataOut.systemHeaderObj.nSamples
682 682 self.__nProfiles = dataOut.nProfiles
683 683
684 684 if self.dataOut.type != 'Voltage':
@@ -689,44 +689,44 class DigitalRFWriter(Operation):
689 689 self.arr_data = numpy.ones((self.__nSamples, len(
690 690 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
691 691
692 file_cadence_millisecs = 1000
692 file_cadence_millisecs = 1000
693 693
694 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 sample_rate_numerator = int(sample_rate_fraction.numerator)
694 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 sample_rate_numerator = int(sample_rate_fraction.numerator)
696 696 sample_rate_denominator = int(sample_rate_fraction.denominator)
697 start_global_index = dataOut.utctime * self.__sample_rate
697 start_global_index = dataOut.utctime * self.__sample_rate
698 698
699 uuid = 'prueba'
699 uuid = 'prueba'
700 700 compression_level = 0
701 checksum = False
702 is_complex = True
703 num_subchannels = len(dataOut.channelList)
704 is_continuous = True
705 marching_periods = False
701 checksum = False
702 is_complex = True
703 num_subchannels = len(dataOut.channelList)
704 is_continuous = True
705 marching_periods = False
706 706
707 707 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
708 708 fileCadence, start_global_index,
709 709 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
710 710 is_complex, num_subchannels, is_continuous, marching_periods)
711 metadata_dir = os.path.join(path, 'metadata')
711 metadata_dir = os.path.join(path, 'metadata')
712 712 os.system('mkdir %s' % (metadata_dir))
713 713 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
714 714 sample_rate_numerator, sample_rate_denominator,
715 715 metadataFile)
716 self.isConfig = True
716 self.isConfig = True
717 717 self.currentSample = 0
718 self.oldAverage = 0
719 self.count = 0
718 self.oldAverage = 0
719 self.count = 0
720 720 return
721 721
722 722 def writeMetadata(self):
723 start_idx = self.__sample_rate * self.dataOut.utctime
723 start_idx = self.__sample_rate * self.dataOut.utctime
724 724
725 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
725 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
726 726 )
727 727 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
728 728 )
729 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
729 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
730 730 )
731 731 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
732 732 return
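The writer above passes the sample rate to digital_rf as an integer numerator/denominator pair. A worked check, assuming deltaH = 1.5 km as in the reader defaults:

    from fractions import Fraction

    deltaH = 1.5                                   # km per sample (assumed)
    sample_rate = 1e6 * 0.15 / deltaH              # -> 100000.0 Hz, as in setup() above
    ratio = Fraction(sample_rate).limit_denominator()
    print(int(ratio.numerator), int(ratio.denominator))   # 100000 1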
@@ -734,12 +734,12 class DigitalRFWriter(Operation):
734 734 def timeit(self, toExecute):
735 735 t0 = time()
736 736 toExecute()
737 self.executionTime = time() - t0
737 self.executionTime = time() - t0
738 738 if self.oldAverage is None:
739 739 self.oldAverage = self.executionTime
740 self.oldAverage = (self.executionTime + self.count *
740 self.oldAverage = (self.executionTime + self.count *
741 741 self.oldAverage) / (self.count + 1.0)
742 self.count = self.count + 1.0
742 self.count = self.count + 1.0
743 743 return
744 744
745 745 def writeData(self):
@@ -747,9 +747,9 class DigitalRFWriter(Operation):
747 747 raise 'Digital RF cannot be used with this data type'
748 748 for channel in self.dataOut.channelList:
749 749 for i in range(self.dataOut.nFFTPoints):
750 self.arr_data[1][channel * self.dataOut.nFFTPoints +
750 self.arr_data[1][channel * self.dataOut.nFFTPoints +
751 751 i]['r'] = self.dataOut.data[channel][i].real
752 self.arr_data[1][channel * self.dataOut.nFFTPoints +
752 self.arr_data[1][channel * self.dataOut.nFFTPoints +
753 753 i]['i'] = self.dataOut.data[channel][i].imag
754 754 else:
755 755 for i in range(self.dataOut.systemHeaderObj.nSamples):
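A small sketch, with assumed shapes and simplified indexing, of the structured-dtype idea used by writeData() above: each element of arr_data stores the real and imaginary parts of a sample in separate fields:

    import numpy

    n_samples, n_channels = 8, 2                       # assumed sizes
    complex_data = (numpy.random.randn(n_channels, n_samples)
                    + 1j * numpy.random.randn(n_channels, n_samples)).astype(numpy.complex64)

    arr_data = numpy.zeros((n_samples, n_channels),
                           dtype=[('r', numpy.float32), ('i', numpy.float32)])
    for ch in range(n_channels):
        arr_data['r'][:, ch] = complex_data[ch].real
        arr_data['i'][:, ch] = complex_data[ch].imag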
@@ -777,12 +777,12 class DigitalRFWriter(Operation):
777 777
778 778 self.writeData()
779 779
780 ## self.currentSample += 1
780 # # self.currentSample += 1
781 781 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
782 782 # self.writeMetadata()
783 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
783 # # if self.currentSample == self.__nProfiles: self.currentSample = 0
784 784
785 return dataOut# en la version 2.7 no aparece este return
785 return dataOut # en la version 2.7 no aparece este return
786 786
787 787 def close(self):
788 788 print('[Writing] - Closing files ')
@@ -25,23 +25,23 from schainpy.utils import log
25 25
26 26
27 27 class PyFits(object):
28 name=None
29 format=None
30 array =None
31 data =None
32 thdulist=None
33 prihdr=None
34 hdu=None
28 name = None
29 format = None
30 array = None
31 data = None
32 thdulist = None
33 prihdr = None
34 hdu = None
35 35
36 36 def __init__(self):
37 37
38 38 pass
39 39
40 def setColF(self,name,format,array):
41 self.name=name
42 self.format=format
43 self.array=array
44 a1=numpy.array([self.array],dtype=numpy.float32)
40 def setColF(self, name, format, array):
41 self.name = name
42 self.format = format
43 self.array = array
44 a1 = numpy.array([self.array], dtype=numpy.float32)
45 45 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
46 46 return self.col1
47 47
@@ -54,35 +54,35 class PyFits(object):
54 54 # return self.col2
55 55
56 56
57 def writeData(self,name,format,data):
58 self.name=name
59 self.format=format
60 self.data=data
61 a2=numpy.array([self.data],dtype=numpy.float32)
57 def writeData(self, name, format, data):
58 self.name = name
59 self.format = format
60 self.data = data
61 a2 = numpy.array([self.data], dtype=numpy.float32)
62 62 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
63 63 return self.col2
64 64
65 def cFImage(self,idblock,year,month,day,hour,minute,second):
66 self.hdu= pyfits.PrimaryHDU(idblock)
67 self.hdu.header.set("Year",year)
68 self.hdu.header.set("Month",month)
69 self.hdu.header.set("Day",day)
70 self.hdu.header.set("Hour",hour)
71 self.hdu.header.set("Minute",minute)
72 self.hdu.header.set("Second",second)
65 def cFImage(self, idblock, year, month, day, hour, minute, second):
66 self.hdu = pyfits.PrimaryHDU(idblock)
67 self.hdu.header.set("Year", year)
68 self.hdu.header.set("Month", month)
69 self.hdu.header.set("Day", day)
70 self.hdu.header.set("Hour", hour)
71 self.hdu.header.set("Minute", minute)
72 self.hdu.header.set("Second", second)
73 73 return self.hdu
74 74
75 75
76 def Ctable(self,colList):
77 self.cols=pyfits.ColDefs(colList)
76 def Ctable(self, colList):
77 self.cols = pyfits.ColDefs(colList)
78 78 self.tbhdu = pyfits.new_table(self.cols)
79 79 return self.tbhdu
80 80
81 81
82 def CFile(self,hdu,tbhdu):
83 self.thdulist=pyfits.HDUList([hdu,tbhdu])
82 def CFile(self, hdu, tbhdu):
83 self.thdulist = pyfits.HDUList([hdu, tbhdu])
84 84
85 def wFile(self,filename):
85 def wFile(self, filename):
86 86 if os.path.isfile(filename):
87 87 os.remove(filename)
88 88 self.thdulist.writeto(filename)
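To see how the PyFits helpers above fit together, a hedged end-to-end sketch; it assumes the legacy pyfits package (predecessor of astropy.io.fits) is installed and the PyFits class above is importable, and the file name and array contents are made up:

    import numpy

    writer = PyFits()
    freq = numpy.linspace(-50.0, 50.0, 100)
    col_freq = writer.setColF(name="freq", format="100E", array=freq)
    col_pow = writer.writeData(name="PCh1", format="100E", data=numpy.random.rand(100))

    header = writer.cFImage(idblock=numpy.array([0], dtype="int64"),
                            year=2020, month=1, day=1, hour=0, minute=0, second=0)
    table = writer.Ctable(colList=[col_freq, col_pow])
    writer.CFile(header, table)
    writer.wFile("example.fits")    # hypothetical output file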
@@ -154,7 +154,7 class FitsWriter(Operation):
154 154 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
155 155 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
156 156 header_data.header['NCHANNELS'] = dataOut.nChannels
157 #header_data.header['HEIGHTS'] = dataOut.heightList
157 # header_data.header['HEIGHTS'] = dataOut.heightList
158 158 header_data.header['NHEIGHTS'] = dataOut.nHeights
159 159
160 160 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
@@ -165,7 +165,7 class FitsWriter(Operation):
165 165
166 166 header_data.writeto(self.filename)
167 167
168 self.addExtension(dataOut.heightList,'HEIGHTLIST')
168 self.addExtension(dataOut.heightList, 'HEIGHTLIST')
169 169
170 170
171 171 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
@@ -182,7 +182,7 class FitsWriter(Operation):
182 182 def addExtension(self, data, tagname):
183 183 self.open()
184 184 extension = pyfits.ImageHDU(data=data, name=tagname)
185 #extension.header['TAG'] = tagname
185 # extension.header['TAG'] = tagname
186 186 self.fitsObj.append(extension)
187 187 self.write()
188 188
@@ -207,25 +207,25 class FitsWriter(Operation):
207 207 ext = self.ext
208 208 path = self.path
209 209
210 timeTuple = time.localtime( self.dataOut.utctime)
211 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
210 timeTuple = time.localtime(self.dataOut.utctime)
211 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
212 212
213 fullpath = os.path.join( path, subfolder )
214 if not( os.path.exists(fullpath) ):
213 fullpath = os.path.join(path, subfolder)
214 if not(os.path.exists(fullpath)):
215 215 os.mkdir(fullpath)
216 self.setFile = -1 #inicializo mi contador de seteo
216 self.setFile = -1 # inicializo mi contador de seteo
217 217 else:
218 filesList = os.listdir( fullpath )
219 if len( filesList ) > 0:
220 filesList = sorted( filesList, key=str.lower )
218 filesList = os.listdir(fullpath)
219 if len(filesList) > 0:
220 filesList = sorted(filesList, key=str.lower)
221 221 filen = filesList[-1]
222 222
223 if isNumber( filen[8:11] ):
224 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
223 if isNumber(filen[8:11]):
224 self.setFile = int(filen[8:11]) # inicializo mi contador de seteo al seteo del ultimo file
225 225 else:
226 226 self.setFile = -1
227 227 else:
228 self.setFile = -1 #inicializo mi contador de seteo
228 self.setFile = -1 # inicializo mi contador de seteo
229 229
230 230 setFile = self.setFile
231 231 setFile += 1
@@ -234,16 +234,16 class FitsWriter(Operation):
234 234 timeTuple.tm_year,
235 235 timeTuple.tm_yday,
236 236 setFile,
237 ext )
237 ext)
238 238
239 filename = os.path.join( path, subfolder, thisFile )
239 filename = os.path.join(path, subfolder, thisFile)
240 240
241 241 self.blockIndex = 0
242 242 self.filename = filename
243 243 self.setFile = setFile
244 244 self.flagIsNewFile = 1
245 245
246 print('Writing the file: %s'%self.filename)
246 print('Writing the file: %s' % self.filename)
247 247
248 248 self.setFitsHeader(self.dataOut, self.metadatafile)
249 249
@@ -262,13 +262,13 class FitsWriter(Operation):
262 262 if self.blockIndex < self.dataBlocksPerFile:
263 263 return 1
264 264
265 if not( self.setNextFile() ):
265 if not(self.setNextFile()):
266 266 return 0
267 267
268 268 return 1
269 269
270 270 def writeNextBlock(self):
271 if not( self.__setNewBlock() ):
271 if not(self.__setNewBlock()):
272 272 return 0
273 273 self.writeBlock()
274 274 return 1
@@ -301,8 +301,8 class FitsReader(ProcessingUnit):
301 301 data = None
302 302 data_header_dict = None
303 303
304 def __init__(self):#, **kwargs):
305 ProcessingUnit.__init__(self)#, **kwargs)
304 def __init__(self): # , **kwargs):
305 ProcessingUnit.__init__(self) # , **kwargs)
306 306 self.isConfig = False
307 307 self.ext = '.fits'
308 308 self.setFile = 0
@@ -317,7 +317,7 class FitsReader(ProcessingUnit):
317 317 self.nReadBlocks = 0
318 318 self.nTotalBlocks = 0
319 319 self.dataOut = self.createObjByDefault()
320 self.maxTimeStep = 10# deberia ser definido por el usuario usando el metodo setup()
320 self.maxTimeStep = 10 # deberia ser definido por el usuario usando el metodo setup()
321 321 self.blockIndex = 1
322 322
323 323 def createObjByDefault(self):
@@ -328,14 +328,14 class FitsReader(ProcessingUnit):
328 328
329 329 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
330 330 try:
331 fitsObj = pyfits.open(filename,'readonly')
331 fitsObj = pyfits.open(filename, 'readonly')
332 332 except:
333 print("File %s can't be opened" %(filename))
333 print("File %s can't be opened" % (filename))
334 334 return None
335 335
336 336 header = fitsObj[0].header
337 337 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
338 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
338 utc = time.mktime(struct_time) - time.timezone # TIMEZONE debe ser un parametro del header FITS
339 339
340 340 ltc = utc
341 341 if useLocalTime:
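A worked example of the DATETIME parsing above, with a hypothetical header value; mktime() interprets the struct as local time, and subtracting time.timezone shifts the result back to UTC, which is why the inline comment above notes the timezone should really come from the FITS header:

    import time

    datetime_str = "Jan 15 2020 13:45:00"          # hypothetical header['DATETIME'] value
    struct_time = time.strptime(datetime_str, "%b %d %Y %H:%M:%S")
    utc = time.mktime(struct_time) - time.timezone
    print(utc)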
@@ -367,7 +367,7 class FitsReader(ProcessingUnit):
367 367 # continue
368 368
369 369 fileSize = os.path.getsize(filename)
370 fitsObj = pyfits.open(filename,'readonly')
370 fitsObj = pyfits.open(filename, 'readonly')
371 371 break
372 372
373 373 self.flagIsNewFile = 1
@@ -376,7 +376,7 class FitsReader(ProcessingUnit):
376 376 self.fileSize = fileSize
377 377 self.fitsObj = fitsObj
378 378 self.blockIndex = 0
379 print("Setting the file: %s"%self.filename)
379 print("Setting the file: %s" % self.filename)
380 380
381 381 return 1
382 382
@@ -459,8 +459,8 class FitsReader(ProcessingUnit):
459 459 path,
460 460 startDate,
461 461 endDate,
462 startTime=datetime.time(0,0,0),
463 endTime=datetime.time(23,59,59),
462 startTime=datetime.time(0, 0, 0),
463 endTime=datetime.time(23, 59, 59),
464 464 set=None,
465 465 expLabel='',
466 466 ext='.fits',
@@ -474,7 +474,7 class FitsReader(ProcessingUnit):
474 474 else:
475 475 dirList = []
476 476 for thisPath in os.listdir(path):
477 if not os.path.isdir(os.path.join(path,thisPath)):
477 if not os.path.isdir(os.path.join(path, thisPath)):
478 478 continue
479 479 if not isRadarFolder(thisPath):
480 480 continue
@@ -490,20 +490,20 class FitsReader(ProcessingUnit):
490 490 year = thisDate.timetuple().tm_year
491 491 doy = thisDate.timetuple().tm_yday
492 492
493 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
493 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')
494 494 if len(matchlist) == 0:
495 495 thisDate += datetime.timedelta(1)
496 496 continue
497 497 for match in matchlist:
498 pathList.append(os.path.join(path,match,expLabel))
498 pathList.append(os.path.join(path, match, expLabel))
499 499
500 500 thisDate += datetime.timedelta(1)
501 501
502 502 if pathList == []:
503 print("Any folder was found for the date range: %s-%s" %(startDate, endDate))
503 print("Any folder was found for the date range: %s-%s" % (startDate, endDate))
504 504 return None, None
505 505
506 print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate))
506 print("%d folder(s) was(were) found for the date range: %s - %s" % (len(pathList), startDate, endDate))
507 507
508 508 filenameList = []
509 509 datetimeList = []
@@ -512,12 +512,12 class FitsReader(ProcessingUnit):
512 512
513 513 thisPath = pathList[i]
514 514
515 fileList = glob.glob1(thisPath, "*%s" %ext)
515 fileList = glob.glob1(thisPath, "*%s" % ext)
516 516 fileList.sort()
517 517
518 518 for thisFile in fileList:
519 519
520 filename = os.path.join(thisPath,thisFile)
520 filename = os.path.join(thisPath, thisFile)
521 521 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
522 522
523 523 if not(thisDatetime):
@@ -527,14 +527,14 class FitsReader(ProcessingUnit):
527 527 datetimeList.append(thisDatetime)
528 528
529 529 if not(filenameList):
530 print("Any file was found for the time range %s - %s" %(startTime, endTime))
530 print("Any file was found for the time range %s - %s" % (startTime, endTime))
531 531 return None, None
532 532
533 print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime))
533 print("%d file(s) was(were) found for the time range: %s - %s" % (len(filenameList), startTime, endTime))
534 534 print()
535 535
536 536 for i in range(len(filenameList)):
537 print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()))
537 print("%s -> [%s]" % (filenameList[i], datetimeList[i].ctime()))
538 538
539 539 self.filenameList = filenameList
540 540 self.datetimeList = datetimeList
@@ -544,14 +544,14 class FitsReader(ProcessingUnit):
544 544 def setup(self, path=None,
545 545 startDate=None,
546 546 endDate=None,
547 startTime=datetime.time(0,0,0),
548 endTime=datetime.time(23,59,59),
547 startTime=datetime.time(0, 0, 0),
548 endTime=datetime.time(23, 59, 59),
549 549 set=0,
550 expLabel = "",
551 ext = None,
552 online = False,
553 delay = 60,
554 walk = True):
550 expLabel="",
551 ext=None,
552 online=False,
553 delay=60,
554 walk=True):
555 555
556 556 if path == None:
557 557 raise ValueError("The path is not valid")
@@ -567,9 +567,9 class FitsReader(ProcessingUnit):
567 567 walk=walk)
568 568
569 569 if not(pathList):
570 print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
571 datetime.datetime.combine(startDate,startTime).ctime(),
572 datetime.datetime.combine(endDate,endTime).ctime()))
570 print("No *%s files into the folder %s \nfor the range: %s - %s" % (ext, path,
571 datetime.datetime.combine(startDate, startTime).ctime(),
572 datetime.datetime.combine(endDate, endTime).ctime()))
573 573
574 574 sys.exit(-1)
575 575
@@ -583,10 +583,10 class FitsReader(ProcessingUnit):
583 583 self.ext = ext
584 584
585 585 if not(self.setNextFile()):
586 if (startDate!=None) and (endDate!=None):
587 print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
586 if (startDate != None) and (endDate != None):
587 print("No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
588 588 elif startDate != None:
589 print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()))
589 print("No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
590 590 else:
591 591 print("No files")
592 592
@@ -627,21 +627,21 class FitsReader(ProcessingUnit):
627 627
628 628 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
629 629
630 for nTries in range( self.nTries ):
630 for nTries in range(self.nTries):
631 631
632 632 self.fp.close()
633 self.fp = open( self.filename, 'rb' )
634 self.fp.seek( currentPointer )
633 self.fp = open(self.filename, 'rb')
634 self.fp.seek(currentPointer)
635 635
636 self.fileSize = os.path.getsize( self.filename )
636 self.fileSize = os.path.getsize(self.filename)
637 637 currentSize = self.fileSize - currentPointer
638 638
639 if ( currentSize >= neededSize ):
639 if (currentSize >= neededSize):
640 640 self.__rdBasicHeader()
641 641 return 1
642 642
643 print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1))
644 sleep( self.delay )
643 print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
644 sleep(self.delay)
645 645
646 646
647 647 return 0
@@ -737,11 +737,11 class SpectraHeisWriter(Operation):
737 737 doypath = None
738 738 subfolder = None
739 739
740 def __init__(self):#, **kwargs):
741 Operation.__init__(self)#, **kwargs)
740 def __init__(self): # , **kwargs):
741 Operation.__init__(self) # , **kwargs)
742 742 self.wrObj = PyFits()
743 743 # self.dataOut = dataOut
744 self.nTotalBlocks=0
744 self.nTotalBlocks = 0
745 745 # self.set = None
746 746 self.setFile = None
747 747 self.idblock = 0
@@ -764,7 +764,7 class SpectraHeisWriter(Operation):
764 764 False : no es un string numerico
765 765 """
766 766 try:
767 float( str )
767 float(str)
768 768 return True
769 769 except:
770 770 return False
@@ -779,28 +779,28 class SpectraHeisWriter(Operation):
779 779 self.dataOut = dataOut
780 780
781 781 def putData(self):
782 name= time.localtime( self.dataOut.utctime)
783 ext=".fits"
782 name = time.localtime(self.dataOut.utctime)
783 ext = ".fits"
784 784
785 785 if self.doypath == None:
786 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
787 self.doypath = os.path.join( self.wrpath, self.subfolder )
786 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year, name.tm_yday, time.mktime(datetime.datetime.now().timetuple()))
787 self.doypath = os.path.join(self.wrpath, self.subfolder)
788 788 os.mkdir(self.doypath)
789 789
790 790 if self.setFile == None:
791 791 # self.set = self.dataOut.set
792 792 self.setFile = 0
793 793 # if self.set != self.dataOut.set:
794 ## self.set = self.dataOut.set
794 # # self.set = self.dataOut.set
795 795 # self.setFile = 0
796 796
797 #make the filename
798 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
797 # make the filename
798 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)
799 799
800 filename = os.path.join(self.wrpath,self.subfolder, thisFile)
800 filename = os.path.join(self.wrpath, self.subfolder, thisFile)
801 801
802 idblock = numpy.array([self.idblock],dtype="int64")
803 header=self.wrObj.cFImage(idblock=idblock,
802 idblock = numpy.array([self.idblock], dtype="int64")
803 header = self.wrObj.cFImage(idblock=idblock,
804 804 year=time.gmtime(self.dataOut.utctime).tm_year,
805 805 month=time.gmtime(self.dataOut.utctime).tm_mon,
806 806 day=time.gmtime(self.dataOut.utctime).tm_mday,
@@ -808,32 +808,32 class SpectraHeisWriter(Operation):
808 808 minute=time.gmtime(self.dataOut.utctime).tm_min,
809 809 second=time.gmtime(self.dataOut.utctime).tm_sec)
810 810
811 c=3E8
811 c = 3E8
812 812 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
813 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
813 freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) * (c / (2 * deltaHeight * 1000))
814 814
815 815 colList = []
816 816
817 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
817 colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints) + 'E', array=freq)
818 818
819 819 colList.append(colFreq)
820 820
821 nchannel=self.dataOut.nChannels
821 nchannel = self.dataOut.nChannels
822 822
823 823 for i in range(nchannel):
824 col = self.wrObj.writeData(name="PCh"+str(i+1),
825 format=str(self.dataOut.nFFTPoints)+'E',
826 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
824 col = self.wrObj.writeData(name="PCh" + str(i + 1),
825 format=str(self.dataOut.nFFTPoints) + 'E',
826 data=10 * numpy.log10(self.dataOut.data_spc[i, :]))
827 827
828 828 colList.append(col)
829 829
830 data=self.wrObj.Ctable(colList=colList)
830 data = self.wrObj.Ctable(colList=colList)
831 831
832 self.wrObj.CFile(header,data)
832 self.wrObj.CFile(header, data)
833 833
834 834 self.wrObj.wFile(filename)
835 835
836 #update the setFile
836 # update the setFile
837 837 self.setFile += 1
838 838 self.idblock += 1
839 839
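A worked check of the frequency axis and dB conversion above, assuming deltaHeight = 1.5 km and nHeights = 1000: c / (2 * deltaHeight * 1000) = 3e8 / 3000 = 1e5, so the axis runs from -5e7 to just under +5e7 in steps of 1e5, and each PCh column stores 10*log10 of the spectrum:

    import numpy

    c = 3e8
    deltaHeight = 1.5                      # km (assumed)
    nHeights = 1000                        # (assumed)
    freq = numpy.arange(-nHeights / 2., nHeights / 2.) * (c / (2 * deltaHeight * 1000))
    print(freq[0], freq[-1])               # -50000000.0 49900000.0

    spectrum = numpy.abs(numpy.random.randn(nHeights)) ** 2   # placeholder spectrum
    power_db = 10 * numpy.log10(spectrum)  # same dB conversion as the PCh columns above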
@@ -847,4 +847,4 class SpectraHeisWriter(Operation):
847 847 self.isConfig = True
848 848
849 849 self.putData()
850 return dataOut
\ No newline at end of file
850 return dataOut
@@ -4,8 +4,8 Created on Jul 3, 2014
4 4 @author: roj-com0419
5 5 '''
6 6
7 import os,sys
8 import time,datetime
7 import os, sys
8 import time, datetime
9 9 import h5py
10 10 import numpy
11 11 import fnmatch
@@ -30,7 +30,7 def isNumber(str):
30 30 False : no es un string numerico
31 31 """
32 32 try:
33 float( str )
33 float(str)
34 34 return True
35 35 except:
36 36 return False
@@ -48,7 +48,7 def getFileFromSet(path, ext, set=None):
48 48
49 49 for thisFile in fileList:
50 50 try:
51 number= int(thisFile[6:16])
51 number = int(thisFile[6:16])
52 52
53 53 # year = int(thisFile[1:5])
54 54 # doy = int(thisFile[5:8])
@@ -63,31 +63,31 def getFileFromSet(path, ext, set=None):
63 63 if len(validFilelist) < 1:
64 64 return None
65 65
66 validFilelist = sorted( validFilelist, key=str.lower )
66 validFilelist = sorted(validFilelist, key=str.lower)
67 67
68 68 if set == None:
69 69 return validFilelist[-1]
70 70
71 print("set =" ,set)
71 print("set =" , set)
72 72 for thisFile in validFilelist:
73 73 if set <= int(thisFile[6:16]):
74 print(thisFile,int(thisFile[6:16]))
74 print(thisFile, int(thisFile[6:16]))
75 75 return thisFile
76 76
77 77 return validFilelist[-1]
78 78
79 myfile = fnmatch.filter(validFilelist,'*%10d*'%(set))
80 #myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
79 myfile = fnmatch.filter(validFilelist, '*%10d*' % (set))
80 # myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
81 81
82 if len(myfile)!= 0:
82 if len(myfile) != 0:
83 83 return myfile[0]
84 84 else:
85 filename = '*%10.10d%s'%(set,ext.lower())
86 print('the filename %s does not exist'%filename)
85 filename = '*%10.10d%s' % (set, ext.lower())
86 print('the filename %s does not exist' % filename)
87 87 print('...going to the last file: ')
88 88
89 89 if validFilelist:
90 validFilelist = sorted( validFilelist, key=str.lower )
90 validFilelist = sorted(validFilelist, key=str.lower)
91 91 return validFilelist[-1]
92 92
93 93 return None
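For illustration, how the set number is recovered from a file name by the helpers above; the 6-character prefix in this name is hypothetical, while the [6:16] slice and the %10.10d pattern come from the code:

    import fnmatch

    filename = "sp000_0000012345.hdf5"              # hypothetical name; the 10 digits are the set
    set_number = int(filename[6:16])                 # -> 12345, same slice as getFileFromSet()
    pattern = '*%10.10d%s' % (set_number, '.hdf5')   # -> '*0000012345.hdf5'
    print(fnmatch.filter([filename], pattern))       # ['sp000_0000012345.hdf5']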
@@ -113,7 +113,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
113 113 for thisFile in fileList:
114 114
115 115 try:
116 number= int(thisFile[6:16])
116 number = int(thisFile[6:16])
117 117 except:
118 118 print("There is a file or folder with different format")
119 119 if not isNumber(number):
@@ -127,7 +127,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
127 127 # if not isNumber(doy):
128 128 # continue
129 129
130 number= int(number)
130 number = int(number)
131 131 # year = int(year)
132 132 # doy = int(doy)
133 133
@@ -139,7 +139,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
139 139
140 140
141 141 if validFilelist:
142 validFilelist = sorted( validFilelist, key=str.lower )
142 validFilelist = sorted(validFilelist, key=str.lower)
143 143 return validFilelist[-1]
144 144
145 145 return None
@@ -150,16 +150,16 class HFReader(ProcessingUnit):
150 150 '''
151 151 classdocs
152 152 '''
153 path = None
154 startDate= None
155 endDate = None
156 startTime= None
157 endTime = None
158 walk = None
153 path = None
154 startDate = None
155 endDate = None
156 startTime = None
157 endTime = None
158 walk = None
159 159 isConfig = False
160 dataOut=None
160 dataOut = None
161 161 nTries = 3
162 ext = ".hdf5"
162 ext = ".hdf5"
163 163
164 164 def __init__(self, **kwargs):
165 165 '''
@@ -167,32 +167,32 class HFReader(ProcessingUnit):
167 167 '''
168 168 ProcessingUnit.__init__(self, **kwargs)
169 169
170 self.isConfig =False
170 self.isConfig = False
171 171
172 172 self.datablock = None
173 173
174 self.filename_current=None
174 self.filename_current = None
175 175
176 176 self.utc = 0
177 177
178 self.ext='.hdf5'
178 self.ext = '.hdf5'
179 179
180 180 self.flagIsNewFile = 1
181 181
182 182 #-------------------------------------------------
183 self.fileIndex=None
183 self.fileIndex = None
184 184
185 self.profileIndex_offset=None
185 self.profileIndex_offset = None
186 186
187 self.filenameList=[]
187 self.filenameList = []
188 188
189 self.hfFilePointer= None
189 self.hfFilePointer = None
190 190
191 191 self.filename_online = None
192 192
193 self.status=True
193 self.status = True
194 194
195 self.flagNoMoreFiles= False
195 self.flagNoMoreFiles = False
196 196
197 197 self.__waitForNewFile = 20
198 198
@@ -222,7 +222,7 class HFReader(ProcessingUnit):
222 222 Return:
223 223 None
224 224 """
225 pts2read =self.nChannels*self.nHeights*self.nProfiles
225 pts2read = self.nChannels * self.nHeights * self.nProfiles
226 226 self.blocksize = pts2read
227 227
228 228 def __readHeader(self):
@@ -230,20 +230,20 class HFReader(ProcessingUnit):
230 230 self.nProfiles = 100
231 231 self.nHeights = 1000
232 232 self.nChannels = 2
233 self.__firstHeigth=0
234 self.__nSamples=1000
235 self.__deltaHeigth=1.5
236 self.__sample_rate=1e5
237 #self.__frequency=2.72e6
238 #self.__frequency=3.64e6
239 self.__frequency=None
233 self.__firstHeigth = 0
234 self.__nSamples = 1000
235 self.__deltaHeigth = 1.5
236 self.__sample_rate = 1e5
237 # self.__frequency=2.72e6
238 # self.__frequency=3.64e6
239 self.__frequency = None
240 240 self.__online = False
241 self.filename_next_set=None
241 self.filename_next_set = None
242 242
243 #print "Frequency of Operation:", self.__frequency
243 # print "Frequency of Operation:", self.__frequency
244 244
245 245
246 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
246 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
247 247 self.path = path
248 248 self.startDate = startDate
249 249 self.endDate = endDate
@@ -253,43 +253,43 class HFReader(ProcessingUnit):
253 253
254 254 def __checkPath(self):
255 255 if os.path.exists(self.path):
256 self.status=1
256 self.status = 1
257 257 else:
258 self.status=0
259 print('Path %s does not exits'%self.path)
258 self.status = 0
259 print('Path %s does not exits' % self.path)
260 260 return
261 261 return
262 262
263 263 def __selDates(self, hf_dirname_format):
264 264 try:
265 dir_hf_filename= self.path+"/"+hf_dirname_format
266 fp= h5py.File(dir_hf_filename,'r')
267 hipoc=fp['t'].value
265 dir_hf_filename = self.path + "/" + hf_dirname_format
266 fp = h5py.File(dir_hf_filename, 'r')
267 hipoc = fp['t'].value
268 268 fp.close()
269 date_time=datetime.datetime.utcfromtimestamp(hipoc)
270 year =int(date_time[0:4])
271 month=int(date_time[5:7])
272 dom =int(date_time[8:10])
273 thisDate= datetime.date(year,month,dom)
274 if (thisDate>=self.startDate and thisDate <= self.endDate):
269 date_time = datetime.datetime.utcfromtimestamp(hipoc)
270 year = int(date_time[0:4])
271 month = int(date_time[5:7])
272 dom = int(date_time[8:10])
273 thisDate = datetime.date(year, month, dom)
274 if (thisDate >= self.startDate and thisDate <= self.endDate):
275 275 return hf_dirname_format
276 276 except:
277 277 return None
278 278
279 def __findDataForDates(self,online=False):
279 def __findDataForDates(self, online=False):
280 280 if not(self.status):
281 281 return None
282 282
283 283 pat = '\d+.\d+'
284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
285 dirnameList = [x for x in dirnameList if x!=None]
284 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
285 dirnameList = [x for x in dirnameList if x != None]
286 286 dirnameList = [x.string for x in dirnameList]
287 287 if not(online):
288 288
289 289 dirnameList = [self.__selDates(x) for x in dirnameList]
290 dirnameList = [x for x in dirnameList if x!=None]
290 dirnameList = [x for x in dirnameList if x != None]
291 291
292 if len(dirnameList)>0:
292 if len(dirnameList) > 0:
293 293 self.status = 1
294 294 self.dirnameList = dirnameList
295 295 self.dirnameList.sort()
@@ -299,40 +299,40 class HFReader(ProcessingUnit):
299 299 return None
300 300
301 301 def __getTimeFromData(self):
302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
304 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
302 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
304 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
305 305 print('........................................')
306 filter_filenameList=[]
306 filter_filenameList = []
307 307 self.filenameList.sort()
308 for i in range(len(self.filenameList)-1):
309 filename=self.filenameList[i]
310 dir_hf_filename= filename
311 fp= h5py.File(dir_hf_filename,'r')
312 hipoc=fp['t'].value
313 hipoc=hipoc+self.timezone
314 date_time=datetime.datetime.utcfromtimestamp(hipoc)
308 for i in range(len(self.filenameList) - 1):
309 filename = self.filenameList[i]
310 dir_hf_filename = filename
311 fp = h5py.File(dir_hf_filename, 'r')
312 hipoc = fp['t'].value
313 hipoc = hipoc + self.timezone
314 date_time = datetime.datetime.utcfromtimestamp(hipoc)
315 315 fp.close()
316 year =int(date_time[0:4])
317 month=int(date_time[5:7])
318 dom =int(date_time[8:10])
319 hour =int(date_time[11:13])
320 min =int(date_time[14:16])
321 sec =int(date_time[17:19])
322 this_time=datetime.datetime(year,month,dom,hour,min,sec)
323 if (this_time>=startDateTime_Reader and this_time <= endDateTime_Reader):
316 year = int(date_time[0:4])
317 month = int(date_time[5:7])
318 dom = int(date_time[8:10])
319 hour = int(date_time[11:13])
320 min = int(date_time[14:16])
321 sec = int(date_time[17:19])
322 this_time = datetime.datetime(year, month, dom, hour, min, sec)
323 if (this_time >= startDateTime_Reader and this_time <= endDateTime_Reader):
324 324 filter_filenameList.append(filename)
325 325 filter_filenameList.sort()
326 326 self.filenameList = filter_filenameList
327 327 return 1
328 328
329 329 def __getFilenameList(self):
330 #print "hola"
331 #print self.dirnameList
332 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
333 self.filenameList= dirList
334 #print self.filenameList
335 #print "pase",len(self.filenameList)
330 # print "hola"
331 # print self.dirnameList
332 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
333 self.filenameList = dirList
334 # print self.filenameList
335 # print "pase",len(self.filenameList)
336 336
337 337 def __selectDataForTimes(self, online=False):
338 338
@@ -344,70 +344,70 class HFReader(ProcessingUnit):
344 344 if not(online):
345 345 if not(self.all):
346 346 self.__getTimeFromData()
347 if len(self.filenameList)>0:
348 self.status=1
347 if len(self.filenameList) > 0:
348 self.status = 1
349 349 self.filenameList.sort()
350 350 else:
351 self.status=0
351 self.status = 0
352 352 return None
353 353 else:
354 354 if self.set != None:
355 355
356 filename=getFileFromSet(self.path,self.ext,self.set)
356 filename = getFileFromSet(self.path, self.ext, self.set)
357 357
358 if self.flag_nextfile==True:
359 self.dirnameList=[filename]
360 fullfilename=self.path+"/"+filename
361 self.filenameList=[fullfilename]
362 self.filename_next_set=int(filename[6:16])+10
358 if self.flag_nextfile == True:
359 self.dirnameList = [filename]
360 fullfilename = self.path + "/" + filename
361 self.filenameList = [fullfilename]
362 self.filename_next_set = int(filename[6:16]) + 10
363 363
364 self.flag_nextfile=False
364 self.flag_nextfile = False
365 365 else:
366 366 print(filename)
367 367 print("PRIMERA CONDICION")
368 #if self.filename_next_set== int(filename[6:16]):
368 # if self.filename_next_set== int(filename[6:16]):
369 369 print("TODO BIEN")
370 370
371 371 if filename == None:
372 372 raise ValueError("corregir")
373 373
374 self.dirnameList=[filename]
375 fullfilename=self.path+"/"+filename
376 self.filenameList=[fullfilename]
377 self.filename_next_set=int(filename[6:16])+10
378 print("Setting next file",self.filename_next_set)
379 self.set=int(filename[6:16])
374 self.dirnameList = [filename]
375 fullfilename = self.path + "/" + filename
376 self.filenameList = [fullfilename]
377 self.filename_next_set = int(filename[6:16]) + 10
378 print("Setting next file", self.filename_next_set)
379 self.set = int(filename[6:16])
380 380 if True:
381 381 pass
382 382 else:
383 383 print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO")
384 384
385 385 else:
386 filename =getlastFileFromPath(self.path,self.ext)
386 filename = getlastFileFromPath(self.path, self.ext)
387 387
388 if self.flag_nextfile==True:
389 self.dirnameList=[filename]
390 fullfilename=self.path+"/"+filename
391 self.filenameList=[self.filenameList[-1]]
392 self.filename_next_set=int(filename[6:16])+10
388 if self.flag_nextfile == True:
389 self.dirnameList = [filename]
390 fullfilename = self.path + "/" + filename
391 self.filenameList = [self.filenameList[-1]]
392 self.filename_next_set = int(filename[6:16]) + 10
393 393
394 self.flag_nextfile=False
394 self.flag_nextfile = False
395 395 else:
396 filename=getFileFromSet(self.path,self.ext,self.set)
396 filename = getFileFromSet(self.path, self.ext, self.set)
397 397 print(filename)
398 398 print("PRIMERA CONDICION")
399 #if self.filename_next_set== int(filename[6:16]):
399 # if self.filename_next_set== int(filename[6:16]):
400 400 print("TODO BIEN")
401 401
402 402 if filename == None:
403 403 raise ValueError("corregir")
404 404
405 self.dirnameList=[filename]
406 fullfilename=self.path+"/"+filename
407 self.filenameList=[fullfilename]
408 self.filename_next_set=int(filename[6:16])+10
409 print("Setting next file",self.filename_next_set)
410 self.set=int(filename[6:16])
405 self.dirnameList = [filename]
406 fullfilename = self.path + "/" + filename
407 self.filenameList = [fullfilename]
408 self.filename_next_set = int(filename[6:16]) + 10
409 print("Setting next file", self.filename_next_set)
410 self.set = int(filename[6:16])
411 411 if True:
412 412 pass
413 413 else:
@@ -420,8 +420,8 class HFReader(ProcessingUnit):
420 420 startDate,
421 421 endDate,
422 422 ext,
423 startTime=datetime.time(0,0,0),
424 endTime=datetime.time(23,59,59),
423 startTime=datetime.time(0, 0, 0),
424 endTime=datetime.time(23, 59, 59),
425 425 walk=True):
426 426
427 427 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -429,18 +429,18 class HFReader(ProcessingUnit):
429 429 self.__checkPath()
430 430
431 431 self.__findDataForDates()
432 #print self.dirnameList
432 # print self.dirnameList
433 433
434 434 self.__selectDataForTimes()
435 435
436 436 for i in range(len(self.filenameList)):
437 print("%s"% (self.filenameList[i]))
437 print("%s" % (self.filenameList[i]))
438 438
439 439 return
440 440
441 441 def searchFilesOnLine(self,
442 442 path,
443 expLabel= "",
443 expLabel="",
444 444 ext=None,
445 445 startDate=None,
446 446 endDate=None,
@@ -451,27 +451,27 class HFReader(ProcessingUnit):
451 451 startDate = datetime.datetime.utcnow().date()
452 452 endDate = datetime.datetime.utcnow().date()
453 453
454 self.__setParameters(path=path,startDate=startDate,endDate=endDate,walk=walk)
454 self.__setParameters(path=path, startDate=startDate, endDate=endDate, walk=walk)
455 455
456 456 self.__checkPath()
457 457
458 fullpath=path
459 print("%s folder was found: " %(fullpath ))
458 fullpath = path
459 print("%s folder was found: " % (fullpath))
460 460
461 461 if set == None:
462 self.set=None
463 filename =getlastFileFromPath(fullpath,ext)
464 startDate= datetime.datetime.utcnow().date
465 endDate= datetime.datetime.utcnow().date()
462 self.set = None
463 filename = getlastFileFromPath(fullpath, ext)
464 startDate = datetime.datetime.utcnow().date
465 endDate = datetime.datetime.utcnow().date()
466 466 #
467 467 else:
468 filename= getFileFromSet(fullpath,ext,set)
469 startDate=None
470 endDate=None
468 filename = getFileFromSet(fullpath, ext, set)
469 startDate = None
470 endDate = None
471 471 #
472 472 if not (filename):
473 return None,None,None,None,None
474 #print "%s file was found" %(filename)
473 return None, None, None, None, None
474 # print "%s file was found" %(filename)
475 475
476 476 #
477 477 # dir_hf_filename= self.path+"/"+filename
@@ -485,20 +485,20 class HFReader(ProcessingUnit):
485 485 # dom =int(date_time[8:10])
486 486 # set= int(filename[4:10])
487 487 # self.set=set-1
488 #self.dirnameList=[filename]
489 filenameList= fullpath+"/"+filename
490 self.dirnameList=[filename]
491 self.filenameList=[filenameList]
492 self.flag_nextfile=True
493
494 #self.__findDataForDates(online=True)
495 #self.dirnameList=[self.dirnameList[-1]]
496 #print self.dirnameList
497 #self.__selectDataForTimes(online=True)
498 #return fullpath,filename,year,month,dom,set
488 # self.dirnameList=[filename]
489 filenameList = fullpath + "/" + filename
490 self.dirnameList = [filename]
491 self.filenameList = [filenameList]
492 self.flag_nextfile = True
493
494 # self.__findDataForDates(online=True)
495 # self.dirnameList=[self.dirnameList[-1]]
496 # print self.dirnameList
497 # self.__selectDataForTimes(online=True)
498 # return fullpath,filename,year,month,dom,set
499 499 return
500 500
501 def __setNextFile(self,online=False):
501 def __setNextFile(self, online=False):
502 502 """
503 503 """
504 504 if not(online):
@@ -513,7 +513,7 class HFReader(ProcessingUnit):
513 513 def __setNextFileOffline(self):
514 514 """
515 515 """
516 idFile= self.fileIndex
516 idFile = self.fileIndex
517 517 while(True):
518 518 idFile += 1
519 519 if not (idFile < len(self.filenameList)):
@@ -521,10 +521,10 class HFReader(ProcessingUnit):
521 521 print("No more Files")
522 522 return 0
523 523 filename = self.filenameList[idFile]
524 hfFilePointer =h5py.File(filename,'r')
524 hfFilePointer = h5py.File(filename, 'r')
525 525
526 epoc=hfFilePointer['t'].value
527 #this_time=datetime.datetime(year,month,dom,hour,min,sec)
526 epoc = hfFilePointer['t'].value
527 # this_time=datetime.datetime(year,month,dom,hour,min,sec)
528 528 break
529 529
530 530 self.flagIsNewFile = 1
@@ -533,70 +533,70 class HFReader(ProcessingUnit):
533 533
534 534 self.hfFilePointer = hfFilePointer
535 535 hfFilePointer.close()
536 self.__t0=epoc
537 print("Setting the file: %s"%self.filename)
536 self.__t0 = epoc
537 print("Setting the file: %s" % self.filename)
538 538
539 539 return 1
540 540
541 541 def __setNextFileOnline(self):
542 542 """
543 543 """
544 print("SOY NONE",self.set)
545 if self.set==None:
544 print("SOY NONE", self.set)
545 if self.set == None:
546 546 pass
547 547 else:
548 self.set +=10
548 self.set += 10
549 549
550 filename = self.filenameList[0]#fullfilename
550 filename = self.filenameList[0] # fullfilename
551 551 if self.filename_online != None:
552 552 self.__selectDataForTimes(online=True)
553 553 filename = self.filenameList[0]
554 554 while self.filename_online == filename:
555 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
555 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
556 556 time.sleep(self.__waitForNewFile)
557 #self.__findDataForDates(online=True)
558 self.set=self.filename_next_set
557 # self.__findDataForDates(online=True)
558 self.set = self.filename_next_set
559 559 self.__selectDataForTimes(online=True)
560 560 filename = self.filenameList[0]
561 sizeoffile=os.path.getsize(filename)
562
563 #print filename
564 sizeoffile=os.path.getsize(filename)
565 if sizeoffile<1670240:
566 print("%s is not the rigth size"%filename)
567 delay=50
568 print('waiting %d seconds for delay...'%(delay))
561 sizeoffile = os.path.getsize(filename)
562
563 # print filename
564 sizeoffile = os.path.getsize(filename)
565 if sizeoffile < 1670240:
566 print("%s is not the rigth size" % filename)
567 delay = 50
568 print('waiting %d seconds for delay...' % (delay))
569 569 time.sleep(delay)
570 sizeoffile=os.path.getsize(filename)
571 if sizeoffile<1670240:
572 delay=50
573 print('waiting %d more seconds for delay...'%(delay))
570 sizeoffile = os.path.getsize(filename)
571 if sizeoffile < 1670240:
572 delay = 50
573 print('waiting %d more seconds for delay...' % (delay))
574 574 time.sleep(delay)
575 575
576 sizeoffile=os.path.getsize(filename)
577 if sizeoffile<1670240:
578 delay=50
579 print('waiting %d more seconds for delay...'%(delay))
576 sizeoffile = os.path.getsize(filename)
577 if sizeoffile < 1670240:
578 delay = 50
579 print('waiting %d more seconds for delay...' % (delay))
580 580 time.sleep(delay)
581 581
582 582 try:
583 hfFilePointer=h5py.File(filename,'r')
583 hfFilePointer = h5py.File(filename, 'r')
584 584
585 585 except:
586 print("Error reading file %s"%filename)
586 print("Error reading file %s" % filename)
587 587
588 self.filename_online=filename
589 epoc=hfFilePointer['t'].value
588 self.filename_online = filename
589 epoc = hfFilePointer['t'].value
590 590
591 self.hfFilePointer=hfFilePointer
591 self.hfFilePointer = hfFilePointer
592 592 hfFilePointer.close()
593 self.__t0=epoc
593 self.__t0 = epoc
594 594
595 595
596 596 self.flagIsNewFile = 1
597 597 self.filename = filename
598 598
599 print("Setting the file: %s"%self.filename)
599 print("Setting the file: %s" % self.filename)
600 600 return 1
601 601
602 602 def __getExpParameters(self):
@@ -604,46 +604,46 class HFReader(ProcessingUnit):
604 604 return None
605 605
606 606 def setup(self,
607 path = None,
608 startDate = None,
609 endDate = None,
610 startTime = datetime.time(0,0,0),
611 endTime = datetime.time(23,59,59),
612 set = None,
613 expLabel = "",
614 ext = None,
607 path=None,
608 startDate=None,
609 endDate=None,
610 startTime=datetime.time(0, 0, 0),
611 endTime=datetime.time(23, 59, 59),
612 set=None,
613 expLabel="",
614 ext=None,
615 615 all=0,
616 616 timezone=0,
617 online = False,
618 delay = 60,
619 walk = True):
617 online=False,
618 delay=60,
619 walk=True):
620 620 '''
621 621 In this method we should set all initial parameters.
622 622
623 623 '''
624 if path==None:
624 if path == None:
625 625 raise ValueError("The path is not valid")
626 626
627 if ext==None:
627 if ext == None:
628 628 ext = self.ext
629 629
630 self.timezone= timezone
631 self.online= online
632 self.all=all
633 #if set==None:
630 self.timezone = timezone
631 self.online = online
632 self.all = all
633 # if set==None:
634 634
635 #print set
635 # print set
636 636 if not(online):
637 637 print("Searching files in offline mode...")
638 638
639 639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
640 640 else:
641 641 print("Searching files in online mode...")
642 self.searchFilesOnLine(path, walk,ext,set=set)
643 if set==None:
642 self.searchFilesOnLine(path, walk, ext, set=set)
643 if set == None:
644 644 pass
645 645 else:
646 self.set=set-10
646 self.set = set - 10
647 647
648 648 # for nTries in range(self.nTries):
649 649 #
@@ -659,7 +659,7 class HFReader(ProcessingUnit):
659 659
660 660
661 661 if not(self.filenameList):
662 print("There is no files into the folder: %s"%(path))
662 print("There is no files into the folder: %s" % (path))
663 663 sys.exit(-1)
664 664
665 665 self.__getExpParameters()
@@ -674,9 +674,9 class HFReader(ProcessingUnit):
674 674 self.__setLocalVariables()
675 675
676 676 self.__setHeaderDO()
677 #self.profileIndex_offset= 0
677 # self.profileIndex_offset= 0
678 678
679 #self.profileIndex = self.profileIndex_offset
679 # self.profileIndex = self.profileIndex_offset
680 680
681 681 self.isConfig = True
682 682
@@ -686,7 +686,7 class HFReader(ProcessingUnit):
686 686
687 687 def __setLocalVariables(self):
688 688
689 self.datablock = numpy.zeros((self.nChannels, self.nHeights,self.nProfiles), dtype = numpy.complex)
689 self.datablock = numpy.zeros((self.nChannels, self.nHeights, self.nProfiles), dtype=numpy.complex)
690 690 #
691 691
692 692
@@ -703,34 +703,34 class HFReader(ProcessingUnit):
703 703
704 704
705 705 #---------------------------------------------------------
706 self.dataOut.systemHeaderObj.nProfiles=100
707 self.dataOut.systemHeaderObj.nSamples=1000
706 self.dataOut.systemHeaderObj.nProfiles = 100
707 self.dataOut.systemHeaderObj.nSamples = 1000
708 708
709 709
710 SAMPLING_STRUCTURE=[('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 self.dataOut.radarControllerHeaderObj.samplingWindow=numpy.zeros((1,),SAMPLING_STRUCTURE)
712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0']=0
713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh']=1.5
714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']=1000
715 self.dataOut.radarControllerHeaderObj.nHeights=int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
710 SAMPLING_STRUCTURE = [('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 self.dataOut.radarControllerHeaderObj.samplingWindow = numpy.zeros((1,), SAMPLING_STRUCTURE)
712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0'] = 0
713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh'] = 1.5
714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'] = 1000
715 self.dataOut.radarControllerHeaderObj.nHeights = int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
716 716 self.dataOut.radarControllerHeaderObj.firstHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['h0']
717 717 self.dataOut.radarControllerHeaderObj.deltaHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['dh']
718 718 self.dataOut.radarControllerHeaderObj.samplesWin = self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']
719 719
720 self.dataOut.radarControllerHeaderObj.nWindows=1
721 self.dataOut.radarControllerHeaderObj.codetype=0
722 self.dataOut.radarControllerHeaderObj.numTaus=0
723 #self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
720 self.dataOut.radarControllerHeaderObj.nWindows = 1
721 self.dataOut.radarControllerHeaderObj.codetype = 0
722 self.dataOut.radarControllerHeaderObj.numTaus = 0
723 # self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
724 724
725 725
726 #self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 #self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 #self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
726 # self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 # self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 # self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
729 729
730 self.dataOut.radarControllerHeaderObj.code_size=0
731 self.dataOut.nBaud=0
732 self.dataOut.nCode=0
733 self.dataOut.nPairs=0
730 self.dataOut.radarControllerHeaderObj.code_size = 0
731 self.dataOut.nBaud = 0
732 self.dataOut.nCode = 0
733 self.dataOut.nPairs = 0
734 734
735 735
736 736 #---------------------------------------------------------
@@ -739,19 +739,19 class HFReader(ProcessingUnit):
739 739
740 740 self.dataOut.data = None
741 741
742 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
742 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
743 743
744 744 self.dataOut.nProfiles = 1
745 745
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype=numpy.float) * self.__deltaHeigth
747 747
748 748 self.dataOut.channelList = list(range(self.nChannels))
749 749
750 #self.dataOut.channelIndexList = None
750 # self.dataOut.channelIndexList = None
751 751
752 752 self.dataOut.flagNoData = True
753 753
754 #Set to TRUE if the data is discontinuous
754 # Set to TRUE if the data is discontinuous
755 755 self.dataOut.flagDiscontinuousBlock = False
756 756
757 757 self.dataOut.utctime = None
@@ -766,16 +766,16 class HFReader(ProcessingUnit):
766 766
767 767 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
768 768
 769         self.dataOut.flagDecodeData = False #assume the data is already decoded
 769         self.dataOut.flagDecodeData = False # assume the data is already decoded
770 770
 771         self.dataOut.flagDeflipData = False #assume the data is not flipped
 771         self.dataOut.flagDeflipData = False # assume the data is not flipped
772 772
773 773 self.dataOut.flagShiftFFT = False
774 774
775 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
775 self.dataOut.ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
776 776
777 #Time interval between profiles
778 #self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
777 # Time interval between profiles
778 # self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
779 779
780 780
781 781 self.dataOut.frequency = self.__frequency
@@ -800,7 +800,7 class HFReader(ProcessingUnit):
800 800
801 801 def __setNewBlock(self):
802 802
803 if self.hfFilePointer==None:
803 if self.hfFilePointer == None:
804 804 return 0
805 805
806 806 if self.flagIsNewFile:
@@ -816,17 +816,17 class HFReader(ProcessingUnit):
816 816
817 817
818 818 def readBlock(self):
819 fp=h5py.File(self.filename,'r')
 820         #Pointer to the hdf5 file
 821         ch0=(fp['ch0']).value #First channel (100,1000)--(profiles,heights)
 822         ch1=(fp['ch1']).value #Second channel (100,1000)--(profiles,heights)
819 fp = h5py.File(self.filename, 'r')
 820         # Pointer to the hdf5 file
 821         ch0 = (fp['ch0']).value # First channel (100,1000)--(profiles,heights)
 822         ch1 = (fp['ch1']).value # Second channel (100,1000)--(profiles,heights)
823 823 fp.close()
 824         ch0= ch0.swapaxes(0,1) #First channel (100,1000)--(heights,profiles)
 825         ch1= ch1.swapaxes(0,1) #Second channel (100,1000)--(heights,profiles)
826 self.datablock = numpy.array([ch0,ch1])
827 self.flagIsNewFile=0
 824         ch0 = ch0.swapaxes(0, 1) # First channel (100,1000)--(heights,profiles)
 825         ch1 = ch1.swapaxes(0, 1) # Second channel (100,1000)--(heights,profiles)
826 self.datablock = numpy.array([ch0, ch1])
827 self.flagIsNewFile = 0
828 828
829 self.profileIndex=0
829 self.profileIndex = 0
830 830
831 831 return 1
832 832
@@ -837,16 +837,16 class HFReader(ProcessingUnit):
837 837
838 838 if self.__hasNotDataInBuffer():
839 839 if not(self.readNextBlock()):
840 self.dataOut.flagNodata=True
840 self.dataOut.flagNodata = True
841 841 return 0
842 842
843 843 ##############################
844 844 ##############################
845 self.dataOut.data = self.datablock[:,:,self.profileIndex]
846 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds*self.profileIndex
847 self.dataOut.profileIndex= self.profileIndex
848 self.dataOut.flagNoData=False
849 self.profileIndex +=1
845 self.dataOut.data = self.datablock[:, :, self.profileIndex]
846 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds * self.profileIndex
847 self.dataOut.profileIndex = self.profileIndex
848 self.dataOut.flagNoData = False
849 self.profileIndex += 1
850 850
851 851 return self.dataOut.data
852 852
@@ -859,4 +859,4 class HFReader(ProcessingUnit):
859 859 if not self.isConfig:
860 860 self.setup(**kwargs)
861 861 self.isConfig = True
862 self.getData() No newline at end of file
862 self.getData()
@@ -77,20 +77,20 class AMISRReader(ProcessingUnit):
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
 80         #Is it really necessary to create the output object in the initializer?
 80         # Is it really necessary to create the output object in the initializer?
81 81 self.dataOut = Voltage()
82 82
83 def setup(self,path=None,
84 startDate=None,
85 endDate=None,
86 startTime=None,
83 def setup(self, path=None,
84 startDate=None,
85 endDate=None,
86 startTime=None,
87 87 endTime=None,
88 88 walk=True,
89 89 timezone='ut',
90 90 all=0,
91 code = None,
92 nCode = 0,
93 nBaud = 0,
91 code=None,
92 nCode=0,
93 nBaud=0,
94 94 online=False):
95 95
96 96 self.timezone = timezone
@@ -103,15 +103,15 class AMISRReader(ProcessingUnit):
103 103
104 104
105 105
106 #self.findFiles()
106 # self.findFiles()
107 107 if not(online):
 108             #Offline file search
 108             # Offline file search
109 109 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
110 110 else:
111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
111 self.searchFilesOnLine(path, startDate, endDate, startTime, endTime, walk)
112 112
113 113 if not(self.filenameList):
114 print("There is no files into the folder: %s"%(path))
114 print("There is no files into the folder: %s" % (path))
115 115
116 116 sys.exit(-1)
117 117
@@ -127,43 +127,43 class AMISRReader(ProcessingUnit):
127 127 pass
128 128
129 129
130 def readAMISRHeader(self,fp):
130 def readAMISRHeader(self, fp):
131 131 header = 'Raw11/Data/RadacHeader'
132 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
 134         #self.code = fp.get(header+'/Code') # NOT USED FOR THIS
 135         self.frameCount = fp.get(header+'/FrameCount')# NOT USED FOR THIS
 136         self.modeGroup = fp.get(header+'/ModeGroup')# NOT USED FOR THIS
 137         self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA, OR USE THE DATA FOR THAT
 138         self.pulseCount = fp.get(header+'/PulseCount')# NOT USED FOR THIS
 139         self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
 140         self.timeCount = fp.get(header+'/TimeCount')# NOT USED FOR THIS
 141         self.timeStatus = fp.get(header+'/TimeStatus')# NOT USED FOR THIS
132 self.beamCodeByPulse = fp.get(header + '/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
 134         # self.code = fp.get(header+'/Code') # NOT USED FOR THIS
 135         self.frameCount = fp.get(header + '/FrameCount') # NOT USED FOR THIS
 136         self.modeGroup = fp.get(header + '/ModeGroup') # NOT USED FOR THIS
 137         self.nsamplesPulse = fp.get(header + '/NSamplesPulse') # TO GET NSA, OR USE THE DATA FOR THAT
 138         self.pulseCount = fp.get(header + '/PulseCount') # NOT USED FOR THIS
 139         self.radacTime = fp.get(header + '/RadacTime') # 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
 140         self.timeCount = fp.get(header + '/TimeCount') # NOT USED FOR THIS
 141         self.timeStatus = fp.get(header + '/TimeStatus') # NOT USED FOR THIS
142 142 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
143 self.frequency = fp.get('Rx/Frequency')
143 self.frequency = fp.get('Rx/Frequency')
144 144 txAus = fp.get('Raw11/Data/Pulsewidth')
145 145
146 146
147 self.nblocks = self.pulseCount.shape[0] #nblocks
147 self.nblocks = self.pulseCount.shape[0] # nblocks
148 148
149 self.nprofiles = self.pulseCount.shape[1] #nprofile
150 self.nsa = self.nsamplesPulse[0,0] #ngates
149 self.nprofiles = self.pulseCount.shape[1] # nprofile
150 self.nsa = self.nsamplesPulse[0, 0] # ngates
151 151 self.nchannels = self.beamCode.shape[1]
152 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
153 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
152 self.ippSeconds = (self.radacTime[0][1] - self.radacTime[0][0]) # Ipp in seconds
153 # self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
155 155
156 #filling radar controller header parameters
157 self.__ippKm = self.ippSeconds *.15*1e6 # in km
158 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
156 # filling radar controller header parameters
157 self.__ippKm = self.ippSeconds * .15 * 1e6 # in km
158 self.__txA = (txAus.value) * .15 # (ipp[us]*.15km/1us) in km
159 159 self.__txB = 0
160 nWindows=1
160 nWindows = 1
161 161 self.__nSamples = self.nsa
162 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
162 self.__firstHeight = self.rangeFromFile[0][0] / 1000 # in km
163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0]) / 1000
164 164
 165         #for now, until we understand why the saved code is different (the code is included even though it is not in the tuf file)
166 #self.__codeType = 0
 165         # for now, until we understand why the saved code is different (the code is included even though it is not in the tuf file)
166 # self.__codeType = 0
167 167 # self.__nCode = None
168 168 # self.__nBaud = None
169 169 self.__code = self.code
@@ -172,11 +172,11 class AMISRReader(ProcessingUnit):
172 172 self.__codeType = 1
173 173 self.__nCode = self.nCode
174 174 self.__nBaud = self.nBaud
175 #self.__code = 0
175 # self.__code = 0
176 176
177 #filling system header parameters
177 # filling system header parameters
178 178 self.__nSamples = self.nsa
179 self.newProfiles = self.nprofiles/self.nchannels
179 self.newProfiles = self.nprofiles / self.nchannels
180 180 self.__channelList = list(range(self.nchannels))
181 181
182 182 self.__frequency = self.frequency[0][0]
@@ -187,7 +187,7 class AMISRReader(ProcessingUnit):
187 187
188 188 pass
189 189
190 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
190 def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
191 191 self.path = path
192 192 self.startDate = startDate
193 193 self.endDate = endDate
@@ -200,7 +200,7 class AMISRReader(ProcessingUnit):
200 200 self.status = 1
201 201 else:
202 202 self.status = 0
 203             print('Path:%s does not exist'%self.path)
 203             print('Path:%s does not exist' % self.path)
204 204
205 205 return
206 206
@@ -210,27 +210,27 class AMISRReader(ProcessingUnit):
210 210 year = int(amisr_dirname_format[0:4])
211 211 month = int(amisr_dirname_format[4:6])
212 212 dom = int(amisr_dirname_format[6:8])
213 thisDate = datetime.date(year,month,dom)
213 thisDate = datetime.date(year, month, dom)
214 214
215 if (thisDate>=self.startDate and thisDate <= self.endDate):
215 if (thisDate >= self.startDate and thisDate <= self.endDate):
216 216 return amisr_dirname_format
217 217 except:
218 218 return None
219 219
220 220
221 def __findDataForDates(self,online=False):
221 def __findDataForDates(self, online=False):
222 222
223 223 if not(self.status):
224 224 return None
225 225
226 226 pat = '\d+.\d+'
227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 dirnameList = [x for x in dirnameList if x!=None]
227 dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
228 dirnameList = [x for x in dirnameList if x != None]
229 229 dirnameList = [x.string for x in dirnameList]
230 230 if not(online):
231 231 dirnameList = [self.__selDates(x) for x in dirnameList]
232 dirnameList = [x for x in dirnameList if x!=None]
233 if len(dirnameList)>0:
232 dirnameList = [x for x in dirnameList if x != None]
233 if len(dirnameList) > 0:
234 234 self.status = 1
235 235 self.dirnameList = dirnameList
236 236 self.dirnameList.sort()
@@ -239,38 +239,38 class AMISRReader(ProcessingUnit):
239 239 return None
240 240
241 241 def __getTimeFromData(self):
242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
242 startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
244 244
245 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
245 print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
246 246 print('........................................')
247 247 filter_filenameList = []
248 248 self.filenameList.sort()
249 #for i in range(len(self.filenameList)-1):
249 # for i in range(len(self.filenameList)-1):
250 250 for i in range(len(self.filenameList)):
251 251 filename = self.filenameList[i]
252 fp = h5py.File(filename,'r')
252 fp = h5py.File(filename, 'r')
253 253 time_str = fp.get('Time/RadacTimeString')
254 254
255 255 startDateTimeStr_File = time_str[0][0].split('.')[0]
256 256 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
257 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
257 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
258 258
259 259 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
260 260 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
261 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
261 endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
262 262
263 263 fp.close()
264 264
265 265 if self.timezone == 'lt':
266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
268 268
269 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
270 #self.filenameList.remove(filename)
269 if (endDateTime_File >= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
270 # self.filenameList.remove(filename)
271 271 filter_filenameList.append(filename)
272 272
273 if (endDateTime_File>=endDateTime_Reader):
273 if (endDateTime_File >= endDateTime_Reader):
274 274 break
275 275
276 276
@@ -279,7 +279,7 class AMISRReader(ProcessingUnit):
279 279 return 1
280 280
281 281 def __filterByGlob1(self, dirName):
282 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
282 filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
283 283 filter_files.sort()
284 284 filterDict = {}
285 285 filterDict.setdefault(dirName)
@@ -295,21 +295,21 class AMISRReader(ProcessingUnit):
295 295
296 296
297 297 def __selectDataForTimes(self, online=False):
 298         #the time filter is not implemented yet
 298         # the time filter is not implemented yet
299 299 if not(self.status):
300 300 return None
301 301
302 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
302 dirList = [os.path.join(self.path, x) for x in self.dirnameList]
303 303
304 304 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
305 305
306 306 self.__getFilenameList(fileListInKeys, dirList)
307 307 if not(online):
 308             #filter by time
 308             # filter by time
309 309 if not(self.all):
310 310 self.__getTimeFromData()
311 311
312 if len(self.filenameList)>0:
312 if len(self.filenameList) > 0:
313 313 self.status = 1
314 314 self.filenameList.sort()
315 315 else:
@@ -317,7 +317,7 class AMISRReader(ProcessingUnit):
317 317 return None
318 318
319 319 else:
320 #get the last file - 1
320 # get the last file - 1
321 321 self.filenameList = [self.filenameList[-2]]
322 322
323 323 new_dirnameList = []
@@ -329,14 +329,14 class AMISRReader(ProcessingUnit):
329 329 self.dirnameList = new_dirnameList
330 330 return 1
331 331
332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
333 endTime=datetime.time(23,59,59),walk=True):
332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0, 0, 0),
333 endTime=datetime.time(23, 59, 59), walk=True):
334 334
335 if endDate ==None:
335 if endDate == None:
336 336 startDate = datetime.datetime.utcnow().date()
337 337 endDate = datetime.datetime.utcnow().date()
338 338
339 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
339 self.__setParameters(path=path, startDate=startDate, endDate=endDate, startTime=startTime, endTime=endTime, walk=walk)
340 340
341 341 self.__checkPath()
342 342
@@ -353,8 +353,8 class AMISRReader(ProcessingUnit):
353 353 path,
354 354 startDate,
355 355 endDate,
356 startTime=datetime.time(0,0,0),
357 endTime=datetime.time(23,59,59),
356 startTime=datetime.time(0, 0, 0),
357 endTime=datetime.time(23, 59, 59),
358 358 walk=True):
359 359
360 360 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -366,7 +366,7 class AMISRReader(ProcessingUnit):
366 366 self.__selectDataForTimes()
367 367
368 368 for i in range(len(self.filenameList)):
369 print("%s" %(self.filenameList[i]))
369 print("%s" % (self.filenameList[i]))
370 370
371 371 return
372 372
@@ -382,7 +382,7 class AMISRReader(ProcessingUnit):
382 382
383 383 filename = self.filenameList[idFile]
384 384
385 amisrFilePointer = h5py.File(filename,'r')
385 amisrFilePointer = h5py.File(filename, 'r')
386 386
387 387 break
388 388
@@ -392,7 +392,7 class AMISRReader(ProcessingUnit):
392 392
393 393 self.amisrFilePointer = amisrFilePointer
394 394
395 print("Setting the file: %s"%self.filename)
395 print("Setting the file: %s" % self.filename)
396 396
397 397 return 1
398 398
@@ -404,7 +404,7 class AMISRReader(ProcessingUnit):
404 404 filename = self.filenameList[0]
405 405 wait = 0
406 406 while self.__filename_online == filename:
407 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
407 print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
408 408 if wait == 5:
409 409 return 0
410 410 sleep(self.__waitForNewFile)
@@ -414,40 +414,40 class AMISRReader(ProcessingUnit):
414 414
415 415 self.__filename_online = filename
416 416
417 self.amisrFilePointer = h5py.File(filename,'r')
417 self.amisrFilePointer = h5py.File(filename, 'r')
418 418 self.flagIsNewFile = 1
419 419 self.filename = filename
420 print("Setting the file: %s"%self.filename)
420 print("Setting the file: %s" % self.filename)
421 421 return 1
422 422
423 423
424 424 def readData(self):
425 425 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
426 re = buffer[:,:,:,0]
427 im = buffer[:,:,:,1]
428 dataset = re + im*1j
426 re = buffer[:, :, :, 0]
427 im = buffer[:, :, :, 1]
428 dataset = re + im * 1j
429 429 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
430 timeset = self.radacTime[:,0]
431 return dataset,timeset
430 timeset = self.radacTime[:, 0]
431 return dataset, timeset
432 432
433 433 def reshapeData(self):
434 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 channels = self.beamCodeByPulse[0,:]
434 # self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 channels = self.beamCodeByPulse[0, :]
436 436 nchan = self.nchannels
437 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
437 # self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
438 438 nblocks = self.nblocks
439 439 nsamples = self.nsa
440 440
441 #Dimensions : nChannels, nProfiles, nSamples
441 # Dimensions : nChannels, nProfiles, nSamples
442 442 new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
443 443 ############################################
444 444
445 445 for thisChannel in range(nchan):
446 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
446 new_block[:, thisChannel, :, :] = self.dataset[:, numpy.where(channels == self.beamCode[0][thisChannel])[0], :]
447 447
448 448
449 new_block = numpy.transpose(new_block, (1,0,2,3))
450 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
449 new_block = numpy.transpose(new_block, (1, 0, 2, 3))
450 new_block = numpy.reshape(new_block, (nchan, -1, nsamples))
451 451
452 452 return new_block
453 453
@@ -457,7 +457,7 class AMISRReader(ProcessingUnit):
457 457
458 458 def fillJROHeader(self):
459 459
460 #fill radar controller header
460 # fill radar controller header
461 461 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
462 462 txA=self.__txA,
463 463 txB=0,
@@ -467,12 +467,12 class AMISRReader(ProcessingUnit):
467 467 deltaHeight=self.__deltaHeight,
468 468 codeType=self.__codeType,
469 469 nCode=self.__nCode, nBaud=self.__nBaud,
470 code = self.__code,
470 code=self.__code,
471 471 fClock=1)
472 472
473 473
474 474
475 #fill system header
475 # fill system header
476 476 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
477 477 nProfiles=self.newProfiles,
478 478 nChannels=len(self.__channelList),
@@ -483,17 +483,17 class AMISRReader(ProcessingUnit):
483 483
484 484 self.dataOut.data = None
485 485
486 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
486 self.dataOut.dtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
487 487
488 488 # self.dataOut.nChannels = 0
489 489
490 490 # self.dataOut.nHeights = 0
491 491
492 self.dataOut.nProfiles = self.newProfiles*self.nblocks
492 self.dataOut.nProfiles = self.newProfiles * self.nblocks
493 493
494 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
496 self.dataOut.heightList = ranges/1000.0 #km
494 # self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 ranges = numpy.reshape(self.rangeFromFile.value, (-1))
496 self.dataOut.heightList = ranges / 1000.0 # km
497 497
498 498
499 499 self.dataOut.channelList = self.__channelList
@@ -504,16 +504,16 class AMISRReader(ProcessingUnit):
504 504
505 505 self.dataOut.flagNoData = True
506 506
507 #Set to TRUE if the data is discontinuous
507 # Set to TRUE if the data is discontinuous
508 508 self.dataOut.flagDiscontinuousBlock = False
509 509
510 510 self.dataOut.utctime = None
511 511
512 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
512 # self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
513 513 if self.timezone == 'lt':
514 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
514 self.dataOut.timeZone = time.timezone / 60. # get the timezone in minutes
515 515 else:
516 self.dataOut.timeZone = 0 #by default time is UTC
516 self.dataOut.timeZone = 0 # by default time is UTC
517 517
518 518 self.dataOut.dstFlag = 0
519 519
@@ -521,23 +521,23 class AMISRReader(ProcessingUnit):
521 521
522 522 self.dataOut.nCohInt = 1
523 523
 524         self.dataOut.flagDecodeData = False #assume the data is already decoded
 524         self.dataOut.flagDecodeData = False # assume the data is already decoded
525 525
 526         self.dataOut.flagDeflipData = False #assume the data is not flipped
 526         self.dataOut.flagDeflipData = False # assume the data is not flipped
527 527
528 528 self.dataOut.flagShiftFFT = False
529 529
530 530 self.dataOut.ippSeconds = self.ippSeconds
531 531
532 #Time interval between profiles
533 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
532 # Time interval between profiles
533 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
534 534
535 535 self.dataOut.frequency = self.__frequency
536 536
537 537 self.dataOut.realtime = self.online
538 538 pass
539 539
540 def readNextFile(self,online=False):
540 def readNextFile(self, online=False):
541 541
542 542 if not(online):
543 543 newFile = self.__setNextFileOffline()
@@ -547,25 +547,25 class AMISRReader(ProcessingUnit):
547 547 if not(newFile):
548 548 return 0
549 549
550 #if self.__firstFile:
550 # if self.__firstFile:
551 551 self.readAMISRHeader(self.amisrFilePointer)
552 552 self.createBuffers()
553 553 self.fillJROHeader()
554 #self.__firstFile = False
554 # self.__firstFile = False
555 555
556 556
557 557
558 self.dataset,self.timeset = self.readData()
558 self.dataset, self.timeset = self.readData()
559 559
560 if self.endDate!=None:
561 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
560 if self.endDate != None:
561 endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
562 562 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
563 563 startDateTimeStr_File = time_str[0][0].split('.')[0]
564 564 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
565 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
565 startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
566 566 if self.timezone == 'lt':
567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
568 if (startDateTime_File>endDateTime_Reader):
567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
568 if (startDateTime_File > endDateTime_Reader):
569 569 return 0
570 570
571 571 self.jrodataset = self.reshapeData()
@@ -576,7 +576,7 class AMISRReader(ProcessingUnit):
576 576
577 577
578 578 def __hasNotDataInBuffer(self):
579 if self.profileIndex >= (self.newProfiles*self.nblocks):
579 if self.profileIndex >= (self.newProfiles * self.nblocks):
580 580 return 1
581 581 return 0
582 582
@@ -592,20 +592,20 class AMISRReader(ProcessingUnit):
592 592 return 0
593 593
594 594
 595         if self.dataset is None: # set this condition when there is no more data to read
 595         if self.dataset is None: # set this condition when there is no more data to read
596 596 self.dataOut.flagNoData = True
597 597 return 0
598 598
599 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
599 # self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
600 600
601 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
601 self.dataOut.data = self.jrodataset[:, self.profileIndex, :]
602 602
603 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
 604         #check the jro data basic header and see whether it is compatible with this value
605 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
603 # self.dataOut.utctime = self.jrotimeset[self.profileIndex]
 604         # check the jro data basic header and see whether it is compatible with this value
605 # self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
606 606 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
607 indexblock = self.profileIndex/self.newProfiles
608 #print indexblock, indexprof
607 indexblock = self.profileIndex / self.newProfiles
608 # print indexblock, indexprof
609 609 self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
610 610 self.dataOut.profileIndex = self.profileIndex
611 611 self.dataOut.flagNoData = False
@@ -91,7 +91,7 class MADReader(Reader, ProcessingUnit):
91 91 self.flagNoMoreFiles = 0
92 92 self.filename = None
93 93 self.intervals = set()
94 self.datatime = datetime.datetime(1900,1,1)
94 self.datatime = datetime.datetime(1900, 1, 1)
95 95 self.format = None
96 96 self.filefmt = "***%Y%m%d*******"
97 97
@@ -125,7 +125,7 class MADReader(Reader, ProcessingUnit):
125 125
126 126 for nTries in range(self.nTries):
127 127 fullpath = self.searchFilesOnLine(self.path, self.startDate,
128 self.endDate, self.expLabel, self.ext, self.walk,
128 self.endDate, self.expLabel, self.ext, self.walk,
129 129 self.filefmt, self.folderfmt)
130 130
131 131 try:
@@ -138,7 +138,7 class MADReader(Reader, ProcessingUnit):
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 self.delay, self.path, nTries + 1),
141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
@@ -148,7 +148,7 class MADReader(Reader, ProcessingUnit):
148 148
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153
154 154 self.setNextFile()
@@ -212,7 +212,7 class MADReader(Reader, ProcessingUnit):
212 212 if self.ext == '.txt':
213 213 self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
214 214 self.nrecords = self.data.shape[0]
215 self.ranges = numpy.unique(self.data[:,self.parameters.index(self.independentParam.lower())])
215 self.ranges = numpy.unique(self.data[:, self.parameters.index(self.independentParam.lower())])
216 216 self.counter_records = 0
217 217 elif self.ext == '.hdf5':
218 218 self.data = self.fp['Data']
@@ -268,14 +268,14 class MADReader(Reader, ProcessingUnit):
268 268 if self.counter_records == self.nrecords:
269 269 break
270 270 continue
271 self.intervals.add((datatime-self.datatime).seconds)
271 self.intervals.add((datatime - self.datatime).seconds)
272 272 break
273 273 elif self.ext == '.hdf5':
274 274 datatime = datetime.datetime.utcfromtimestamp(
275 275 self.times[self.counter_records])
276 dum = self.data['Table Layout'][self.data['Table Layout']['recno']==self.counter_records]
277 self.intervals.add((datatime-self.datatime).seconds)
278 if datatime.date()>self.datatime.date():
276 dum = self.data['Table Layout'][self.data['Table Layout']['recno'] == self.counter_records]
277 self.intervals.add((datatime - self.datatime).seconds)
278 if datatime.date() > self.datatime.date():
279 279 self.flagDiscontinuousBlock = 1
280 280 self.datatime = datatime
281 281 self.counter_records += 1
@@ -299,11 +299,11 class MADReader(Reader, ProcessingUnit):
299 299 if self.ext == '.txt':
300 300 x = self.parameters.index(param.lower())
301 301 y = self.parameters.index(self.independentParam.lower())
302 ranges = self.buffer[:,y]
303 #if self.ranges.size == ranges.size:
302 ranges = self.buffer[:, y]
303 # if self.ranges.size == ranges.size:
304 304 # continue
305 305 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
306 dummy[index] = self.buffer[:,x]
306 dummy[index] = self.buffer[:, x]
307 307 else:
308 308 ranges = self.buffer[self.independentParam.lower()]
309 309 index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
@@ -311,7 +311,7 class MADReader(Reader, ProcessingUnit):
311 311
312 312 if isinstance(value, str):
313 313 if value not in self.independentParam:
314 setattr(self.dataOut, value, dummy.reshape(1,-1))
314 setattr(self.dataOut, value, dummy.reshape(1, -1))
315 315 elif isinstance(value, list):
316 316 self.output[value[0]][value[1]] = dummy
317 317 parameters[value[1]] = param
@@ -382,7 +382,7 Inputs:
382 382 format hdf5, cedar
383 383 blocks number of blocks per file'''
384 384
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict', 'metadata', 'format', 'blocks']
386 386 missing = -32767
387 387
388 388 def __init__(self):
@@ -438,7 +438,7 Inputs:
438 438 Create new cedar file object
439 439 '''
440 440
441 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
441 self.mnemonic = MNEMONICS[self.kinst] # TODO get mnemonic from madrigal
442 442 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
443 443
444 444 filename = '{}{}{}'.format(self.mnemonic,
@@ -499,7 +499,7 Inputs:
499 499 if 'db' in value.lower():
500 500 tmp = getattr(self.dataOut, value.replace('_db', ''))
501 501 SNRavg = numpy.average(tmp, axis=0)
502 tmp = 10*numpy.log10(SNRavg)
502 tmp = 10 * numpy.log10(SNRavg)
503 503 else:
504 504 tmp = getattr(self.dataOut, value)
505 505 out[key] = tmp.flatten()[:len(heights)]
@@ -521,14 +521,14 Inputs:
521 521 startTime.hour,
522 522 startTime.minute,
523 523 startTime.second,
524 startTime.microsecond/10000,
524 startTime.microsecond / 10000,
525 525 endTime.year,
526 526 endTime.month,
527 527 endTime.day,
528 528 endTime.hour,
529 529 endTime.minute,
530 530 endTime.second,
531 endTime.microsecond/10000,
531 endTime.microsecond / 10000,
532 532 list(self.oneDDict.keys()),
533 533 list(self.twoDDict.keys()),
534 534 len(index),
@@ -592,4 +592,4 Inputs:
592 592 def close(self):
593 593
594 594 if self.counter > 0:
595 self.setHeader() No newline at end of file
595 self.setHeader()
@@ -12,14 +12,14 import cmath
12 12
13 13 class matoffReader(ProcessingUnit):
14 14
15 index=None
16 list=None
17 firsttime=True
18 utccounter=None
19 utcfiletime=None
20 utcmatcounter=0
21 utcfirst=None
22 utclist=None
15 index = None
16 list = None
17 firsttime = True
18 utccounter = None
19 utcfiletime = None
20 utcmatcounter = 0
21 utcfirst = None
22 utclist = None
23 23
24 24 def __init__(self):
25 25 self.dataOut = Spectra()
@@ -28,12 +28,12 class matoffReader(ProcessingUnit):
28 28
29 29 def __setHeader(self, datastuff):
30 30
31 self.dataOut.pairsList=[(0,1)]
32 self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this!
31 self.dataOut.pairsList = [(0, 1)]
32 self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) # this!
34 34 self.dataOut.nIncohInt = 20
35 self.dataOut.nCohInt = 1 #this!
36 self.dataOut.ippSeconds = 0.004 #this!
35 self.dataOut.nCohInt = 1 # this!
36 self.dataOut.ippSeconds = 0.004 # this!
37 37 self.dataOut.nFFTPoints = len(np.array(datastuff.get('vel')).flatten())
38 38 self.dataOut.timeZone = 0
39 39 self.dataOut.heightList = np.array(datastuff.get('hts')).flatten()
@@ -41,21 +41,21 class matoffReader(ProcessingUnit):
41 41 def __readFile(self, currentfile):
42 42 print("Reading from this file:" + currentfile)
43 43
44 #filesplit=currentfile.split("\\")
45 filesplit=currentfile.split("/")
46 newsplit=filesplit[-2]
47 newnewsplit=newsplit.split(".")
48 newnewsplit=[int(i) for i in newnewsplit]
49 gooblist=datetime.datetime(newnewsplit[0],newnewsplit[1],newnewsplit[2],newnewsplit[3],newnewsplit[4],newnewsplit[5])
50 self.utcfirst=(gooblist-datetime.datetime(1970,1,1)).total_seconds()
44 # filesplit=currentfile.split("\\")
45 filesplit = currentfile.split("/")
46 newsplit = filesplit[-2]
47 newnewsplit = newsplit.split(".")
48 newnewsplit = [int(i) for i in newnewsplit]
49 gooblist = datetime.datetime(newnewsplit[0], newnewsplit[1], newnewsplit[2], newnewsplit[3], newnewsplit[4], newnewsplit[5])
50 self.utcfirst = (gooblist - datetime.datetime(1970, 1, 1)).total_seconds()
51 51
52 52
53 newsplit=filesplit[-1]
54 newnewsplit=newsplit.split(".")
55 goobnum=newnewsplit[0]
56 goobnum=int(goobnum)
53 newsplit = filesplit[-1]
54 newnewsplit = newsplit.split(".")
55 goobnum = newnewsplit[0]
56 goobnum = int(goobnum)
57 57
58 self.utcfirst=self.utcfirst+goobnum*2
58 self.utcfirst = self.utcfirst + goobnum * 2
59 59 # if (currentfile[43:]=='0.mat'):
60 60 # self.utcmatcounter=0
61 61 # self.utcfirst=self.utclist[self.index]
@@ -66,26 +66,26 class matoffReader(ProcessingUnit):
66 66 # print self.utcmatcounter
67 67 print(self.utcfirst)
68 68 try:
69 datastuff=sio.loadmat(currentfile)
69 datastuff = sio.loadmat(currentfile)
70 70 except:
71 71 return None, None
72 72
73 dataphase=datastuff.get('phase')
74 data3=datastuff.get('doppler0')
75 data4=datastuff.get('doppler1')
76 data3= np.array(data3)
73 dataphase = datastuff.get('phase')
74 data3 = datastuff.get('doppler0')
75 data4 = datastuff.get('doppler1')
76 data3 = np.array(data3)
77 77 data4 = np.array(data4)
78 datacoh=datastuff.get('coherence2')
78 datacoh = datastuff.get('coherence2')
79 79
80 datacohphase=datacoh*np.exp(-dataphase*1j)
80 datacohphase = datacoh * np.exp(-dataphase * 1j)
81 81 # data31 = np.fliplr(data3)
82 82 # data41 = np.fliplr(data4)
83 83
84 data31 = data3.reshape((1,data3.shape[0],data3.shape[1]))
85 data41 = data4.reshape((1,data4.shape[0],data4.shape[1]))
86 datacohphase1 = datacohphase.reshape((1,datacoh.shape[0],datacoh.shape[1]))
84 data31 = data3.reshape((1, data3.shape[0], data3.shape[1]))
85 data41 = data4.reshape((1, data4.shape[0], data4.shape[1]))
86 datacohphase1 = datacohphase.reshape((1, datacoh.shape[0], datacoh.shape[1]))
87 87
88 datastack = np.vstack((data31,data41))
88 datastack = np.vstack((data31, data41))
89 89
90 90 self.__setHeader(datastuff)
91 91
@@ -94,46 +94,46 class matoffReader(ProcessingUnit):
94 94
95 95 return spc, cspc
96 96
97 def __findFiles(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
97 def __findFiles(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
98 98
99 99 if startDate == None:
100 startDate = datetime.date(1970,1,1)
100 startDate = datetime.date(1970, 1, 1)
101 101
102 102 if endDate == None:
103 endDate = datetime.date(2050,1,1)
103 endDate = datetime.date(2050, 1, 1)
104 104
105 startsearch1=datetime.datetime.combine(startDate,startTime)
106 startsearch2=(startsearch1-datetime.datetime(1970,1,1)).total_seconds()
107 endsearch1=datetime.datetime.combine(endDate,endTime)
108 endsearch2=(endsearch1-datetime.datetime(1970,1,1)).total_seconds()
105 startsearch1 = datetime.datetime.combine(startDate, startTime)
106 startsearch2 = (startsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
107 endsearch1 = datetime.datetime.combine(endDate, endTime)
108 endsearch2 = (endsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
109 109
110 110 dirList = listdir(path)
111 111 dirList = sorted(dirList)
112 112
113 dirListFiltered=[]
114 fileListFiltered=[]
115 utclist=[]
113 dirListFiltered = []
114 fileListFiltered = []
115 utclist = []
116 116
117 117 if not dirList:
118 118 print("No directories found")
119 119 return []
120 120
121 #if self.online:
121 # if self.online:
122 122 # dirList= [dirList[-1]]
123 123
124 124 if self.online:
125 125 currentdate = datetime.datetime.now()
126 strsplit1=currentdate.strftime('%Y.%m.%d')
127 dirList = fnmatch.filter(dirList,strsplit1+'*')
126 strsplit1 = currentdate.strftime('%Y.%m.%d')
127 dirList = fnmatch.filter(dirList, strsplit1 + '*')
128 128
129 129 for thisDir in dirList:
130 130 if not os.path.isdir(os.path.join(path, thisDir)):
131 131 continue
132 132
133 strsplit=thisDir.split('.')
134 timeints=[int(i) for i in strsplit]
135 timelist=datetime.datetime(timeints[0],timeints[1],timeints[2],timeints[3],timeints[4],timeints[5])
136 utctime=(timelist-datetime.datetime(1970,1,1)).total_seconds()
133 strsplit = thisDir.split('.')
134 timeints = [int(i) for i in strsplit]
135 timelist = datetime.datetime(timeints[0], timeints[1], timeints[2], timeints[3], timeints[4], timeints[5])
136 utctime = (timelist - datetime.datetime(1970, 1, 1)).total_seconds()
137 137
138 138 if not self.online:
139 139 if (utctime > endsearch2):
@@ -159,7 +159,7 class matoffReader(ProcessingUnit):
159 159 continue
160 160
161 161 for k in range(len(fileList)):
162 thisFile = str(k)+'.mat'
162 thisFile = str(k) + '.mat'
163 163
164 164 if not os.path.isfile(os.path.join(pathFile, thisFile)):
165 165 continue
@@ -168,7 +168,7 class matoffReader(ProcessingUnit):
168 168
169 169 return fileListFiltered
170 170
171 def __getNextOnlineFile(self, seconds = 40):
171 def __getNextOnlineFile(self, seconds=40):
172 172
173 173 filename = self.__getNextOfflineFile()
174 174
@@ -188,7 +188,7 class matoffReader(ProcessingUnit):
188 188 if nTries > 3:
189 189 break
190 190
191 print("Waiting %d seconds ..." %seconds)
191 print("Waiting %d seconds ..." % seconds)
192 192 time.sleep(40)
193 193
194 194 if not (len(filelist) > ncurrentfiles):
@@ -204,7 +204,7 class matoffReader(ProcessingUnit):
204 204 if self.index >= len(self.fileList):
205 205 return None
206 206
207 filename=self.fileList[self.index]
207 filename = self.fileList[self.index]
208 208 self.index += 1
209 209 return filename
210 210
@@ -216,12 +216,12 class matoffReader(ProcessingUnit):
216 216 filename = self.__getNextOfflineFile()
217 217 return filename
218 218
219 def setup(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
219 def setup(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
220 220
221 221 fileList = self.__findFiles(path, startDate, endDate, startTime, endTime)
222 222
223 223 if self.online:
224 self.index = len(fileList) -1
224 self.index = len(fileList) - 1
225 225 else:
226 226 self.index = 0
227 227
@@ -229,24 +229,24 class matoffReader(ProcessingUnit):
229 229
230 230 print("fin setup")
231 231
232 def run(self,path=None,startDate=None, endDate=None,
233 startTime=datetime.time(0,0,0),
234 endTime=datetime.time(23,59,59),
235 walk=True,timezone='ut',
236 all=0,online=False,ext=None,**kwargs):
237
238 self.path=path
239 self.ext=ext
240 self.startDate=startDate
241 self.endDate=endDate
242 self.startTime=startTime
243 self.endTime=endTime
232 def run(self, path=None, startDate=None, endDate=None,
233 startTime=datetime.time(0, 0, 0),
234 endTime=datetime.time(23, 59, 59),
235 walk=True, timezone='ut',
236 all=0, online=False, ext=None, **kwargs):
237
238 self.path = path
239 self.ext = ext
240 self.startDate = startDate
241 self.endDate = endDate
242 self.startTime = startTime
243 self.endTime = endTime
244 244 self.online = online
245 245 self.dataOut.flagNoData = True
246 246
247 if (self.firsttime==True):
247 if (self.firsttime == True):
248 248 self.setup(path, startDate, endDate, startTime, endTime)
249 self.firsttime=False
249 self.firsttime = False
250 250
251 251
252 252 if not self.fileList:
@@ -262,7 +262,7 class matoffReader(ProcessingUnit):
262 262
263 263 spc, cspc = self.__readFile(currentfile)
264 264
265 if spc!=None:
265 if spc != None:
266 266
267 267 self.dataOut.data_spc = spc
268 268 self.dataOut.data_cspc = cspc
@@ -270,4 +270,4 class matoffReader(ProcessingUnit):
270 270 self.dataOut.flagNoData = False
271 271
272 272 return 1
273 No newline at end of file
273
@@ -23,9 +23,9 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jrodata import Spectra
26 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
26 # from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 #from schainpy.model.io.jroIO_bltr import BLTRReader
28 # from schainpy.model.io.jroIO_bltr import BLTRReader
29 29 from numpy import imag, shape, NaN, empty
30 30
31 31
@@ -315,7 +315,7 SRVI_HEADER = numpy.dtype([
315 315
316 316
317 317 class SRVIHeader(Header):
318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
319 319
320 320 self.SignatureSRVI1 = SignatureSRVI1
321 321 self.SizeOfDataBlock1 = SizeOfDataBlock1
@@ -338,34 +338,34 class SRVIHeader(Header):
338 338
339 339 SRVI_STRUCTURE = numpy.dtype([
340 340 ('frame_cnt', '<u4'),
341 ('time_t', '<u4'), #
342 ('tpow', '<f4'), #
343 ('npw1', '<f4'), #
344 ('npw2', '<f4'), #
345 ('cpw1', '<f4'), #
346 ('pcw2', '<f4'), #
347 ('ps_err', '<u4'), #
348 ('te_err', '<u4'), #
349 ('rc_err', '<u4'), #
350 ('grs1', '<u4'), #
351 ('grs2', '<u4'), #
352 ('azipos', '<f4'), #
353 ('azivel', '<f4'), #
354 ('elvpos', '<f4'), #
355 ('elvvel', '<f4'), #
341 ('time_t', '<u4'), #
342 ('tpow', '<f4'), #
343 ('npw1', '<f4'), #
344 ('npw2', '<f4'), #
345 ('cpw1', '<f4'), #
346 ('pcw2', '<f4'), #
347 ('ps_err', '<u4'), #
348 ('te_err', '<u4'), #
349 ('rc_err', '<u4'), #
350 ('grs1', '<u4'), #
351 ('grs2', '<u4'), #
352 ('azipos', '<f4'), #
353 ('azivel', '<f4'), #
354 ('elvpos', '<f4'), #
355 ('elvvel', '<f4'), #
356 356 ('northAngle', '<f4'),
357 ('microsec', '<u4'), #
357 ('microsec', '<u4'), #
358 358 ('azisetvel', '<f4'), #
359 359 ('elvsetpos', '<f4'), #
360 ('RadarConst', '<f4'), ]) #
360 ('RadarConst', '<f4'), ]) #
361 361
362 362
363 363 class RecordHeader(Header):
364 364
365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
369 369
370 370 self.frame_cnt = frame_cnt
371 371 self.dwell = time_t
@@ -396,44 +396,44 class RecordHeader(Header):
396 396
397 397 # startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
398 398
399 #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 #startFp.seek(OffRHeader, os.SEEK_SET)
399 # OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 # startFp.seek(OffRHeader, os.SEEK_SET)
401 401
 402 402         # print 'Block position: ',OffRHeader
403 403
404 404 header = numpy.fromfile(fp, SRVI_STRUCTURE, 1)
405 405
406 406 self.frame_cnt = header['frame_cnt'][0]
407 self.time_t = header['time_t'][0] #
408 self.tpow = header['tpow'][0] #
409 self.npw1 = header['npw1'][0] #
410 self.npw2 = header['npw2'][0] #
411 self.cpw1 = header['cpw1'][0] #
412 self.pcw2 = header['pcw2'][0] #
413 self.ps_err = header['ps_err'][0] #
414 self.te_err = header['te_err'][0] #
415 self.rc_err = header['rc_err'][0] #
416 self.grs1 = header['grs1'][0] #
417 self.grs2 = header['grs2'][0] #
418 self.azipos = header['azipos'][0] #
419 self.azivel = header['azivel'][0] #
420 self.elvpos = header['elvpos'][0] #
421 self.elvvel = header['elvvel'][0] #
422 self.northAngle = header['northAngle'][0] #
423 self.microsec = header['microsec'][0] #
424 self.azisetvel = header['azisetvel'][0] #
425 self.elvsetpos = header['elvsetpos'][0] #
426 self.RadarConst = header['RadarConst'][0] #
407 self.time_t = header['time_t'][0] #
408 self.tpow = header['tpow'][0] #
409 self.npw1 = header['npw1'][0] #
410 self.npw2 = header['npw2'][0] #
411 self.cpw1 = header['cpw1'][0] #
412 self.pcw2 = header['pcw2'][0] #
413 self.ps_err = header['ps_err'][0] #
414 self.te_err = header['te_err'][0] #
415 self.rc_err = header['rc_err'][0] #
416 self.grs1 = header['grs1'][0] #
417 self.grs2 = header['grs2'][0] #
418 self.azipos = header['azipos'][0] #
419 self.azivel = header['azivel'][0] #
420 self.elvpos = header['elvpos'][0] #
421 self.elvvel = header['elvvel'][0] #
422 self.northAngle = header['northAngle'][0] #
423 self.microsec = header['microsec'][0] #
424 self.azisetvel = header['azisetvel'][0] #
425 self.elvsetpos = header['elvsetpos'][0] #
426 self.RadarConst = header['RadarConst'][0] #
427 427 # 84
428 428
429 429 # print 'Pointer fp RECheader', fp.tell()
430 430
431 #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
431 # self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
432 432
433 #self.RHsize = 180+20*self.nChannels
434 #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
433 # self.RHsize = 180+20*self.nChannels
434 # self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
435 435 # print 'Datasize',self.Datasize
436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
436 # endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
437 437
438 438 print('==============================================')
439 439
@@ -626,11 +626,11 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
626 626
627 627 self.Num_inCoh = self.fheader.PPARavc
628 628 self.dataOut.PRF = self.fheader.PPARprf
629 self.dataOut.frequency = 34.85 * 10**9
629 self.dataOut.frequency = 34.85 * 10 ** 9
630 630 self.Lambda = SPEED_OF_LIGHT / self.dataOut.frequency
631 631 self.dataOut.ippSeconds = 1. / float(self.dataOut.PRF)
632 632
633 pulse_width = self.fheader.PPARpdr * 10**-9
633 pulse_width = self.fheader.PPARpdr * 10 ** -9
634 634 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
635 635
636 636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins, 2))
@@ -790,11 +790,11 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
790 790 # print 'SHAPE', self.dataOut_spc.shape
791 791 # For nyquist correction:
792 792 # fix = 20 # ~3m/s
793 #shift = self.Num_Bins/2 + fix
794 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
793 # shift = self.Num_Bins/2 + fix
794 # self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
795 795
796 796 '''Block Reading, the Block Data is received and Reshape is used to give it
797 797 shape.
798 798 '''
799 799
800 self.PointerReader = self.fp.tell() No newline at end of file
800 self.PointerReader = self.fp.tell()
@@ -105,7 +105,7 class HDFReader(Reader, ProcessingUnit):
105 105
106 106 for nTries in range(self.nTries):
107 107 fullpath = self.searchFilesOnLine(self.path, self.startDate,
108 self.endDate, self.expLabel, self.ext, self.walk,
108 self.endDate, self.expLabel, self.ext, self.walk,
109 109 self.filefmt, self.folderfmt)
110 110 try:
111 111 fullpath = next(fullpath)
@@ -117,7 +117,7 class HDFReader(Reader, ProcessingUnit):
117 117
118 118 log.warning(
119 119 'Waiting {} sec for a valid file in {}: try {} ...'.format(
120 self.delay, self.path, nTries + 1),
120 self.delay, self.path, nTries + 1),
121 121 self.name)
122 122 time.sleep(self.delay)
123 123
@@ -131,7 +131,7 class HDFReader(Reader, ProcessingUnit):
131 131 self.set = int(filename[8:11]) - 1
132 132 else:
133 133 log.log("Searching files in {}".format(self.path), self.name)
134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 135 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
136 136
137 137 self.setNextFile()
@@ -346,11 +346,11 class HDFWriter(Operation):
346 346 setFile = None
347 347 fp = None
348 348 firsttime = True
349 #Configurations
349 # Configurations
350 350 blocksPerFile = None
351 351 blockIndex = None
352 352 dataOut = None
353 #Data Arrays
353 # Data Arrays
354 354 dataList = None
355 355 metadataList = None
356 356 currentDay = None
@@ -411,11 +411,11 class HDFWriter(Operation):
411 411
412 412 timeDiff = currentTime - self.lastTime
413 413
 414         #If the day is different or if the difference between one sample and the next exceeds one hour
 414         # If the day is different or if the difference between one sample and the next exceeds one hour
415 415 if dataDay != self.currentDay:
416 416 self.currentDay = dataDay
417 417 return True
418 elif timeDiff > 3*60*60:
418 elif timeDiff > 3 * 60 * 60:
419 419 self.lastTime = currentTime
420 420 return True
421 421 else:
@@ -427,7 +427,7 class HDFWriter(Operation):
427 427
428 428 self.dataOut = dataOut
429 429 if not(self.isConfig):
430 self.setup(path=path, blocksPerFile=blocksPerFile,
430 self.setup(path=path, blocksPerFile=blocksPerFile,
431 431 metadataList=metadataList, dataList=dataList,
432 432 setType=setType, description=description)
433 433
@@ -444,27 +444,27 class HDFWriter(Operation):
444 444 setFile = self.setFile
445 445
446 446 timeTuple = time.localtime(self.dataOut.utctime)
447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
448 448 fullpath = os.path.join(path, subfolder)
449 449
450 450 if os.path.exists(fullpath):
451 451 filesList = os.listdir(fullpath)
452 452 filesList = [k for k in filesList if k.startswith(self.optchar)]
453 if len( filesList ) > 0:
453 if len(filesList) > 0:
454 454 filesList = sorted(filesList, key=str.lower)
455 455 filen = filesList[-1]
 456 456                 # the filename must have the following format
457 457 # 0 1234 567 89A BCDE (hex)
458 458 # x YYYY DDD SSS .ext
459 459 if isNumber(filen[8:11]):
 460                     setFile = int(filen[8:11]) #initialize my set counter to the set number of the last file
 460                     setFile = int(filen[8:11]) # initialize my set counter to the set number of the last file
461 461 else:
462 462 setFile = -1
463 463 else:
 464                 setFile = -1 #initialize my set counter
 464                 setFile = -1 # initialize my set counter
465 465 else:
466 466 os.makedirs(fullpath)
 467             setFile = -1 #initialize my set counter
 467             setFile = -1 # initialize my set counter
468 468
469 469 if self.setType is None:
470 470 setFile += 1
@@ -472,22 +472,22 class HDFWriter(Operation):
472 472 timeTuple.tm_year,
473 473 timeTuple.tm_yday,
474 474 setFile,
475 ext )
475 ext)
476 476 else:
477 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
477 setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
478 478 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
479 479 timeTuple.tm_year,
480 480 timeTuple.tm_yday,
481 481 setFile,
482 ext )
482 ext)
483 483
484 self.filename = os.path.join( path, subfolder, file )
484 self.filename = os.path.join(path, subfolder, file)
485 485
486 #Setting HDF5 File
486 # Setting HDF5 File
487 487 self.fp = h5py.File(self.filename, 'w')
488 #write metadata
488 # write metadata
489 489 self.writeMetadata(self.fp)
490 #Write data
490 # Write data
491 491 self.writeData(self.fp)
492 492
493 493 def getLabel(self, name, x=None):
@@ -563,9 +563,9 class HDFWriter(Operation):
563 563 for dsInfo in self.dsList:
564 564 if dsInfo['nDim'] == 0:
565 565 ds = grp.create_dataset(
566 self.getLabel(dsInfo['variable']),
567 (self.blocksPerFile, ),
568 chunks=True,
566 self.getLabel(dsInfo['variable']),
567 (self.blocksPerFile,),
568 chunks=True,
569 569 dtype=numpy.float64)
570 570 dtsets.append(ds)
571 571 data.append((dsInfo['variable'], -1))
@@ -577,8 +577,8 class HDFWriter(Operation):
577 577 sgrp = grp
578 578 for i in range(dsInfo['dsNumber']):
579 579 ds = sgrp.create_dataset(
580 self.getLabel(dsInfo['variable'], i),
581 (self.blocksPerFile, ) + dsInfo['shape'][1:],
580 self.getLabel(dsInfo['variable'], i),
581 (self.blocksPerFile,) + dsInfo['shape'][1:],
582 582 chunks=True,
583 583 dtype=dsInfo['dtype'])
584 584 dtsets.append(ds)
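The per-block dataset layout created above, shown as a small self-contained example (file, group and variable names are placeholders):

import h5py
import numpy

blocksPerFile = 120
with h5py.File('example.hdf5', 'w') as fp:
    grp = fp.create_group('Data')
    # A 0-D variable gets one float64 slot per block; chunking is left to h5py
    ds = grp.create_dataset('utctime', (blocksPerFile,), chunks=True, dtype=numpy.float64)
    ds[0] = 1.6e9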
This diff has been collapsed as it changes many lines (590 lines changed).
@@ -1,47 +1,47
1 import numpy,math,random,time
1 import numpy, math, random, time
2 2 #---------------1 Inherit from JRODataReader
3 3 from schainpy.model.io.jroIO_base import *
4 4 #---------------2 Inherit the properties of ProcessingUnit
5 from schainpy.model.proc.jroproc_base import ProcessingUnit,Operation,MPDecorator
5 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
6 6 #---------------3 Import the BasicHeader, SystemHeader, RadarControllerHeader and ProcessingHeader classes
7 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader,SystemHeader,RadarControllerHeader, ProcessingHeader
7 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
8 8 #---------------4 Import the Voltage object
9 9 from schainpy.model.data.jrodata import Voltage
10 10
11 11 class SimulatorReader(JRODataReader, ProcessingUnit):
12 incIntFactor = 1
13 nFFTPoints = 0
14 FixPP_IncInt = 1
15 FixRCP_IPP = 1000
16 FixPP_CohInt = 1
17 Tau_0 = 250
18 AcqH0_0 = 70
19 H0 = AcqH0_0
20 AcqDH_0 = 1.25
21 DH0 = AcqDH_0
22 Bauds = 32
23 BaudWidth = None
24 FixRCP_TXA = 40
25 FixRCP_TXB = 70
26 fAngle = 2.0*math.pi*(1/16)
27 DC_level = 500
28 stdev = 8
29 Num_Codes = 2
30 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
31 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
32 #Dyn_snCode = numpy.array([Num_Codes,Bauds])
33 Dyn_snCode = None
34 Samples = 200
35 channels = 2
36 pulses = None
37 Reference = None
38 pulse_size = None
39 prof_gen = None
40 Fdoppler = 100
41 Hdoppler = 36
42 Adoppler = 300
43 frequency = 9345
44 nTotalReadFiles = 1000
12 incIntFactor = 1
13 nFFTPoints = 0
14 FixPP_IncInt = 1
15 FixRCP_IPP = 1000
16 FixPP_CohInt = 1
17 Tau_0 = 250
18 AcqH0_0 = 70
19 H0 = AcqH0_0
20 AcqDH_0 = 1.25
21 DH0 = AcqDH_0
22 Bauds = 32
23 BaudWidth = None
24 FixRCP_TXA = 40
25 FixRCP_TXB = 70
26 fAngle = 2.0 * math.pi * (1 / 16)
27 DC_level = 500
28 stdev = 8
29 Num_Codes = 2
30 # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
31 # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
32 # Dyn_snCode = numpy.array([Num_Codes,Bauds])
33 Dyn_snCode = None
34 Samples = 200
35 channels = 2
36 pulses = None
37 Reference = None
38 pulse_size = None
39 prof_gen = None
40 Fdoppler = 100
41 Hdoppler = 36
42 Adoppler = 300
43 frequency = 9345
44 nTotalReadFiles = 1000
45 45
46 46 def __init__(self):
47 47 """
@@ -56,19 +56,19 class SimulatorReader(JRODataReader, ProcessingUnit):
56 56 ProcessingUnit.__init__(self)
57 57 print(" [ START ] init - Metodo Simulator Reader")
58 58
59 self.isConfig = False
60 self.basicHeaderObj = BasicHeader(LOCALTIME)
61 self.systemHeaderObj = SystemHeader()
62 self.radarControllerHeaderObj = RadarControllerHeader()
63 self.processingHeaderObj = ProcessingHeader()
64 self.profileIndex = 2**32-1
65 self.dataOut = Voltage()
66 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
67 code0 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1])
68 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
69 code1 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1])
70 #self.Dyn_snCode = numpy.array([code0,code1])
71 self.Dyn_snCode = None
59 self.isConfig = False
60 self.basicHeaderObj = BasicHeader(LOCALTIME)
61 self.systemHeaderObj = SystemHeader()
62 self.radarControllerHeaderObj = RadarControllerHeader()
63 self.processingHeaderObj = ProcessingHeader()
64 self.profileIndex = 2 ** 32 - 1
65 self.dataOut = Voltage()
66 # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
67 code0 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, 1])
68 # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
69 code1 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1])
70 # self.Dyn_snCode = numpy.array([code0,code1])
71 self.Dyn_snCode = None
72 72
73 73 def set_kwargs(self, **kwargs):
74 74 for key, value in kwargs.items():
@@ -76,13 +76,13 class SimulatorReader(JRODataReader, ProcessingUnit):
76 76
77 77 def __hasNotDataInBuffer(self):
78 78
79 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock* self.nTxs:
80 if self.nReadBlocks>0:
81 tmp = self.dataOut.utctime
82 tmp_utc = int(self.dataOut.utctime)
83 tmp_milisecond = int((tmp-tmp_utc)*1000)
84 self.basicHeaderObj.utc = tmp_utc
85 self.basicHeaderObj.miliSecond= tmp_milisecond
79 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
80 if self.nReadBlocks > 0:
81 tmp = self.dataOut.utctime
82 tmp_utc = int(self.dataOut.utctime)
83 tmp_milisecond = int((tmp - tmp_utc) * 1000)
84 self.basicHeaderObj.utc = tmp_utc
85 self.basicHeaderObj.miliSecond = tmp_milisecond
86 86 return 1
87 87 return 0
88 88
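The seconds/milliseconds split used above, in isolation (the timestamp is arbitrary):

import time

tmp = time.time()
tmp_utc = int(tmp)                            # whole seconds since the epoch
tmp_milisecond = int((tmp - tmp_utc) * 1000)  # fractional part expressed in milliseconds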
@@ -90,14 +90,14 class SimulatorReader(JRODataReader, ProcessingUnit):
90 90 """Set the next file to be readed open it and parse de file header"""
91 91
92 92 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
93 self.nReadFiles=self.nReadFiles+1
93 self.nReadFiles = self.nReadFiles + 1
94 94 if self.nReadFiles > self.nTotalReadFiles:
95 self.flagNoMoreFiles=1
95 self.flagNoMoreFiles = 1
96 96 raise schainpy.admin.SchainWarning('No more files to read')
97 97
98 print('------------------- [Opening file] ------------------------------',self.nReadFiles)
99 self.nReadBlocks = 0
100 #if self.nReadBlocks==0:
98 print('------------------- [Opening file] ------------------------------', self.nReadFiles)
99 self.nReadBlocks = 0
100 # if self.nReadBlocks==0:
101 101 # self.readFirstHeader()
102 102
103 103 def __setNewBlock(self):
@@ -113,43 +113,43 class SimulatorReader(JRODataReader, ProcessingUnit):
113 113 self.getBasicHeader()
114 114 break
115 115 if self.verbose:
116 print("[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
116 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
117 117 self.processingHeaderObj.dataBlocksPerFile,
118 self.dataOut.datatime.ctime()) )
118 self.dataOut.datatime.ctime()))
119 119 return 1
120 120
121 121 def getFirstHeader(self):
122 122 self.getBasicHeader()
123 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
124 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
123 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
124 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
125 125 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
126 self.dataOut.dtype = self.dtype
126 self.dataOut.dtype = self.dtype
127 127
128 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
129 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
128 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
129 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
130 130 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
131 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
131 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
132 132 # assume the data is not decoded
133 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
133 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
134 134 # asumo q la data no esta sin flip
135 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
136 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
137 self.dataOut.frequency = self.frequency
135 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
136 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
137 self.dataOut.frequency = self.frequency
138 138
139 139 def getBasicHeader(self):
140 140 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
141 141 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
142 142
143 143 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
144 self.dataOut.timeZone = self.basicHeaderObj.timeZone
145 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
146 self.dataOut.errorCount = self.basicHeaderObj.errorCount
147 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
148 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
144 self.dataOut.timeZone = self.basicHeaderObj.timeZone
145 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
146 self.dataOut.errorCount = self.basicHeaderObj.errorCount
147 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
148 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
149 149
150 150 def readFirstHeader(self):
151 151
152 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
152 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
153 153 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
154 154 if datatype == 0:
155 155 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -169,53 +169,53 class SimulatorReader(JRODataReader, ProcessingUnit):
169 169 self.dtype = datatype_str
170 170
171 171
172 def set_RCH(self, expType=2, nTx=1,ipp=None, txA=0, txB=0,
172 def set_RCH(self, expType=2, nTx=1, ipp=None, txA=0, txB=0,
173 173 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
174 174 numTaus=0, line6Function=0, line5Function=0, fClock=None,
175 175 prePulseBefore=0, prePulseAfter=0,
176 176 codeType=0, nCode=0, nBaud=0, code=None,
177 flip1=0, flip2=0,Taus=0):
178 self.radarControllerHeaderObj.expType = expType
179 self.radarControllerHeaderObj.nTx = nTx
180 self.radarControllerHeaderObj.ipp = float(ipp)
181 self.radarControllerHeaderObj.txA = float(txA)
182 self.radarControllerHeaderObj.txB = float(txB)
183 self.radarControllerHeaderObj.rangeIpp = b'A\n'#ipp
184 self.radarControllerHeaderObj.rangeTxA = b''
185 self.radarControllerHeaderObj.rangeTxB = b''
186
187 self.radarControllerHeaderObj.nHeights = int(nHeights)
188 self.radarControllerHeaderObj.firstHeight = numpy.array([firstHeight])
189 self.radarControllerHeaderObj.deltaHeight = numpy.array([deltaHeight])
190 self.radarControllerHeaderObj.samplesWin = numpy.array([nHeights])
191
192
193 self.radarControllerHeaderObj.nWindows = nWindows
194 self.radarControllerHeaderObj.numTaus = numTaus
195 self.radarControllerHeaderObj.codeType = codeType
177 flip1=0, flip2=0, Taus=0):
178 self.radarControllerHeaderObj.expType = expType
179 self.radarControllerHeaderObj.nTx = nTx
180 self.radarControllerHeaderObj.ipp = float(ipp)
181 self.radarControllerHeaderObj.txA = float(txA)
182 self.radarControllerHeaderObj.txB = float(txB)
183 self.radarControllerHeaderObj.rangeIpp = b'A\n' # ipp
184 self.radarControllerHeaderObj.rangeTxA = b''
185 self.radarControllerHeaderObj.rangeTxB = b''
186
187 self.radarControllerHeaderObj.nHeights = int(nHeights)
188 self.radarControllerHeaderObj.firstHeight = numpy.array([firstHeight])
189 self.radarControllerHeaderObj.deltaHeight = numpy.array([deltaHeight])
190 self.radarControllerHeaderObj.samplesWin = numpy.array([nHeights])
191
192
193 self.radarControllerHeaderObj.nWindows = nWindows
194 self.radarControllerHeaderObj.numTaus = numTaus
195 self.radarControllerHeaderObj.codeType = codeType
196 196 self.radarControllerHeaderObj.line6Function = line6Function
197 197 self.radarControllerHeaderObj.line5Function = line5Function
198 #self.radarControllerHeaderObj.fClock = fClock
199 self.radarControllerHeaderObj.prePulseBefore= prePulseBefore
198 # self.radarControllerHeaderObj.fClock = fClock
199 self.radarControllerHeaderObj.prePulseBefore = prePulseBefore
200 200 self.radarControllerHeaderObj.prePulseAfter = prePulseAfter
201 201
202 self.radarControllerHeaderObj.flip1 = flip1
203 self.radarControllerHeaderObj.flip2 = flip2
202 self.radarControllerHeaderObj.flip1 = flip1
203 self.radarControllerHeaderObj.flip2 = flip2
204 204
205 self.radarControllerHeaderObj.code_size = 0
206 if self.radarControllerHeaderObj.codeType != 0:
207 self.radarControllerHeaderObj.nCode = nCode
208 self.radarControllerHeaderObj.nBaud = nBaud
209 self.radarControllerHeaderObj.code = code
210 self.radarControllerHeaderObj.code_size = int(numpy.ceil(nBaud / 32.)) * nCode * 4
205 self.radarControllerHeaderObj.code_size = 0
206 if self.radarControllerHeaderObj.codeType != 0:
207 self.radarControllerHeaderObj.nCode = nCode
208 self.radarControllerHeaderObj.nBaud = nBaud
209 self.radarControllerHeaderObj.code = code
210 self.radarControllerHeaderObj.code_size = int(numpy.ceil(nBaud / 32.)) * nCode * 4
211 211
212 212 if fClock is None and deltaHeight is not None:
213 213 self.fClock = 0.15 / (deltaHeight * 1e-6)
214 self.radarControllerHeaderObj.fClock = self.fClock
215 if numTaus==0:
216 self.radarControllerHeaderObj.Taus = numpy.array(0,'<f4')
214 self.radarControllerHeaderObj.fClock = self.fClock
215 if numTaus == 0:
216 self.radarControllerHeaderObj.Taus = numpy.array(0, '<f4')
217 217 else:
218 self.radarControllerHeaderObj.Taus = numpy.array(Taus,'<f4')
218 self.radarControllerHeaderObj.Taus = numpy.array(Taus, '<f4')
219 219
220 220 def set_PH(self, dtype=0, blockSize=0, profilesPerBlock=0,
221 221 dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
@@ -224,177 +224,177 class SimulatorReader(JRODataReader, ProcessingUnit):
224 224 code=0, nBaud=None, shif_fft=False, flag_dc=False,
225 225 flag_cspc=False, flag_decode=False, flag_deflip=False):
226 226
227 self.processingHeaderObj.dtype = dtype
228 self.processingHeaderObj.profilesPerBlock = profilesPerBlock
227 self.processingHeaderObj.dtype = dtype
228 self.processingHeaderObj.profilesPerBlock = profilesPerBlock
229 229 self.processingHeaderObj.dataBlocksPerFile = dataBlocksPerFile
230 self.processingHeaderObj.nWindows = nWindows
231 self.processingHeaderObj.processFlags = processFlags
232 self.processingHeaderObj.nCohInt = nCohInt
233 self.processingHeaderObj.nIncohInt = nIncohInt
234 self.processingHeaderObj.totalSpectra = totalSpectra
235
236 self.processingHeaderObj.nHeights = int(nHeights)
237 self.processingHeaderObj.firstHeight = firstHeight#numpy.array([firstHeight])#firstHeight
238 self.processingHeaderObj.deltaHeight = deltaHeight#numpy.array([deltaHeight])#deltaHeight
239 self.processingHeaderObj.samplesWin = nHeights#numpy.array([nHeights])#nHeights
240
241 def set_BH(self, utc = 0, miliSecond = 0, timeZone = 0):
242 self.basicHeaderObj.utc = utc
243 self.basicHeaderObj.miliSecond = miliSecond
244 self.basicHeaderObj.timeZone = timeZone
230 self.processingHeaderObj.nWindows = nWindows
231 self.processingHeaderObj.processFlags = processFlags
232 self.processingHeaderObj.nCohInt = nCohInt
233 self.processingHeaderObj.nIncohInt = nIncohInt
234 self.processingHeaderObj.totalSpectra = totalSpectra
235
236 self.processingHeaderObj.nHeights = int(nHeights)
237 self.processingHeaderObj.firstHeight = firstHeight # numpy.array([firstHeight])#firstHeight
238 self.processingHeaderObj.deltaHeight = deltaHeight # numpy.array([deltaHeight])#deltaHeight
239 self.processingHeaderObj.samplesWin = nHeights # numpy.array([nHeights])#nHeights
240
241 def set_BH(self, utc=0, miliSecond=0, timeZone=0):
242 self.basicHeaderObj.utc = utc
243 self.basicHeaderObj.miliSecond = miliSecond
244 self.basicHeaderObj.timeZone = timeZone
245 245
246 246 def set_SH(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=32):
247 #self.systemHeaderObj.size = size
248 self.systemHeaderObj.nSamples = nSamples
249 self.systemHeaderObj.nProfiles = nProfiles
250 self.systemHeaderObj.nChannels = nChannels
251 self.systemHeaderObj.adcResolution = adcResolution
247 # self.systemHeaderObj.size = size
248 self.systemHeaderObj.nSamples = nSamples
249 self.systemHeaderObj.nProfiles = nProfiles
250 self.systemHeaderObj.nChannels = nChannels
251 self.systemHeaderObj.adcResolution = adcResolution
252 252 self.systemHeaderObj.pciDioBusWidth = pciDioBusWidth
253 253
254 254 def init_acquisition(self):
255 255
256 256 if self.nFFTPoints != 0:
257 self.incIntFactor = m_nProfilesperBlock/self.nFFTPoints
257 self.incIntFactor = m_nProfilesperBlock / self.nFFTPoints
258 258 if (self.FixPP_IncInt > self.incIntFactor):
259 self.incIntFactor = self.FixPP_IncInt/ self.incIntFactor
260 elif(self.FixPP_IncInt< self.incIntFactor):
259 self.incIntFactor = self.FixPP_IncInt / self.incIntFactor
260 elif(self.FixPP_IncInt < self.incIntFactor):
261 261 print("False alert...")
262 262
263 ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
263 ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
264 264
265 self.timeperblock =int(((self.FixRCP_IPP
266 *ProfilesperBlock
267 *self.FixPP_CohInt
268 *self.incIntFactor)
269 /150.0)
270 *0.9
271 +0.5)
265 self.timeperblock = int(((self.FixRCP_IPP
266 * ProfilesperBlock
267 * self.FixPP_CohInt
268 * self.incIntFactor)
269 / 150.0)
270 * 0.9
271 + 0.5)
272 272 # for each channel
273 self.profiles = ProfilesperBlock*self.FixPP_CohInt
274 self.profiles = ProfilesperBlock
275 self.Reference = int((self.Tau_0-self.AcqH0_0)/(self.AcqDH_0)+0.5)
276 self.BaudWidth = int((self.FixRCP_TXA/self.AcqDH_0)/self.Bauds + 0.5 )
273 self.profiles = ProfilesperBlock * self.FixPP_CohInt
274 self.profiles = ProfilesperBlock
275 self.Reference = int((self.Tau_0 - self.AcqH0_0) / (self.AcqDH_0) + 0.5)
276 self.BaudWidth = int((self.FixRCP_TXA / self.AcqDH_0) / self.Bauds + 0.5)
277 277
278 if (self.BaudWidth==0):
279 self.BaudWidth=1
278 if (self.BaudWidth == 0):
279 self.BaudWidth = 1
280 280
281 def init_pulse(self,Num_Codes=Num_Codes,Bauds=Bauds,BaudWidth=BaudWidth,Dyn_snCode=Dyn_snCode):
281 def init_pulse(self, Num_Codes=Num_Codes, Bauds=Bauds, BaudWidth=BaudWidth, Dyn_snCode=Dyn_snCode):
282 282
283 Num_Codes = Num_Codes
284 Bauds = Bauds
285 BaudWidth = BaudWidth
286 Dyn_snCode = Dyn_snCode
283 Num_Codes = Num_Codes
284 Bauds = Bauds
285 BaudWidth = BaudWidth
286 Dyn_snCode = Dyn_snCode
287 287
288 288 if Dyn_snCode:
289 289 print("EXISTE")
290 290 else:
291 291 print("No existe")
292 292
293 if Dyn_snCode: # if Bauds:
294 pulses = list(range(0,Num_Codes))
295 num_codes = Num_Codes
293 if Dyn_snCode: # if Bauds:
294 pulses = list(range(0, Num_Codes))
295 num_codes = Num_Codes
296 296 for i in range(num_codes):
297 pulse_size = Bauds*BaudWidth
298 pulses[i] = numpy.zeros(pulse_size)
297 pulse_size = Bauds * BaudWidth
298 pulses[i] = numpy.zeros(pulse_size)
299 299 for j in range(Bauds):
300 300 for k in range(BaudWidth):
301 pulses[i][j*BaudWidth+k] = int(Dyn_snCode[i][j]*600)
301 pulses[i][j * BaudWidth + k] = int(Dyn_snCode[i][j] * 600)
302 302 else:
303 303 print("sin code")
304 pulses = list(range(1))
305 if self.AcqDH_0>0.149:
306 pulse_size = int(self.FixRCP_TXB/0.15+0.5)
304 pulses = list(range(1))
305 if self.AcqDH_0 > 0.149:
306 pulse_size = int(self.FixRCP_TXB / 0.15 + 0.5)
307 307 else:
308 pulse_size = int((self.FixRCP_TXB/self.AcqDH_0)+0.5) #0.0375
309 pulses[0] = numpy.ones(pulse_size)
310 pulses = 600*pulses[0]
311
312 return pulses,pulse_size
313
314 def jro_GenerateBlockOfData(self,Samples=Samples,DC_level= DC_level,stdev=stdev,
315 Reference= Reference,pulses= pulses,
316 Num_Codes= Num_Codes,pulse_size=pulse_size,
317 prof_gen= prof_gen,H0 = H0,DH0=DH0,
318 Adoppler=Adoppler,Fdoppler= Fdoppler,Hdoppler=Hdoppler):
319 Samples = Samples
320 DC_level = DC_level
321 stdev = stdev
322 m_nR = Reference
323 pulses = pulses
324 num_codes = Num_Codes
325 ps = pulse_size
326 prof_gen = prof_gen
327 channels = self.channels
328 H0 = H0
329 DH0 = DH0
330 ippSec = self.radarControllerHeaderObj.ippSeconds
331 Fdoppler = self.Fdoppler
332 Hdoppler = self.Hdoppler
333 Adoppler = self.Adoppler
334
335 self.datablock = numpy.zeros([channels,prof_gen,Samples],dtype= numpy.complex64)
308 pulse_size = int((self.FixRCP_TXB / self.AcqDH_0) + 0.5) # 0.0375
309 pulses[0] = numpy.ones(pulse_size)
310 pulses = 600 * pulses[0]
311
312 return pulses, pulse_size
313
314 def jro_GenerateBlockOfData(self, Samples=Samples, DC_level=DC_level, stdev=stdev,
315 Reference=Reference, pulses=pulses,
316 Num_Codes=Num_Codes, pulse_size=pulse_size,
317 prof_gen=prof_gen, H0=H0, DH0=DH0,
318 Adoppler=Adoppler, Fdoppler=Fdoppler, Hdoppler=Hdoppler):
319 Samples = Samples
320 DC_level = DC_level
321 stdev = stdev
322 m_nR = Reference
323 pulses = pulses
324 num_codes = Num_Codes
325 ps = pulse_size
326 prof_gen = prof_gen
327 channels = self.channels
328 H0 = H0
329 DH0 = DH0
330 ippSec = self.radarControllerHeaderObj.ippSeconds
331 Fdoppler = self.Fdoppler
332 Hdoppler = self.Hdoppler
333 Adoppler = self.Adoppler
334
335 self.datablock = numpy.zeros([channels, prof_gen, Samples], dtype=numpy.complex64)
336 336 for i in range(channels):
337 337 for k in range(prof_gen):
338 338 #-----------------------NOISE---------------
339 Noise_r = numpy.random.normal(DC_level,stdev,Samples)
340 Noise_i = numpy.random.normal(DC_level,stdev,Samples)
341 Noise = numpy.zeros(Samples,dtype=complex)
339 Noise_r = numpy.random.normal(DC_level, stdev, Samples)
340 Noise_i = numpy.random.normal(DC_level, stdev, Samples)
341 Noise = numpy.zeros(Samples, dtype=complex)
342 342 Noise.real = Noise_r
343 343 Noise.imag = Noise_i
344 344 #-----------------------PULSOS--------------
345 Pulso = numpy.zeros(pulse_size,dtype=complex)
346 Pulso.real = pulses[k%num_codes]
347 Pulso.imag = pulses[k%num_codes]
345 Pulso = numpy.zeros(pulse_size, dtype=complex)
346 Pulso.real = pulses[k % num_codes]
347 Pulso.imag = pulses[k % num_codes]
348 348 #--------------------- PULSES+NOISE----------
349 InBuffer = numpy.zeros(Samples,dtype=complex)
350 InBuffer[m_nR:m_nR+ps] = Pulso
351 InBuffer = InBuffer+Noise
349 InBuffer = numpy.zeros(Samples, dtype=complex)
350 InBuffer[m_nR:m_nR + ps] = Pulso
351 InBuffer = InBuffer + Noise
352 352 #--------------------- ANGLE -------------------------------
353 InBuffer.real[m_nR:m_nR+ps] = InBuffer.real[m_nR:m_nR+ps]*(math.cos( self.fAngle)*5)
354 InBuffer.imag[m_nR:m_nR+ps] = InBuffer.imag[m_nR:m_nR+ps]*(math.sin( self.fAngle)*5)
355 InBuffer=InBuffer
356 self.datablock[i][k]= InBuffer
353 InBuffer.real[m_nR:m_nR + ps] = InBuffer.real[m_nR:m_nR + ps] * (math.cos(self.fAngle) * 5)
354 InBuffer.imag[m_nR:m_nR + ps] = InBuffer.imag[m_nR:m_nR + ps] * (math.sin(self.fAngle) * 5)
355 InBuffer = InBuffer
356 self.datablock[i][k] = InBuffer
357 357
358 358 #----------------DOPPLER SIGNAL...............................................
359 time_vec = numpy.linspace(0,(prof_gen-1)*ippSec,int(prof_gen))+self.nReadBlocks*ippSec*prof_gen+(self.nReadFiles-1)*ippSec*prof_gen
360 fd = Fdoppler #+(600.0/120)*self.nReadBlocks
361 d_signal = Adoppler*numpy.array(numpy.exp(1.0j*2.0*math.pi*fd*time_vec),dtype=numpy.complex64)
359 time_vec = numpy.linspace(0, (prof_gen - 1) * ippSec, int(prof_gen)) + self.nReadBlocks * ippSec * prof_gen + (self.nReadFiles - 1) * ippSec * prof_gen
360 fd = Fdoppler # +(600.0/120)*self.nReadBlocks
361 d_signal = Adoppler * numpy.array(numpy.exp(1.0j * 2.0 * math.pi * fd * time_vec), dtype=numpy.complex64)
362 362 #-------------Signal with spectral width--------------------
363 if prof_gen%2==0:
364 min = int(prof_gen/2.0-1.0)
365 max = int(prof_gen/2.0)
363 if prof_gen % 2 == 0:
364 min = int(prof_gen / 2.0 - 1.0)
365 max = int(prof_gen / 2.0)
366 366 else:
367 min = int(prof_gen/2.0)
368 max = int(prof_gen/2.0)
369 specw_sig = numpy.linspace(-min,max,prof_gen)
370 w = 4
371 A = 20
372 specw_sig = specw_sig/w
373 specw_sig = numpy.sinc(specw_sig)
374 specw_sig = A*numpy.array(specw_sig,dtype=numpy.complex64)
367 min = int(prof_gen / 2.0)
368 max = int(prof_gen / 2.0)
369 specw_sig = numpy.linspace(-min, max, prof_gen)
370 w = 4
371 A = 20
372 specw_sig = specw_sig / w
373 specw_sig = numpy.sinc(specw_sig)
374 specw_sig = A * numpy.array(specw_sig, dtype=numpy.complex64)
375 375 #------------------ DATABLOCK + DOPPLER--------------------
376 HD=int(Hdoppler/self.AcqDH_0)
376 HD = int(Hdoppler / self.AcqDH_0)
377 377 for i in range(12):
378 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ d_signal# RESULT
378 self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + d_signal # RESULT
379 379 #------------------ DATABLOCK + DOPPLER*Sinc(x)--------------------
380 HD=int(Hdoppler/self.AcqDH_0)
381 HD=int(HD/2)
380 HD = int(Hdoppler / self.AcqDH_0)
381 HD = int(HD / 2)
382 382 for i in range(12):
383 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ specw_sig*d_signal# RESULT
383 self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + specw_sig * d_signal # RESULT
384 384
385 385 def readBlock(self):
386 386
387 self.jro_GenerateBlockOfData(Samples= self.samples,DC_level=self.DC_level,
388 stdev=self.stdev,Reference= self.Reference,
389 pulses = self.pulses,Num_Codes=self.Num_Codes,
390 pulse_size=self.pulse_size,prof_gen=self.profiles,
391 H0=self.H0,DH0=self.DH0)
387 self.jro_GenerateBlockOfData(Samples=self.samples, DC_level=self.DC_level,
388 stdev=self.stdev, Reference=self.Reference,
389 pulses=self.pulses, Num_Codes=self.Num_Codes,
390 pulse_size=self.pulse_size, prof_gen=self.profiles,
391 H0=self.H0, DH0=self.DH0)
392 392
393 self.profileIndex = 0
394 self.flagIsNewFile = 0
393 self.profileIndex = 0
394 self.flagIsNewFile = 0
395 395 self.flagIsNewBlock = 1
396 self.nTotalBlocks += 1
397 self.nReadBlocks += 1
396 self.nTotalBlocks += 1
397 self.nReadBlocks += 1
398 398
399 399 return 1
400 400
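A minimal sketch of the complex Doppler tone added to the data block above (profile count, IPP, amplitude and frequency are placeholder values; the sinc-shaped spectral-width weighting follows the same pattern):

import math
import numpy

prof_gen = 300       # profiles per block (placeholder)
ippSec = 0.0004      # inter-pulse period in seconds (placeholder)
Fdoppler = 100.0     # Doppler frequency in Hz
Adoppler = 300.0     # tone amplitude

time_vec = numpy.linspace(0, (prof_gen - 1) * ippSec, prof_gen)
d_signal = (Adoppler * numpy.exp(1.0j * 2.0 * math.pi * Fdoppler * time_vec)).astype(numpy.complex64)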
@@ -404,11 +404,11 class SimulatorReader(JRODataReader, ProcessingUnit):
404 404 self.dataOut.flagNodata = True
405 405 return 0
406 406 self.flagDiscontinuousBlock = 0
407 self.flagIsNewBlock = 0
408 if self.__hasNotDataInBuffer(): # this is true here
409 if not(self.readNextBlock()): # returns 1, which is why the `if not` jumps to getBasicHeader
407 self.flagIsNewBlock = 0
408 if self.__hasNotDataInBuffer(): # this is true here
409 if not(self.readNextBlock()): # returns 1, which is why the `if not` jumps to getBasicHeader
410 410 return 0
411 self.getFirstHeader() # attribute
411 self.getFirstHeader() # attribute
412 412
413 413 if not self.getByBlock:
414 414 self.dataOut.flagDataAsBlock = False
@@ -423,36 +423,36 class SimulatorReader(JRODataReader, ProcessingUnit):
423 423 return self.dataOut.data
424 424
425 425
426 def setup(self,frequency=49.92e6,incIntFactor= 1, nFFTPoints = 0, FixPP_IncInt=1,FixRCP_IPP=1000,
427 FixPP_CohInt= 1,Tau_0= 250,AcqH0_0 = 70 ,AcqDH_0=1.25, Bauds= 32,
428 FixRCP_TXA = 40, FixRCP_TXB = 50, fAngle = 2.0*math.pi*(1/16),DC_level= 50,
429 stdev= 8,Num_Codes = 1 , Dyn_snCode = None, samples=200,
430 channels=2,Fdoppler=20,Hdoppler=36,Adoppler=500,
431 profilesPerBlock=300,dataBlocksPerFile=120,nTotalReadFiles=10000,
426 def setup(self, frequency=49.92e6, incIntFactor=1, nFFTPoints=0, FixPP_IncInt=1, FixRCP_IPP=1000,
427 FixPP_CohInt=1, Tau_0=250, AcqH0_0=70 , AcqDH_0=1.25, Bauds=32,
428 FixRCP_TXA=40, FixRCP_TXB=50, fAngle=2.0 * math.pi * (1 / 16), DC_level=50,
429 stdev=8, Num_Codes=1 , Dyn_snCode=None, samples=200,
430 channels=2, Fdoppler=20, Hdoppler=36, Adoppler=500,
431 profilesPerBlock=300, dataBlocksPerFile=120, nTotalReadFiles=10000,
432 432 **kwargs):
433 433
434 434 self.set_kwargs(**kwargs)
435 435 self.nReadBlocks = 0
436 self.nReadFiles = 1
437 print('------------------- [Opening file: ] ------------------------------',self.nReadFiles)
436 self.nReadFiles = 1
437 print('------------------- [Opening file: ] ------------------------------', self.nReadFiles)
438 438
439 tmp = time.time()
440 tmp_utc = int(tmp)
441 tmp_milisecond = int((tmp-tmp_utc)*1000)
442 print(" SETUP -basicHeaderObj.utc",datetime.datetime.utcfromtimestamp(tmp))
439 tmp = time.time()
440 tmp_utc = int(tmp)
441 tmp_milisecond = int((tmp - tmp_utc) * 1000)
442 print(" SETUP -basicHeaderObj.utc", datetime.datetime.utcfromtimestamp(tmp))
443 443 if Dyn_snCode is None:
444 Num_Codes=1
445 Bauds =1
444 Num_Codes = 1
445 Bauds = 1
446 446
447 447
448 448
449 self.set_BH(utc= tmp_utc,miliSecond= tmp_milisecond,timeZone=300 )
450 self.set_RCH( expType=0, nTx=150,ipp=FixRCP_IPP, txA=FixRCP_TXA, txB= FixRCP_TXB,
449 self.set_BH(utc=tmp_utc, miliSecond=tmp_milisecond, timeZone=300)
450 self.set_RCH(expType=0, nTx=150, ipp=FixRCP_IPP, txA=FixRCP_TXA, txB=FixRCP_TXB,
451 451 nWindows=1 , nHeights=samples, firstHeight=AcqH0_0, deltaHeight=AcqDH_0,
452 452 numTaus=1, line6Function=0, line5Function=0, fClock=None,
453 453 prePulseBefore=0, prePulseAfter=0,
454 454 codeType=0, nCode=Num_Codes, nBaud=32, code=Dyn_snCode,
455 flip1=0, flip2=0,Taus=Tau_0)
455 flip1=0, flip2=0, Taus=Tau_0)
456 456
457 457 self.set_PH(dtype=0, blockSize=0, profilesPerBlock=profilesPerBlock,
458 458 dataBlocksPerFile=dataBlocksPerFile, nWindows=1, processFlags=numpy.array([1024]), nCohInt=1,
@@ -465,54 +465,54 class SimulatorReader(JRODataReader, ProcessingUnit):
465 465
466 466 self.readFirstHeader()
467 467
468 self.frequency = frequency
469 self.incIntFactor = incIntFactor
470 self.nFFTPoints = nFFTPoints
471 self.FixPP_IncInt = FixPP_IncInt
472 self.FixRCP_IPP = FixRCP_IPP
473 self.FixPP_CohInt = FixPP_CohInt
474 self.Tau_0 = Tau_0
475 self.AcqH0_0 = AcqH0_0
476 self.H0 = AcqH0_0
477 self.AcqDH_0 = AcqDH_0
478 self.DH0 = AcqDH_0
479 self.Bauds = Bauds
480 self.FixRCP_TXA = FixRCP_TXA
481 self.FixRCP_TXB = FixRCP_TXB
482 self.fAngle = fAngle
483 self.DC_level = DC_level
484 self.stdev = stdev
485 self.Num_Codes = Num_Codes
486 self.Dyn_snCode = Dyn_snCode
487 self.samples = samples
488 self.channels = channels
489 self.profiles = None
490 self.m_nReference = None
491 self.Baudwidth = None
492 self.Fdoppler = Fdoppler
493 self.Hdoppler = Hdoppler
494 self.Adoppler = Adoppler
495 self.nTotalReadFiles = int(nTotalReadFiles)
468 self.frequency = frequency
469 self.incIntFactor = incIntFactor
470 self.nFFTPoints = nFFTPoints
471 self.FixPP_IncInt = FixPP_IncInt
472 self.FixRCP_IPP = FixRCP_IPP
473 self.FixPP_CohInt = FixPP_CohInt
474 self.Tau_0 = Tau_0
475 self.AcqH0_0 = AcqH0_0
476 self.H0 = AcqH0_0
477 self.AcqDH_0 = AcqDH_0
478 self.DH0 = AcqDH_0
479 self.Bauds = Bauds
480 self.FixRCP_TXA = FixRCP_TXA
481 self.FixRCP_TXB = FixRCP_TXB
482 self.fAngle = fAngle
483 self.DC_level = DC_level
484 self.stdev = stdev
485 self.Num_Codes = Num_Codes
486 self.Dyn_snCode = Dyn_snCode
487 self.samples = samples
488 self.channels = channels
489 self.profiles = None
490 self.m_nReference = None
491 self.Baudwidth = None
492 self.Fdoppler = Fdoppler
493 self.Hdoppler = Hdoppler
494 self.Adoppler = Adoppler
495 self.nTotalReadFiles = int(nTotalReadFiles)
496 496
497 497 print("IPP ", self.FixRCP_IPP)
498 print("Tau_0 ",self.Tau_0)
499 print("AcqH0_0",self.AcqH0_0)
500 print("samples,window ",self.samples)
501 print("AcqDH_0",AcqDH_0)
502 print("FixRCP_TXA",self.FixRCP_TXA)
503 print("FixRCP_TXB",self.FixRCP_TXB)
504 print("Dyn_snCode",Dyn_snCode)
498 print("Tau_0 ", self.Tau_0)
499 print("AcqH0_0", self.AcqH0_0)
500 print("samples,window ", self.samples)
501 print("AcqDH_0", AcqDH_0)
502 print("FixRCP_TXA", self.FixRCP_TXA)
503 print("FixRCP_TXB", self.FixRCP_TXB)
504 print("Dyn_snCode", Dyn_snCode)
505 505 print("Fdoppler", Fdoppler)
506 print("Hdoppler",Hdoppler)
507 print("Vdopplermax",Fdoppler*(3.0e8/self.frequency)/2.0)
506 print("Hdoppler", Hdoppler)
507 print("Vdopplermax", Fdoppler * (3.0e8 / self.frequency) / 2.0)
508 508 print("nTotalReadFiles", nTotalReadFiles)
509 509
510 510 self.init_acquisition()
511 self.pulses,self.pulse_size=self.init_pulse(Num_Codes=self.Num_Codes,Bauds=self.Bauds,BaudWidth=self.BaudWidth,Dyn_snCode=Dyn_snCode)
511 self.pulses, self.pulse_size = self.init_pulse(Num_Codes=self.Num_Codes, Bauds=self.Bauds, BaudWidth=self.BaudWidth, Dyn_snCode=Dyn_snCode)
512 512 print(" [ END ] - SETUP metodo")
513 513 return
514 514
515 def run(self,**kwargs): # own method
515 def run(self, **kwargs): # own method
516 516 if not(self.isConfig):
517 517 self.setup(**kwargs)
518 518 self.isConfig = True
@@ -53,7 +53,7 class SpectraReader(JRODataReader, ProcessingUnit):
53 53
54 54 """
55 55
56 def __init__(self):#, **kwargs):
56 def __init__(self): # , **kwargs):
57 57 """
58 58 Initializer of the SpectraReader class for reading spectra data.
59 59
@@ -121,12 +121,12 class SpectraReader(JRODataReader, ProcessingUnit):
121 121 self.nRdPairs = 0
122 122 self.rdPairList = []
123 123
124 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
126 self.nRdChannels = self.nRdChannels + 1 #pair of identical channels
124 for i in range(0, self.processingHeaderObj.totalSpectra * 2, 2):
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i + 1]:
126 self.nRdChannels = self.nRdChannels + 1 # pair of identical channels
127 127 else:
128 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
128 self.nRdPairs = self.nRdPairs + 1 # pair of different channels
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i + 1]))
130 130
131 131 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
132 132
@@ -165,38 +165,38 class SpectraReader(JRODataReader, ProcessingUnit):
165 165
166 166 fpointer = self.fp.tell()
167 167
168 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
169 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
168 spc = numpy.fromfile(self.fp, self.dtype[0], self.pts2read_SelfSpectra)
169 spc = spc.reshape((self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # reshape into a 3D array
170 170
171 171 if self.processingHeaderObj.flag_cspc:
172 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
173 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
172 cspc = numpy.fromfile(self.fp, self.dtype, self.pts2read_CrossSpectra)
173 cspc = cspc.reshape((self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # reshape into a 3D array
174 174
175 175 if self.processingHeaderObj.flag_dc:
176 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
177 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
176 dc = numpy.fromfile(self.fp, self.dtype, self.pts2read_DCchannels) # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
177 dc = dc.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights)) # reshape into a 2D array
178 178
179 179 if not self.processingHeaderObj.shif_fft:
180 #shift right along axis 2 by a fixed number of positions
181 shift = int(self.processingHeaderObj.profilesPerBlock/2)
182 spc = numpy.roll( spc, shift , axis=2 )
180 # shift right along axis 2 by a fixed number of positions
181 shift = int(self.processingHeaderObj.profilesPerBlock / 2)
182 spc = numpy.roll(spc, shift , axis=2)
183 183
184 184 if self.processingHeaderObj.flag_cspc:
185 #shift right along axis 2 by a fixed number of positions
186 cspc = numpy.roll( cspc, shift, axis=2 )
185 # shift right along axis 2 by a fixed number of positions
186 cspc = numpy.roll(cspc, shift, axis=2)
187 187
188 #Dimensions : nChannels, nProfiles, nSamples
189 spc = numpy.transpose( spc, (0,2,1) )
188 # Dimensions : nChannels, nProfiles, nSamples
189 spc = numpy.transpose(spc, (0, 2, 1))
190 190 self.data_spc = spc
191 191
192 192 if self.processingHeaderObj.flag_cspc:
193 cspc = numpy.transpose( cspc, (0,2,1) )
194 self.data_cspc = cspc['real'] + cspc['imag']*1j
193 cspc = numpy.transpose(cspc, (0, 2, 1))
194 self.data_cspc = cspc['real'] + cspc['imag'] * 1j
195 195 else:
196 196 self.data_cspc = None
197 197
198 198 if self.processingHeaderObj.flag_dc:
199 self.data_dc = dc['real'] + dc['imag']*1j
199 self.data_dc = dc['real'] + dc['imag'] * 1j
200 200 else:
201 201 self.data_dc = None
202 202
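The roll-based FFT shift used in readBlock above, shown standalone (array sizes are arbitrary); rolling by half the profile count is equivalent to numpy.fft.fftshift along that axis for an even length:

import numpy

nChannels, nHeights, nProfiles = 2, 100, 64
spc = numpy.random.rand(nChannels, nHeights, nProfiles).astype(numpy.float32)

# Move the zero-frequency bin to the centre of axis 2
spc = numpy.roll(spc, int(nProfiles / 2), axis=2)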
@@ -219,12 +219,12 class SpectraReader(JRODataReader, ProcessingUnit):
219 219 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
220 220 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
221 221 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight
223 223 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
224 224 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
225 self.dataOut.flagShiftFFT = True #Data is always shifted
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #assume the data is not decoded
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #assume the data is not flipped
225 self.dataOut.flagShiftFFT = True # Data is always shifted
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data is not decoded
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data is not flipped
228 228
229 229 def getData(self):
230 230 """
@@ -253,11 +253,11 class SpectraReader(JRODataReader, ProcessingUnit):
253 253
254 254 if self.__hasNotDataInBuffer():
255 255
256 if not( self.readNextBlock() ):
256 if not(self.readNextBlock()):
257 257 self.dataOut.flagNoData = True
258 258 return 0
259 259
260 #data is a 3-dimensional numpy array (profiles, heights and channels)
260 # data is a 3-dimensional numpy array (profiles, heights and channels)
261 261
262 262 if self.data_spc is None:
263 263 self.dataOut.flagNoData = True
@@ -356,20 +356,20 class SpectraWriter(JRODataWriter, Operation):
356 356 Return: None
357 357 """
358 358
359 spc = numpy.transpose( self.data_spc, (0,2,1) )
359 spc = numpy.transpose(self.data_spc, (0, 2, 1))
360 360 if not self.processingHeaderObj.shif_fft:
361 spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift right along axis 2 by a fixed number of positions
361 spc = numpy.roll(spc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # shift right along axis 2 by a fixed number of positions
362 362 data = spc.reshape((-1))
363 363 data = data.astype(self.dtype[0])
364 364 data.tofile(self.fp)
365 365
366 366 if self.data_cspc is not None:
367 367
368 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
369 data = numpy.zeros( numpy.shape(cspc), self.dtype )
370 #print 'data.shape', self.shape_cspc_Buffer
368 cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
369 data = numpy.zeros(numpy.shape(cspc), self.dtype)
370 # print 'data.shape', self.shape_cspc_Buffer
371 371 if not self.processingHeaderObj.shif_fft:
372 cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #shift right along axis 2 by a fixed number of positions
372 cspc = numpy.roll(cspc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # shift right along axis 2 by a fixed number of positions
373 373 data['real'] = cspc.real
374 374 data['imag'] = cspc.imag
375 375 data = data.reshape((-1))
@@ -378,7 +378,7 class SpectraWriter(JRODataWriter, Operation):
378 378 if self.data_dc is not None:
379 379
380 380 dc = self.data_dc
381 data = numpy.zeros( numpy.shape(dc), self.dtype )
381 data = numpy.zeros(numpy.shape(dc), self.dtype)
382 382 data['real'] = dc.real
383 383 data['imag'] = dc.imag
384 384 data = data.reshape((-1))
@@ -453,15 +453,15 class SpectraWriter(JRODataWriter, Operation):
453 453 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
454 454
455 455 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
456 blocksize = (pts2write_SelfSpectra*dtype_width)
456 blocksize = (pts2write_SelfSpectra * dtype_width)
457 457
458 458 if self.dataOut.data_cspc is not None:
459 459 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
460 blocksize += (pts2write_CrossSpectra*dtype_width*2)
460 blocksize += (pts2write_CrossSpectra * dtype_width * 2)
461 461
462 462 if self.dataOut.data_dc is not None:
463 463 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
464 blocksize += (pts2write_DCchannels*dtype_width*2)
464 blocksize += (pts2write_DCchannels * dtype_width * 2)
465 465
466 466 # blocksize = blocksize #* datatypeValue * 2 #FIX THIS
467 467
@@ -485,12 +485,12 class SpectraWriter(JRODataWriter, Operation):
485 485 self.systemHeaderObj.nChannels = self.dataOut.nChannels
486 486 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
487 487
488 self.processingHeaderObj.dtype = 1 # Spectra
488 self.processingHeaderObj.dtype = 1 # Spectra
489 489 self.processingHeaderObj.blockSize = self.__getBlockSize()
490 490 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
491 491 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
492 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# required to determine the timeInterval value
492 self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the timeInterval value
494 494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
495 495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
496 496 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
@@ -524,4 +524,4 class SpectraWriter(JRODataWriter, Operation):
524 524
525 525 self.processingHeaderObj.processFlags = self.getProcessFlags()
526 526
527 self.setBasicHeader()
\ No newline at end of file
527 self.setBasicHeader()
@@ -46,7 +46,7 class USRPReader(ProcessingUnit):
46 46
47 47 def __getCurrentSecond(self):
48 48
49 return self.__thisUnixSample/self.__sample_rate
49 return self.__thisUnixSample / self.__sample_rate
50 50
51 51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 52
@@ -54,9 +54,9 class USRPReader(ProcessingUnit):
54 54 '''
55 55 In this method will be initialized every parameter of dataOut object (header, no data)
56 56 '''
57 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
57 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
58 58
59 nProfiles = 1.0/ippSeconds #Number of profiles in one second
59 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
60 60
61 61 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
62 62 txA=0,
@@ -67,7 +67,7 class USRPReader(ProcessingUnit):
67 67 deltaHeight=self.__deltaHeigth,
68 68 codeType=self.__codeType,
69 69 nCode=self.__nCode, nBaud=self.__nBaud,
70 code = self.__code)
70 code=self.__code)
71 71
72 72 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
73 73 nProfiles=nProfiles,
@@ -78,7 +78,7 class USRPReader(ProcessingUnit):
78 78
79 79 self.dataOut.data = None
80 80
81 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
81 self.dataOut.dtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
82 82
83 83 # self.dataOut.nChannels = 0
84 84
@@ -86,7 +86,7 class USRPReader(ProcessingUnit):
86 86
87 87 self.dataOut.nProfiles = nProfiles
88 88
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype=numpy.float) * self.__deltaHeigth
90 90
91 91 self.dataOut.channelList = self.__channelList
92 92
@@ -96,12 +96,12 class USRPReader(ProcessingUnit):
96 96
97 97 self.dataOut.flagNoData = True
98 98
99 #Set to TRUE if the data is discontinuous
99 # Set to TRUE if the data is discontinuous
100 100 self.dataOut.flagDiscontinuousBlock = False
101 101
102 102 self.dataOut.utctime = None
103 103
104 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
104 self.dataOut.timeZone = self.__timezone / 60 # timezone like jroheader, difference in minutes between UTC and localtime
105 105
106 106 self.dataOut.dstFlag = 0
107 107
@@ -109,16 +109,16 class USRPReader(ProcessingUnit):
109 109
110 110 self.dataOut.nCohInt = 1
111 111
112 self.dataOut.flagDecodeData = False #assume the data is already decoded
112 self.dataOut.flagDecodeData = False # assume the data is already decoded
113 113
114 self.dataOut.flagDeflipData = False #assume the data is not flipped
114 self.dataOut.flagDeflipData = False # assume the data is not flipped
115 115
116 116 self.dataOut.flagShiftFFT = False
117 117
118 118 self.dataOut.ippSeconds = ippSeconds
119 119
120 #Time interval between profiles
121 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
120 # Time interval between profiles
121 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
122 122
123 123 self.dataOut.frequency = self.__frequency
124 124
@@ -150,7 +150,7 class USRPReader(ProcessingUnit):
150 150 except:
151 151 timezone = 0
152 152
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0]) / sample_rate - timezone
154 154
155 155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
@@ -165,7 +165,7 class USRPReader(ProcessingUnit):
165 165
166 166 thisDatetime = startDatetime
167 167
168 while(thisDatetime<=endDatatime):
168 while(thisDatetime <= endDatatime):
169 169
170 170 thisDate = thisDatetime.date()
171 171
@@ -180,17 +180,17 class USRPReader(ProcessingUnit):
180 180
181 181 return dateList
182 182
183 def setup(self, path = None,
184 startDate = None,
185 endDate = None,
186 startTime = datetime.time(0,0,0),
187 endTime = datetime.time(23,59,59),
188 channelList = None,
189 nSamples = None,
190 ippKm = 60,
191 online = False,
192 delay = 60,
193 buffer_size = 1024,
183 def setup(self, path=None,
184 startDate=None,
185 endDate=None,
186 startTime=datetime.time(0, 0, 0),
187 endTime=datetime.time(23, 59, 59),
188 channelList=None,
189 nSamples=None,
190 ippKm=60,
191 online=False,
192 delay=60,
193 buffer_size=1024,
194 194 **kwargs):
195 195 '''
196 196 In this method we should set all initial parameters.
@@ -209,7 +209,7 class USRPReader(ProcessingUnit):
209 209 '''
210 210
211 211 if not os.path.isdir(path):
212 raise ValueError("[Reading] Directory %s does not exist" %path)
212 raise ValueError("[Reading] Directory %s does not exist" % path)
213 213
214 214 try:
215 215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
@@ -219,7 +219,7 class USRPReader(ProcessingUnit):
219 219 channelNameList = self.digitalReadObj.get_channels()
220 220
221 221 if not channelNameList:
222 raise ValueError("[Reading] Directory %s does not have any files" %path)
222 raise ValueError("[Reading] Directory %s does not have any files" % path)
223 223
224 224 if not channelList:
225 225 channelList = list(range(len(channelNameList)))
@@ -230,7 +230,7 class USRPReader(ProcessingUnit):
230 230
231 231 self.__sample_rate = metadata_dict['sample_rate'][0]
232 232 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
233 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
233 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate
234 234
235 235 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
236 236
@@ -266,8 +266,8 class USRPReader(ProcessingUnit):
266 266
267 267 if not ippKm:
268 268 try:
269 #seconds to km
270 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
269 # seconds to km
270 ippKm = 1e6 * 0.15 * this_metadata_file['ipp'].value
271 271 except:
272 272 ippKm = None
273 273
@@ -277,37 +277,37 class USRPReader(ProcessingUnit):
277 277
278 278 if startDate:
279 279 startDatetime = datetime.datetime.combine(startDate, startTime)
280 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
280 startUTCSecond = (startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
281 281
282 282 if endDate:
283 283 endDatetime = datetime.datetime.combine(endDate, endTime)
284 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
284 endUTCSecond = (endDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
285 285
286 286 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
287 287
288 288 if not startUTCSecond:
289 startUTCSecond = start_index/self.__sample_rate
289 startUTCSecond = start_index / self.__sample_rate
290 290
291 if start_index > startUTCSecond*self.__sample_rate:
292 startUTCSecond = start_index/self.__sample_rate
291 if start_index > startUTCSecond * self.__sample_rate:
292 startUTCSecond = start_index / self.__sample_rate
293 293
294 294 if not endUTCSecond:
295 endUTCSecond = end_index/self.__sample_rate
295 endUTCSecond = end_index / self.__sample_rate
296 296
297 if end_index < endUTCSecond*self.__sample_rate:
298 endUTCSecond = end_index/self.__sample_rate
297 if end_index < endUTCSecond * self.__sample_rate:
298 endUTCSecond = end_index / self.__sample_rate
299 299
300 300 if not nSamples:
301 301 if not ippKm:
302 302 raise ValueError("[Reading] nSamples or ippKm should be defined")
303 303
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
304 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
305 305
306 306 channelBoundList = []
307 307 channelNameListFiltered = []
308 308
309 309 for thisIndexChannel in channelList:
310 thisChannelName = channelNameList[thisIndexChannel]
310 thisChannelName = channelNameList[thisIndexChannel]
311 311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
312 312 channelBoundList.append((start_index, end_index))
313 313 channelNameListFiltered.append(thisChannelName)
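The ippKm-to-samples conversion above relies on 0.15 km of range per microsecond of sampling; a standalone version with a placeholder sample rate:

sample_rate = 1e6                               # samples per second (placeholder)
ippKm = 60.0                                    # inter-pulse period expressed in kilometres
delta_height_km = 1e6 * 0.15 / sample_rate      # range step per sample (0.15 km here)
nSamples = int(ippKm / delta_height_km)         # 400 samples for these values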
@@ -327,31 +327,31 class USRPReader(ProcessingUnit):
327 327 self.__channelNameList = channelNameListFiltered
328 328 self.__channelBoundList = channelBoundList
329 329 self.__nSamples = nSamples
330 self.__samples_to_read = int(buffer_size*nSamples)
330 self.__samples_to_read = int(buffer_size * nSamples)
331 331 self.__nChannels = len(self.__channelList)
332 332
333 333 self.__startUTCSecond = startUTCSecond
334 334 self.__endUTCSecond = endUTCSecond
335 335
336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
336 self.__timeInterval = 1.0 * self.__samples_to_read / self.__sample_rate # Time interval
337 337
338 338 if online:
339 339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
340 340 startUTCSecond = numpy.floor(endUTCSecond)
341 341
342 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
342 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
343 343
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype=numpy.complex)
345 345
346 346 self.__setFileHeader()
347 347 self.isConfig = True
348 348
349 print("[Reading] USRP Data was found from %s to %s " %(
349 print("[Reading] USRP Data was found from %s to %s " % (
350 350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
351 351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
352 352 ))
353 353
354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
354 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
355 355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
356 356 ))
357 357
@@ -375,13 +375,13 class USRPReader(ProcessingUnit):
375 375
376 376 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
377 377
378 if start_index > self.__startUTCSecond*self.__sample_rate:
379 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
378 if start_index > self.__startUTCSecond * self.__sample_rate:
379 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
380 380
381 if end_index > self.__endUTCSecond*self.__sample_rate:
382 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
381 if end_index > self.__endUTCSecond * self.__sample_rate:
382 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
383 383 print()
384 print("[Reading] New timerange found [%s, %s] " %(
384 print("[Reading] New timerange found [%s, %s] " % (
385 385 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
386 386 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
387 387 ))
@@ -390,21 +390,21 class USRPReader(ProcessingUnit):
390 390
391 391 return False
392 392
393 def __readNextBlock(self, seconds=30, volt_scale = 218776):
393 def __readNextBlock(self, seconds=30, volt_scale=218776):
394 394 '''
395 395 '''
396 396
397 #Set the next data
397 # Set the next data
398 398 self.__flagDiscontinuousBlock = False
399 399 self.__thisUnixSample += self.__samples_to_read
400 400
401 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
401 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
402 402 print("[Reading] There are no more data into selected time-range")
403 403
404 404 self.__reload()
405 405
406 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
407 self.__thisUnixSample -= self.__samples_to_read
406 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
407 self.__thisUnixSample -= self.__samples_to_read
408 408 return False
409 409
410 410 indexChannel = 0
@@ -419,30 +419,30 class USRPReader(ProcessingUnit):
419 419 thisChannelName)
420 420
421 421 except IOError as e:
422 #read next profile
422 # read next profile
423 423 self.__flagDiscontinuousBlock = True
424 print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
424 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
425 425 break
426 426
427 427 if result.shape[0] != self.__samples_to_read:
428 428 self.__flagDiscontinuousBlock = True
429 print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
429 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
430 430 result.shape[0],
431 431 self.__samples_to_read))
432 432 break
433 433
434 self.__data_buffer[indexChannel,:] = result*volt_scale
434 self.__data_buffer[indexChannel, :] = result * volt_scale
435 435
436 436 indexChannel += 1
437 437
438 438 dataOk = True
439 439
440 self.__utctime = self.__thisUnixSample/self.__sample_rate
440 self.__utctime = self.__thisUnixSample / self.__sample_rate
441 441
442 442 if not dataOk:
443 443 return False
444 444
445 print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
445 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
446 446 self.__samples_to_read,
447 447 self.__timeInterval))
448 448
@@ -486,7 +486,7 class USRPReader(ProcessingUnit):
486 486 if self.__readNextBlock():
487 487 break
488 488
489 if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
489 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
490 490 return False
491 491
492 492 if self.__flagDiscontinuousBlock:
@@ -500,11 +500,11 class USRPReader(ProcessingUnit):
500 500 if err_counter > nTries:
501 501 return False
502 502
503 print('[Reading] waiting %d seconds to read a new block' %seconds)
503 print('[Reading] waiting %d seconds to read a new block' % seconds)
504 504 sleep(seconds)
505 505
506 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
507 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
506 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
507 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
508 508 self.dataOut.flagNoData = False
509 509 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
510 510 self.dataOut.profileIndex = self.profileIndex
@@ -599,4 +599,4 if __name__ == '__main__':
599 599 while True:
600 600 readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
601 601 # readObj.printInfo()
602 readObj.printNumberOfBlock() No newline at end of file
602 readObj.printNumberOfBlock()
@@ -22,7 +22,7 class USRPReaderAPI(USRPReader, threading.Thread):
22 22
23 23 # __isBufferEmpty = True
24 24
25 __DATAKEYLIST = ['data','utctime','flagNoData']
25 __DATAKEYLIST = ['data', 'utctime', 'flagNoData']
26 26
27 27 def __init__(self, serializer='msgpack'):
28 28
@@ -71,7 +71,7 class USRPReaderAPI(USRPReader, threading.Thread):
71 71 return None
72 72
73 73 myMetadataSerial = obj2Serial(self.dataOut,
74 serializer = self.__serializer)
74 serializer=self.__serializer)
75 75
76 76 return myMetadataSerial
77 77
@@ -125,8 +125,8 class USRPReaderAPI(USRPReader, threading.Thread):
125 125 print(".", end=' ')
126 126
127 127 self.__mySerial = obj2Serial(self.dataOut,
128 keyList = self.__DATAKEYLIST,
129 serializer = self.__serializer)
128 keyList=self.__DATAKEYLIST,
129 serializer=self.__serializer)
130 130 self.__isBufferEmpty = False
131 131
132 132 # print self.profileIndex
@@ -136,4 +136,4 class USRPReaderAPI(USRPReader, threading.Thread):
136 136
137 137 print("Closing thread")
138 138
139 return No newline at end of file
139 return
@@ -81,7 +81,7 class VoltageReader(JRODataReader, ProcessingUnit):
81 81 self.radarControllerHeaderObj = RadarControllerHeader()
82 82 self.processingHeaderObj = ProcessingHeader()
83 83 self.lastUTTime = 0
84 self.profileIndex = 2**32 - 1
84 self.profileIndex = 2 ** 32 - 1
85 85 self.dataOut = Voltage()
86 86 self.selBlocksize = None
87 87 self.selBlocktime = None
@@ -251,7 +251,7 class VoltageReader(JRODataReader, ProcessingUnit):
251 251
252 252 self.firstHeaderSize = self.basicHeaderObj.size
253 253
254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
255 255 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
256 256 if datatype == 0:
257 257 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
@@ -269,7 +269,7 class VoltageReader(JRODataReader, ProcessingUnit):
269 269 raise ValueError('Data type was not defined')
270 270
271 271 self.dtype = datatype_str
272 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
272 # self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
273 273 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
274 274 self.firstHeaderSize + self.basicHeaderSize * \
275 275 (self.processingHeaderObj.dataBlocksPerFile - 1)
@@ -448,7 +448,7 class VoltageReader(JRODataReader, ProcessingUnit):
448 448 blockIndex = self.selBlocksize - datasize
449 449 datablock1 = self.datablock[:, :blockIndex, :]
450 450
451 buffer[:, datasize:datasize +
451 buffer[:, datasize:datasize +
452 452 datablock1.shape[1], :] = datablock1
453 453 datasize += datablock1.shape[1]
454 454
@@ -480,7 +480,7 class VoltageWriter(JRODataWriter, Operation):
480 480
481 481 shapeBuffer = None
482 482
483 def __init__(self):#, **kwargs):
483 def __init__(self): # , **kwargs):
484 484 """
485 485 Initializer of the VoltageWriter class for writing spectra data.
486 486
@@ -489,7 +489,7 class VoltageWriter(JRODataWriter, Operation):
489 489
490 490 Return: None
491 491 """
492 Operation.__init__(self)#, **kwargs)
492 Operation.__init__(self) # , **kwargs)
493 493
494 494 self.nTotalBlocks = 0
495 495
@@ -624,7 +624,7 class VoltageWriter(JRODataWriter, Operation):
624 624
625 625 dtype_width = self.getDtypeWidth()
626 626
627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
628 628 self.profilesPerBlock * dtype_width * 2)
629 629
630 630 return blocksize
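As a quick sanity check on the block-size formula above, here is a minimal sketch with assumed header values (none of these numbers come from a real processing header); dtype_width is the byte width of one real component and the trailing factor 2 accounts for the real/imaginary pair:

    # Hypothetical values, for illustration only
    nHeights = 100
    nChannels = 2
    profilesPerBlock = 128
    dtype_width = 2   # bytes per component, e.g. short integers

    blocksize = int(nHeights * nChannels * profilesPerBlock * dtype_width * 2)
    print(blocksize)  # 102400 bytes per data block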
@@ -673,4 +673,4 class VoltageWriter(JRODataWriter, Operation):
673 673 self.processingHeaderObj.processFlags = self.getProcessFlags()
674 674
675 675 self.setBasicHeader()
676 No newline at end of file
676
@@ -25,7 +25,7 FILE_HEADER_STRUCTURE = numpy.dtype([
25 25 ('navg', 'f'),
26 26 ('fh', 'f'),
27 27 ('dh', 'f'),
28 ('nheights', 'f'),
28 ('nheights', 'f'),
29 29 ('ipp', 'f')
30 30 ])
31 31
@@ -82,7 +82,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
82 82 path=None,
83 83 startDate=None,
84 84 endDate=None,
85 ext=None,
85 ext=None,
86 86 startTime=datetime.time(0, 0, 0),
87 87 endTime=datetime.time(23, 59, 59),
88 88 timezone=0,
@@ -146,7 +146,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
146 146 continue
147 147
148 148 year, month, day = int(year), int(month), int(day)
149 dateFile = datetime.date(year+2000, month, day)
149 dateFile = datetime.date(year + 2000, month, day)
150 150
151 151 if (startDate > dateFile) or (endDate < dateFile):
152 152 continue
@@ -259,7 +259,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
259 259 nheights = int(self.header_rec['nheights'])
260 260 hours = float(self.header_rec['hours'][0])
261 261 heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy - 1, hours=hours)
263 263 return heights, datatime
264 264 else:
265 265 return False
@@ -269,30 +269,30 class JULIAParamReader(JRODataReader, ProcessingUnit):
269 269 Parse data
270 270 '''
271 271
272 buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
272 buffer = numpy.fromfile(self.fp, 'f', 8 * N).reshape(N, 8)
273 273
274 274 pow0 = buffer[:, 0]
275 275 pow1 = buffer[:, 1]
276 acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
277 acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
278 dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
276 acf0 = (buffer[:, 2] + buffer[:, 3] * 1j) / pow0
277 acf1 = (buffer[:, 4] + buffer[:, 5] * 1j) / pow1
278 dccf = (buffer[:, 6] + buffer[:, 7] * 1j) / (pow0 * pow1)
279 279
280 ### SNR
280 # ## SNR
281 281 sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
282 282 sno10 = numpy.log10(sno)
283 283 # dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
284 284
285 ### Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
285 # ## Vertical Drift
286 sp = numpy.sqrt(numpy.abs(acf0) * numpy.abs(acf1))
287 287 sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
288 288
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag,acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)
290 dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
289 vzo = -numpy.arctan2(acf0.imag + acf1.imag, acf0.real + acf1.real) * 1.5E5 * 1.5 / (self.ipp * numpy.pi)
290 dvzo = numpy.sqrt(1.0 - sp * sp) * 0.338 * 1.5E5 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * sp * self.ipp)
291 291 err = numpy.where(dvzo <= 0.1)
292 292 dvzo[err] = 0.1
293 293
294 #Zonal Drifts
295 dt = self.header_file['nint']*self.ipp / 1.5E5
294 # Zonal Drifts
295 dt = self.header_file['nint'] * self.ipp / 1.5E5
296 296 coh = numpy.sqrt(numpy.abs(dccf))
297 297 err = numpy.where(coh >= 1.0)
298 298 coh[err] = numpy.sqrt(0.99999)
@@ -300,8 +300,8 class JULIAParamReader(JRODataReader, ProcessingUnit):
300 300 err = numpy.where(coh <= 0.1)
301 301 coh[err] = numpy.sqrt(0.1)
302 302
303 vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)
304 dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
303 vxo = numpy.arctan2(dccf.imag, dccf.real) * self.header_rec['h0'] * 1.0E3 / (self.kd * dt)
304 dvxo = numpy.sqrt(1.0 - coh * coh) * self.header_rec['h0'] * 1.0E3 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * coh * self.kd * dt)
305 305
306 306 err = numpy.where(dvxo <= 0.1)
307 307 dvxo[err] = 0.1
@@ -315,7 +315,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
315 315
316 316 self.dataOut.data_snr = self.buffer[4].reshape(1, -1)
317 317 self.dataOut.heightList = self.heights
318 self.dataOut.data_param = self.buffer[0:4,]
318 self.dataOut.data_param = self.buffer[0:4, ]
319 319 self.dataOut.utctimeInit = self.time
320 320 self.dataOut.utctime = self.time
321 321 self.dataOut.useLocalTime = True
@@ -340,4 +340,4 class JULIAParamReader(JRODataReader, ProcessingUnit):
340 340
341 341 self.set_output()
342 342
343 return 1 No newline at end of file
343 return 1
@@ -59,7 +59,7 class PXReader(JRODataReader, ProcessingUnit):
59 59 self.endDate = endDate
60 60 self.startTime = startTime
61 61 self.endTime = endTime
62 self.datatime = datetime.datetime(1900,1,1)
62 self.datatime = datetime.datetime(1900, 1, 1)
63 63 self.walk = walk
64 64 self.nTries = kwargs.get('nTries', 10)
65 65 self.online = kwargs.get('online', False)
@@ -65,7 +65,7 class BLTRParametersProc(ProcessingUnit):
65 65 self.dataOut.heightList = self.dataOut.height[0]
66 66 self.dataOut.data_snr = self.dataOut.data_snr[mode]
67 67 SNRavg = numpy.average(self.dataOut.data_snr, axis=0)
68 SNRavgdB = 10*numpy.log10(SNRavg)
68 SNRavgdB = 10 * numpy.log10(SNRavg)
69 69 self.dataOut.data_snr_avg_db = SNRavgdB.reshape(1, *SNRavgdB.shape)
70 70
71 71 # Censoring Data
@@ -114,29 +114,29 class OutliersFilter(Operation):
114 114 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
115 115 minvalid = npoints
116 116
117 #only if the number of valid values is greater than the minimum required (10%)
117 # only if the number of valid values is greater than the minimum required (10%)
118 118 if nvalues_valid > minvalid:
119 119
120 120 if method == 0:
121 #SMOOTH
121 # SMOOTH
122 122 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
123 123
124 124
125 125 if method == 1:
126 #MEDIAN
127 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
126 # MEDIAN
127 w = value_temp[h] - self.Median(input=value_temp[h], width=npoints)
128 128
129 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
129 dw = numpy.std(w[numpy.where(numpy.isfinite(w))], ddof=1)
130 130
131 threshold = dw*factor
132 value_temp[numpy.where(w > threshold),h] = numpy.nan
133 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
131 threshold = dw * factor
132 value_temp[numpy.where(w > threshold), h] = numpy.nan
133 value_temp[numpy.where(w < -1 * threshold), h] = numpy.nan
134 134
135 135
136 #At the end
136 # At the end
137 137 if svalue2 == 'inHeight':
138 138 value_temp = numpy.transpose(value_temp)
139 output_array[:,m] = value_temp
139 output_array[:, m] = value_temp
140 140
141 141 if svalue == 'zonal':
142 142 self.dataOut.data_output[0] = output_array
@@ -150,7 +150,7 class OutliersFilter(Operation):
150 150 return self.dataOut.data_output
151 151
152 152
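The filtering loop above boils down to: detrend each height profile with a smoothed or median estimate, then blank every point whose residual exceeds factor times the residual standard deviation. A self-contained sketch of that idea (function and variable names are illustrative, not part of the class):

    import numpy

    def flag_outliers(values, trend, factor):
        # residual against the local trend, ignoring NaNs already present
        w = values - trend
        dw = numpy.std(w[numpy.isfinite(w)], ddof=1)
        cleaned = values.copy()
        cleaned[w > factor * dw] = numpy.nan
        cleaned[w < -factor * dw] = numpy.nan
        return cleaned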
153 def Median(self,input,width):
153 def Median(self, input, width):
154 154 '''
155 155 Inputs:
156 156 input - Velocity array
@@ -158,26 +158,26 class OutliersFilter(Operation):
158 158
159 159 '''
160 160
161 if numpy.mod(width,2) == 1:
161 if numpy.mod(width, 2) == 1:
162 162 pc = int((width - 1) / 2)
163 163 cont = 0
164 164 output = []
165 165
166 166 for i in range(len(input)):
167 167 if i >= pc and i < len(input) - pc:
168 new2 = input[i-pc:i+pc+1]
168 new2 = input[i - pc:i + pc + 1]
169 169 temp = numpy.where(numpy.isfinite(new2))
170 170 new = new2[temp]
171 171 value = numpy.median(new)
172 172 output.append(value)
173 173
174 174 output = numpy.array(output)
175 output = numpy.hstack((input[0:pc],output))
176 output = numpy.hstack((output,input[-pc:len(input)]))
175 output = numpy.hstack((input[0:pc], output))
176 output = numpy.hstack((output, input[-pc:len(input)]))
177 177
178 178 return output
179 179
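For reference, the same NaN-aware running median as Median above, written as a standalone sketch (width must be odd and the first/last pc samples are passed through unfiltered); the toy velocity array is assumed only for the demo:

    import numpy

    def running_median(x, width):
        pc = (width - 1) // 2
        out = []
        for i in range(pc, len(x) - pc):
            window = x[i - pc:i + pc + 1]
            out.append(numpy.median(window[numpy.isfinite(window)]))
        return numpy.hstack((x[:pc], out, x[-pc:]))

    vel = numpy.array([1.0, 1.2, 9.0, 1.1, 1.3, 1.0, 1.2])
    print(running_median(vel, 3))  # the spike at index 2 is replaced by a local median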
180 def Smooth(self,input,width,edge_truncate = None):
180 def Smooth(self, input, width, edge_truncate=None):
181 181 '''
182 182 Inputs:
183 183 input - Velocity array
@@ -186,17 +186,17 class OutliersFilter(Operation):
186 186
187 187 '''
188 188
189 if numpy.mod(width,2) == 0:
189 if numpy.mod(width, 2) == 0:
190 190 real_width = width + 1
191 191 nzeros = width / 2
192 192 else:
193 193 real_width = width
194 194 nzeros = (width - 1) / 2
195 195
196 half_width = int(real_width)/2
196 half_width = int(real_width) / 2
197 197 length = len(input)
198 198
199 gate = numpy.ones(real_width,dtype='float')
199 gate = numpy.ones(real_width, dtype='float')
200 200 norm_of_gate = numpy.sum(gate)
201 201
202 202 nan_process = 0
@@ -208,21 +208,21 class OutliersFilter(Operation):
208 208 input[nan_id] = 0.
209 209
210 210 if edge_truncate == True:
211 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
211 output = numpy.convolve(input / norm_of_gate, gate, mode='same')
212 212 elif edge_truncate == False or edge_truncate == None:
213 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
214 output = numpy.hstack((input[0:half_width],output))
215 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
213 output = numpy.convolve(input / norm_of_gate, gate, mode='valid')
214 output = numpy.hstack((input[0:half_width], output))
215 output = numpy.hstack((output, input[len(input) - half_width:len(input)]))
216 216
217 217 if nan_process:
218 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
219 pb = numpy.hstack((numpy.zeros(half_width),pb))
220 pb = numpy.hstack((pb,numpy.zeros(half_width)))
218 pb = numpy.convolve(pb / norm_of_gate, gate, mode='valid')
219 pb = numpy.hstack((numpy.zeros(half_width), pb))
220 pb = numpy.hstack((pb, numpy.zeros(half_width)))
221 221 output[numpy.where(pb > 0.9999)] = numpy.nan
222 222 input[nan_id] = numpy.nan
223 223 return output
224 224
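The edge_truncate switch above maps onto the numpy.convolve mode: 'same' keeps the input length (edge bins are averaged against implicit zeros), while 'valid' only filters the fully overlapped interior and the raw edges are stitched back on. A toy illustration (values assumed):

    import numpy

    x = numpy.array([1., 2., 3., 4., 5.])
    gate = numpy.ones(3)

    # edge_truncate=True -> same length as the input
    same = numpy.convolve(x / gate.sum(), gate, mode='same')

    # edge_truncate=False or None -> filter the interior, keep the raw edges
    valid = numpy.convolve(x / gate.sum(), gate, mode='valid')
    stitched = numpy.hstack((x[:1], valid, x[-1:]))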
225 def Average(self,aver=0,nhaver=1):
225 def Average(self, aver=0, nhaver=1):
226 226 '''
227 227 Inputs:
228 228 aver - Indicates the time period over which is averaged or consensus data
@@ -235,27 +235,27 class OutliersFilter(Operation):
235 235 lat_huancayo = -12.04
236 236 lat_porcuya = -5.8
237 237
238 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
238 if '%2.2f' % self.dataOut.lat == '%2.2f' % lat_piura:
239 239 hcm = 3.
240 240 if self.dataOut.year == 2003 :
241 241 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
242 242 nhpoints = 12
243 243
244 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
244 elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_huancayo:
245 245 hcm = 3.
246 246 if self.dataOut.year == 2003 :
247 247 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
248 248 nhpoints = 12
249 249
250 250
251 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
252 hcm = 5.#2
251 elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_porcuya:
252 hcm = 5. # 2
253 253
254 254 pdata = 0.2
255 taver = [1,2,3,4,6,8,12,24]
255 taver = [1, 2, 3, 4, 6, 8, 12, 24]
256 256 t0 = 0
257 257 tf = 24
258 ntime =(tf-t0)/taver[aver]
258 ntime = (tf - t0) / taver[aver]
259 259 ti = numpy.arange(ntime)
260 260 tf = numpy.arange(ntime) + taver[aver]
261 261
@@ -263,11 +263,11 class OutliersFilter(Operation):
263 263 old_height = self.dataOut.heightList
264 264
265 265 if nhaver > 1:
266 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
267 deltha = 0.05*nhaver
268 minhvalid = pdata*nhaver
266 num_hei = len(self.dataOut.heightList) / nhaver / self.dataOut.nmodes
267 deltha = 0.05 * nhaver
268 minhvalid = pdata * nhaver
269 269 for im in range(self.dataOut.nmodes):
270 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
270 new_height = numpy.arange(num_hei) * deltha + self.dataOut.height[im, 0] + deltha / 2.
271 271
272 272
273 273 data_fHeigths_List = []
@@ -280,8 +280,8 class OutliersFilter(Operation):
280 280 for i in range(ntime):
281 281 height = old_height
282 282
283 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
284 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
283 start = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(ti[i])) - datetime.timedelta(hours=5)
284 stop = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(tf[i])) - datetime.timedelta(hours=5)
285 285
286 286
287 287 limit_sec1 = time.mktime(start.timetuple())
@@ -295,17 +295,17 class OutliersFilter(Operation):
295 295 time_select.append(val_sec)
296 296
297 297
298 time_select = numpy.array(time_select,dtype = 'int')
299 minvalid = numpy.ceil(pdata*nhpoints)
298 time_select = numpy.array(time_select, dtype='int')
299 minvalid = numpy.ceil(pdata * nhpoints)
300 300
301 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
302 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
303 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
301 zon_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
302 mer_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
303 ver_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
304 304
305 305 if nhaver > 1:
306 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
307 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
308 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
306 new_zon_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
307 new_mer_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
308 new_ver_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
309 309
310 310 if len(time_select) > minvalid:
311 311 time_average = self.f_timesec[time_select]
@@ -313,27 +313,27 class OutliersFilter(Operation):
313 313 for im in range(self.dataOut.nmodes):
314 314
315 315 for ih in range(self.dataOut.nranges):
316 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
317 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
316 if numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im])) >= minvalid:
317 zon_aver[ih, im] = numpy.nansum(self.f_zon[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im]))
318 318
319 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
320 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
319 if numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im])) >= minvalid:
320 mer_aver[ih, im] = numpy.nansum(self.f_mer[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im]))
321 321
322 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
323 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
322 if numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im])) >= minvalid:
323 ver_aver[ih, im] = numpy.nansum(self.f_ver[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im]))
324 324
325 325 if nhaver > 1:
326 326 for ih in range(num_hei):
327 hvalid = numpy.arange(nhaver) + nhaver*ih
327 hvalid = numpy.arange(nhaver) + nhaver * ih
328 328
329 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
330 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
329 if numpy.sum(numpy.isfinite(zon_aver[hvalid, im])) >= minvalid:
330 new_zon_aver[ih, im] = numpy.nansum(zon_aver[hvalid, im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid, im]))
331 331
332 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
333 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
332 if numpy.sum(numpy.isfinite(mer_aver[hvalid, im])) >= minvalid:
333 new_mer_aver[ih, im] = numpy.nansum(mer_aver[hvalid, im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid, im]))
334 334
335 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
336 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
335 if numpy.sum(numpy.isfinite(ver_aver[hvalid, im])) >= minvalid:
336 new_ver_aver[ih, im] = numpy.nansum(ver_aver[hvalid, im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid, im]))
337 337 if nhaver > 1:
338 338 zon_aver = new_zon_aver
339 339 mer_aver = new_mer_aver
@@ -352,7 +352,7 class OutliersFilter(Operation):
352 352 minute = startTime.tm_min
353 353 second = startTime.tm_sec
354 354
355 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
355 startDTList.append(datetime.datetime(year, month, day, hour, minute, second))
356 356
357 357
358 358 o_height = numpy.array([])
@@ -363,17 +363,17 class OutliersFilter(Operation):
363 363 for im in range(self.dataOut.nmodes):
364 364
365 365 if im == 0:
366 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
366 h_select = numpy.where(numpy.bitwise_and(height[0, :] >= 0, height[0, :] <= hcm, numpy.isfinite(height[0, :])))
367 367 else:
368 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
368 h_select = numpy.where(numpy.bitwise_and(height[1, :] > hcm, height[1, :] < 20, numpy.isfinite(height[1, :])))
369 369
370 370
371 371 ht = h_select[0]
372 372
373 o_height = numpy.hstack((o_height,height[im,ht]))
374 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
375 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
376 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
373 o_height = numpy.hstack((o_height, height[im, ht]))
374 o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
375 o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
376 o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
377 377
378 378 data_fHeigths_List.append(o_height)
379 379 data_fZonal_List.append(o_zon_aver)
@@ -382,12 +382,12 class OutliersFilter(Operation):
382 382
383 383
384 384 else:
385 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
385 h_select = numpy.where(numpy.bitwise_and(height[0, :] <= hcm, numpy.isfinite(height[0, :])))
386 386 ht = h_select[0]
387 o_height = numpy.hstack((o_height,height[im,ht]))
388 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
389 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
390 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
387 o_height = numpy.hstack((o_height, height[im, ht]))
388 o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
389 o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
390 o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
391 391
392 392 data_fHeigths_List.append(o_height)
393 393 data_fZonal_List.append(o_zon_aver)
@@ -24,14 +24,14 class PrintInfoAMISR(Operation):
24 24 def run(self, dataOut):
25 25
26 26 if not self.__isPrinted:
27 print('Number of Records by File: %d'%dataOut.nRecords)
28 print('Number of Pulses: %d'%dataOut.nProfiles)
29 print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
30 print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
31 print('Ipp Seconds: %f'%dataOut.ippSeconds)
32 print('Number of Beams: %d'%dataOut.nBeams)
27 print('Number of Records by File: %d' % dataOut.nRecords)
28 print('Number of Pulses: %d' % dataOut.nProfiles)
29 print('Number of Pulses by Frame: %d' % dataOut.npulseByFrame)
30 print('Number of Samples by Pulse: %d' % len(dataOut.heightList))
31 print('Ipp Seconds: %f' % dataOut.ippSeconds)
32 print('Number of Beams: %d' % dataOut.nBeams)
33 33 print('BeamCodes:')
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f' % (k, v[0], v[1], v[2], v[3]) for k, v in list(dataOut.beamCodeDict.items())]
35 35 for b in beamStrList:
36 36 print(b)
37 37 self.__isPrinted = True
@@ -119,7 +119,7 class ProfileToChannels(Operation):
119 119 if not(self.__isConfig):
120 120 nchannels = len(list(dataOut.beamRangeDict.keys()))
121 121 nsamples = dataOut.nHeights
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype='complex128')
123 123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
124 124 dataOut.beam.azimuthList = [dataOut.beamCodeDict[x][1] for x in range(nchannels)]
125 125 dataOut.beam.zenithList = [dataOut.beamCodeDict[x][2] for x in range(nchannels)]
@@ -127,7 +127,7 class ProfileToChannels(Operation):
127 127
128 128 for i in range(self.buffer.shape[0]):
129 129 if dataOut.profileIndex in dataOut.beamRangeDict[i]:
130 self.buffer[i,:] = dataOut.data
130 self.buffer[i, :] = dataOut.data
131 131 break
132 132
133 133
@@ -140,4 +140,4 class ProfileToChannels(Operation):
140 140 self.__isConfig = False
141 141 dataOut.flagNoData = False
142 142 pass
143 No newline at end of file
143
@@ -198,6 +198,6 def MPDecorator(BaseClass):
198 198 def close(self):
199 199
200 200 BaseClass.close(self)
201 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
201 log.success('Done...(Time:{:4.2f} secs)'.format(time.time() - self.start_time), self.name)
202 202
203 203 return MPClass
@@ -30,7 +30,7 class CorrelationProc(ProcessingUnit):
30 30 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
31 31 self.dataOut.channelList = self.dataIn.channelList
32 32 self.dataOut.heightList = self.dataIn.heightList
33 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
34 34 # self.dataOut.nHeights = self.dataIn.nHeights
35 35 # self.dataOut.nChannels = self.dataIn.nChannels
36 36 self.dataOut.nBaud = self.dataIn.nBaud
@@ -39,8 +39,8 class CorrelationProc(ProcessingUnit):
39 39 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
40 40 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
41 41 self.dataOut.utctime = self.firstdatatime
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data has no flip applied
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assuming the data is already decoded
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assuming the data has no flip applied
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 # self.dataOut.nIncohInt = 1
46 46 self.dataOut.ippSeconds = self.dataIn.ippSeconds
@@ -56,39 +56,39 class CorrelationProc(ProcessingUnit):
56 56 nChannel = jspectra.shape[0]
57 57
58 58 for i in range(nChannel):
59 jspectra_tmp = jspectra[i,:,:]
60 jspectra_DC = numpy.mean(jspectra_tmp,axis = 0)
59 jspectra_tmp = jspectra[i, :, :]
60 jspectra_DC = numpy.mean(jspectra_tmp, axis=0)
61 61
62 62 jspectra_tmp = jspectra_tmp - jspectra_DC
63 jspectra[i,:,:] = jspectra_tmp
63 jspectra[i, :, :] = jspectra_tmp
64 64
65 65 return jspectra
66 66
67 67
68 def removeNoise(self, mode = 2):
68 def removeNoise(self, mode=2):
69 69 indR = numpy.where(self.dataOut.lagR == 0)[0][0]
70 70 indT = numpy.where(self.dataOut.lagT == 0)[0][0]
71 71
72 jspectra = self.dataOut.data_corr[:,:,indR,:]
72 jspectra = self.dataOut.data_corr[:, :, indR, :]
73 73
74 74 num_chan = jspectra.shape[0]
75 75 num_hei = jspectra.shape[2]
76 76
77 77 freq_dc = indT
78 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
78 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
79 79
80 80 NPot = self.dataOut.getNoise(mode)
81 jspectra[:,freq_dc,:] = jspectra[:,freq_dc,:] - NPot
82 SPot = jspectra[:,freq_dc,:]
81 jspectra[:, freq_dc, :] = jspectra[:, freq_dc, :] - NPot
82 SPot = jspectra[:, freq_dc, :]
83 83 pairsAutoCorr = self.dataOut.getPairsAutoCorr()
84 84 # self.dataOut.signalPotency = SPot
85 85 self.dataOut.noise = NPot
86 self.dataOut.SNR = (SPot/NPot)[pairsAutoCorr]
87 self.dataOut.data_corr[:,:,indR,:] = jspectra
86 self.dataOut.SNR = (SPot / NPot)[pairsAutoCorr]
87 self.dataOut.data_corr[:, :, indR, :] = jspectra
88 88
89 89 return 1
90 90
91 def run(self, lags=None, mode = 'time', pairsList=None, fullBuffer=False, nAvg = 1, removeDC = False, splitCF=False):
91 def run(self, lags=None, mode='time', pairsList=None, fullBuffer=False, nAvg=1, removeDC=False, splitCF=False):
92 92
93 93 self.dataOut.flagNoData = True
94 94
@@ -118,19 +118,19 class CorrelationProc(ProcessingUnit):
118 118 # acf_pairs = numpy.arange(len(ccfList),len(pairsList))
119 119 self.__updateObjFromVoltage()
120 120 #----------------------------------------------------------------------
121 #Creating temporal buffers
121 # Creating temporal buffers
122 122 if fullBuffer:
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype = 'complex')*numpy.nan
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype='complex') * numpy.nan
124 124 elif mode == 'time':
125 125 if lags == None:
126 lags = numpy.arange(-nProfiles+1, nProfiles)
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights),dtype='complex')
126 lags = numpy.arange(-nProfiles + 1, nProfiles)
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights), dtype='complex')
128 128 elif mode == 'height':
129 129 if lags == None:
130 lags = numpy.arange(-nHeights+1, nHeights)
131 tmp = numpy.zeros(len(pairsList), (len(lags), nProfiles),dtype='complex')
130 lags = numpy.arange(-nHeights + 1, nHeights)
131 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles), dtype='complex')
132 132
133 #For loop
133 # For loop
134 134 for l in range(len(pairsList)):
135 135
136 136 ch0 = pairsList[l][0]
@@ -141,23 +141,23 class CorrelationProc(ProcessingUnit):
141 141
142 142 if idx >= 0:
143 143 if mode == 'time':
144 ccf0 = data_pre[ch0,:nProfiles-idx,:]*numpy.conj(data_pre[ch1,idx:,:]) #time
144 ccf0 = data_pre[ch0, :nProfiles - idx, :] * numpy.conj(data_pre[ch1, idx:, :]) # time
145 145 else:
146 ccf0 = data_pre[ch0,:,nHeights-idx]*numpy.conj(data_pre[ch1,:,idx:]) #heights
146 ccf0 = data_pre[ch0, :, nHeights - idx] * numpy.conj(data_pre[ch1, :, idx:]) # heights
147 147 else:
148 148 if mode == 'time':
149 ccf0 = data_pre[ch0,-idx:,:]*numpy.conj(data_pre[ch1,:nProfiles+idx,:]) #time
149 ccf0 = data_pre[ch0, -idx:, :] * numpy.conj(data_pre[ch1, :nProfiles + idx, :]) # time
150 150 else:
151 ccf0 = data_pre[ch0,:,-idx:]*numpy.conj(data_pre[ch1,:,:nHeights+idx]) #heights
151 ccf0 = data_pre[ch0, :, -idx:] * numpy.conj(data_pre[ch1, :, :nHeights + idx]) # heights
152 152
153 153 if fullBuffer:
154 tmp[l,i,:ccf0.shape[0],:] = ccf0
154 tmp[l, i, :ccf0.shape[0], :] = ccf0
155 155 else:
156 tmp[l,i,:] = numpy.sum(ccf0, axis=0)
156 tmp[l, i, :] = numpy.sum(ccf0, axis=0)
157 157
158 158 #-----------------------------------------------------------------
159 159 if fullBuffer:
160 tmp = numpy.sum(numpy.reshape(tmp,(tmp.shape[0],tmp.shape[1],tmp.shape[2]/nAvg,nAvg,tmp.shape[3])),axis=3)
160 tmp = numpy.sum(numpy.reshape(tmp, (tmp.shape[0], tmp.shape[1], tmp.shape[2] / nAvg, nAvg, tmp.shape[3])), axis=3)
161 161 self.dataOut.nAvg = nAvg
162 162
163 163 self.dataOut.data_cf = tmp
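A minimal numpy sketch of the single-lag product accumulated above in 'time' mode (toy shapes and random data, names illustrative): for a positive lag idx, channel ch0 is multiplied by the conjugate of channel ch1 shifted by idx profiles, then summed over profiles to give one complex value per height.

    import numpy

    nProfiles, nHeights = 8, 4
    v0 = numpy.random.randn(nProfiles, nHeights) + 1j * numpy.random.randn(nProfiles, nHeights)
    v1 = numpy.random.randn(nProfiles, nHeights) + 1j * numpy.random.randn(nProfiles, nHeights)

    idx = 2  # lag in profiles
    ccf_lag = numpy.sum(v0[:nProfiles - idx, :] * numpy.conj(v1[idx:, :]), axis=0)
    print(ccf_lag.shape)  # (4,) -> one complex value per height for this lag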
@@ -166,12 +166,12 class CorrelationProc(ProcessingUnit):
166 166 self.dataOut.pairsList = pairsList
167 167 self.dataOut.nPairs = len(pairsList)
168 168
169 #Compute the normalization factors
169 # Compute the normalization factors
170 170 if mode == 'time':
171 delta = self.dataIn.ippSeconds*self.dataIn.nCohInt
171 delta = self.dataIn.ippSeconds * self.dataIn.nCohInt
172 172 else:
173 173 delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
174 self.dataOut.lagRange = numpy.array(lags)*delta
174 self.dataOut.lagRange = numpy.array(lags) * delta
175 175 # self.dataOut.nCohInt = self.dataIn.nCohInt*nAvg
176 176 self.dataOut.flagNoData = False
177 177 # a = self.dataOut.normFactor
@@ -8,9 +8,9 from schainpy.utils import log
8 8
9 9 class SpectraHeisProc(ProcessingUnit):
10 10
11 def __init__(self):#, **kwargs):
11 def __init__(self): # , **kwargs):
12 12
13 ProcessingUnit.__init__(self)#, **kwargs)
13 ProcessingUnit.__init__(self) # , **kwargs)
14 14
15 15 # self.buffer = None
16 16 # self.firstdatatime = None
@@ -24,12 +24,12 class SpectraHeisProc(ProcessingUnit):
24 24 self.dataOut.errorCount = self.dataIn.errorCount
25 25 self.dataOut.useLocalTime = self.dataIn.useLocalTime
26 26
27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy() #
28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy() #
29 29 self.dataOut.channelList = self.dataIn.channelList
30 30 self.dataOut.heightList = self.dataIn.heightList
31 31 # self.dataOut.dtype = self.dataIn.dtype
32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
32 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
33 33 # self.dataOut.nHeights = self.dataIn.nHeights
34 34 # self.dataOut.nChannels = self.dataIn.nChannels
35 35 self.dataOut.nBaud = self.dataIn.nBaud
@@ -45,8 +45,8 class SpectraHeisProc(ProcessingUnit):
45 45 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
46 46 self.dataOut.utctime = self.dataIn.utctime
47 47 # self.dataOut.utctime = self.firstdatatime
48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data has no flip applied
48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assuming the data is already decoded
49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assuming the data has no flip applied
50 50 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
51 51 self.dataOut.nCohInt = self.dataIn.nCohInt
52 52 self.dataOut.nIncohInt = 1
@@ -78,8 +78,8 class SpectraHeisProc(ProcessingUnit):
78 78 def __getFft(self):
79 79
80 80 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
81 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
81 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / (self.dataOut.nFFTPoints)
83 83 self.dataOut.data_spc = spc
84 84
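For reference, the same three steps as __getFft on a toy complex voltage block (shape and values assumed only for the example): FFT along the sample axis, shift the zero frequency to the center, then magnitude squared divided by the number of FFT points.

    import numpy

    nChannels, nFFTPoints = 2, 16
    volts = (numpy.random.randn(nChannels, nFFTPoints)
             + 1j * numpy.random.randn(nChannels, nFFTPoints))

    fft_volt = numpy.fft.fftshift(numpy.fft.fft(volts, axis=1), axes=(1,))
    spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / nFFTPoints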
85 85 def run(self):
@@ -102,7 +102,7 class SpectraHeisProc(ProcessingUnit):
102 102
103 103 return
104 104
105 raise ValueError("The type object %s is not valid"%(self.dataIn.type))
105 raise ValueError("The type object %s is not valid" % (self.dataIn.type))
106 106
107 107
108 108 def selectChannels(self, channelList):
@@ -136,9 +136,9 class SpectraHeisProc(ProcessingUnit):
136 136
137 137 for channelIndex in channelIndexList:
138 138 if channelIndex not in self.dataOut.channelIndexList:
139 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
139 raise ValueError("The value %d in channelIndexList is not valid" % channelIndex)
140 140
141 data_spc = self.dataOut.data_spc[channelIndexList,:]
141 data_spc = self.dataOut.data_spc[channelIndexList, :]
142 142
143 143 self.dataOut.data_spc = data_spc
144 144 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
@@ -151,7 +151,7 class IncohInt4SpectraHeis(Operation):
151 151 isConfig = False
152 152
153 153 __profIndex = 0
154 __withOverapping = False
154 __withOverapping = False
155 155
156 156 __byTime = False
157 157 __initime = None
@@ -164,9 +164,9 class IncohInt4SpectraHeis(Operation):
164 164
165 165 n = None
166 166
167 def __init__(self):#, **kwargs):
167 def __init__(self): # , **kwargs):
168 168
169 Operation.__init__(self)#, **kwargs)
169 Operation.__init__(self) # , **kwargs)
170 170 # self.isConfig = False
171 171
172 172 def setup(self, n=None, timeInterval=None, overlapping=False):
@@ -194,7 +194,7 class IncohInt4SpectraHeis(Operation):
194 194 self.n = n
195 195 self.__byTime = False
196 196 else:
197 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
197 self.__integrationtime = timeInterval # * 60. #if (type(timeInterval)!=integer) -> change this line
198 198 self.n = 9999
199 199 self.__byTime = True
200 200
@@ -219,25 +219,25 class IncohInt4SpectraHeis(Operation):
219 219 self.__profIndex += 1
220 220 return
221 221
222 #Overlapping data
222 # Overlapping data
223 223 nChannels, nHeis = data.shape
224 224 data = numpy.reshape(data, (1, nChannels, nHeis))
225 225
226 #If the buffer is empty then it takes the data value
226 # If the buffer is empty then it takes the data value
227 227 if self.__buffer is None:
228 228 self.__buffer = data
229 229 self.__profIndex += 1
230 230 return
231 231
232 #If the buffer length is lower than n then stacking the data value
232 # If the buffer length is lower than n then stacking the data value
233 233 if self.__profIndex < self.n:
234 234 self.__buffer = numpy.vstack((self.__buffer, data))
235 235 self.__profIndex += 1
236 236 return
237 237
238 #If the buffer length is equal to n then replacing the last buffer value with the data value
238 # If the buffer length is equal to n then replacing the last buffer value with the data value
239 239 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
240 self.__buffer[self.n-1] = data
240 self.__buffer[self.n - 1] = data
241 241 self.__profIndex = self.n
242 242 return
243 243
@@ -261,7 +261,7 class IncohInt4SpectraHeis(Operation):
261 261
262 262 return data, n
263 263
264 #Integration with Overlapping
264 # Integration with Overlapping
265 265 data = numpy.sum(self.__buffer, axis=0)
266 266 n = self.__profIndex
267 267
@@ -315,7 +315,7 class IncohInt4SpectraHeis(Operation):
315 315
316 316 avgdatatime = self.__initime
317 317
318 deltatime = datatime -self.__lastdatatime
318 deltatime = datatime - self.__lastdatatime
319 319
320 320 if not self.__withOverapping:
321 321 self.__initime = datatime
@@ -344,4 +344,4 class IncohInt4SpectraHeis(Operation):
344 344 # dataOut.timeInterval = self.__timeInterval*self.n
345 345 dataOut.flagNoData = False
346 346
347 return dataOut No newline at end of file
347 return dataOut
This diff has been collapsed as it changes many lines (4280 lines changed).
@@ -1,6 +1,7
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 from scipy.stats import norm
4 5 import scipy
5 6 import re
6 7 import datetime
@@ -8,25 +9,25 import copy
8 9 import sys
9 10 import importlib
10 11 import itertools
11 from multiprocessing import Pool, TimeoutError
12 from multiprocessing import Pool, TimeoutError
12 13 from multiprocessing.pool import ThreadPool
13 14 import time
14 15
15 16 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 from .jroproc_base import ProcessingUnit, Operation #, MPDecorator
17 18 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 19 from scipy import asarray as ar,exp
19 20 from scipy.optimize import curve_fit
20 from schainpy.utils import log
21 #from schainpy.utils import log
21 22 import warnings
22 23 from numpy import NaN
23 24 from scipy.optimize.optimize import OptimizeWarning
24 25 warnings.filterwarnings('ignore')
25 26
26 import matplotlib.pyplot as plt
27 27
28 28 SPEED_OF_LIGHT = 299792458
29 29
30
30 31 '''solving pickling issue'''
31 32
32 33 def _pickle_method(method):
@@ -45,15 +46,15 def _unpickle_method(func_name, obj, cls):
45 46 break
46 47 return func.__get__(obj, cls)
47 48
48
49 #@MPDecorator
49 50 class ParametersProc(ProcessingUnit):
50
51
51 52 METHODS = {}
52 53 nSeconds = None
53 54
54 55 def __init__(self):
55 56 ProcessingUnit.__init__(self)
56
57
57 58 # self.objectDict = {}
58 59 self.buffer = None
59 60 self.firstdatatime = None
@@ -62,73 +63,59 class ParametersProc(ProcessingUnit):
62 63 self.setupReq = False #Add this to all processing units
63 64
64 65 def __updateObjFromInput(self):
65
66
66 67 self.dataOut.inputUnit = self.dataIn.type
67
68
68 69 self.dataOut.timeZone = self.dataIn.timeZone
69 70 self.dataOut.dstFlag = self.dataIn.dstFlag
70 71 self.dataOut.errorCount = self.dataIn.errorCount
71 72 self.dataOut.useLocalTime = self.dataIn.useLocalTime
72
73
73 74 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
74 75 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
75 76 self.dataOut.channelList = self.dataIn.channelList
76 77 self.dataOut.heightList = self.dataIn.heightList
77 78 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
78 # self.dataOut.nHeights = self.dataIn.nHeights
79 # self.dataOut.nChannels = self.dataIn.nChannels
80 # self.dataOut.nBaud = self.dataIn.nBaud
81 # self.dataOut.nCode = self.dataIn.nCode
82 # self.dataOut.code = self.dataIn.code
83 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
79 #self.dataOut.nHeights = self.dataIn.nHeights
80 #self.dataOut.nChannels = self.dataIn.nChannels
81 self.dataOut.nBaud = self.dataIn.nBaud
82 self.dataOut.nCode = self.dataIn.nCode
83 self.dataOut.code = self.dataIn.code
84 #self.dataOut.nProfiles = self.dataOut.nFFTPoints
84 85 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
85 86 # self.dataOut.utctime = self.firstdatatime
86 87 self.dataOut.utctime = self.dataIn.utctime
87 88 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
88 89 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data has no flip applied
89 90 self.dataOut.nCohInt = self.dataIn.nCohInt
90 # self.dataOut.nIncohInt = 1
91 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
91 #self.dataOut.nIncohInt = 1
92 self.dataOut.ippSeconds = self.dataIn.ippSeconds
92 93 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
93 94 self.dataOut.timeInterval1 = self.dataIn.timeInterval
94 self.dataOut.heightList = self.dataIn.heightList
95 self.dataOut.heightList = self.dataIn.heightList #getHeiRange()
95 96 self.dataOut.frequency = self.dataIn.frequency
96 # self.dataOut.noise = self.dataIn.noise
97
97 #self.dataOut.noise = self.dataIn.noise
98
98 99 def run(self):
99 100
100 101
101 102
102 103 #---------------------- Voltage Data ---------------------------
103
104
104 105 if self.dataIn.type == "Voltage":
105 106
106 107 self.__updateObjFromInput()
107 108 self.dataOut.data_pre = self.dataIn.data.copy()
108 109 self.dataOut.flagNoData = False
109 110 self.dataOut.utctimeInit = self.dataIn.utctime
110 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
111 if hasattr(self.dataIn, 'dataPP_POW'):
112 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
113
114 if hasattr(self.dataIn, 'dataPP_POWER'):
115 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
116
117 if hasattr(self.dataIn, 'dataPP_DOP'):
118 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
119
120 if hasattr(self.dataIn, 'dataPP_SNR'):
121 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
122
123 if hasattr(self.dataIn, 'dataPP_WIDTH'):
124 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
111 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
125 112 return
126
113
127 114 #---------------------- Spectra Data ---------------------------
128
115
129 116 if self.dataIn.type == "Spectra":
130 117
131 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
118 self.dataOut.data_pre = (self.dataIn.data_spc, self.dataIn.data_cspc)
132 119 self.dataOut.data_spc = self.dataIn.data_spc
133 120 self.dataOut.data_cspc = self.dataIn.data_cspc
134 121 self.dataOut.nProfiles = self.dataIn.nProfiles
@@ -138,386 +125,318 class ParametersProc(ProcessingUnit):
138 125 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
139 126 self.dataOut.spc_noise = self.dataIn.getNoise()
140 127 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
141 # self.dataOut.normFactor = self.dataIn.normFactor
142 self.dataOut.pairsList = self.dataIn.pairsList
128 #self.dataOut.normFactor = self.dataIn.normFactor
129 self.dataOut.pairsList = self.dataIn.pairsList
143 130 self.dataOut.groupList = self.dataIn.pairsList
144 self.dataOut.flagNoData = False
145
131 self.dataOut.flagNoData = False
132 self.dataOut.spcacum = None
133
146 134 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
147 135 self.dataOut.ChanDist = self.dataIn.ChanDist
148 else: self.dataOut.ChanDist = None
149
136 else: self.dataOut.ChanDist = None
137
150 138 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
151 139 # self.dataOut.VelRange = self.dataIn.VelRange
152 140 #else: self.dataOut.VelRange = None
153
141
154 142 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
155 143 self.dataOut.RadarConst = self.dataIn.RadarConst
156
144
157 145 if hasattr(self.dataIn, 'NPW'): #NPW
158 146 self.dataOut.NPW = self.dataIn.NPW
159
147
160 148 if hasattr(self.dataIn, 'COFA'): #COFA
161 149 self.dataOut.COFA = self.dataIn.COFA
162
163
164
150
151
152
165 153 #---------------------- Correlation Data ---------------------------
166
154
167 155 if self.dataIn.type == "Correlation":
168 156 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
169
157
170 158 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
171 159 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
172 160 self.dataOut.groupList = (acf_pairs, ccf_pairs)
173
161
174 162 self.dataOut.abscissaList = self.dataIn.lagRange
175 163 self.dataOut.noise = self.dataIn.noise
176 self.dataOut.data_snr = self.dataIn.SNR
164 self.dataOut.data_SNR = self.dataIn.SNR
177 165 self.dataOut.flagNoData = False
178 166 self.dataOut.nAvg = self.dataIn.nAvg
179
167
180 168 #---------------------- Parameters Data ---------------------------
181
169
182 170 if self.dataIn.type == "Parameters":
183 171 self.dataOut.copy(self.dataIn)
184 172 self.dataOut.flagNoData = False
185
173
186 174 return True
187
175
188 176 self.__updateObjFromInput()
189 177 self.dataOut.utctimeInit = self.dataIn.utctime
190 178 self.dataOut.paramInterval = self.dataIn.timeInterval
191
179
192 180 return
193 181
194 182
195 183 def target(tups):
196
184
197 185 obj, args = tups
198
186
199 187 return obj.FitGau(args)
200
201 class RemoveWideGC(Operation):
202 ''' This class removes the wide clutter and replaces it with simple interpolation points
203 This mainly applies to CLAIRE radar
204
205 ClutterWidth : Width to look for the clutter peak
206
207 Input:
208
209 self.dataOut.data_pre : SPC and CSPC
210 self.dataOut.spc_range : To select wind and rainfall velocities
211
212 Affected:
213
214 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
215
216 Written by D. Scipión 25.02.2021
217 '''
218 def __init__(self):
219 Operation.__init__(self)
220 self.i = 0
221 self.ich = 0
222 self.ir = 0
223 188
224 def run(self, dataOut, ClutterWidth=2.5):
225 # print ('Entering RemoveWideGC ... ')
226
227 self.spc = dataOut.data_pre[0].copy()
228 self.spc_out = dataOut.data_pre[0].copy()
229 self.Num_Chn = self.spc.shape[0]
230 self.Num_Hei = self.spc.shape[2]
231 VelRange = dataOut.spc_range[2][:-1]
232 dv = VelRange[1]-VelRange[0]
233
234 # Find the velocities that corresponds to zero
235 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
236
237 # Removing novalid data from the spectra
238 for ich in range(self.Num_Chn) :
239 for ir in range(self.Num_Hei) :
240 # Estimate the noise at each range
241 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
242
243 # Removing the noise floor at each range
244 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
245 self.spc[ich,novalid,ir] = HSn
246
247 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
248 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
249 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
250 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
251 continue
252 junk3 = numpy.squeeze(numpy.diff(j1index))
253 junk4 = numpy.squeeze(numpy.diff(j2index))
254
255 valleyindex = j2index[numpy.where(junk4>1)]
256 peakindex = j1index[numpy.where(junk3>1)]
257
258 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
259 if numpy.size(isvalid) == 0 :
260 continue
261 if numpy.size(isvalid) >1 :
262 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
263 isvalid = isvalid[vindex]
264
265 # clutter peak
266 gcpeak = peakindex[isvalid]
267 vl = numpy.where(valleyindex < gcpeak)
268 if numpy.size(vl) == 0:
269 continue
270 gcvl = valleyindex[vl[0][-1]]
271 vr = numpy.where(valleyindex > gcpeak)
272 if numpy.size(vr) == 0:
273 continue
274 gcvr = valleyindex[vr[0][0]]
275
276 # Removing the clutter
277 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
278 gcindex = gc_values[gcvl+1:gcvr-1]
279 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
280
281 dataOut.data_pre[0] = self.spc_out
282 #print ('Leaving RemoveWideGC ... ')
283 return dataOut
284
189
285 190 class SpectralFilters(Operation):
286 ''' This class allows replacing the novalid values with noise for each channel
287 This applies to CLAIRE RADAR
288
289 PositiveLimit : RightLimit of novalid data
290 NegativeLimit : LeftLimit of novalid data
291
191
192 '''This class allows the Rainfall / Wind Selection for CLAIRE RADAR
193
194 LimitR : It is the limit in m/s of Rainfall
195 LimitW : It is the limit in m/s for Winds
196
292 197 Input:
293
198
294 199 self.dataOut.data_pre : SPC and CSPC
295 200 self.dataOut.spc_range : To select wind and rainfall velocities
296
201
297 202 Affected:
298
203
299 204 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
300
301 Written by D. Scipión 29.01.2021
205 self.dataOut.spcparam_range : Used in SpcParamPlot
206 self.dataOut.SPCparam : Used in PrecipitationProc
207
208
302 209 '''
210
303 211 def __init__(self):
304 212 Operation.__init__(self)
305 self.i = 0
306
307 def run(self, dataOut, ):
308
213 self.i=0
214
215 def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5):
216
217
218 #Wind limits
219 LimitR = PositiveLimit
220 LimitN = NegativeLimit
221
309 222 self.spc = dataOut.data_pre[0].copy()
223 self.cspc = dataOut.data_pre[1].copy()
224
225 self.Num_Hei = self.spc.shape[2]
226 self.Num_Bin = self.spc.shape[1]
310 227 self.Num_Chn = self.spc.shape[0]
228
311 229 VelRange = dataOut.spc_range[2]
312
313 # novalid corresponds to data within the Negative and PositiveLimit
230 TimeRange = dataOut.spc_range[1]
231 FrecRange = dataOut.spc_range[0]
232
233 Vmax= 2*numpy.max(dataOut.spc_range[2])
234 Tmax= 2*numpy.max(dataOut.spc_range[1])
235 Fmax= 2*numpy.max(dataOut.spc_range[0])
314 236
237 Breaker1R=VelRange[numpy.abs(VelRange-(-LimitN)).argmin()]
238 Breaker1R=numpy.where(VelRange == Breaker1R)
239
240 Delta = self.Num_Bin/2 - Breaker1R[0]
241
242
243 '''Reacomodando SPCrange'''
315 244
316 # Removing novalid data from the spectra
245 VelRange=numpy.roll(VelRange,-(int(self.Num_Bin/2)) ,axis=0)
246
247 VelRange[-(int(self.Num_Bin/2)):]+= Vmax
248
249 FrecRange=numpy.roll(FrecRange,-(int(self.Num_Bin/2)),axis=0)
250
251 FrecRange[-(int(self.Num_Bin/2)):]+= Fmax
252
253 TimeRange=numpy.roll(TimeRange,-(int(self.Num_Bin/2)),axis=0)
254
255 TimeRange[-(int(self.Num_Bin/2)):]+= Tmax
256
257 ''' ------------------ '''
258
259 Breaker2R=VelRange[numpy.abs(VelRange-(LimitR)).argmin()]
260 Breaker2R=numpy.where(VelRange == Breaker2R)
261
262
263 SPCroll = numpy.roll(self.spc,-(int(self.Num_Bin/2)) ,axis=1)
264
265 SPCcut = SPCroll.copy()
317 266 for i in range(self.Num_Chn):
318 self.spc[i,novalid,:] = dataOut.noise[i]
319 dataOut.data_pre[0] = self.spc
267
268 SPCcut[i,0:int(Breaker2R[0]),:] = dataOut.noise[i]
269 SPCcut[i,-int(Delta):,:] = dataOut.noise[i]
270
271 SPCcut[i]=SPCcut[i]- dataOut.noise[i]
272 SPCcut[ numpy.where( SPCcut<0 ) ] = 1e-20
273
274 SPCroll[i]=SPCroll[i]-dataOut.noise[i]
275 SPCroll[ numpy.where( SPCroll<0 ) ] = 1e-20
276
277 SPC_ch1 = SPCroll
278
279 SPC_ch2 = SPCcut
280
281 SPCparam = (SPC_ch1, SPC_ch2, self.spc)
282 dataOut.SPCparam = numpy.asarray(SPCparam)
283
284
285 dataOut.spcparam_range=numpy.zeros([self.Num_Chn,self.Num_Bin+1])
286
287 dataOut.spcparam_range[2]=VelRange
288 dataOut.spcparam_range[1]=TimeRange
289 dataOut.spcparam_range[0]=FrecRange
320 290 return dataOut
321
291
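The run() method above mostly re-orders the spectra and their axes so the negative half of the velocity range is moved behind the positive half (roll by Num_Bin/2, then shift the tail by Vmax). A toy illustration of that re-ordering with an assumed 8-bin axis:

    import numpy

    num_bin = 8
    vel_full = numpy.linspace(-4, 4, num_bin + 1)   # assumed spc_range[2]-like axis
    vmax = 2 * vel_full.max()                       # 8.0

    vel = vel_full[:num_bin]                        # [-4 -3 -2 -1  0  1  2  3]
    vel_roll = numpy.roll(vel, -(num_bin // 2))     # [ 0  1  2  3 -4 -3 -2 -1]
    vel_roll[-(num_bin // 2):] += vmax              # [ 0  1  2  3  4  5  6  7]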
322 292 class GaussianFit(Operation):
323
293
324 294 '''
325 Function that fits one and two generalized gaussians (gg) based
326 on the PSD shape across a "power band" identified from a cumsum of
295 Function that fits one and two generalized gaussians (gg) based
296 on the PSD shape across a "power band" identified from a cumsum of
327 297 the measured spectrum - noise.
328
298
329 299 Input:
330 300 self.dataOut.data_pre : SelfSpectra
331
301
332 302 Output:
333 303 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
334
304
335 305 '''
336 306 def __init__(self):
337 307 Operation.__init__(self)
338 308 self.i=0
339
340
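The model referred to here is a generalized Gaussian on top of a noise floor, A*exp(-0.5*|(v - v0)/s|**p) + N, where p = 2 gives an ordinary Gaussian. A standalone sketch with assumed parameter values:

    import numpy

    def gen_gaussian(v, N, A, v0, s, p):
        # noise floor N, amplitude A, Doppler shift v0, spectral width s, power p
        return A * numpy.exp(-0.5 * numpy.abs((v - v0) / s) ** p) + N

    v = numpy.linspace(-10, 10, 64)
    spc_model = gen_gaussian(v, N=1.0, A=10.0, v0=-2.0, s=1.5, p=2.0)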
341 # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the fft points
342 def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
309
310
311 def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the fft points
343 312 """This routine will fit a couple of generalized Gaussians to a power spectrum
344 methods: generalized, squared
345 313 input: spc
346 314 output:
347 noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
315 Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise
348 316 """
349 print ('Entering ',method,' double Gaussian fit')
317
350 318 self.spc = dataOut.data_pre[0].copy()
351 319 self.Num_Hei = self.spc.shape[2]
352 320 self.Num_Bin = self.spc.shape[1]
353 321 self.Num_Chn = self.spc.shape[0]
322 Vrange = dataOut.abscissaList
323
324 GauSPC = numpy.empty([self.Num_Chn,self.Num_Bin,self.Num_Hei])
325 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
326 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
327 SPC_ch1[:] = numpy.NaN
328 SPC_ch2[:] = numpy.NaN
354 329
330
355 331 start_time = time.time()
356
357 pool = Pool(processes=self.Num_Chn)
358 args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
359 objs = [self for __ in range(self.Num_Chn)]
360 attrs = list(zip(objs, args))
361 DGauFitParam = pool.map(target, attrs)
362 # Parameters:
363 # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
364 dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
365
366 # Double Gaussian Curves
367 gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
368 gau0[:] = numpy.NaN
369 gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
370 gau1[:] = numpy.NaN
371 x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
372 for iCh in range(self.Num_Chn):
373 N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
374 N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
375 A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
376 A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
377 v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
378 v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
379 s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
380 s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
381             if method == 'generalized':
382 p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
383 p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
384 elif method == 'squared':
385 p0 = 2.
386 p1 = 2.
387 gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
388 gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
389 dataOut.GaussFit0 = gau0
390 dataOut.GaussFit1 = gau1
391
392 print('Leaving ',method ,' double Gaussian fit')
393 return dataOut
394
332
333 noise_ = dataOut.spc_noise[0].copy()
334
335
336 pool = Pool(processes=self.Num_Chn)
337 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
338 objs = [self for __ in range(self.Num_Chn)]
339 attrs = list(zip(objs, args))
340 gauSPC = pool.map(target, attrs)
341 dataOut.SPCparam = numpy.asarray(SPCparam)
342
343 ''' Parameters:
344 1. Amplitude
345 2. Shift
346 3. Width
347 4. Power
348 '''
349
395 350 def FitGau(self, X):
396 # print('Entering FitGau')
397 # Assigning the variables
398 Vrange, ch, wnoise, num_intg, SNRlimit = X
399 # Noise Limits
400 noisebl = wnoise * 0.9
401 noisebh = wnoise * 1.1
402 # Radar Velocity
403 Va = max(Vrange)
404 deltav = Vrange[1] - Vrange[0]
405 x = numpy.arange(self.Num_Bin)
406
407 # print ('stop 0')
408
409 # 5 parameters, 2 Gaussians
410 DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
411 DGauFitParam[:] = numpy.NaN
412
413 # SPCparam = []
414 # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
415 # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
416 # SPC_ch1[:] = 0 #numpy.NaN
417 # SPC_ch2[:] = 0 #numpy.NaN
418 # print ('stop 1')
351
352 Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X
353
354 SPCparam = []
355 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
356 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
357 SPC_ch1[:] = 0#numpy.NaN
358 SPC_ch2[:] = 0#numpy.NaN
359
360
361
419 362 for ht in range(self.Num_Hei):
420 # print (ht)
421 # print ('stop 2')
422 # Spectra at each range
363
364
423 365 spc = numpy.asarray(self.spc)[ch,:,ht]
424 snr = ( spc.mean() - wnoise ) / wnoise
425 snrdB = 10.*numpy.log10(snr)
426
427 #print ('stop 3')
428 if snrdB < SNRlimit :
429 # snr = numpy.NaN
430 # SPC_ch1[:,ht] = 0#numpy.NaN
431 # SPC_ch1[:,ht] = 0#numpy.NaN
432 # SPCparam = (SPC_ch1,SPC_ch2)
433 # print ('SNR less than SNRth')
434 continue
435 # wnoise = hildebrand_sekhon(spc,num_intg)
436 # print ('stop 2.01')
366
437 367 #############################################
438 368 # normalizing spc and noise
439 369 # This part differs from gg1
440 # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
370 spc_norm_max = max(spc)
441 371 #spc = spc / spc_norm_max
442 # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
372 pnoise = pnoise #/ spc_norm_max
443 373 #############################################
444
445 # print ('stop 2.1')
374
446 375 fatspectra=1.0
447 # noise per channel.... we might want to use the noise at each range
448 376
449 # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
377 wnoise = noise_ #/ spc_norm_max
450 378 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
451 #if wnoise>1.1*pnoise: # to be tested later
379 #if wnoise>1.1*pnoise: # to be tested later
452 380 # wnoise=pnoise
453 # noisebl = wnoise*0.9
454 # noisebh = wnoise*1.1
455 spc = spc - wnoise # signal
456
457 # print ('stop 2.2')
458 minx = numpy.argmin(spc)
459 #spcs=spc.copy()
460 spcs = numpy.roll(spc,-minx)
461 cum = numpy.cumsum(spcs)
462 # tot_noise = wnoise * self.Num_Bin #64;
463
464 # print ('stop 2.3')
465 # snr = sum(spcs) / tot_noise
466 # snrdB = 10.*numpy.log10(snr)
467 #print ('stop 3')
468 # if snrdB < SNRlimit :
469 # snr = numpy.NaN
470 # SPC_ch1[:,ht] = 0#numpy.NaN
471 # SPC_ch1[:,ht] = 0#numpy.NaN
472 # SPCparam = (SPC_ch1,SPC_ch2)
473 # print ('SNR less than SNRth')
474 # continue
475
476
381 noisebl=wnoise*0.9;
382 noisebh=wnoise*1.1
383 spc=spc-wnoise
384
385 minx=numpy.argmin(spc)
386 #spcs=spc.copy()
387 spcs=numpy.roll(spc,-minx)
388 cum=numpy.cumsum(spcs)
389 tot_noise=wnoise * self.Num_Bin #64;
390
391 snr = sum(spcs)/tot_noise
392 snrdB=10.*numpy.log10(snr)
393
394 if snrdB < SNRlimit :
395 snr = numpy.NaN
396 SPC_ch1[:,ht] = 0#numpy.NaN
397 SPC_ch1[:,ht] = 0#numpy.NaN
398 SPCparam = (SPC_ch1,SPC_ch2)
399 continue
400
401
477 402 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
478 403 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
479 # print ('stop 4')
480 cummax = max(cum)
481 epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
482 cumlo = cummax * epsi
483 cumhi = cummax * (1-epsi)
484 powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
485
486 # print ('stop 5')
404
405 cummax=max(cum);
406 epsi=0.08*fatspectra # cumsum to narrow down the energy region
407 cumlo=cummax*epsi;
408 cumhi=cummax*(1-epsi)
409 powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
410
411
487 412 if len(powerindex) < 1:# case for powerindex 0
488 # print ('powerindex < 1')
489 continue
490 powerlo = powerindex[0]
491 powerhi = powerindex[-1]
492 powerwidth = powerhi-powerlo
493 if powerwidth <= 1:
494 # print('powerwidth <= 1')
495 413 continue
496
497 # print ('stop 6')
498 firstpeak = powerlo + powerwidth/10.# first gaussian energy location
499 secondpeak = powerhi - powerwidth/10. #second gaussian energy location
500 midpeak = (firstpeak + secondpeak)/2.
501 firstamp = spcs[int(firstpeak)]
502 secondamp = spcs[int(secondpeak)]
503 midamp = spcs[int(midpeak)]
504
505 y_data = spc + wnoise
506
414 powerlo=powerindex[0]
415 powerhi=powerindex[-1]
416 powerwidth=powerhi-powerlo
417
418 firstpeak=powerlo+powerwidth/10.# first gaussian energy location
419 secondpeak=powerhi-powerwidth/10.#second gaussian energy location
420 midpeak=(firstpeak+secondpeak)/2.
421 firstamp=spcs[int(firstpeak)]
422 secondamp=spcs[int(secondpeak)]
423 midamp=spcs[int(midpeak)]
424
425 x=numpy.arange( self.Num_Bin )
426 y_data=spc+wnoise
427
507 428 ''' single Gaussian '''
508 shift0 = numpy.mod(midpeak+minx, self.Num_Bin )
509 width0 = powerwidth/4.#Initialization entire power of spectrum divided by 4
510 power0 = 2.
511 amplitude0 = midamp
512 state0 = [shift0,width0,amplitude0,power0,wnoise]
513 bnds = ((0,self.Num_Bin-1),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
514 lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data,x,num_intg), bounds=bnds, approx_grad=True)
515 # print ('stop 7.1')
516 # print (bnds)
517
518 chiSq1=lsq1[1]
519
520 # print ('stop 8')
429 shift0=numpy.mod(midpeak+minx, self.Num_Bin )
430 width0=powerwidth/4.#Initialization entire power of spectrum divided by 4
431 power0=2.
432 amplitude0=midamp
433 state0=[shift0,width0,amplitude0,power0,wnoise]
434 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
435 lsq1=fmin_l_bfgs_b(self.misfit1,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
436
437 chiSq1=lsq1[1];
438
439
521 440 if fatspectra<1.0 and powerwidth<4:
522 441 choice=0
523 442 Amplitude0=lsq1[0][2]
@@ -531,305 +450,343 class GaussianFit(Operation):
531 450 noise=lsq1[0][4]
532 451 #return (numpy.array([shift0,width0,Amplitude0,p0]),
533 452 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
534
535 # print ('stop 9')
536 ''' two Gaussians '''
453
454 ''' two gaussians '''
537 455 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
538 shift0 = numpy.mod(firstpeak+minx, self.Num_Bin )
539 shift1 = numpy.mod(secondpeak+minx, self.Num_Bin )
540 width0 = powerwidth/6.
541 width1 = width0
542 power0 = 2.
543 power1 = power0
544 amplitude0 = firstamp
545 amplitude1 = secondamp
546 state0 = [shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
456 shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
457 shift1=numpy.mod(secondpeak+minx, self.Num_Bin )
458 width0=powerwidth/6.;
459 width1=width0
460 power0=2.;
461 power1=power0
462 amplitude0=firstamp;
463 amplitude1=secondamp
464 state0=[shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
547 465 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
548 bnds=((0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
466 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
549 467 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
550
551 # print ('stop 10')
468
552 469 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
553
554 # print ('stop 11')
555 chiSq2 = lsq2[1]
556
557 # print ('stop 12')
558
559 oneG = (chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
560
561 # print ('stop 13')
470
471
472 chiSq2=lsq2[1];
473
474
475
476 oneG=(chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
477
562 478 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
563 479 if oneG:
564 choice = 0
480 choice=0
565 481 else:
566 w1 = lsq2[0][1]; w2 = lsq2[0][5]
567 a1 = lsq2[0][2]; a2 = lsq2[0][6]
568 p1 = lsq2[0][3]; p2 = lsq2[0][7]
569 s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
570 s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
571                     gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each generalized Gaussian with proper p scaling
572
482 w1=lsq2[0][1]; w2=lsq2[0][5]
483 a1=lsq2[0][2]; a2=lsq2[0][6]
484 p1=lsq2[0][3]; p2=lsq2[0][7]
485 s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
486 s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
487 gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
488
573 489 if gp1>gp2:
574 490 if a1>0.7*a2:
575 choice = 1
491 choice=1
576 492 else:
577 choice = 2
493 choice=2
578 494 elif gp2>gp1:
579 495 if a2>0.7*a1:
580 choice = 2
496 choice=2
581 497 else:
582 choice = 1
498 choice=1
583 499 else:
584 choice = numpy.argmax([a1,a2])+1
500 choice=numpy.argmax([a1,a2])+1
585 501 #else:
586 502 #choice=argmin([std2a,std2b])+1
587
503
588 504 else: # with low SNR go to the most energetic peak
589 choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
590
591 # print ('stop 14')
592 shift0 = lsq2[0][0]
593 vel0 = Vrange[0] + shift0 * deltav
594 shift1 = lsq2[0][4]
595 # vel1=Vrange[0] + shift1 * deltav
596
597 # max_vel = 1.0
598 # Va = max(Vrange)
599 # deltav = Vrange[1]-Vrange[0]
600 # print ('stop 15')
505 choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
506
507
508 shift0=lsq2[0][0];
509 vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0])
510 shift1=lsq2[0][4];
511 vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0])
512
513 max_vel = 1.0
514
601 515 #first peak will be 0, second peak will be 1
602 # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D.Scipión 19.03.2021
603 if vel0 > -Va and vel0 < Va : #first peak is in the correct range
604 shift0 = lsq2[0][0]
605 width0 = lsq2[0][1]
606 Amplitude0 = lsq2[0][2]
607 p0 = lsq2[0][3]
608
609 shift1 = lsq2[0][4]
610 width1 = lsq2[0][5]
611 Amplitude1 = lsq2[0][6]
612 p1 = lsq2[0][7]
613 noise = lsq2[0][8]
516 if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range
517 shift0=lsq2[0][0]
518 width0=lsq2[0][1]
519 Amplitude0=lsq2[0][2]
520 p0=lsq2[0][3]
521
522 shift1=lsq2[0][4]
523 width1=lsq2[0][5]
524 Amplitude1=lsq2[0][6]
525 p1=lsq2[0][7]
526 noise=lsq2[0][8]
614 527 else:
615 shift1 = lsq2[0][0]
616 width1 = lsq2[0][1]
617 Amplitude1 = lsq2[0][2]
618 p1 = lsq2[0][3]
619
620 shift0 = lsq2[0][4]
621 width0 = lsq2[0][5]
622 Amplitude0 = lsq2[0][6]
623 p0 = lsq2[0][7]
624 noise = lsq2[0][8]
625
528 shift1=lsq2[0][0]
529 width1=lsq2[0][1]
530 Amplitude1=lsq2[0][2]
531 p1=lsq2[0][3]
532
533 shift0=lsq2[0][4]
534 width0=lsq2[0][5]
535 Amplitude0=lsq2[0][6]
536 p0=lsq2[0][7]
537 noise=lsq2[0][8]
538
626 539 if Amplitude0<0.05: # in case the peak is noise
627 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
540 shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN]
628 541 if Amplitude1<0.05:
629 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
630
631 # print ('stop 16 ')
632 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
633 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
634 # SPCparam = (SPC_ch1,SPC_ch2)
635
636 DGauFitParam[0,ht,0] = noise
637 DGauFitParam[0,ht,1] = noise
638 DGauFitParam[1,ht,0] = Amplitude0
639 DGauFitParam[1,ht,1] = Amplitude1
640 DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
641 DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
642 DGauFitParam[3,ht,0] = width0 * deltav
643 DGauFitParam[3,ht,1] = width1 * deltav
644 DGauFitParam[4,ht,0] = p0
645 DGauFitParam[4,ht,1] = p1
646
647 # print (DGauFitParam.shape)
648 # print ('Leaving FitGau')
649 return DGauFitParam
650 # return SPCparam
651 # return GauSPC
652
542 shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN]
543
544
545 SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0
546 SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1
547 SPCparam = (SPC_ch1,SPC_ch2)
548
549
550 return GauSPC
551
653 552 def y_model1(self,x,state):
654 shift0, width0, amplitude0, power0, noise = state
655 model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
656 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
657 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
658 return model0 + model0u + model0d + noise
659
660 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
661 shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
662 model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
663 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
664 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
553 shift0,width0,amplitude0,power0,noise=state
554 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
665 555
666 model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
667 model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
668 model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
669 return model0 + model0u + model0d + model1 + model1u + model1d + noise
670
671     def misfit1(self,state,y_data,x,num_intg): # Compares how close the real data is to the model; the closer it is, the better the fit.
556 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
557
558 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
559 return model0+model0u+model0d+noise
560
561 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
562 shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state
563 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
564
565 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
566
567 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
568 model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1)
569
570 model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1)
571
572 model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1)
573 return model0+model0u+model0d+model1+model1u+model1d+noise
574
575     def misfit1(self,state,y_data,x,num_intg): # Compares how close the real data is to the model; the closer it is, the better the fit.
672 576
673 577 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
674
578
675 579 def misfit2(self,state,y_data,x,num_intg):
676 580 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
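A self-contained, hedged sketch of how a single-Gaussian state is fitted with the log-domain misfit defined above (fmin_l_bfgs_b as used in FitGau; the values and bounds are illustrative only):

import numpy
from scipy.optimize import fmin_l_bfgs_b

def y_model(x, state):
    shift, width, amp, power, noise = state
    return amp * numpy.exp(-0.5 * numpy.abs((x - shift) / width)**power) + noise

def misfit(state, y_data, x, num_intg):
    # chi-square of the log spectra, weighted by the number of incoherent integrations
    return num_intg * numpy.sum((numpy.log(y_data) - numpy.log(y_model(x, state)))**2)

x = numpy.arange(64)
y_data = y_model(x, [30., 5., 8., 2., 1.]) * (1 + 0.05 * numpy.random.randn(64))
state0 = [28., 4., 6., 2., 1.]                                   # initial guess
bnds = ((0, 63), (1, 20), (0, None), (0.5, 3.), (0.5, 2.))
best_state, cost, info = fmin_l_bfgs_b(misfit, state0, args=(y_data, x, 10),
                                       bounds=bnds, approx_grad=True)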
677
678
581
582
679 583
680 584 class PrecipitationProc(Operation):
681
585
682 586 '''
683 587     Operator that estimates the reflectivity factor (Z) and the rainfall rate (R)
684
685 Input:
588
589 Input:
686 590 self.dataOut.data_pre : SelfSpectra
687
688 Output:
689
690 self.dataOut.data_output : Reflectivity factor, rainfall Rate
691
692
693 Parameters affected:
591
592 Output:
593
594 self.dataOut.data_output : Reflectivity factor, rainfall Rate
595
596
597 Parameters affected:
694 598 '''
695
599
696 600 def __init__(self):
697 601 Operation.__init__(self)
698 602 self.i=0
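A small, hedged numeric sketch of the quantities the run() method below derives from a drop size distribution N(D); the numbers are made up, only the formulas mirror the code:

import numpy

D = numpy.array([0.5, 1.0, 2.0])          # drop diameters [mm]
N = numpy.array([800., 150., 10.])        # illustrative DSD [m^-3 mm^-1]
v = 9.65 - 10.3 * numpy.exp(-0.6 * D)     # fall speed [m/s], one of the D-V relations used below

Z   = numpy.nansum(N * D**6)                            # reflectivity factor [mm^6 m^-3]
RR  = 0.0006 * numpy.pi * numpy.nansum(D**3 * N * v)    # rainfall rate [mm/h]
dBZ = 10 * numpy.log10(Z)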
699
700 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
701 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
702
703 # print ('Entering PrecepitationProc ... ')
704
705 if radar == "MIRA35C" :
706
603
604
605 def gaus(self,xSamples,Amp,Mu,Sigma):
606 return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
607
608
609
610 def Moments(self, ySamples, xSamples):
611         Pot = numpy.nansum( ySamples ) # Power, 0th moment
612         yNorm = ySamples / Pot
613
614         Vr = numpy.nansum( yNorm * xSamples ) # Radial velocity, mu, Doppler shift, 1st moment
615         Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # 2nd moment
616         Desv = Sigma2**0.5 # Standard deviation, spectral width
617
618 return numpy.array([Pot, Vr, Desv])
619
620 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
621 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km = 0.93, Altitude=3350):
622
623
624 Velrange = dataOut.spcparam_range[2]
625 FrecRange = dataOut.spcparam_range[0]
626
627 dV= Velrange[1]-Velrange[0]
628 dF= FrecRange[1]-FrecRange[0]
629
630 if radar == "MIRA35C" :
631
707 632 self.spc = dataOut.data_pre[0].copy()
708 633 self.Num_Hei = self.spc.shape[2]
709 634 self.Num_Bin = self.spc.shape[1]
710 635 self.Num_Chn = self.spc.shape[0]
711 636 Ze = self.dBZeMODE2(dataOut)
712
637
713 638 else:
714
715 self.spc = dataOut.data_pre[0].copy()
716
717             # NOTE: the Tx pulse range must be removed
718 self.spc[:,:,0:7]= numpy.NaN
719
639
640 self.spc = dataOut.SPCparam[1].copy() #dataOut.data_pre[0].copy() #
641
642 """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
643
644 self.spc[:,:,0:7]= numpy.NaN
645
646 """##########################################"""
647
720 648 self.Num_Hei = self.spc.shape[2]
721 649 self.Num_Bin = self.spc.shape[1]
722 650 self.Num_Chn = self.spc.shape[0]
723
724 VelRange = dataOut.spc_range[2]
725
651
726 652         ''' Obtain the radar constant '''
727
653
728 654 self.Pt = Pt
729 655 self.Gt = Gt
730 656 self.Gr = Gr
731 657 self.Lambda = Lambda
732 658 self.aL = aL
733 659 self.tauW = tauW
734 self.ThetaT = ThetaT
660 self.ThetaT = ThetaT
735 661 self.ThetaR = ThetaR
736         self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
737         self.lt = 10**(1.67/10) # Tx cable loss 1.67 dB
738         self.lr = 10**(5.73/10) # Rx cable loss 5.73 dB
739
662
740 663 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
741 664 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
742 665 RadarConstant = 10e-26 * Numerator / Denominator #
743         ExpConstant = 10**(40/10) # Experimental constant
744
745 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
746 for i in range(self.Num_Chn):
747 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
748 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
749
750 SPCmean = numpy.mean(SignalPower, 0)
751 Pr = SPCmean[:,:]/dataOut.normFactor
752
753 # Declaring auxiliary variables
754 Range = dataOut.heightList*1000. #Range in m
755 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
756 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
757 zMtrx = rMtrx+Altitude
758 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
759 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
760
761 # height dependence to air density Foote and Du Toit (1969)
762 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
763 VMtrx = VelMtrx / delv_z #Normalized velocity
764 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
765 # Diameter is related to the fall speed of falling drops
766 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
767 # Only valid for D>= 0.16 mm
768 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
769
770 #Calculate Radar Reflectivity ETAn
771 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
772 ETAd = ETAn * 6.18 * exp( -0.6 * D_Vz ) * delv_z
773 # Radar Cross Section
774 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
775 # Drop Size Distribution
776 DSD = ETAn / sigmaD
777         # Equivalent reflectivity
778 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
779 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
780 # RainFall Rate
781 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
782
783 # Censoring the data
784         # Removing data below SNRth; the SNR should be considered per channel
785 SNRth = 10**(SNRdBlimit/10) #-30dB
786 novalid = numpy.where((dataOut.data_snr[0,:] <SNRth) | (dataOut.data_snr[1,:] <SNRth) | (dataOut.data_snr[2,:] <SNRth)) # AND condition. Maybe OR condition better
787 W = numpy.nanmean(dataOut.data_dop,0)
788 W[novalid] = numpy.NaN
789 Ze_org[novalid] = numpy.NaN
790 RR[novalid] = numpy.NaN
791
666
667 ''' ============================= '''
668
669 self.spc[0] = (self.spc[0]-dataOut.noise[0])
670 self.spc[1] = (self.spc[1]-dataOut.noise[1])
671 self.spc[2] = (self.spc[2]-dataOut.noise[2])
672
673 self.spc[ numpy.where(self.spc < 0)] = 0
674
675 SPCmean = (numpy.mean(self.spc,0) - numpy.mean(dataOut.noise))
676 SPCmean[ numpy.where(SPCmean < 0)] = 0
677
678 ETAn = numpy.zeros([self.Num_Bin,self.Num_Hei])
679 ETAv = numpy.zeros([self.Num_Bin,self.Num_Hei])
680 ETAd = numpy.zeros([self.Num_Bin,self.Num_Hei])
681
682 Pr = SPCmean[:,:]
683
684 VelMeteoro = numpy.mean(SPCmean,axis=0)
685
686 D_range = numpy.zeros([self.Num_Bin,self.Num_Hei])
687 SIGMA = numpy.zeros([self.Num_Bin,self.Num_Hei])
688 N_dist = numpy.zeros([self.Num_Bin,self.Num_Hei])
689 V_mean = numpy.zeros(self.Num_Hei)
690 del_V = numpy.zeros(self.Num_Hei)
691 Z = numpy.zeros(self.Num_Hei)
692 Ze = numpy.zeros(self.Num_Hei)
693 RR = numpy.zeros(self.Num_Hei)
694
695 Range = dataOut.heightList*1000.
696
697 for R in range(self.Num_Hei):
698
699 h = Range[R] + Altitude #Range from ground to radar pulse altitude
700 del_V[R] = 1 + 3.68 * 10**-5 * h + 1.71 * 10**-9 * h**2 #Density change correction for velocity
701
702 D_range[:,R] = numpy.log( (9.65 - (Velrange[0:self.Num_Bin] / del_V[R])) / 10.3 ) / -0.6 #Diameter range [m]x10**-3
703
704             '''NOTE: ETA(n) dn = ETA(f) df
705
706             dn = 1  sampling differential
707             df = ETA(n) / ETA(f)
708
709 '''
710
711 ETAn[:,R] = RadarConstant * Pr[:,R] * (Range[R] )**2 #Reflectivity (ETA)
712
713 ETAv[:,R]=ETAn[:,R]/dV
714
715 ETAd[:,R]=ETAv[:,R]*6.18*exp(-0.6*D_range[:,R])
716
717 SIGMA[:,R] = Km * (D_range[:,R] * 1e-3 )**6 * numpy.pi**5 / Lambda**4 #Equivalent Section of drops (sigma)
718
719 N_dist[:,R] = ETAn[:,R] / SIGMA[:,R]
720
721 DMoments = self.Moments(Pr[:,R], Velrange[0:self.Num_Bin])
722
723 try:
724 popt01,pcov = curve_fit(self.gaus, Velrange[0:self.Num_Bin] , Pr[:,R] , p0=DMoments)
725 except:
726 popt01=numpy.zeros(3)
727 popt01[1]= DMoments[1]
728
729 if popt01[1]<0 or popt01[1]>20:
730 popt01[1]=numpy.NaN
731
732
733 V_mean[R]=popt01[1]
734
735 Z[R] = numpy.nansum( N_dist[:,R] * (D_range[:,R])**6 )#*10**-18
736
737 RR[R] = 0.0006*numpy.pi * numpy.nansum( D_range[:,R]**3 * N_dist[:,R] * Velrange[0:self.Num_Bin] ) #Rainfall rate
738
739 Ze[R] = (numpy.nansum( ETAn[:,R]) * Lambda**4) / ( 10**-18*numpy.pi**5 * Km)
740
741
742
743 RR2 = (Z/200)**(1/1.6)
744 dBRR = 10*numpy.log10(RR)
745 dBRR2 = 10*numpy.log10(RR2)
746
747 dBZe = 10*numpy.log10(Ze)
748 dBZ = 10*numpy.log10(Z)
749
792 750 dataOut.data_output = RR[8]
793 751 dataOut.data_param = numpy.ones([3,self.Num_Hei])
794 752 dataOut.channelList = [0,1,2]
795 753
796 dataOut.data_param[0]=10*numpy.log10(Ze_org)
797 dataOut.data_param[1]=-W
754 dataOut.data_param[0]=dBZ
755 dataOut.data_param[1]=V_mean
798 756 dataOut.data_param[2]=RR
799 757
800 # print ('Leaving PrecepitationProc ... ')
801 758 return dataOut
802
759
803 760 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
804
761
805 762 NPW = dataOut.NPW
806 763 COFA = dataOut.COFA
807
764
808 765 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
809 766 RadarConst = dataOut.RadarConst
810 767 #frequency = 34.85*10**9
811
768
812 769 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
813 770 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
814
771
815 772 ETA = numpy.sum(SNR,1)
816
817 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
818
773
774 ETA = numpy.where(ETA is not 0. , ETA, numpy.NaN)
775
819 776 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
820
777
821 778 for r in range(self.Num_Hei):
822
779
823 780 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
824 781 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
825
782
826 783 return Ze
827
784
828 785 # def GetRadarConstant(self):
829 #
830 # """
786 #
787 # """
831 788 # Constants:
832 #
789 #
833 790 # Pt: Transmission Power dB 5kW 5000
834 791 # Gt: Transmission Gain dB 24.7 dB 295.1209
835 792 # Gr: Reception Gain dB 18.5 dB 70.7945
@@ -838,416 +795,438 class PrecipitationProc(Operation):
838 795 # tauW: Width of transmission pulse s 4us 4e-6
839 796 # ThetaT: Transmission antenna beam angle       rad     0.1656317 rad          0.1656317
840 797 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
841 #
798 #
842 799 # """
843 #
800 #
844 801 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
845 802 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
846 803 # RadarConstant = Numerator / Denominator
847 #
804 #
848 805 # return RadarConstant
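A hedged sketch of the radar-constant calculation outlined in the comments above, using the default parameter values of run(); SPEED_OF_LIGHT is taken here as 3e8 m/s:

import numpy

Pt, Gt, Gr = 5000., 295.1209, 70.7945        # transmitted power [W] and antenna gains (linear)
Lambda, aL, tauW = 0.6741, 2.5118, 4e-6      # wavelength [m], attenuation, pulse width [s]
ThetaT, ThetaR = 0.1656317, 0.36774087       # beam angles [rad]
SPEED_OF_LIGHT = 3e8

Numerator = (4 * numpy.pi)**3 * aL**2 * 16 * numpy.log(2)
Denominator = Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR
RadarConstant = Numerator / Denominator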
849
850
851
852 class FullSpectralAnalysis(Operation):
853
806
807
808
809 class FullSpectralAnalysis(Operation):
810
854 811 """
855 Function that implements Full Spectral Analysis technique.
856
857 Input:
858 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
812 Function that implements Full Spectral Analisys technique.
813
814 Input:
815 self.dataOut.data_pre : SelfSpectra and CrossSPectra data
859 816 self.dataOut.groupList : Pairlist of channels
860 817 self.dataOut.ChanDist : Physical distance between receivers
861
862
863 Output:
864
865 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
866
867
818
819
820 Output:
821
822 self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind
823
824
868 825 Parameters affected: Winds, height range, SNR
869
826
870 827 """
871 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
872 minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
873
828 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRlimit=7):
829
830 self.indice=int(numpy.random.rand()*1000)
831
874 832 spc = dataOut.data_pre[0].copy()
875 833 cspc = dataOut.data_pre[1]
834
835 """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
836
837 SNRspc = spc.copy()
838 SNRspc[:,:,0:7]= numpy.NaN
839
840 """##########################################"""
841
842
843 nChannel = spc.shape[0]
844 nProfiles = spc.shape[1]
876 845 nHeights = spc.shape[2]
877
878 # first_height = 0.75 #km (ref: data header 20170822)
879 # resolution_height = 0.075 #km
880 '''
881 finding height range. check this when radar parameters are changed!
882 '''
883 if maxheight is not None:
884 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
885 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
886 else:
887 range_max = nHeights
888 if minheight is not None:
889 # range_min = int((minheight - first_height) / resolution_height) # theoretical
890 range_min = int(13.26 * minheight - 5) # empirical, works better
891 if range_min < 0:
892 range_min = 0
893 else:
894 range_min = 0
895
846
896 847 pairsList = dataOut.groupList
897 848 if dataOut.ChanDist is not None :
898 849 ChanDist = dataOut.ChanDist
899 850 else:
900 851 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
901
902 # 4 variables: zonal, meridional, vertical, and average SNR
903 data_param = numpy.zeros([4,nHeights]) * numpy.NaN
904 velocityX = numpy.zeros([nHeights]) * numpy.NaN
905 velocityY = numpy.zeros([nHeights]) * numpy.NaN
906 velocityZ = numpy.zeros([nHeights]) * numpy.NaN
907
908 dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
909
910 '''***********************************************WIND ESTIMATION**************************************'''
852
853 FrecRange = dataOut.spc_range[0]
854
855 ySamples=numpy.ones([nChannel,nProfiles])
856 phase=numpy.ones([nChannel,nProfiles])
857 CSPCSamples=numpy.ones([nChannel,nProfiles],dtype=numpy.complex_)
858 coherence=numpy.ones([nChannel,nProfiles])
859 PhaseSlope=numpy.ones(nChannel)
860 PhaseInter=numpy.ones(nChannel)
861 data_SNR=numpy.zeros([nProfiles])
862
863 data = dataOut.data_pre
864 noise = dataOut.noise
865
866 dataOut.data_SNR = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0]
867
868 dataOut.data_SNR[numpy.where( dataOut.data_SNR <0 )] = 1e-20
869
870
871 data_output=numpy.ones([spc.shape[0],spc.shape[2]])*numpy.NaN
872
873 velocityX=[]
874 velocityY=[]
875 velocityV=[]
876 PhaseLine=[]
877
878 dbSNR = 10*numpy.log10(dataOut.data_SNR)
879 dbSNR = numpy.average(dbSNR,0)
880
911 881 for Height in range(nHeights):
882
883 [Vzon,Vmer,Vver, GaussCenter, PhaseSlope, FitGaussCSPC]= self.WindEstimation(spc, cspc, pairsList, ChanDist, Height, noise, dataOut.spc_range, dbSNR[Height], SNRlimit)
884 PhaseLine = numpy.append(PhaseLine, PhaseSlope)
885
886 if abs(Vzon)<100. and abs(Vzon)> 0.:
887 velocityX=numpy.append(velocityX, Vzon)#Vmag
888
889 else:
890 velocityX=numpy.append(velocityX, numpy.NaN)
891
892 if abs(Vmer)<100. and abs(Vmer) > 0.:
893 velocityY=numpy.append(velocityY, -Vmer)#Vang
894
895 else:
896 velocityY=numpy.append(velocityY, numpy.NaN)
897
898 if dbSNR[Height] > SNRlimit:
899 velocityV=numpy.append(velocityV, -Vver)#FirstMoment[Height])
900 else:
901 velocityV=numpy.append(velocityV, numpy.NaN)
912 902
913 if Height >= range_min and Height < range_max:
914 # error_code will be useful in future analysis
915 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
916 ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
917
918 if abs(Vzon) < 100. and abs(Vmer) < 100.:
919 velocityX[Height] = Vzon
920 velocityY[Height] = -Vmer
921 velocityZ[Height] = Vver
922 903
923 # Censoring data with SNR threshold
924 dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
925
926 data_param[0] = velocityX
927 data_param[1] = velocityY
928 data_param[2] = velocityZ
929 data_param[3] = dbSNR
930 dataOut.data_param = data_param
904
905         '''Note: change the sign of numpy.array(velocityX) when processing BLTR data'''
906 data_output[0] = numpy.array(velocityX) #self.moving_average(numpy.array(velocityX) , N=1)
907 data_output[1] = numpy.array(velocityY) #self.moving_average(numpy.array(velocityY) , N=1)
908 data_output[2] = velocityV#FirstMoment
909
910 xFrec=FrecRange[0:spc.shape[1]]
911
912 dataOut.data_output=data_output
913
931 914 return dataOut
932
915
916
933 917 def moving_average(self,x, N=2):
934 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
935 918 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
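A quick, hedged illustration of the moving_average() helper above on made-up data:

import numpy

x = numpy.array([1., 2., 3., 4.])
smoothed = numpy.convolve(x, numpy.ones(2) / 2)[1:]   # -> [1.5, 2.5, 3.5, 2.0]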
936
919
937 920 def gaus(self,xSamples,Amp,Mu,Sigma):
938 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
939
921 return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
922
923
924
940 925 def Moments(self, ySamples, xSamples):
941 Power = numpy.nanmean(ySamples) # Power, 0th Moment
942 yNorm = ySamples / numpy.nansum(ySamples)
943 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
944 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
945         StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Standard deviation, spectral width
946 return numpy.array([Power,RadVel,StdDev])
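A hedged numeric sketch of what the Moments() method above returns for a toy spectrum (values are illustrative only):

import numpy

vel  = numpy.linspace(-10, 10, 64)                       # abscissa, e.g. velocity bins
spec = numpy.exp(-0.5 * ((vel - 2.0) / 1.5)**2)          # toy peak centred at +2 m/s

power  = numpy.nanmean(spec)                             # 0th moment
ynorm  = spec / numpy.nansum(spec)
radvel = numpy.nansum(vel * ynorm)                       # 1st moment, close to 2.0
width  = numpy.sqrt(numpy.abs(numpy.nansum(ynorm * (vel - radvel)**2)))  # close to 1.5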
947
948 def StopWindEstimation(self, error_code):
949 Vzon = numpy.NaN
950 Vmer = numpy.NaN
951 Vver = numpy.NaN
952 return Vzon, Vmer, Vver, error_code
953
954 def AntiAliasing(self, interval, maxstep):
955 """
956 function to prevent errors from aliased values when computing phaseslope
957 """
958 antialiased = numpy.zeros(len(interval))
959 copyinterval = interval.copy()
960
961 antialiased[0] = copyinterval[0]
962
963 for i in range(1,len(antialiased)):
964 step = interval[i] - interval[i-1]
965 if step > maxstep:
966 copyinterval -= 2*numpy.pi
967 antialiased[i] = copyinterval[i]
968 elif step < maxstep*(-1):
969 copyinterval += 2*numpy.pi
970 antialiased[i] = copyinterval[i]
971 else:
972 antialiased[i] = copyinterval[i].copy()
973
974 return antialiased
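A hedged illustration of the aliasing problem AntiAliasing() above guards against; numpy.unwrap applies an analogous 2*pi correction to this made-up phase series:

import numpy

wrapped   = numpy.array([2.8, 3.0, -3.1, -2.9])   # jump of about -6.1 rad between samples
unwrapped = numpy.unwrap(wrapped)                  # -> [2.8, 3.0, ~3.18, ~3.38]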
975
976 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
977 """
978 Function that Calculates Zonal, Meridional and Vertical wind velocities.
979 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
980
981 Input:
982 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
983 pairsList : Pairlist of channels
984 ChanDist : array of xi_ij and eta_ij
985 Height : height at which data is processed
986 noise : noise in [channels] format for specific height
987 Abbsisarange : range of the frequencies or velocities
988 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
989
990 Output:
991 Vzon, Vmer, Vver : wind velocities
992 error_code : int that states where code is terminated
993
994 0 : no error detected
995 1 : Gaussian of mean spc exceeds widthlimit
996 2 : no Gaussian of mean spc found
997             3 : SNR too low or velocity too high -> e.g. precipitation
998 4 : at least one Gaussian of cspc exceeds widthlimit
999 5 : zero out of three cspc Gaussian fits converged
1000 6 : phase slope fit could not be found
1001 7 : arrays used to fit phase have different length
1002 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1003
1004 """
1005
1006 error_code = 0
1007
1008 nChan = spc.shape[0]
1009 nProf = spc.shape[1]
1010 nPair = cspc.shape[0]
1011
1012 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1013 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1014 phase = numpy.zeros([nPair, nProf]) # phase between channels
1015 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1016 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1017 xFrec = AbbsisaRange[0][:-1] # frequency range
1018 xVel = AbbsisaRange[2][:-1] # velocity range
1019 xSamples = xFrec # the frequency range is taken
1020 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1021
1022 # only consider velocities with in NegativeLimit and PositiveLimit
1023 if (NegativeLimit is None):
1024 NegativeLimit = numpy.min(xVel)
1025 if (PositiveLimit is None):
1026 PositiveLimit = numpy.max(xVel)
1027 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1028 xSamples_zoom = xSamples[xvalid]
926         Pot = numpy.nansum( ySamples ) # Power, 0th moment
927         yNorm = ySamples / Pot
928         Vr = numpy.nansum( yNorm * xSamples ) # Radial velocity, mu, Doppler shift, 1st moment
929         Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # 2nd moment
930         Desv = Sigma2**0.5 # Standard deviation, spectral width
931
932 return numpy.array([Pot, Vr, Desv])
933
934 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit):
935
1029 936
937
938 ySamples=numpy.ones([spc.shape[0],spc.shape[1]])
939 phase=numpy.ones([spc.shape[0],spc.shape[1]])
940 CSPCSamples=numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_)
941 coherence=numpy.ones([spc.shape[0],spc.shape[1]])
942 PhaseSlope=numpy.zeros(spc.shape[0])
943 PhaseInter=numpy.ones(spc.shape[0])
944 xFrec=AbbsisaRange[0][0:spc.shape[1]]
945 xVel =AbbsisaRange[2][0:spc.shape[1]]
946 Vv=numpy.empty(spc.shape[2])*0
947 SPCav = numpy.average(spc, axis=0)-numpy.average(noise) #spc[0]-noise[0]#
948
949 SPCmoments = self.Moments(SPCav[:,Height], xVel )
950 CSPCmoments = []
951 cspcNoise = numpy.empty(3)
952
1030 953 '''Getting Eij and Nij'''
1031 Xi01, Xi02, Xi12 = ChanDist[:,0]
1032 Eta01, Eta02, Eta12 = ChanDist[:,1]
1033
1034 # spwd limit - updated by D. Scipión 30.03.2021
1035 widthlimit = 10
1036 '''************************* SPC is normalized ********************************'''
1037 spc_norm = spc.copy()
1038 # For each channel
1039 for i in range(nChan):
1040 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1041 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1042
1043 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1044
1045 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1046 you only fit the curve and don't need the absolute value of height for calculation,
1047 only for estimation of width. for normalization of cross spectra, you need initial,
1048 unnormalized self-spectra With noise.
1049
1050 Technically, you don't even need to normalize the self-spectra, as you only need the
1051 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1052 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1053 >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
1054 """
1055 # initial conditions
1056 popt = [1e-10,0,1e-10]
1057 # Spectra average
1058 SPCMean = numpy.average(SPC_Samples,0)
1059 # Moments in frequency
1060 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1061
1062 # Gauss Fit SPC in frequency domain
1063 if dbSNR > SNRlimit: # only if SNR > SNRth
954
955 Xi01=ChanDist[0][0]
956 Eta01=ChanDist[0][1]
957
958 Xi02=ChanDist[1][0]
959 Eta02=ChanDist[1][1]
960
961 Xi12=ChanDist[2][0]
962 Eta12=ChanDist[2][1]
963
964 z = spc.copy()
965 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
966
967 for i in range(spc.shape[0]):
968
969 '''****** Line of Data SPC ******'''
970             zline=z[i,:,Height].copy() - noise[i] # noise is subtracted
971
972             '''****** SPC is normalized ******'''
973             SmoothSPC =self.moving_average(zline.copy(),N=1) # the noise is smoothed
974             FactNorm = SmoothSPC/numpy.nansum(SmoothSPC) # normalized and smoothed SPC
975
976             xSamples = xFrec # the frequency range is taken
977             ySamples[i] = FactNorm # the normalized SPC values are taken
978
979 for i in range(spc.shape[0]):
980
981 '''****** Line of Data CSPC ******'''
982             cspcLine = ( cspc[i,:,Height].copy())# - noise[i] ) # no! the noise is not subtracted here
983             SmoothCSPC =self.moving_average(cspcLine,N=1) # the noise is smoothed
984             cspcNorm = SmoothCSPC/numpy.nansum(SmoothCSPC) # normalized and smoothed CSPC
985
986 '''****** CSPC is normalized with respect to Briggs and Vincent ******'''
987 chan_index0 = pairsList[i][0]
988 chan_index1 = pairsList[i][1]
989
990 CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2
991 CSPCNorm = cspcNorm / numpy.sqrt(CSPCFactor)
992
993 CSPCSamples[i] = CSPCNorm
994
995 coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
996
997 #coherence[i]= self.moving_average(coherence[i],N=1)
998
999 phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
1000
1001 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPCSamples[0]), xSamples),
1002 self.Moments(numpy.abs(CSPCSamples[1]), xSamples),
1003 self.Moments(numpy.abs(CSPCSamples[2]), xSamples)])
1004
1005
1006 popt=[1e-10,0,1e-10]
1007 popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10]
1008 FitGauss01, FitGauss02, FitGauss12 = numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0
1009
1010 CSPCMask01 = numpy.abs(CSPCSamples[0])
1011 CSPCMask02 = numpy.abs(CSPCSamples[1])
1012 CSPCMask12 = numpy.abs(CSPCSamples[2])
1013
1014 mask01 = ~numpy.isnan(CSPCMask01)
1015 mask02 = ~numpy.isnan(CSPCMask02)
1016 mask12 = ~numpy.isnan(CSPCMask12)
1017
1018 #mask = ~numpy.isnan(CSPCMask01)
1019 CSPCMask01 = CSPCMask01[mask01]
1020 CSPCMask02 = CSPCMask02[mask02]
1021 CSPCMask12 = CSPCMask12[mask12]
1022 #CSPCMask01 = numpy.ma.masked_invalid(CSPCMask01)
1023
1024
1025
1026 '''***Fit Gauss CSPC01***'''
1027 if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3 :
1064 1028 try:
1065 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1066 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1067 return self.StopWindEstimation(error_code = 1)
1068 FitGauss = self.gaus(xSamples_zoom,*popt)
1029 popt01,pcov = curve_fit(self.gaus,xSamples[mask01],numpy.abs(CSPCMask01),p0=CSPCmoments[0])
1030 popt02,pcov = curve_fit(self.gaus,xSamples[mask02],numpy.abs(CSPCMask02),p0=CSPCmoments[1])
1031 popt12,pcov = curve_fit(self.gaus,xSamples[mask12],numpy.abs(CSPCMask12),p0=CSPCmoments[2])
1032 FitGauss01 = self.gaus(xSamples,*popt01)
1033 FitGauss02 = self.gaus(xSamples,*popt02)
1034 FitGauss12 = self.gaus(xSamples,*popt12)
1035 except:
1036 FitGauss01=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[0]))
1037 FitGauss02=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[1]))
1038 FitGauss12=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[2]))
1039
1040
1041 CSPCopt = numpy.vstack([popt01,popt02,popt12])
1042
1043 '''****** Getting fij width ******'''
1044
1045 yMean = numpy.average(ySamples, axis=0) # ySamples[0]
1046
1047 '''******* Getting fitting Gaussian *******'''
1048         meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, radial velocity (frequency)
1049         sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Variance, spectral width (frequency)
1050
1051 yMoments = self.Moments(yMean, xSamples)
1052
1053 if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3: # and abs(meanGauss/sigma2) > 0.00001:
1054 try:
1055 popt,pcov = curve_fit(self.gaus,xSamples,yMean,p0=yMoments)
1056 FitGauss=self.gaus(xSamples,*popt)
1057
1069 1058 except :#RuntimeError:
1070 return self.StopWindEstimation(error_code = 2)
1059 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1060
1061
1071 1062 else:
1072 return self.StopWindEstimation(error_code = 3)
1073
1074 '''***************************** CSPC Normalization *************************
1075 The Spc spectra are used to normalize the crossspectra. Peaks from precipitation
1076 influence the norm which is not desired. First, a range is identified where the
1077 wind peak is estimated -> sum_wind is sum of those frequencies. Next, the area
1078 around it gets cut off and values replaced by mean determined by the boundary
1079 data -> sum_noise (spc is not normalized here, thats why the noise is important)
1080
1081 The sums are then added and multiplied by range/datapoints, because you need
1082 an integral and not a sum for normalization.
1083
1084 A norm is found according to Briggs 92.
1085 '''
1086 # for each pair
1087 for i in range(nPair):
1088 cspc_norm = cspc[i,:].copy()
1089 chan_index0 = pairsList[i][0]
1090 chan_index1 = pairsList[i][1]
1091 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1092 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1093
1094 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1095 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1096 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1097
1098 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1099 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1100
1101 '''*******************************FIT GAUSS CSPC************************************'''
1102 try:
1103 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1104 if popt01[2] > widthlimit: # CONDITION
1105 return self.StopWindEstimation(error_code = 4)
1106 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1107 if popt02[2] > widthlimit: # CONDITION
1108 return self.StopWindEstimation(error_code = 4)
1109 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1110 if popt12[2] > widthlimit: # CONDITION
1111 return self.StopWindEstimation(error_code = 4)
1112
1113 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1114 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1115 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1116 except:
1117 return self.StopWindEstimation(error_code = 5)
1118
1119
1120 '''************* Getting Fij ***************'''
1121 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1122 GaussCenter = popt[1]
1123 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1124 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1125
1126 # Point where e^-1 is located in the gaussian
1127 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1128 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to"Peminus1" in "FitGauss"
1063 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1064
1065
1066
1067 '''****** Getting Fij ******'''
1068 Fijcspc = CSPCopt[:,2]/2*3
1069
1070
1071 GaussCenter = popt[1] #xFrec[GCpos]
1072         # Point on the x-axis of the Gaussian where the center is located
1073         ClosestCenter = xSamples[numpy.abs(xSamples-GaussCenter).argmin()]
1074         PointGauCenter = numpy.where(xSamples==ClosestCenter)[0][0]
1075
1076         # Point e^-1 located on the Gaussian
1077         PeMinus1 = numpy.max(FitGauss)* numpy.exp(-1)
1078         FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The point closest to "Peminus1" within "FitGauss"
1129 1079 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1130 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1131
1132 '''********** Taking frequency ranges from mean SPCs **********'''
1133 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1080
1081 if xSamples[PointFij] > xSamples[PointGauCenter]:
1082 Fij = xSamples[PointFij] - xSamples[PointGauCenter]
1083
1084 else:
1085 Fij = xSamples[PointGauCenter] - xSamples[PointFij]
1086
1087
1088 '''****** Taking frequency ranges from SPCs ******'''
1089
1090
1091 #GaussCenter = popt[1] #Primer momento 01
1092 GauWidth = popt[2] *3/2 #Ancho de banda de Gau01
1134 1093 Range = numpy.empty(2)
1135 1094 Range[0] = GaussCenter - GauWidth
1136 Range[1] = GaussCenter + GauWidth
1137 # Point in x-axis where the bandwidth is located (min:max)
1138 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1139 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1140 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1141 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1142 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1143 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1144
1145 '''************************** Getting Phase Slope ***************************'''
1146 for i in range(nPair):
1147 if len(FrecRange) > 5:
1148 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1095 Range[1] = GaussCenter + GauWidth
1096         # Point on the x-axis of the Gaussian where the bandwidth is located (min:max)
1097 ClosRangeMin = xSamples[numpy.abs(xSamples-Range[0]).argmin()]
1098 ClosRangeMax = xSamples[numpy.abs(xSamples-Range[1]).argmin()]
1099
1100 PointRangeMin = numpy.where(xSamples==ClosRangeMin)[0][0]
1101 PointRangeMax = numpy.where(xSamples==ClosRangeMax)[0][0]
1102
1103 Range=numpy.array([ PointRangeMin, PointRangeMax ])
1104
1105 FrecRange = xFrec[ Range[0] : Range[1] ]
1106 VelRange = xVel[ Range[0] : Range[1] ]
1107
1108
1109 '''****** Getting SCPC Slope ******'''
1110
1111 for i in range(spc.shape[0]):
1112
1113 if len(FrecRange)>5 and len(FrecRange)<spc.shape[1]*0.3:
1114 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1115
1116 '''***********************VelRange******************'''
1117
1149 1118 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1119
1150 1120 if len(FrecRange) == len(PhaseRange):
1151 1121 try:
1152 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1153 PhaseSlope[i] = slope
1154 PhaseInter[i] = intercept
1122 slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange[mask], PhaseRange[mask])
1123 PhaseSlope[i]=slope
1124 PhaseInter[i]=intercept
1155 1125 except:
1156 return self.StopWindEstimation(error_code = 6)
1126 PhaseSlope[i]=0
1127 PhaseInter[i]=0
1157 1128 else:
1158 return self.StopWindEstimation(error_code = 7)
1129 PhaseSlope[i]=0
1130 PhaseInter[i]=0
1159 1131 else:
1160 return self.StopWindEstimation(error_code = 8)
1161
1162 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
1163
1164 '''Getting constant C'''
1165 cC=(Fij*numpy.pi)**2
1166
1167 '''****** Getting constants F and G ******'''
1168 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1169 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1170 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1171 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1172 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1173 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1174 MijResults = numpy.array([MijResult1, MijResult2])
1175 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1176
1177 '''****** Getting constants A, B and H ******'''
1178 W01 = numpy.nanmax( FitGauss01 )
1179 W02 = numpy.nanmax( FitGauss02 )
1180 W12 = numpy.nanmax( FitGauss12 )
1181
1182 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1183 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1184 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1185 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1186
1187 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1188 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1189
1190 VxVy = numpy.array([[cA,cH],[cH,cB]])
1191 VxVyResults = numpy.array([-cF,-cG])
1192 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1193 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1194 error_code = 0
1195
1196 return Vzon, Vmer, Vver, error_code
1197
1132 PhaseSlope[i]=0
1133 PhaseInter[i]=0
1134
1135
1136 '''Getting constant C'''
1137 cC=(Fij*numpy.pi)**2
1138
1139 '''****** Getting constants F and G ******'''
1140 MijEijNij=numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1141 MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi)
1142 MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
1143 MijResults=numpy.array([MijResult0,MijResult1])
1144 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1145
1146 '''****** Getting constants A, B and H ******'''
1147 W01=numpy.nanmax( FitGauss01 ) #numpy.abs(CSPCSamples[0]))
1148 W02=numpy.nanmax( FitGauss02 ) #numpy.abs(CSPCSamples[1]))
1149 W12=numpy.nanmax( FitGauss12 ) #numpy.abs(CSPCSamples[2]))
1150
1151 WijResult0=((cF*Xi01+cG*Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC))
1152 WijResult1=((cF*Xi02+cG*Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC))
1153 WijResult2=((cF*Xi12+cG*Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC))
1154
1155 WijResults=numpy.array([WijResult0, WijResult1, WijResult2])
1156
1157 WijEijNij=numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1158 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1159
1160 VxVy=numpy.array([[cA,cH],[cH,cB]])
1161 VxVyResults=numpy.array([-cF,-cG])
1162 (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
1163
1164 Vzon = Vy
1165 Vmer = Vx
1166 Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1167 Vang=numpy.arctan2(Vmer,Vzon)
1168 if numpy.abs( popt[1] ) < 3.5 and len(FrecRange)>4:
1169 Vver=popt[1]
1170 else:
1171 Vver=numpy.NaN
1172 FitGaussCSPC = numpy.array([FitGauss01,FitGauss02,FitGauss12])
1173
1174
1175 return Vzon, Vmer, Vver, GaussCenter, PhaseSlope, FitGaussCSPC
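A hedged sketch of the final linear solves in WindEstimation() above, following the Briggs and Vincent (1992) convention; the numeric inputs are placeholders, only the structure mirrors the code:

import numpy

Xi01, Xi02, Xi12 = 5.5, 11.0, 5.5              # hypothetical receiver separations
Eta01, Eta02, Eta12 = 0.0, 5.0, 5.0
cC = 2.0                                        # (Fij*pi)**2 from the fitted mean spectrum
PhaseSlope = numpy.array([0.01, 0.02, 0.015])   # fitted phase slopes per channel pair

# constants F and G from two of the phase slopes
MijEijNij = numpy.array([[Xi02, Eta02], [Xi12, Eta12]])
MijResults = numpy.array([-PhaseSlope[1] * cC, -PhaseSlope[2] * cC]) / (2 * numpy.pi)
cF, cG = numpy.linalg.solve(MijEijNij, MijResults)

# constants A, B and H from the three cross-spectra peak amplitudes W01, W02, W12
W01, W02, W12 = 0.9, 0.8, 0.85
WijResults = numpy.array([
    ((cF * Xi01 + cG * Eta01)**2) / cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC)),
    ((cF * Xi02 + cG * Eta02)**2) / cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC)),
    ((cF * Xi12 + cG * Eta12)**2) / cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))])
WijEijNij = numpy.array([[Xi01**2, Eta01**2, 2 * Xi01 * Eta01],
                         [Xi02**2, Eta02**2, 2 * Xi02 * Eta02],
                         [Xi12**2, Eta12**2, 2 * Xi12 * Eta12]])
cA, cB, cH = numpy.linalg.solve(WijEijNij, WijResults)

# horizontal wind components from the 2x2 system
Vmer, Vzon = numpy.linalg.solve(numpy.array([[cA, cH], [cH, cB]]),
                                numpy.array([-cF, -cG]))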
1176
1198 1177 class SpectralMoments(Operation):
1199
1178
1200 1179 '''
1201 1180 Function SpectralMoments()
1202
1181
1203 1182 Calculates moments (power, mean, standard deviation) and SNR of the signal
1204
1183
1205 1184 Type of dataIn: Spectra
1206
1185
1207 1186 Configuration Parameters:
1208
1187
1209 1188 dirCosx : Cosine director in X axis
1210 1189 dirCosy : Cosine director in Y axis
1211
1190
1212 1191 elevation :
1213 1192 azimuth :
1214
1193
1215 1194 Input:
1216 channelList : simple channel list to select e.g. [2,3,7]
1195 channelList : simple channel list to select e.g. [2,3,7]
1217 1196 self.dataOut.data_pre : Spectral data
1218 1197 self.dataOut.abscissaList : List of frequencies
1219 1198 self.dataOut.noise : Noise level per channel
1220
1199
1221 1200 Affected:
1222 1201 self.dataOut.moments : Parameters per channel
1223 self.dataOut.data_snr : SNR per channel
1224
1202 self.dataOut.data_SNR : SNR per channel
1203
1225 1204 '''
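A hedged numeric sketch of the per-height moment estimates computed in __calculateMoments() below, for a toy spectrum with a uniform window (values are illustrative):

import numpy

freq = numpy.linspace(-5, 5, 64)
n0   = 0.1                                                # noise level
spec = n0 + numpy.exp(-0.5 * ((freq - 1.0) / 0.8)**2)     # toy peak at +1

signal = spec - n0
power = signal.sum()                                      # 0th moment (power)
fd    = (signal * freq).sum() / power                     # 1st moment (Doppler shift), ~1.0
w     = numpy.sqrt((signal * (freq - fd)**2).sum() / power)  # 2nd moment (width), ~0.8
snr   = (spec.mean() - n0) / n0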
1226
1205
1227 1206 def run(self, dataOut):
1228
1207
1208 #dataOut.data_pre = dataOut.data_pre[0]
1229 1209 data = dataOut.data_pre[0]
1230 1210 absc = dataOut.abscissaList[:-1]
1231 1211 noise = dataOut.noise
1232 1212 nChannel = data.shape[0]
1233 1213 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1234
1214
1235 1215 for ind in range(nChannel):
1236 1216 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1237
1217
1238 1218 dataOut.moments = data_param[:,1:,:]
1239 dataOut.data_snr = data_param[:,0]
1240 dataOut.data_pow = data_param[:,1]
1241 dataOut.data_dop = data_param[:,2]
1242 dataOut.data_width = data_param[:,3]
1243
1219 dataOut.data_SNR = data_param[:,0]
1220 dataOut.data_POW = data_param[:,1]
1221 dataOut.data_DOP = data_param[:,2]
1222 dataOut.data_WIDTH = data_param[:,3]
1244 1223 return dataOut
1245
1246 def __calculateMoments(self, oldspec, oldfreq, n0,
1224
1225 def __calculateMoments(self, oldspec, oldfreq, n0,
1247 1226 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1248
1227
1249 1228 if (nicoh is None): nicoh = 1
1250 if (graph is None): graph = 0
1229 if (graph is None): graph = 0
1251 1230 if (smooth is None): smooth = 0
1252 1231 elif (self.smooth < 3): smooth = 0
1253 1232
@@ -1258,105 +1237,98 class SpectralMoments(Operation):
1258 1237 if (aliasing is None): aliasing = 0
1259 1238 if (oldfd is None): oldfd = 0
1260 1239 if (wwauto is None): wwauto = 0
1261
1240
1262 1241 if (n0 < 1.e-20): n0 = 1.e-20
1263
1242
1264 1243 freq = oldfreq
1265 1244 vec_power = numpy.zeros(oldspec.shape[1])
1266 1245 vec_fd = numpy.zeros(oldspec.shape[1])
1267 1246 vec_w = numpy.zeros(oldspec.shape[1])
1268 1247 vec_snr = numpy.zeros(oldspec.shape[1])
1269
1270 # oldspec = numpy.ma.masked_invalid(oldspec)
1248
1249 oldspec = numpy.ma.masked_invalid(oldspec)
1271 1250
1272 1251 for ind in range(oldspec.shape[1]):
1273
1252
1274 1253 spec = oldspec[:,ind]
1275 1254 aux = spec*fwindow
1276 1255 max_spec = aux.max()
1277 m = aux.tolist().index(max_spec)
1278
1279 # Smooth
1280 if (smooth == 0):
1281 spec2 = spec
1282 else:
1283 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1284
1285 # Moments Estimation
1286 bb = spec2[numpy.arange(m,spec2.size)]
1256 m = list(aux).index(max_spec)
1257
1258 #Smooth
1259 if (smooth == 0): spec2 = spec
1260 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1261
1262 # Moment estimation
1263 bb = spec2[list(range(m,spec2.size))]
1287 1264 bb = (bb<n0).nonzero()
1288 1265 bb = bb[0]
1289
1290 ss = spec2[numpy.arange(0,m + 1)]
1266
1267 ss = spec2[list(range(0,m + 1))]
1291 1268 ss = (ss<n0).nonzero()
1292 1269 ss = ss[0]
1293
1270
1294 1271 if (bb.size == 0):
1295 1272 bb0 = spec.size - 1 - m
1296 else:
1273 else:
1297 1274 bb0 = bb[0] - 1
1298 1275 if (bb0 < 0):
1299 1276 bb0 = 0
1300
1301 if (ss.size == 0):
1302 ss1 = 1
1303 else:
1304 ss1 = max(ss) + 1
1305
1306 if (ss1 > m):
1307 ss1 = m
1308
1309 valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1310
1311 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1312 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1313 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1314 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1315 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1316 snr = (spec2.mean()-n0)/n0
1317 if (snr < 1.e-20) :
1277
1278 if (ss.size == 0): ss1 = 1
1279 else: ss1 = max(ss) + 1
1280
1281 if (ss1 > m): ss1 = m
1282
1283 valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
1284 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1285 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1286 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
1287 snr = (spec2.mean()-n0)/n0
1288
1289 if (snr < 1.e-20) :
1318 1290 snr = 1.e-20
1319
1320 # vec_power[ind] = power #D. Scipión replaced with the line below
1321 vec_power[ind] = total_power
1291
1292 vec_power[ind] = power
1322 1293 vec_fd[ind] = fd
1323 1294 vec_w[ind] = w
1324 1295 vec_snr[ind] = snr
1325
1326 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1327
1296
1297 moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1298 return moments
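# For reference, the estimators computed over the valid window above are
# (S = smoothed spectrum, n0 = noise, f = frequency, fw = fwindow):
#   power = sum( (S - n0) * fw )
#   fd    = sum( (S - n0) * f * fw ) / power                    # Doppler shift
#   w     = sqrt( sum( (S - n0) * fw * (f - fd)**2 ) / power )  # spectral width
#   snr   = ( mean(S) - n0 ) / n0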
1299
1328 1300 #------------------ Get SA Parameters --------------------------
1329
1301
1330 1302 def GetSAParameters(self):
1331 1303 #SA en frecuencia
1332 1304 pairslist = self.dataOut.groupList
1333 1305 num_pairs = len(pairslist)
1334
1306
1335 1307 vel = self.dataOut.abscissaList
1336 1308 spectra = self.dataOut.data_pre
1337 1309 cspectra = self.dataIn.data_cspc
1338 delta_v = vel[1] - vel[0]
1339
1310 delta_v = vel[1] - vel[0]
1311
1340 1312 #Calculating the power spectrum
1341 1313 spc_pow = numpy.sum(spectra, 3)*delta_v
1342 1314 #Normalizing Spectra
1343 1315 norm_spectra = spectra/spc_pow
1344 1316 #Calculating the norm_spectra at peak
1345 max_spectra = numpy.max(norm_spectra, 3)
1346
1317 max_spectra = numpy.max(norm_spectra, 3)
1318
1347 1319 #Normalizing Cross Spectra
1348 1320 norm_cspectra = numpy.zeros(cspectra.shape)
1349
1321
1350 1322 for i in range(num_chan):
1351 1323 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1352
1324
1353 1325 max_cspectra = numpy.max(norm_cspectra,2)
1354 1326 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1355
1327
1356 1328 for i in range(num_pairs):
1357 1329 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1358 1330 #------------------- Get Lags ----------------------------------
1359
1331
1360 1332 class SALags(Operation):
1361 1333 '''
1362 1334 Function SALags()
@@ -1366,281 +1338,1192 class SALags(Operation):
1366 1338 self.dataOut.abscissaList
1367 1339 self.dataOut.noise
1368 1340 self.dataOut.normFactor
1369 self.dataOut.data_snr
1341 self.dataOut.data_SNR
1370 1342 self.dataOut.groupList
1371 1343 self.dataOut.nChannels
1372
1344
1373 1345 Affected:
1374 1346 self.dataOut.data_param
1375
1347
1376 1348 '''
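# Layout of dataOut.data_param as filled below (inferred from __calculateTaus
# and __calculateLag1Phase): the first len(pairs_ccf) rows hold the
# cross-correlation lags (tau_ccf), the next rows the autocorrelation lags
# (tau_acf), and the last row the lag-1 phase of the mean ACF.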
1377 def run(self, dataOut):
1349 def run(self, dataOut):
1378 1350 data_acf = dataOut.data_pre[0]
1379 1351 data_ccf = dataOut.data_pre[1]
1380 1352 normFactor_acf = dataOut.normFactor[0]
1381 1353 normFactor_ccf = dataOut.normFactor[1]
1382 1354 pairs_acf = dataOut.groupList[0]
1383 1355 pairs_ccf = dataOut.groupList[1]
1384
1356
1385 1357 nHeights = dataOut.nHeights
1386 1358 absc = dataOut.abscissaList
1387 1359 noise = dataOut.noise
1388 SNR = dataOut.data_snr
1360 SNR = dataOut.data_SNR
1389 1361 nChannels = dataOut.nChannels
1390 1362 # pairsList = dataOut.groupList
1391 1363 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1392 1364
1393 1365 for l in range(len(pairs_acf)):
1394 1366 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1395
1367
1396 1368 for l in range(len(pairs_ccf)):
1397 1369 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1398
1370
1399 1371 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1400 1372 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1401 1373 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1402 1374 return
1403
1375
1404 1376 # def __getPairsAutoCorr(self, pairsList, nChannels):
1405 #
1377 #
1406 1378 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1407 #
1408 # for l in range(len(pairsList)):
1379 #
1380 # for l in range(len(pairsList)):
1409 1381 # firstChannel = pairsList[l][0]
1410 1382 # secondChannel = pairsList[l][1]
1411 #
1412 # # Getting autocorrelation pairs
1383 #
1384 # # Getting autocorrelation pairs
1413 1385 # if firstChannel == secondChannel:
1414 1386 # pairsAutoCorr[firstChannel] = int(l)
1415 #
1387 #
1416 1388 # pairsAutoCorr = pairsAutoCorr.astype(int)
1417 #
1389 #
1418 1390 # pairsCrossCorr = range(len(pairsList))
1419 1391 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1420 #
1392 #
1421 1393 # return pairsAutoCorr, pairsCrossCorr
1422
1394
1423 1395 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1424
1396
1425 1397 lag0 = data_acf.shape[1]/2
1426 1398 # Autocorrelation function
1427 1399 mean_acf = stats.nanmean(data_acf, axis = 0)
1428
1400
1429 1401 # Get TauCross index
1430 1402 ind_ccf = data_ccf.argmax(axis = 1)
1431 1403 # Get TauAuto index
1432 1404 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1433 1405 ccf_lag0 = data_ccf[:,lag0,:]
1434
1406
1435 1407 for i in range(ccf_lag0.shape[0]):
1436 1408 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1437
1409
1438 1410 # Get TauCross and TauAuto
1439 1411 tau_ccf = lagRange[ind_ccf]
1440 1412 tau_acf = lagRange[ind_acf]
1441
1413
1442 1414 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1443
1415
1444 1416 tau_ccf[Nan1,Nan2] = numpy.nan
1445 1417 tau_acf[Nan1,Nan2] = numpy.nan
1446 1418 tau = numpy.vstack((tau_ccf,tau_acf))
1447
1419
1448 1420 return tau
1449
1421
1450 1422 def __calculateLag1Phase(self, data, lagTRange):
1451 1423 data1 = stats.nanmean(data, axis = 0)
1452 1424 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1453 1425
1454 1426 phase = numpy.angle(data1[lag1,:])
1455
1427
1456 1428 return phase
1457 1429
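# fit_func is the Gaussian-shaped model used by CleanRayleigh below to fit the
# histogram of log-spectral amplitudes (curve_fit(fit_func, ...)). Note that, as
# written, the exponent divides z**2 by a2 rather than by 2, so a2 is not a
# standard deviation in the textbook sense.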
1430 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
1431 z = (x - a1) / a2
1432 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
1433 return y
1434
1458 1435 class SpectralFitting(Operation):
1459 1436 '''
1460 1437 Function SpectralFitting()
1461
1438
1462 1439 Input:
1463 1440 Output:
1464 1441 Variables modified:
1465 1442 '''
1443 def __calculateMoments(self,oldspec, oldfreq, n0, nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1444
1445 if (nicoh is None): nicoh = 1
1446 if (graph is None): graph = 0
1447 if (smooth is None): smooth = 0
1448 elif (self.smooth < 3): smooth = 0
1449
1450 if (type1 is None): type1 = 0
1451 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1452 if (snrth is None): snrth = -3
1453 if (dc is None): dc = 0
1454 if (aliasing is None): aliasing = 0
1455 if (oldfd is None): oldfd = 0
1456 if (wwauto is None): wwauto = 0
1457
1458 if (n0 < 1.e-20): n0 = 1.e-20
1459
1460 freq = oldfreq
1461 vec_power = numpy.zeros(oldspec.shape[1])
1462 vec_fd = numpy.zeros(oldspec.shape[1])
1463 vec_w = numpy.zeros(oldspec.shape[1])
1464 vec_snr = numpy.zeros(oldspec.shape[1])
1465
1466 oldspec = numpy.ma.masked_invalid(oldspec)
1467
1468 for ind in range(oldspec.shape[1]):
1469
1470 spec = oldspec[:,ind]
1471 aux = spec*fwindow
1472 max_spec = aux.max()
1473 m = list(aux).index(max_spec)
1474
1475 #Smooth
1476 if (smooth == 0): spec2 = spec
1477 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1478
1479 # Moment estimation
1480 bb = spec2[list(range(m,spec2.size))]
1481 bb = (bb<n0).nonzero()
1482 bb = bb[0]
1483
1484 ss = spec2[list(range(0,m + 1))]
1485 ss = (ss<n0).nonzero()
1486 ss = ss[0]
1487
1488 if (bb.size == 0):
1489 bb0 = spec.size - 1 - m
1490 else:
1491 bb0 = bb[0] - 1
1492 if (bb0 < 0):
1493 bb0 = 0
1494
1495 if (ss.size == 0): ss1 = 1
1496 else: ss1 = max(ss) + 1
1497
1498 if (ss1 > m): ss1 = m
1499
1500 valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
1501 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1502 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1503 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
1504 snr = (spec2.mean()-n0)/n0
1505
1506 if (snr < 1.e-20) :
1507 snr = 1.e-20
1508
1509 vec_power[ind] = power
1510 vec_fd[ind] = fd
1511 vec_w[ind] = w
1512 vec_snr[ind] = snr
1513
1514 moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1515 return moments
1516
1517 #def __DiffCoherent(self,snrth, spectra, cspectra, nProf, heights,nChan, nHei, nPairs, channels, noise, crosspairs):
1518 def __DiffCoherent(self, spectra, cspectra, dataOut, noise, snrth, coh_th, hei_th):
1519
1520 import matplotlib.pyplot as plt
1521 nProf = dataOut.nProfiles
1522 heights = dataOut.heightList
1523 nHei = len(heights)
1524 channels = dataOut.channelList
1525 nChan = len(channels)
1526 crosspairs = dataOut.groupList
1527 nPairs = len(crosspairs)
1528 # Separate incoherent from coherent spectra (SNR > 20 dB)
1529 snr_th = 10**(snrth/10.0)
1530 my_incoh_spectra = numpy.zeros([nChan, nProf,nHei], dtype='float')
1531 my_incoh_cspectra = numpy.zeros([nPairs,nProf, nHei], dtype='complex')
1532 my_incoh_aver = numpy.zeros([nChan, nHei])
1533 my_coh_aver = numpy.zeros([nChan, nHei])
1534
1535 coh_spectra = numpy.zeros([nChan, nProf, nHei], dtype='float')
1536 coh_cspectra = numpy.zeros([nPairs, nProf, nHei], dtype='complex')
1537 coh_aver = numpy.zeros([nChan, nHei])
1538
1539 incoh_spectra = numpy.zeros([nChan, nProf, nHei], dtype='float')
1540 incoh_cspectra = numpy.zeros([nPairs, nProf, nHei], dtype='complex')
1541 incoh_aver = numpy.zeros([nChan, nHei])
1542 power = numpy.sum(spectra, axis=1)
1543
1544 if coh_th == None : coh_th = numpy.array([0.75,0.65,0.15]) # 0.65
1545 if hei_th == None : hei_th = numpy.array([60,300,650])
1546 for ic in range(2):
1547 pair = crosspairs[ic]
1548 # If the SNR is above the SNR threshold, the data are taken as coherent
1549 s_n0 = power[pair[0],:]/noise[pair[0]]
1550 s_n1 = power[pair[1],:]/noise[pair[1]]
1551
1552 valid1 =(s_n0>=snr_th).nonzero()
1553 valid2 = (s_n1>=snr_th).nonzero()
1554 #valid = valid2 + valid1 #numpy.concatenate((valid1,valid2), axis=None)
1555 valid1 = numpy.array(valid1[0])
1556 valid2 = numpy.array(valid2[0])
1557 valid = valid1
1558 for iv in range(len(valid2)):
1559 #for ivv in range(len(valid1)) :
1560 indv = numpy.array((valid1 == valid2[iv]).nonzero())
1561 if len(indv[0]) == 0 :
1562 valid = numpy.concatenate((valid,valid2[iv]), axis=None)
1563 if len(valid)>0:
1564 my_coh_aver[pair[0],valid]=1
1565 my_coh_aver[pair[1],valid]=1
1566 # If the coherence is above the coherence threshold, the data are taken as coherent
1567 #print my_coh_aver[0,:]
1568 coh = numpy.squeeze(numpy.nansum(cspectra[ic,:,:], axis=0)/numpy.sqrt(numpy.nansum(spectra[pair[0],:,:], axis=0)*numpy.nansum(spectra[pair[1],:,:], axis=0)))
1569 #print('coh',numpy.absolute(coh))
1570 for ih in range(len(hei_th)):
1571 hvalid = (heights>hei_th[ih]).nonzero()
1572 hvalid = hvalid[0]
1573 if len(hvalid)>0:
1574 valid = (numpy.absolute(coh[hvalid])>coh_th[ih]).nonzero()
1575 valid = valid[0]
1576 #print('hvalid:',hvalid)
1577 #print('valid', valid)
1578 if len(valid)>0:
1579 my_coh_aver[pair[0],hvalid[valid]] =1
1580 my_coh_aver[pair[1],hvalid[valid]] =1
1581
1582 coh_echoes = (my_coh_aver[pair[0],:] == 1).nonzero()
1583 incoh_echoes = (my_coh_aver[pair[0],:] != 1).nonzero()
1584 incoh_echoes = incoh_echoes[0]
1585 if len(incoh_echoes) > 0:
1586 my_incoh_spectra[pair[0],:,incoh_echoes] = spectra[pair[0],:,incoh_echoes]
1587 my_incoh_spectra[pair[1],:,incoh_echoes] = spectra[pair[1],:,incoh_echoes]
1588 my_incoh_cspectra[ic,:,incoh_echoes] = cspectra[ic,:,incoh_echoes]
1589 my_incoh_aver[pair[0],incoh_echoes] = 1
1590 my_incoh_aver[pair[1],incoh_echoes] = 1
1466 1591
1467 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1592
1593 for ic in range(2):
1594 pair = crosspairs[ic]
1595
1596 valid1 =(my_coh_aver[pair[0],:]==1 ).nonzero()
1597 valid2 = (my_coh_aver[pair[1],:]==1).nonzero()
1598 valid1 = numpy.array(valid1[0])
1599 valid2 = numpy.array(valid2[0])
1600 valid = valid1
1601 #print valid1 , valid2
1602 for iv in range(len(valid2)):
1603 #for ivv in range(len(valid1)) :
1604 indv = numpy.array((valid1 == valid2[iv]).nonzero())
1605 if len(indv[0]) == 0 :
1606 valid = numpy.concatenate((valid,valid2[iv]), axis=None)
1607 #print valid
1608 #valid = numpy.concatenate((valid1,valid2), axis=None)
1609 valid1 =(my_coh_aver[pair[0],:] !=1 ).nonzero()
1610 valid2 = (my_coh_aver[pair[1],:] !=1).nonzero()
1611 valid1 = numpy.array(valid1[0])
1612 valid2 = numpy.array(valid2[0])
1613 incoh_echoes = valid1
1614 #print valid1, valid2
1615 #incoh_echoes= numpy.concatenate((valid1,valid2), axis=None)
1616 for iv in range(len(valid2)):
1617 #for ivv in range(len(valid1)) :
1618 indv = numpy.array((valid1 == valid2[iv]).nonzero())
1619 if len(indv[0]) == 0 :
1620 incoh_echoes = numpy.concatenate(( incoh_echoes,valid2[iv]), axis=None)
1621 #print incoh_echoes
1622 if len(valid)>0:
1623 #print pair
1624 coh_spectra[pair[0],:,valid] = spectra[pair[0],:,valid]
1625 coh_spectra[pair[1],:,valid] = spectra[pair[1],:,valid]
1626 coh_cspectra[ic,:,valid] = cspectra[ic,:,valid]
1627 coh_aver[pair[0],valid]=1
1628 coh_aver[pair[1],valid]=1
1629 if len(incoh_echoes)>0:
1630 incoh_spectra[pair[0],:,incoh_echoes] = spectra[pair[0],:,incoh_echoes]
1631 incoh_spectra[pair[1],:,incoh_echoes] = spectra[pair[1],:,incoh_echoes]
1632 incoh_cspectra[ic,:,incoh_echoes] = cspectra[ic,:,incoh_echoes]
1633 incoh_aver[pair[0],incoh_echoes]=1
1634 incoh_aver[pair[1],incoh_echoes]=1
1635 #plt.imshow(spectra[0,:,:],vmin=20000000)
1636 #plt.show()
1637 #my_incoh_aver = my_incoh_aver+1
1638
1639 #spec = my_incoh_spectra.copy()
1640 #cspec = my_incoh_cspectra.copy()
1641 #print('######################', spec)
1642 #print(self.numpy)
1643 #return spec, cspec,coh_aver
1644 return my_incoh_spectra ,my_incoh_cspectra,my_incoh_aver,my_coh_aver, incoh_spectra, coh_spectra, incoh_cspectra, coh_cspectra, incoh_aver, coh_aver
1645
1646 def __CleanCoherent(self,snrth, spectra, cspectra, coh_aver,dataOut, noise,clean_coh_echoes,index):
1647
1648 import matplotlib.pyplot as plt
1649 nProf = dataOut.nProfiles
1650 heights = dataOut.heightList
1651 nHei = len(heights)
1652 channels = dataOut.channelList
1653 nChan = len(channels)
1654 crosspairs = dataOut.groupList
1655 nPairs = len(crosspairs)
1656
1657 #data = dataOut.data_pre[0]
1658 absc = dataOut.abscissaList[:-1]
1659 #noise = dataOut.noise
1660 #nChannel = data.shape[0]
1661 data_param = numpy.zeros((nChan, 4, spectra.shape[2]))
1662
1663
1664 #plt.plot(absc)
1665 #plt.show()
1666 clean_coh_spectra = spectra.copy()
1667 clean_coh_cspectra = cspectra.copy()
1668 clean_coh_aver = coh_aver.copy()
1669
1670 spwd_th=[10,6] #spwd_th[0] --> For satellites ; spwd_th[1] --> For special events like SUN.
1671 coh_th = 0.75
1672
1673 rtime0 = [6,18] # period without ESF
1674 rtime1 = [10.5,13.5] # period with (expected) high coherence and wide spectral width: SUN.
1675
1676 time = index*5./60
1677 if clean_coh_echoes == 1 :
1678 for ind in range(nChan):
1679 data_param[ind,:,:] = self.__calculateMoments( spectra[ind,:,:] , absc , noise[ind] )
1680 #print data_param[:,3]
1681 spwd = data_param[:,3]
1682 #print spwd.shape
1683 # SPECB_JULIA,header=anal_header,jspectra=spectra,vel=velocities,hei=heights, num_aver=1, mode_fit=0,smoothing=smoothing,jvelr=velr,jspwd=spwd,jsnr=snr,jnoise=noise,jstdvnoise=stdvnoise
1684 #spwd1=[ 1.65607, 1.43416, 0.500373, 0.208361, 0.000000, 26.7767, 22.5936, 26.7530, 20.6962, 29.1098, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 28.0300, 27.0511, 27.8810, 26.3126, 27.8445, 24.6181, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000]
1685 #spwd=numpy.array([spwd1,spwd1,spwd1,spwd1])
1686 #print spwd.shape, heights.shape,coh_aver.shape
1687 # to obtain spwd
1688 for ic in range(nPairs):
1689 pair = crosspairs[ic]
1690 coh = numpy.squeeze(numpy.sum(cspectra[ic,:,:], axis=1)/numpy.sqrt(numpy.sum(spectra[pair[0],:,:], axis=1)*numpy.sum(spectra[pair[1],:,:], axis=1)))
1691 for ih in range(nHei) :
1692 # Considering heights higher than 200km in order to avoid removing phenomena like EEJ.
1693 if heights[ih] >= 200 and coh_aver[pair[0],ih] == 1 and coh_aver[pair[1],ih] == 1 :
1694 # Checking coherence
1695 if (numpy.abs(coh[ih]) <= coh_th) or (time >= rtime0[0] and time <= rtime0[1]) :
1696 # Checking spectral widths
1697 if (spwd[pair[0],ih] > spwd_th[0]) or (spwd[pair[1],ih] > spwd_th[0]) :
1698 # satellite
1699 clean_coh_spectra[pair,ih,:] = 0.0
1700 clean_coh_cspectra[ic,ih,:] = 0.0
1701 clean_coh_aver[pair,ih] = 0
1702 else :
1703 if ((spwd[pair[0],ih] < spwd_th[1]) or (spwd[pair[1],ih] < spwd_th[1])) :
1704 # Special event like the sun.
1705 clean_coh_spectra[pair,ih,:] = 0.0
1706 clean_coh_cspectra[ic,ih,:] = 0.0
1707 clean_coh_aver[pair,ih] = 0
1708
1709 return clean_coh_spectra, clean_coh_cspectra, clean_coh_aver
1710
1711 isConfig = False
1712 __dataReady = False
1713 bloques = None
1714 bloque0 = None
1468 1715
1716 def __init__(self):
1717 Operation.__init__(self)
1718 self.i=0
1719 self.isConfig = False
1720
1469 1721
1722 def setup(self,nChan,nProf,nHei,nBlocks):
1723 self.__dataReady = False
1724 self.bloques = numpy.zeros([2, nProf, nHei,nBlocks], dtype= complex)
1725 self.bloque0 = numpy.zeros([nChan, nProf, nHei, nBlocks])
1726
1727 #def CleanRayleigh(self,dataOut,spectra,cspectra,out_spectra,out_cspectra,sat_spectra,sat_cspectra,crosspairs,heights, channels, nProf,nHei,nChan,nPairs,nIncohInt,nBlocks):
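# Summary (inferred from the body below): for every height/frequency bin,
# CleanRayleigh histograms the dB magnitude of the buffered cross-spectra across
# blocks, fits fit_func to that histogram to estimate a mode and spread, flags
# samples farther than factor_stdv spreads from the mode as coherent/satellite
# contamination, NaNs them out of the spectra and cross-spectra, and returns the
# block-averaged cleaned spectra plus the averaged flagged (satellite) parts.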
1728 def CleanRayleigh(self,dataOut,spectra,cspectra,save_drifts):
1729 #import matplotlib.pyplot as plt
1730 #for k in range(149):
1731
1732 # self.bloque0[:,:,:,k] = spectra[:,:,0:nHei]
1733 # self.bloques[:,:,:,k] = cspectra[:,:,0:nHei]
1734 #if self.i==nBlocks:
1735 # self.i==0
1736 rfunc = cspectra.copy() #self.bloques
1737 n_funct = len(rfunc[0,:,0,0])
1738 val_spc = spectra*0.0 #self.bloque0*0.0
1739 val_cspc = cspectra*0.0 #self.bloques*0.0
1740 in_sat_spectra = spectra.copy() #self.bloque0
1741 in_sat_cspectra = cspectra.copy() #self.bloques
1742
1743 #print( rfunc.shape)
1744 min_hei = 200
1745 nProf = dataOut.nProfiles
1746 heights = dataOut.heightList
1747 nHei = len(heights)
1748 channels = dataOut.channelList
1749 nChan = len(channels)
1750 crosspairs = dataOut.groupList
1751 nPairs = len(crosspairs)
1752 hval=(heights >= min_hei).nonzero()
1753 ih=hval[0]
1754 #print numpy.absolute(rfunc[:,0,0,14])
1755 for ih in range(hval[0][0],nHei):
1756 for ifreq in range(nProf):
1757 for ii in range(n_funct):
1758
1759 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih]))
1760 #print numpy.amin(func2clean)
1761 val = (numpy.isfinite(func2clean)==True).nonzero()
1762 if len(val)>0:
1763 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
1764 if min_val <= -40 : min_val = -40
1765 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
1766 if max_val >= 200 : max_val = 200
1767 #print min_val, max_val
1768 step = 1
1769 #Getting bins and the histogram
1770 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
1771 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
1772 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
1773 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
1774 parg = [numpy.amax(y_dist),mean,sigma]
1775 try :
1776 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
1777 mode = gauss_fit[1]
1778 stdv = gauss_fit[2]
1779 except:
1780 mode = mean
1781 stdv = sigma
1782 # if ih == 14 and ii == 0 and ifreq ==0 :
1783 # print x_dist.shape, y_dist.shape
1784 # print x_dist, y_dist
1785 # print min_val, max_val, binstep
1786 # print func2clean
1787 # print mean,sigma
1788 # mean1,std = norm.fit(y_dist)
1789 # print mean1, std, gauss_fit
1790 # print fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
1791 # 7.84616 53.9307 3.61863
1792 #stdv = 3.61863 # 2.99089
1793 #mode = 53.9307 #7.79008
1794
1795 #Removing echoes greater than mode + 3*stdv
1796 factor_stdv = 2.5
1797 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
1798
1799 if len(noval[0]) > 0:
1800 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
1801 cross_pairs = crosspairs[ii]
1802 #Getting coherent echoes which are removed.
1803 if len(novall[0]) > 0:
1804 #val_spc[(0,1),novall[a],ih] = 1
1805 #val_spc[,(2,3),novall[a],ih] = 1
1806 val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
1807 val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
1808 val_cspc[novall[0],ii,ifreq,ih] = 1
1809 #print("OUT NOVALL 1")
1810 #Removing coherent from ISR data
1811 # if ih == 17 and ii == 0 and ifreq ==0 :
1812 # print spectra[:,cross_pairs[0],ifreq,ih]
1813 spectra[noval,cross_pairs[0],ifreq,ih] = numpy.nan
1814 spectra[noval,cross_pairs[1],ifreq,ih] = numpy.nan
1815 cspectra[noval,ii,ifreq,ih] = numpy.nan
1816 # if ih == 17 and ii == 0 and ifreq ==0 :
1817 # print spectra[:,cross_pairs[0],ifreq,ih]
1818 # print noval, len(noval[0])
1819 # print novall, len(novall[0])
1820 # print factor_stdv*stdv
1821 # print func2clean-mode
1822 # print val_spc[:,cross_pairs[0],ifreq,ih]
1823 # print spectra[:,cross_pairs[0],ifreq,ih]
1824 # not reached; this only applies for savedrifts > 2
1825 ''' channels = channels
1826 cross_pairs = cross_pairs
1827 #print("OUT NOVALL 2")
1828
1829 vcross0 = (cross_pairs[0] == channels[ii]).nonzero()
1830 vcross1 = (cross_pairs[1] == channels[ii]).nonzero()
1831 vcross = numpy.concatenate((vcross0,vcross1),axis=None)
1832 #print('vcros =', vcross)
1833
1834 #Getting coherent echoes which are removed.
1835 if len(novall) > 0:
1836 #val_spc[novall,ii,ifreq,ih] = 1
1837 val_spc[ii,ifreq,ih,novall] = 1
1838 if len(vcross) > 0:
1839 val_cspc[vcross,ifreq,ih,novall] = 1
1840
1841 #Removing coherent from ISR data.
1842 self.bloque0[ii,ifreq,ih,noval] = numpy.nan
1843 if len(vcross) > 0:
1844 self.bloques[vcross,ifreq,ih,noval] = numpy.nan
1845 '''
1846 #Getting average of the spectra and cross-spectra from incoherent echoes.
1847 out_spectra = numpy.zeros([nChan,nProf,nHei], dtype=float) #+numpy.nan
1848 out_cspectra = numpy.zeros([nPairs,nProf,nHei], dtype=complex) #+numpy.nan
1849 for ih in range(nHei):
1850 for ifreq in range(nProf):
1851 for ich in range(nChan):
1852 tmp = spectra[:,ich,ifreq,ih]
1853 valid = (numpy.isfinite(tmp[:])==True).nonzero()
1854 # if ich == 0 and ifreq == 0 and ih == 17 :
1855 # print tmp
1856 # print valid
1857 # print len(valid[0])
1858 #print('TMP',tmp)
1859 if len(valid[0]) >0 :
1860 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
1861 #for icr in range(nPairs):
1862 for icr in range(nPairs):
1863 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
1864 valid = (numpy.isfinite(tmp)==True).nonzero()
1865 if len(valid[0]) > 0:
1866 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
1867 # print('##########################################################')
1868 #Removing fake coherent echoes (at least 4 points around the point)
1869
1870 val_spectra = numpy.sum(val_spc,0)
1871 val_cspectra = numpy.sum(val_cspc,0)
1872
1873 val_spectra = self.REM_ISOLATED_POINTS(val_spectra,4)
1874 val_cspectra = self.REM_ISOLATED_POINTS(val_cspectra,4)
1875
1876 for i in range(nChan):
1877 for j in range(nProf):
1878 for k in range(nHei):
1879 if numpy.isfinite(val_spectra[i,j,k]) and val_spectra[i,j,k] < 1 :
1880 val_spc[:,i,j,k] = 0.0
1881 for i in range(nPairs):
1882 for j in range(nProf):
1883 for k in range(nHei):
1884 if numpy.isfinite(val_cspectra[i,j,k]) and val_cspectra[i,j,k] < 1 :
1885 val_cspc[:,i,j,k] = 0.0
1886 # val_spc = numpy.reshape(val_spc, (len(spectra[:,0,0,0]),nProf*nHei*nChan))
1887 # if numpy.isfinite(val_spectra)==str(True):
1888 # noval = (val_spectra<1).nonzero()
1889 # if len(noval) > 0:
1890 # val_spc[:,noval] = 0.0
1891 # val_spc = numpy.reshape(val_spc, (149,nChan,nProf,nHei))
1892
1893 #val_cspc = numpy.reshape(val_spc, (149,nChan*nHei*nProf))
1894 #if numpy.isfinite(val_cspectra)==str(True):
1895 # noval = (val_cspectra<1).nonzero()
1896 # if len(noval) > 0:
1897 # val_cspc[:,noval] = 0.0
1898 # val_cspc = numpy.reshape(val_cspc, (149,nChan,nProf,nHei))
1899
1900 tmp_sat_spectra = spectra.copy()
1901 tmp_sat_spectra = tmp_sat_spectra*numpy.nan
1902 tmp_sat_cspectra = cspectra.copy()
1903 tmp_sat_cspectra = tmp_sat_cspectra*numpy.nan
1904
1905 # fig = plt.figure(figsize=(6,5))
1906 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1907 # ax = fig.add_axes([left, bottom, width, height])
1908 # cp = ax.contour(10*numpy.log10(numpy.absolute(spectra[0,0,:,:])))
1909 # ax.clabel(cp, inline=True,fontsize=10)
1910 # plt.show()
1911
1912 val = (val_spc > 0).nonzero()
1913 if len(val[0]) > 0:
1914 tmp_sat_spectra[val] = in_sat_spectra[val]
1915
1916 val = (val_cspc > 0).nonzero()
1917 if len(val[0]) > 0:
1918 tmp_sat_cspectra[val] = in_sat_cspectra[val]
1919
1920 #Getting average of the spectra and cross-spectra from incoherent echoes.
1921 sat_spectra = numpy.zeros((nChan,nProf,nHei), dtype=float)
1922 sat_cspectra = numpy.zeros((nPairs,nProf,nHei), dtype=complex)
1923 for ih in range(nHei):
1924 for ifreq in range(nProf):
1925 for ich in range(nChan):
1926 tmp = numpy.squeeze(tmp_sat_spectra[:,ich,ifreq,ih])
1927 valid = (numpy.isfinite(tmp)).nonzero()
1928 if len(valid[0]) > 0:
1929 sat_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
1930
1931 for icr in range(nPairs):
1932 tmp = numpy.squeeze(tmp_sat_cspectra[:,icr,ifreq,ih])
1933 valid = (numpy.isfinite(tmp)).nonzero()
1934 if len(valid[0]) > 0:
1935 sat_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
1936 #self.__dataReady= True
1937 #sat_spectra, sat_cspectra= sat_spectra, sat_cspectra
1938 #if not self.__dataReady:
1939 #return None, None
1940 return out_spectra, out_cspectra,sat_spectra,sat_cspectra
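# REM_ISOLATED_POINTS (summary inferred from the body below): for each 2-D flag
# map it keeps only points with at least 4 neighbouring flagged points within a
# radius rth in the (profile, height) plane, so isolated single-bin detections
# are not treated as coherent echoes.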
1941 def REM_ISOLATED_POINTS(self,array,rth):
1942 # import matplotlib.pyplot as plt
1943 if rth == None : rth = 4
1944
1945 num_prof = len(array[0,:,0])
1946 num_hei = len(array[0,0,:])
1947 n2d = len(array[:,0,0])
1948
1949 for ii in range(n2d) :
1950 #print ii,n2d
1951 tmp = array[ii,:,:]
1952 #print tmp.shape, array[ii,101,:],array[ii,102,:]
1953
1954 # fig = plt.figure(figsize=(6,5))
1955 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1956 # ax = fig.add_axes([left, bottom, width, height])
1957 # x = range(num_prof)
1958 # y = range(num_hei)
1959 # cp = ax.contour(y,x,tmp)
1960 # ax.clabel(cp, inline=True,fontsize=10)
1961 # plt.show()
1962
1963 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
1964 tmp = numpy.reshape(tmp,num_prof*num_hei)
1965 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
1966 indxs2 = (tmp > 0).nonzero()
1967
1968 indxs1 = (indxs1[0])
1969 indxs2 = indxs2[0]
1970 #indxs1 = numpy.array(indxs1[0])
1971 #indxs2 = numpy.array(indxs2[0])
1972 indxs = None
1973 #print indxs1 , indxs2
1974 for iv in range(len(indxs2)):
1975 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
1976 #print len(indxs2), indv
1977 if len(indv[0]) > 0 :
1978 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
1979 # print indxs
1980 indxs = indxs[1:]
1981 #print indxs, len(indxs)
1982 if len(indxs) < 4 :
1983 array[ii,:,:] = 0.
1984 return
1985
1986 xpos = numpy.mod(indxs ,num_hei)
1987 ypos = (indxs / num_hei)
1988 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
1989 #print sx
1990 xpos = xpos[sx]
1991 ypos = ypos[sx]
1992
1993 # *********************************** Cleaning isolated points **********************************
1994 ic = 0
1995 while True :
1996 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
1997 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
1998 #plt.plot(r)
1999 #plt.show()
2000 no_coh1 = (numpy.isfinite(r)==True).nonzero()
2001 no_coh2 = (r <= rth).nonzero()
2002 #print r, no_coh1, no_coh2
2003 no_coh1 = numpy.array(no_coh1[0])
2004 no_coh2 = numpy.array(no_coh2[0])
2005 no_coh = None
2006 #print valid1 , valid2
2007 for iv in range(len(no_coh2)):
2008 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
2009 if len(indv[0]) > 0 :
2010 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
2011 no_coh = no_coh[1:]
2012 #print len(no_coh), no_coh
2013 if len(no_coh) < 4 :
2014 #print xpos[ic], ypos[ic], ic
2015 # plt.plot(r)
2016 # plt.show()
2017 xpos[ic] = numpy.nan
2018 ypos[ic] = numpy.nan
2019
2020 ic = ic + 1
2021 if (ic == len(indxs)) :
2022 break
2023 #print( xpos, ypos)
2024
2025 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
2026 #print indxs[0]
2027 if len(indxs[0]) < 4 :
2028 array[ii,:,:] = 0.
2029 return
2030
2031 xpos = xpos[indxs[0]]
2032 ypos = ypos[indxs[0]]
2033 for i in range(0,len(ypos)):
2034 ypos[i]=int(ypos[i])
2035 junk = tmp
2036 tmp = junk*0.0
2037
2038 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
2039 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
2040
2041 #print array.shape
2042 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
2043 #print tmp.shape
2044
2045 # fig = plt.figure(figsize=(6,5))
2046 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
2047 # ax = fig.add_axes([left, bottom, width, height])
2048 # x = range(num_prof)
2049 # y = range(num_hei)
2050 # cp = ax.contour(y,x,array[ii,:,:])
2051 # ax.clabel(cp, inline=True,fontsize=10)
2052 # plt.show()
2053 return array
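# moments() (summary inferred from the body below): rolls the spectrum so its
# peak sits at the centre, then returns the first moment (Doppler shift,
# corrected for the roll) and the square root of the second moment, used as a
# width-like estimate.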
2054 def moments(self,doppler,yarray,npoints):
2055 ytemp = yarray
2056 #val = WHERE(ytemp GT 0,cval)
2057 #if cval == 0 : val = range(npoints-1)
2058 val = (ytemp > 0).nonzero()
2059 val = val[0]
2060 #print('hvalid:',hvalid)
2061 #print('valid', valid)
2062 if len(val) == 0 : val = range(npoints-1)
2063
2064 ynew = 0.5*(ytemp[val[0]]+ytemp[val[len(val)-1]])
2065 ytemp[len(ytemp):] = [ynew]
2066
2067 index = 0
2068 index = numpy.argmax(ytemp)
2069 ytemp = numpy.roll(ytemp,int(npoints/2)-1-index)
2070 ytemp = ytemp[0:npoints-1]
2071
2072 fmom = numpy.sum(doppler*ytemp)/numpy.sum(ytemp)+(index-(npoints/2-1))*numpy.abs(doppler[1]-doppler[0])
2073 smom = numpy.sum(doppler*doppler*ytemp)/numpy.sum(ytemp)
2074 return [fmom,numpy.sqrt(smom)]
2075 # **********************************************************************************************
2076 index = 0
2077 fint = 0
2078 buffer = 0
2079 buffer2 = 0
2080 buffer3 = 0
2081 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
2082 #print (dataOut.utctime)
2083 import matplotlib.pyplot as plt
2084 #nGroups = groupArray.shape[0]
2085 nChannels = dataOut.nChannels
2086 nHeights= dataOut.heightList.size
2087 nProf = dataOut.nProfiles
2088
2089 tini=time.localtime(dataOut.utctime)
2090 if (tini.tm_min % 5) == 0 and (tini.tm_sec < 5 and self.fint==0):
2091 # print tini.tm_min
2092 self.index = 0
2093 jspc = self.buffer
2094 jcspc = self.buffer2
2095 jnoise = self.buffer3
2096
2097 self.buffer = dataOut.data_spc
2098 self.buffer2 = dataOut.data_cspc
2099 self.buffer3 = dataOut.noise
2100 self.fint = 1
2101 #print self.buffer[0,:,0]
2102
2103 if numpy.any(jspc) :
2104 #print (len(jspc), jspc.shape)
2105 #print jspc[len(jspc)-4,:,0]
2106 jspc= numpy.reshape(jspc,(int(len(jspc)/4),nChannels,nProf,nHeights))
2107 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/2),2,nProf,nHeights))
2108 jnoise= numpy.reshape(jnoise,(int(len(jnoise)/4),nChannels))
2109 #print jspc[len(jspc)-1,0,:,0]
2110 else:
2111 dataOut.flagNoData = True
2112 return dataOut
2113
2114 else :
2115 #print tini.tm_min
2116 #self.fint = 0
2117 if (tini.tm_min % 5) == 0 : self.fint = 1
2118 else : self.fint = 0
2119 self.index += 1
2120 #print( len(self.buffer))
2121
2122 if numpy.any(self.buffer):
2123 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
2124 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
2125 self.buffer3 = numpy.concatenate((self.buffer3,dataOut.noise), axis=0)
2126 else:
2127 self.buffer = dataOut.data_spc
2128 self.buffer2 = dataOut.data_cspc
2129 self.buffer3 = dataOut.noise
2130 #print self.index, self.fint
2131 #print self.buffer2.shape
2132 dataOut.flagNoData = True
2133 return dataOut
2134 # if self.index == 0 and self.fint == 1 :
2135 # if jspc != None:
2136 # print len(jspc), jspc.shape
2137 # jspc= numpy.reshape(jspc,(4,128,63,len(jspc)/4))
2138 # print jspc.shape
2139 # dataOut.flagNoData = True
2140 # return dataOut
1470 2141 if path != None:
1471 2142 sys.path.append(path)
1472 self.dataOut.library = importlib.import_module(file)
1473
2143 self.library = importlib.import_module(file)
2144
1474 2145 #To be inserted as a parameter
1475 2146 groupArray = numpy.array(groupList)
1476 # groupArray = numpy.array([[0,1],[2,3]])
1477 self.dataOut.groupList = groupArray
1478
2147 #groupArray = numpy.array([[0,1],[2,3]])
2148 dataOut.groupList = groupArray
2149
1479 2150 nGroups = groupArray.shape[0]
1480 nChannels = self.dataIn.nChannels
1481 nHeights=self.dataIn.heightList.size
1482
2151 nChannels = dataOut.nChannels
2152 nHeights= dataOut.heightList.size
2153 # print self.index
1483 2154 #Parameters Array
1484 self.dataOut.data_param = None
1485
2155 dataOut.data_param = None
2156 dataOut.data_paramC = None
2157
1486 2158 #Set constants
1487 constants = self.dataOut.library.setConstants(self.dataIn)
1488 self.dataOut.constants = constants
1489 M = self.dataIn.normFactor
1490 N = self.dataIn.nFFTPoints
1491 ippSeconds = self.dataIn.ippSeconds
1492 K = self.dataIn.nIncohInt
1493 pairsArray = numpy.array(self.dataIn.pairsList)
1494
2159 constants = self.library.setConstants(dataOut)
2160 dataOut.constants = constants
2161 M = dataOut.normFactor
2162 N = dataOut.nFFTPoints
2163 ippSeconds = dataOut.ippSeconds
2164 K = dataOut.nIncohInt
2165 pairsArray = numpy.array(dataOut.pairsList)
2166
2167 snrth= 20
2168 spectra = dataOut.data_spc
2169 cspectra = dataOut.data_cspc
2170 nProf = dataOut.nProfiles
2171 heights = dataOut.heightList
2172 nHei = len(heights)
2173
2174 channels = dataOut.channelList
2175 nChan = len(channels)
2176 nIncohInt = dataOut.nIncohInt
2177 crosspairs = dataOut.groupList
2178 noise = dataOut.noise
2179 #print( nProf,heights)
2180 #print( jspc.shape, jspc.shape[0])
2181 #print noise
2182 #print jnoise[len(jnoise)-1,:], numpy.nansum(jnoise,axis=0)/len(jnoise)
2183 jnoise = jnoise/N
2184 noise = numpy.nansum(jnoise,axis=0)#/len(jnoise)
2185 #print( noise)
2186 power = numpy.sum(spectra, axis=1)
2187 #print power[0,:]
2188 #print("CROSSPAIRS",crosspairs)
2189 nPairs = len(crosspairs)
2190 #print(numpy.shape(dataOut.data_spc))
2191 absc = dataOut.abscissaList[:-1]
2192 #print absc.shape
2193 #nBlocks=149
2194 #print('spectra', spectra.shape)
2195 #print('noise print', crosspairs)
2196 #print('spectra', spectra.shape)
2197 #print('cspectra', cspectra.shape)
2198 #print numpy.array(dataOut.data_pre[1]).shape
2199 #spec, cspec = self.__DiffCoherent(snrth, spectra, cspectra, nProf, heights,nChan, nHei, nPairs, channels, noise*nProf, crosspairs)
2200
2201 if not self.isConfig:
2202 #self.setup(nChan,nProf,nHei=35,nBlocks=nBlocks)
2203 self.isConfig = True
2204
2205 #print ("configure todo")
2206 # dataOut.flagNoData = True
2207 index = tini.tm_hour*12+tini.tm_min/5
2208 #print index
2209 jspc = jspc/N/N
2210 jcspc = jcspc/N/N
2211 #dataOut.data_spc,dataOut.data_cspc = self.CleanRayleigh(dataOut,jspc,jcspc,crosspairs,heights,channels,nProf,nHei,nChan,nPairs,nIncohInt,nBlocks=nBlocks)
2212 tmp_spectra,tmp_cspectra,sat_spectra,sat_cspectra = self.CleanRayleigh(dataOut,jspc,jcspc,2)
2213 jspectra = tmp_spectra*len(jspc[:,0,0,0])
2214 jcspectra = tmp_cspectra*len(jspc[:,0,0,0])
2215 #incoh_spectra, incoh_cspectra,coh_aver = self.__DiffCoherent(snrth, dataOut.data_spc, dataOut.data_cspc, nProf, heights,nChan, nHei, nPairs, channels, noise*nProf, crosspairs)
2216 my_incoh_spectra ,my_incoh_cspectra,my_incoh_aver,my_coh_aver, incoh_spectra, coh_spectra, incoh_cspectra, coh_cspectra, incoh_aver, coh_aver = self.__DiffCoherent(jspectra, jcspectra, dataOut, noise, snrth, None, None)
2217 clean_coh_spectra, clean_coh_cspectra, clean_coh_aver = self.__CleanCoherent(snrth, coh_spectra, coh_cspectra, coh_aver, dataOut, noise,1,index)
2218 dataOut.data_spc = incoh_spectra
2219 dataOut.data_cspc = incoh_cspectra
2220 #dataOut.data_spc = tmp_spectra
2221 #dataOut.data_cspc = tmp_cspectra
2222
2223 clean_num_aver = incoh_aver*len(jspc[:,0,0,0])
2224 coh_num_aver = clean_coh_aver*len(jspc[:,0,0,0])
2225 #plt.plot( tmp_spectra[0,:,17])
2226 #plt.show()
2227 # plt.plot( incoh_spectra[0,64,:])
2228 # plt.show()
2229
2230 # plt.imshow(dataOut.data_spc[0,:,:],vmin=20000000)
2231 # plt.show()
1495 2232 #List of possible combinations
1496 2233 listComb = itertools.combinations(numpy.arange(groupArray.shape[1]),2)
1497 2234 indCross = numpy.zeros(len(list(listComb)), dtype = 'int')
1498
2235 #print("listComb",listComb)
2236
1499 2237 if getSNR:
1500 2238 listChannels = groupArray.reshape((groupArray.size))
1501 2239 listChannels.sort()
1502 noise = self.dataIn.getNoise()
1503 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1504
1505 for i in range(nGroups):
2240 #noise = dataOut.getNoise()
2241 #print noise
2242 #print(numpy.shape(noise))
2243 #dataOut.data_spc, dataOut.data_cspc = self.__DiffCoherent(snrth, spectra, cspectra, nProf, heights, nHei, nChan, channels, noise, nPairs, crosspairs)
2244 dataOut.data_SNR = self.__getSNR(dataOut.data_spc[listChannels,:,:], noise[listChannels])
2245 #dataOut.data_SNR = self.__getSNR(jspectra[listChannels,:,:], noise[listChannels])
2246
2247 if dataOut.data_paramC is None:
2248 dataOut.data_paramC = numpy.zeros((nGroups*4, nHeights,2))*numpy.nan
2249 for i in range(nGroups):
1506 2250 coord = groupArray[i,:]
1507
1508 2251 #Input data array
1509 data = self.dataIn.data_spc[coord,:,:]/(M*N)
2252 data = dataOut.data_spc[coord,:,:]/(M*N)
1510 2253 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1511
2254
1512 2255 #Cross Spectra data array for Covariance Matrixes
1513 2256 ind = 0
1514 2257 for pairs in listComb:
1515 2258 pairsSel = numpy.array([coord[x],coord[y]])
1516 2259 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1517 2260 ind += 1
1518 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1519 dataCross = dataCross**2/K
1520
2261 dataCross = dataOut.data_cspc[indCross,:,:]/(M*N)
2262 dataCross = dataCross**2
2263 #print dataOut.data_SNR.shape
2264
2265 nhei = nHeights
2266 poweri = numpy.sum(dataOut.data_spc[:,1:nProf-0,:],axis=1)/clean_num_aver[:,:]
2267 if i == 0 : my_noises = numpy.zeros(4,dtype=float) #FLTARR(4)
2268 n0i = numpy.nanmin(poweri[0+i*2,0:nhei-0])/(nProf-1)
2269 n1i = numpy.nanmin(poweri[1+i*2,0:nhei-0])/(nProf-1)
2270
2271 n0 = n0i
2272 n1= n1i
2273 my_noises[2*i+0] = n0
2274 my_noises[2*i+1] = n1
2275 snrth = -16.0
2276 snrth = 10**(snrth/10.0)
2277
1521 2278 for h in range(nHeights):
1522
1523 #Input
2279 # print("I ", "H", i,h )
2280 ##Input
1524 2281 d = data[:,h]
1525
2282 smooth = clean_num_aver[i+1,h] #dataOut.data_spc[:,1:nProf-0,:]
2283 signalpn0 = (dataOut.data_spc[i*2,1:(nProf-0),h])/smooth
2284 signalpn1 = (dataOut.data_spc[i*2+1,1:(nProf-0),h])/smooth
2285 signal0 = signalpn0-n0
2286 signal1 = signalpn1-n1
2287 snr0 = numpy.sum(signal0/n0)/(nProf-1)
2288 snr1 = numpy.sum(signal1/n1)/(nProf-1)
2289 #print clean_num_aver[coord,h]
2290 if snr0 > snrth and snr1 > snrth and clean_num_aver[i+1,h] > 0 :
1526 2291 #Covariance Matrix
1527 D = numpy.diag(d**2/K)
1528 ind = 0
1529 for pairs in listComb:
2292 #print h, d.shape
2293 D = numpy.diag(d**2)
2294 ind = 0
2295 for pairs in listComb:
1530 2296 #Coordinates in Covariance Matrix
1531 x = pairs[0]
1532 y = pairs[1]
2297 x = pairs[0]
2298 y = pairs[1]
1533 2299 #Channel Index
1534 S12 = dataCross[ind,:,h]
1535 D12 = numpy.diag(S12)
2300 S12 = dataCross[ind,:,h]
2301 D12 = numpy.diag(S12)
1536 2302 #Completing Covariance Matrix with Cross Spectras
1537 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1538 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1539 ind += 1
1540 Dinv=numpy.linalg.inv(D)
1541 L=numpy.linalg.cholesky(Dinv)
1542 LT=L.T
1543
1544 dp = numpy.dot(LT,d)
2303 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
2304 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
2305 ind += 1
2306 diagD = numpy.zeros(256)
2307 if h == 17 :
2308 for ii in range(256): diagD[ii] = D[ii,ii]
2309 #plt.plot(diagD)
2310 #plt.show()
2311
2312 # print hprint
2313 #Dinv=numpy.linalg.inv(D)
2314 #L=numpy.linalg.cholesky(Dinv)
2315 try:
2316 Dinv=numpy.linalg.inv(D)
2317 L=numpy.linalg.cholesky(Dinv)
2318 except:
2319 Dinv = D*numpy.nan
2320 L= D*numpy.nan
2321 LT=L.T
1545 2322
2323 dp = numpy.dot(LT,d)
2324
1546 2325 #Initial values
1547 data_spc = self.dataIn.data_spc[coord,:,h]
1548
1549 if (h>0)and(error1[3]<5):
1550 p0 = self.dataOut.data_param[i,:,h-1]
1551 else:
1552 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
2326 data_spc = dataOut.data_spc[coord,:,h]
1553 2327
1554 try:
2328 if (h>0)and(error1[3]<5):
2329 p0 = dataOut.data_param[i,:,h-1]
2330 else:
2331 #print("INSIDE ELSE")
2332 #print(data_spc.shape,constants,i)
2333 p0 = numpy.array(self.library.initialValuesFunction(data_spc, constants))# sin el i(data_spc, constants, i)
2334 #print ("WAIT_p0",p0)
2335 try:
1555 2336 #Least Squares
1556 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1557 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
2337 #print (dp,LT,constants)
2338 #value =self.__residFunction(p0,dp,LT,constants)
2339 #print ("valueREADY",value.shape, type(value))
2340 #optimize.leastsq(value)
2341 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
2342
2343 # print(minp)
2344 #minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1558 2345 #Chi square error
1559 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
2346 #print(minp,covp.infodict,mesg,ier)
2347 #print("REALIZA OPTIMIZ")
2348 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1560 2349 #Error with Jacobian
1561 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1562 except:
2350 error1 = self.library.errorFunction(minp,constants,LT)
2351 # print self.__residFunction(p0,dp,LT, constants)
2352 # print infodict['fvec']
2353 # print self.__residFunction(minp,dp,LT,constants)
2354
2355 except:
2356 minp = p0*numpy.nan
2357 error0 = numpy.nan
2358 error1 = p0*numpy.nan
2359 #print ("EXCEPT 0000000000")
2360 # s_sq = (self.__residFunction(minp,dp,LT,constants)).sum()/(len(dp)-len(p0))
2361 # covp = covp*s_sq
2362 # #print("TRY___________________________________________1")
2363 # error = []
2364 # for ip in range(len(minp)):
2365 # try:
2366 # error.append(numpy.absolute(covp[ip][ip])**0.5)
2367 # except:
2368 # error.append( 0.00 )
2369 else :
2370 data_spc = dataOut.data_spc[coord,:,h]
2371 p0 = numpy.array(self.library.initialValuesFunction(data_spc, constants))
1563 2372 minp = p0*numpy.nan
1564 2373 error0 = numpy.nan
1565 error1 = p0*numpy.nan
1566
2374 error1 = p0*numpy.nan
1567 2375 #Save
1568 if self.dataOut.data_param is None:
1569 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1570 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1571
1572 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1573 self.dataOut.data_param[i,:,h] = minp
1574 return
1575
2376 if dataOut.data_param is None:
2377 dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
2378 dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
2379
2380 dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
2381 dataOut.data_param[i,:,h] = minp
2382 #print(minp)
2383 #print("FIN")
2384 #print ("DATA",minp.shape)
2385
2386 #plt.plot(dataOut.data_param[0,3,:])
2387 #print(dataOut.data_param[:,3,:])
2388 #dataOut.data_errorC = numpy.zeros((nGroups, nHeights,1))*numpy.nan
2389 for ht in range(nHeights-1) :
2390 smooth = coh_num_aver[i+1,ht] #datc[0,ht,0,beam]
2391 dataOut.data_paramC[4*i,ht,1] = smooth
2392 signalpn0 = (coh_spectra[i*2 ,1:(nProf-0),ht])/smooth #coh_spectra
2393 signalpn1 = (coh_spectra[i*2+1,1:(nProf-0),ht])/smooth
2394
2395 #val0 = WHERE(signalpn0 > 0,cval0)
2396 val0 = (signalpn0 > 0).nonzero()
2397 val0 = val0[0]
2398 #print('hvalid:',hvalid)
2399 #print('valid', valid)
2400 if len(val0) == 0 : val0_npoints = nProf
2401 else : val0_npoints = len(val0)
2402
2403 #val1 = WHERE(signalpn1 > 0,cval1)
2404 val1 = (signalpn1 > 0).nonzero()
2405 val1 = val1[0]
2406 if len(val1) == 0 : val1_npoints = nProf
2407 else : val1_npoints = len(val1)
2408
2409 dataOut.data_paramC[0+4*i,ht,0] = numpy.sum((signalpn0/val0_npoints))/n0
2410 dataOut.data_paramC[1+4*i,ht,0] = numpy.sum((signalpn1/val1_npoints))/n1
2411
2412 signal0 = (signalpn0-n0) # > 0
2413 vali = (signal0 < 0).nonzero()
2414 vali = vali[0]
2415 if len(vali) > 0 : signal0[vali] = 0
2416 signal1 = (signalpn1-n1) #> 0
2417 vali = (signal1 < 0).nonzero()
2418 vali = vali[0]
2419 if len(vali) > 0 : signal1[vali] = 0
2420 snr0 = numpy.sum(signal0/n0)/(nProf-1)
2421 snr1 = numpy.sum(signal1/n1)/(nProf-1)
2422 doppler = absc[1:]
2423 if snr0 >= snrth and snr1 >= snrth and smooth :
2424 signalpn0_n0 = signalpn0
2425 signalpn0_n0[val0] = signalpn0[val0] - n0
2426 mom0 = self.moments(doppler,signalpn0-n0,nProf)
2427 # sigtmp= numpy.transpose(numpy.tile(signalpn0, [4,1]))
2428 # momt= self.__calculateMoments( sigtmp, doppler , n0 )
2429 signalpn1_n1 = signalpn1
2430 signalpn1_n1[val1] = signalpn1[val1] - n1
2431 mom1 = self.moments(doppler,signalpn1_n1,nProf)
2432 dataOut.data_paramC[2+4*i,ht,0] = (mom0[0]+mom1[0])/2.
2433 dataOut.data_paramC[3+4*i,ht,0] = (mom0[1]+mom1[1])/2.
2434 # if graph == 1 :
2435 # window, 13
2436 # plot,doppler,signalpn0
2437 # oplot,doppler,signalpn1,linest=1
2438 # oplot,mom0(0)*doppler/doppler,signalpn0
2439 # oplot,mom1(0)*doppler/doppler,signalpn1
2440 # print,interval/12.,beam,45+ht*15,snr0,snr1,mom0(0),mom1(0),mom0(1),mom1(1)
2441 #ENDIF
2442 #ENDIF
2443 #ENDFOR End height
2444 #plt.show()
2445 #print dataOut.data_param[i,3,:]
2446 # if self.__dataReady:
2447 # dataOut.flagNoData = False
2448 #print dataOut.data_error[:,3,:]
2449 dataOut.data_spc = jspectra
2450 if getSNR:
2451 listChannels = groupArray.reshape((groupArray.size))
2452 listChannels.sort()
2453
2454 dataOut.data_SNR = self.__getSNR(dataOut.data_spc[listChannels,:,:], my_noises[listChannels])
2455 return dataOut
2456
2457
1576 2458 def __residFunction(self, p, dp, LT, constants):
1577 2459
1578 fm = self.dataOut.library.modelFunction(p, constants)
2460 fm = self.library.modelFunction(p, constants)
1579 2461 fmp=numpy.dot(LT,fm)
1580
2462 #print ("DONE",dp -fmp)
2463 #print ("ok")
1581 2464 return dp-fmp
1582 2465
1583 2466 def __getSNR(self, z, noise):
1584
2467
1585 2468 avg = numpy.average(z, axis=1)
1586 2469 SNR = (avg.T-noise)/noise
1587 2470 SNR = SNR.T
1588 2471 return SNR
1589
1590 def __chisq(p,chindex,hindex):
2472
2473 def __chisq(self,p,chindex,hindex):
1591 2474 #similar to Resid but calculates CHI**2
1592 2475 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1593 2476 dp=numpy.dot(LT,d)
1594 2477 fmp=numpy.dot(LT,fm)
1595 2478 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1596 2479 return chisq
1597
2480
1598 2481 class WindProfiler(Operation):
1599
2482
1600 2483 __isConfig = False
1601
2484
1602 2485 __initime = None
1603 2486 __lastdatatime = None
1604 2487 __integrationtime = None
1605
2488
1606 2489 __buffer = None
1607
2490
1608 2491 __dataReady = False
1609
2492
1610 2493 __firstdata = None
1611
2494
1612 2495 n = None
1613
1614 def __init__(self):
2496
2497 def __init__(self):
1615 2498 Operation.__init__(self)
1616
2499
1617 2500 def __calculateCosDir(self, elev, azim):
1618 2501 zen = (90 - elev)*numpy.pi/180
1619 2502 azim = azim*numpy.pi/180
1620 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
2503 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1621 2504 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1622
2505
1623 2506 signX = numpy.sign(numpy.cos(azim))
1624 2507 signY = numpy.sign(numpy.sin(azim))
1625
2508
1626 2509 cosDirX = numpy.copysign(cosDirX, signX)
1627 2510 cosDirY = numpy.copysign(cosDirY, signY)
1628 2511 return cosDirX, cosDirY
1629
2512
1630 2513 def __calculateAngles(self, theta_x, theta_y, azimuth):
1631
2514
1632 2515 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1633 2516 zenith_arr = numpy.arccos(dir_cosw)
1634 2517 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1635
2518
1636 2519 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1637 2520 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1638
2521
1639 2522 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1640 2523
1641 2524 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1642
1643 #
2525
2526 #
1644 2527 if horOnly:
1645 2528 A = numpy.c_[dir_cosu,dir_cosv]
1646 2529 else:
@@ -1654,37 +2537,37 class WindProfiler(Operation):
1654 2537 listPhi = phi.tolist()
1655 2538 maxid = listPhi.index(max(listPhi))
1656 2539 minid = listPhi.index(min(listPhi))
1657
1658 rango = list(range(len(phi)))
2540
2541 rango = list(range(len(phi)))
1659 2542 # rango = numpy.delete(rango,maxid)
1660
2543
1661 2544 heiRang1 = heiRang*math.cos(phi[maxid])
1662 2545 heiRangAux = heiRang*math.cos(phi[minid])
1663 2546 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1664 2547 heiRang1 = numpy.delete(heiRang1,indOut)
1665
2548
1666 2549 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1667 2550 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1668
2551
1669 2552 for i in rango:
1670 2553 x = heiRang*math.cos(phi[i])
1671 2554 y1 = velRadial[i,:]
1672 2555 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1673
2556
1674 2557 x1 = heiRang1
1675 2558 y11 = f1(x1)
1676
2559
1677 2560 y2 = SNR[i,:]
1678 2561 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1679 2562 y21 = f2(x1)
1680
2563
1681 2564 velRadial1[i,:] = y11
1682 2565 SNR1[i,:] = y21
1683
2566
1684 2567 return heiRang1, velRadial1, SNR1
1685 2568
1686 2569 def __calculateVelUVW(self, A, velRadial):
1687
2570
1688 2571 # Matrix operation
1689 2572 # velUVW = numpy.zeros((velRadial.shape[1],3))
1690 2573 # for ind in range(velRadial.shape[1]):
@@ -1692,27 +2575,27 class WindProfiler(Operation):
1692 2575 # velUVW = velUVW.transpose()
1693 2576 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1694 2577 velUVW[:,:] = numpy.dot(A,velRadial)
1695
1696
2578
2579
1697 2580 return velUVW
1698
2581
1699 2582 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1700
2583
1701 2584 def techniqueDBS(self, kwargs):
1702 2585 """
1703 2586 Function that implements Doppler Beam Swinging (DBS) technique.
1704
2587
1705 2588 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1706 2589 Direction correction (if necessary), Ranges and SNR
1707
2590
1708 2591 Output: Winds estimation (Zonal, Meridional and Vertical)
1709
2592
1710 2593 Parameters affected: Winds, height range, SNR
1711 2594 """
1712 2595 velRadial0 = kwargs['velRadial']
1713 2596 heiRang = kwargs['heightList']
1714 2597 SNR0 = kwargs['SNR']
1715
2598
1716 2599 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1717 2600 theta_x = numpy.array(kwargs['dirCosx'])
1718 2601 theta_y = numpy.array(kwargs['dirCosy'])
@@ -1720,7 +2603,7 class WindProfiler(Operation):
1720 2603 elev = numpy.array(kwargs['elevation'])
1721 2604 azim = numpy.array(kwargs['azimuth'])
1722 2605 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1723 azimuth = kwargs['correctAzimuth']
2606 azimuth = kwargs['correctAzimuth']
1724 2607 if 'horizontalOnly' in kwargs:
1725 2608 horizontalOnly = kwargs['horizontalOnly']
1726 2609 else: horizontalOnly = False
@@ -1735,22 +2618,22 class WindProfiler(Operation):
1735 2618 param = param[arrayChannel,:,:]
1736 2619 theta_x = theta_x[arrayChannel]
1737 2620 theta_y = theta_y[arrayChannel]
1738
1739 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1740 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
2621
2622 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2623 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1741 2624 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1742
2625
1743 2626 # Compute the velocity components with DBS
1744 2627 winds = self.__calculateVelUVW(A,velRadial1)
1745
2628
1746 2629 return winds, heiRang1, SNR1
1747
2630
1748 2631 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1749
2632
1750 2633 nPairs = len(pairs_ccf)
1751 2634 posx = numpy.asarray(posx)
1752 2635 posy = numpy.asarray(posy)
1753
2636
1754 2637 # Inverse rotation to align with the azimuth
1755 2638 if azimuth!= None:
1756 2639 azimuth = azimuth*math.pi/180
@@ -1759,126 +2642,126 class WindProfiler(Operation):
1759 2642 else:
1760 2643 posx1 = posx
1761 2644 posy1 = posy
1762
2645
1763 2646 # Distance calculation
1764 2647 distx = numpy.zeros(nPairs)
1765 2648 disty = numpy.zeros(nPairs)
1766 2649 dist = numpy.zeros(nPairs)
1767 2650 ang = numpy.zeros(nPairs)
1768
2651
1769 2652 for i in range(nPairs):
1770 2653 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1771 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
2654 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1772 2655 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1773 2656 ang[i] = numpy.arctan2(disty[i],distx[i])
1774
2657
1775 2658 return distx, disty, dist, ang
1776 # Matrix calculation
2659 # Matrix calculation
1777 2660 # nPairs = len(pairs)
1778 2661 # ang1 = numpy.zeros((nPairs, 2, 1))
1779 2662 # dist1 = numpy.zeros((nPairs, 2, 1))
1780 #
2663 #
1781 2664 # for j in range(nPairs):
1782 2665 # dist1[j,0,0] = dist[pairs[j][0]]
1783 2666 # dist1[j,1,0] = dist[pairs[j][1]]
1784 2667 # ang1[j,0,0] = ang[pairs[j][0]]
1785 2668 # ang1[j,1,0] = ang[pairs[j][1]]
1786 #
2669 #
1787 2670 # return distx,disty, dist1,ang1
1788 2671
1789
2672
1790 2673 def __calculateVelVer(self, phase, lagTRange, _lambda):
1791 2674
1792 2675 Ts = lagTRange[1] - lagTRange[0]
1793 2676 velW = -_lambda*phase/(4*math.pi*Ts)
1794
2677
1795 2678 return velW
1796
2679
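# Small numeric check of the Doppler relation used by __calculateVelVer below:
# velW = -lambda * phase / (4*pi*Ts). The wavelength, lag spacing and phase are
# assumed illustrative values (a 50 MHz radar gives lambda of about 6 m).
import math

_lambda = 6.0          # radar wavelength in metres (assumed)
Ts = 0.01              # lag spacing in seconds (assumed)
phase = 0.3            # phase of the ACF at the first lag, in radians (assumed)

velW = -_lambda * phase / (4 * math.pi * Ts)
print(velW)            # about -14.3 m/s for these numbers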
1797 2680 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1798 2681 nPairs = tau1.shape[0]
1799 2682 nHeights = tau1.shape[1]
1800 vel = numpy.zeros((nPairs,3,nHeights))
2683 vel = numpy.zeros((nPairs,3,nHeights))
1801 2684 dist1 = numpy.reshape(dist, (dist.size,1))
1802
2685
1803 2686 angCos = numpy.cos(ang)
1804 2687 angSin = numpy.sin(ang)
1805
1806 vel0 = dist1*tau1/(2*tau2**2)
2688
2689 vel0 = dist1*tau1/(2*tau2**2)
1807 2690 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1808 2691 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1809
2692
1810 2693 ind = numpy.where(numpy.isinf(vel))
1811 2694 vel[ind] = numpy.nan
1812
2695
1813 2696 return vel
1814
2697
1815 2698 # def __getPairsAutoCorr(self, pairsList, nChannels):
1816 #
2699 #
1817 2700 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1818 #
1819 # for l in range(len(pairsList)):
2701 #
2702 # for l in range(len(pairsList)):
1820 2703 # firstChannel = pairsList[l][0]
1821 2704 # secondChannel = pairsList[l][1]
1822 #
1823 # #Getting autocorrelation pairs
2705 #
2706 # #Getting autocorrelation pairs
1824 2707 # if firstChannel == secondChannel:
1825 2708 # pairsAutoCorr[firstChannel] = int(l)
1826 #
2709 #
1827 2710 # pairsAutoCorr = pairsAutoCorr.astype(int)
1828 #
2711 #
1829 2712 # pairsCrossCorr = range(len(pairsList))
1830 2713 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1831 #
2714 #
1832 2715 # return pairsAutoCorr, pairsCrossCorr
1833
2716
1834 2717 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1835 2718 def techniqueSA(self, kwargs):
1836
1837 """
2719
2720 """
1838 2721 Function that implements the Spaced Antenna (SA) technique.
1839
2722
1840 2723 Input: Antenna positions (x and y), cross-correlation pair list, lag times (tau),
1841 2724 radar wavelength, antenna azimuth and an optional correction factor
1842
2725
1843 2726 Output: Wind estimates (Zonal, Meridional and Vertical)
1844
2727
1845 2728 Parameters affected: Winds
1846 2729 """
1847 2730 position_x = kwargs['positionX']
1848 2731 position_y = kwargs['positionY']
1849 2732 azimuth = kwargs['azimuth']
1850
2733
1851 2734 if 'correctFactor' in kwargs:
1852 2735 correctFactor = kwargs['correctFactor']
1853 2736 else:
1854 2737 correctFactor = 1
1855
2738
1856 2739 groupList = kwargs['groupList']
1857 2740 pairs_ccf = groupList[1]
1858 2741 tau = kwargs['tau']
1859 2742 _lambda = kwargs['_lambda']
1860
2743
1861 2744 #Cross Correlation pairs obtained
1862 2745 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1863 2746 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1864 2747 # pairsSelArray = numpy.array(pairsSelected)
1865 2748 # pairs = []
1866 #
2749 #
1867 2750 # #Wind estimation pairs obtained
1868 2751 # for i in range(pairsSelArray.shape[0]/2):
1869 2752 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1870 2753 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1871 2754 # pairs.append((ind1,ind2))
1872
2755
1873 2756 indtau = tau.shape[0]//2 #integer half-index (floor division for Python 3)
1874 2757 tau1 = tau[:indtau,:]
1875 2758 tau2 = tau[indtau:-1,:]
1876 2759 # tau1 = tau1[pairs,:]
1877 2760 # tau2 = tau2[pairs,:]
1878 2761 phase1 = tau[-1,:]
1879
2762
1880 2763 #---------------------------------------------------------------------
1881 #Direct method
2764 #Direct method
1882 2765 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1883 2766 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1884 2767 winds = stats.nanmean(winds, axis=0)
@@ -1894,97 +2777,97 class WindProfiler(Operation):
1894 2777 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1895 2778 winds = correctFactor*winds
1896 2779 return winds
1897
2780
1898 2781 def __checkTime(self, currentTime, paramInterval, outputInterval):
1899
2782
1900 2783 dataTime = currentTime + paramInterval
1901 2784 deltaTime = dataTime - self.__initime
1902
2785
1903 2786 if deltaTime >= outputInterval or deltaTime < 0:
1904 2787 self.__dataReady = True
1905 return
1906
2788 return
2789
1907 2790 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1908 2791 '''
1909 2792 Function that implements the wind estimation technique using detected meteors.
1910
2793
1911 2794 Input: Detected meteors, minimum number of meteors required for a wind estimate
1912
2795
1913 2796 Output: Wind estimates (Zonal and Meridional)
1914
2797
1915 2798 Parameters affected: Winds
1916 '''
2799 '''
1917 2800 #Settings
1918 2801 nInt = (heightMax - heightMin)/2
1919 2802 nInt = int(nInt)
1920 winds = numpy.zeros((2,nInt))*numpy.nan
1921
2803 winds = numpy.zeros((2,nInt))*numpy.nan
2804
1922 2805 #Filter errors
1923 2806 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1924 2807 finalMeteor = arrayMeteor[error,:]
1925
2808
1926 2809 #Meteor Histogram
1927 2810 finalHeights = finalMeteor[:,2]
1928 2811 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1929 2812 nMeteorsPerI = hist[0]
1930 2813 heightPerI = hist[1]
1931
2814
1932 2815 #Sort of meteors
1933 2816 indSort = finalHeights.argsort()
1934 2817 finalMeteor2 = finalMeteor[indSort,:]
1935
2818
1936 2819 # Calculating winds
1937 2820 ind1 = 0
1938 ind2 = 0
1939
2821 ind2 = 0
2822
1940 2823 for i in range(nInt):
1941 2824 nMet = nMeteorsPerI[i]
1942 2825 ind1 = ind2
1943 2826 ind2 = ind1 + nMet
1944
2827
1945 2828 meteorAux = finalMeteor2[ind1:ind2,:]
1946
2829
1947 2830 if meteorAux.shape[0] >= meteorThresh:
1948 2831 vel = meteorAux[:, 6]
1949 2832 zen = meteorAux[:, 4]*numpy.pi/180
1950 2833 azim = meteorAux[:, 3]*numpy.pi/180
1951
2834
1952 2835 n = numpy.cos(zen)
1953 2836 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1954 2837 # l = m*numpy.tan(azim)
1955 2838 l = numpy.sin(zen)*numpy.sin(azim)
1956 2839 m = numpy.sin(zen)*numpy.cos(azim)
1957
2840
1958 2841 A = numpy.vstack((l, m)).transpose()
1959 2842 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1960 2843 windsAux = numpy.dot(A1, vel)
1961
2844
1962 2845 winds[0,i] = windsAux[0]
1963 2846 winds[1,i] = windsAux[1]
1964
2847
1965 2848 return winds, heightPerI[:-1]
1966
2849
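# Sketch of the per-bin least-squares step performed in techniqueMeteors above:
# with direction cosines l = sin(zen)*sin(az) and m = sin(zen)*cos(az), each
# meteor contributes vel ~ l*u + m*v, and the bin wind is the least-squares
# solution. The zenith, azimuth and velocity arrays below are synthetic values
# used only for illustration.
import numpy

zen  = numpy.radians(numpy.array([30.0, 40.0, 35.0, 25.0]))    # assumed zenith angles
azim = numpy.radians(numpy.array([10.0, 95.0, 200.0, 300.0]))  # assumed azimuths
vel  = numpy.array([12.0, -4.0, -9.0, 7.0])                    # assumed radial velocities

l = numpy.sin(zen) * numpy.sin(azim)
m = numpy.sin(zen) * numpy.cos(azim)
A = numpy.vstack((l, m)).T
u, v = numpy.linalg.lstsq(A, vel, rcond=None)[0]                # zonal, meridional
print(u, v)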
1967 2850 def techniqueNSM_SA(self, **kwargs):
1968 2851 metArray = kwargs['metArray']
1969 2852 heightList = kwargs['heightList']
1970 2853 timeList = kwargs['timeList']
1971
2854
1972 2855 rx_location = kwargs['rx_location']
1973 2856 groupList = kwargs['groupList']
1974 2857 azimuth = kwargs['azimuth']
1975 2858 dfactor = kwargs['dfactor']
1976 2859 k = kwargs['k']
1977
2860
1978 2861 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1979 2862 d = dist*dfactor
1980 2863 #Phase calculation
1981 2864 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1982
2865
1983 2866 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
1984
2867
1985 2868 velEst = numpy.zeros((heightList.size,2))*numpy.nan
1986 2869 azimuth1 = azimuth1*numpy.pi/180
1987
2870
1988 2871 for i in range(heightList.size):
1989 2872 h = heightList[i]
1990 2873 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
@@ -1997,71 +2880,71 class WindProfiler(Operation):
1997 2880 A = numpy.asmatrix(A)
1998 2881 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
1999 2882 velHor = numpy.dot(A1,velAux)
2000
2883
2001 2884 velEst[i,:] = numpy.squeeze(velHor)
2002 2885 return velEst
2003
2886
2004 2887 def __getPhaseSlope(self, metArray, heightList, timeList):
2005 2888 meteorList = []
2006 2889 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2007 2890 #Putting back together the meteor matrix
2008 2891 utctime = metArray[:,0]
2009 2892 uniqueTime = numpy.unique(utctime)
2010
2893
2011 2894 phaseDerThresh = 0.5
2012 2895 ippSeconds = timeList[1] - timeList[0]
2013 2896 sec = numpy.where(timeList>1)[0][0]
2014 2897 nPairs = metArray.shape[1] - 6
2015 2898 nHeights = len(heightList)
2016
2899
2017 2900 for t in uniqueTime:
2018 2901 metArray1 = metArray[utctime==t,:]
2019 2902 # phaseDerThresh = numpy.pi/4 #reduce the phase threshold
2020 2903 tmet = metArray1[:,1].astype(int)
2021 2904 hmet = metArray1[:,2].astype(int)
2022
2905
2023 2906 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2024 2907 metPhase[:,:] = numpy.nan
2025 2908 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2026
2909
2027 2910 #Delete short trails
2028 2911 metBool = ~numpy.isnan(metPhase[0,:,:])
2029 2912 heightVect = numpy.sum(metBool, axis = 1)
2030 2913 metBool[heightVect<sec,:] = False
2031 2914 metPhase[:,heightVect<sec,:] = numpy.nan
2032
2915
2033 2916 #Derivative
2034 2917 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2035 2918 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2036 2919 metPhase[phDerAux] = numpy.nan
2037
2920
2038 2921 #--------------------------METEOR DETECTION -----------------------------------------
2039 2922 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2040
2923
2041 2924 for p in numpy.arange(nPairs):
2042 2925 phase = metPhase[p,:,:]
2043 2926 phDer = metDer[p,:,:]
2044
2927
2045 2928 for h in indMet:
2046 2929 height = heightList[h]
2047 2930 phase1 = phase[h,:] #82
2048 2931 phDer1 = phDer[h,:]
2049
2932
2050 2933 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2051
2934
2052 2935 indValid = numpy.where(~numpy.isnan(phase1))[0]
2053 2936 initMet = indValid[0]
2054 2937 endMet = 0
2055
2938
2056 2939 for i in range(len(indValid)-1):
2057
2940
2058 2941 #Time difference
2059 2942 inow = indValid[i]
2060 2943 inext = indValid[i+1]
2061 2944 idiff = inext - inow
2062 2945 #Phase difference
2063 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2064
2946 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2947
2065 2948 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2066 2949 sizeTrail = inow - initMet + 1
2067 2950 if sizeTrail>3*sec: #Skip too-short meteors; only fit trails longer than 3*sec
@@ -2077,43 +2960,43 class WindProfiler(Operation):
2077 2960 vel = slope#*height*1000/(k*d)
2078 2961 estAux = numpy.array([utctime,p,height, vel, rsq])
2079 2962 meteorList.append(estAux)
2080 initMet = inext
2963 initMet = inext
2081 2964 metArray2 = numpy.array(meteorList)
2082
2965
2083 2966 return metArray2
2084
2967
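# Sketch of the phase-slope estimate done for each meteor trail in __getPhaseSlope
# above: the unwrapped cross-channel phase is fit linearly against time, and the
# slope (later scaled into a velocity) plus an R^2 quality figure are kept. The
# time axis and phases below are synthetic values for illustration.
import numpy

t     = numpy.arange(20) * 0.01                      # assumed profile times (s)
phase = numpy.unwrap(0.8 * t + 0.05 * numpy.random.randn(t.size))

slope, intercept = numpy.polyfit(t, phase, 1)
fit = slope * t + intercept
rsq = 1 - numpy.sum((phase - fit)**2) / numpy.sum((phase - phase.mean())**2)
print(slope, rsq)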
2085 2968 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2086
2969
2087 2970 azimuth1 = numpy.zeros(len(pairslist))
2088 2971 dist = numpy.zeros(len(pairslist))
2089
2972
2090 2973 for i in range(len(rx_location)):
2091 2974 ch0 = pairslist[i][0]
2092 2975 ch1 = pairslist[i][1]
2093
2976
2094 2977 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2095 2978 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2096 2979 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2097 2980 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2098
2981
2099 2982 azimuth1 -= azimuth0
2100 2983 return azimuth1, dist
2101
2984
2102 2985 def techniqueNSM_DBS(self, **kwargs):
2103 2986 metArray = kwargs['metArray']
2104 2987 heightList = kwargs['heightList']
2105 timeList = kwargs['timeList']
2988 timeList = kwargs['timeList']
2106 2989 azimuth = kwargs['azimuth']
2107 2990 theta_x = numpy.array(kwargs['theta_x'])
2108 2991 theta_y = numpy.array(kwargs['theta_y'])
2109
2992
2110 2993 utctime = metArray[:,0]
2111 2994 cmet = metArray[:,1].astype(int)
2112 2995 hmet = metArray[:,3].astype(int)
2113 2996 SNRmet = metArray[:,4]
2114 2997 vmet = metArray[:,5]
2115 2998 spcmet = metArray[:,6]
2116
2999
2117 3000 nChan = numpy.max(cmet) + 1
2118 3001 nHeights = len(heightList)
2119 3002
@@ -2129,20 +3012,20 class WindProfiler(Operation):
2129 3012
2130 3013 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2131 3014 indthisH = numpy.where(thisH)
2132
3015
2133 3016 if numpy.size(indthisH) > 3:
2134
3017
2135 3018 vel_aux = vmet[thisH]
2136 3019 chan_aux = cmet[thisH]
2137 3020 cosu_aux = dir_cosu[chan_aux]
2138 3021 cosv_aux = dir_cosv[chan_aux]
2139 3022 cosw_aux = dir_cosw[chan_aux]
2140
2141 nch = numpy.size(numpy.unique(chan_aux))
3023
3024 nch = numpy.size(numpy.unique(chan_aux))
2142 3025 if nch > 1:
2143 3026 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2144 3027 velEst[i,:] = numpy.dot(A,vel_aux)
2145
3028
2146 3029 return velEst
2147 3030
2148 3031 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
@@ -2152,40 +3035,40 class WindProfiler(Operation):
2152 3035 absc = dataOut.abscissaList[:-1]
2153 3036 # noise = dataOut.noise
2154 3037 heightList = dataOut.heightList
2155 SNR = dataOut.data_snr
2156
3038 SNR = dataOut.data_SNR
3039
2157 3040 if technique == 'DBS':
2158
2159 kwargs['velRadial'] = param[:,1,:] #Radial velocity
3041
3042 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2160 3043 kwargs['heightList'] = heightList
2161 3044 kwargs['SNR'] = SNR
2162
2163 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
3045
3046 dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
2164 3047 dataOut.utctimeInit = dataOut.utctime
2165 3048 dataOut.outputInterval = dataOut.paramInterval
2166
3049
2167 3050 elif technique == 'SA':
2168
3051
2169 3052 #Parameters
2170 3053 # position_x = kwargs['positionX']
2171 3054 # position_y = kwargs['positionY']
2172 3055 # azimuth = kwargs['azimuth']
2173 #
3056 #
2174 3057 # if kwargs.has_key('crosspairsList'):
2175 3058 # pairs = kwargs['crosspairsList']
2176 3059 # else:
2177 # pairs = None
2178 #
3060 # pairs = None
3061 #
2179 3062 # if kwargs.has_key('correctFactor'):
2180 3063 # correctFactor = kwargs['correctFactor']
2181 3064 # else:
2182 3065 # correctFactor = 1
2183
3066
2184 3067 # tau = dataOut.data_param
2185 3068 # _lambda = dataOut.C/dataOut.frequency
2186 3069 # pairsList = dataOut.groupList
2187 3070 # nChannels = dataOut.nChannels
2188
3071
2189 3072 kwargs['groupList'] = dataOut.groupList
2190 3073 kwargs['tau'] = dataOut.data_param
2191 3074 kwargs['_lambda'] = dataOut.C/dataOut.frequency
@@ -2193,30 +3076,30 class WindProfiler(Operation):
2193 3076 dataOut.data_output = self.techniqueSA(kwargs)
2194 3077 dataOut.utctimeInit = dataOut.utctime
2195 3078 dataOut.outputInterval = dataOut.timeInterval
2196
2197 elif technique == 'Meteors':
3079
3080 elif technique == 'Meteors':
2198 3081 dataOut.flagNoData = True
2199 3082 self.__dataReady = False
2200
3083
2201 3084 if 'nHours' in kwargs:
2202 3085 nHours = kwargs['nHours']
2203 else:
3086 else:
2204 3087 nHours = 1
2205
3088
2206 3089 if 'meteorsPerBin' in kwargs:
2207 3090 meteorThresh = kwargs['meteorsPerBin']
2208 3091 else:
2209 3092 meteorThresh = 6
2210
3093
2211 3094 if 'hmin' in kwargs:
2212 3095 hmin = kwargs['hmin']
2213 3096 else: hmin = 70
2214 3097 if 'hmax' in kwargs:
2215 3098 hmax = kwargs['hmax']
2216 3099 else: hmax = 110
2217
3100
2218 3101 dataOut.outputInterval = nHours*3600
2219
3102
2220 3103 if self.__isConfig == False:
2221 3104 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2222 3105 #Get Initial LTC time
@@ -2224,29 +3107,29 class WindProfiler(Operation):
2224 3107 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2225 3108
2226 3109 self.__isConfig = True
2227
3110
2228 3111 if self.__buffer is None:
2229 3112 self.__buffer = dataOut.data_param
2230 3113 self.__firstdata = copy.copy(dataOut)
2231 3114
2232 3115 else:
2233 3116 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2234
3117
2235 3118 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2236
3119
2237 3120 if self.__dataReady:
2238 3121 dataOut.utctimeInit = self.__initime
2239
3122
2240 3123 self.__initime += dataOut.outputInterval #to erase time offset
2241
3124
2242 3125 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2243 3126 dataOut.flagNoData = False
2244 3127 self.__buffer = None
2245
3128
2246 3129 elif technique == 'Meteors1':
2247 3130 dataOut.flagNoData = True
2248 3131 self.__dataReady = False
2249
3132
2250 3133 if 'nMins' in kwargs:
2251 3134 nMins = kwargs['nMins']
2252 3135 else: nMins = 20
@@ -2261,7 +3144,7 class WindProfiler(Operation):
2261 3144 if 'mode' in kwargs:
2262 3145 mode = kwargs['mode']
2263 3146 if 'theta_x' in kwargs:
2264 theta_x = kwargs['theta_x']
3147 theta_x = kwargs['theta_x']
2265 3148 if 'theta_y' in kwargs:
2266 3149 theta_y = kwargs['theta_y']
2267 3150 else: mode = 'SA'
@@ -2274,10 +3157,10 class WindProfiler(Operation):
2274 3157 freq = 50e6
2275 3158 lamb = C/freq
2276 3159 k = 2*numpy.pi/lamb
2277
3160
2278 3161 timeList = dataOut.abscissaList
2279 3162 heightList = dataOut.heightList
2280
3163
2281 3164 if self.__isConfig == False:
2282 3165 dataOut.outputInterval = nMins*60
2283 3166 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
@@ -2288,20 +3171,20 class WindProfiler(Operation):
2288 3171 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2289 3172
2290 3173 self.__isConfig = True
2291
3174
2292 3175 if self.__buffer is None:
2293 3176 self.__buffer = dataOut.data_param
2294 3177 self.__firstdata = copy.copy(dataOut)
2295 3178
2296 3179 else:
2297 3180 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2298
3181
2299 3182 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2300
3183
2301 3184 if self.__dataReady:
2302 3185 dataOut.utctimeInit = self.__initime
2303 3186 self.__initime += dataOut.outputInterval #to erase time offset
2304
3187
2305 3188 metArray = self.__buffer
2306 3189 if mode == 'SA':
2307 3190 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
@@ -2312,74 +3195,234 class WindProfiler(Operation):
2312 3195 self.__buffer = None
2313 3196
2314 3197 return
2315
3198
2316 3199 class EWDriftsEstimation(Operation):
2317
2318 def __init__(self):
2319 Operation.__init__(self)
2320
3200
3201 def __init__(self):
3202 Operation.__init__(self)
3203
2321 3204 def __correctValues(self, heiRang, phi, velRadial, SNR):
2322 3205 listPhi = phi.tolist()
2323 3206 maxid = listPhi.index(max(listPhi))
2324 3207 minid = listPhi.index(min(listPhi))
2325
2326 rango = list(range(len(phi)))
3208
3209 rango = list(range(len(phi)))
2327 3210 # rango = numpy.delete(rango,maxid)
2328
3211
2329 3212 heiRang1 = heiRang*math.cos(phi[maxid])
2330 3213 heiRangAux = heiRang*math.cos(phi[minid])
2331 3214 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2332 3215 heiRang1 = numpy.delete(heiRang1,indOut)
2333
3216
2334 3217 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2335 3218 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2336
3219
2337 3220 for i in rango:
2338 3221 x = heiRang*math.cos(phi[i])
2339 3222 y1 = velRadial[i,:]
2340 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2341
2342 x1 = heiRang1
3223 vali = numpy.isfinite(y1).nonzero()
3224 y1 = y1[vali]
3225 x = x[vali]
3226 f1 = interpolate.interp1d(x,y1,kind = 'cubic',bounds_error=False)
3227
3228 #heiRang1 = x*math.cos(phi[maxid])
3229 x1 = heiRang1
2343 3230 y11 = f1(x1)
2344
3231
2345 3232 y2 = SNR[i,:]
2346 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
3233 #print 'snr ', y2
3234 x = heiRang*math.cos(phi[i])
3235 vali= (y2 != -1).nonzero()
3236 y2 = y2[vali]
3237 x = x[vali]
3238 #print 'snr ',y2
3239 f2 = interpolate.interp1d(x,y2,kind = 'cubic',bounds_error=False)
2347 3240 y21 = f2(x1)
2348
3241
2349 3242 velRadial1[i,:] = y11
2350 3243 SNR1[i,:] = y21
2351
3244
2352 3245 return heiRang1, velRadial1, SNR1
2353 3246
3247
3248
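# Sketch of what __correctValues does for one beam: slant ranges are projected to
# vertical heights with cos(zenith), non-finite samples are dropped, and the
# profile is cubic-interpolated back onto the common height grid (out-of-range
# points become NaN because bounds_error=False). Heights, zenith angle and
# velocities below are assumed illustrative values.
import math
import numpy
from scipy import interpolate

heiRang = numpy.arange(200.0, 300.0, 15.0)                      # slant ranges, km (assumed)
phi     = math.radians(3.0)                                     # beam zenith angle (assumed)
velRad  = numpy.array([10., numpy.nan, 8., 7., 5., 4., 2.])     # one profile (assumed)

x    = heiRang * math.cos(phi)                                  # vertical heights of the samples
keep = numpy.isfinite(velRad)
f1   = interpolate.interp1d(x[keep], velRad[keep], kind='cubic', bounds_error=False)
velOnGrid = f1(heiRang * math.cos(phi))                         # evaluate on the common grid
print(velOnGrid)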
2354 3249 def run(self, dataOut, zenith, zenithCorrection):
3250 import matplotlib.pyplot as plt
2355 3251 heiRang = dataOut.heightList
2356 3252 velRadial = dataOut.data_param[:,3,:]
2357 SNR = dataOut.data_snr
2358
3253 velRadialm = dataOut.data_param[:,2:4,:]*-1
3254
3255 rbufc=dataOut.data_paramC[:,:,0]
3256 ebufc=dataOut.data_paramC[:,:,1]
3257 SNR = dataOut.data_SNR
3258 velRerr = dataOut.data_error[:,4,:]
3259 moments=numpy.vstack(([velRadialm[0,:]],[velRadialm[0,:]],[velRadialm[1,:]],[velRadialm[1,:]]))
3260 dataOut.moments=moments
3261 # Coherent
3262 smooth_wC = ebufc[0,:]
3263 p_w0C = rbufc[0,:]
3264 p_w1C = rbufc[1,:]
3265 w_wC = rbufc[2,:]*-1 #*radial_sign(radial EQ 1)
3266 t_wC = rbufc[3,:]
3267 my_nbeams = 2
3268
3269 # plt.plot(w_wC)
3270 # plt.show()
2359 3271 zenith = numpy.array(zenith)
2360 zenith -= zenithCorrection
3272 zenith -= zenithCorrection
2361 3273 zenith *= numpy.pi/180
2362
2363 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2364
3274 if zenithCorrection != 0 :
3275 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
3276 else :
3277 heiRang1 = heiRang
3278 velRadial1 = velRadial
3279 SNR1 = SNR
3280
2365 3281 alp = zenith[0]
2366 bet = zenith[1]
3282 bet = zenith[1]
2367 3283
3284 #t_w(bad) = t_wC(bad)
3285 #t_w_err(bad)=!values.f_nan
3286
2368 3287 w_w = velRadial1[0,:]
2369 3288 w_e = velRadial1[1,:]
2370
2371 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2372 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2373
2374 winds = numpy.vstack((u,w))
2375
3289 w_w_err = velRerr[0,:]
3290 w_e_err = velRerr[1,:]
3291 #plt.plot(w_w)
3292 #plt.show()
3293 #plt.plot(w_e)
3294 #plt.show()
3295 # bad = where((chisq_w GT 2.5 AND abs(w_w_err) GT 1. AND finite(w_wC))
3296 # OR abs(w_w) GT 200. OR (NOT finite(w_w)-254) OR ABS(w_w_err) GT 100, cbad)
3297 val = (~numpy.isfinite(w_w)).nonzero()
3298 val = val[0]
3299 bad = val
3300 if len(bad) > 0 :
3301 w_w[bad] = w_wC[bad]
3302 w_w_err[bad]= numpy.nan
3303 if my_nbeams == 2:
3304 smooth_eC=ebufc[4,:]
3305 p_e0C = rbufc[4,:]
3306 p_e1C = rbufc[5,:]
3307 w_eC = rbufc[6,:]*-1
3308 t_eC = rbufc[7,:]
3309 val = (~numpy.isfinite(w_e)).nonzero()
3310 val = val[0]
3311 bad = val
3312 if len(bad) > 0 :
3313 w_e[bad] = w_eC[bad]
3314 w_e_err[bad]= numpy.nan
3315
3316 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
3317 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
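# The two expressions above invert the two-beam geometry. With the west and east
# beams tilted from the vertical by alp and bet, each measured radial velocity is
#     w_w = w*cos(alp) + u*sin(alp)
#     w_e = w*cos(bet) + u*sin(bet)
# Solving this pair for (w, u) by Cramer's rule reproduces the lines above, with
# the common denominator sin(bet - alp) = cos(alp)*sin(bet) - cos(bet)*sin(alp).
# The error lines below propagate w_w_err and w_e_err through the same linear
# combination, assuming the two beam errors are uncorrelated (a sketch of the
# algebra, not taken from the original source).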
3318 #plt.plot(w)
3319 #plt.show()
3320 #error
3321 w_err = numpy.sqrt((w_w_err*numpy.sin(bet))**2.+(w_e_err*numpy.sin(alp))**2.)/ numpy.absolute(numpy.cos(alp)*numpy.sin(bet)-numpy.cos(bet)*numpy.sin(alp))
3322 u_err = numpy.sqrt((w_w_err*numpy.cos(bet))**2.+(w_e_err*numpy.cos(alp))**2.)/ numpy.absolute(numpy.cos(alp)*numpy.sin(bet)-numpy.cos(bet)*numpy.sin(alp))
3323
3324 winds = numpy.vstack((w,u))
3325
2376 3326 dataOut.heightList = heiRang1
2377 3327 dataOut.data_output = winds
2378 dataOut.data_snr = SNR1
2379
3328 #dataOut.data_SNR = SNR1
3329 snr1 = 10*numpy.log10(SNR1[0])
3330 dataOut.data_snr1 = numpy.reshape(snr1,(1,snr1.shape[0]))
2380 3331 dataOut.utctimeInit = dataOut.utctime
2381 3332 dataOut.outputInterval = dataOut.timeInterval
2382 return
3333
3334 hei_aver0 = 218
3335 jrange = 450 #900 for HA drifts
3336 deltah = 15.0 #dataOut.spacing(0)
3337 h0 = 0.0 #dataOut.first_height(0)
3338 heights = dataOut.heightList
3339 nhei = len(heights)
3340
3341 range1 = numpy.arange(nhei) * deltah + h0
3342
3343 #jhei = WHERE(range1 GE hei_aver0 , jcount)
3344 jhei = (range1 >= hei_aver0).nonzero()
3345 if len(jhei[0]) > 0 :
3346 h0_index = jhei[0][0] # Initial height for getting averages 218km
3347
3348 mynhei = 7
3349 nhei_avg = int(jrange/deltah)
3350 h_avgs = int(nhei_avg/mynhei)
3351 nhei_avg = h_avgs*(mynhei-1)+mynhei
3352
3353 navgs = numpy.zeros(mynhei,dtype='float')
3354 delta_h = numpy.zeros(mynhei,dtype='float')
3355 range_aver = numpy.zeros(mynhei,dtype='float')
3356 for ih in range( mynhei-1 ):
3357 range_aver[ih] = numpy.sum(range1[h0_index+h_avgs*ih:h0_index+h_avgs*(ih+1)-0])/h_avgs
3358 navgs[ih] = h_avgs
3359 delta_h[ih] = deltah*h_avgs
3360
3361 range_aver[mynhei-1] = numpy.sum(range1[h0_index:h0_index+6*h_avgs-0])/(6*h_avgs)
3362 navgs[mynhei-1] = 6*h_avgs
3363 delta_h[mynhei-1] = deltah*6*h_avgs
3364
3365 wA = w[h0_index:h0_index+nhei_avg-0]
3366 wA_err = w_err[h0_index:h0_index+nhei_avg-0]
3367 #print(wA, wA_err)
3368 for i in range(5) :
3369 vals = wA[i*h_avgs:(i+1)*h_avgs-0]
3370 errs = wA_err[i*h_avgs:(i+1)*h_avgs-0]
3371 avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
3372 sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
3373 wA[6*h_avgs+i] = avg
3374 wA_err[6*h_avgs+i] = sigma
3375
3376
3377 vals = wA[0:6*h_avgs-0]
3378 errs=wA_err[0:6*h_avgs-0]
3379 avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2)
3380 sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
3381 wA[nhei_avg-1] = avg
3382 wA_err[nhei_avg-1] = sigma
3383
3384 wA = wA[6*h_avgs:nhei_avg-0]
3385 wA_err=wA_err[6*h_avgs:nhei_avg-0]
3386 if my_nbeams == 2 :
3387
3388 uA = u[h0_index:h0_index+nhei_avg]
3389 uA_err=u_err[h0_index:h0_index+nhei_avg]
3390
3391 for i in range(5) :
3392 vals = uA[i*h_avgs:(i+1)*h_avgs-0]
3393 errs=uA_err[i*h_avgs:(i+1)*h_avgs-0]
3394 avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
3395 sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
3396 uA[6*h_avgs+i] = avg
3397 uA_err[6*h_avgs+i]=sigma
3398
3399 vals = uA[0:6*h_avgs-0]
3400 errs = uA_err[0:6*h_avgs-0]
3401 avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
3402 sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
3403 uA[nhei_avg-1] = avg
3404 uA_err[nhei_avg-1] = sigma
3405 uA = uA[6*h_avgs:nhei_avg-0]
3406 uA_err = uA_err[6*h_avgs:nhei_avg-0]
3407
3408 dataOut.drifts_avg = numpy.vstack((wA,uA))
3409 #print(dataOut.drifts_avg)
3410 tini=time.localtime(dataOut.utctime)
3411 datefile= str(tini[0]).zfill(4)+str(tini[1]).zfill(2)+str(tini[2]).zfill(2)
3412 nfile = '/home/pcondor/Database/ewdriftsschain2019/jro'+datefile+'drifts_sch3.txt'
3413 #print(dataOut.drifts_avg)
3414 f1 = open(nfile,'a')
3415 #print(nfile)
3416 #f.write(datefile)
3417 #numpy.savetxt(f,[datefile,datefile],fmt='%10s')
3418 datedriftavg=str(tini[0])+' '+str(tini[1])+' '+str(tini[2])+' '+str(tini[3])+' '+str(tini[4])
3419 driftavgstr=str(dataOut.drifts_avg)
3420 #f1.write(datedriftavg)
3421 #f1.write(driftavgstr)
3422 numpy.savetxt(f1,numpy.column_stack([tini[0],tini[1],tini[2],tini[3],tini[4]]),fmt='%4i')
3423 numpy.savetxt(f1,dataOut.drifts_avg,fmt='%10.2f')
3424 f1.close()
3425 return dataOut
2383 3426
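# Standalone sketch of the inverse-variance averaging used above to build
# drifts_avg: values are combined as avg = sum(x/err^2)/sum(1/err^2) and the
# combined uncertainty is sigma = sqrt(1/sum(1/err^2)). The sample values below
# are assumed, for illustration only.
import numpy

vals = numpy.array([12.0, 15.0, 14.0, 13.0])   # drift values in one height block (assumed)
errs = numpy.array([2.0, 4.0, 3.0, 2.5])       # their one-sigma errors (assumed)

avg   = numpy.nansum(vals / errs**2) / numpy.nansum(1.0 / errs**2)
sigma = numpy.sqrt(1.0 / numpy.nansum(1.0 / errs**2))
print(avg, sigma)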
2384 3427 #--------------- Non Specular Meteor ----------------
2385 3428
@@ -2389,11 +3432,11 class NonSpecularMeteorDetection(Operation):
2389 3432 data_acf = dataOut.data_pre[0]
2390 3433 data_ccf = dataOut.data_pre[1]
2391 3434 pairsList = dataOut.groupList[1]
2392
3435
2393 3436 lamb = dataOut.C/dataOut.frequency
2394 3437 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2395 3438 paramInterval = dataOut.paramInterval
2396
3439
2397 3440 nChannels = data_acf.shape[0]
2398 3441 nLags = data_acf.shape[1]
2399 3442 nProfiles = data_acf.shape[2]
@@ -2403,7 +3446,7 class NonSpecularMeteorDetection(Operation):
2403 3446 heightList = dataOut.heightList
2404 3447 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2405 3448 utctime = dataOut.utctime
2406
3449
2407 3450 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2408 3451
2409 3452 #------------------------ SNR --------------------------------------
@@ -2415,7 +3458,7 class NonSpecularMeteorDetection(Operation):
2415 3458 SNR[i] = (power[i]-noise[i])/noise[i]
2416 3459 SNRm = numpy.nanmean(SNR, axis = 0)
2417 3460 SNRdB = 10*numpy.log10(SNR)
2418
3461
2419 3462 if mode == 'SA':
2420 3463 dataOut.groupList = dataOut.groupList[1]
2421 3464 nPairs = data_ccf.shape[0]
@@ -2423,22 +3466,22 class NonSpecularMeteorDetection(Operation):
2423 3466 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2424 3467 # phase1 = numpy.copy(phase)
2425 3468 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2426
3469
2427 3470 for p in range(nPairs):
2428 3471 ch0 = pairsList[p][0]
2429 3472 ch1 = pairsList[p][1]
2430 3473 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2431 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2432 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2433 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2434 # coh1[p,:,:] = numpy.abs(ccf) #median filter
3474 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
3475 # phase1[p,:,:] = numpy.angle(ccf) #median filter
3476 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
3477 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2435 3478 coh = numpy.nanmax(coh1, axis = 0)
2436 3479 # struc = numpy.ones((5,1))
2437 3480 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2438 3481 #---------------------- Radial Velocity ----------------------------
2439 3482 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2440 3483 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2441
3484
2442 3485 if allData:
2443 3486 boolMetFin = ~numpy.isnan(SNRm)
2444 3487 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
@@ -2446,31 +3489,31 class NonSpecularMeteorDetection(Operation):
2446 3489 #------------------------ Meteor mask ---------------------------------
2447 3490 # #SNR mask
2448 3491 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2449 #
3492 #
2450 3493 # #Erase small objects
2451 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2452 #
3494 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
3495 #
2453 3496 # auxEEJ = numpy.sum(boolMet1,axis=0)
2454 3497 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2455 3498 # indEEJ = numpy.where(indOver)[0]
2456 3499 # indNEEJ = numpy.where(~indOver)[0]
2457 #
3500 #
2458 3501 # boolMetFin = boolMet1
2459 #
3502 #
2460 3503 # if indEEJ.size > 0:
2461 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2462 #
3504 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
3505 #
2463 3506 # boolMet2 = coh > cohThresh
2464 3507 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2465 #
3508 #
2466 3509 # #Final Meteor mask
2467 3510 # boolMetFin = boolMet1|boolMet2
2468
3511
2469 3512 #Coherence mask
2470 3513 boolMet1 = coh > 0.75
2471 3514 struc = numpy.ones((30,1))
2472 3515 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2473
3516
2474 3517 #Derivative mask
2475 3518 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2476 3519 boolMet2 = derPhase < 0.2
@@ -2487,7 +3530,7 class NonSpecularMeteorDetection(Operation):
2487 3530
2488 3531 tmet = coordMet[0]
2489 3532 hmet = coordMet[1]
2490
3533
2491 3534 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2492 3535 data_param[:,0] = utctime
2493 3536 data_param[:,1] = tmet
@@ -2496,7 +3539,7 class NonSpecularMeteorDetection(Operation):
2496 3539 data_param[:,4] = velRad[tmet,hmet]
2497 3540 data_param[:,5] = coh[tmet,hmet]
2498 3541 data_param[:,6:] = phase[:,tmet,hmet].T
2499
3542
2500 3543 elif mode == 'DBS':
2501 3544 dataOut.groupList = numpy.arange(nChannels)
2502 3545
@@ -2504,7 +3547,7 class NonSpecularMeteorDetection(Operation):
2504 3547 phase = numpy.angle(data_acf[:,1,:,:])
2505 3548 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2506 3549 velRad = phase*lamb/(4*numpy.pi*tSamp)
2507
3550
2508 3551 #Spectral width
2509 3552 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2510 3553 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
@@ -2519,24 +3562,24 class NonSpecularMeteorDetection(Operation):
2519 3562 #SNR
2520 3563 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2521 3564 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2522
3565
2523 3566 #Radial velocity
2524 3567 boolMet2 = numpy.abs(velRad) < 20
2525 3568 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2526
3569
2527 3570 #Spectral Width
2528 3571 boolMet3 = spcWidth < 30
2529 3572 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2530 3573 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2531 3574 boolMetFin = boolMet1&boolMet2&boolMet3
2532
3575
2533 3576 #Creating data_param
2534 3577 coordMet = numpy.where(boolMetFin)
2535 3578
2536 3579 cmet = coordMet[0]
2537 3580 tmet = coordMet[1]
2538 3581 hmet = coordMet[2]
2539
3582
2540 3583 data_param = numpy.zeros((tmet.size, 7))
2541 3584 data_param[:,0] = utctime
2542 3585 data_param[:,1] = cmet
@@ -2545,7 +3588,7 class NonSpecularMeteorDetection(Operation):
2545 3588 data_param[:,4] = SNR[cmet,tmet,hmet].T
2546 3589 data_param[:,5] = velRad[cmet,tmet,hmet].T
2547 3590 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2548
3591
2549 3592 # self.dataOut.data_param = data_int
2550 3593 if len(data_param) == 0:
2551 3594 dataOut.flagNoData = True
@@ -2555,21 +3598,21 class NonSpecularMeteorDetection(Operation):
2555 3598 def __erase_small(self, binArray, threshX, threshY):
2556 3599 labarray, numfeat = ndimage.measurements.label(binArray)
2557 3600 binArray1 = numpy.copy(binArray)
2558
3601
2559 3602 for i in range(1,numfeat + 1):
2560 3603 auxBin = (labarray==i)
2561 3604 auxSize = auxBin.sum()
2562
3605
2563 3606 x,y = numpy.where(auxBin)
2564 3607 widthX = x.max() - x.min()
2565 3608 widthY = y.max() - y.min()
2566
3609
2567 3610 #width X: 3 seg -> 12.5*3
2568 #width Y:
2569
3611 #width Y:
3612
2570 3613 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2571 3614 binArray1[auxBin] = False
2572
3615
2573 3616 return binArray1
2574 3617
2575 3618 #--------------- Specular Meteor ----------------
@@ -2579,36 +3622,36 class SMDetection(Operation):
2579 3622 Function DetectMeteors()
2580 3623 Project developed with paper:
2581 3624 HOLDSWORTH ET AL. 2004
2582
3625
2583 3626 Input:
2584 3627 self.dataOut.data_pre
2585
3628
2586 3629 centerReceiverIndex: Index of the channel that is the center receiver
2587
3630
2588 3631 hei_ref: Height reference for the Beacon signal extraction
2589 3632 tauindex:
2590 3633 predefinedPhaseShifts: Predefined phase offset for the voltage signals
2591
3634
2592 3635 cohDetection: Whether to use Coherent detection or not
2593 3636 cohDet_timeStep: Coherent Detection calculation time step
2594 3637 cohDet_thresh: Coherent Detection phase threshold to correct phases
2595
3638
2596 3639 noise_timeStep: Noise calculation time step
2597 3640 noise_multiple: Noise multiple to define signal threshold
2598
3641
2599 3642 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2600 3643 multDet_rangeLimit: Multiple Detection Removal range limit in km
2601
3644
2602 3645 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2603 SNRThresh: Minimum SNR of the meteor signal for it to be considered a meteor
2604
3646 SNRThresh: Minimum SNR of the meteor signal for it to be considered a meteor
3647
2605 3648 hmin: Minimum height of a meteor for it to be used in the subsequent wind estimation
2606 3649 hmax: Maximum height of a meteor for it to be used in the subsequent wind estimation
2607 3650 azimuth: Azimuth angle correction
2608
3651
2609 3652 Affected:
2610 3653 self.dataOut.data_param
2611
3654
2612 3655 Rejection Criteria (Errors):
2613 3656 0: No error; analysis OK
2614 3657 1: SNR < SNR threshold
@@ -2627,9 +3670,9 class SMDetection(Operation):
2627 3670 14: height ambiguous echo: more than one possible height within 70 to 110 km
2628 3671 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2629 3672 16: oscillatory echo, indicating the event is most likely not an underdense echo
2630
3673
2631 3674 17: phase difference in meteor Reestimation
2632
3675
2633 3676 Data Storage:
2634 3677 Meteors for Wind Estimation (8):
2635 3678 Utc Time | Range Height
@@ -2637,75 +3680,75 class SMDetection(Operation):
2637 3680 VelRad errorVelRad
2638 3681 Phase0 Phase1 Phase2 Phase3
2639 3682 TypeError
2640
2641 '''
2642
3683
3684 '''
3685
2643 3686 def run(self, dataOut, hei_ref = None, tauindex = 0,
2644 3687 phaseOffsets = None,
2645 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
3688 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2646 3689 noise_timeStep = 4, noise_multiple = 4,
2647 3690 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2648 3691 phaseThresh = 20, SNRThresh = 5,
2649 3692 hmin = 50, hmax=150, azimuth = 0,
2650 3693 channelPositions = None) :
2651
2652
3694
3695
2653 3696 #Getting Pairslist
2654 3697 if channelPositions is None:
2655 3698 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2656 3699 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella (star) layout
2657 3700 meteorOps = SMOperations()
2658 3701 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2659 heiRang = dataOut.heightList
3702 heiRang = dataOut.getHeiRange()
2660 3703 #Get Beacon signal - No Beacon signal anymore
2661 3704 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2662 #
3705 #
2663 3706 # if hei_ref != None:
2664 3707 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2665 #
2666
2667
3708 #
3709
3710
2668 3711 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2669 3712 # see if the user put in pre defined phase shifts
2670 3713 voltsPShift = dataOut.data_pre.copy()
2671
3714
2672 3715 # if predefinedPhaseShifts != None:
2673 3716 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2674 #
3717 #
2675 3718 # # elif beaconPhaseShifts:
2676 3719 # # #get hardware phase shifts using beacon signal
2677 3720 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2678 3721 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2679 #
3722 #
2680 3723 # else:
2681 # hardwarePhaseShifts = numpy.zeros(5)
2682 #
3724 # hardwarePhaseShifts = numpy.zeros(5)
3725 #
2683 3726 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2684 3727 # for i in range(self.dataOut.data_pre.shape[0]):
2685 3728 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2686 3729
2687 3730 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2688
3731
2689 3732 #Remove DC
2690 3733 voltsDC = numpy.mean(voltsPShift,1)
2691 3734 voltsDC = numpy.mean(voltsDC,1)
2692 3735 for i in range(voltsDC.shape[0]):
2693 3736 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2694
2695 #Don't consider the last heights; they're used to calculate the hardware phase shift
3737
3738 #Don't consider the last heights; they're used to calculate the hardware phase shift
2696 3739 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2697
3740
2698 3741 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2699 3742 #Coherent Detection
2700 3743 if cohDetection:
2701 3744 #use coherent detection to get the net power
2702 3745 cohDet_thresh = cohDet_thresh*numpy.pi/180
2703 3746 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2704
3747
2705 3748 #Non-coherent detection!
2706 3749 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2707 3750 #********** END OF COH/NON-COH POWER CALCULATION**********************
2708
3751
2709 3752 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2710 3753 #Get noise
2711 3754 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
@@ -2715,17 +3758,17 class SMDetection(Operation):
2715 3758 #Meteor echoes detection
2716 3759 listMeteors = self.__findMeteors(powerNet, signalThresh)
2717 3760 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
2718
3761
2719 3762 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2720 3763 #Parameters
2721 heiRange = dataOut.heightList
3764 heiRange = dataOut.getHeiRange()
2722 3765 rangeInterval = heiRange[1] - heiRange[0]
2723 3766 rangeLimit = multDet_rangeLimit/rangeInterval
2724 3767 timeLimit = multDet_timeLimit/dataOut.timeInterval
2725 3768 #Multiple detection removals
2726 3769 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2727 3770 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2728
3771
2729 3772 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2730 3773 #Parameters
2731 3774 phaseThresh = phaseThresh*numpy.pi/180
@@ -2736,40 +3779,40 class SMDetection(Operation):
2736 3779 #Estimation of decay times (Errors N 7, 8, 11)
2737 3780 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2738 3781 #******************* END OF METEOR REESTIMATION *******************
2739
3782
2740 3783 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2741 3784 #Calculating Radial Velocity (Error N 15)
2742 3785 radialStdThresh = 10
2743 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
3786 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2744 3787
2745 3788 if len(listMeteors4) > 0:
2746 3789 #Setting New Array
2747 3790 date = dataOut.utctime
2748 3791 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2749
3792
2750 3793 #Correcting phase offset
2751 3794 if phaseOffsets != None:
2752 3795 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2753 3796 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2754
3797
2755 3798 #Second Pairslist
2756 3799 pairsList = []
2757 3800 pairx = (0,1)
2758 3801 pairy = (2,3)
2759 3802 pairsList.append(pairx)
2760 3803 pairsList.append(pairy)
2761
3804
2762 3805 jph = numpy.array([0,0,0,0])
2763 3806 h = (hmin,hmax)
2764 3807 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2765
3808
2766 3809 # #Calculate AOA (Error N 3, 4)
2767 3810 # #JONES ET AL. 1998
2768 3811 # error = arrayParameters[:,-1]
2769 3812 # AOAthresh = numpy.pi/8
2770 3813 # phases = -arrayParameters[:,9:13]
2771 3814 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2772 #
3815 #
2773 3816 # #Calculate Heights (Error N 13 and 14)
2774 3817 # error = arrayParameters[:,-1]
2775 3818 # Ranges = arrayParameters[:,2]
@@ -2777,73 +3820,73 class SMDetection(Operation):
2777 3820 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2778 3821 # error = arrayParameters[:,-1]
2779 3822 #********************* END OF PARAMETERS CALCULATION **************************
2780
2781 #***************************+ PASS DATA TO NEXT STEP **********************
3823
3824 #***************************+ PASS DATA TO NEXT STEP **********************
2782 3825 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2783 3826 dataOut.data_param = arrayParameters
2784
3827
2785 3828 if arrayParameters is None:
2786 3829 dataOut.flagNoData = True
2787 3830 else:
2788 3831 dataOut.flagNoData = True
2789
3832
2790 3833 return
2791
3834
2792 3835 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2793
3836
2794 3837 minIndex = min(newheis[0])
2795 3838 maxIndex = max(newheis[0])
2796
3839
2797 3840 voltage = voltage0[:,:,minIndex:maxIndex+1]
2798 3841 nLength = voltage.shape[1]//n #integer block length (floor division for Python 3)
2799 3842 nMin = 0
2800 3843 nMax = 0
2801 3844 phaseOffset = numpy.zeros((len(pairslist),n))
2802
3845
2803 3846 for i in range(n):
2804 3847 nMax += nLength
2805 3848 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2806 3849 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2807 phaseOffset[:,i] = phaseCCF.transpose()
3850 phaseOffset[:,i] = phaseCCF.transpose()
2808 3851 nMin = nMax
2809 3852 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2810
3853
2811 3854 #Remove Outliers
2812 3855 factor = 2
2813 3856 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2814 3857 dw = numpy.std(wt,axis = 1)
2815 3858 dw = dw.reshape((dw.size,1))
2816 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
3859 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2817 3860 phaseOffset[ind] = numpy.nan
2818 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2819
3861 phaseOffset = stats.nanmean(phaseOffset, axis=1)
3862
2820 3863 return phaseOffset
2821
3864
2822 3865 def __shiftPhase(self, data, phaseShift):
2823 3866 #this will shift the phase of a complex number
2824 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
3867 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2825 3868 return dataShifted
2826
3869
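# Quick check of the phase-shift identity used by __shiftPhase above: writing the
# result as abs(data)*exp(1j*(angle(data)+phaseShift)) is the same as multiplying
# by exp(1j*phaseShift), which rotates the phase and keeps the amplitude. Sample
# values below are assumed.
import numpy

data = numpy.array([1 + 1j, 2 - 0.5j])
phaseShift = numpy.pi / 6

shifted = numpy.abs(data) * numpy.exp(1j * (numpy.angle(data) + phaseShift))
print(numpy.allclose(shifted, data * numpy.exp(1j * phaseShift)))   # True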
2827 3870 def __estimatePhaseDifference(self, array, pairslist):
2828 3871 nChannel = array.shape[0]
2829 3872 nHeights = array.shape[2]
2830 3873 numPairs = len(pairslist)
2831 3874 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2832 3875 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2833
3876
2834 3877 #Correct phases
2835 3878 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2836 3879 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2837
2838 if indDer[0].shape[0] > 0:
3880
3881 if indDer[0].shape[0] > 0:
2839 3882 for i in range(indDer[0].shape[0]):
2840 3883 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2841 3884 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2842
3885
2843 3886 # for j in range(numSides):
2844 3887 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2845 3888 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2846 #
3889 #
2847 3890 #Linear
2848 3891 phaseInt = numpy.zeros((numPairs,1))
2849 3892 angAllCCF = phaseCCF[:,[0,1,3,4],0]
@@ -2853,16 +3896,16 class SMDetection(Operation):
2853 3896 #Phase Differences
2854 3897 phaseDiff = phaseInt - phaseCCF[:,2,:]
2855 3898 phaseArrival = phaseInt.reshape(phaseInt.size)
2856
3899
2857 3900 #Dealias
2858 3901 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2859 3902 # indAlias = numpy.where(phaseArrival > numpy.pi)
2860 3903 # phaseArrival[indAlias] -= 2*numpy.pi
2861 3904 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2862 3905 # phaseArrival[indAlias] += 2*numpy.pi
2863
3906
2864 3907 return phaseDiff, phaseArrival
2865
3908
2866 3909 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2867 3910 #this function will run the coherent detection used in Holdworth et al. 2004 and return the net power
2868 3911 #find the phase shifts of each channel over 1 second intervals
@@ -2872,25 +3915,25 class SMDetection(Operation):
2872 3915 numHeights = volts.shape[2]
2873 3916 nChannel = volts.shape[0]
2874 3917 voltsCohDet = volts.copy()
2875
3918
2876 3919 pairsarray = numpy.array(pairslist)
2877 3920 indSides = pairsarray[:,1]
2878 3921 # indSides = numpy.array(range(nChannel))
2879 3922 # indSides = numpy.delete(indSides, indCenter)
2880 #
3923 #
2881 3924 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2882 3925 listBlocks = numpy.array_split(volts, numBlocks, 1)
2883
3926
2884 3927 startInd = 0
2885 3928 endInd = 0
2886
3929
2887 3930 for i in range(numBlocks):
2888 3931 startInd = endInd
2889 endInd = endInd + listBlocks[i].shape[1]
2890
3932 endInd = endInd + listBlocks[i].shape[1]
3933
2891 3934 arrayBlock = listBlocks[i]
2892 3935 # arrayBlockCenter = listCenter[i]
2893
3936
2894 3937 #Estimate the Phase Difference
2895 3938 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2896 3939 #Phase Difference RMS
@@ -2902,21 +3945,21 class SMDetection(Operation):
2902 3945 for j in range(indSides.size):
2903 3946 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2904 3947 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2905
3948
2906 3949 return voltsCohDet
2907
3950
2908 3951 def __calculateCCF(self, volts, pairslist ,laglist):
2909
3952
2910 3953 nHeights = volts.shape[2]
2911 nPoints = volts.shape[1]
3954 nPoints = volts.shape[1]
2912 3955 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2913
3956
2914 3957 for i in range(len(pairslist)):
2915 3958 volts1 = volts[pairslist[i][0]]
2916 volts2 = volts[pairslist[i][1]]
2917
3959 volts2 = volts[pairslist[i][1]]
3960
2918 3961 for t in range(len(laglist)):
2919 idxT = laglist[t]
3962 idxT = laglist[t]
2920 3963 if idxT >= 0:
2921 3964 vStacked = numpy.vstack((volts2[idxT:,:],
2922 3965 numpy.zeros((idxT, nHeights),dtype='complex')))
@@ -2924,10 +3967,10 class SMDetection(Operation):
2924 3967 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2925 3968 volts2[:(nPoints + idxT),:]))
2926 3969 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2927
3970
2928 3971 vStacked = None
2929 3972 return voltsCCF
2930
3973
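# Sketch of the lagged cross-correlation computed in __calculateCCF above: the
# second channel is shifted by the lag (zero-padded at the end or the start) and
# multiplied by the conjugate of the first, summing over profiles. The signals
# below are synthetic, for illustration only.
import numpy

nPoints = 8
volts1 = numpy.exp(1j * 0.3 * numpy.arange(nPoints))    # assumed channel 0
volts2 = numpy.roll(volts1, 2)                          # assumed channel 1 (lagged copy)

def ccf_at_lag(v1, v2, lag):
    # shift v2 by "lag" samples, padding with zeros, then correlate with v1
    if lag >= 0:
        vShift = numpy.concatenate((v2[lag:], numpy.zeros(lag, dtype='complex')))
    else:
        vShift = numpy.concatenate((numpy.zeros(-lag, dtype='complex'), v2[:lag]))
    return numpy.sum(numpy.conjugate(v1) * vShift)

print([round(abs(ccf_at_lag(volts1, volts2, lag)), 2) for lag in (-2, -1, 0, 1, 2)])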
2931 3974 def __getNoise(self, power, timeSegment, timeInterval):
2932 3975 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2933 3976 numBlocks = int(power.shape[0]/numProfPerBlock)
@@ -2936,100 +3979,100 class SMDetection(Operation):
2936 3979 listPower = numpy.array_split(power, numBlocks, 0)
2937 3980 noise = numpy.zeros((power.shape[0], power.shape[1]))
2938 3981 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2939
3982
2940 3983 startInd = 0
2941 3984 endInd = 0
2942
3985
2943 3986 for i in range(numBlocks): #split per channel
2944 3987 startInd = endInd
2945 endInd = endInd + listPower[i].shape[0]
2946
3988 endInd = endInd + listPower[i].shape[0]
3989
2947 3990 arrayBlock = listPower[i]
2948 3991 noiseAux = numpy.mean(arrayBlock, 0)
2949 3992 # noiseAux = numpy.median(noiseAux)
2950 3993 # noiseAux = numpy.mean(arrayBlock)
2951 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2952
3994 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
3995
2953 3996 noiseAux1 = numpy.mean(arrayBlock)
2954 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2955
3997 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
3998
2956 3999 return noise, noise1
2957
4000
2958 4001 def __findMeteors(self, power, thresh):
2959 4002 nProf = power.shape[0]
2960 4003 nHeights = power.shape[1]
2961 4004 listMeteors = []
2962
4005
2963 4006 for i in range(nHeights):
2964 4007 powerAux = power[:,i]
2965 4008 threshAux = thresh[:,i]
2966
4009
2967 4010 indUPthresh = numpy.where(powerAux > threshAux)[0]
2968 4011 indDNthresh = numpy.where(powerAux <= threshAux)[0]
2969
4012
2970 4013 j = 0
2971
4014
2972 4015 while (j < indUPthresh.size - 2):
2973 4016 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
2974 4017 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
2975 4018 indDNthresh = indDNthresh[indDNAux]
2976
4019
2977 4020 if (indDNthresh.size > 0):
2978 4021 indEnd = indDNthresh[0] - 1
2979 4022 indInit = indUPthresh[j]
2980
4023
2981 4024 meteor = powerAux[indInit:indEnd + 1]
2982 4025 indPeak = meteor.argmax() + indInit
2983 4026 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
2984
4027
2985 4028 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHEQUEAR!!!!!
2986 4029 j = numpy.where(indUPthresh == indEnd)[0] + 1
2987 4030 else: j+=1
2988 4031 else: j+=1
2989
4032
2990 4033 return listMeteors
2991
4034
2992 4035 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
2993
2994 arrayMeteors = numpy.asarray(listMeteors)
4036
4037 arrayMeteors = numpy.asarray(listMeteors)
2995 4038 listMeteors1 = []
2996
4039
2997 4040 while arrayMeteors.shape[0] > 0:
2998 4041 FLAs = arrayMeteors[:,4]
2999 4042 maxFLA = FLAs.argmax()
3000 4043 listMeteors1.append(arrayMeteors[maxFLA,:])
3001
4044
3002 4045 MeteorInitTime = arrayMeteors[maxFLA,1]
3003 4046 MeteorEndTime = arrayMeteors[maxFLA,3]
3004 4047 MeteorHeight = arrayMeteors[maxFLA,0]
3005
4048
3006 4049 #Check neighborhood
3007 4050 maxHeightIndex = MeteorHeight + rangeLimit
3008 4051 minHeightIndex = MeteorHeight - rangeLimit
3009 4052 minTimeIndex = MeteorInitTime - timeLimit
3010 4053 maxTimeIndex = MeteorEndTime + timeLimit
3011
4054
3012 4055 #Check Heights
3013 4056 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3014 4057 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3015 4058 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3016
4059
3017 4060 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3018
4061
3019 4062 return listMeteors1
3020
4063
3021 4064 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3022 4065 numHeights = volts.shape[2]
3023 4066 nChannel = volts.shape[0]
3024
4067
3025 4068 thresholdPhase = thresh[0]
3026 4069 thresholdNoise = thresh[1]
3027 4070 thresholdDB = float(thresh[2])
3028
4071
3029 4072 thresholdDB1 = 10**(thresholdDB/10)
3030 4073 pairsarray = numpy.array(pairslist)
3031 4074 indSides = pairsarray[:,1]
3032
4075
3033 4076 pairslist1 = list(pairslist)
3034 4077 pairslist1.append((0,1))
3035 4078 pairslist1.append((3,4))
@@ -3038,31 +4081,31 class SMDetection(Operation):
3038 4081 listPowerSeries = []
3039 4082 listVoltageSeries = []
3040 4083 #volts holds the raw data
3041
4084
3042 4085 if frequency == 30e6:
3043 4086 timeLag = 45*10**-3
3044 4087 else:
3045 4088 timeLag = 15*10**-3
3046 4089 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag so it can be used as an index
3047
4090
3048 4091 for i in range(len(listMeteors)):
3049
4092
3050 4093 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3051 4094 meteorAux = numpy.zeros(16)
3052
4095
3053 4096 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3054 4097 mHeight = listMeteors[i][0]
3055 4098 mStart = listMeteors[i][1]
3056 4099 mPeak = listMeteors[i][2]
3057 4100 mEnd = listMeteors[i][3]
3058
4101
3059 4102 #get the volt data between the start and end times of the meteor
3060 4103 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3061 4104 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3062
4105
3063 4106 #3.6. Phase Difference estimation
3064 4107 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3065
4108
3066 4109 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3067 4110 #meteorVolts0.- all Channels, all Profiles
3068 4111 meteorVolts0 = volts[:,:,mHeight]
@@ -3070,15 +4113,15 class SMDetection(Operation):
3070 4113 meteorNoise = noise[:,mHeight]
3071 4114 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3072 4115 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3073
4116
3074 4117 #Times reestimation
3075 4118 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3076 4119 if mStart1.size > 0:
3077 4120 mStart1 = mStart1[-1] + 1
3078
3079 else:
4121
4122 else:
3080 4123 mStart1 = mPeak
3081
4124
3082 4125 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3083 4126 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3084 4127 if mEndDecayTime1.size == 0:
@@ -3086,7 +4129,7 class SMDetection(Operation):
3086 4129 else:
3087 4130 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3088 4131 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3089
4132
3090 4133 #meteorVolts1.- all Channels, from start to end
3091 4134 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3092 4135 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
@@ -3095,17 +4138,17 class SMDetection(Operation):
3095 4138 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3096 4139 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3097 4140 ##################### END PARAMETERS REESTIMATION #########################
3098
4141
3099 4142 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3100 4143 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3101 if meteorVolts2.shape[1] > 0:
4144 if meteorVolts2.shape[1] > 0:
3102 4145 #Phase Difference re-estimation
3103 4146 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3104 4147 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3105 4148 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3106 4149 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3107 4150 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3108
4151
3109 4152 #Phase Difference RMS
3110 4153 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3111 4154 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
@@ -3120,27 +4163,27 class SMDetection(Operation):
3120 4163 #Vectorize
3121 4164 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3122 4165 meteorAux[7:11] = phaseDiffint[0:4]
3123
4166
3124 4167 #Rejection Criteria
3125 4168 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3126 4169 meteorAux[-1] = 17
3127 4170 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3128 4171 meteorAux[-1] = 1
3129
3130
3131 else:
4172
4173
4174 else:
3132 4175 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3133 4176 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3134 4177 PowerSeries = 0
3135
4178
3136 4179 listMeteors1.append(meteorAux)
3137 4180 listPowerSeries.append(PowerSeries)
3138 4181 listVoltageSeries.append(meteorVolts1)
3139
3140 return listMeteors1, listPowerSeries, listVoltageSeries
3141
4182
4183 return listMeteors1, listPowerSeries, listVoltageSeries
4184
3142 4185 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3143
4186
3144 4187 threshError = 10
3145 4188 #Depending if it is 30 or 50 MHz
3146 4189 if frequency == 30e6:
@@ -3148,22 +4191,22 class SMDetection(Operation):
3148 4191 else:
3149 4192 timeLag = 15*10**-3
3150 4193 lag = numpy.ceil(timeLag/timeInterval)
3151
4194
3152 4195 listMeteors1 = []
3153
4196
3154 4197 for i in range(len(listMeteors)):
3155 4198 meteorPower = listPower[i]
3156 4199 meteorAux = listMeteors[i]
3157
4200
3158 4201 if meteorAux[-1] == 0:
3159 4202
3160 try:
4203 try:
3161 4204 indmax = meteorPower.argmax()
3162 4205 indlag = indmax + lag
3163
4206
3164 4207 y = meteorPower[indlag:]
3165 4208 x = numpy.arange(0, y.size)*timeLag
3166
4209
3167 4210 #first guess
3168 4211 a = y[0]
3169 4212 tau = timeLag
@@ -3172,26 +4215,26 class SMDetection(Operation):
3172 4215 y1 = self.__exponential_function(x, *popt)
3173 4216 #error estimation
3174 4217 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3175
4218
3176 4219 decayTime = popt[1]
3177 4220 riseTime = indmax*timeInterval
3178 4221 meteorAux[11:13] = [decayTime, error]
3179
4222
3180 4223 #Table items 7, 8 and 11
3181 4224 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3182 meteorAux[-1] = 7
4225 meteorAux[-1] = 7
3183 4226 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3184 4227 meteorAux[-1] = 8
3185 4228 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3186 meteorAux[-1] = 11
3187
3188
4229 meteorAux[-1] = 11
4230
4231
3189 4232 except:
3190 meteorAux[-1] = 11
3191
3192
4233 meteorAux[-1] = 11
4234
4235
3193 4236 listMeteors1.append(meteorAux)
3194
4237
3195 4238 return listMeteors1
3196 4239
3197 4240 #Exponential Function
@@ -3199,9 +4242,9 class SMDetection(Operation):
3199 4242 def __exponential_function(self, x, a, tau):
3200 4243 y = a*numpy.exp(-x/tau)
3201 4244 return y
3202
4245
3203 4246 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3204
4247
3205 4248 pairslist1 = list(pairslist)
3206 4249 pairslist1.append((0,1))
3207 4250 pairslist1.append((3,4))
@@ -3211,33 +4254,33 class SMDetection(Operation):
3211 4254 c = 3e8
3212 4255 lag = numpy.ceil(timeLag/timeInterval)
3213 4256 freq = 30e6
3214
4257
3215 4258 listMeteors1 = []
3216
4259
3217 4260 for i in range(len(listMeteors)):
3218 4261 meteorAux = listMeteors[i]
3219 4262 if meteorAux[-1] == 0:
3220 4263 mStart = listMeteors[i][1]
3221 mPeak = listMeteors[i][2]
4264 mPeak = listMeteors[i][2]
3222 4265 mLag = mPeak - mStart + lag
3223
4266
3224 4267 #get the volt data between the start and end times of the meteor
3225 4268 meteorVolts = listVolts[i]
3226 4269 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3227 4270
3228 4271 #Get CCF
3229 4272 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3230
4273
3231 4274 #Method 2
3232 4275 slopes = numpy.zeros(numPairs)
3233 4276 time = numpy.array([-2,-1,1,2])*timeInterval
3234 4277 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3235
4278
3236 4279 #Correct phases
3237 4280 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3238 4281 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3239
3240 if indDer[0].shape[0] > 0:
4282
4283 if indDer[0].shape[0] > 0:
3241 4284 for i in range(indDer[0].shape[0]):
3242 4285 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3243 4286 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
@@ -3246,51 +4289,51 class SMDetection(Operation):
3246 4289 for j in range(numPairs):
3247 4290 fit = stats.linregress(time, angAllCCF[j,:])
3248 4291 slopes[j] = fit[0]
3249
4292
3250 4293 #Remove Outlier
3251 4294 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3252 4295 # slopes = numpy.delete(slopes,indOut)
3253 4296 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3254 4297 # slopes = numpy.delete(slopes,indOut)
3255
4298
3256 4299 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3257 4300 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3258 4301 meteorAux[-2] = radialError
3259 4302 meteorAux[-3] = radialVelocity
3260
4303
3261 4304 #Setting Error
3262 4305 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3263 if numpy.abs(radialVelocity) > 200:
4306 if numpy.abs(radialVelocity) > 200:
3264 4307 meteorAux[-1] = 15
3265 4308 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3266 4309 elif radialError > radialStdThresh:
3267 4310 meteorAux[-1] = 12
3268
4311
3269 4312 listMeteors1.append(meteorAux)
3270 4313 return listMeteors1
3271
4314
3272 4315 def __setNewArrays(self, listMeteors, date, heiRang):
3273
4316
3274 4317 #New arrays
3275 4318 arrayMeteors = numpy.array(listMeteors)
3276 4319 arrayParameters = numpy.zeros((len(listMeteors), 13))
3277
4320
3278 4321 #Date inclusion
3279 4322 # date = re.findall(r'\((.*?)\)', date)
3280 4323 # date = date[0].split(',')
3281 4324 # date = map(int, date)
3282 #
4325 #
3283 4326 # if len(date)<6:
3284 4327 # date.append(0)
3285 #
4328 #
3286 4329 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3287 4330 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3288 4331 arrayDate = numpy.tile(date, (len(listMeteors)))
3289
4332
3290 4333 #Meteor array
3291 4334 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3292 4335 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3293
4336
3294 4337 #Parameters Array
3295 4338 arrayParameters[:,0] = arrayDate #Date
3296 4339 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
@@ -3298,13 +4341,13 class SMDetection(Operation):
3298 4341 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3299 4342 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3300 4343
3301
4344
3302 4345 return arrayParameters
3303
4346
3304 4347 class CorrectSMPhases(Operation):
3305
4348
3306 4349 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3307
4350
3308 4351 arrayParameters = dataOut.data_param
3309 4352 pairsList = []
3310 4353 pairx = (0,1)
@@ -3312,49 +4355,49 class CorrectSMPhases(Operation):
3312 4355 pairsList.append(pairx)
3313 4356 pairsList.append(pairy)
3314 4357 jph = numpy.zeros(4)
3315
4358
3316 4359 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3317 4360 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3318 4361 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3319
4362
3320 4363 meteorOps = SMOperations()
3321 4364 if channelPositions is None:
3322 4365 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3323 4366 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Star configuration
3324
4367
3325 4368 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3326 4369 h = (hmin,hmax)
3327
4370
3328 4371 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3329
4372
3330 4373 dataOut.data_param = arrayParameters
3331 4374 return
3332 4375
3333 4376 class SMPhaseCalibration(Operation):
3334
4377
3335 4378 __buffer = None
3336 4379
3337 4380 __initime = None
3338 4381
3339 4382 __dataReady = False
3340
4383
3341 4384 __isConfig = False
3342
4385
3343 4386 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3344
4387
3345 4388 dataTime = currentTime + paramInterval
3346 4389 deltaTime = dataTime - initTime
3347
4390
3348 4391 if deltaTime >= outputInterval or deltaTime < 0:
3349 4392 return True
3350
4393
3351 4394 return False
3352
4395
3353 4396 def __getGammas(self, pairs, d, phases):
3354 4397 gammas = numpy.zeros(2)
3355
4398
3356 4399 for i in range(len(pairs)):
3357
4400
3358 4401 pairi = pairs[i]
3359 4402
3360 4403 phip3 = phases[:,pairi[0]]
@@ -3368,7 +4411,7 class SMPhaseCalibration(Operation):
3368 4411 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3369 4412 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3370 4413 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3371
4414
3372 4415 #Revised distribution
3373 4416 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3374 4417
@@ -3377,39 +4420,39 class SMPhaseCalibration(Operation):
3377 4420 rmin = -0.5*numpy.pi
3378 4421 rmax = 0.5*numpy.pi
3379 4422 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3380
4423
3381 4424 meteorsY = phaseHisto[0]
3382 4425 phasesX = phaseHisto[1][:-1]
3383 4426 width = phasesX[1] - phasesX[0]
3384 4427 phasesX += width/2
3385
4428
3386 4429 #Gaussian approximation
3387 4430 bpeak = meteorsY.argmax()
3388 4431 peak = meteorsY.max()
3389 4432 jmin = bpeak - 5
3390 4433 jmax = bpeak + 5 + 1
3391
4434
3392 4435 if jmin<0:
3393 4436 jmin = 0
3394 4437 jmax = 6
3395 4438 elif jmax > meteorsY.size:
3396 4439 jmin = meteorsY.size - 6
3397 4440 jmax = meteorsY.size
3398
4441
3399 4442 x0 = numpy.array([peak,bpeak,50])
3400 4443 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3401
4444
3402 4445 #Gammas
3403 4446 gammas[i] = coeff[0][1]
3404
4447
3405 4448 return gammas
3406
4449
3407 4450 def __residualFunction(self, coeffs, y, t):
3408
4451
3409 4452 return y - self.__gauss_function(t, coeffs)
3410 4453
3411 4454 def __gauss_function(self, t, coeffs):
3412
4455
3413 4456 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3414 4457
3415 4458 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
@@ -3430,16 +4473,16 class SMPhaseCalibration(Operation):
3430 4473 max_xangle = range_angle[iz]/2 + center_xangle
3431 4474 min_yangle = -range_angle[iz]/2 + center_yangle
3432 4475 max_yangle = range_angle[iz]/2 + center_yangle
3433
4476
3434 4477 inc_x = (max_xangle-min_xangle)/nstepsx
3435 4478 inc_y = (max_yangle-min_yangle)/nstepsy
3436
4479
3437 4480 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3438 4481 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3439 4482 penalty = numpy.zeros((nstepsx,nstepsy))
3440 4483 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3441 4484 jph = numpy.zeros(nchan)
3442
4485
3443 4486 # Iterations looking for the offset
3444 4487 for iy in range(int(nstepsy)):
3445 4488 for ix in range(int(nstepsx)):
@@ -3447,46 +4490,46 class SMPhaseCalibration(Operation):
3447 4490 d2 = d[pairsList[1][1]]
3448 4491 d5 = d[pairsList[0][0]]
3449 4492 d4 = d[pairsList[0][1]]
3450
4493
3451 4494 alp2 = alpha_y[iy] #gamma 1
3452 alp4 = alpha_x[ix] #gamma 0
3453
4495 alp4 = alpha_x[ix] #gamma 0
4496
3454 4497 alp3 = -alp2*d3/d2 - gammas[1]
3455 4498 alp5 = -alp4*d5/d4 - gammas[0]
3456 4499 # jph[pairy[1]] = alpha_y[iy]
3457 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3458
4500 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
4501
3459 4502 # jph[pairx[1]] = alpha_x[ix]
3460 4503 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3461 4504 jph[pairsList[0][1]] = alp4
3462 4505 jph[pairsList[0][0]] = alp5
3463 4506 jph[pairsList[1][0]] = alp3
3464 jph[pairsList[1][1]] = alp2
4507 jph[pairsList[1][1]] = alp2
3465 4508 jph_array[:,ix,iy] = jph
3466 4509 # d = [2.0,2.5,2.5,2.0]
3467 #still need to check that the meteors are read correctly
4510 #still need to check that the meteors are read correctly
3468 4511 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3469 4512 error = meteorsArray1[:,-1]
3470 4513 ind1 = numpy.where(error==0)[0]
3471 4514 penalty[ix,iy] = ind1.size
3472
4515
3473 4516 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3474 4517 phOffset = jph_array[:,i,j]
3475
4518
3476 4519 center_xangle = phOffset[pairx[1]]
3477 4520 center_yangle = phOffset[pairy[1]]
3478
4521
3479 4522 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3480 phOffset = phOffset*180/numpy.pi
4523 phOffset = phOffset*180/numpy.pi
3481 4524 return phOffset
3482
3483
4525
4526
3484 4527 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3485
4528
3486 4529 dataOut.flagNoData = True
3487 self.__dataReady = False
4530 self.__dataReady = False
3488 4531 dataOut.outputInterval = nHours*3600
3489
4532
3490 4533 if self.__isConfig == False:
3491 4534 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3492 4535 #Get Initial LTC time
@@ -3494,19 +4537,19 class SMPhaseCalibration(Operation):
3494 4537 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3495 4538
3496 4539 self.__isConfig = True
3497
4540
3498 4541 if self.__buffer is None:
3499 4542 self.__buffer = dataOut.data_param.copy()
3500 4543
3501 4544 else:
3502 4545 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3503
4546
3504 4547 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3505
4548
3506 4549 if self.__dataReady:
3507 4550 dataOut.utctimeInit = self.__initime
3508 4551 self.__initime += dataOut.outputInterval #to erase time offset
3509
4552
3510 4553 freq = dataOut.frequency
3511 4554 c = dataOut.C #m/s
3512 4555 lamb = c/freq
@@ -3528,13 +4571,13 class SMPhaseCalibration(Operation):
3528 4571 pairs.append((1,0))
3529 4572 else:
3530 4573 pairs.append((0,1))
3531
4574
3532 4575 if distances[3] > distances[2]:
3533 4576 pairs.append((3,2))
3534 4577 else:
3535 4578 pairs.append((2,3))
3536 4579 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3537
4580
3538 4581 meteorsArray = self.__buffer
3539 4582 error = meteorsArray[:,-1]
3540 4583 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
@@ -3542,7 +4585,7 class SMPhaseCalibration(Operation):
3542 4585 meteorsArray = meteorsArray[ind1,:]
3543 4586 meteorsArray[:,-1] = 0
3544 4587 phases = meteorsArray[:,8:12]
3545
4588
3546 4589 #Calculate Gammas
3547 4590 gammas = self.__getGammas(pairs, distances, phases)
3548 4591 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
@@ -3552,22 +4595,22 class SMPhaseCalibration(Operation):
3552 4595 dataOut.data_output = -phasesOff
3553 4596 dataOut.flagNoData = False
3554 4597 self.__buffer = None
3555
3556
4598
4599
3557 4600 return
3558
4601
3559 4602 class SMOperations():
3560
4603
3561 4604 def __init__(self):
3562
4605
3563 4606 return
3564
4607
3565 4608 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3566
4609
3567 4610 arrayParameters = arrayParameters0.copy()
3568 4611 hmin = h[0]
3569 4612 hmax = h[1]
3570
4613
3571 4614 #Calculate AOA (Error N 3, 4)
3572 4615 #JONES ET AL. 1998
3573 4616 AOAthresh = numpy.pi/8
@@ -3575,72 +4618,72 class SMOperations():
3575 4618 phases = -arrayParameters[:,8:12] + jph
3576 4619 # phases = numpy.unwrap(phases)
3577 4620 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3578
4621
3579 4622 #Calculate Heights (Error N 13 and 14)
3580 4623 error = arrayParameters[:,-1]
3581 4624 Ranges = arrayParameters[:,1]
3582 4625 zenith = arrayParameters[:,4]
3583 4626 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3584
4627
3585 4628 #----------------------- Get Final data ------------------------------------
3586 4629 # error = arrayParameters[:,-1]
3587 4630 # ind1 = numpy.where(error==0)[0]
3588 4631 # arrayParameters = arrayParameters[ind1,:]
3589
4632
3590 4633 return arrayParameters
3591
4634
3592 4635 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3593
4636
3594 4637 arrayAOA = numpy.zeros((phases.shape[0],3))
3595 4638 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3596
4639
3597 4640 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3598 4641 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3599 4642 arrayAOA[:,2] = cosDirError
3600
4643
3601 4644 azimuthAngle = arrayAOA[:,0]
3602 4645 zenithAngle = arrayAOA[:,1]
3603
4646
3604 4647 #Setting Error
3605 4648 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3606 4649 error[indError] = 0
3607 4650 #Number 3: AOA not feasible
3608 4651 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3609 error[indInvalid] = 3
4652 error[indInvalid] = 3
3610 4653 #Number 4: Large difference in AOAs obtained from different antenna baselines
3611 4654 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3612 error[indInvalid] = 4
4655 error[indInvalid] = 4
3613 4656 return arrayAOA, error
3614
4657
3615 4658 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3616
4659
3617 4660 #Initializing some variables
3618 4661 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3619 4662 ang_aux = ang_aux.reshape(1,ang_aux.size)
3620
4663
3621 4664 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3622 4665 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3623
3624
4666
4667
3625 4668 for i in range(2):
3626 4669 ph0 = arrayPhase[:,pairsList[i][0]]
3627 4670 ph1 = arrayPhase[:,pairsList[i][1]]
3628 4671 d0 = distances[pairsList[i][0]]
3629 4672 d1 = distances[pairsList[i][1]]
3630
3631 ph0_aux = ph0 + ph1
4673
4674 ph0_aux = ph0 + ph1
3632 4675 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3633 4676 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3634 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
4677 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3635 4678 #First Estimation
3636 4679 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3637
4680
3638 4681 #Most-Accurate Second Estimation
3639 4682 phi1_aux = ph0 - ph1
3640 4683 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3641 4684 #Direction Cosine 1
3642 4685 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3643
4686
3644 4687 #Searching the correct Direction Cosine
3645 4688 cosdir0_aux = cosdir0[:,i]
3646 4689 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
@@ -3649,59 +4692,59 class SMOperations():
3649 4692 indcos = cosDiff.argmin(axis = 1)
3650 4693 #Saving Value obtained
3651 4694 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3652
4695
3653 4696 return cosdir0, cosdir
3654
4697
3655 4698 def __calculateAOA(self, cosdir, azimuth):
3656 4699 cosdirX = cosdir[:,0]
3657 4700 cosdirY = cosdir[:,1]
3658
4701
3659 4702 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3660 4703 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3661 4704 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3662
4705
3663 4706 return angles
3664
4707
3665 4708 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3666
4709
3667 4710 Ramb = 375 #Ramb = c/(2*PRF)
3668 4711 Re = 6371 #Earth Radius
3669 4712 heights = numpy.zeros(Ranges.shape)
3670
4713
3671 4714 R_aux = numpy.array([0,1,2])*Ramb
3672 4715 R_aux = R_aux.reshape(1,R_aux.size)
3673 4716
3674 4717 Ranges = Ranges.reshape(Ranges.size,1)
3675
4718
3676 4719 Ri = Ranges + R_aux
3677 4720 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3678
4721
3679 4722 #Check if there is a height between 70 and 110 km
3680 4723 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3681 4724 ind_h = numpy.where(h_bool == 1)[0]
3682
4725
3683 4726 hCorr = hi[ind_h, :]
3684 4727 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3685
4728
3686 4729 hCorr = hi[ind_hCorr][:len(ind_h)]
3687 4730 heights[ind_h] = hCorr
3688
4731
3689 4732 #Setting Error
3690 4733 #Number 13: Height unresolvable echo: no valid height within 70 to 110 km
3691 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
4734 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3692 4735 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3693 4736 error[indError] = 0
3694 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
4737 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3695 4738 error[indInvalid2] = 14
3696 4739 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3697 error[indInvalid1] = 13
3698
4740 error[indInvalid1] = 13
4741
3699 4742 return heights, error
3700
4743
3701 4744 def getPhasePairs(self, channelPositions):
3702 4745 chanPos = numpy.array(channelPositions)
3703 4746 listOper = list(itertools.combinations(list(range(5)),2))
3704
4747
3705 4748 distances = numpy.zeros(4)
3706 4749 axisX = []
3707 4750 axisY = []
@@ -3709,15 +4752,15 class SMOperations():
3709 4752 distY = numpy.zeros(3)
3710 4753 ix = 0
3711 4754 iy = 0
3712
4755
3713 4756 pairX = numpy.zeros((2,2))
3714 4757 pairY = numpy.zeros((2,2))
3715
4758
3716 4759 for i in range(len(listOper)):
3717 4760 pairi = listOper[i]
3718
4761
3719 4762 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3720
4763
3721 4764 if posDif[0] == 0:
3722 4765 axisY.append(pairi)
3723 4766 distY[iy] = posDif[1]
@@ -3726,7 +4769,7 class SMOperations():
3726 4769 axisX.append(pairi)
3727 4770 distX[ix] = posDif[0]
3728 4771 ix += 1
3729
4772
3730 4773 for i in range(2):
3731 4774 if i==0:
3732 4775 dist0 = distX
@@ -3734,7 +4777,7 class SMOperations():
3734 4777 else:
3735 4778 dist0 = distY
3736 4779 axis0 = axisY
3737
4780
3738 4781 side = numpy.argsort(dist0)[:-1]
3739 4782 axis0 = numpy.array(axis0)[side,:]
3740 4783 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
@@ -3742,7 +4785,7 class SMOperations():
3742 4785 side = axis1[axis1 != chanC]
3743 4786 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3744 4787 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3745 if diff1<0:
4788 if diff1<0:
3746 4789 chan2 = side[0]
3747 4790 d2 = numpy.abs(diff1)
3748 4791 chan1 = side[1]
@@ -3752,7 +4795,7 class SMOperations():
3752 4795 d2 = numpy.abs(diff2)
3753 4796 chan1 = side[0]
3754 4797 d1 = numpy.abs(diff1)
3755
4798
3756 4799 if i==0:
3757 4800 chanCX = chanC
3758 4801 chan1X = chan1
@@ -3764,10 +4807,10 class SMOperations():
3764 4807 chan2Y = chan2
3765 4808 distances[2:4] = numpy.array([d1,d2])
3766 4809 # axisXsides = numpy.reshape(axisX[ix,:],4)
3767 #
4810 #
3768 4811 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3769 4812 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3770 #
4813 #
3771 4814 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3772 4815 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3773 4816 # channel25X = int(pairX[0,ind25X])
@@ -3776,59 +4819,59 class SMOperations():
3776 4819 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3777 4820 # channel25Y = int(pairY[0,ind25Y])
3778 4821 # channel20Y = int(pairY[1,ind20Y])
3779
4822
3780 4823 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3781 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3782
4824 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
4825
3783 4826 return pairslist, distances
3784 4827 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3785 #
4828 #
3786 4829 # arrayAOA = numpy.zeros((phases.shape[0],3))
3787 4830 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3788 #
4831 #
3789 4832 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3790 4833 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3791 4834 # arrayAOA[:,2] = cosDirError
3792 #
4835 #
3793 4836 # azimuthAngle = arrayAOA[:,0]
3794 4837 # zenithAngle = arrayAOA[:,1]
3795 #
4838 #
3796 4839 # #Setting Error
3797 4840 # #Number 3: AOA not feasible
3798 4841 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3799 # error[indInvalid] = 3
4842 # error[indInvalid] = 3
3800 4843 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3801 4844 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3802 # error[indInvalid] = 4
4845 # error[indInvalid] = 4
3803 4846 # return arrayAOA, error
3804 #
4847 #
3805 4848 # def __getDirectionCosines(self, arrayPhase, pairsList):
3806 #
4849 #
3807 4850 # #Initializing some variables
3808 4851 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3809 4852 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3810 #
4853 #
3811 4854 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3812 4855 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3813 #
3814 #
4856 #
4857 #
3815 4858 # for i in range(2):
3816 4859 # #First Estimation
3817 4860 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3818 4861 # #Dealias
3819 4862 # indcsi = numpy.where(phi0_aux > numpy.pi)
3820 # phi0_aux[indcsi] -= 2*numpy.pi
4863 # phi0_aux[indcsi] -= 2*numpy.pi
3821 4864 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3822 # phi0_aux[indcsi] += 2*numpy.pi
4865 # phi0_aux[indcsi] += 2*numpy.pi
3823 4866 # #Direction Cosine 0
3824 4867 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3825 #
4868 #
3826 4869 # #Most-Accurate Second Estimation
3827 4870 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3828 4871 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3829 4872 # #Direction Cosine 1
3830 4873 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3831 #
4874 #
3832 4875 # #Searching the correct Direction Cosine
3833 4876 # cosdir0_aux = cosdir0[:,i]
3834 4877 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
@@ -3837,50 +4880,51 class SMOperations():
3837 4880 # indcos = cosDiff.argmin(axis = 1)
3838 4881 # #Saving Value obtained
3839 4882 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3840 #
4883 #
3841 4884 # return cosdir0, cosdir
3842 #
4885 #
3843 4886 # def __calculateAOA(self, cosdir, azimuth):
3844 4887 # cosdirX = cosdir[:,0]
3845 4888 # cosdirY = cosdir[:,1]
3846 #
4889 #
3847 4890 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3848 4891 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3849 4892 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3850 #
4893 #
3851 4894 # return angles
3852 #
4895 #
3853 4896 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3854 #
4897 #
3855 4898 # Ramb = 375 #Ramb = c/(2*PRF)
3856 4899 # Re = 6371 #Earth Radius
3857 4900 # heights = numpy.zeros(Ranges.shape)
3858 #
4901 #
3859 4902 # R_aux = numpy.array([0,1,2])*Ramb
3860 4903 # R_aux = R_aux.reshape(1,R_aux.size)
3861 #
4904 #
3862 4905 # Ranges = Ranges.reshape(Ranges.size,1)
3863 #
4906 #
3864 4907 # Ri = Ranges + R_aux
3865 4908 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3866 #
4909 #
3867 4910 # #Check if there is a height between 70 and 110 km
3868 4911 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3869 4912 # ind_h = numpy.where(h_bool == 1)[0]
3870 #
4913 #
3871 4914 # hCorr = hi[ind_h, :]
3872 4915 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3873 #
3874 # hCorr = hi[ind_hCorr]
4916 #
4917 # hCorr = hi[ind_hCorr]
3875 4918 # heights[ind_h] = hCorr
3876 #
4919 #
3877 4920 # #Setting Error
3878 4921 # #Number 13: Height unresolvable echo: no valid height within 70 to 110 km
3879 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3880 #
3881 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
4922 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
4923 #
4924 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3882 4925 # error[indInvalid2] = 14
3883 4926 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3884 # error[indInvalid1] = 13
3885 #
3886 # return heights, error
4927 # error[indInvalid1] = 13
4928 #
4929 # return heights, error
4930
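For reference, the decay-time re-estimation in SMDetection.__estimateDecayTime above fits the model a*exp(-x/tau) to the post-peak meteor power and scores the fit with a normalized squared error. The sketch below is a minimal standalone version of that step, assuming only numpy and scipy; the helper names are illustrative and not part of this changeset.

import numpy
from scipy import optimize

def exponential_function(x, a, tau):
    # same model as __exponential_function above: a*exp(-x/tau)
    return a * numpy.exp(-x / tau)

def estimate_decay_time(power, timeInterval, timeLag=15e-3, threshError=10):
    # power: 1-D meteor power series; timeInterval: sampling period [s]
    # timeLag: 45 ms at 30 MHz, 15 ms otherwise (as in the code above)
    lag = int(numpy.ceil(timeLag / timeInterval))
    indmax = power.argmax()
    y = power[indmax + lag:]
    x = numpy.arange(y.size) * timeLag
    # first guess: initial amplitude and one lag as the decay constant
    popt, _ = optimize.curve_fit(exponential_function, x, y, p0=[y[0], timeLag])
    y1 = exponential_function(x, *popt)
    # normalized squared error, as in __estimateDecayTime
    error = numpy.sum((y - y1) ** 2) / (numpy.var(y) * (y.size - popt.size))
    riseTime = indmax * timeInterval
    return popt[1], riseTime, error, error <= threshError

A poor fit (error above the threshold, error code 11 above) only flags the meteor; the power series itself is kept.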
@@ -31,7 +31,7 class SpectraProc(ProcessingUnit):
31 31 self.dataOut = Spectra()
32 32 self.id_min = None
33 33 self.id_max = None
34 self.setupReq = False #Add to all processing units
34 self.setupReq = False # Add to all processing units
35 35
36 36 def __updateSpecFromVoltage(self):
37 37
@@ -122,12 +122,12 class SpectraProc(ProcessingUnit):
122 122 if self.dataIn.type == "Spectra":
123 123 self.dataOut.copy(self.dataIn)
124 124 if shift_fft:
125 #shift the data a given number of positions to the right along axis 2
126 shift = int(self.dataOut.nFFTPoints/2)
125 # shift the data a given number of positions to the right along axis 2
126 shift = int(self.dataOut.nFFTPoints / 2)
127 127 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
128 128
129 129 if self.dataOut.data_cspc is not None:
130 #shift the data a given number of positions to the right along axis 2
130 # shift the data a given number of positions to the right along axis 2
131 131 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
132 132 if pairsList:
133 133 self.__selectPairs(pairsList)
@@ -215,7 +215,7 class SpectraProc(ProcessingUnit):
215 215
216 216 return
217 217
218 def selectFFTs(self, minFFT, maxFFT ):
218 def selectFFTs(self, minFFT, maxFFT):
219 219 """
220 220 Selects a block of data based on a set of FFT point values within the range
221 221 minFFT<= FFT <= maxFFT
@@ -264,7 +264,7 class SpectraProc(ProcessingUnit):
264 264 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
265 265
266 266 # determine indices
267 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
267 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
268 268 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
269 269 avg_dB = 10 * \
270 270 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
@@ -274,16 +274,16 class SpectraProc(ProcessingUnit):
274 274 if val >= beacon_dB[0]:
275 275 beacon_heiIndexList.append(avg_dB.tolist().index(val))
276 276
277 #data_spc = data_spc[:,:,beacon_heiIndexList]
277 # data_spc = data_spc[:,:,beacon_heiIndexList]
278 278 data_cspc = None
279 279 if self.dataOut.data_cspc is not None:
280 280 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
281 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
281 # data_cspc = data_cspc[:,:,beacon_heiIndexList]
282 282
283 283 data_dc = None
284 284 if self.dataOut.data_dc is not None:
285 285 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
286 #data_dc = data_dc[:,beacon_heiIndexList]
286 # data_dc = data_dc[:,beacon_heiIndexList]
287 287
288 288 self.dataOut.data_spc = data_spc
289 289 self.dataOut.data_cspc = data_cspc
@@ -302,24 +302,24 class SpectraProc(ProcessingUnit):
302 302 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
303 303
304 304 if (maxIndex >= self.dataOut.nProfiles):
305 maxIndex = self.dataOut.nProfiles-1
305 maxIndex = self.dataOut.nProfiles - 1
306 306
307 #Spectra
308 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
307 # Spectra
308 data_spc = self.dataOut.data_spc[:, minIndex:maxIndex + 1, :]
309 309
310 310 data_cspc = None
311 311 if self.dataOut.data_cspc is not None:
312 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
312 data_cspc = self.dataOut.data_cspc[:, minIndex:maxIndex + 1, :]
313 313
314 314 data_dc = None
315 315 if self.dataOut.data_dc is not None:
316 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
316 data_dc = self.dataOut.data_dc[minIndex:maxIndex + 1, :]
317 317
318 318 self.dataOut.data_spc = data_spc
319 319 self.dataOut.data_cspc = data_cspc
320 320 self.dataOut.data_dc = data_dc
321 321
322 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
322 self.dataOut.ippSeconds = self.dataOut.ippSeconds * (self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
323 323 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
324 324 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
325 325
@@ -452,7 +452,7 class removeDC(Operation):
452 452 xx = numpy.zeros([4, 4])
453 453
454 454 for fil in range(4):
455 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
455 xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
456 456
457 457 xx_inv = numpy.linalg.inv(xx)
458 458 xx_aux = xx_inv[0, :]
@@ -488,22 +488,22 class removeInterference(Operation):
488 488 realCspc = numpy.abs(cspc)
489 489
490 490 for i in range(cspc.shape[0]):
491 LinePower= numpy.sum(realCspc[i], axis=0)
492 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
493 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
494 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
495 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
496 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
491 LinePower = numpy.sum(realCspc[i], axis=0)
492 Threshold = numpy.amax(LinePower) - numpy.sort(LinePower)[len(Heights) - int(len(Heights) * 0.1)]
493 SelectedHeights = Heights[ numpy.where(LinePower < Threshold) ]
494 InterferenceSum = numpy.sum(realCspc[i, :, SelectedHeights], axis=0)
495 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.98)]
496 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.99)]
497 497
498 498
499 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
500 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
501 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
502 cspc[i,InterferenceRange,:] = numpy.NaN
499 InterferenceRange = numpy.where(([InterferenceSum > InterferenceThresholdMin])) # , InterferenceSum < InterferenceThresholdMax]) )
500 # InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
501 if len(InterferenceRange) < int(cspc.shape[1] * 0.3):
502 cspc[i, InterferenceRange, :] = numpy.NaN
503 503
504 504 self.dataOut.data_cspc = cspc
505 505
506 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
506 def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
507 507
508 508 jspectra = self.dataOut.data_spc
509 509 jcspectra = self.dataOut.data_cspc
@@ -557,7 +557,7 class removeInterference(Operation):
557 557 # tmp_noise = jnoise[ich] / num_prof
558 558 tmp_noise = jnoise[ich]
559 559 junkspc_interf = junkspc_interf - tmp_noise
560 #junkspc_interf[:,comp_mask_prof] = 0
560 # junkspc_interf[:,comp_mask_prof] = 0
561 561
562 562 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
563 563 jspc_interf = jspc_interf.transpose()
@@ -599,20 +599,20 class removeInterference(Operation):
599 599
600 600 if cinterfid > 0:
601 601 for ip in range(cinterfid * (interf == 2) - 1):
602 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
602 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
603 603 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
604 604 cind = len(ind)
605 605
606 606 if (cind > 0):
607 607 jspectra[ich, interfid[ip], ind] = tmp_noise * \
608 (1 + (numpy.random.uniform(cind) - 0.5) /
608 (1 + (numpy.random.uniform(cind) - 0.5) /
609 609 numpy.sqrt(num_incoh))
610 610
611 611 ind = numpy.array([-2, -1, 1, 2])
612 612 xx = numpy.zeros([4, 4])
613 613
614 614 for id1 in range(4):
615 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
615 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
616 616
617 617 xx_inv = numpy.linalg.inv(xx)
618 618 xx = xx_inv[:, 0]
@@ -621,7 +621,7 class removeInterference(Operation):
621 621 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
622 622 yy.transpose(), xx)
623 623
624 indAux = (jspectra[ich, :, :] < tmp_noise *
624 indAux = (jspectra[ich, :, :] < tmp_noise *
625 625 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
626 626 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
627 627 (1 - 1 / numpy.sqrt(num_incoh))
@@ -671,7 +671,7 class removeInterference(Operation):
671 671 xx = numpy.zeros([4, 4])
672 672
673 673 for id1 in range(4):
674 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
674 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
675 675
676 676 xx_inv = numpy.linalg.inv(xx)
677 677 xx = xx_inv[:, 0]
@@ -686,12 +686,12 class removeInterference(Operation):
686 686
687 687 return 1
688 688
689 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
689 def run(self, dataOut, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
690 690
691 691 self.dataOut = dataOut
692 692
693 693 if mode == 1:
694 self.removeInterference(interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None)
694 self.removeInterference(interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None)
695 695 elif mode == 2:
696 696 self.removeInterference2()
697 697
@@ -888,11 +888,11 class dopplerFlip(Operation):
888 888 freq_dc = int(num_profiles / 2)
890 890 # Flip using a for loop
890 890 for j in range(num_profiles):
891 jspectra_tmp[num_profiles-j-1]= jspectra[j]
891 jspectra_tmp[num_profiles - j - 1] = jspectra[j]
892 892 # Swap the DC profile with the immediately preceding profile
893 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
894 jspectra_tmp[freq_dc]= jspectra[freq_dc]
893 jspectra_tmp[freq_dc - 1] = jspectra[freq_dc - 1]
894 jspectra_tmp[freq_dc] = jspectra[freq_dc]
895 895 # the modified channel is written back into the channel array
896 896 self.dataOut.data_spc[2] = jspectra_tmp
897 897
898 return self.dataOut No newline at end of file
898 return self.dataOut
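For reference, removeDC(mode=2) above replaces the DC bin of each spectrum with the value of a cubic polynomial fitted through the four bins at offsets -2, -1, +1, +2 around DC (the Vandermonde matrix xx and its inverse). Below is a minimal single-channel sketch of that interpolation, assuming numpy; the function name is illustrative only.

import numpy

def interpolate_dc(spc, freq_dc=None):
    # spc: power spectrum with shape (nProfiles, nHeights); returns a copy
    # whose DC bin is the cubic-polynomial estimate through offsets -2,-1,1,2
    num_prof = spc.shape[0]
    if freq_dc is None:
        freq_dc = num_prof // 2            # DC bin after fftshift
    vel = numpy.array([-2, -1, 1, 2])
    ind_vel = (vel + freq_dc) % num_prof   # neighbouring bins around DC
    # Vandermonde matrix of the four offsets: V[i, j] = vel[i]**j
    V = vel[:, None] ** numpy.arange(4)[None, :]
    # the polynomial evaluated at offset 0 is the constant coefficient,
    # i.e. the first row of V^-1 applied to the four sampled bins
    c0_row = numpy.linalg.inv(V)[0, :]
    out = spc.copy()
    out[freq_dc, :] = c0_row @ out[ind_vel, :]
    return out

mode=1 in the same method is the simpler variant: the DC bin is replaced by the average of its two immediate neighbours.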
@@ -28,11 +28,11 class SpectraAFCProc(ProcessingUnit):
28 28
29 29 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
30 30 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
31 self.dataOut.ippSeconds = self.dataIn.getDeltaH()*(10**-6)/0.15
31 self.dataOut.ippSeconds = self.dataIn.getDeltaH() * (10 ** -6) / 0.15
32 32
33 33 self.dataOut.channelList = self.dataIn.channelList
34 34 self.dataOut.heightList = self.dataIn.heightList
35 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
35 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36
37 37 self.dataOut.nBaud = self.dataIn.nBaud
38 38 self.dataOut.nCode = self.dataIn.nCode
@@ -41,8 +41,8 class SpectraAFCProc(ProcessingUnit):
41 41
42 42 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
43 43 self.dataOut.utctime = self.firstdatatime
44 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
45 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data is not flipped
44 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assuming the data is already decoded
45 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assuming the data is not flipped
46 46 self.dataOut.flagShiftFFT = False
47 47
48 48 self.dataOut.nCohInt = self.dataIn.nCohInt
@@ -66,7 +66,7 class SpectraAFCProc(ProcessingUnit):
66 66 return
67 67
68 68 for i in range(nProfiles):
69 self.buffer[:,i,:] = self.buffer[:,i,:]*code[0][i]
69 self.buffer[:, i, :] = self.buffer[:, i, :] * code[0][i]
70 70
71 71 def __getFft(self):
72 72 """
@@ -87,18 +87,18 class SpectraAFCProc(ProcessingUnit):
87 87
88 88 for i in range(nsegments):
89 89 try:
90 _fft_buffer[:,:,i] = self.buffer[:,i:i+self.dataOut.nProfiles]
90 _fft_buffer[:, :, i] = self.buffer[:, i:i + self.dataOut.nProfiles]
91 91
92 92 if self.code is not None:
93 _fft_buffer[:,:,i] = _fft_buffer[:,:,i]*self.code[0]
93 _fft_buffer[:, :, i] = _fft_buffer[:, :, i] * self.code[0]
94 94 except:
95 95 pass
96 96
97 97 fft_volt = numpy.fft.fft(_fft_buffer, n=self.dataOut.nFFTPoints, axis=1)
98 98 fft_volt = fft_volt.astype(numpy.dtype('complex'))
99 dc = fft_volt[:,0,:]
99 dc = fft_volt[:, 0, :]
100 100
101 #self-spectra computation
101 # self-spectra computation
102 102 # fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
103 103 spc = fft_volt * numpy.conjugate(fft_volt)
104 104
@@ -115,18 +115,18 class SpectraAFCProc(ProcessingUnit):
115 115 pairIndex = 0
116 116
117 117 if self.dataOut.pairsList != None:
118 #cross-spectra computation
118 # cross-spectra computation
119 119 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
120 120 for pair in self.dataOut.pairsList:
121 121 if pair[0] not in self.dataOut.channelList:
122 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
122 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (str(pair), str(self.dataOut.channelList)))
123 123 if pair[1] not in self.dataOut.channelList:
124 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
124 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (str(pair), str(self.dataOut.channelList)))
125 125
126 126 chan_index0 = self.dataOut.channelList.index(pair[0])
127 127 chan_index1 = self.dataOut.channelList.index(pair[1])
128 128
129 cspc[pairIndex,:,:] = fft_volt[chan_index0,:,:] * numpy.conjugate(fft_volt[chan_index1,:,:])
129 cspc[pairIndex, :, :] = fft_volt[chan_index0, :, :] * numpy.conjugate(fft_volt[chan_index1, :, :])
130 130 pairIndex += 1
131 131 blocksize += cspc.size
132 132
@@ -141,7 +141,7 class SpectraAFCProc(ProcessingUnit):
141 141 self.dataOut.flagNoData = True
142 142
143 143 if code is not None:
144 self.code = numpy.array(code).reshape(nCode,nBaud)
144 self.code = numpy.array(code).reshape(nCode, nBaud)
145 145 else:
146 146 self.code = None
147 147
@@ -186,7 +186,7 class SpectraAFCProc(ProcessingUnit):
186 186
187 187 return True
188 188
189 raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type))
189 raise ValueError("The type of input object '%s' is not valid" % (self.dataIn.type))
190 190
191 191 def __selectPairs(self, pairsList):
192 192
@@ -221,10 +221,10 class SpectraAFCProc(ProcessingUnit):
221 221
222 222 pairsIndexListSelected = []
223 223 for pairIndex in self.dataOut.pairsIndexList:
224 #First pair
224 # First pair
225 225 if self.dataOut.pairsList[pairIndex][0] not in channelList:
226 226 continue
227 #Second pair
227 # Second pair
228 228 if self.dataOut.pairsList[pairIndex][1] not in channelList:
229 229 continue
230 230
@@ -246,7 +246,7 class SpectraAFCProc(ProcessingUnit):
246 246
247 247 for channel in channelList:
248 248 if channel not in self.dataOut.channelList:
249 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)))
249 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (channel, str(self.dataOut.channelList)))
250 250
251 251 index = self.dataOut.channelList.index(channel)
252 252 channelIndexList.append(index)
@@ -271,12 +271,12 class SpectraAFCProc(ProcessingUnit):
271 271
272 272 for channelIndex in channelIndexList:
273 273 if channelIndex not in self.dataOut.channelIndexList:
274 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList))
274 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % (channelIndex, self.dataOut.channelIndexList))
275 275
276 276 # nChannels = len(channelIndexList)
277 277
278 data_spc = self.dataOut.data_spc[channelIndexList,:]
279 data_dc = self.dataOut.data_dc[channelIndexList,:]
278 data_spc = self.dataOut.data_spc[channelIndexList, :]
279 data_dc = self.dataOut.data_dc[channelIndexList, :]
280 280
281 281 self.dataOut.data_spc = data_spc
282 282 self.dataOut.data_dc = data_dc
@@ -334,36 +334,36 class SpectraAFCProc(ProcessingUnit):
334 334
335 335 return 1
336 336
337 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
338 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
337 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
338 newheis = numpy.where(self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
339 339
340 340 if hei_ref != None:
341 newheis = numpy.where(self.dataOut.heightList>hei_ref)
341 newheis = numpy.where(self.dataOut.heightList > hei_ref)
342 342
343 343 minIndex = min(newheis[0])
344 344 maxIndex = max(newheis[0])
345 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
346 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
345 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
346 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
347 347
348 348 # determine indices
349 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
350 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
349 nheis = int(self.dataOut.radarControllerHeaderObj.txB / (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
350 avg_dB = 10 * numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
351 351 beacon_dB = numpy.sort(avg_dB)[-nheis:]
352 352 beacon_heiIndexList = []
353 353 for val in avg_dB.tolist():
354 354 if val >= beacon_dB[0]:
355 355 beacon_heiIndexList.append(avg_dB.tolist().index(val))
356 356
357 #data_spc = data_spc[:,:,beacon_heiIndexList]
357 # data_spc = data_spc[:,:,beacon_heiIndexList]
358 358 data_cspc = None
359 359 if self.dataOut.data_cspc is not None:
360 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
361 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
360 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
361 # data_cspc = data_cspc[:,:,beacon_heiIndexList]
362 362
363 363 data_dc = None
364 364 if self.dataOut.data_dc is not None:
365 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
366 #data_dc = data_dc[:,beacon_heiIndexList]
365 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
366 # data_dc = data_dc[:,beacon_heiIndexList]
367 367
368 368 self.dataOut.data_spc = data_spc
369 369 self.dataOut.data_cspc = data_cspc
@@ -397,28 +397,28 class SpectraAFCProc(ProcessingUnit):
397 397 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
398 398
399 399 if (maxIndex >= self.dataOut.nHeights):
400 maxIndex = self.dataOut.nHeights-1
400 maxIndex = self.dataOut.nHeights - 1
401 401
402 #Spectra
403 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
402 # Spectra
403 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
404 404
405 405 data_cspc = None
406 406 if self.dataOut.data_cspc is not None:
407 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
407 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
408 408
409 409 data_dc = None
410 410 if self.dataOut.data_dc is not None:
411 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
411 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
412 412
413 413 self.dataOut.data_spc = data_spc
414 414 self.dataOut.data_cspc = data_cspc
415 415 self.dataOut.data_dc = data_dc
416 416
417 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
417 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
418 418
419 419 return 1
420 420
421 def removeDC(self, mode = 2):
421 def removeDC(self, mode=2):
422 422 jspectra = self.dataOut.data_spc
423 423 jcspectra = self.dataOut.data_cspc
424 424
@@ -431,43 +431,43 class SpectraAFCProc(ProcessingUnit):
431 431 num_pairs = jcspectra.shape[0]
432 432 else: jcspectraExist = False
433 433
434 freq_dc = jspectra.shape[1]/2
435 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
434 freq_dc = jspectra.shape[1] / 2
435 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
436 436
437 if ind_vel[0]<0:
438 ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
437 if ind_vel[0] < 0:
438 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
439 439
440 440 if mode == 1:
441 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
441 jspectra[:, freq_dc, :] = (jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
442 442
443 443 if jcspectraExist:
444 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
444 jcspectra[:, freq_dc, :] = (jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
445 445
446 446 if mode == 2:
447 447
448 vel = numpy.array([-2,-1,1,2])
449 xx = numpy.zeros([4,4])
448 vel = numpy.array([-2, -1, 1, 2])
449 xx = numpy.zeros([4, 4])
450 450
451 451 for fil in range(4):
452 xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
452 xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
453 453
454 454 xx_inv = numpy.linalg.inv(xx)
455 xx_aux = xx_inv[0,:]
455 xx_aux = xx_inv[0, :]
456 456
457 457 for ich in range(num_chan):
458 yy = jspectra[ich,ind_vel,:]
459 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
458 yy = jspectra[ich, ind_vel, :]
459 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
460 460
461 junkid = jspectra[ich,freq_dc,:]<=0
461 junkid = jspectra[ich, freq_dc, :] <= 0
462 462 cjunkid = sum(junkid)
463 463
464 464 if cjunkid.any():
465 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
465 jspectra[ich, freq_dc, junkid.nonzero()] = (jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
466 466
467 467 if jcspectraExist:
468 468 for ip in range(num_pairs):
469 yy = jcspectra[ip,ind_vel,:]
470 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
469 yy = jcspectra[ip, ind_vel, :]
470 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
471 471
472 472
473 473 self.dataOut.data_spc = jspectra
@@ -475,23 +475,23 class SpectraAFCProc(ProcessingUnit):
475 475
476 476 return 1
477 477
478 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
478 def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
479 479
480 480 jspectra = self.dataOut.data_spc
481 481 jcspectra = self.dataOut.data_cspc
482 482 jnoise = self.dataOut.getNoise()
483 483 num_incoh = self.dataOut.nIncohInt
484 484
485 num_channel = jspectra.shape[0]
486 num_prof = jspectra.shape[1]
487 num_hei = jspectra.shape[2]
485 num_channel = jspectra.shape[0]
486 num_prof = jspectra.shape[1]
487 num_hei = jspectra.shape[2]
488 488
489 #hei_interf
489 # hei_interf
490 490 if hei_interf is None:
491 count_hei = num_hei/2 #Since it is an integer it does not matter
491 count_hei = num_hei / 2 # Since it is an integer it does not matter
492 492 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
493 493 hei_interf = numpy.asarray(hei_interf)[0]
494 #nhei_interf
494 # nhei_interf
495 495 if (nhei_interf == None):
496 496 nhei_interf = 5
497 497 if (nhei_interf < 1):
@@ -506,46 +506,46 class SpectraAFCProc(ProcessingUnit):
506 506 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
507 507 mask_prof = numpy.asarray(list(range(num_prof)))
508 508 num_mask_prof = mask_prof.size
509 comp_mask_prof = [0, num_prof/2]
509 comp_mask_prof = [0, num_prof / 2]
510 510
511 511
512 #noise_exist: determines whether the jnoise variable has been defined and holds the noise information for each channel
512 # noise_exist: determines whether the jnoise variable has been defined and holds the noise information for each channel
513 513 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
514 514 jnoise = numpy.nan
515 515 noise_exist = jnoise[0] < numpy.Inf
516 516
517 #Interference removal subroutine
517 # Interference removal subroutine
518 518 for ich in range(num_channel):
519 #Sort the spectra by power (lowest to highest)
520 power = jspectra[ich,mask_prof,:]
521 power = power[:,hei_interf]
522 power = power.sum(axis = 0)
519 # Sort the spectra by power (lowest to highest)
520 power = jspectra[ich, mask_prof, :]
521 power = power[:, hei_interf]
522 power = power.sum(axis=0)
523 523 psort = power.ravel().argsort()
524 524
525 #Estimate the average interference in the power spectra using
526 junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
525 # Estimate the average interference in the power spectra using
526 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
527 527
528 528 if noise_exist:
529 529 # tmp_noise = jnoise[ich] / num_prof
530 530 tmp_noise = jnoise[ich]
531 531 junkspc_interf = junkspc_interf - tmp_noise
532 #junkspc_interf[:,comp_mask_prof] = 0
532 # junkspc_interf[:,comp_mask_prof] = 0
533 533
534 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
534 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
535 535 jspc_interf = jspc_interf.transpose()
536 #Computing the average interference spectrum
537 noiseid = numpy.where(jspc_interf <= tmp_noise/ numpy.sqrt(num_incoh))
536 # Computing the average interference spectrum
537 noiseid = numpy.where(jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
538 538 noiseid = noiseid[0]
539 539 cnoiseid = noiseid.size
540 interfid = numpy.where(jspc_interf > tmp_noise/ numpy.sqrt(num_incoh))
540 interfid = numpy.where(jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
541 541 interfid = interfid[0]
542 542 cinterfid = interfid.size
543 543
544 544 if (cnoiseid > 0): jspc_interf[noiseid] = 0
545 545
546 #Expanding the profiles to be cleaned
546 # Expanding the profiles to be cleaned
547 547 if (cinterfid > 0):
548 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
548 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
549 549 new_interfid = numpy.asarray(new_interfid)
550 550 new_interfid = {x for x in new_interfid}
551 551 new_interfid = numpy.array(list(new_interfid))
@@ -553,89 +553,89 class SpectraAFCProc(ProcessingUnit):
553 553 else: new_cinterfid = 0
554 554
555 555 for ip in range(new_cinterfid):
556 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
557 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
556 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
557 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf / 2], new_interfid[ip]]
558 558
559 559
560 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #Fix indices
560 jspectra[ich, :, ind_hei] = jspectra[ich, :, ind_hei] - jspc_interf # Fix indices
561 561
562 #Removing the interference at the point of strongest interference
562 # Removing the interference at the point of strongest interference
563 563 ListAux = jspc_interf[mask_prof].tolist()
564 564 maxid = ListAux.index(max(ListAux))
565 565
566 566
567 567 if cinterfid > 0:
568 for ip in range(cinterfid*(interf == 2) - 1):
569 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/numpy.sqrt(num_incoh))).nonzero()
568 for ip in range(cinterfid * (interf == 2) - 1):
569 ind = (jspectra[ich, interfid[ip], :] < tmp_noise * (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
570 570 cind = len(ind)
571 571
572 572 if (cind > 0):
573 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/numpy.sqrt(num_incoh))
573 jspectra[ich, interfid[ip], ind] = tmp_noise * (1 + (numpy.random.uniform(cind) - 0.5) / numpy.sqrt(num_incoh))
574 574
575 ind = numpy.array([-2,-1,1,2])
576 xx = numpy.zeros([4,4])
575 ind = numpy.array([-2, -1, 1, 2])
576 xx = numpy.zeros([4, 4])
577 577
578 578 for id1 in range(4):
579 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
579 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
580 580
581 581 xx_inv = numpy.linalg.inv(xx)
582 xx = xx_inv[:,0]
583 ind = (ind + maxid + num_mask_prof)%num_mask_prof
584 yy = jspectra[ich,mask_prof[ind],:]
585 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
582 xx = xx_inv[:, 0]
583 ind = (ind + maxid + num_mask_prof) % num_mask_prof
584 yy = jspectra[ich, mask_prof[ind], :]
585 jspectra[ich, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
586 586
587 587
588 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/numpy.sqrt(num_incoh))).nonzero()
589 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/numpy.sqrt(num_incoh))
588 indAux = (jspectra[ich, :, :] < tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
589 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))
590 590
591 #Interference removal in the cross-spectra
591 # Interference removal in the cross-spectra
592 592 if jcspectra is None: return jspectra, jcspectra
593 num_pairs = jcspectra.size/(num_prof*num_hei)
593 num_pairs = jcspectra.size / (num_prof * num_hei)
594 594 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
595 595
596 596 for ip in range(num_pairs):
597 597
598 598 #-------------------------------------------
599 599
600 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
601 cspower = cspower[:,hei_interf]
602 cspower = cspower.sum(axis = 0)
600 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
601 cspower = cspower[:, hei_interf]
602 cspower = cspower.sum(axis=0)
603 603
604 604 cspsort = cspower.ravel().argsort()
605 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
605 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
606 606 junkcspc_interf = junkcspc_interf.transpose()
607 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
607 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
608 608
609 609 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
610 610
611 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
612 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
613 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
611 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
612 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
613 junkcspc_interf[comp_mask_prof, :] = numpy.complex(median_real, median_imag)
614 614
615 615 for iprof in range(num_prof):
616 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
617 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
616 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
617 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf / 2]]
618 618
619 #Removing the interference
620 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
619 # Removing the interference
620 jcspectra[ip, :, ind_hei] = jcspectra[ip, :, ind_hei] - jcspc_interf
621 621
622 622 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
623 623 maxid = ListAux.index(max(ListAux))
624 624
625 ind = numpy.array([-2,-1,1,2])
626 xx = numpy.zeros([4,4])
625 ind = numpy.array([-2, -1, 1, 2])
626 xx = numpy.zeros([4, 4])
627 627
628 628 for id1 in range(4):
629 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
629 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
630 630
631 631 xx_inv = numpy.linalg.inv(xx)
632 xx = xx_inv[:,0]
632 xx = xx_inv[:, 0]
633 633
634 ind = (ind + maxid + num_mask_prof)%num_mask_prof
635 yy = jcspectra[ip,mask_prof[ind],:]
636 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
634 ind = (ind + maxid + num_mask_prof) % num_mask_prof
635 yy = jcspectra[ip, mask_prof[ind], :]
636 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
637 637
638 #Save results
638 # Save results
639 639 self.dataOut.data_spc = jspectra
640 640 self.dataOut.data_cspc = jcspectra
641 641
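After the subtraction, the profile bin that still carries the strongest interference is rebuilt by fitting a cubic polynomial through its neighbours at offsets -2, -1, +1 and +2 and evaluating it at offset 0; the column xx_inv[:, 0] of the inverted Vandermonde-style matrix is exactly the set of weights for that evaluation. A self-contained sketch of the same operation on a single 1-D profile (illustrative names; note that index expressions such as num_prof / 2 or nhei_interf / 2 are true division under Python 3, so the sketch keeps its index arithmetic in integers):

import numpy as np

def interpolate_center_bin(spectrum, maxid, num_prof):
    """Replace the most contaminated Doppler bin with a cubic fit through
    its four neighbours at offsets -2, -1, +1, +2 (illustrative helper)."""
    offsets = np.array([-2, -1, 1, 2])
    # Vandermonde-like matrix: column c holds offsets[c] ** [0, 1, 2, 3]
    xx = np.zeros((4, 4))
    for c in range(4):
        xx[:, c] = offsets[c] ** np.arange(4)
    weights = np.linalg.inv(xx)[:, 0]       # weights that evaluate the fitted cubic at offset 0
    neighbours = spectrum[(offsets + maxid + num_prof) % num_prof]
    spectrum[maxid] = neighbours.dot(weights)
    return spectrum

# usage sketch: 64-point profile with a spike at bin 20
prof = np.ones(64); prof[20] = 50.0
prof = interpolate_center_bin(prof, 20, 64)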
@@ -649,7 +649,7 class SpectraAFCProc(ProcessingUnit):
649 649 return 1
650 650
651 651 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
652 #range validation
652 # range validation
653 653 if minHei == None:
654 654 minHei = self.dataOut.heightList[0]
655 655
@@ -657,13 +657,13 class SpectraAFCProc(ProcessingUnit):
657 657 maxHei = self.dataOut.heightList[-1]
658 658
659 659 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
660 print('minHei: %.2f is out of the heights range'%(minHei))
661 print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
660 print('minHei: %.2f is out of the heights range' % (minHei))
661 print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
662 662 minHei = self.dataOut.heightList[0]
663 663
664 664 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
665 print('maxHei: %.2f is out of the heights range'%(maxHei))
666 print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
665 print('maxHei: %.2f is out of the heights range' % (maxHei))
666 print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
667 667 maxHei = self.dataOut.heightList[-1]
668 668
669 669 # velocity validation
@@ -676,13 +676,13 class SpectraAFCProc(ProcessingUnit):
676 676 maxVel = velrange[-1]
677 677
678 678 if (minVel < velrange[0]) or (minVel > maxVel):
679 print('minVel: %.2f is out of the velocity range'%(minVel))
680 print('minVel is setting to %.2f'%(velrange[0]))
679 print('minVel: %.2f is out of the velocity range' % (minVel))
680 print('minVel is setting to %.2f' % (velrange[0]))
681 681 minVel = velrange[0]
682 682
683 683 if (maxVel > velrange[-1]) or (maxVel < minVel):
684 print('maxVel: %.2f is out of the velocity range'%(maxVel))
685 print('maxVel is setting to %.2f'%(velrange[-1]))
684 print('maxVel: %.2f is out of the velocity range' % (maxVel))
685 print('maxVel is setting to %.2f' % (velrange[-1]))
686 686 maxVel = velrange[-1]
687 687
688 688 # selection of indices for the height range
@@ -707,7 +707,7 class SpectraAFCProc(ProcessingUnit):
707 707 raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
708 708
709 709 if (maxIndex >= self.dataOut.nHeights):
710 maxIndex = self.dataOut.nHeights-1
710 maxIndex = self.dataOut.nHeights - 1
711 711
712 712 # selection of indices for the velocities
713 713 indminvel = numpy.where(velrange >= minVel)
@@ -722,15 +722,15 class SpectraAFCProc(ProcessingUnit):
722 722 except:
723 723 maxIndexVel = len(velrange)
724 724
725 #spectrum selection
726 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
727 #noise estimation
725 # spectrum selection
726 data_spc = self.dataOut.data_spc[:, minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
727 # noise estimation
728 728 noise = numpy.zeros(self.dataOut.nChannels)
729 729
730 730 for channel in range(self.dataOut.nChannels):
731 daux = data_spc[channel,:,:]
731 daux = data_spc[channel, :, :]
732 732 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
733 733
734 734 self.dataOut.noise_estimation = noise.copy()
735 735
736 return 1 No newline at end of file
736 return 1
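getNoise crops the spectra to the requested velocity and height window and then estimates each channel's noise with hildebrand_sekhon(), which this module imports from schainpy.model.data.jrodata. The classic criterion keeps the largest set of sorted spectral samples whose squared mean divided by their variance still exceeds the number of incoherent integrations, and takes those samples as noise. A simplified stand-alone version is sketched below for reference; it is an illustration of the idea, not the project's implementation:

import numpy as np

def hildebrand_sekhon_sketch(spectrum, navg):
    """Simplified Hildebrand & Sekhon (1974) noise estimate.
    spectrum: array of spectral power samples; navg: number of incoherent integrations.
    Illustrative only; schainpy ships its own hildebrand_sekhon()."""
    data = np.sort(spectrum.ravel())
    noise = data.mean()
    for j in range(1, data.size + 1):
        sub = data[:j]
        mean = sub.mean()
        var = sub.var()
        if var == 0:                       # flat run of identical samples, keep extending
            noise = mean
            continue
        if mean * mean / var < navg:       # signal contaminates the set: stop
            break
        noise = mean
    return noise

# usage sketch: white noise plus one strong spectral line
spc = np.abs(np.random.randn(128)) ** 2
spc[64] = 50.0
print(hildebrand_sekhon_sketch(spc, navg=10))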
@@ -30,11 +30,11 class SpectraLagsProc(ProcessingUnit):
30 30
31 31 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
32 32 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
33 self.dataOut.ippSeconds = self.dataIn.getDeltaH()*(10**-6)/0.15
33 self.dataOut.ippSeconds = self.dataIn.getDeltaH() * (10 ** -6) / 0.15
34 34
35 35 self.dataOut.channelList = self.dataIn.channelList
36 36 self.dataOut.heightList = self.dataIn.heightList
37 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
37 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
38 38
39 39 self.dataOut.nBaud = self.dataIn.nBaud
40 40 self.dataOut.nCode = self.dataIn.nCode
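In the hunk above, ippSeconds is derived from the height step: 0.15 km per microsecond is the one-way range covered per microsecond of two-way travel time (c/2), so dividing the height step in km by 0.15 and scaling by 1e-6 converts it to seconds. A quick numerical check with an assumed 0.75 km step:

# assumed height step of 0.75 km -> 5 us sample spacing, i.e. 5e-6 s
deltaH_km = 0.75
ippSeconds = deltaH_km * (10 ** -6) / 0.15
assert abs(ippSeconds - 5e-6) < 1e-12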
@@ -43,8 +43,8 class SpectraLagsProc(ProcessingUnit):
43 43
44 44 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
45 45 self.dataOut.utctime = self.firstdatatime
46 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
47 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data has not been flipped
46 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
47 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has not been flipped
48 48 self.dataOut.flagShiftFFT = False
49 49
50 50 self.dataOut.nCohInt = self.dataIn.nCohInt
@@ -72,7 +72,7 class SpectraLagsProc(ProcessingUnit):
72 72 # codes = numpy.conjugate(self.__input_buffer[:,9:169])/10000
73 73
74 74 for i in range(nsegments):
75 self.__lags_buffer[:,:,i] = voltages[:,i:i+self.dataOut.nProfiles]#*codes
75 self.__lags_buffer[:, :, i] = voltages[:, i:i + self.dataOut.nProfiles] # *codes
76 76
77 77 return self.__lags_buffer
78 78
@@ -81,12 +81,12 class SpectraLagsProc(ProcessingUnit):
81 81 if pulseIndex is None:
82 82 return volt_buffer
83 83
84 codes = numpy.conjugate(self.__input_buffer[:,pulseIndex[0]:pulseIndex[1]])/10000
84 codes = numpy.conjugate(self.__input_buffer[:, pulseIndex[0]:pulseIndex[1]]) / 10000
85 85
86 86 nsegments = self.dataOut.nHeights - self.dataOut.nProfiles
87 87
88 88 for i in range(nsegments):
89 volt_buffer[:,:,i] = volt_buffer[:,:,i]*codes
89 volt_buffer[:, :, i] = volt_buffer[:, :, i] * codes
90 90
91 91 return volt_buffer
92 92
@@ -106,9 +106,9 class SpectraLagsProc(ProcessingUnit):
106 106
107 107 fft_volt = numpy.fft.fft(datablock, n=self.dataOut.nFFTPoints, axis=1)
108 108
109 dc = fft_volt[:,0,:]
109 dc = fft_volt[:, 0, :]
110 110
111 #self-spectra computation
111 # self-spectra computation
112 112 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
113 113 spc = fft_volt * numpy.conjugate(fft_volt)
114 114 spc = spc.real
@@ -121,18 +121,18 class SpectraLagsProc(ProcessingUnit):
121 121 pairIndex = 0
122 122
123 123 if self.dataOut.pairsList != []:
124 #cross-spectra computation
124 # cross-spectra computation
125 125 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
126 126 for pair in self.dataOut.pairsList:
127 127 if pair[0] not in self.dataOut.channelList:
128 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
128 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (str(pair), str(self.dataOut.channelList)))
129 129 if pair[1] not in self.dataOut.channelList:
130 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
130 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (str(pair), str(self.dataOut.channelList)))
131 131
132 132 chan_index0 = self.dataOut.channelList.index(pair[0])
133 133 chan_index1 = self.dataOut.channelList.index(pair[1])
134 134
135 cspc[pairIndex,:,:] = fft_volt[chan_index0,:,:] * numpy.conjugate(fft_volt[chan_index1,:,:])
135 cspc[pairIndex, :, :] = fft_volt[chan_index0, :, :] * numpy.conjugate(fft_volt[chan_index1, :, :])
136 136 pairIndex += 1
137 137 blocksize += cspc.size
138 138
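The two hunks above form the self-spectra as the real part of each channel's FFT multiplied by its own conjugate, and the cross-spectra as FFT(channel a) times the conjugate of FFT(channel b) for every pair in pairsList. A compact sketch with illustrative shapes (no fftshift, DC extraction or pair validation):

import numpy as np

def spectra_from_voltages(volts, pairs):
    """volts: complex array [nChannels, nProfiles, nHeights]; pairs: list of (a, b) channel indices.
    Returns (self-spectra, cross-spectra); illustrative layout only."""
    fft_volt = np.fft.fft(volts, axis=1)
    spc = (fft_volt * np.conjugate(fft_volt)).real                    # self-spectra per channel
    cspc = np.array([fft_volt[a] * np.conjugate(fft_volt[b]) for a, b in pairs])
    return spc, cspc

# usage sketch
volts = np.random.randn(2, 64, 100) + 1j * np.random.randn(2, 64, 100)
spc, cspc = spectra_from_voltages(volts, [(0, 1)])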
@@ -153,7 +153,7 class SpectraLagsProc(ProcessingUnit):
153 153 self.code = self.dataIn.code
154 154
155 155 if code is not None:
156 self.code = numpy.array(code).reshape(nCode,nBaud)
156 self.code = numpy.array(code).reshape(nCode, nBaud)
157 157
158 158 if self.dataIn.type == "Voltage":
159 159
@@ -189,7 +189,7 class SpectraLagsProc(ProcessingUnit):
189 189
190 190 return True
191 191
192 raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type))
192 raise ValueError("The type of input object '%s' is not valid" % (self.dataIn.type))
193 193
194 194 def __selectPairs(self, pairsList):
195 195
@@ -224,10 +224,10 class SpectraLagsProc(ProcessingUnit):
224 224
225 225 pairsIndexListSelected = []
226 226 for pairIndex in self.dataOut.pairsIndexList:
227 #First pair
227 # First pair
228 228 if self.dataOut.pairsList[pairIndex][0] not in channelList:
229 229 continue
230 #Second pair
230 # Second pair
231 231 if self.dataOut.pairsList[pairIndex][1] not in channelList:
232 232 continue
233 233
@@ -249,7 +249,7 class SpectraLagsProc(ProcessingUnit):
249 249
250 250 for channel in channelList:
251 251 if channel not in self.dataOut.channelList:
252 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)))
252 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (channel, str(self.dataOut.channelList)))
253 253
254 254 index = self.dataOut.channelList.index(channel)
255 255 channelIndexList.append(index)
@@ -274,12 +274,12 class SpectraLagsProc(ProcessingUnit):
274 274
275 275 for channelIndex in channelIndexList:
276 276 if channelIndex not in self.dataOut.channelIndexList:
277 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList))
277 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % (channelIndex, self.dataOut.channelIndexList))
278 278
279 279 # nChannels = len(channelIndexList)
280 280
281 data_spc = self.dataOut.data_spc[channelIndexList,:]
282 data_dc = self.dataOut.data_dc[channelIndexList,:]
281 data_spc = self.dataOut.data_spc[channelIndexList, :]
282 data_dc = self.dataOut.data_dc[channelIndexList, :]
283 283
284 284 self.dataOut.data_spc = data_spc
285 285 self.dataOut.data_dc = data_dc
@@ -337,36 +337,36 class SpectraLagsProc(ProcessingUnit):
337 337
338 338 return 1
339 339
340 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
341 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
340 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
341 newheis = numpy.where(self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
342 342
343 343 if hei_ref != None:
344 newheis = numpy.where(self.dataOut.heightList>hei_ref)
344 newheis = numpy.where(self.dataOut.heightList > hei_ref)
345 345
346 346 minIndex = min(newheis[0])
347 347 maxIndex = max(newheis[0])
348 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
349 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
348 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
349 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
350 350
351 351 # determine indices
352 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
353 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
352 nheis = int(self.dataOut.radarControllerHeaderObj.txB / (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
353 avg_dB = 10 * numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
354 354 beacon_dB = numpy.sort(avg_dB)[-nheis:]
355 355 beacon_heiIndexList = []
356 356 for val in avg_dB.tolist():
357 357 if val >= beacon_dB[0]:
358 358 beacon_heiIndexList.append(avg_dB.tolist().index(val))
359 359
360 #data_spc = data_spc[:,:,beacon_heiIndexList]
360 # data_spc = data_spc[:,:,beacon_heiIndexList]
361 361 data_cspc = None
362 362 if self.dataOut.data_cspc is not None:
363 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
364 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
363 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
364 # data_cspc = data_cspc[:,:,beacon_heiIndexList]
365 365
366 366 data_dc = None
367 367 if self.dataOut.data_dc is not None:
368 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
369 #data_dc = data_dc[:,beacon_heiIndexList]
368 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
369 # data_dc = data_dc[:,beacon_heiIndexList]
370 370
371 371 self.dataOut.data_spc = data_spc
372 372 self.dataOut.data_cspc = data_cspc
@@ -400,28 +400,28 class SpectraLagsProc(ProcessingUnit):
400 400 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
401 401
402 402 if (maxIndex >= self.dataOut.nHeights):
403 maxIndex = self.dataOut.nHeights-1
403 maxIndex = self.dataOut.nHeights - 1
404 404
405 #Spectra
406 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
405 # Spectra
406 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
407 407
408 408 data_cspc = None
409 409 if self.dataOut.data_cspc is not None:
410 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
410 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
411 411
412 412 data_dc = None
413 413 if self.dataOut.data_dc is not None:
414 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
414 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
415 415
416 416 self.dataOut.data_spc = data_spc
417 417 self.dataOut.data_cspc = data_cspc
418 418 self.dataOut.data_dc = data_dc
419 419
420 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
420 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
421 421
422 422 return 1
423 423
424 def removeDC(self, mode = 2):
424 def removeDC(self, mode=2):
425 425 jspectra = self.dataOut.data_spc
426 426 jcspectra = self.dataOut.data_cspc
427 427
@@ -434,43 +434,43 class SpectraLagsProc(ProcessingUnit):
434 434 num_pairs = jcspectra.shape[0]
435 435 else: jcspectraExist = False
436 436
437 freq_dc = jspectra.shape[1]/2
438 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
437 freq_dc = jspectra.shape[1] / 2
438 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
439 439
440 if ind_vel[0]<0:
441 ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
440 if ind_vel[0] < 0:
441 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
442 442
443 443 if mode == 1:
444 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION
444 jspectra[:, freq_dc, :] = (jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
445 445
446 446 if jcspectraExist:
447 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
447 jcspectra[:, freq_dc, :] = (jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
448 448
449 449 if mode == 2:
450 450
451 vel = numpy.array([-2,-1,1,2])
452 xx = numpy.zeros([4,4])
451 vel = numpy.array([-2, -1, 1, 2])
452 xx = numpy.zeros([4, 4])
453 453
454 454 for fil in range(4):
455 xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
455 xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
456 456
457 457 xx_inv = numpy.linalg.inv(xx)
458 xx_aux = xx_inv[0,:]
458 xx_aux = xx_inv[0, :]
459 459
460 460 for ich in range(num_chan):
461 yy = jspectra[ich,ind_vel,:]
462 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
461 yy = jspectra[ich, ind_vel, :]
462 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
463 463
464 junkid = jspectra[ich,freq_dc,:]<=0
464 junkid = jspectra[ich, freq_dc, :] <= 0
465 465 cjunkid = sum(junkid)
466 466
467 467 if cjunkid.any():
468 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
468 jspectra[ich, freq_dc, junkid.nonzero()] = (jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
469 469
470 470 if jcspectraExist:
471 471 for ip in range(num_pairs):
472 yy = jcspectra[ip,ind_vel,:]
473 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
472 yy = jcspectra[ip, ind_vel, :]
473 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
474 474
475 475
476 476 self.dataOut.data_spc = jspectra
@@ -478,23 +478,23 class SpectraLagsProc(ProcessingUnit):
478 478
479 479 return 1
480 480
481 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
481 def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
482 482
483 483 jspectra = self.dataOut.data_spc
484 484 jcspectra = self.dataOut.data_cspc
485 485 jnoise = self.dataOut.getNoise()
486 486 num_incoh = self.dataOut.nIncohInt
487 487
488 num_channel = jspectra.shape[0]
489 num_prof = jspectra.shape[1]
490 num_hei = jspectra.shape[2]
488 num_channel = jspectra.shape[0]
489 num_prof = jspectra.shape[1]
490 num_hei = jspectra.shape[2]
491 491
492 #hei_interf
492 # hei_interf
493 493 if hei_interf is None:
494 count_hei = num_hei/2 #Since it is an integer, it does not matter
494 count_hei = num_hei / 2 # Since it is an integer, it does not matter
495 495 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
496 496 hei_interf = numpy.asarray(hei_interf)[0]
497 #nhei_interf
497 # nhei_interf
498 498 if (nhei_interf == None):
499 499 nhei_interf = 5
500 500 if (nhei_interf < 1):
@@ -509,46 +509,46 class SpectraLagsProc(ProcessingUnit):
509 509 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
510 510 mask_prof = numpy.asarray(list(range(num_prof)))
511 511 num_mask_prof = mask_prof.size
512 comp_mask_prof = [0, num_prof/2]
512 comp_mask_prof = [0, num_prof / 2]
513 513
514 514
515 #noise_exist: determines whether the variable jnoise has been defined and holds the noise information for each channel
515 # noise_exist: determines whether the variable jnoise has been defined and holds the noise information for each channel
516 516 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
517 517 jnoise = numpy.nan
518 518 noise_exist = jnoise[0] < numpy.Inf
519 519
520 #Interference removal subroutine
520 # Interference removal subroutine
521 521 for ich in range(num_channel):
522 #The spectra are sorted by power (lowest to highest)
523 power = jspectra[ich,mask_prof,:]
524 power = power[:,hei_interf]
525 power = power.sum(axis = 0)
522 # The spectra are sorted by power (lowest to highest)
523 power = jspectra[ich, mask_prof, :]
524 power = power[:, hei_interf]
525 power = power.sum(axis=0)
526 526 psort = power.ravel().argsort()
527 527
528 #The average interference in the power spectra is estimated using
529 junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
528 # The average interference in the power spectra is estimated using
529 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
530 530
531 531 if noise_exist:
532 532 # tmp_noise = jnoise[ich] / num_prof
533 533 tmp_noise = jnoise[ich]
534 534 junkspc_interf = junkspc_interf - tmp_noise
535 #junkspc_interf[:,comp_mask_prof] = 0
535 # junkspc_interf[:,comp_mask_prof] = 0
536 536
537 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
537 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
538 538 jspc_interf = jspc_interf.transpose()
539 #Computing the average interference spectrum
540 noiseid = numpy.where(jspc_interf <= tmp_noise/ numpy.sqrt(num_incoh))
539 # Computing the average interference spectrum
540 noiseid = numpy.where(jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
541 541 noiseid = noiseid[0]
542 542 cnoiseid = noiseid.size
543 interfid = numpy.where(jspc_interf > tmp_noise/ numpy.sqrt(num_incoh))
543 interfid = numpy.where(jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
544 544 interfid = interfid[0]
545 545 cinterfid = interfid.size
546 546
547 547 if (cnoiseid > 0): jspc_interf[noiseid] = 0
548 548
549 #Expanding the profiles to be cleaned
549 # Expanding the profiles to be cleaned
550 550 if (cinterfid > 0):
551 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
551 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
552 552 new_interfid = numpy.asarray(new_interfid)
553 553 new_interfid = {x for x in new_interfid}
554 554 new_interfid = numpy.array(list(new_interfid))
@@ -556,89 +556,89 class SpectraLagsProc(ProcessingUnit):
556 556 else: new_cinterfid = 0
557 557
558 558 for ip in range(new_cinterfid):
559 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
560 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
559 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
560 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf / 2], new_interfid[ip]]
561 561
562 562
563 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #Fix indices
563 jspectra[ich, :, ind_hei] = jspectra[ich, :, ind_hei] - jspc_interf # Fix indices
564 564
565 #Removing the interference at the point of strongest interference
565 # Removing the interference at the point of strongest interference
566 566 ListAux = jspc_interf[mask_prof].tolist()
567 567 maxid = ListAux.index(max(ListAux))
568 568
569 569
570 570 if cinterfid > 0:
571 for ip in range(cinterfid*(interf == 2) - 1):
572 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/numpy.sqrt(num_incoh))).nonzero()
571 for ip in range(cinterfid * (interf == 2) - 1):
572 ind = (jspectra[ich, interfid[ip], :] < tmp_noise * (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
573 573 cind = len(ind)
574 574
575 575 if (cind > 0):
576 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/numpy.sqrt(num_incoh))
576 jspectra[ich, interfid[ip], ind] = tmp_noise * (1 + (numpy.random.uniform(cind) - 0.5) / numpy.sqrt(num_incoh))
577 577
578 ind = numpy.array([-2,-1,1,2])
579 xx = numpy.zeros([4,4])
578 ind = numpy.array([-2, -1, 1, 2])
579 xx = numpy.zeros([4, 4])
580 580
581 581 for id1 in range(4):
582 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
582 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
583 583
584 584 xx_inv = numpy.linalg.inv(xx)
585 xx = xx_inv[:,0]
586 ind = (ind + maxid + num_mask_prof)%num_mask_prof
587 yy = jspectra[ich,mask_prof[ind],:]
588 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
585 xx = xx_inv[:, 0]
586 ind = (ind + maxid + num_mask_prof) % num_mask_prof
587 yy = jspectra[ich, mask_prof[ind], :]
588 jspectra[ich, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
589 589
590 590
591 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/numpy.sqrt(num_incoh))).nonzero()
592 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/numpy.sqrt(num_incoh))
591 indAux = (jspectra[ich, :, :] < tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
592 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))
593 593
594 #Interference removal in the cross-spectra
594 # Interference removal in the cross-spectra
595 595 if jcspectra is None: return jspectra, jcspectra
596 num_pairs = jcspectra.size/(num_prof*num_hei)
596 num_pairs = jcspectra.size / (num_prof * num_hei)
597 597 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
598 598
599 599 for ip in range(num_pairs):
600 600
601 601 #-------------------------------------------
602 602
603 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
604 cspower = cspower[:,hei_interf]
605 cspower = cspower.sum(axis = 0)
603 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
604 cspower = cspower[:, hei_interf]
605 cspower = cspower.sum(axis=0)
606 606
607 607 cspsort = cspower.ravel().argsort()
608 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
608 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
609 609 junkcspc_interf = junkcspc_interf.transpose()
610 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
610 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
611 611
612 612 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
613 613
614 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
615 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
616 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
614 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
615 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
616 junkcspc_interf[comp_mask_prof, :] = numpy.complex(median_real, median_imag)
617 617
618 618 for iprof in range(num_prof):
619 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
620 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
619 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
620 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf / 2]]
621 621
622 #Removing the interference
623 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
622 # Removing the interference
623 jcspectra[ip, :, ind_hei] = jcspectra[ip, :, ind_hei] - jcspc_interf
624 624
625 625 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
626 626 maxid = ListAux.index(max(ListAux))
627 627
628 ind = numpy.array([-2,-1,1,2])
629 xx = numpy.zeros([4,4])
628 ind = numpy.array([-2, -1, 1, 2])
629 xx = numpy.zeros([4, 4])
630 630
631 631 for id1 in range(4):
632 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
632 xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
633 633
634 634 xx_inv = numpy.linalg.inv(xx)
635 xx = xx_inv[:,0]
635 xx = xx_inv[:, 0]
636 636
637 ind = (ind + maxid + num_mask_prof)%num_mask_prof
638 yy = jcspectra[ip,mask_prof[ind],:]
639 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
637 ind = (ind + maxid + num_mask_prof) % num_mask_prof
638 yy = jcspectra[ip, mask_prof[ind], :]
639 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
640 640
641 #Save results
641 # Save results
642 642 self.dataOut.data_spc = jspectra
643 643 self.dataOut.data_cspc = jcspectra
644 644
@@ -652,7 +652,7 class SpectraLagsProc(ProcessingUnit):
652 652 return 1
653 653
654 654 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
655 #range validation
655 # range validation
656 656 if minHei == None:
657 657 minHei = self.dataOut.heightList[0]
658 658
@@ -660,13 +660,13 class SpectraLagsProc(ProcessingUnit):
660 660 maxHei = self.dataOut.heightList[-1]
661 661
662 662 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
663 print('minHei: %.2f is out of the heights range'%(minHei))
664 print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
663 print('minHei: %.2f is out of the heights range' % (minHei))
664 print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
665 665 minHei = self.dataOut.heightList[0]
666 666
667 667 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
668 print('maxHei: %.2f is out of the heights range'%(maxHei))
669 print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
668 print('maxHei: %.2f is out of the heights range' % (maxHei))
669 print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
670 670 maxHei = self.dataOut.heightList[-1]
671 671
672 672 # velocity validation
@@ -679,13 +679,13 class SpectraLagsProc(ProcessingUnit):
679 679 maxVel = velrange[-1]
680 680
681 681 if (minVel < velrange[0]) or (minVel > maxVel):
682 print('minVel: %.2f is out of the velocity range'%(minVel))
683 print('minVel is setting to %.2f'%(velrange[0]))
682 print('minVel: %.2f is out of the velocity range' % (minVel))
683 print('minVel is setting to %.2f' % (velrange[0]))
684 684 minVel = velrange[0]
685 685
686 686 if (maxVel > velrange[-1]) or (maxVel < minVel):
687 print('maxVel: %.2f is out of the velocity range'%(maxVel))
688 print('maxVel is setting to %.2f'%(velrange[-1]))
687 print('maxVel: %.2f is out of the velocity range' % (maxVel))
688 print('maxVel is setting to %.2f' % (velrange[-1]))
689 689 maxVel = velrange[-1]
690 690
691 691 # selection of indices for the height range
@@ -710,7 +710,7 class SpectraLagsProc(ProcessingUnit):
710 710 raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
711 711
712 712 if (maxIndex >= self.dataOut.nHeights):
713 maxIndex = self.dataOut.nHeights-1
713 maxIndex = self.dataOut.nHeights - 1
714 714
715 715 # selection of indices for the velocities
716 716 indminvel = numpy.where(velrange >= minVel)
@@ -725,15 +725,15 class SpectraLagsProc(ProcessingUnit):
725 725 except:
726 726 maxIndexVel = len(velrange)
727 727
728 #spectrum selection
729 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
730 #noise estimation
728 # spectrum selection
729 data_spc = self.dataOut.data_spc[:, minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
730 # noise estimation
731 731 noise = numpy.zeros(self.dataOut.nChannels)
732 732
733 733 for channel in range(self.dataOut.nChannels):
734 daux = data_spc[channel,:,:]
734 daux = data_spc[channel, :, :]
735 735 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
736 736
737 737 self.dataOut.noise_estimation = noise.copy()
738 738
739 return 1 No newline at end of file
739 return 1
@@ -1,8 +1,8
1 1 import sys
2 import numpy,math
2 import numpy, math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
5 from schainpy.model.data.jrodata import Voltage, hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
@@ -37,7 +37,7 class VoltageProc(ProcessingUnit):
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 #self.dataOut.timeInterval = self.dataIn.timeInterval
40 # self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
@@ -61,7 +61,7 class selectChannels(Operation):
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
64 raise ValueError("Channel %d is not in %s" % (channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
@@ -89,16 +89,16 class selectChannels(Operation):
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
92 raise ValueError("The value %d in channelIndexList is not valid" % channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
98 98 """
99 data = self.dataOut.data[channelIndexList,:,:]
99 data = self.dataOut.data[channelIndexList, :, :]
100 100 else:
101 data = self.dataOut.data[channelIndexList,:]
101 data = self.dataOut.data[channelIndexList, :]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
@@ -163,8 +163,14 class selectHeights(Operation):
163 163 """
164 164
165 165 self.dataOut = dataOut
166
167 if minHei and maxHei:
166
167 #if minHei and maxHei:
168 if 1:
169 if minHei == None:
170 minHei = self.dataOut.heightList[0]
171
172 if maxHei == None:
173 maxHei = self.dataOut.heightList[-1]
168 174
169 175 if (minHei < self.dataOut.heightList[0]):
170 176 minHei = self.dataOut.heightList[0]
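A plausible reading of this change: the old guard `if minHei and maxHei:` silently skips the clipping branch whenever a bound of 0.0 is passed, because 0.0 is falsy, whereas the new code always enters the branch and fills missing bounds from heightList. A two-line illustration of the truthiness pitfall, with assumed values:

minHei = 0.0
print(bool(minHei and 500.0))   # False: a 0 km lower bound would have skipped the old branch
print(minHei is None)           # False: an explicit None test keeps 0.0 as a valid bound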
@@ -188,7 +194,7 class selectHeights(Operation):
188 194 maxIndex = indb[0][-1]
189 195 except:
190 196 maxIndex = len(heights)
191
197
192 198 self.selectHeightsByIndex(minIndex, maxIndex)
193 199
194 200 return self.dataOut
@@ -217,12 +223,12 class selectHeights(Operation):
217 223 if (maxIndex >= self.dataOut.nHeights):
218 224 maxIndex = self.dataOut.nHeights
219 225
220 #voltage
226 # voltage
221 227 if self.dataOut.flagDataAsBlock:
222 228 """
223 229 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
224 230 """
225 data = self.dataOut.data[:,:, minIndex:maxIndex]
231 data = self.dataOut.data[:, :, minIndex:maxIndex]
226 232 else:
227 233 data = self.dataOut.data[:, minIndex:maxIndex]
228 234
@@ -232,7 +238,7 class selectHeights(Operation):
232 238 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
233 239
234 240 if self.dataOut.nHeights <= 1:
235 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
241 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" % (self.dataOut.nHeights))
236 242 elif self.dataOut.type == 'Spectra':
237 243 if (minIndex < 0) or (minIndex > maxIndex):
238 244 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
@@ -268,30 +274,30 class filterByHeights(Operation):
268 274 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
269 275
270 276 if window == None:
271 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
277 window = (dataOut.radarControllerHeaderObj.txA / dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
272 278
273 279 newdelta = deltaHeight * window
274 280 r = dataOut.nHeights % window
275 newheights = (dataOut.nHeights-r)/window
281 newheights = (dataOut.nHeights - r) / window
276 282
277 283 if newheights <= 1:
278 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
284 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" % (dataOut.nHeights, window))
279 285
280 286 if dataOut.flagDataAsBlock:
281 287 """
282 288 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
283 289 """
284 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
285 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
286 buffer = numpy.sum(buffer,3)
290 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights - r)]
291 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights / window), window)
292 buffer = numpy.sum(buffer, 3)
287 293
288 294 else:
289 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
290 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
291 buffer = numpy.sum(buffer,2)
295 buffer = dataOut.data[:, 0:int(dataOut.nHeights - r)]
296 buffer = buffer.reshape(dataOut.nChannels, int(dataOut.nHeights / window), int(window))
297 buffer = numpy.sum(buffer, 2)
292 298
293 299 dataOut.data = buffer
294 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
300 dataOut.heightList = dataOut.heightList[0] + numpy.arange(newheights) * newdelta
295 301 dataOut.windowOfFilter = window
296 302
297 303 return dataOut
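filterByHeights trims the remainder r, sums every `window` consecutive height samples, and rebuilds heightList with the coarser spacing window * deltaHeight. A minimal sketch of the non-block branch with made-up shapes:

import numpy as np

def filter_by_heights(data, height_list, window):
    """data: [nChannels, nHeights]; sums every `window` heights (illustrative helper)."""
    n_channels, n_heights = data.shape
    r = n_heights % window
    new_heights = (n_heights - r) // window
    buf = data[:, :n_heights - r].reshape(n_channels, new_heights, window).sum(axis=2)
    delta = height_list[1] - height_list[0]
    new_height_list = height_list[0] + np.arange(new_heights) * (delta * window)
    return buf, new_height_list

# usage sketch: 103 heights decimated by a window of 4 -> 25 heights
data = np.random.rand(2, 103)
heights = 100.0 + 0.15 * np.arange(103)
buf, hl = filter_by_heights(data, heights, window=4)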
@@ -299,14 +305,14 class filterByHeights(Operation):
299 305
300 306 class setH0(Operation):
301 307
302 def run(self, dataOut, h0, deltaHeight = None):
308 def run(self, dataOut, h0, deltaHeight=None):
303 309
304 310 if not deltaHeight:
305 311 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
306 312
307 313 nHeights = dataOut.nHeights
308 314
309 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
315 newHeiRange = h0 + numpy.arange(nHeights) * deltaHeight
310 316
311 317 dataOut.heightList = newHeiRange
312 318
@@ -315,7 +321,7 class setH0(Operation):
315 321
316 322 class deFlip(Operation):
317 323
318 def run(self, dataOut, channelList = []):
324 def run(self, dataOut, channelList=[]):
319 325
320 326 data = dataOut.data.copy()
321 327
@@ -325,7 +331,7 class deFlip(Operation):
325 331
326 332 if not channelList:
327 333 for thisProfile in profileList:
328 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
334 data[:, thisProfile, :] = data[:, thisProfile, :] * flip
329 335 flip *= -1.0
330 336 else:
331 337 for thisChannel in channelList:
@@ -333,20 +339,20 class deFlip(Operation):
333 339 continue
334 340
335 341 for thisProfile in profileList:
336 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
342 data[thisChannel, thisProfile, :] = data[thisChannel, thisProfile, :] * flip
337 343 flip *= -1.0
338 344
339 345 self.flip = flip
340 346
341 347 else:
342 348 if not channelList:
343 data[:,:] = data[:,:]*self.flip
349 data[:, :] = data[:, :] * self.flip
344 350 else:
345 351 for thisChannel in channelList:
346 352 if thisChannel not in dataOut.channelList:
347 353 continue
348 354
349 data[thisChannel,:] = data[thisChannel,:]*self.flip
355 data[thisChannel, :] = data[thisChannel, :] * self.flip
350 356
351 357 self.flip *= -1.
352 358
@@ -395,21 +401,21 class printAttribute(Operation):
395 401 class interpolateHeights(Operation):
396 402
397 403 def run(self, dataOut, topLim, botLim):
398 #69 to 72 for julia
399 #82-84 for meteors
400 if len(numpy.shape(dataOut.data))==2:
401 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
402 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
403 #dataOut.data[:,botLim:limSup+1] = sampInterp
404 dataOut.data[:,botLim:topLim+1] = sampInterp
404 # 69 to 72 for julia
405 # 82-84 for meteors
406 if len(numpy.shape(dataOut.data)) == 2:
407 sampInterp = (dataOut.data[:, botLim - 1] + dataOut.data[:, topLim + 1]) / 2
408 sampInterp = numpy.transpose(numpy.tile(sampInterp, (topLim - botLim + 1, 1)))
409 # dataOut.data[:,botLim:limSup+1] = sampInterp
410 dataOut.data[:, botLim:topLim + 1] = sampInterp
405 411 else:
406 412 nHeights = dataOut.data.shape[2]
407 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
408 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
409 f = interpolate.interp1d(x, y, axis = 2)
410 xnew = numpy.arange(botLim,topLim+1)
413 x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
414 y = dataOut.data[:, :, list(range(botLim)) + list(range(topLim + 1, nHeights))]
415 f = interpolate.interp1d(x, y, axis=2)
416 xnew = numpy.arange(botLim, topLim + 1)
411 417 ynew = f(xnew)
412 dataOut.data[:,:,botLim:topLim+1] = ynew
418 dataOut.data[:, :, botLim:topLim + 1] = ynew
413 419
414 420 return dataOut
415 421
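interpolateHeights overwrites the height bins between botLim and topLim (inclusive) with values interpolated from the samples outside that band; for block data it uses scipy's interp1d along the height axis. A self-contained sketch with assumed shapes:

import numpy as np
from scipy import interpolate

def interpolate_heights(data, bot_lim, top_lim):
    """data: [nChannels, nProfiles, nHeights]; fills bot_lim..top_lim by interpolation (illustrative)."""
    n_heights = data.shape[2]
    x = np.hstack((np.arange(bot_lim), np.arange(top_lim + 1, n_heights)))
    y = data[:, :, list(range(bot_lim)) + list(range(top_lim + 1, n_heights))]
    f = interpolate.interp1d(x, y, axis=2)
    data[:, :, bot_lim:top_lim + 1] = f(np.arange(bot_lim, top_lim + 1))
    return data

# usage sketch: blank out heights 20..23 and rebuild them from the neighbours
data = np.random.rand(2, 8, 50)
data = interpolate_heights(data, bot_lim=20, top_lim=23)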
@@ -458,7 +464,7 class CohInt(Operation):
458 464 self.n = n
459 465 self.__byTime = False
460 466 else:
461 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
467 self.__integrationtime = timeInterval # * 60. #if (type(timeInterval)!=integer) -> change this line
462 468 self.n = 9999
463 469 self.__byTime = True
464 470
@@ -483,25 +489,25 class CohInt(Operation):
483 489 self.__profIndex += 1
484 490 return
485 491
486 #Overlapping data
492 # Overlapping data
487 493 nChannels, nHeis = data.shape
488 494 data = numpy.reshape(data, (1, nChannels, nHeis))
489 495
490 #If the buffer is empty then it takes the data value
496 # If the buffer is empty then it takes the data value
491 497 if self.__buffer is None:
492 498 self.__buffer = data
493 499 self.__profIndex += 1
494 500 return
495 501
496 #If the buffer length is lower than n then stacking the data value
502 # If the buffer length is lower than n then stacking the data value
497 503 if self.__profIndex < self.n:
498 504 self.__buffer = numpy.vstack((self.__buffer, data))
499 505 self.__profIndex += 1
500 506 return
501 507
502 #If the buffer length is equal to n then replacing the last buffer value with the data value
508 # If the buffer length is equal to n then replacing the last buffer value with the data value
503 509 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
504 self.__buffer[self.n-1] = data
510 self.__buffer[self.n - 1] = data
505 511 self.__profIndex = self.n
506 512 return
507 513
@@ -525,7 +531,7 class CohInt(Operation):
525 531
526 532 return data, n
527 533
528 #Integration with Overlapping
534 # Integration with Overlapping
529 535 data = numpy.sum(self.__buffer, axis=0)
530 536 # print data
531 537 # raise
@@ -568,7 +574,7 class CohInt(Operation):
568 574 if self.__profIndex == 0:
569 575 self.__buffer = [[data.copy(), datatime]]
570 576 else:
571 self.__buffer.append([data.copy(),datatime])
577 self.__buffer.append([data.copy(), datatime])
572 578 self.__profIndex += 1
573 579 self.__dataReady = False
574 580
@@ -625,19 +631,19 class CohInt(Operation):
625 631
626 632 def integrateByBlock(self, dataOut):
627 633
628 times = int(dataOut.data.shape[1]/self.n)
634 times = int(dataOut.data.shape[1] / self.n)
629 635 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
630 636
631 637 id_min = 0
632 638 id_max = self.n
633 639
634 640 for i in range(times):
635 junk = dataOut.data[:,id_min:id_max,:]
636 avgdata[:,i,:] = junk.sum(axis=1)
641 junk = dataOut.data[:, id_min:id_max, :]
642 avgdata[:, i, :] = junk.sum(axis=1)
637 643 id_min += self.n
638 644 id_max += self.n
639 645
640 timeInterval = dataOut.ippSeconds*self.n
646 timeInterval = dataOut.ippSeconds * self.n
641 647 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
642 648 self.__dataReady = True
643 649 return avgdata, avgdatatime
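integrateByBlock adds every n consecutive profiles, producing nProfiles // n integrated profiles and stretching the effective time step to ippSeconds * n. An equivalent vectorized sketch with assumed shapes:

import numpy as np

def integrate_by_block(data, n):
    """data: complex [nChannels, nProfiles, nHeights]; sums groups of n profiles (illustrative)."""
    n_channels, n_profiles, n_heights = data.shape
    times = n_profiles // n
    return data[:, :times * n, :].reshape(n_channels, times, n, n_heights).sum(axis=2)

# usage sketch: 128 profiles coherently integrated in groups of 8 -> 16 profiles
data = np.random.randn(2, 128, 100) + 1j * np.random.randn(2, 128, 100)
avg = integrate_by_block(data, n=8)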
@@ -704,47 +710,47 class Decoder(Operation):
704 710 self.nCode = len(code)
705 711 self.nBaud = len(code[0])
706 712
707 if (osamp != None) and (osamp >1):
713 if (osamp != None) and (osamp > 1):
708 714 self.osamp = osamp
709 715 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
710 self.nBaud = self.nBaud*self.osamp
716 self.nBaud = self.nBaud * self.osamp
711 717
712 718 self.__nChannels = dataOut.nChannels
713 719 self.__nProfiles = dataOut.nProfiles
714 720 self.__nHeis = dataOut.nHeights
715 721
716 722 if self.__nHeis < self.nBaud:
717 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
723 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' % (self.__nHeis, self.nBaud))
718 724
719 #Frequency
725 # Frequency
720 726 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
721 727
722 __codeBuffer[:,0:self.nBaud] = self.code
728 __codeBuffer[:, 0:self.nBaud] = self.code
723 729
724 730 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
725 731
726 732 if dataOut.flagDataAsBlock:
727 733
728 self.ndatadec = self.__nHeis #- self.nBaud + 1
734 self.ndatadec = self.__nHeis # - self.nBaud + 1
729 735
730 736 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
731 737
732 738 else:
733 739
734 #Time
735 self.ndatadec = self.__nHeis #- self.nBaud + 1
740 # Time
741 self.ndatadec = self.__nHeis # - self.nBaud + 1
736 742
737 743 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
738 744
739 745 def __convolutionInFreq(self, data):
740 746
741 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
747 fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
742 748
743 749 fft_data = numpy.fft.fft(data, axis=1)
744 750
745 conv = fft_data*fft_code
751 conv = fft_data * fft_code
746 752
747 data = numpy.fft.ifft(conv,axis=1)
753 data = numpy.fft.ifft(conv, axis=1)
748 754
749 755 return data
750 756
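In frequency mode the decoder zero-pads each code into a buffer as long as the height profile, stores its conjugate FFT once during setup, and then decodes a profile by multiplying the profile's FFT with that code FFT and transforming back, which amounts to a circular matched filter. A stand-alone sketch for a single profile, with illustrative names and an assumed 7-baud binary code:

import numpy as np

def decode_in_freq(profile, code):
    """profile: complex [nChannels, nHeights]; code: 1-D array of +/-1 bauds (illustrative)."""
    n_heis = profile.shape[1]
    code_buffer = np.zeros(n_heis, dtype=complex)
    code_buffer[:code.size] = code                      # zero-pad the code to the profile length
    fft_code = np.conj(np.fft.fft(code_buffer)).reshape(1, -1)
    return np.fft.ifft(np.fft.fft(profile, axis=1) * fft_code, axis=1)

# usage sketch
code = np.array([1, 1, -1, 1, -1, -1, 1], dtype=float)
profile = np.random.randn(2, 64) + 1j * np.random.randn(2, 64)
decoded = decode_in_freq(profile, code)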
@@ -756,7 +762,7 class Decoder(Operation):
756 762
757 763 code = self.code[self.__profIndex]
758 764 for i in range(self.__nChannels):
759 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
765 self.datadecTime[i, :] = numpy.correlate(data[i, :], code, mode='full')[self.nBaud - 1:]
760 766
761 767 return self.datadecTime
762 768
@@ -765,12 +771,12 class Decoder(Operation):
765 771 repetitions = int(self.__nProfiles / self.nCode)
766 772 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
767 773 junk = junk.flatten()
768 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
774 code_block = numpy.reshape(junk, (self.nCode * repetitions, self.nBaud))
769 775 profilesList = range(self.__nProfiles)
770 776
771 777 for i in range(self.__nChannels):
772 778 for j in profilesList:
773 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
779 self.datadecTime[i, j, :] = numpy.correlate(data[i, j, :], code_block[j, :], mode='full')[self.nBaud - 1:]
774 780 return self.datadecTime
775 781
776 782 def __convolutionByBlockInFreq(self, data):
@@ -778,18 +784,18 class Decoder(Operation):
778 784 raise NotImplementedError("Decoder by frequency fro Blocks not implemented")
779 785
780 786
781 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
787 fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
782 788
783 789 fft_data = numpy.fft.fft(data, axis=2)
784 790
785 conv = fft_data*fft_code
791 conv = fft_data * fft_code
786 792
787 data = numpy.fft.ifft(conv,axis=2)
793 data = numpy.fft.ifft(conv, axis=2)
788 794
789 795 return data
790 796
791 797
792 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
798 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode=0, osamp=None, times=None):
793 799
794 800 if dataOut.flagDecodeData:
795 801 print("This data is already decoded, recoding again ...")
@@ -798,17 +804,17 class Decoder(Operation):
798 804
799 805 if code is None:
800 806 if dataOut.code is None:
801 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
807 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" % dataOut.type)
802 808
803 809 code = dataOut.code
804 810 else:
805 code = numpy.array(code).reshape(nCode,nBaud)
811 code = numpy.array(code).reshape(nCode, nBaud)
806 812 self.setup(code, osamp, dataOut)
807 813
808 814 self.isConfig = True
809 815
810 816 if mode == 3:
811 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
817 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" % mode)
812 818
813 819 if times != None:
814 820 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
@@ -846,7 +852,7 class Decoder(Operation):
846 852 datadec = self.__convolutionInFreqOpt(dataOut.data)
847 853
848 854 if datadec is None:
849 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
855 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" % mode)
850 856
851 857 dataOut.code = self.code
852 858 dataOut.nCode = self.nCode
@@ -856,9 +862,9 class Decoder(Operation):
856 862
857 863 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
858 864
859 dataOut.flagDecodeData = True #assume the data is already decoded
865 dataOut.flagDecodeData = True # assume the data is already decoded
860 866
861 if self.__profIndex == self.nCode-1:
867 if self.__profIndex == self.nCode - 1:
862 868 self.__profIndex = 0
863 869 return dataOut
864 870
@@ -884,14 +890,14 class ProfileConcat(Operation):
884 890 self.times = 1
885 891
886 892 def setup(self, data, m, n=1):
887 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
888 self.nHeights = data.shape[1]#.nHeights
893 self.buffer = numpy.zeros((data.shape[0], data.shape[1] * m), dtype=type(data[0, 0]))
894 self.nHeights = data.shape[1] # .nHeights
889 895 self.start_index = 0
890 896 self.times = 1
891 897
892 898 def concat(self, data):
893 899
894 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
900 self.buffer[:, self.start_index:self.nHeights * self.times] = data.copy()
895 901 self.start_index = self.start_index + self.nHeights
896 902
897 903 def run(self, dataOut, m):
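ProfileConcat lays m consecutive profiles end to end along the height axis: the buffer is nChannels by nHeights * m and each incoming profile is copied into the next nHeights-wide slot. For profiles that have already been collected in a list, the same result is a single call (illustrative sketch):

import numpy as np

def concat_profiles(profiles):
    """profiles: list of m arrays, each [nChannels, nHeights]; returns [nChannels, nHeights * m]."""
    return np.concatenate(profiles, axis=1)

# usage sketch: four 100-height profiles become one 400-height profile per channel
parts = [np.random.randn(2, 100) for _ in range(4)]
long_profile = concat_profiles(parts)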
@@ -953,7 +959,7 class ProfileSelector(Operation):
953 959
954 960 return True
955 961
956 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
962 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList=None, nProfiles=None):
957 963
958 964 """
959 965 ProfileSelector:
@@ -978,14 +984,14 class ProfileSelector(Operation):
978 984 data dimension = [nChannels, nProfiles, nHeis]
979 985 """
980 986 if profileList != None:
981 dataOut.data = dataOut.data[:,profileList,:]
987 dataOut.data = dataOut.data[:, profileList, :]
982 988
983 989 if profileRangeList != None:
984 990 minIndex = profileRangeList[0]
985 991 maxIndex = profileRangeList[1]
986 profileList = list(range(minIndex, maxIndex+1))
992 profileList = list(range(minIndex, maxIndex + 1))
987 993
988 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
994 dataOut.data = dataOut.data[:, minIndex:maxIndex + 1, :]
989 995
990 996 if rangeList != None:
991 997
@@ -995,9 +1001,9 class ProfileSelector(Operation):
995 1001 minIndex = thisRange[0]
996 1002 maxIndex = thisRange[1]
997 1003
998 profileList.extend(list(range(minIndex, maxIndex+1)))
1004 profileList.extend(list(range(minIndex, maxIndex + 1)))
999 1005
1000 dataOut.data = dataOut.data[:,profileList,:]
1006 dataOut.data = dataOut.data[:, profileList, :]
1001 1007
1002 1008 dataOut.nProfiles = len(profileList)
1003 1009 dataOut.profileIndex = dataOut.nProfiles - 1
@@ -1065,7 +1071,7 class ProfileSelector(Operation):
1065 1071 return dataOut
1066 1072
1067 1073
1068 if beam != None: #beam is only for AMISR data
1074 if beam != None: # beam is only for AMISR data
1069 1075 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1070 1076 dataOut.flagNoData = False
1071 1077 dataOut.profileIndex = self.profileIndex
@@ -1089,8 +1095,8 class Reshaper(Operation):
1089 1095 def __appendProfile(self, dataOut, nTxs):
1090 1096
1091 1097 if self.__buffer is None:
1092 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1093 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1098 shape = (dataOut.nChannels, int(dataOut.nHeights / nTxs))
1099 self.__buffer = numpy.empty(shape, dtype=dataOut.data.dtype)
1094 1100
1095 1101 ini = dataOut.nHeights * self.__nitems
1096 1102 end = ini + dataOut.nHeights
@@ -1099,11 +1105,11 class Reshaper(Operation):
1099 1105
1100 1106 self.__nitems += 1
1101 1107
1102 return int(self.__nitems*nTxs)
1108 return int(self.__nitems * nTxs)
1103 1109
1104 1110 def __getBuffer(self):
1105 1111
1106 if self.__nitems == int(1./self.__nTxs):
1112 if self.__nitems == int(1. / self.__nTxs):
1107 1113
1108 1114 self.__nitems = 0
1109 1115
@@ -1120,15 +1126,15 class Reshaper(Operation):
1120 1126 if nTxs < 0:
1121 1127 raise ValueError("nTxs should be greater than 0")
1122 1128
1123 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1124 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1129 if nTxs < 1 and dataOut.nProfiles % (1. / nTxs) != 0:
1130 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" % (dataOut.nProfiles, (1. / nTxs)))
1125 1131
1126 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1132 shape = [dataOut.nChannels, dataOut.nProfiles * nTxs, dataOut.nHeights / nTxs]
1127 1133
1128 1134 return shape, nTxs
1129 1135
1130 if len(shape) != 2 and len(shape) != 3:
1131 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1136 if len(shape) != 2 and len(shape) != 3:
1137 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" % (dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1132 1138
1133 1139 if len(shape) == 2:
1134 1140 shape_tuple = [dataOut.nChannels]
@@ -1136,7 +1142,7 class Reshaper(Operation):
1136 1142 else:
1137 1143 shape_tuple = list(shape)
1138 1144
1139 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1145 nTxs = 1.0 * shape_tuple[1] / dataOut.nProfiles
1140 1146
1141 1147 return shape_tuple, nTxs
1142 1148
@@ -1152,7 +1158,7 class Reshaper(Operation):
1152 1158 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1153 1159 dataOut.flagNoData = False
1154 1160
1155 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1161 profileIndex = int(dataOut.nProfiles * self.__nTxs) - 1
1156 1162
1157 1163 else:
1158 1164
@@ -1165,16 +1171,16 class Reshaper(Operation):
1165 1171 dataOut.data = new_data
1166 1172 dataOut.flagNoData = False
1167 1173
1168 profileIndex = dataOut.profileIndex*nTxs
1174 profileIndex = dataOut.profileIndex * nTxs
1169 1175
1170 1176 else:
1171 1177 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1172 1178
1173 1179 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1174 1180
1175 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1181 dataOut.heightList = numpy.arange(dataOut.nHeights / self.__nTxs) * deltaHeight + dataOut.heightList[0]
1176 1182
1177 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1183 dataOut.nProfiles = int(dataOut.nProfiles * self.__nTxs)
1178 1184
1179 1185 dataOut.profileIndex = profileIndex
1180 1186
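As a quick illustration of the block-mode reshape above (hypothetical sizes; with shape = (nChannels, nProfiles*nTxs, nHeights/nTxs) the same samples are redistributed between the profile and height axes):

import numpy

nChannels, nProfiles, nHeights = 2, 100, 1000
nTxs = 2
data = numpy.arange(nChannels * nProfiles * nHeights).reshape(nChannels, nProfiles, nHeights)

# nTxs = 2 doubles the profiles and halves the heights, keeping the same samples
reshaped = numpy.reshape(data, (nChannels, nProfiles * nTxs, nHeights // nTxs))
print(reshaped.shape)        # (2, 200, 500)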
@@ -1195,18 +1201,18 class SplitProfiles(Operation):
1195 1201
1196 1202 if dataOut.flagDataAsBlock:
1197 1203
1198 #nchannels, nprofiles, nsamples
1204 # nchannels, nprofiles, nsamples
1199 1205 shape = dataOut.data.shape
1200 1206
1201 1207 if shape[2] % n != 0:
1202 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
1208 raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[2]))
1203 1209
1204 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1210 new_shape = shape[0], shape[1] * n, int(shape[2] / n)
1205 1211
1206 1212 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1207 1213 dataOut.flagNoData = False
1208 1214
1209 profileIndex = int(dataOut.nProfiles/n) - 1
1215 profileIndex = int(dataOut.nProfiles / n) - 1
1210 1216
1211 1217 else:
1212 1218
@@ -1214,9 +1220,9 class SplitProfiles(Operation):
1214 1220
1215 1221 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1216 1222
1217 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1223 dataOut.heightList = numpy.arange(dataOut.nHeights / n) * deltaHeight + dataOut.heightList[0]
1218 1224
1219 dataOut.nProfiles = int(dataOut.nProfiles*n)
1225 dataOut.nProfiles = int(dataOut.nProfiles * n)
1220 1226
1221 1227 dataOut.profileIndex = profileIndex
1222 1228
@@ -1239,21 +1245,21 class CombineProfiles(Operation):
1239 1245
1240 1246 if dataOut.flagDataAsBlock:
1241 1247
1242 #nchannels, nprofiles, nsamples
1248 # nchannels, nprofiles, nsamples
1243 1249 shape = dataOut.data.shape
1244 new_shape = shape[0], shape[1]/n, shape[2]*n
1250 new_shape = shape[0], int(shape[1] / n), shape[2] * n
1245 1251
1246 1252 if shape[1] % n != 0:
1247 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))
1253 raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[1]))
1248 1254
1249 1255 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1250 1256 dataOut.flagNoData = False
1251 1257
1252 profileIndex = int(dataOut.nProfiles*n) - 1
1258 profileIndex = int(dataOut.nProfiles * n) - 1
1253 1259
1254 1260 else:
1255 1261
1256 #nchannels, nsamples
1262 # nchannels, nsamples
1257 1263 if self.__remData is None:
1258 1264 newData = dataOut.data
1259 1265 else:
@@ -1263,7 +1269,7 class CombineProfiles(Operation):
1263 1269
1264 1270 if self.__profileIndex < n:
1265 1271 self.__remData = newData
1266 #continue
1272 # continue
1267 1273 return
1268 1274
1269 1275 self.__profileIndex = 0
@@ -1272,14 +1278,14 class CombineProfiles(Operation):
1272 1278 dataOut.data = newData
1273 1279 dataOut.flagNoData = False
1274 1280
1275 profileIndex = dataOut.profileIndex/n
1281 profileIndex = int(dataOut.profileIndex / n)
1276 1282
1277 1283
1278 1284 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1279 1285
1280 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1286 dataOut.heightList = numpy.arange(dataOut.nHeights * n) * deltaHeight + dataOut.heightList[0]
1281 1287
1282 dataOut.nProfiles = int(dataOut.nProfiles/n)
1288 dataOut.nProfiles = int(dataOut.nProfiles / n)
1283 1289
1284 1290 dataOut.profileIndex = profileIndex
1285 1291
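SplitProfiles and CombineProfiles are inverse block-mode reshapes; a short sketch with hypothetical sizes:

import numpy

data = numpy.zeros((2, 100, 1000))    # nchannels, nprofiles, nsamples
n = 4

split = numpy.reshape(data, (2, 100 * n, 1000 // n))       # SplitProfiles: (2, 400, 250)
combined = numpy.reshape(split, (2, 400 // n, 250 * n))    # CombineProfiles: back to (2, 100, 1000)
print(split.shape, combined.shape)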
@@ -1303,50 +1309,50 class PulsePairVoltage(Operation):
1303 1309 Affected:
1304 1310 self.dataOut.spc
1305 1311 '''
1306 isConfig = False
1307 __profIndex = 0
1308 __initime = None
1312 isConfig = False
1313 __profIndex = 0
1314 __initime = None
1309 1315 __lastdatatime = None
1310 __buffer = None
1311 noise = None
1312 __dataReady = False
1313 n = None
1314 __nch = 0
1315 __nHeis = 0
1316 removeDC = False
1317 ipp = None
1318 lambda_ = 0
1319
1320 def __init__(self,**kwargs):
1321 Operation.__init__(self,**kwargs)
1322
1323 def setup(self, dataOut, n = None, removeDC=False):
1316 __buffer = None
1317 noise = None
1318 __dataReady = False
1319 n = None
1320 __nch = 0
1321 __nHeis = 0
1322 removeDC = False
1323 ipp = None
1324 lambda_ = 0
1325
1326 def __init__(self, **kwargs):
1327 Operation.__init__(self, **kwargs)
1328
1329 def setup(self, dataOut, n=None, removeDC=False):
1324 1330 '''
1325 1331 n = number of input PRFs (profiles used per pulse-pair estimate)
1326 1332 '''
1327 self.__initime = None
1328 self.__lastdatatime = 0
1329 self.__dataReady = False
1330 self.__buffer = 0
1331 self.__profIndex = 0
1332 self.noise = None
1333 self.__nch = dataOut.nChannels
1334 self.__nHeis = dataOut.nHeights
1335 self.removeDC = removeDC
1336 self.lambda_ = 3.0e8/(9345.0e6)
1337 self.ippSec = dataOut.ippSeconds
1338 self.nCohInt = dataOut.nCohInt
1339 print("IPPseconds",dataOut.ippSeconds)
1333 self.__initime = None
1334 self.__lastdatatime = 0
1335 self.__dataReady = False
1336 self.__buffer = 0
1337 self.__profIndex = 0
1338 self.noise = None
1339 self.__nch = dataOut.nChannels
1340 self.__nHeis = dataOut.nHeights
1341 self.removeDC = removeDC
1342 self.lambda_ = 3.0e8 / (9345.0e6)
1343 self.ippSec = dataOut.ippSeconds
1344 self.nCohInt = dataOut.nCohInt
1345 print("IPPseconds", dataOut.ippSeconds)
1340 1346
1341 1347 print("ELVALOR DE n es:", n)
1342 1348 if n == None:
1343 1349 raise ValueError("n should be specified.")
1344 1350
1345 1351 if n != None:
1346 if n<2:
1352 if n < 2:
1347 1353 raise ValueError("n should be greater than 2")
1348 1354
1349 self.n = n
1355 self.n = n
1350 1356 self.__nProf = n
1351 1357
1352 1358 self.__buffer = numpy.zeros((dataOut.nChannels,
@@ -1354,136 +1360,136 class PulsePairVoltage(Operation):
1354 1360 dataOut.nHeights),
1355 1361 dtype='complex')
1356 1362
1357 def putData(self,data):
1363 def putData(self, data):
1358 1364 '''
1359 1365 Add a profile to the __buffer and increment the profile index by one
1360 1366 '''
1361 self.__buffer[:,self.__profIndex,:]= data
1362 self.__profIndex += 1
1367 self.__buffer[:, self.__profIndex, :] = data
1368 self.__profIndex += 1
1363 1369 return
1364 1370
1365 def pushData(self,dataOut):
1371 def pushData(self, dataOut):
1366 1372 '''
1367 1373 Return the pulse-pair estimates and the number of profiles used in the operation
1368 1374 Affected : self.__profileIndex
1369 1375 '''
1370 1376 #----------------- Remove DC-----------------------------------
1371 if self.removeDC==True:
1372 mean = numpy.mean(self.__buffer,1)
1373 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1374 dc= numpy.tile(tmp,[1,self.__nProf,1])
1375 self.__buffer = self.__buffer - dc
1377 if self.removeDC == True:
1378 mean = numpy.mean(self.__buffer, 1)
1379 tmp = mean.reshape(self.__nch, 1, self.__nHeis)
1380 dc = numpy.tile(tmp, [1, self.__nProf, 1])
1381 self.__buffer = self.__buffer - dc
1376 1382 #------------------ Power calculation --------------------------
1377 pair0 = self.__buffer*numpy.conj(self.__buffer)
1378 pair0 = pair0.real
1379 lag_0 = numpy.sum(pair0,1)
1383 pair0 = self.__buffer * numpy.conj(self.__buffer)
1384 pair0 = pair0.real
1385 lag_0 = numpy.sum(pair0, 1)
1380 1386 #------------------ Per-channel noise calculation --------------
1381 self.noise = numpy.zeros(self.__nch)
1387 self.noise = numpy.zeros(self.__nch)
1382 1388 for i in range(self.__nch):
1383 daux = numpy.sort(pair0[i,:,:],axis= None)
1384 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1389 daux = numpy.sort(pair0[i, :, :], axis=None)
1390 self.noise[i] = hildebrand_sekhon(daux, self.nCohInt)
1385 1391
1386 self.noise = self.noise.reshape(self.__nch,1)
1387 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1388 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1389 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1392 self.noise = self.noise.reshape(self.__nch, 1)
1393 self.noise = numpy.tile(self.noise, [1, self.__nHeis])
1394 noise_buffer = self.noise.reshape(self.__nch, 1, self.__nHeis)
1395 noise_buffer = numpy.tile(noise_buffer, [1, self.__nProf, 1])
1390 1396 #------------------ Received power = P, signal power = S, noise = N ------
1391 1397 #------------------ P = S + N, P = lag_0/N -------------------------------
1392 1398 #-------------------- Power --------------------------------------------------
1393 data_power = lag_0/(self.n*self.nCohInt)
1399 data_power = lag_0 / (self.n * self.nCohInt)
1394 1400 #------------------ Signal ---------------------------------------------------
1395 data_intensity = pair0 - noise_buffer
1396 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1397 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1401 data_intensity = pair0 - noise_buffer
1402 data_intensity = numpy.sum(data_intensity, axis=1) * (self.n * self.nCohInt) # *self.nCohInt)
1403 # data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1398 1404 for i in range(self.__nch):
1399 1405 for j in range(self.__nHeis):
1400 if data_intensity[i][j] < 0:
1406 if data_intensity[i][j] < 0:
1401 1407 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1402 1408
1403 1409 #----------------- Doppler frequency and velocity calculation -------
1404 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1405 lag_1 = numpy.sum(pair1,1)
1406 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1407 data_velocity = (self.lambda_/2.0)*data_freq
1410 pair1 = self.__buffer[:, :-1, :] * numpy.conjugate(self.__buffer[:, 1:, :])
1411 lag_1 = numpy.sum(pair1, 1)
1412 data_freq = (-1 / (2.0 * math.pi * self.ippSec * self.nCohInt)) * numpy.angle(lag_1)
1413 data_velocity = (self.lambda_ / 2.0) * data_freq
1408 1414
1409 1415 #---------------- Estimated mean signal power ----------------------
1410 lag_0 = lag_0/self.n
1411 S = lag_0-self.noise
1416 lag_0 = lag_0 / self.n
1417 S = lag_0 - self.noise
1412 1418
1413 1419 #---------------- Mean Doppler frequency ---------------------------
1414 lag_1 = lag_1/(self.n-1)
1415 R1 = numpy.abs(lag_1)
1420 lag_1 = lag_1 / (self.n - 1)
1421 R1 = numpy.abs(lag_1)
1416 1422
1417 1423 #---------------- SNR calculation ----------------------------------
1418 data_snrPP = S/self.noise
1424 data_snrPP = S / self.noise
1419 1425 for i in range(self.__nch):
1420 1426 for j in range(self.__nHeis):
1421 if data_snrPP[i][j] < 1.e-20:
1427 if data_snrPP[i][j] < 1.e-20:
1422 1428 data_snrPP[i][j] = 1.e-20
1423 1429
1424 1430 #----------------- Spectral width calculation ------------------------
1425 L = S/R1
1426 L = numpy.where(L<0,1,L)
1427 L = numpy.log(L)
1428 tmp = numpy.sqrt(numpy.absolute(L))
1429 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1430 n = self.__profIndex
1431
1432 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1431 L = S / R1
1432 L = numpy.where(L < 0, 1, L)
1433 L = numpy.log(L)
1434 tmp = numpy.sqrt(numpy.absolute(L))
1435 data_specwidth = (self.lambda_ / (2 * math.sqrt(2) * math.pi * self.ippSec * self.nCohInt)) * tmp * numpy.sign(L)
1436 n = self.__profIndex
1437
1438 self.__buffer = numpy.zeros((self.__nch, self.__nProf, self.__nHeis), dtype='complex')
1433 1439 self.__profIndex = 0
1434 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1440 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, n
1435 1441
1436 1442
1437 def pulsePairbyProfiles(self,dataOut):
1443 def pulsePairbyProfiles(self, dataOut):
1438 1444
1439 self.__dataReady = False
1440 data_power = None
1441 data_intensity = None
1442 data_velocity = None
1443 data_specwidth = None
1444 data_snrPP = None
1445 self.__dataReady = False
1446 data_power = None
1447 data_intensity = None
1448 data_velocity = None
1449 data_specwidth = None
1450 data_snrPP = None
1445 1451 self.putData(data=dataOut.data)
1446 if self.__profIndex == self.n:
1447 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1448 self.__dataReady = True
1452 if self.__profIndex == self.n:
1453 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, n = self.pushData(dataOut=dataOut)
1454 self.__dataReady = True
1449 1455
1450 1456 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1451 1457
1452 1458
1453 def pulsePairOp(self, dataOut, datatime= None):
1459 def pulsePairOp(self, dataOut, datatime=None):
1454 1460
1455 1461 if self.__initime == None:
1456 1462 self.__initime = datatime
1457 1463 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1458 self.__lastdatatime = datatime
1464 self.__lastdatatime = datatime
1459 1465
1460 1466 if data_power is None:
1461 return None, None, None,None,None,None
1467 return None, None, None, None, None, None
1462 1468
1463 avgdatatime = self.__initime
1464 deltatime = datatime - self.__lastdatatime
1469 avgdatatime = self.__initime
1470 deltatime = datatime - self.__lastdatatime
1465 1471 self.__initime = datatime
1466 1472
1467 1473 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1468 1474
1469 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1475 def run(self, dataOut, n=None, removeDC=False, overlapping=False, **kwargs):
1470 1476
1471 1477 if not self.isConfig:
1472 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1473 self.isConfig = True
1474 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1475 dataOut.flagNoData = True
1478 self.setup(dataOut=dataOut, n=n, removeDC=removeDC, **kwargs)
1479 self.isConfig = True
1480 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1481 dataOut.flagNoData = True
1476 1482
1477 1483 if self.__dataReady:
1478 dataOut.nCohInt *= self.n
1479 dataOut.dataPP_POW = data_intensity # S
1480 dataOut.dataPP_POWER = data_power # P
1481 dataOut.dataPP_DOP = data_velocity
1482 dataOut.dataPP_SNR = data_snrPP
1483 dataOut.dataPP_WIDTH = data_specwidth
1484 dataOut.PRFbyAngle = self.n #numero de PRF*cada angulo rotado que equivale a un tiempo.
1485 dataOut.utctime = avgdatatime
1486 dataOut.flagNoData = False
1484 dataOut.nCohInt *= self.n
1485 dataOut.dataPP_POW = data_intensity # S
1486 dataOut.dataPP_POWER = data_power # P
1487 dataOut.dataPP_DOP = data_velocity
1488 dataOut.dataPP_SNR = data_snrPP
1489 dataOut.dataPP_WIDTH = data_specwidth
1490 dataOut.PRFbyAngle = self.n # numero de PRF*cada angulo rotado que equivale a un tiempo.
1491 dataOut.utctime = avgdatatime
1492 dataOut.flagNoData = False
1487 1493 return dataOut
1488 1494
1489 1495
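To summarize the estimators implemented above, a stand-alone sketch for a single channel (hypothetical buffer; same lag-0 power and lag-1 Doppler formulas as in pushData):

import math
import numpy

n, nHeis = 100, 500
ippSec, nCohInt = 1e-3, 1
lambda_ = 3.0e8 / 9345.0e6
x = numpy.random.randn(n, nHeis) + 1j * numpy.random.randn(n, nHeis)   # n profiles x nHeis samples

lag_0 = numpy.sum((x * numpy.conj(x)).real, axis=0)                    # total power per height
data_power = lag_0 / (n * nCohInt)

lag_1 = numpy.sum(x[:-1, :] * numpy.conjugate(x[1:, :]), axis=0)       # pulse-pair (lag-1) correlation
data_freq = (-1 / (2.0 * math.pi * ippSec * nCohInt)) * numpy.angle(lag_1)
data_velocity = (lambda_ / 2.0) * data_freq                            # radial velocity per height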
@@ -49,7 +49,7 class PXParametersProc(ProcessingUnit):
49 49 self.dataOut.data_param = numpy.array([self.dataOut.data[var] for var in self.dataOut.parameters])
50 50 self.dataOut.data_param[self.dataOut.data_param == self.dataOut.missing] = numpy.nan
51 51
52 if mode.upper()=='E':
52 if mode.upper() == 'E':
53 53 self.dataOut.heightList = self.dataOut.data['Azimuth']
54 54 else:
55 55 self.dataOut.heightList = self.dataOut.data['Elevation']
@@ -61,4 +61,4 class PXParametersProc(ProcessingUnit):
61 61 meta[attr] = getattr(self.dataOut, attr)
62 62
63 63 meta['mode'] = mode
64 self.dataOut.meta = meta
64 self.dataOut.meta = meta
@@ -64,9 +64,9 class Remote(Thread):
64 64
65 65 self.stopFlag = False
66 66
67 print("[Remote Server] Opening server: %s" %self.__server)
67 print("[Remote Server] Opening server: %s" % self.__server)
68 68 if self.open(self.__server, self.__username, self.__password, self.__remotefolder):
69 print("[Remote Server] %s server was opened successfully" %self.__server)
69 print("[Remote Server] %s server was opened successfully" % self.__server)
70 70
71 71 self.close()
72 72
@@ -132,7 +132,7 class Remote(Thread):
132 132 Returns:
133 133 0 in error case else 1
134 134 """
135 print("[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder))
135 print("[Remote Server] Uploading %s to %s:%s" % (fullfilename, self.server, self.remotefolder))
136 136
137 137 if not self.status:
138 138 return 0
@@ -144,7 +144,7 class Remote(Thread):
144 144 return 0
145 145
146 146 if not self.sendFile(fullfilename):
147 print("[Remote Server] Error uploading file %s" %fullfilename)
147 print("[Remote Server] Error uploading file %s" % fullfilename)
148 148 return 0
149 149
150 150 print("[Remote Server] upload finished successfully")
@@ -184,7 +184,7 class Remote(Thread):
184 184 return
185 185
186 186 if not self.cd(self.remotefolder):
187 raise ValueError("Could not access to the new remote directory: %s" %self.remotefolder)
187 raise ValueError("Could not access to the new remote directory: %s" % self.remotefolder)
188 188
189 189 while True:
190 190
@@ -199,7 +199,7 class Remote(Thread):
199 199 # self.bussy = True
200 200 self.mutex.acquire()
201 201
202 print("[Remote Server] Opening %s" %self.__server)
202 print("[Remote Server] Opening %s" % self.__server)
203 203 if not self.open(self.__server, self.__username, self.__password, self.__remotefolder):
204 204 self.mutex.release()
205 205 continue
@@ -207,7 +207,7 class Remote(Thread):
207 207 for thisFile in self.fileList:
208 208 self.upload(thisFile, self.remotefolder)
209 209
210 print("[Remote Server] Closing %s" %self.__server)
210 print("[Remote Server] Closing %s" % self.__server)
211 211 self.close()
212 212
213 213 self.mutex.release()
@@ -261,7 +261,7 class FTPClient(Remote):
261 261 try:
262 262 ftpClientObj = ftplib.FTP(server)
263 263 except ftplib.all_errors as e:
264 print("[FTP Server]: FTP server connection fail: %s" %server)
264 print("[FTP Server]: FTP server connection fail: %s" % server)
265 265 print("[FTP Server]:", e)
266 266 self.status = 0
267 267 return 0
@@ -279,7 +279,7 class FTPClient(Remote):
279 279 try:
280 280 ftpClientObj.cwd(remotefolder)
281 281 except ftplib.all_errors:
282 print("[FTP Server]: FTP remote folder is invalid: %s" %remotefolder)
282 print("[FTP Server]: FTP remote folder is invalid: %s" % remotefolder)
283 283 remotefolder = ftpClientObj.pwd()
284 284
285 285 self.server = server
@@ -316,7 +316,7 class FTPClient(Remote):
316 316 try:
317 317 self.__ftpClientObj.mkd(dirname)
318 318 except ftplib.all_errors:
319 print("[FTP Server]: Error creating remote folder: %s" %remotefolder)
319 print("[FTP Server]: Error creating remote folder: %s" % remotefolder)
320 320 return 0
321 321
322 322 return 1
@@ -343,7 +343,7 class FTPClient(Remote):
343 343 try:
344 344 self.__ftpClientObj.cwd(remotefolder)
345 345 except ftplib.all_errors:
346 print('[FTP Server]: Error changing to %s' %remotefolder)
346 print('[FTP Server]: Error changing to %s' % remotefolder)
347 347 print('[FTP Server]: Trying to create remote folder')
348 348
349 349 if not self.mkdir(remotefolder):
@@ -368,7 +368,7 class FTPClient(Remote):
368 368
369 369 filename = os.path.basename(fullfilename)
370 370
371 command = "STOR %s" %filename
371 command = "STOR %s" % filename
372 372
373 373 try:
374 374 self.__ftpClientObj.storbinary(command, fp)
@@ -463,7 +463,7 class SSHClient(Remote):
463 463 self.status = 1
464 464
465 465 if not self.cd(remotefolder):
466 raise ValueError("[SSH Server]: Could not access to remote folder: %s" %remotefolder)
466 raise ValueError("[SSH Server]: Could not access to remote folder: %s" % remotefolder)
467 467 return 0
468 468
469 469 self.remotefolder = remotefolder
@@ -516,7 +516,7 class SSHClient(Remote):
516 516 0 in error case else 1
517 517 """
518 518
519 command = 'mkdir %s' %remotefolder
519 command = 'mkdir %s' % remotefolder
520 520
521 521 return self.__execute(command)
522 522
@@ -545,8 +545,8 class SSHClient(Remote):
545 545 if remotefolder == self.remotefolder:
546 546 return 1
547 547
548 chk_command = "cd %s; pwd" %remotefolder
549 mkdir_command = "mkdir %s" %remotefolder
548 chk_command = "cd %s; pwd" % remotefolder
549 mkdir_command = "mkdir %s" % remotefolder
550 550
551 551 if not self.__execute(chk_command):
552 552 if not self.__execute(mkdir_command):
@@ -569,7 +569,7 class SSHClient(Remote):
569 569 return 0
570 570
571 571 remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1])
572 command = 'chmod 775 %s' %remotefile
572 command = 'chmod 775 %s' % remotefile
573 573
574 574 return self.__execute(command)
575 575
@@ -596,7 +596,7 class SendToServer(ProcessingUnit):
596 596 self.clientObj = SSHClient(server, username, password, remotefolder, period)
597 597
598 598 if not self.clientObj:
599 raise ValueError("%s has been chosen as remote access protocol but it is not valid" %protocol)
599 raise ValueError("%s has been chosen as remote access protocol but it is not valid" % protocol)
600 600
601 601 self.clientObj.start()
602 602
@@ -607,16 +607,16 class SendToServer(ProcessingUnit):
607 607 else:
608 608 folderList = self.localfolder
609 609
610 #Remove duplicate items
610 # Remove duplicate items
611 611 folderList = list(set(folderList))
612 612
613 613 fullfilenameList = []
614 614
615 615 for thisFolder in folderList:
616 616
617 print("[Remote Server]: Searching files on %s" %thisFolder)
617 print("[Remote Server]: Searching files on %s" % thisFolder)
618 618
619 filenameList = glob.glob1(thisFolder, '*%s' %self.ext)
619 filenameList = glob.glob1(thisFolder, '*%s' % self.ext)
620 620
621 621 if len(filenameList) < 1:
622 622
@@ -628,8 +628,8 class SendToServer(ProcessingUnit):
628 628 if fullfilename in fullfilenameList:
629 629 continue
630 630
631 #Only files modified in the last 30 minutes are considered
632 if os.path.getmtime(fullfilename) < time.time() - 30*60:
631 # Only files modified in the last 30 minutes are considered
632 if os.path.getmtime(fullfilename) < time.time() - 30 * 60:
633 633 continue
634 634
635 635 fullfilenameList.append(fullfilename)
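A minimal sketch of the recent-file filter used above (folder and extension are hypothetical):

import glob
import os
import time

localfolder, ext = '/tmp/data', '.hdf5'
recent = []
for name in glob.glob1(localfolder, '*%s' % ext):
    fullfilename = os.path.join(localfolder, name)
    if os.path.getmtime(fullfilename) >= time.time() - 30 * 60:   # modified within the last 30 minutes
        recent.append(fullfilename)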
@@ -667,7 +667,7 class FTP(object):
667 667 Written by "Daniel Suarez":mailto:daniel.suarez@jro.igp.gob.pe Oct. 26, 2010
668 668 """
669 669
670 def __init__(self,server = None, username=None, password=None, remotefolder=None):
670 def __init__(self, server=None, username=None, password=None, remotefolder=None):
671 671 """
672 672 This method is used to set the FTP parameters and establish a connection to the remote server
673 673
@@ -692,7 +692,7 class FTP(object):
692 692
693 693 """
694 694
695 if ((server == None) and (username==None) and (password==None) and (remotefolder==None)):
695 if ((server == None) and (username == None) and (password == None) and (remotefolder == None)):
696 696 server, username, password, remotefolder = self.parmsByDefault()
697 697
698 698 self.server = server
@@ -705,7 +705,7 class FTP(object):
705 705
706 706 try:
707 707 self.ftp = ftplib.FTP(self.server)
708 self.ftp.login(self.username,self.password)
708 self.ftp.login(self.username, self.password)
709 709 self.ftp.cwd(self.remotefolder)
710 710 # print 'Connect to FTP Server: Successfully'
711 711
@@ -734,7 +734,7 class FTP(object):
734 734
735 735 self.fileList = []
736 736 self.folderList = []
737 #only for test
737 # only for test
738 738 for f in self.dirList:
739 739 name, ext = os.path.splitext(f)
740 740 if ext != '':
@@ -750,7 +750,7 class FTP(object):
750 750 return server, username, password, remotefolder
751 751
752 752
753 def mkd(self,dirname):
753 def mkd(self, dirname):
754 754 """
755 755 mkd is used to make a directory on the remote server
756 756
@@ -763,13 +763,13 class FTP(object):
763 763 try:
764 764 self.ftp.mkd(dirname)
765 765 except:
766 print('Error creating remote folder:%s'%dirname)
766 print('Error creating remote folder:%s' % dirname)
767 767 return 1
768 768
769 769 return 0
770 770
771 771
772 def delete(self,filename):
772 def delete(self, filename):
773 773 """
774 774 delete is used to delete a file in the current working directory of the remote server
775 775
@@ -783,12 +783,12 class FTP(object):
783 783 try:
784 784 self.ftp.delete(filename)
785 785 except:
786 print('Error deleting remote file:%s'%filename)
786 print('Error deleting remote file:%s' % filename)
787 787 return 1
788 788
789 789 return 0
790 790
791 def download(self,filename,localfolder):
791 def download(self, filename, localfolder):
792 792 """
793 793 download is used to download a file from the remote folder into the local folder
794 794
@@ -805,11 +805,11 class FTP(object):
805 805
806 806
807 807 if not(filename in self.fileList):
808 print('filename:%s not exists'%filename)
808 print('filename: %s does not exist' % filename)
809 809 self.status = 1
810 810 return self.status
811 811
812 newfilename = os.path.join(localfolder,filename)
812 newfilename = os.path.join(localfolder, filename)
813 813
814 814 self.file = open(newfilename, 'wb')
815 815
@@ -827,14 +827,14 class FTP(object):
827 827 return self.status
828 828
829 829
830 def __handleDownload(self,block):
830 def __handleDownload(self, block):
831 831 """
832 832 __handleDownload is used to handle writing a downloaded block to the local file
833 833 """
834 834 self.file.write(block)
835 835
836 836
837 def upload(self,filename,remotefolder=None):
837 def upload(self, filename, remotefolder=None):
838 838 """
839 839 upload is used to upload a local file to the remote directory
840 840
@@ -872,13 +872,13 class FTP(object):
872 872
873 873 self.file.close()
874 874
875 #back to initial directory in __init__()
875 # back to initial directory in __init__()
876 876 self.ftp.cwd(self.remotefolder)
877 877
878 878 return self.status
879 879
880 880
881 def dir(self,remotefolder):
881 def dir(self, remotefolder):
882 882 """
883 883 dir is used to change the working directory on the remote server and get the folder and file lists
884 884
@@ -902,7 +902,7 class FTP(object):
902 902 print('Error changing to ' + self.remotefolder)
903 903 infoList = None
904 904 self.folderList = None
905 return infoList,self.folderList
905 return infoList, self.folderList
906 906
907 907 self.dirList = []
908 908
@@ -914,27 +914,27 class FTP(object):
914 914 print("no files in this directory")
915 915 infoList = None
916 916 self.folderList = None
917 return infoList,self.folderList
917 return infoList, self.folderList
918 918 except ftplib.all_errors:
919 919 print('Error Displaying Dir-Files')
920 920 infoList = None
921 921 self.folderList = None
922 return infoList,self.folderList
922 return infoList, self.folderList
923 923
924 924 infoList = []
925 925 self.fileList = []
926 926 self.folderList = []
927 927 for f in self.dirList:
928 name,ext = os.path.splitext(f)
928 name, ext = os.path.splitext(f)
929 929 if ext != '':
930 930 self.fileList.append(f)
931 value = (f,self.ftp.size(f))
931 value = (f, self.ftp.size(f))
932 932 infoList.append(value)
933 933
934 934 if ext == '':
935 935 self.folderList.append(f)
936 936
937 return infoList,self.folderList
937 return infoList, self.folderList
938 938
939 939
940 940 def close(self):
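A hedged usage sketch of this FTP helper (server, credentials and paths below are placeholders, not values from the project):

ftpObj = FTP(server='ftp.example.com', username='user', password='secret', remotefolder='/incoming')
infoList, folderList = ftpObj.dir('/incoming')   # list remote (filename, size) pairs and subfolders
ftpObj.upload('/tmp/plot.png', '/incoming')      # returns the status flag set by the transfer
ftpObj.close()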
@@ -989,8 +989,8 class SendByFTP(Operation):
989 989 return
990 990
991 991 def filterByExt(self, ext, localfolder):
992 fnameList = glob.glob1(localfolder,ext)
993 self.filenameList = [os.path.join(localfolder,x) for x in fnameList]
992 fnameList = glob.glob1(localfolder, ext)
993 self.filenameList = [os.path.join(localfolder, x) for x in fnameList]
994 994
995 995 if len(self.filenameList) == 0:
996 996 self.status = 0
@@ -1005,4 +1005,4 class SendByFTP(Operation):
1005 1005
1006 1006 self.counter = 0
1007 1007
1008 self.status = 1
1008 self.status = 1
@@ -23,26 +23,26 from schainpy.utils import log
23 23
24 24
25 25 PLOT_CODES = {
26 'rti': 0, # Range time intensity (RTI).
27 'spc': 1, # Spectra (and Cross-spectra) information.
28 'cspc': 2, # Cross-Correlation information.
29 'coh': 3, # Coherence map.
30 'base': 4, # Base lines graphic.
31 'row': 5, # Row Spectra.
32 'total': 6, # Total Power.
33 'drift': 7, # Drifts graphics.
34 'height': 8, # Height profile.
35 'phase': 9, # Signal Phase.
26 'rti': 0, # Range time intensity (RTI).
27 'spc': 1, # Spectra (and Cross-spectra) information.
28 'cspc': 2, # Cross-Correlation information.
29 'coh': 3, # Coherence map.
30 'base': 4, # Base lines graphic.
31 'row': 5, # Row Spectra.
32 'total': 6, # Total Power.
33 'drift': 7, # Drifts graphics.
34 'height': 8, # Height profile.
35 'phase': 9, # Signal Phase.
36 36 'power': 16,
37 37 'noise': 17,
38 38 'beacon': 18,
39 39 'wind': 22,
40 40 'skymap': 23,
41 41 'Unknown': 24,
42 'V-E': 25, # PIP Velocity.
43 'Z-E': 26, # PIP Reflectivity.
44 'V-A': 27, # RHI Velocity.
45 'Z-A': 28, # RHI Reflectivity.
42 'V-E': 25, # PIP Velocity.
43 'Z-E': 26, # PIP Reflectivity.
44 'V-A': 27, # RHI Velocity.
45 'Z-A': 28, # RHI Reflectivity.
46 46 }
47 47
48 48 def get_plot_code(s):
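The body of get_plot_code is not shown in this hunk; for exact names, a plain dictionary lookup against PLOT_CODES behaves as sketched below (only an assumed equivalent):

code = PLOT_CODES.get('rti', PLOT_CODES['Unknown'])   # 0; names missing from the table fall back to 24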
@@ -247,7 +247,7 class SendToFTP(Operation):
247 247
248 248 try:
249 249 if not self.ready:
250 if time.time()-self.current_time < self.timeout:
250 if time.time() - self.current_time < self.timeout:
251 251 return
252 252 else:
253 253 self.current_time = time.time()
@@ -281,8 +281,8 class SendToFTP(Operation):
281 281 def upload(self, src, dst):
282 282
283 283 log.log('Uploading {} -> {} '.format(
284 src.split('/')[-1], dst.split('/')[-1]),
285 self.name,
284 src.split('/')[-1], dst.split('/')[-1]),
285 self.name,
286 286 nl=False
287 287 )
288 288
@@ -338,10 +338,10 class SendToFTP(Operation):
338 338
339 339 if not self.isConfig:
340 340 self.setup(
341 server=server,
342 username=username,
343 password=password,
344 timeout=timeout,
341 server=server,
342 username=username,
343 password=password,
344 timeout=timeout,
345 345 **kwargs
346 346 )
347 347 self.isConfig = True
@@ -136,7 +136,7 class SCPClient(object):
136 136 self._pushed = 0
137 137 self.channel.settimeout(self.socket_timeout)
138 138 scp_command = (b'scp -t ', b'scp -r -t ')[recursive]
139 self.channel.exec_command(scp_command +
139 self.channel.exec_command(scp_command +
140 140 self.sanitize(asbytes(remote_path)))
141 141 self._recv_confirm()
142 142
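The (a, b)[recursive] expression above simply selects between two byte strings with a boolean index; a tiny illustration:

recursive = True
scp_command = (b'scp -t ', b'scp -r -t ')[recursive]   # False -> index 0, True -> index 1
print(scp_command)                                     # b'scp -r -t '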
@@ -174,20 +174,20 class SCPClient(object):
174 174 not os.path.isdir(os.path.abspath(local_path)))
175 175 if len(remote_path) > 1:
176 176 if not os.path.exists(self._recv_dir):
177 raise SCPException("Local path '%s' does not exist" %
177 raise SCPException("Local path '%s' does not exist" %
178 178 asunicode(self._recv_dir))
179 179 elif not os.path.isdir(self._recv_dir):
180 raise SCPException("Local path '%s' is not a directory" %
180 raise SCPException("Local path '%s' is not a directory" %
181 181 asunicode(self._recv_dir))
182 182 rcsv = (b'', b' -r')[recursive]
183 183 prsv = (b'', b' -p')[preserve_times]
184 184 self.channel = self._open()
185 185 self._pushed = 0
186 186 self.channel.settimeout(self.socket_timeout)
187 self.channel.exec_command(b"scp" +
188 rcsv +
189 prsv +
190 b" -f " +
187 self.channel.exec_command(b"scp" +
188 rcsv +
189 prsv +
190 b" -f " +
191 191 b' '.join(remote_path))
192 192 self._recv_all()
193 193 self.close()
@@ -227,7 +227,7 class SCPClient(object):
227 227 # The protocol can't handle \n in the filename.
228 228 # Quote them as the control sequence \^J for now,
229 229 # which is how openssh handles it.
230 self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
230 self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
231 231 basename.replace(b'\n', b'\\^J') + b"\n")
232 232 self._recv_confirm()
233 233 file_pos = 0
@@ -287,7 +287,7 class SCPClient(object):
287 287 basename = asbytes(os.path.basename(directory))
288 288 if self.preserve_times:
289 289 self._send_time(mtime, atime)
290 self.channel.sendall(('D%s 0 ' % mode).encode('ascii') +
290 self.channel.sendall(('D%s 0 ' % mode).encode('ascii') +
291 291 basename.replace(b'\n', b'\\^J') + b'\n')
292 292 self._recv_confirm()
293 293 self._pushed += 1
@@ -461,4 +461,4 class SCPClient(object):
461 461
462 462 class SCPException(Exception):
463 463 """SCP exception class"""
464 pass
464 pass
@@ -4,27 +4,27 import os, sys
4 4 from schainpy.controller import Project
5 5
6 6 controllerObj = Project()
7 controllerObj.setup(id = '002', name='script02', description="JASMET Meteor Detection")
7 controllerObj.setup(id='002', name='script02', description="JASMET Meteor Detection")
8 8
9 9 #-------------------------------------- Setup -----------------------------------------
10 #Verificar estas variables
10 # Verificar estas variables
11 11
12 #Path para los archivos
12 # Path para los archivos
13 13 # path = '/mnt/jars/2016_08/NOCHE'
14 14 # path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/DIA'
15 15 # path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/NOCHE'
16 16 path = '/home/nanosat/data/jasmet'
17 17
18 #Path para los graficos
19 pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/graphics')
18 # Path para los graficos
19 pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30/201608/graphics')
20 20
21 #Path para los archivos HDF5 de meteoros
22 pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/meteor')
21 # Path para los archivos HDF5 de meteoros
22 pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30/201608/meteor')
23 23
24 #Fechas para busqueda de archivos
24 # Fechas para busqueda de archivos
25 25 startDate = '2010/08/29'
26 26 endDate = '2017/09/11'
27 #Horas para busqueda de archivos
27 # Horas para busqueda de archivos
28 28 startTime = '00:00:00'
29 29 endTime = '23:59:59'
30 30
@@ -84,9 +84,9 opObj10.addParameter(name='hmax', value='120', format='float')
84 84 opObj12 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
85 85 opObj12.addParameter(name='path', value=pathfile)
86 86 opObj12.addParameter(name='blocksPerFile', value='1000', format='int')
87 opObj12.addParameter(name='metadataList',value='type,heightList,paramInterval,timeZone',format='list')
88 opObj12.addParameter(name='dataList',value='data_param,utctime',format='list')
89 opObj12.addParameter(name='mode',value='2',format='int')
87 opObj12.addParameter(name='metadataList', value='type,heightList,paramInterval,timeZone', format='list')
88 opObj12.addParameter(name='dataList', value='data_param,utctime', format='list')
89 opObj12.addParameter(name='mode', value='2', format='int')
90 90
91 91 #--------------------------------------------------------------------------------------------------
92 92
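For orientation, a minimal schainpy pipeline in the same style as these scripts (the reader datatype, paths and parameters below are illustrative placeholders, not the exact JASMET configuration):

from schainpy.controller import Project

controllerObj = Project()
controllerObj.setup(id='100', name='example', description="Minimal pipeline sketch")

readUnitConfObj = controllerObj.addReadUnit(datatype='ParamReader',          # hypothetical reader choice
                                            path='/tmp/data',
                                            startDate='2016/08/29', endDate='2016/09/11',
                                            startTime='00:00:00', endTime='23:59:59')

procUnitConfObj = controllerObj.addProcUnit(datatype='ParametersProc', inputId=readUnitConfObj.getId())
opObj = procUnitConfObj.addOperation(name='ParamWriter', optype='other')
opObj.addParameter(name='path', value='/tmp/out')
opObj.addParameter(name='blocksPerFile', value='1000', format='int')

controllerObj.start()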
@@ -8,24 +8,24 sys.path.insert(0, path)
8 8 from schainpy.controller import Project
9 9
10 10 controllerObj = Project()
11 controllerObj.setup(id = '004', name='script04', description="JASMET Phase Calibration")
11 controllerObj.setup(id='004', name='script04', description="JASMET Phase Calibration")
12 12
13 13 #-------------------------------------- Setup -----------------------------------------
14 #Verificar estas variables
14 # Verificar estas variables
15 15
16 #Path donde estan los archivos HDF5 de meteoros
17 path = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/meteor')
16 # Path donde estan los archivos HDF5 de meteoros
17 path = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/meteor')
18 18
19 #Path para los graficos
20 pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/graphics')
19 # Path para los graficos
20 pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/graphics')
21 21
22 #Path donde se almacenaran las fases calculadas
23 pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/phase')
22 # Path donde se almacenaran las fases calculadas
23 pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/phase')
24 24
25 #Fechas para busqueda de archivos
25 # Fechas para busqueda de archivos
26 26 startDate = '2016/08/29'
27 27 endDate = '2016/09/11'
28 #Horas para busqueda de archivos
28 # Horas para busqueda de archivos
29 29 startTime = '00:00:00'
30 30 endTime = '23:59:59'
31 31
@@ -61,8 +61,8 opObj32.addParameter(name='figpath', value=pathfig, format='str')
61 61 opObj33 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
62 62 opObj33.addParameter(name='path', value=pathfile)
63 63 opObj33.addParameter(name='blocksPerFile', value='1000', format='int')
64 opObj33.addParameter(name='metadataList',value='type,outputInterval,timeZone',format='list')
65 opObj33.addParameter(name='dataList',value='data_output,utctime',format='list')
64 opObj33.addParameter(name='metadataList', value='type,outputInterval,timeZone', format='list')
65 opObj33.addParameter(name='dataList', value='data_output,utctime', format='list')
66 66 # # opObj25.addParameter(name='mode',value='1,0,0',format='intlist')
67 67
68 controllerObj.start()
68 controllerObj.start()
@@ -8,28 +8,28 sys.path.insert(0, path)
8 8 from schainpy.controller import Project
9 9
10 10 controllerObj = Project()
11 controllerObj.setup(id = '005', name='script05', description="JASMET Wind Estimation")
11 controllerObj.setup(id='005', name='script05', description="JASMET Wind Estimation")
12 12
13 13 #-------------------------------------- Setup -----------------------------------------
14 #Verificar estas variables
14 # Verificar estas variables
15 15
16 #Path donde estan los archivos HDF5 de meteoros
17 path = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/meteor')
16 # Path donde estan los archivos HDF5 de meteoros
17 path = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/meteor')
18 18
19 #Path para los graficos
20 pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/graphics')
19 # Path para los graficos
20 pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/graphics')
21 21
22 #Path donde se almacenaran las estimaciones de vientos
23 pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/phase')
22 # Path donde se almacenaran las estimaciones de vientos
23 pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/phase')
24 24
25 #Fechas para busqueda de archivos
25 # Fechas para busqueda de archivos
26 26 startDate = '2016/08/29'
27 27 endDate = '2016/09/11'
28 #Horas para busqueda de archivos
28 # Horas para busqueda de archivos
29 29 startTime = '00:00:00'
30 30 endTime = '23:59:59'
31 31
32 #Offsets optimos obtenidos con OptimumOffset.py
32 # Offsets optimos obtenidos con OptimumOffset.py
33 33 phaseOffsets = '-2.84, -1.77, 11.94, 9.71'
34 34 phaseOffsets = '-5.86, -0.93, -7.29, 23.35'
35 35 #------------------------------------------------------------------------------------------------
@@ -43,7 +43,7 readUnitConfObj = controllerObj.addReadUnit(datatype='ParamReader',
43 43 #--------------------------------------------------------------------------------------------------
44 44
45 45 procUnitConfObj1 = controllerObj.addProcUnit(datatype='ParametersProc', inputId=readUnitConfObj.getId())
46 opObj10 = procUnitConfObj1.addOperation(name='CorrectSMPhases',optype='other')
46 opObj10 = procUnitConfObj1.addOperation(name='CorrectSMPhases', optype='other')
47 47 opObj10.addParameter(name='phaseOffsets', value=phaseOffsets, format='floatlist')
48 48
49 49 opObj13 = procUnitConfObj1.addOperation(name='SkyMapPlot', optype='other')
@@ -67,7 +67,7 opObj23 = procUnitConfObj1.addOperation(name='WindProfilerPlot', optype='other')
67 67 opObj23.addParameter(name='id', value='2', format='int')
68 68 opObj23.addParameter(name='wintitle', value='Wind Profiler', format='str')
69 69 opObj23.addParameter(name='save', value='1', format='bool')
70 opObj23.addParameter(name='figpath', value = pathfig, format='str')
70 opObj23.addParameter(name='figpath', value=pathfig, format='str')
71 71 opObj23.addParameter(name='zmin', value='-140', format='int')
72 72 opObj23.addParameter(name='zmax', value='140', format='int')
73 73 opObj23.addParameter(name='xmin', value='0', format='float')
@@ -78,8 +78,8 opObj23.addParameter(name='ymax', value='110', format='float')
78 78 opObj33 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
79 79 opObj33.addParameter(name='path', value=pathfile)
80 80 opObj33.addParameter(name='blocksPerFile', value='1000', format='int')
81 opObj33.addParameter(name='metadataList',value='type,outputInterval,timeZone',format='list')
82 opObj33.addParameter(name='dataList',value='data_output,utctime',format='list')
81 opObj33.addParameter(name='metadataList', value='type,outputInterval,timeZone', format='list')
82 opObj33.addParameter(name='dataList', value='data_output,utctime', format='list')
83 83 #--------------------------------------------------------------------------------------------------
84 84
85 controllerObj.start()
85 controllerObj.start()
@@ -17,13 +17,13 def fiber(cursor, skip, q, dt):
17 17 startTime="00:00:00",
18 18 endTime="23:59:59",
19 19 online=0,
20 #set=1426485881,
20 # set=1426485881,
21 21 walk=1,
22 22 queue=q,
23 23 cursor=cursor,
24 24 skip=skip,
25 25 verbose=1
26 #timezone=-5*3600
26 # timezone=-5*3600
27 27 )
28 28
29 29 # #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
@@ -6,36 +6,36 import os, sys
6 6 import datetime
7 7 import time
8 8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
9 # path = os.path.dirname(os.getcwd())
10 # path = os.path.dirname(path)
11 # sys.path.insert(0, path)
12 12
13 13 from schainpy.controller import Project
14 14
15 15 desc = "USRP_test"
16 16 filename = "USRP_processing.xml"
17 17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
18 controllerObj.setup(id='191', name='Test_USRP', description=desc)
19 19
20 20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21 21
22 22 #######################################################################
23 23 ###### READ/WRITE PATHS, PLOTS AND WEB UPLOAD #########################
24 24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
25 # path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 26
27 27
28 path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
28 path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29 29
30 30 figpath = '/home/soporte/data_hdf5_imag'
31 #remotefolder = "/home/wmaster/graficos"
31 # remotefolder = "/home/wmaster/graficos"
32 32 #######################################################################
33 33 ################# PLOT RANGE ##########################################
34 34 #######################################################################
35 35 dBmin = '30'
36 36 dBmax = '60'
37 37 xmin = '0'
38 xmax ='24'
38 xmax = '24'
39 39 ymin = '0'
40 40 ymax = '600'
41 41 #######################################################################
@@ -50,15 +50,15 yesterday = str2.strftime("%Y/%m/%d")
50 50 #######################################################################
51 51 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
52 52 path=path,
53 startDate="2019/01/01",#today,
54 endDate="2109/12/30",#today,
53 startDate="2019/01/01", # today,
54 endDate="2109/12/30", # today,
55 55 startTime='00:00:00',
56 56 endTime='23:59:59',
57 57 delay=0,
58 #set=0,
58 # set=0,
59 59 online=0,
60 60 walk=1,
61 ippKm = 1000)
61 ippKm=1000)
62 62
63 63 opObj11 = readUnitConfObj.addOperation(name='printInfo')
64 64 opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
@@ -71,31 +71,31 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
71 71 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
72 72 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
73 73
74 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
75 #opObj11.addParameter(name='frequency', value='30e6', format='float')
74 # opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
75 # opObj11.addParameter(name='frequency', value='30e6', format='float')
76 76
77 #opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
78 #opObj10.addParameter(name='id', value='10', format='int')
79 ##opObj10.addParameter(name='xmin', value='0', format='int')
80 ##opObj10.addParameter(name='xmax', value='50', format='int')
81 #opObj10.addParameter(name='type', value='iq')
82 #opObj10.addParameter(name='ymin', value='-5000', format='int')
83 ##opObj10.addParameter(name='ymax', value='8500', format='int')
77 # opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
78 # opObj10.addParameter(name='id', value='10', format='int')
79 # #opObj10.addParameter(name='xmin', value='0', format='int')
80 # #opObj10.addParameter(name='xmax', value='50', format='int')
81 # opObj10.addParameter(name='type', value='iq')
82 # opObj10.addParameter(name='ymin', value='-5000', format='int')
83 # #opObj10.addParameter(name='ymax', value='8500', format='int')
84 84
85 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
86 #opObj10.addParameter(name='h0', value='-5000', format='float')
85 # opObj10 = procUnitConfObjA.addOperation(name='setH0')
86 # opObj10.addParameter(name='h0', value='-5000', format='float')
87 87
88 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
89 #opObj11.addParameter(name='window', value='1', format='int')
88 # opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
89 # opObj11.addParameter(name='window', value='1', format='int')
90 90
91 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
92 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
93 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
94 #opObj11.addParameter(name='nCode', value='1', format='int')
95 #opObj11.addParameter(name='nBaud', value='28', format='int')
91 # codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
92 # opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
93 # opObj11.addParameter(name='code', value=codigo, format='floatlist')
94 # opObj11.addParameter(name='nCode', value='1', format='int')
95 # opObj11.addParameter(name='nBaud', value='28', format='int')
96 96
97 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
98 #opObj11.addParameter(name='n', value='100', format='int')
97 # opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
98 # opObj11.addParameter(name='n', value='100', format='int')
99 99
100 100 #######################################################################
101 101 ########## FREQUENCY-DOMAIN OPERATIONS ################################
@@ -103,41 +103,41 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
103 103 procUnitConfObjSousySpectra = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
104 104 procUnitConfObjSousySpectra.addParameter(name='nFFTPoints', value='100', format='int')
105 105 procUnitConfObjSousySpectra.addParameter(name='nProfiles', value='100', format='int')
106 #procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
106 # procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
107 107
108 #opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
109 #opObj13.addParameter(name='mode', value='2', format='int')
108 # opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
109 # opObj13.addParameter(name='mode', value='2', format='int')
110 110
111 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
112 #opObj11.addParameter(name='n', value='60', format='float')
111 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
112 # opObj11.addParameter(name='n', value='60', format='float')
113 113 #######################################################################
114 114 ########## FREQUENCY-DOMAIN PLOTS #####################################
115 115 #######################################################################
116 #SpectraPlot
116 # SpectraPlot
117 117
118 118 opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraPlot', optype='external')
119 119 opObj11.addParameter(name='id', value='1', format='int')
120 120 opObj11.addParameter(name='wintitle', value='Spectra', format='str')
121 #opObj11.addParameter(name='xmin', value=-0.01, format='float')
122 #opObj11.addParameter(name='xmax', value=0.01, format='float')
123 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
124 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
125 #opObj11.addParameter(name='ymin', value=ymin, format='int')
126 #opObj11.addParameter(name='ymax', value=ymax, format='int')
121 # opObj11.addParameter(name='xmin', value=-0.01, format='float')
122 # opObj11.addParameter(name='xmax', value=0.01, format='float')
123 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
124 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
125 # opObj11.addParameter(name='ymin', value=ymin, format='int')
126 # opObj11.addParameter(name='ymax', value=ymax, format='int')
127 127 opObj11.addParameter(name='showprofile', value='1', format='int')
128 128 opObj11.addParameter(name='save', value=figpath, format='str')
129 129 opObj11.addParameter(name='save_period', value=10, format='int')
130 130
131 131
132 #RTIPLOT
132 # RTIPLOT
133 133
134 134 opObj11 = procUnitConfObjSousySpectra.addOperation(name='RTIPlot', optype='external')
135 135 opObj11.addParameter(name='id', value='2', format='int')
136 136 opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
137 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
138 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
139 #opObj11.addParameter(name='ymin', value=ymin, format='int')
140 #opObj11.addParameter(name='ymax', value=ymax, format='int')
137 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
138 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
139 # opObj11.addParameter(name='ymin', value=ymin, format='int')
140 # opObj11.addParameter(name='ymax', value=ymax, format='int')
141 141 opObj11.addParameter(name='xmin', value=0, format='int')
142 142 opObj11.addParameter(name='xmax', value=23, format='int')
143 143
@@ -171,9 +171,9 opObj11.addParameter(name='save_period', value=10, format='int')
171 171 #######################################################################
172 172 ############### WRITER UNIT ###########################################
173 173 #######################################################################
174 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
175 #opObj11.addParameter(name='path', value=wr_path)
176 #opObj11.addParameter(name='blocksPerFile', value='50', format='int')
174 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
175 # opObj11.addParameter(name='path', value=wr_path)
176 # opObj11.addParameter(name='blocksPerFile', value='50', format='int')
177 177 print ("Escribiendo el archivo XML")
178 178 print ("Leyendo el archivo XML")
179 179
@@ -17,13 +17,13 def fiber(cursor, skip, q, dt):
17 17 startTime="00:00:00",
18 18 endTime="23:59:59",
19 19 online=0,
20 #set=1426485881,
20 # set=1426485881,
21 21 delay=10,
22 22 walk=1,
23 23 queue=q,
24 24 cursor=cursor,
25 25 skip=skip,
26 #timezone=-5*3600
26 # timezone=-5*3600
27 27 )
28 28
29 29 # #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
@@ -12,34 +12,34 def findFiles(path):
12 12 fileList = []
13 13
14 14 for thisPath in os.listdir(path):
15 dirList.append(os.path.join(path,thisPath))
15 dirList.append(os.path.join(path, thisPath))
16 16 dirList.sort()
17 17
18 18 for thisDirectory in dirList:
19 19 files = glob.glob1(thisDirectory, "*.hdf5")
20 20 files.sort()
21 21 for thisFile in files:
22 fileList.append(os.path.join(thisDirectory,thisFile))
22 fileList.append(os.path.join(thisDirectory, thisFile))
23 23
24 24 return fileList
25 25
26 26 def readFiles(fileList):
27 27
28 meteors_array = numpy.zeros((1,4))
28 meteors_array = numpy.zeros((1, 4))
29 29
30 30 for thisFile in fileList:
31 31
32 #Leer
33 f1 = h5py.File(thisFile,'r')
32 # Leer
33 f1 = h5py.File(thisFile, 'r')
34 34 grp1 = f1['Data']
35 35 grp2 = grp1['data_output']
36 36 meteors1 = grp2['table0'][:]
37 meteors_array = numpy.vstack((meteors_array,meteors1))
38 #cerrar
37 meteors_array = numpy.vstack((meteors_array, meteors1))
38 # cerrar
39 39 f1.close()
40 40
41 41 meteors_array = numpy.delete(meteors_array, 0, axis=0)
42 meteors_list = [meteors_array[:,0],meteors_array[:,1],meteors_array[:,2],meteors_array[:,3]]
42 meteors_list = [meteors_array[:, 0], meteors_array[:, 1], meteors_array[:, 2], meteors_array[:, 3]]
43 43 return meteors_list
44 44
45 45 def estimateMean(offset_list):
@@ -49,16 +49,16 def estimateMean(offset_list):
49 49 axisX_off = []
50 50
51 51 for thisOffset in offset_list:
52 mean_aux = numpy.mean(thisOffset, axis = 0)
52 mean_aux = numpy.mean(thisOffset, axis=0)
53 53 mean_off.append(mean_aux)
54 axisX_off.append(numpy.array([0,numpy.size(thisOffset)]))
55 axisY_off.append(numpy.array([mean_aux,mean_aux]))
54 axisX_off.append(numpy.array([0, numpy.size(thisOffset)]))
55 axisY_off.append(numpy.array([mean_aux, mean_aux]))
56 56
57 57 return mean_off, axisY_off, axisX_off
58 58
59 59 def plotPhases(offset0, axisY0, axisX0, title):
60 60 f, axarr = plt.subplots(4, sharey=True)
61 color = ['b','g','r','c']
61 color = ['b', 'g', 'r', 'c']
62 62 # plt.grid()
63 63 for i in range(len(offset0)):
64 64 thisMeteor = offset0[i]
@@ -67,11 +67,11 def plotPhases(offset0, axisY0, axisX0, title):
67 67 thisColor = color[i]
68 68
69 69 opt = thisColor + 'o'
70 axarr[i].plot(thisMeteor,opt)
70 axarr[i].plot(thisMeteor, opt)
71 71 axarr[i].plot(thisX, thisY, thisColor)
72 72 axarr[i].set_ylabel('Offset ' + str(i))
73 73
74 plt.ylim((-180,180))
74 plt.ylim((-180, 180))
75 75 axarr[0].set_title(title + ' Offsets')
76 76 axarr[3].set_xlabel('Number of estimations')
77 77
@@ -81,11 +81,11 def filterOffsets(offsets0, stdvLimit):
81 81 offsets1 = []
82 82
83 83 for thisOffset in offsets0:
84 pstd = numpy.std(thisOffset)*stdvLimit
84 pstd = numpy.std(thisOffset) * stdvLimit
85 85 pmean = numpy.mean(thisOffset)
86 86 outlier1 = thisOffset > pmean - pstd
87 87 outlier2 = thisOffset < pmean + pstd
88 not_outlier = numpy.logical_and(outlier1,outlier2)
88 not_outlier = numpy.logical_and(outlier1, outlier2)
89 89 thisOffset1 = thisOffset[not_outlier]
90 90 offsets1.append(thisOffset1)
91 91
@@ -95,7 +95,7 def filterOffsets(offsets0, stdvLimit):
95 95
96 96 <<<<<<< HEAD
97 97 path = '/home/nanosat/Pictures/JASMET30_mp/201608/phase'
98 =======
98 == == == =
99 99 path = '/home/jespinoza/Pictures/JASMET30/201608/phase'
100 100 >>>>>>> master
101 101 stdvLimit = 0.5
@@ -111,7 +111,7 offsets1 = filterOffsets(offsets0, stdvLimit)
111 111 mean1, axisY1, axisX1 = estimateMean(offsets1)
112 112 plotPhases(offsets1, axisY1, axisX1, 'Filtered')
113 113
114 print "Original Offsets: %.2f, %.2f, %.2f, %.2f" % (mean0[0],mean0[1],mean0[2],mean0[3])
115 print "Filtered Offsets: %.2f, %.2f, %.2f, %.2f" % (mean1[0],mean1[1],mean1[2],mean1[3])
114 print "Original Offsets: %.2f, %.2f, %.2f, %.2f" % (mean0[0], mean0[1], mean0[2], mean0[3])
115 print "Filtered Offsets: %.2f, %.2f, %.2f, %.2f" % (mean1[0], mean1[1], mean1[2], mean1[3])
116 116
117 117 plt.show()
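
The filterOffsets routine above discards estimates outside mean ± stdvLimit standard deviations before the final averages are printed. A minimal standalone sketch of that filter, with synthetic offsets and a hypothetical helper name, is:

    import numpy

    def filter_by_std(values, stdvLimit=0.5):
        # Keep samples inside mean +/- stdvLimit * std, as filterOffsets does above
        pstd = numpy.std(values) * stdvLimit
        pmean = numpy.mean(values)
        not_outlier = numpy.logical_and(values > pmean - pstd, values < pmean + pstd)
        return values[not_outlier]

    # Synthetic phase offsets in degrees, two of them far from the cluster
    offsets = numpy.array([10.0, 11.0, 9.5, 170.0, 10.5, -160.0])
    print(filter_by_std(offsets))
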
@@ -5,83 +5,83 import time
5 5 import h5py
6 6 import os
7 7
8 path="/home/alex/Downloads/pedestal"
9 ext=".hdf5"
8 path = "/home/alex/Downloads/pedestal"
9 ext = ".hdf5"
10 10
11 port ="5556"
12 if len(sys.argv)>1:
11 port = "5556"
12 if len(sys.argv) > 1:
13 13 port = sys.argv[1]
14 14 int(port)
15 15
16 if len(sys.argv)>2:
16 if len(sys.argv) > 2:
17 17 port1 = sys.argv[2]
18 18 int(port1)
19 19
20 #Socket to talk to server
20 # Socket to talk to server
21 21 context = zmq.Context()
22 socket = context.socket(zmq.SUB)
22 socket = context.socket(zmq.SUB)
23 23
24 24 print("Collecting updates from weather server...")
25 socket.connect("tcp://localhost:%s"%port)
25 socket.connect("tcp://localhost:%s" % port)
26 26
27 if len(sys.argv)>2:
28 socket.connect("tcp://localhost:%s"%port1)
27 if len(sys.argv) > 2:
28 socket.connect("tcp://localhost:%s" % port1)
29 29
30 #Subscribe to zipcode, default is NYC,10001
30 # Subscribe to zipcode, default is NYC,10001
31 31 topicfilter = "10001"
32 socket.setsockopt_string(zmq.SUBSCRIBE,topicfilter)
33 #Process 5 updates
34 total_value=0
35 count= -1
36 azi= []
37 elev=[]
38 time0=[]
39 #for update_nbr in range(250):
32 socket.setsockopt_string(zmq.SUBSCRIBE, topicfilter)
33 # Process 5 updates
34 total_value = 0
35 count = -1
36 azi = []
37 elev = []
38 time0 = []
39 # for update_nbr in range(250):
40 40 while(True):
41 string= socket.recv()
42 topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec= string.split()
43 ang_azi =float(ang_azi)+1e-3*float(ang_azi_dec)
44 ang_elev =float(ang_elev)+1e-3*float(ang_elev_dec)
45 seconds =float(seconds) +1e-6*float(seconds_dec)
41 string = socket.recv()
42 topic, ang_elev, ang_elev_dec, ang_azi, ang_azi_dec, seconds, seconds_dec = string.split()
43 ang_azi = float(ang_azi) + 1e-3 * float(ang_azi_dec)
44 ang_elev = float(ang_elev) + 1e-3 * float(ang_elev_dec)
45 seconds = float(seconds) + 1e-6 * float(seconds_dec)
46 46 azi.append(ang_azi)
47 47 elev.append(ang_elev)
48 48 time0.append(seconds)
49 count +=1
49 count += 1
50 50 if count == 100:
51 timetuple=time.localtime()
51 timetuple = time.localtime()
52 52 epoc = time.mktime(timetuple)
53 #print(epoc)
54 fullpath = path + ("/" if path[-1]!="/" else "")
53 # print(epoc)
54 fullpath = path + ("/" if path[-1] != "/" else "")
55 55
56 56 if not os.path.exists(fullpath):
57 57 os.mkdir(fullpath)
58 58
59 azi_array = numpy.array(azi)
59 azi_array = numpy.array(azi)
60 60 elev_array = numpy.array(elev)
61 time0_array= numpy.array(time0)
62 pedestal_array=numpy.array([azi,elev,time0])
63 count=0
64 azi= []
65 elev=[]
66 time0=[]
67 #print(pedestal_array[0])
68 #print(pedestal_array[1])
61 time0_array = numpy.array(time0)
62 pedestal_array = numpy.array([azi, elev, time0])
63 count = 0
64 azi = []
65 elev = []
66 time0 = []
67 # print(pedestal_array[0])
68 # print(pedestal_array[1])
69 69
70 meta='PE'
71 filex="%s%4.4d%3.3d%10.4d%s"%(meta,timetuple.tm_year,timetuple.tm_yday,epoc,ext)
72 filename = os.path.join(fullpath,filex)
73 fp = h5py.File(filename,'w')
74 #print("Escribiendo HDF5...",epoc)
75 #·················· Data·....······································
70 meta = 'PE'
71 filex = "%s%4.4d%3.3d%10.4d%s" % (meta, timetuple.tm_year, timetuple.tm_yday, epoc, ext)
72 filename = os.path.join(fullpath, filex)
73 fp = h5py.File(filename, 'w')
74 # print("Escribiendo HDF5...",epoc)
75 # ·················· Data·....······································
76 76 grp = fp.create_group("Data")
77 77 dset = grp.create_dataset("azimuth" , data=pedestal_array[0])
78 78 dset = grp.create_dataset("elevacion", data=pedestal_array[1])
79 79 dset = grp.create_dataset("utc" , data=pedestal_array[2])
80 #·················· Metadata·······································
80 # ·················· Metadata·······································
81 81 grp = fp.create_group("Metadata")
82 82 dset = grp.create_dataset("utctimeInit", data=pedestal_array[2][0])
83 timeInterval = pedestal_array[2][1]-pedestal_array[2][0]
83 timeInterval = pedestal_array[2][1] - pedestal_array[2][0]
84 84 dset = grp.create_dataset("timeInterval", data=timeInterval)
85 85 fp.close()
86 86
87 #print ("Average messagedata value for topic '%s' was %dF" % ( topicfilter,total_value / update_nbr))
87 # print ("Average messagedata value for topic '%s' was %dF" % ( topicfilter,total_value / update_nbr))
@@ -9,40 +9,40 import struct
9 9 from time import sleep
10 10 import zmq
11 11 import pickle
12 port="5556"
12 port = "5556"
13 13 context = zmq.Context()
14 14 socket = context.socket(zmq.PUB)
15 socket.bind("tcp://*:%s"%port)
15 socket.bind("tcp://*:%s" % port)
16 16 ###### PARAMETROS DE ENTRADA################################
17 17 print("PEDESTAL RESOLUCION 0.01")
18 18 print("MAXIMA VELOCIDAD DEL PEDESTAL")
19 19 ang_elev = 4.12
20 ang_azi = 30
21 velocidad= input ("Ingresa velocidad:")
22 velocidad= float(velocidad)
20 ang_azi = 30
21 velocidad = input ("Ingresa velocidad:")
22 velocidad = float(velocidad)
23 23 print (velocidad)
24 24 ############################################################
25 25 sleep(3)
26 26 print("Start program")
27 27 t1 = time.time()
28 count=0
28 count = 0
29 29 while(True):
30 tmp_vuelta = int(360/velocidad)
31 t1=t1+tmp_vuelta*count
32 count= count+1
30 tmp_vuelta = int(360 / velocidad)
31 t1 = t1 + tmp_vuelta * count
32 count = count + 1
33 33 muestras_seg = 100
34 34 t2 = time.time()
35 35 for i in range(tmp_vuelta):
36 36 for j in range(muestras_seg):
37 tmp_variable = (i+j/100.0)
38 ang_azi = (tmp_variable)*float(velocidad)
39 seconds = t1+ tmp_variable
40 topic=10001
41 print ("Azim°: ","%.4f"%ang_azi,"Time:" ,"%.5f"%seconds)
42 seconds_dec=(seconds-int(seconds))*1e6
43 ang_azi_dec= (ang_azi-int(ang_azi))*1e3
44 ang_elev_dec=(ang_elev-int(ang_elev))*1e3
37 tmp_variable = (i + j / 100.0)
38 ang_azi = (tmp_variable) * float(velocidad)
39 seconds = t1 + tmp_variable
40 topic = 10001
41 print ("Azim°: ", "%.4f" % ang_azi, "Time:" , "%.5f" % seconds)
42 seconds_dec = (seconds - int(seconds)) * 1e6
43 ang_azi_dec = (ang_azi - int(ang_azi)) * 1e3
44 ang_elev_dec = (ang_elev - int(ang_elev)) * 1e3
45 45 sleep(0.0088)
46 socket.send_string("%d %d %d %d %d %d %d"%(topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec))
46 socket.send_string("%d %d %d %d %d %d %d" % (topic, ang_elev, ang_elev_dec, ang_azi, ang_azi_dec, seconds, seconds_dec))
47 47 t3 = time.time()
48 print ("Total time for 1 vuelta in Seconds",t3-t2)
48 print ("Total time for 1 vuelta in Seconds", t3 - t2)
@@ -6,37 +6,37 import os, sys
6 6 import datetime
7 7 import time
8 8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
9 # path = os.path.dirname(os.getcwd())
10 # path = os.path.dirname(path)
11 # sys.path.insert(0, path)
12 12
13 13 from schainpy.controller import Project
14 14
15 15 desc = "USRP_test"
16 16 filename = "USRP_processing.xml"
17 17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
18 controllerObj.setup(id='191', name='Test_USRP', description=desc)
19 19
20 20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21 21
22 22 #######################################################################
23 23 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
24 24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
25 # path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 26
27 27
28 #path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29 path = '/home/alex/Downloads'
28 # path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29 path = '/home/alex/Downloads'
30 30 figpath = '/home/alex/Downloads'
31 #figpath = '/home/soporte/data_hdf5_imag'
32 #remotefolder = "/home/wmaster/graficos"
31 # figpath = '/home/soporte/data_hdf5_imag'
32 # remotefolder = "/home/wmaster/graficos"
33 33 #######################################################################
34 34 ################# RANGO DE PLOTEO######################################
35 35 #######################################################################
36 36 dBmin = '30'
37 37 dBmax = '60'
38 38 xmin = '0'
39 xmax ='24'
39 xmax = '24'
40 40 ymin = '0'
41 41 ymax = '600'
42 42 #######################################################################
@@ -51,17 +51,17 yesterday = str2.strftime("%Y/%m/%d")
51 51 #######################################################################
52 52 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
53 53 path=path,
54 startDate="2020/01/01", #"2020/01/01",#today,
55 endDate= "2020/12/01", #"2020/12/30",#today,
54 startDate="2020/01/01", # "2020/01/01",#today,
55 endDate="2020/12/01", # "2020/12/30",#today,
56 56 startTime='00:00:00',
57 57 endTime='23:59:59',
58 58 delay=0,
59 #set=0,
59 # set=0,
60 60 online=0,
61 61 walk=1)
62 62
63 63 opObj11 = readUnitConfObj.addOperation(name='printInfo')
64 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
64 # opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
65 65 #######################################################################
66 66 ################ OPERACIONES DOMINIO DEL TIEMPO########################
67 67 #######################################################################
@@ -71,31 +71,31 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
71 71 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
72 72 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
73 73
74 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
75 #opObj11.addParameter(name='frequency', value='30e6', format='float')
74 # opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
75 # opObj11.addParameter(name='frequency', value='30e6', format='float')
76 76
77 #opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
78 #opObj10.addParameter(name='id', value='10', format='int')
79 ##opObj10.addParameter(name='xmin', value='0', format='int')
80 ##opObj10.addParameter(name='xmax', value='50', format='int')
81 #opObj10.addParameter(name='type', value='iq')
82 #opObj10.addParameter(name='ymin', value='-5000', format='int')
83 ##opObj10.addParameter(name='ymax', value='8500', format='int')
77 # opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
78 # opObj10.addParameter(name='id', value='10', format='int')
79 # #opObj10.addParameter(name='xmin', value='0', format='int')
80 # #opObj10.addParameter(name='xmax', value='50', format='int')
81 # opObj10.addParameter(name='type', value='iq')
82 # opObj10.addParameter(name='ymin', value='-5000', format='int')
83 # #opObj10.addParameter(name='ymax', value='8500', format='int')
84 84
85 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
86 #opObj10.addParameter(name='h0', value='-5000', format='float')
85 # opObj10 = procUnitConfObjA.addOperation(name='setH0')
86 # opObj10.addParameter(name='h0', value='-5000', format='float')
87 87
88 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
89 #opObj11.addParameter(name='window', value='1', format='int')
88 # opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
89 # opObj11.addParameter(name='window', value='1', format='int')
90 90
91 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
92 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
93 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
94 #opObj11.addParameter(name='nCode', value='1', format='int')
95 #opObj11.addParameter(name='nBaud', value='28', format='int')
91 # codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
92 # opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
93 # opObj11.addParameter(name='code', value=codigo, format='floatlist')
94 # opObj11.addParameter(name='nCode', value='1', format='int')
95 # opObj11.addParameter(name='nBaud', value='28', format='int')
96 96
97 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
98 #opObj11.addParameter(name='n', value='100', format='int')
97 # opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
98 # opObj11.addParameter(name='n', value='100', format='int')
99 99
100 100 #######################################################################
101 101 ########## OPERACIONES DOMINIO DE LA FRECUENCIA########################
@@ -103,47 +103,47 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
103 103 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
104 104 procUnitConfObjB.addParameter(name='nFFTPoints', value='100', format='int')
105 105 procUnitConfObjB.addParameter(name='nProfiles', value='100', format='int')
106 #procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
106 # procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
107 107
108 #opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
109 #opObj13.addParameter(name='mode', value='2', format='int')
108 # opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
109 # opObj13.addParameter(name='mode', value='2', format='int')
110 110
111 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
112 #opObj11.addParameter(name='n', value='60', format='float')
111 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
112 # opObj11.addParameter(name='n', value='60', format='float')
113 113 #######################################################################
114 114 ########## PLOTEO DOMINIO DE LA FRECUENCIA#############################
115 115 #######################################################################
116 #SpectraPlot
116 # SpectraPlot
117 117
118 ##opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
119 ##opObj11.addParameter(name='id', value='1', format='int')
120 ##opObj11.addParameter(name='wintitle', value='Spectra', format='str')
121 #opObj11.addParameter(name='xmin', value=-0.01, format='float')
122 #opObj11.addParameter(name='xmax', value=0.01, format='float')
123 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
124 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
125 #opObj11.addParameter(name='ymin', value=ymin, format='int')
126 #opObj11.addParameter(name='ymax', value=ymax, format='int')
127 ##opObj11.addParameter(name='showprofile', value='1', format='int')
128 ##opObj11.addParameter(name='save', value=figpath, format='str')
129 ##opObj11.addParameter(name='save_period', value=10, format='int')
118 # #opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
119 # #opObj11.addParameter(name='id', value='1', format='int')
120 # #opObj11.addParameter(name='wintitle', value='Spectra', format='str')
121 # opObj11.addParameter(name='xmin', value=-0.01, format='float')
122 # opObj11.addParameter(name='xmax', value=0.01, format='float')
123 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
124 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
125 # opObj11.addParameter(name='ymin', value=ymin, format='int')
126 # opObj11.addParameter(name='ymax', value=ymax, format='int')
127 # #opObj11.addParameter(name='showprofile', value='1', format='int')
128 # #opObj11.addParameter(name='save', value=figpath, format='str')
129 # #opObj11.addParameter(name='save_period', value=10, format='int')
130 130
131 131
132 #RTIPLOT
132 # RTIPLOT
133 133
134 ##opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
135 ##opObj11.addParameter(name='id', value='2', format='int')
136 ##opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
137 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
138 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
139 #opObj11.addParameter(name='ymin', value=ymin, format='int')
140 #opObj11.addParameter(name='ymax', value=ymax, format='int')
141 ##opObj11.addParameter(name='xmin', value=0, format='int')
142 ##opObj11.addParameter(name='xmax', value=23, format='int')
134 # #opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
135 # #opObj11.addParameter(name='id', value='2', format='int')
136 # #opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
137 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
138 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
139 # opObj11.addParameter(name='ymin', value=ymin, format='int')
140 # opObj11.addParameter(name='ymax', value=ymax, format='int')
141 # #opObj11.addParameter(name='xmin', value=0, format='int')
142 # #opObj11.addParameter(name='xmax', value=23, format='int')
143 143
144 ##opObj11.addParameter(name='showprofile', value='1', format='int')
145 ##opObj11.addParameter(name='save', value=figpath, format='str')
146 ##opObj11.addParameter(name='save_period', value=10, format='int')
144 # #opObj11.addParameter(name='showprofile', value='1', format='int')
145 # #opObj11.addParameter(name='save', value=figpath, format='str')
146 # #opObj11.addParameter(name='save_period', value=10, format='int')
147 147
148 148
149 149 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='CrossSpectraPlot', optype='other')
@@ -171,9 +171,9 procUnitConfObjB.addParameter(name='nProfiles', value='100', format='int')
171 171 #######################################################################
172 172 ############### UNIDAD DE ESCRITURA ###################################
173 173 #######################################################################
174 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
175 #opObj11.addParameter(name='path', value=wr_path)
176 #opObj11.addParameter(name='blocksPerFile', value='50', format='int')
174 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
175 # opObj11.addParameter(name='path', value=wr_path)
176 # opObj11.addParameter(name='blocksPerFile', value='50', format='int')
177 177
178 178 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
179 179 procUnitConfObjC.addOperation(name='SpectralMoments')
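
Ignoring the commented-out operations, the chain configured above is reader, VoltageProc, SpectraProc, then ParametersProc with SpectralMoments. A condensed sketch using only calls that appear in these scripts (paths and dates are placeholders) is:

    from schainpy.controller import Project

    controllerObj = Project()
    controllerObj.setup(id='191', name='Test_USRP', description='USRP_test')

    # Raw voltage reader over a fixed date window
    readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
                                                path='/home/alex/Downloads',
                                                startDate='2020/01/01',
                                                endDate='2020/12/01',
                                                startTime='00:00:00',
                                                endTime='23:59:59',
                                                online=0,
                                                walk=1)
    readUnitConfObj.addOperation(name='printInfo')

    # Time domain -> frequency domain -> parameters
    procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
    procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
    procUnitConfObjB.addParameter(name='nFFTPoints', value='100', format='int')
    procUnitConfObjB.addParameter(name='nProfiles', value='100', format='int')
    procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
    procUnitConfObjC.addOperation(name='SpectralMoments')

    controllerObj.start()
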
@@ -6,39 +6,39 import os, sys
6 6 import datetime
7 7 import time
8 8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
9 # path = os.path.dirname(os.getcwd())
10 # path = os.path.dirname(path)
11 # sys.path.insert(0, path)
12 12
13 13 from schainpy.controller import Project
14 14
15 15 desc = "USRP_test"
16 16 filename = "USRP_processing.xml"
17 17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
18 controllerObj.setup(id='191', name='Test_USRP', description=desc)
19 19
20 20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21 21
22 22 #######################################################################
23 23 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
24 24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
25 # path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 26
27 27
28 #path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29 path = '/home/alex/Downloads'
28 # path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29 path = '/home/alex/Downloads'
30 30 figpath = '/home/alex/Downloads'
31 pathfile = '/home/alex/Downloads/test_rawdata2'
31 pathfile = '/home/alex/Downloads/test_rawdata2'
32 32
33 #figpath = '/home/soporte/data_hdf5_imag'
34 #remotefolder = "/home/wmaster/graficos"
33 # figpath = '/home/soporte/data_hdf5_imag'
34 # remotefolder = "/home/wmaster/graficos"
35 35 #######################################################################
36 36 ################# RANGO DE PLOTEO######################################
37 37 #######################################################################
38 38 dBmin = '30'
39 39 dBmax = '60'
40 40 xmin = '0'
41 xmax ='24'
41 xmax = '24'
42 42 ymin = '0'
43 43 ymax = '600'
44 44 #######################################################################
@@ -53,17 +53,17 yesterday = str2.strftime("%Y/%m/%d")
53 53 #######################################################################
54 54 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
55 55 path=path,
56 startDate="2020/01/01", #"2020/01/01",#today,
57 endDate= "2020/12/01", #"2020/12/30",#today,
56 startDate="2020/01/01", # "2020/01/01",#today,
57 endDate="2020/12/01", # "2020/12/30",#today,
58 58 startTime='00:00:00',
59 59 endTime='23:59:59',
60 60 delay=0,
61 #set=0,
61 # set=0,
62 62 online=0,
63 63 walk=1)
64 64
65 65 opObj11 = readUnitConfObj.addOperation(name='printInfo')
66 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
66 # opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
67 67 #######################################################################
68 68 ################ OPERACIONES DOMINIO DEL TIEMPO########################
69 69 #######################################################################
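
The reader above hard-codes startDate and endDate, while the hunk header shows a yesterday = str2.strftime("%Y/%m/%d") helper for running on recent data. A self-contained sketch of building those strings (str1, str2 and today are assumed names) is:

    import datetime

    # YYYY/MM/DD strings in the format startDate/endDate expect
    str1 = datetime.date.today()
    str2 = str1 - datetime.timedelta(days=1)
    today = str1.strftime("%Y/%m/%d")
    yesterday = str2.strftime("%Y/%m/%d")
    print(yesterday, today)   # e.g. used as startDate=yesterday, endDate=today
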
@@ -1,19 +1,19
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2'
5 path = '/home/alex/Downloads/NEW_WR2'
6 6 figpath = path
7 desc = "Simulator Test"
7 desc = "Simulator Test"
8 8
9 controllerObj = Project()
9 controllerObj = Project()
10 10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
11 controllerObj.setup(id='10', name='Test Simulator', description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
15 FixRCP_IPP=60,
16 Tau_0=30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
@@ -26,13 +26,13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
26 26 online=0,
27 27 walk=0)
28 28
29 #opObj11 = readUnitConfObj.addOperation(name='printInfo')
29 # opObj11 = readUnitConfObj.addOperation(name='printInfo')
30 30
31 31 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
32 32
33 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
34 #opObj10.addParameter(name='channelList', value=[0,1])
35 #opObj10.addParameter(name='channelList', value='0',format='intlist')
33 # opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
34 # opObj10.addParameter(name='channelList', value=[0,1])
35 # opObj10.addParameter(name='channelList', value='0',format='intlist')
36 36
37 37 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
38 38 opObj11.addParameter(name='n', value='32', format='int')
@@ -45,10 +45,10 opObj10.addParameter(name='wintitle', value=type )
45 45 opObj10.addParameter(name='type', value=type)
46 46 '''
47 47
48 type="WeatherPower"
48 type = "WeatherPower"
49 49 opObj10 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='external')
50 #opObj10.addParameter(name='id', value='12')
51 opObj10.addParameter(name='wintitle', value=type )
50 # opObj10.addParameter(name='id', value='12')
51 opObj10.addParameter(name='wintitle', value=type)
52 52
53 53 '''
54 54 type="WeatherVeloity"
@@ -6,11 +6,11 from schainpy.controller import Project
6 6 desc = "USRP_test"
7 7 filename = "USRP_processing.xml"
8 8 controllerObj = Project()
9 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
9 controllerObj.setup(id='191', name='Test_USRP', description=desc)
10 10
11 11 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 12 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
13 path = '/home/alex/Downloads/test_rawdata'
13 path = '/home/alex/Downloads/test_rawdata'
14 14 figpath = '/home/alex/Downloads/hdf5_test'
15 15 ######################## UNIDAD DE LECTURA#############################
16 16 '''
@@ -28,8 +28,8 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
28 28 '''
29 29 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
30 30 frequency=9.345e9,
31 FixRCP_IPP= 60,
32 Tau_0 = 30,
31 FixRCP_IPP=60,
32 Tau_0=30,
33 33 AcqH0_0=0,
34 34 samples=330,
35 35 AcqDH_0=0.15,
@@ -37,16 +37,16 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
37 37 FixRCP_TXB=0.15,
38 38 Fdoppler=600.0,
39 39 Hdoppler=36,
40 Adoppler=300,#300
40 Adoppler=300, # 300
41 41 delay=0,
42 42 online=0,
43 43 walk=0,
44 44 profilesPerBlock=625,
45 45 dataBlocksPerFile=100)
46 #nTotalReadFiles=2)
46 # nTotalReadFiles=2)
47 47
48 48
49 #opObj11 = readUnitConfObj.addOperation(name='printInfo')
49 # opObj11 = readUnitConfObj.addOperation(name='printInfo')
50 50
51 51 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
52 52
@@ -56,12 +56,12 procUnitConfObjB.addParameter(name='nProfiles', value=625, format='int')
56 56
57 57 opObj11 = procUnitConfObjB.addOperation(name='removeDC')
58 58 opObj11.addParameter(name='mode', value=2)
59 #opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
60 #opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
59 # opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
60 # opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
61 61
62 procUnitConfObjC= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
62 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
63 63 procUnitConfObjC.addOperation(name='SpectralMoments')
64 #opObj11 = procUnitConfObjC.addOperation(name='PowerPlot')
64 # opObj11 = procUnitConfObjC.addOperation(name='PowerPlot')
65 65
66 66 '''
67 67 opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
@@ -73,10 +73,10 opObj11.addParameter(name='showprofile', value=1)
73 73 '''
74 74
75 75 opObj10 = procUnitConfObjC.addOperation(name='ParameterWriter')
76 opObj10.addParameter(name='path',value=figpath)
77 #opObj10.addParameter(name='mode',value=0)
78 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
79 opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
80 opObj10.addParameter(name='dataList',value='data_POW,data_DOP,data_WIDTH,data_SNR')#,format='list'
76 opObj10.addParameter(name='path', value=figpath)
77 # opObj10.addParameter(name='mode',value=0)
78 opObj10.addParameter(name='blocksPerFile', value='100', format='int')
79 opObj10.addParameter(name='metadataList', value='utctimeInit,timeInterval', format='list')
80 opObj10.addParameter(name='dataList', value='data_POW,data_DOP,data_WIDTH,data_SNR') # ,format='list'
81 81
82 82 controllerObj.start()
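
ParameterWriter above writes HDF5 files under figpath with the listed metadataList and dataList entries, but their exact group layout is not shown in this diff; a safe way to inspect one is to walk it with h5py (the file name below is hypothetical):

    import h5py

    def show(name, obj):
        # Print every group and dataset path, with shapes for datasets
        if isinstance(obj, h5py.Dataset):
            print(name, obj.shape, obj.dtype)
        else:
            print(name + "/")

    # Hypothetical output file written by ParameterWriter
    with h5py.File("/home/alex/Downloads/d2020194/D2020194000000.hdf5", "r") as fp:
        fp.visititems(show)
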
@@ -1,19 +1,19
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
6 6 figpath = path
7 desc = "Simulator Test"
7 desc = "Simulator Test"
8 8
9 controllerObj = Project()
9 controllerObj = Project()
10 10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
11 controllerObj.setup(id='10', name='Test Simulator', description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
15 FixRCP_IPP=60,
16 Tau_0=30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
@@ -27,12 +27,12 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
27 27 walk=0,
28 28 nTotalReadFiles=4)
29 29
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
31 31
32 32 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
33 33
34 34 opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
35 opObj10.addParameter(name='channelList', value=[0,1])
35 opObj10.addParameter(name='channelList', value=[0, 1])
36 36
37 37 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
38 38 procUnitConfObjB.addParameter(name='nFFTPoints', value=200, format='int')
@@ -41,8 +41,8 procUnitConfObjB.addParameter(name='nProfiles', value=200, format='int')
41 41 opObj11 = procUnitConfObjB.addOperation(name='removeDC')
42 42 opObj11.addParameter(name='mode', value=2)
43 43
44 #opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
45 #opObj11.addParameter(name='n', value='20', format='int')
44 # opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
45 # opObj11.addParameter(name='n', value='20', format='int')
46 46
47 47 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
48 48 procUnitConfObjC.addOperation(name='SpectralMoments')
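
The SpectralMoments operation used above corresponds to the standard zeroth, first and second moments of a Doppler spectrum; a generic numpy sketch of those estimates (not schainpy's implementation) is:

    import numpy

    def spectral_moments(spec, vel):
        # spec: power spectrum in linear units; vel: velocity axis of the same length
        spec = numpy.asarray(spec, dtype=float)
        vel = numpy.asarray(vel, dtype=float)
        power = spec.sum()                                                  # zeroth moment
        mean_vel = (spec * vel).sum() / power                               # first moment
        width = numpy.sqrt((spec * (vel - mean_vel) ** 2).sum() / power)    # second moment
        return power, mean_vel, width

    vel = numpy.linspace(-10, 10, 200)
    spec = numpy.exp(-0.5 * ((vel - 2.0) / 1.5) ** 2)   # synthetic Gaussian line
    print(spectral_moments(spec, vel))                  # mean near 2.0, width near 1.5
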
@@ -1,19 +1,19
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
6 6 figpath = path
7 desc = "Simulator Test"
7 desc = "Simulator Test"
8 8
9 controllerObj = Project()
9 controllerObj = Project()
10 10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
11 controllerObj.setup(id='10', name='Test Simulator', description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
15 FixRCP_IPP=60,
16 Tau_0=30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
@@ -27,14 +27,14 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
27 27 walk=0,
28 28 nTotalReadFiles=4)
29 29
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
31 31 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
32 32
33 33 opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
34 34 opObj10.addParameter(name='channelList', value=[0])
35 35
36 36 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
37 opObj11.addParameter(name='n', value='32', format='int')#10
38 #opObj11.addParameter(name='removeDC', value=1, format='int')
37 opObj11.addParameter(name='n', value='32', format='int') # 10
38 # opObj11.addParameter(name='removeDC', value=1, format='int')
39 39
40 40 controllerObj.start()
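
PulsePairVoltage above groups n consecutive profiles into one estimate; the underlying pulse-pair idea is mean power plus the phase of the lag-one autocorrelation. A generic numpy sketch (not schainpy's code; the wavelength and IPP values are illustrative, and the sign convention may differ) is:

    import numpy

    def pulse_pair(volts, wavelength=0.032, ipp=60e-6):
        # volts: complex voltages of one range gate over n profiles
        volts = numpy.asarray(volts)
        power = numpy.mean(numpy.abs(volts) ** 2)                            # cf. dataPP_POW
        r1 = numpy.mean(volts[1:] * numpy.conj(volts[:-1]))                  # lag-1 autocorrelation
        velocity = -wavelength / (4.0 * numpy.pi * ipp) * numpy.angle(r1)    # cf. dataPP_DOP
        return power, velocity

    rng = numpy.random.default_rng(0)
    n = 32
    signal = numpy.exp(1j * 2 * numpy.pi * 0.05 * numpy.arange(n))   # synthetic Doppler shift
    volts = signal + 0.1 * (rng.standard_normal(n) + 1j * rng.standard_normal(n))
    print(pulse_pair(volts))
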
@@ -1,20 +1,20
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2'
6 pathfile = '/home/alex/Downloads/test_rawdata'
5 path = '/home/alex/Downloads/NEW_WR2'
6 pathfile = '/home/alex/Downloads/test_rawdata'
7 7 figpath = path
8 desc = "Simulator Test"
8 desc = "Simulator Test"
9 9
10 controllerObj = Project()
10 controllerObj = Project()
11 11
12 controllerObj.setup(id='10',name='Test Simulator',description=desc)
12 controllerObj.setup(id='10', name='Test Simulator', description=desc)
13 13
14 14 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
15 15 frequency=9.345e9,
16 FixRCP_IPP= 60,
17 Tau_0 = 30.0,
16 FixRCP_IPP=60,
17 Tau_0=30.0,
18 18 AcqH0_0=0,
19 19 samples=330,
20 20 AcqDH_0=0.15,
@@ -27,11 +27,11 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
27 27 online=0,
28 28 walk=0,
29 29 nTotalReadFiles=3)
30 #opObj11 = readUnitConfObj.addOperation(name='printInfo')
30 # opObj11 = readUnitConfObj.addOperation(name='printInfo')
31 31 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
32 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
33 #opObj10.addParameter(name='channelList', value=[0,1])
34 #opObj10.addParameter(name='channelList', value='0',format='intlist')
32 # opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
33 # opObj10.addParameter(name='channelList', value=[0,1])
34 # opObj10.addParameter(name='channelList', value='0',format='intlist')
35 35 opObj12 = procUnitConfObjA.addOperation(name='VoltageWriter', optype='other')
36 36 opObj12.addParameter(name='path', value=pathfile)
37 37 opObj12.addParameter(name='blocksPerFile', value='120', format='int')
@@ -6,17 +6,17 from schainpy.controller import Project
6 6 desc = "USRP_test"
7 7 filename = "USRP_processing.xml"
8 8 controllerObj = Project()
9 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
9 controllerObj.setup(id='191', name='Test_USRP', description=desc)
10 10
11 11 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 12 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
13 path = '/home/alex/Downloads/test_rawdata'
13 path = '/home/alex/Downloads/test_rawdata'
14 14 figpath = '/home/alex/Downloads'
15 15 ################# RANGO DE PLOTEO######################################
16 16 dBmin = '30'
17 17 dBmax = '60'
18 18 xmin = '0'
19 xmax ='24'
19 xmax = '24'
20 20 ymin = '0'
21 21 ymax = '600'
22 22 ########################FECHA##########################################
@@ -27,17 +27,17 yesterday = str2.strftime("%Y/%m/%d")
27 27 ######################## UNIDAD DE LECTURA#############################
28 28 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
29 29 path=path,
30 startDate="2020/01/01", #"2020/01/01",#today,
31 endDate= "2020/12/01", #"2020/12/30",#today,
30 startDate="2020/01/01", # "2020/01/01",#today,
31 endDate="2020/12/01", # "2020/12/30",#today,
32 32 startTime='00:00:00',
33 33 endTime='23:59:59',
34 34 delay=0,
35 #set=0,
35 # set=0,
36 36 online=0,
37 37 walk=1)
38 38
39 39 opObj11 = readUnitConfObj.addOperation(name='printInfo')
40 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
40 # opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
41 41 #######################################################################
42 42 ################ OPERACIONES DOMINIO DEL TIEMPO########################
43 43 #######################################################################
@@ -6,21 +6,21 from schainpy.controller import Project
6 6 desc = "USRP_test"
7 7 filename = "USRP_processing.xml"
8 8 controllerObj = Project()
9 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
9 controllerObj.setup(id='191', name='Test_USRP', description=desc)
10 10
11 11 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 12 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
13 path = '/home/alex/Downloads/test_rawdata'
13 path = '/home/alex/Downloads/test_rawdata'
14 14 figpath = '/home/alex/Downloads'
15 15 ######################## UNIDAD DE LECTURA#############################
16 16 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
17 17 path=path,
18 startDate="2020/01/01", #"2020/01/01",#today,
19 endDate= "2020/12/01", #"2020/12/30",#today,
18 startDate="2020/01/01", # "2020/01/01",#today,
19 endDate="2020/12/01", # "2020/12/30",#today,
20 20 startTime='00:00:00',
21 21 endTime='23:59:59',
22 22 delay=0,
23 #set=0,
23 # set=0,
24 24 online=0,
25 25 walk=1)
26 26
@@ -28,9 +28,9 opObj11 = readUnitConfObj.addOperation(name='printInfo')
28 28
29 29 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
30 30
31 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
32 #opObj10.addParameter(name='channelList', value=[0,1])
33 #opObj10.addParameter(name='channelList', value='0',format='intlist')
31 # opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
32 # opObj10.addParameter(name='channelList', value=[0,1])
33 # opObj10.addParameter(name='channelList', value='0',format='intlist')
34 34
35 35 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
36 36 opObj11.addParameter(name='n', value='16', format='int')
@@ -6,21 +6,21 from schainpy.controller import Project
6 6 desc = "USRP_test"
7 7 filename = "USRP_processing.xml"
8 8 controllerObj = Project()
9 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
9 controllerObj.setup(id='191', name='Test_USRP', description=desc)
10 10
11 11 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 12 ######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
13 path = '/home/alex/Downloads/test_rawdata'
13 path = '/home/alex/Downloads/test_rawdata'
14 14 figpath = '/home/alex/Downloads'
15 15 ######################## UNIDAD DE LECTURA#############################
16 16 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
17 17 path=path,
18 startDate="2020/01/01", #"2020/01/01",#today,
19 endDate= "2020/12/01", #"2020/12/30",#today,
18 startDate="2020/01/01", # "2020/01/01",#today,
19 endDate="2020/12/01", # "2020/12/30",#today,
20 20 startTime='00:00:00',
21 21 endTime='23:59:59',
22 22 delay=0,
23 #set=0,
23 # set=0,
24 24 online=0,
25 25 walk=1)
26 26
@@ -40,14 +40,14 procUnitConfObjB.addParameter(name='nProfiles', value=64, format='int')
40 40 opObj11 = procUnitConfObjB.addOperation(name='removeDC')
41 41 opObj11.addParameter(name='mode', value=2)
42 42
43 #opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
44 #opObj11.addParameter(name='n', value='20', format='int')
43 # opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
44 # opObj11.addParameter(name='n', value='20', format='int')
45 45
46 46 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
47 47 procUnitConfObjC.addOperation(name='SpectralMoments')
48 48 opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
49 #opObj11.addParameter(name='xmin', value=14)
49 # opObj11.addParameter(name='xmin', value=14)
50 50 opObj11.addParameter(name='xmax', value=6)
51 #opObj11.addParameter(name='save', value=figpath)
51 # opObj11.addParameter(name='save', value=figpath)
52 52 opObj11.addParameter(name='showprofile', value=1)
53 53 controllerObj.start()
@@ -1,19 +1,19
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
6 6 figpath = path
7 desc = "Simulator Test"
7 desc = "Simulator Test"
8 8
9 controllerObj = Project()
9 controllerObj = Project()
10 10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
11 controllerObj.setup(id='10', name='Test Simulator', description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
15 FixRCP_IPP=60,
16 Tau_0=30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
@@ -27,7 +27,7 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
27 27 walk=0,
28 28 nTotalReadFiles=3)
29 29
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
30 opObj11 = readUnitConfObj.addOperation(name='printInfo')
31 31
32 32 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
33 33
@@ -41,27 +41,27 procUnitConfObjB.addParameter(name='nProfiles', value=300, format='int')
41 41 opObj11 = procUnitConfObjB.addOperation(name='removeDC')
42 42 opObj11.addParameter(name='mode', value=2)
43 43
44 #opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
45 #opObj11.addParameter(name='n', value='10', format='int')
44 # opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
45 # opObj11.addParameter(name='n', value='10', format='int')
46 46
47 #opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
48 #opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
49 #opObj11.addParameter(name='xmin', value=13)
50 #opObj11.addParameter(name='xmax', value=.4)
51 #opObj11 = procUnitConfObjB.addOperation(name='NoisePlot')
52 #opObj11.addParameter(name='xmin', value=13)
53 #opObj11.addParameter(name='xmax', value=14)
47 # opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
48 # opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
49 # opObj11.addParameter(name='xmin', value=13)
50 # opObj11.addParameter(name='xmax', value=.4)
51 # opObj11 = procUnitConfObjB.addOperation(name='NoisePlot')
52 # opObj11.addParameter(name='xmin', value=13)
53 # opObj11.addParameter(name='xmax', value=14)
54 54
55 55
56 56 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
57 57 procUnitConfObjC.addOperation(name='SpectralMoments')
58 58
59 59 opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
60 #opObj11.addParameter(name='xmin', value=14)
61 #opObj11.addParameter(name='xmax', value=15)
62 #opObj11.addParameter(name='save', value=figpath)
60 # opObj11.addParameter(name='xmin', value=14)
61 # opObj11.addParameter(name='xmax', value=15)
62 # opObj11.addParameter(name='save', value=figpath)
63 63 opObj11.addParameter(name='showprofile', value=1)
64 #opObj11.addParameter(name='save_period', value=10)
64 # opObj11.addParameter(name='save_period', value=10)
65 65 '''
66 66 opObj11 = procUnitConfObjC.addOperation(name='SnrPlot')
67 67 opObj11.addParameter(name='zmin', value=-10)
@@ -73,8 +73,8 opObj11.addParameter(name='zmax', value=40)
73 73 opObj11 = procUnitConfObjC.addOperation(name='SpectralWidthPlot')
74 74 opObj11.addParameter(name='xmin', value=5)
75 75 opObj11.addParameter(name='xmax', value=6)
76 #opObj11.addParameter(name='save', value=figpath)
77 #opObj11.addParameter(name='showprofile', value=1)
78 #opObj11.addParameter(name='save_period', value=10)
76 # opObj11.addParameter(name='save', value=figpath)
77 # opObj11.addParameter(name='showprofile', value=1)
78 # opObj11.addParameter(name='save_period', value=10)
79 79
80 80 controllerObj.start()
@@ -1,19 +1,19
1 import os,sys
1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
6 6 figpath = path
7 desc = "Simulator Test"
7 desc = "Simulator Test"
8 8
9 controllerObj = Project()
9 controllerObj = Project()
10 10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
11 controllerObj.setup(id='10', name='Test Simulator', description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
15 FixRCP_IPP=60,
16 Tau_0=30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
@@ -21,12 +21,12 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
21 21 FixRCP_TXB=0.15,
22 22 Fdoppler=600.0,
23 23 Hdoppler=36,
24 Adoppler=300,#300
24 Adoppler=300, # 300
25 25 delay=0,
26 26 online=0,
27 27 walk=0,
28 28 profilesPerBlock=625,
29 dataBlocksPerFile=100)#,#nTotalReadFiles=2)
29 dataBlocksPerFile=100) # ,#nTotalReadFiles=2)
30 30 '''
31 31 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
32 32 path=path,
@@ -39,35 +39,35 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
39 39 online=0,
40 40 walk=1)
41 41 '''
42 opObj11 = readUnitConfObj.addOperation(name='printInfo')
42 opObj11 = readUnitConfObj.addOperation(name='printInfo')
43 43
44 44 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
45 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
46 #opObj11.addParameter(name='n', value='10', format='int')
45 # opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
46 # opObj11.addParameter(name='n', value='10', format='int')
47 47
48 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
49 #opObj10.addParameter(name='channelList', value=[0])
48 # opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
49 # opObj10.addParameter(name='channelList', value=[0])
50 50 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
51 opObj11.addParameter(name='n', value='625', format='int')#10
51 opObj11.addParameter(name='n', value='625', format='int') # 10
52 52 opObj11.addParameter(name='removeDC', value=1, format='int')
53 53
54 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
55 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
54 # opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
55 # opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
56 56
57 57
58 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
59 #opObj11.addParameter(name='xmax', value=8)
58 # opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
59 # opObj11.addParameter(name='xmax', value=8)
60 60
61 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
61 # opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
62 62
63 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
63 procUnitConfObjB = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjA.getId())
64 64
65 65
66 66 opObj10 = procUnitConfObjB.addOperation(name='ParameterWriter')
67 opObj10.addParameter(name='path',value=figpath)
68 #opObj10.addParameter(name='mode',value=0)
69 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
70 opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
71 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH')#,format='list'
67 opObj10.addParameter(name='path', value=figpath)
68 # opObj10.addParameter(name='mode',value=0)
69 opObj10.addParameter(name='blocksPerFile', value='100', format='int')
70 opObj10.addParameter(name='metadataList', value='utctimeInit,timeInterval', format='list')
71 opObj10.addParameter(name='dataList', value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH') # ,format='list'
72 72
73 73 controllerObj.start()
@@ -1,4 +1,4
1 import os,numpy,h5py
1 import os, numpy, h5py
2 2 from shutil import copyfile
3 3
4 4 def isNumber(str):
@@ -8,23 +8,23 def isNumber(str):
8 8 except:
9 9 return False
10 10
11 def getfirstFilefromPath(path,meta,ext):
11 def getfirstFilefromPath(path, meta, ext):
12 12 validFilelist = []
13 fileList = os.listdir(path)
14 if len(fileList)<1:
13 fileList = os.listdir(path)
14 if len(fileList) < 1:
15 15 return None
16 16 # meta 1234 567 8-18 BCDE
17 17 # H,D,PE YYYY DDD EPOC .ext
18 18
19 19 for thisFile in fileList:
20 if meta =="PE":
20 if meta == "PE":
21 21 try:
22 number= int(thisFile[len(meta)+7:len(meta)+17])
22 number = int(thisFile[len(meta) + 7:len(meta) + 17])
23 23 except:
24 24 print("There is a file or folder with different format")
25 25 if meta == "D":
26 26 try:
27 number= int(thisFile[8:11])
27 number = int(thisFile[8:11])
28 28 except:
29 29 print("There is a file or folder with different format")
30 30
@@ -34,129 +34,129 def getfirstFilefromPath(path,meta,ext):
34 34 continue
35 35 validFilelist.sort()
36 36 validFilelist.append(thisFile)
37 if len(validFilelist)>0:
38 validFilelist = sorted(validFilelist,key=str.lower)
37 if len(validFilelist) > 0:
38 validFilelist = sorted(validFilelist, key=str.lower)
39 39 return validFilelist
40 40 return None
41 41
42 def gettimeutcfromDirFilename(path,file):
43 dir_file= path+"/"+file
44 fp = h5py.File(dir_file,'r')
45 epoc = fp['Metadata'].get('utctimeInit')[()]
42 def gettimeutcfromDirFilename(path, file):
43 dir_file = path + "/" + file
44 fp = h5py.File(dir_file, 'r')
45 epoc = fp['Metadata'].get('utctimeInit')[()]
46 46 fp.close()
47 47 return epoc
48 48
49 def getDatavaluefromDirFilename(path,file,value):
50 dir_file= path+"/"+file
51 fp = h5py.File(dir_file,'r')
52 array = fp['Data'].get(value)[()]
49 def getDatavaluefromDirFilename(path, file, value):
50 dir_file = path + "/" + file
51 fp = h5py.File(dir_file, 'r')
52 array = fp['Data'].get(value)[()]
53 53 fp.close()
54 54 return array
55 55
56 56
57 #·········· Velocidad de Pedestal·················
57 # ·········· Velocidad de Pedestal·················
58 58 w = input ("Ingresa velocidad de Pedestal: ")
59 59 w = 4
60 60 w = float(w)
61 #·········· Resolucion minimo en grados···········
61 # ·········· Resolucion minimo en grados···········
62 62 alfa = input ("Ingresa resolucion minima en grados: ")
63 63 alfa = 1
64 64 alfa = float(alfa)
65 #·········· IPP del Experimento ··················
66 IPP = input ("Ingresa el IPP del experimento: ")
67 IPP = 0.0004
68 IPP = float(IPP)
69 #·········· MODE ··················
65 # ·········· IPP del Experimento ··················
66 IPP = input ("Ingresa el IPP del experimento: ")
67 IPP = 0.0004
68 IPP = float(IPP)
69 # ·········· MODE ··················
70 70 mode = input ("Ingresa el MODO del experimento T or F: ")
71 71 mode = "T"
72 72 mode = str(mode)
73 73
74 #·········· Tiempo en generar la resolucion min···
75 #············ MCU ·· var_ang = w * (var_tiempo)···
76 var_tiempo = alfa/w
77 #·········· Tiempo Equivalente en perfiles········
78 #·········· var_tiempo = IPP * ( num_perfiles )·
79 num_perfiles = int(var_tiempo/IPP)
74 # ·········· Tiempo en generar la resolucion min···
75 # ············ MCU ·· var_ang = w * (var_tiempo)···
76 var_tiempo = alfa / w
77 # ·········· Tiempo Equivalente en perfiles········
78 # ·········· var_tiempo = IPP * ( num_perfiles )·
79 num_perfiles = int(var_tiempo / IPP)
80 80
81 #··········DATA PEDESTAL··························
81 # ··········DATA PEDESTAL··························
82 82 dir_pedestal = "/home/alex/Downloads/pedestal"
83 #·········· DATA ADQ······························
84 if mode=="T":
85 dir_adq = "/home/alex/Downloads/hdf5_testPP/d2020194" # Time domain
83 # ·········· DATA ADQ······························
84 if mode == "T":
85 dir_adq = "/home/alex/Downloads/hdf5_testPP/d2020194" # Time domain
86 86 else:
87 dir_adq = "/home/alex/Downloads/hdf5_test/d2020194" # Frequency domain
87 dir_adq = "/home/alex/Downloads/hdf5_test/d2020194" # Frequency domain
88 88
89 print( "Velocidad angular :", w)
90 print( "Resolucion minima en grados :", alfa)
91 print( "Numero de perfiles equivalente:", num_perfiles)
92 print( "Mode :", mode)
89 print("Velocidad angular :", w)
90 print("Resolucion minima en grados :", alfa)
91 print("Numero de perfiles equivalente:", num_perfiles)
92 print("Mode :", mode)
93 93
94 #············ First File·············
95 list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
96 list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
94 # ············ First File·············
95 list_pedestal = getfirstFilefromPath(path=dir_pedestal, meta="PE", ext=".hdf5")
96 list_adq = getfirstFilefromPath(path=dir_adq, meta="D", ext=".hdf5")
97 97
98 #············ utc time ··············
99 utc_pedestal= gettimeutcfromDirFilename(path=dir_pedestal,file=list_pedestal[0])
100 utc_adq = gettimeutcfromDirFilename(path=dir_adq ,file=list_adq[0])
98 # ············ utc time ··············
99 utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal, file=list_pedestal[0])
100 utc_adq = gettimeutcfromDirFilename(path=dir_adq, file=list_adq[0])
101 101
102 print("utc_pedestal :",utc_pedestal)
103 print("utc_adq :",utc_adq)
104 #·············Relacion: utc_adq (+/-) var_tiempo*nro_file= utc_pedestal
105 time_Interval_p = 0.01
106 n_perfiles_p = 100
107 if utc_adq>utc_pedestal:
108 nro_file = int((int(utc_adq) - int(utc_pedestal))/(time_Interval_p*n_perfiles_p))
109 ff_pedestal = list_pedestal[nro_file]
110 utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal,file=ff_pedestal)
111 nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
112 if utc_adq >utc_pedestal:
113 ff_pedestal = ff_pedestal
102 print("utc_pedestal :", utc_pedestal)
103 print("utc_adq :", utc_adq)
104 # ·············Relacion: utc_adq (+/-) var_tiempo*nro_file= utc_pedestal
105 time_Interval_p = 0.01
106 n_perfiles_p = 100
107 if utc_adq > utc_pedestal:
108 nro_file = int((int(utc_adq) - int(utc_pedestal)) / (time_Interval_p * n_perfiles_p))
109 ff_pedestal = list_pedestal[nro_file]
110 utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal, file=ff_pedestal)
111 nro_key_p = int((utc_adq - utc_pedestal) / time_Interval_p)
112 if utc_adq > utc_pedestal:
113 ff_pedestal = ff_pedestal
114 114 else:
115 nro_file = nro_file-1
116 ff_pedestal = list_pedestal[nro_file]
117 angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
118 nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
119 print("nro_file :",nro_file)
120 print("name_file :",ff_pedestal)
121 print("utc_pedestal_file :",utc_pedestal)
122 print("nro_key_p :",nro_key_p)
123 print("utc_pedestal_init :",utc_pedestal+nro_key_p*time_Interval_p)
124 print("angulo_array :",angulo[nro_key_p])
125 #4+25+25+25+21
126 #while True:
127 list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
128 list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
115 nro_file = nro_file - 1
116 ff_pedestal = list_pedestal[nro_file]
117 angulo = getDatavaluefromDirFilename(path=dir_pedestal, file=ff_pedestal, value="azimuth")
118 nro_key_p = int((utc_adq - utc_pedestal) / time_Interval_p)
119 print("nro_file :", nro_file)
120 print("name_file :", ff_pedestal)
121 print("utc_pedestal_file :", utc_pedestal)
122 print("nro_key_p :", nro_key_p)
123 print("utc_pedestal_init :", utc_pedestal + nro_key_p * time_Interval_p)
124 print("angulo_array :", angulo[nro_key_p])
125 # 4+25+25+25+21
126 # while True:
127 list_pedestal = getfirstFilefromPath(path=dir_pedestal, meta="PE", ext=".hdf5")
128 list_adq = getfirstFilefromPath(path=dir_adq, meta="D", ext=".hdf5")
129 129
130 nro_file = nro_file #10
130 nro_file = nro_file # 10
131 131 nro_key_perfil = nro_key_p
132 blocksPerFile = 100
133 wr_path = "/home/alex/Downloads/hdf5_wr/"
132 blocksPerFile = 100
133 wr_path = "/home/alex/Downloads/hdf5_wr/"
134 134 # Lectura de archivos de adquisicion para adicion de azimuth
135 135 for thisFile in range(len(list_adq)):
136 print("thisFileAdq",thisFile)
137 angulo_adq = numpy.zeros(blocksPerFile)
138 tmp = 0
136 print("thisFileAdq", thisFile)
137 angulo_adq = numpy.zeros(blocksPerFile)
138 tmp = 0
139 139 for j in range(blocksPerFile):
140 iterador = nro_key_perfil + 25*(j-tmp)
140 iterador = nro_key_perfil + 25 * (j - tmp)
141 141 if iterador < n_perfiles_p:
142 142 nro_file = nro_file
143 143 else:
144 nro_file = nro_file+1
145 tmp = j
144 nro_file = nro_file + 1
145 tmp = j
146 146 iterador = nro_key_perfil
147 ff_pedestal = list_pedestal[nro_file]
148 angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
149 angulo_adq[j]= angulo[iterador]
150 copyfile(dir_adq+"/"+list_adq[thisFile],wr_path+list_adq[thisFile])
151 fp = h5py.File(wr_path+list_adq[thisFile],'a')
152 grp = fp.create_group("Pedestal")
153 dset = grp.create_dataset("azimuth" , data=angulo_adq)
147 ff_pedestal = list_pedestal[nro_file]
148 angulo = getDatavaluefromDirFilename(path=dir_pedestal, file=ff_pedestal, value="azimuth")
149 angulo_adq[j] = angulo[iterador]
150 copyfile(dir_adq + "/" + list_adq[thisFile], wr_path + list_adq[thisFile])
151 fp = h5py.File(wr_path + list_adq[thisFile], 'a')
152 grp = fp.create_group("Pedestal")
153 dset = grp.create_dataset("azimuth", data=angulo_adq)
154 154 fp.close()
155 print("Angulo",angulo_adq)
156 print("Angulo",len(angulo_adq))
157 nro_key_perfil=iterador + 25
158 if nro_key_perfil< n_perfiles_p:
155 print("Angulo", angulo_adq)
156 print("Angulo", len(angulo_adq))
157 nro_key_perfil = iterador + 25
158 if nro_key_perfil < n_perfiles_p:
159 159 nro_file = nro_file
160 160 else:
161 nro_file = nro_file+1
162 nro_key_perfil= nro_key_p
161 nro_file = nro_file + 1
162 nro_key_perfil = nro_key_p
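
The matching above maps an acquisition timestamp onto a pedestal file (100 samples per file) and a sample inside it (0.01 s per sample); in the script the file start time is re-read from the selected file's utctimeInit, but the arithmetic alone reduces to:

    # Each pedestal file holds n_perfiles_p samples spaced time_Interval_p seconds apart
    time_Interval_p = 0.01
    n_perfiles_p = 100

    def locate_pedestal_sample(utc_adq, utc_pedestal_first):
        # File index holding utc_adq, and the sample index inside that file
        seconds_per_file = time_Interval_p * n_perfiles_p
        nro_file = int((int(utc_adq) - int(utc_pedestal_first)) / seconds_per_file)
        utc_file_start = utc_pedestal_first + nro_file * seconds_per_file
        nro_key_p = int((utc_adq - utc_file_start) / time_Interval_p)
        return nro_file, nro_key_p

    print(locate_pedestal_sample(1594833612.37, 1594833600.0))   # file and sample indices
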
@@ -25,13 +25,13 class build_ext(_build_ext):
25 25 self.include_dirs.append(numpy.get_include())
26 26
27 27 setup(
28 name = "schainpy",
29 version = __version__,
30 description = DOCLINES[0],
31 long_description = "\n".join(DOCLINES[2:]),
32 url = "https://github.com/JRO-Peru/schainpy",
33 author = "Jicamarca Radio Observatory",
34 author_email = "jro-developers@jro.igp.gob.pe",
28 name="schainpy",
29 version=__version__,
30 description=DOCLINES[0],
31 long_description="\n".join(DOCLINES[2:]),
32 url="https://github.com/JRO-Peru/schainpy",
33 author="Jicamarca Radio Observatory",
34 author_email="jro-developers@jro.igp.gob.pe",
35 35 license="BSD-3-Clause",
36 36 classifiers=[
37 37 "Development Status :: 4 - Beta",
@@ -49,7 +49,7 setup(
49 49 "Programming Language :: Python :: 3.8",
50 50 "Topic :: Scientific/Engineering",
51 51 ],
52 packages = {
52 packages={
53 53 'schainpy',
54 54 'schainpy.model',
55 55 'schainpy.model.data',
@@ -61,22 +61,22 setup(
61 61 'schainpy.gui',
62 62 'schainpy.cli',
63 63 },
64 package_data = {'': ['schain.conf.template'],
64 package_data={'': ['schain.conf.template'],
65 65 'schainpy.files': ['*.oga']
66 66 },
67 include_package_data = False,
68 scripts = ['schainpy/gui/schainGUI'],
69 entry_points = {
67 include_package_data=False,
68 scripts=['schainpy/gui/schainGUI'],
69 entry_points={
70 70 'console_scripts': [
71 71 'schain = schainpy.cli.cli:main',
72 72 ],
73 73 },
74 cmdclass = {'build_ext': build_ext},
74 cmdclass={'build_ext': build_ext},
75 75 ext_modules=[
76 76 Extension("schainpy.model.data._noise", ["schainc/_noise.c"]),
77 77 ],
78 setup_requires = ["numpy"],
79 install_requires = [
78 setup_requires=["numpy"],
79 install_requires=[
80 80 "scipy",
81 81 "h5py",
82 82 "matplotlib",