'''
Created on Jul 3, 2014

@author: roj-idl71
'''

import os
import datetime
import numpy

try:
    from gevent import sleep
except:
    from time import sleep

from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
from schainpy.model.data.jrodata import Voltage
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation

try:
    import digital_rf_hdf5
except:
    print 'You should install "digital_rf_hdf5" module if you want to read USRP data'

class USRPReader(ProcessingUnit):
    '''
    ProcessingUnit that reads USRP voltage data stored in Digital RF (HDF5)
    format and delivers it as schainpy Voltage objects, one profile at a time.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor
        '''
        ProcessingUnit.__init__(self, **kwargs)

        self.dataOut = Voltage()
        self.__printInfo = True
        self.__flagDiscontinuousBlock = False
        self.__bufferIndex = 9999999

        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None

    def __getCurrentSecond(self):

        return self.__thisUnixSample/self.__sample_rate

    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
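
    # Note (illustrative, not from the original sources): digital_rf addresses data
    # by absolute sample index, i.e. unix_time * sample_rate. At an assumed sample
    # rate of 1 MHz, unix second 1404345600 maps to sample index 1.4043456e+15;
    # thisSecond simply inverts that relation for the current read position.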

    def __setFileHeader(self):
        '''
        Initialize every header parameter of the dataOut object (no data is set here).
        '''
        ippSeconds = 1.0*self.__nSamples/self.__sample_rate

        nProfiles = 1.0/ippSeconds  # Number of profiles per second
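
        # Worked example (assumed numbers, not read from any file): with
        # nSamples = 1000 and a 1 MHz sample rate, ippSeconds = 1000/1e6 = 1e-3 s
        # and nProfiles = 1/1e-3 = 1000 profiles per second.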

        self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
                                                                      txA=0,
                                                                      txB=0,
                                                                      nWindows=1,
                                                                      nHeights=self.__nSamples,
                                                                      firstHeight=self.__firstHeigth,
                                                                      deltaHeight=self.__deltaHeigth,
                                                                      codeType=self.__codeType,
                                                                      nCode=self.__nCode, nBaud=self.__nBaud,
                                                                      code=self.__code)

        self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                    nProfiles=nProfiles,
                                                    nChannels=len(self.__channelList),
                                                    adcResolution=14)

        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])

        # self.dataOut.nChannels = 0

        # self.dataOut.nHeights = 0

        self.dataOut.nProfiles = nProfiles

        self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype=numpy.float)*self.__deltaHeigth

        self.dataOut.channelList = self.__channelList

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        # self.dataOut.channelIndexList = None

        self.dataOut.flagNoData = True

        # Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        # timezone as in jroheader: difference in minutes between UTC and local time
        self.dataOut.timeZone = self.__timezone/60
        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        self.dataOut.nCohInt = 1

        self.dataOut.flagDecodeData = False  # the data is assumed to be already decoded

        self.dataOut.flagDeflipData = False  # the data is assumed to be without flip

        self.dataOut.flagShiftFFT = False

        self.dataOut.ippSeconds = ippSeconds

        # Time interval between profiles
        # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online

    def findDatafiles(self, path, startDate=None, endDate=None):

        if not os.path.isdir(path):
            return []

        try:
            digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
        except:
            digitalReadObj = digital_rf_hdf5.read_hdf5(path)

        channelNameList = digitalReadObj.get_channels()

        if not channelNameList:
            return []

        metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])

        sample_rate = metadata_dict['sample_rate'][0]
        this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])

        try:
            timezone = this_metadata_file['timezone'].value
        except:
            timezone = 0

        startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone

        startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
        endDatetime = datetime.datetime.utcfromtimestamp(endUTCSecond)

        if not startDate:
            startDate = startDatetime.date()

        if not endDate:
            endDate = endDatetime.date()

        dateList = []

        thisDatetime = startDatetime

        while thisDatetime <= endDatetime:

            thisDate = thisDatetime.date()

            if thisDate < startDate:
                # advance before skipping, otherwise this loop would never terminate
                thisDatetime += datetime.timedelta(1)
                continue

            if thisDate > endDate:
                break

            dateList.append(thisDate)
            thisDatetime += datetime.timedelta(1)

        return dateList
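
    # Illustrative call (the path and dates below are hypothetical, shown only as a sketch):
    #
    #   readObj = USRPReader()
    #   dateList = readObj.findDatafiles('/path/to/digital_rf_data',
    #                                    startDate=datetime.date(2014, 7, 3),
    #                                    endDate=datetime.date(2014, 7, 5))
    #   # -> list of datetime.date objects with available data in that range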

    def setup(self, path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0, 0, 0),
              endTime=datetime.time(23, 59, 59),
              channelList=None,
              nSamples=None,
              ippKm=60,
              online=False,
              delay=60,
              buffer_size=1024,
              **kwargs):
        '''
        In this method we should set all initial parameters.

        Inputs:
            path          directory with the Digital RF (HDF5) data
            startDate
            endDate
            startTime
            endTime
            channelList
            nSamples      samples (heights) per profile
            ippKm         inter-pulse period in km
            online        True to keep reloading metadata while new data arrives
            delay         seconds to wait before retrying a read in online mode
            buffer_size   number of profiles read from disk per block
        '''
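
        # Illustrative usage (a sketch; the path, dates and channel selection below
        # are hypothetical, not taken from this file):
        #
        #   readObj = USRPReader()
        #   readObj.setup(path='/path/to/digital_rf_data',
        #                 startDate=datetime.date(2014, 7, 3),
        #                 endDate=datetime.date(2014, 7, 3),
        #                 channelList=[0],
        #                 ippKm=60,
        #                 online=False)
        #   while readObj.getData():
        #       process(readObj.dataOut)   # process() is a placeholder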

        if not os.path.isdir(path):
            raise ValueError, "[Reading] Directory %s does not exist" %path

        try:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
        except:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)

        channelNameList = self.digitalReadObj.get_channels()

        if not channelNameList:
            raise ValueError, "[Reading] Directory %s does not have any files" %path

        if not channelList:
            channelList = range(len(channelNameList))

        ########## Reading metadata ######################

        metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])

        self.__sample_rate = metadata_dict['sample_rate'][0]
        # self.__samples_per_file = metadata_dict['samples_per_file'][0]
        self.__deltaHeigth = 1e6*0.15/self.__sample_rate
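
        # Range-resolution note (illustrative interpretation): 0.15 km is the two-way
        # range covered by light in one microsecond (c/2), so at an assumed sample
        # rate of 1 MHz the height spacing is 1e6*0.15/1e6 = 0.15 km per sample.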

        this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])

        self.__frequency = None
        try:
            self.__frequency = this_metadata_file['center_frequencies'].value
        except:
            self.__frequency = this_metadata_file['fc'].value

        if not self.__frequency:
            raise ValueError, "Center Frequency is not defined in metadata file"

        try:
            self.__timezone = this_metadata_file['timezone'].value
        except:
            self.__timezone = 0

        self.__firstHeigth = 0

        try:
            codeType = this_metadata_file['codeType'].value
        except:
            codeType = 0

        nCode = 1
        nBaud = 1
        code = numpy.ones((nCode, nBaud), dtype=numpy.int)

        if codeType:
            nCode = this_metadata_file['nCode'].value
            nBaud = this_metadata_file['nBaud'].value
            code = this_metadata_file['code'].value

        if not ippKm:
            try:
                # seconds to km
                ippKm = 1e6*0.15*this_metadata_file['ipp'].value
            except:
                ippKm = None

        ####################################################
        startUTCSecond = None
        endUTCSecond = None

        if startDate:
            startDatetime = datetime.datetime.combine(startDate, startTime)
            startUTCSecond = (startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone

        if endDate:
            endDatetime = datetime.datetime.combine(endDate, endTime)
            endUTCSecond = (endDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone

        start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])

        if not startUTCSecond:
            startUTCSecond = start_index/self.__sample_rate

        if start_index > startUTCSecond*self.__sample_rate:
            startUTCSecond = start_index/self.__sample_rate

        if not endUTCSecond:
            endUTCSecond = end_index/self.__sample_rate

        if end_index < endUTCSecond*self.__sample_rate:
            endUTCSecond = end_index/self.__sample_rate

        if not nSamples:
            if not ippKm:
                raise ValueError, "[Reading] nSamples or ippKm should be defined"

            nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
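
        # Worked example (assumed values): with ippKm = 60 and a 1 MHz sample rate,
        # the height spacing is 0.15 km, so nSamples = int(60/0.15) = 400 samples
        # per profile.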

        channelBoundList = []
        channelNameListFiltered = []

        for thisIndexChannel in channelList:
            thisChannelName = channelNameList[thisIndexChannel]
            start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
            channelBoundList.append((start_index, end_index))
            channelNameListFiltered.append(thisChannelName)

        self.profileIndex = 0

        self.__delay = delay
        self.__ippKm = ippKm
        self.__codeType = codeType
        self.__nCode = nCode
        self.__nBaud = nBaud
        self.__code = code

        self.__datapath = path
        self.__online = online
        self.__channelList = channelList
        self.__channelNameList = channelNameListFiltered
        self.__channelBoundList = channelBoundList
        self.__nSamples = nSamples
        self.__samples_to_read = int(buffer_size*nSamples)
        self.__nChannels = len(self.__channelList)

        self.__startUTCSecond = startUTCSecond
        self.__endUTCSecond = endUTCSecond

        self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate  # Time interval
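
        # Sizing example (assumed values): buffer_size = 1024 and nSamples = 400 give
        # samples_to_read = 409600; at a 1 MHz sample rate each block then spans
        # roughly 0.41 s of data (__timeInterval).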

        if online:
            # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
            startUTCSecond = numpy.floor(endUTCSecond)

        self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read

        self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype=numpy.complex)

        self.__setFileHeader()
        self.isConfig = True

        print "[Reading] USRP Data was found from %s to %s " %(
            datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        )

        print "[Reading] Starting process from %s to %s" %(
            datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
        )

    def __reload(self):

        if not self.__online:
            return

        # print "%s not in range [%s, %s]" %(
        #     datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
        #     datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
        #     datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        # )
        print "[Reading] reloading metadata ..."

        try:
            self.digitalReadObj.reload(complete_update=True)
        except:
            self.digitalReadObj.reload()

        start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])

        if start_index > self.__startUTCSecond*self.__sample_rate:
            self.__startUTCSecond = 1.0*start_index/self.__sample_rate

        if end_index > self.__endUTCSecond*self.__sample_rate:
            self.__endUTCSecond = 1.0*end_index/self.__sample_rate
            print "[Reading] New timerange found [%s, %s] " %(
                datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
                datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
            )
            return True

        return False

    def __readNextBlock(self, seconds=30, volt_scale=218776):
        '''
        Read the next block of __samples_to_read samples per channel into
        __data_buffer. Returns True on success, False otherwise.
        '''
        # Set the next data
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
            print "[Reading] There is no more data within the selected time range"

            self.__reload()

            if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
                self.__thisUnixSample -= self.__samples_to_read
                return False

        indexChannel = 0

        dataOk = False

        for thisChannelName in self.__channelNameList:

            try:
                result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                              self.__samples_to_read,
                                                              thisChannelName)

            except IOError, e:
                # read next profile
                self.__flagDiscontinuousBlock = True
                print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
                break

            if result.shape[0] != self.__samples_to_read:
                self.__flagDiscontinuousBlock = True
                print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                       result.shape[0],
                                                                                       self.__samples_to_read)
                break

            self.__data_buffer[indexChannel, :] = result*volt_scale

            indexChannel += 1

            dataOk = True

        self.__utctime = self.__thisUnixSample/self.__sample_rate

        if not dataOk:
            return False

        print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                     self.__samples_to_read,
                                                     self.__timeInterval)

        self.__bufferIndex = 0

        return True

    def __isBufferEmpty(self):

        if self.__bufferIndex <= self.__samples_to_read - self.__nSamples:
            return False

        return True

    def getData(self, seconds=30, nTries=5):
        '''
        This method gets the data from files and puts it into the dataOut object.

        In addition, it increases the buffer counter by one.

        Return:
            data : one profile of voltages (heights x channels) copied from the
                   buffer into self.dataOut. Returns False when there is nothing
                   left to read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''
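
        # Reading pattern (derived from the code below, numbers are illustrative):
        # each call copies one profile of shape (nChannels, nSamples) out of
        # __data_buffer; once buffer_size profiles have been consumed the buffer is
        # empty and __readNextBlock() pulls the next __samples_to_read samples per
        # channel from disk.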

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():

            self.__flagDiscontinuousBlock = False

            while True:
                if self.__readNextBlock():
                    break

                if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print '[Reading] discontinuous block found ... continue with the next block'
                    continue

                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print '[Reading] waiting %d seconds to read a new block' %seconds
                sleep(seconds)

        self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
        self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True

    def printInfo(self):
        '''
        '''
        if self.__printInfo == False:
            return

        # self.systemHeaderObj.printInfo()
        # self.radarControllerHeaderObj.printInfo()

        self.__printInfo = False

    def printNumberOfBlock(self):
        '''
        '''
        print self.profileIndex

    def run(self, **kwargs):
        '''
        This method will be called many times so here you should put all your code
        '''

        if not self.isConfig:
            self.setup(**kwargs)

        self.getData(seconds=self.__delay)

        return

class USRPWriter(Operation):
    '''
    Writer stub: it currently only mirrors the input data object; no files are written.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor
        '''
        Operation.__init__(self, **kwargs)

        self.dataOut = None

    def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
        '''
        In this method we should set all initial parameters.

        Input:
            dataIn : the input data will also be the output data
        '''
        self.dataOut = dataIn

        self.isConfig = True

        return

    def run(self, dataIn, **kwargs):
        '''
        This method will be called many times so here you should put all your code

        Inputs:

            dataIn : object with the data
        '''

        if not self.isConfig:
            self.setup(dataIn, **kwargs)

if __name__ == '__main__':

    readObj = USRPReader()

    while True:
        readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
        # readObj.printInfo()
        readObj.printNumberOfBlock()