hf_dec_manual.py
#!/usr/bin/env python

import glob

import numpy as n
import scipy.signal as s
import matplotlib.pyplot as plt

import stuffr

# Input GDF data files
fl=glob.glob("/home/jm/Documents/PruebasSincronizacion/datarxcode02A/d2018289_1029/0/data*.gdf")
#fl=glob.glob("/home/jm/Documents/PruebasSincronizacion/datarxcode2B/d2018289_1034/0/data*.gdf")
#fl=glob.glob("/home/jm/Documents/PruebasSincronizacion/datarxcode2oroya/d2018289_1109/0/data*.gdf")
fl.sort()

fl=fl[-11:-1]   # just test with the last 10 files
z=n.zeros(100000*11,dtype=n.complex64)

# Read each file into its own 100000-sample slot of z.
# Note: fl holds 10 files but z has 11 slots; fl[i-1] wraps around, so the last
# selected file fills both slot 0 and slot 10.
for i in range(11):
    print('i:',i)
    print(i*100000+n.arange(100000))  # sample indices used for this file
    z[i*100000+n.arange(100000)]=n.fromfile(fl[i-1],dtype=n.complex64)

clen=10000     # bauds per frame
n_range=1000   # number of range gates (heights)

# Generate the pseudo-random (PR) code
code2=stuffr.create_pseudo_random_code(seed=2)
code2=n.roll(code2,8)   # roll the code back by 8 lags

print('len:code2:',len(code2))
zl=len(z)   # data vector length

## Preliminary check to see whether the USRPs are out of sync
#for i in range(10000):
#    print("i %d %1.2f"%(i,n.abs(n.sum(n.conj(code2)*z[n.arange(10000)+i]))))

dt=10000

# Periodic convolution (theory) matrix for the code
A=stuffr.periodic_convolution_matrix(code2,rmin=0,rmax=n_range)["A"]

# Maximum likelihood: Juha's algorithm for estimating the range profile
B=n.dot(n.linalg.inv(n.dot(n.transpose(n.conj(A)),A)),n.conj(n.transpose(A)))
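# The line above forms the least-squares decoding matrix B = (A^H A)^{-1} A^H,
# so each decoded frame below is xhat = B z_frame. A sketch of an equivalent
# (and typically better conditioned) way to build it, using NumPy's
# Moore-Penrose pseudoinverse, would be:
#   B = n.linalg.pinv(A)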

#print(B.shape)

n_rep=100   # number of frames (traces) per second
idx=n.arange(clen)
ridx=n.arange(n_range)

S2=n.zeros([n_rep,n_range],dtype=n.complex64)

for i in range(n_rep):
    # there is a range delay; the USRPs are not locked
    xhat=n.dot(B,z[i*clen+idx+dt])
    x0=xhat[ridx]
    S2[i,:]=x0

# Windowed power spectrum across frames for each range gate
SP2=n.zeros([n_rep,n_range])
w=s.windows.hann(n_rep)

for i in range(n_range):
    SP2[:,i]=n.fft.fftshift(n.abs(n.fft.fft(w*S2[:,i]))**2.0)
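
# Doppler-axis sketch (assumption: the n_rep frames span exactly one second, as
# the n_rep comment above suggests), giving 1 Hz resolution over roughly
# +/- n_rep/2 Hz. Illustrative only; not used by the plots below.
dop_hz=n.fft.fftshift(n.fft.fftfreq(n_rep,d=1.0/n_rep))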

# Return an SNR-like whitened spectrum
def whiten_spec(S):
    n_rep=S.shape[0]
    # whiten each frame: subtract the median, scale by the median absolute deviation
    for i in range(n_rep):
        m0=n.median(S[i,:])
        s0=n.median(n.abs(S[i,:]-m0))
        S[i,:]=(S[i,:]-m0)/s0
    return(S)
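
# The whitened values are in units of median absolute deviations above each
# frame's median, i.e. a robust SNR proxy. A hypothetical detection mask could
# therefore be built by simple thresholding, e.g.:
#   detections = whiten_spec(SP2.copy()) > 10.0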

# Plot the decoded spectrum for code 2
plt.pcolormesh(10.0*n.log10(SP2).transpose(),cmap='jet')
plt.colorbar()
plt.show()

# Whitened spectrum =)
SP2=whiten_spec(SP2)
plt.pcolormesh(10.0*n.log10(SP2).transpose(),vmin=0,vmax=100,cmap='jet')
plt.colorbar()
plt.show()