...
 
Commits (4)
bin filter=lfs diff=lfs merge=lfs -text
#!/bin/bash
# Post-process UBX GNSS logs with RTKLIB:
#   1. convert the UAV (and optional local base) UBX logs to RINEX,
#   2. compute an averaged single-point position for the UAV,
#   3. find the closest public EUREF base station and download its
#      daily observation file,
#   4. compute a kinematic solution for the UAV against that station.
#
# NOTE(review): this file contained merge/diff residue (duplicate variable
# definitions and a stray hunk header). The later definitions were kept, and
# the HAS_BASE test — lost in the residue — was reconstructed from the
# surrounding code. Confirm against the original script.

# Input files:
BASE=base
UAV=uav
BASE_FILE=${BASE}.ubx
UAV_FILE=${UAV}.ubx
EUREF_URL='ftp://igs.bkg.bund.de/EUREF/obs'

# Programs:
RNX2RTKP=./bin/rnx2rtkp
CONVBIN=./bin/convbin
CRX2RNX=./bin/CRX2RNX
RTKLIB2AVG=./scripts/rtklib2avg.py
CLOSEST_BS=./scripts/get_closest_basestation.py
EUREF_URL_GENERATOR=./scripts/get_euref_urls.py

# Define colors:
YELLOW="\e[33m"
DEFAULT="\e[39m"

# NOTE(review): reconstructed — decide whether a local base-station log is
# available; the original test was lost in the diff residue.
if [ -e "${BASE_FILE}" ]; then
    HAS_BASE=true
else
    HAS_BASE=false
fi

# Convert observations to RINEX
${CONVBIN} "${UAV_FILE}" -ts 2000/01/01 00:00:00
if $HAS_BASE; then
    ${CONVBIN} "${BASE_FILE}" -ts 2000/01/01 00:00:00
fi

# Select the processing mode and matching input files.
if $HAS_BASE; then
    MODE=3 # Static position
    INPUT_FILES="$BASE.*"
else
    MODE=0 # Single position
    INPUT_FILES="$UAV.*"
fi

# Get single position from observations.
# INPUT_FILES is intentionally unquoted so its glob expands.
${RNX2RTKP} -p "$MODE" -o single.pos $INPUT_FILES

# Get average single position from pos-file
llh=$(${RTKLIB2AVG} single.pos 5 6 | tail -n1)
echo
echo "Average single pos lat long: " ${llh}
echo

# Find the closest base station ID.
# llh is intentionally unquoted: it holds whitespace-separated fields that
# must be passed as separate arguments.
BS=$(${CLOSEST_BS} ${llh} | tail -n2)
echo "Closest base station: " ${BS}
BSID=$(echo $BS | rev | cut -f 1 -d ' ' | rev)
echo "ID:" $BSID
echo

# Find the start and end time of the uav observations (ISO-8601 formatted).
awk_string='{ printf "%04d-%02d-%02dT%02d:%02d:%02.6f",$1,$2,$3,$4,$5,$6; }'
FIRST_OBS=$(head -n 100 "${UAV}.obs" | grep "FIRST OBS" | awk "${awk_string}")
LAST_OBS=$(head -n 100 "${UAV}.obs" | grep "LAST OBS" | awk "${awk_string}")
echo "First observation: " ${FIRST_OBS}
echo "Last observation: " ${LAST_OBS}
echo

# Generate the proper URL
BS_URL=$(${EUREF_URL_GENERATOR} "${EUREF_URL}" "${BSID}" "${FIRST_OBS}" "${LAST_OBS}")
BS_ZIPPEDNAME=${BS_URL##*/}           # basename of the URL
BS_BASENAME=${BS_ZIPPEDNAME%.crx.gz}
BS_FILENAME=${BS_BASENAME}.rnx

# Download and unzip basestation file (cached between runs).
cache_dir=basestations
mkdir -p "$cache_dir"
if [ ! -e "$cache_dir/$BS_FILENAME" ]; then
    if [ ! -e "$cache_dir/$BS_BASENAME.crx" ]; then
        if [ ! -e "$cache_dir/$BS_ZIPPEDNAME" ]; then
            wget "$BS_URL" -P "$cache_dir"
        fi
        gunzip -f "$cache_dir/$BS_ZIPPEDNAME"
    fi
    ${CRX2RNX} "$cache_dir/$BS_BASENAME.crx"
fi

# Calculate UAV position from basestation if we do not have local base
if [ "$HAS_BASE" = false ]; then
    MODE=2 # Kinematic
    INPUT_FILES="$UAV.*"
    BASE_STATION_FILES="$cache_dir/$BS_FILENAME"
    # NOTE(review): the literal '|' below is passed to rnx2rtkp as an
    # argument, not interpreted as a shell pipe — this looks unintended;
    # confirm the intended option string.
    OPTIONS="-c -s | -d 5"
    # OPTIONS and INPUT_FILES are intentionally unquoted (word-splitting
    # and glob expansion).
    ${RNX2RTKP} -o "$UAV.pos" -p "${MODE}" $OPTIONS $INPUT_FILES $BASE_STATION_FILES
    echo
    echo "Done."
    exit 0
fi

echo "Local base station not implemented yet"
exit 1
File added
File added
File added
#!/usr/bin/env python
"""Print the EUREF FTP URL of the daily observation file covering a time span.

Usage: get_euref_urls.py base_url base_station start_time end_time
Times are ISO-8601: yyyy-mm-ddThh:mm:ss.ffffff
"""
import sys
import datetime

# Time format accepted on the command line.
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'


def generate_url(url, base_station, day):
    """Return the download URL for base_station's daily 30s RINEX file on `day`.

    `day` is a datetime.date. The EUREF archive is laid out as
    <url>/<year>/<doy>/<station>_S_<year><doy>0000_01D_30S_MO.crx.gz,
    where <doy> is the 1-based day of year.
    """
    doy = (day - datetime.date(day.year, 1, 1)).days + 1
    filename = '%s_S_%04d%03d0000_01D_30S_MO.crx.gz' % (base_station, day.year, doy)
    return '%s/%04d/%03d/%s' % (url, day.year, doy, filename)


if __name__ == '__main__':
    if not len(sys.argv) == 5:
        print('Usage:')
        print(' %s base_url base_station start_time end_time' % sys.argv[0])
        print('')
        print('Time format: yyyy-mm-ddThh:mm:ss.fffff')
        sys.exit(-1)

    url = sys.argv[1]
    base_station = sys.argv[2]
    start_time = datetime.datetime.strptime(sys.argv[3], TIME_FORMAT)
    end_time = datetime.datetime.strptime(sys.argv[4], TIME_FORMAT)

    start_day = start_time.date()
    end_day = end_time.date()
    # Only single-day, non-live spans are supported by the archive layout used.
    if start_day != end_day:
        print('Different start and end days are not implemented yet.')
        sys.exit(-1)
    if start_day == datetime.date.today():
        print('Files from today not implemented yet')
        sys.exit(-1)

    print(generate_url(url, base_station, start_day))
#!/usr/bin/env python
import sys
import os
import numpy as np
dir_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(dir_path, '..'))
def print_usage():
    """Print command-line usage to stdout and exit with status 1.

    The original text named the wrong script (rtklib2matlab.py) and omitted
    the optional min_quality / min_sat arguments that __main__ accepts.
    """
    print('Usage:')
    print(' python rtklib2avg.py [rover.pos] [min_quality] [min_sat]')
    sys.exit(1)
def parseline(line, names):
    """Parse one whitespace-separated solution record into a dict keyed by `names`.

    Column 0 and columns 5-6 (Q, ns) are converted to int; all other columns
    to float. zip() truncates to the shortest of names / converters / fields,
    so extra trailing columns are silently dropped.
    """
    fields = line.split()
    casts = [int] + [float] * 4 + [int] * 2 + [float] * 8
    record = {}
    for key, cast, raw in zip(names, casts, fields):
        record[key] = cast(raw)
    return record
def fix_name(name):
    """Normalise a header token: lowercase, drop a "(unit)" suffix, and
    replace '-' with '_' so it can serve as a dict key."""
    cleaned = name.strip().lower()
    paren = cleaned.find('(')
    if paren != -1:
        cleaned = cleaned[:paren]
    return cleaned.replace('-', '_')
if __name__ == '__main__':
    # Average the positions in an RTKLIB .pos file, keeping only solutions
    # with quality flag Q <= min_quality and at least min_sat satellites.
    if len(sys.argv) < 2:
        print('No file specified.')
        print('')
        print_usage()

    # Optional CLI overrides: max accepted quality flag and min satellite count.
    if len(sys.argv) >= 3:
        min_quality = int(sys.argv[2])
    else:
        min_quality = 1
    if len(sys.argv) >= 4:
        min_sat = int(sys.argv[3])
    else:
        min_sat = 6

    infile = sys.argv[1]
    print('Extracting average from %s with Q <= %d, n_sat >= %d' % (infile, min_quality, min_sat))

    # Default column names for a headerless RTKLIB ECEF .pos file; overwritten
    # below when the file carries its own header line.  Fixed to match the
    # keys tested in the extraction loop ('x_ecef', 'q', ...) — the old
    # defaults ('ecef_x', 'Q') could never match them.
    names = ['week', 'tow', 'x_ecef', 'y_ecef', 'z_ecef', 'q', 'ns', 'sdx', 'sdy',
             'sdz', 'sdxy', 'sdyz', 'sdzx', 'age', 'ratio']

    n_lines = 0
    # 'rU' was removed in Python 3.11; plain 'r' already gives universal newlines.
    with open(infile, 'r') as f:
        # First pass: parse the header (if any) and count data lines so the
        # result array can be preallocated.
        for line in f:
            if line.startswith('%'):
                # The column-name header is the comment line containing 'ratio'.
                if 'ratio' in line:
                    names = [fix_name(name) for name in line[1:].split()]
                    if 'gpst' in names:
                        # The GPST column splits into two fields: week + tow.
                        names.insert(0, 'week')
                continue
            n_lines += 1

        # Second pass: fill rows [x/lat, y/lon, z/alt, Q, ns].
        f.seek(0)
        arr = np.zeros((5, n_lines))
        arr_ix = 0
        for line in f:
            if line.startswith('%'):
                continue
            data_line = parseline(line, names)
            for key, val in data_line.items():
                if key in ['x_ecef', 'lat', 'latitude']:
                    arr[0, arr_ix] = val
                elif key in ['y_ecef', 'lon', 'longitude']:
                    arr[1, arr_ix] = val
                elif key in ['z_ecef', 'alt', 'height']:
                    arr[2, arr_ix] = val
                elif key == 'q':
                    arr[3, arr_ix] = val
                elif key == 'ns':
                    arr[4, arr_ix] = val
            arr_ix += 1

    print('')
    print('Found %d lines' % arr_ix)

    # Keep solutions with a valid fix (Q > 0), then filter on quality flag
    # and satellite count.
    legal_arr = arr[:, arr[3, :] > 0]
    valid_arr = legal_arr[:, legal_arr[3, :] <= min_quality]
    valid_arr = valid_arr[:, valid_arr[4, :] >= min_sat]
    print('Found %d quality points.' % valid_arr.shape[1])

    # Locate the first position column so the printed header matches the data.
    name_start_ix = 0  # fallback: assume positions start at the first column
    for ix, key in enumerate(names):
        if key in ['x_ecef', 'lat', 'latitude']:
            name_start_ix = ix
            break

    print('')
    print('Mean:')
    print('\t'.join(names[name_start_ix:name_start_ix + 3]))
    llh = np.mean(valid_arr, axis=1)[:3]
    print('%.12f\t%.12f\t%.12f' % (llh[0], llh[1], llh[2]))