diff --git a/peregrine/acquisition.py b/peregrine/acquisition.py
index 9d4cc56..72cc9e8 100644
--- a/peregrine/acquisition.py
+++ b/peregrine/acquisition.py
@@ -54,14 +54,16 @@ class Acquisition:
     Array of samples to use for acquisition. Can be `None` but in this case
     `init_samples` *must* be called with an array of samples before any other
     acquisition functions are used.
-  sampling_freq : float, optional
+  sampling_freq : float
     The sampling frequency of the samples in Hz.
-  IF : float, optional
+  IF : float
     The receiver intermediate frequency used when capturing the samples.
-  samples_per_code : float, optional
+  samples_per_code : float
     The number of samples corresponding to one code length.
-  code_length : int, optional
+  code_length : int
     The number of chips in the chipping code.
+  n_codes_integrate : int, optional
+    The number of code periods to integrate.
   offsets : int, optional
     Offsets, in units of code length (1ms), to use when performing long
     integrations to avoid clobbering by nav bit edges.
diff --git a/peregrine/defaults.py b/peregrine/defaults.py
index 3620e9c..d871d0c 100644
--- a/peregrine/defaults.py
+++ b/peregrine/defaults.py
@@ -15,6 +15,7 @@
 useCache = True
 cacheDir = 'cache'
 ephemMaxAge = 4 * 3600.0 # Reject an ephemeris entry if older than this
+elevMask = None # Optional elevation mask (degrees), None to disable
 
 # the size of the sample data block processed at a time
 processing_block_size = int(20e6) # [samples]
diff --git a/peregrine/ephemeris.py b/peregrine/ephemeris.py
index 580fcec..830993c 100644
--- a/peregrine/ephemeris.py
+++ b/peregrine/ephemeris.py
@@ -98,6 +98,27 @@ def read4(f):
                  filename, count_healthy)
   return ephem
 
+def rinex_creation_date(filename):
+  """ Read the RINEX file creation time from its header.
+
+  Returns:
+    datetime object, or None if the creation date could not be parsed.
+  """
+  dt = None
+  with open(filename,'r') as f:
+    while True:
+      line = f.readline()[:-1]
+      if not line:
+        break
+      if "PGM / RUN BY / DATE" not in line:
+        continue
+
+      # Extract creation time
+      timestring = '-'.join(line.split()[2:4])
+      dt = datetime.strptime(timestring,"%Y%m%d-%H%M%S")
+
+  return dt
+
 def obtain_ephemeris(t, settings):
   """
   Finds an appropriate GNSS ephemeris file for a certain time,
@@ -121,17 +142,31 @@ def obtain_ephemeris(t, settings):
 
   """
   print "Obtaining ephemeris file for ", t
-  #TODO: If it's today and more than 1 hr old, check for an update
   filename = t.strftime("brdc%j0.%yn")
   filedir = os.path.join(settings.cacheDir, "ephem")
   filepath = os.path.join(filedir, filename)
   url = t.strftime(
     'http://qz-vision.jaxa.jp/USE/archives/ephemeris/%Y/') + filename
 
+  # If the file is not in the cache, fetch it
   if not os.path.isfile(filepath):
     if not os.path.exists(filedir):
       os.makedirs(filedir)
     _, hdrs = urllib.urlretrieve(url, filepath)
+
+  # Otherwise, use the cached file if it isn't stale
+  else:
+    rinex_gen_date = rinex_creation_date(filepath)
+    if rinex_gen_date is None:
+      # Something is wrong with the RINEX header, so refresh the cache
+      _, hdrs = urllib.urlretrieve(url, filepath)
+    else:
+      # If the file was generated more than an hour before t, fetch it again
+      if (t-rinex_gen_date) > timedelta(hours=1):
+        _, hdrs = urllib.urlretrieve(url, filepath)
+
+  rinex_gen_date = rinex_creation_date(filepath)
+
   ephem = load_rinex_nav_msg(filepath, t, settings)
   if len(ephem) < 22:
     raise ValueError(
diff --git a/peregrine/gps_time.py b/peregrine/gps_time.py
index 9649fa0..7aac347 100644
--- a/peregrine/gps_time.py
+++ b/peregrine/gps_time.py
@@ -1,5 +1,78 @@
 import datetime
 
+# NTP timestamps are in units of seconds since the NTP epoch
+# See leap-seconds.list for further details.
+NTP_EPOCH = datetime.datetime(1900,1,1,0,0,0)
+
+def sec_since_NTP_epoch(dt):
+  """Return the number of seconds since the NTP epoch."""
+  return (dt-NTP_EPOCH).total_seconds()
+
+def load_leapsecond_table(f="/usr/share/zoneinfo/leap-seconds.list"):
+  """
+  Loads the leap second table from the system.
+
+  The default file location is appropriate for Ubuntu and is provided by
+  the tzdata package.  Refer to the leap-seconds.list file for formatting
+  information.
+
+  Parameters
+  ----------
+  f : string
+    Path to a leap-seconds.list file
+
+  Returns: List of tuples in chronological order, each containing an epoch
+  start time and the number of leap seconds applicable from that epoch.  The
+  final entry is a sentinel holding the table expiry time and `None`.
+  """
+
+  # Check the expiration date in the file
+  with open(f,'r') as fp:
+    for line in fp:
+      if line[0:2] != "#@":
+        continue
+
+      expiration_time = int(line.split('\t')[-1])
+      now = sec_since_NTP_epoch(datetime.datetime.now())
+
+      if expiration_time < now:
+        print "Warning: leap second table has expired; consider updating tzdata."
+      break
+
+  # Load the (epoch start, leap second count) entries themselves
+  table = []
+  with open(f,'r') as fp:
+    for line in fp:
+      if line[0] == '#' or not line.strip():
+        continue
+      fields = line.split()
+      epoch_start = NTP_EPOCH + datetime.timedelta(seconds = int(fields[0]))
+      table.append((epoch_start, int(fields[1])))
+
+  # Append a sentinel at the table expiry time so lookups beyond it are detected
+  table.append((NTP_EPOCH + datetime.timedelta(seconds = expiration_time), None))
+
+  return table
+
+def get_gpst_leap_seconds(t):
+  """Return the GPST-UTC offset (GPS leap seconds) at time t as a timedelta."""
+  table = load_leapsecond_table()
+
+  # Find the leap second count in effect at time t
+  ls = None
+  for epoch_start, leapseconds in table:
+    if t >= epoch_start:
+      ls = leapseconds
+
+  # Raise warning if specified time is after expiry date of leap second table
+  if ls is None:
+    raise UserWarning("Specified datetime is after expiry time of leap second table.")
+
+  # GPS leap seconds are relative to Jan 6 1980, when TAI-UTC was 19 seconds
+  gps_leap_seconds = ls - 19
+
+  return datetime.timedelta(seconds = gps_leap_seconds)
+
 def datetime_to_tow(t, mod1024 = True):
   """
   Convert a Python datetime object to GPS Week and Time Of Week.
@@ -39,12 +112,7 @@ def gpst_to_utc(t_gpst):
   -------
   t_utc : datetime
   """
-  t_utc = t_gpst - datetime.timedelta(seconds = 16)
-  if t_utc <= datetime.datetime(2012, 7, 1) or \
-     t_utc >= datetime.datetime(2015, 7, 1):
-    raise ValueError("Don't know how many leap seconds to use. " +
-                     "Please implement something better in gpst_to_utc()")
-  return t_utc
+  return t_gpst - get_gpst_leap_seconds(t_gpst)
 
 def utc_to_gpst(t_utc):
   """
@@ -59,9 +127,7 @@ def utc_to_gpst(t_utc):
   -------
   t_gpst : datetime
   """
-  t_gpst = t_utc + datetime.timedelta(seconds = 16)
-  if t_utc <= datetime.datetime(2012, 7, 1) or \
-     t_utc >= datetime.datetime(2015, 7, 1):
-    raise ValueError("Don't know how many leap seconds to use. " +
-                     "Please implement something better in utc_to_gpst()")
-  return t_gpst
+  # TODO: there may be an unhandled corner case here for conversions
+  # where the GPST-to-UTC interval spans a leap second.
+
+  return t_utc + get_gpst_leap_seconds(t_utc)
diff --git a/peregrine/short_set.py b/peregrine/short_set.py
index 58a241b..eb22e5f 100644
--- a/peregrine/short_set.py
+++ b/peregrine/short_set.py
@@ -13,8 +13,9 @@
 from datetime import datetime, timedelta
 from numpy import dot
 from numpy.linalg import norm
+from numpy.linalg import inv
 from peregrine.ephemeris import calc_sat_pos, obtain_ephemeris
-from peregrine.gps_time import datetime_to_tow
+from peregrine.gps_time import datetime_to_tow, utc_to_gpst
 from scipy.optimize import fmin, fmin_powell
 from warnings import warn
 import cPickle
@@ -47,12 +48,12 @@ def resolve_ms_integers(obs_pr, pred_pr, prn_ref, disp = True):
 
   print "Resolving millisecond integers:"
   for prn, pr in obs_pr.iteritems():
-    pr_int_est = (pred_pr[prn] - pr) / gps.code_wavelength
+    pr_int_est = (pred_pr[prn] - pr) / gps.l1ca_code_wavelength
     pr_int = round(pr_int_est)
     if abs(pr_int - pr_int_est) > 0.15:
       logger.warn("Pseudorange integer for PRN %2d is %.4f" % (
         prn + 1, pr_int_est) + ", which isn't very close to an integer.")
-    pr += pr_int * gps.code_wavelength
+    pr += pr_int * gps.l1ca_code_wavelength
     obs_pr[prn] = pr
     if disp:
       print ("PRN %2d: pred pseudorange = %9.3f km, obs = %9.3f, " + \
@@ -71,7 +72,7 @@ def fill_remainder(n_ms):
       return [ [1]*n_ms, [-1]*n_ms]
     return [b + f for b in [[1]*20, [-1]*20] for f in fill_remainder(n_ms - 20)]
   hs = []
-  for nav_edge_phase in range(1,min(n_ms,20)):
+  for nav_edge_phase in range(1,min(n_ms,21)):
     h = [([1]*nav_edge_phase) + f for f in fill_remainder(n_ms - nav_edge_phase)]
     hs += h
   return [k for k,v in itertools.groupby(sorted(hs))]
@@ -79,7 +80,7 @@ def fill_remainder(n_ms):
 def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False, coherent = 0, nav_bit_hypoth = None):
   from swiftnav.correlate import track_correlate
   code_freq_shift = (doppler / gps.l1) * gps.chip_rate
-  samples_per_chip = settings.samplingFreq / (gps.chip_rate + code_freq_shift)
+  samples_per_chip = settings.freq_profile['sampling_freq'] / (gps.chip_rate + code_freq_shift)
   samples_per_code = samples_per_chip * gps.chips_per_code
   numSamplesToSkip = round(code_phase * samples_per_chip)
   remCodePhase = (1.0 * numSamplesToSkip / samples_per_chip) - code_phase
@@ -93,12 +94,22 @@ def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False,
     costas_q = 0.0
   for loopCnt in range(n_ms):
     rawSignal = signal[numSamplesToSkip:]#[:blksize_]
-    I_E, Q_E, I_P, Q_P, I_L, Q_L, blksize, remCodePhase, remCarrPhase = track_correlate_(
+    E, P, L, blksize, remCodePhase, remCarrPhase = track_correlate(
       rawSignal,
+      1023, # Chips to correlate
       code_freq_shift + gps.chip_rate,
       remCodePhase,
-      doppler + settings.IF,
-      remCarrPhase, ca_code, settings)
+      doppler + settings.freq_profile['GPS_L1_IF'],
+      remCarrPhase, ca_code, settings.freq_profile['sampling_freq'],
+      gps.L1CA)
+
+    I_E = E.real
+    Q_E = E.imag
+    I_P = P.real
+    Q_P = P.imag
+    I_L = L.real
+    Q_L = L.imag
+
     numSamplesToSkip += blksize
     #print "@ %d, I_P = %.0f, Q_P = %.0f" % (loopCnt, I_P, Q_P)
     i_p.append(I_P)
@@ -140,12 +151,10 @@ def refine_ob(signal, acq_result, settings, print_results = True, return_sweeps
   # TODO: Fit code phase results for better resolution
   from peregrine.include.generateCAcode import caCodes
   from scipy import optimize as opt
-  samples_per_chip = settings.samplingFreq / gps.chip_rate
+  samples_per_chip = settings.freq_profile['sampling_freq'] / gps.chip_rate
   samples_per_code = samples_per_chip * gps.chips_per_code
   # Get a vector with the C/A code sampled 1x/chip
   ca_code = caCodes[acq_result.prn]
-  # Add wrapping to either end to be able to do early/late
-  ca_code = np.concatenate(([ca_code[1022]],ca_code,[ca_code[0]]))
   dopp_offset_search = 100 # Hz away from acquisition
   code_offsets = np.arange(-1,1, 1.0 / 16 / 2)
@@ -287,14 +296,14 @@ def refine_obs(signal, acq_results, settings,
   return obs_cp, obs_dopp
 
-def predict_observables(prior_traj, prior_datetime, prns, ephem, window):
+def predict_observables(prior_traj, prior_datetime, prns, ephem, window, settings):
   from datetime import timedelta
   from numpy.linalg import norm
   from numpy import dot
   """Given a list of PRNs, a set of ephemerides, a nominal capture time (datetime)
   and a and a time window (seconds), compute the ranges and dopplers for
   each satellite at 1ms shifts."""
-  timeres = 50 * gps.code_period # Might be important to keep this an integer number of code periods
+  timeres = 50 * settings.code_period # Might be important to keep this an integer number of code periods
   t0 = prior_datetime - timedelta(seconds=window / 2.0)
   ranges = {}
   dopplers = {}
@@ -351,11 +360,11 @@ def minimize_doppler_error(obs_dopp, times, pred_dopp, plot = False):
 
 def plot_expected_vs_measured(acqed_prns, prn_ref, obs_pr, obs_dopp,
                               prior_traj, t_better,
-                              ephem):
+                              ephem, settings):
   import matplotlib.pyplot as plt
 
   # Compute predicted observables around this new estimate of capture time
-  pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_better, acqed_prns, ephem, 20)
+  pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_better, acqed_prns, ephem, 20, settings)
   pred_pr = pseudoranges_from_ranges(pred_ranges, prn_ref)
 
   ax = plt.figure(figsize=(12,6)).gca()
@@ -476,8 +485,14 @@ def plot_t_recv_sensitivity(r_init, t_ref, obs_pr, ephem, spread = 0.2, step = 0
 def vel_solve(r_sol, t_sol, ephem, obs_pseudodopp, los, tot):
   prns = los.keys()
   pred_prr = {}
+
+  # TODO: does this break if the times of transmission for the different sats
+  # straddle a GPS week rollover?
+  t_sol_gpst = utc_to_gpst(t_sol)
+  wk, tow = datetime_to_tow(t_sol_gpst)
+
   for prn in prns:
-    _, gps_v, _, clock_rate_err = calc_sat_pos(ephem[prn], tot[prn])
+    _, gps_v, _, clock_rate_err = calc_sat_pos(ephem[prn], tot[prn], week=wk)
     pred_prr[prn] = -dot(gps_v, los[prn]) + clock_rate_err * gps.c
 
   los = np.array(los.values())
@@ -497,8 +512,51 @@ def vel_solve(r_sol, t_sol, ephem, obs_pseudodopp, los, tot):
   print "Receiver clock frequency error: %+6.1f Hz" % f_sol
   return v_sol, f_sol
 
+def unit_vector(v):
+  mag = norm(v)
+  if mag==0:
+    print "unit_vector(): warning, zero magnitude vector!"
+    return v
+  return v/mag
+
+def compute_dops(prns, ephem, r, t, disp=False):
+  """ Compute dilution of precision parameters """
+
+  A = []
+  for prn in prns:
+    wk, tow = datetime_to_tow(t)
+    gps_r, gps_v, clock_err, clock_rate_err = calc_sat_pos(ephem[prn],
+                                                           tow,
+                                                           week = wk,
+                                                           warn_stale=False)
+    r_rx_to_gps = unit_vector(gps_r-r)
+    r_rx_to_gps = np.append(r_rx_to_gps,[1.0])
+    A.append(r_rx_to_gps)
+
+  A = np.array(A)
+  Q = inv(dot(A.T,A))
+  qd = Q.diagonal()
+
+  pdop = math.sqrt(qd[:3].sum())
+  hdop = math.sqrt(qd[:2].sum())
+  vdop = math.sqrt(qd[2])
+  tdop = math.sqrt(qd[3])
+  gdop = math.sqrt(pdop**2 + tdop**2)
+
+  if disp:
+    print "Dilution of precision:"
+    print "  PDOP: %.3f"%pdop
+    print "  TDOP: %.3f"%tdop
+    print "  GDOP: %.3f"%gdop
+
+  return {"pdop":pdop,
+          "hdop":hdop,
+          "vdop":vdop,
+          "tdop":tdop,
+          "gdop":gdop}
+
 def postprocess_short_samples(signal, prior_trajectory, t_prior, settings,
-                              plot = True):
+                              plot = True, return_metadata = False):
   """
   Postprocess a short baseband sample record into a navigation solution.
 
@@ -517,14 +575,19 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings,
     e.g. from peregrine.initSettings.initSettings()
   plot : bool
     Make pretty graphs.
+  return_metadata : bool
+    Return dict of metadata from the solver
 
   Returns
   -------
   acq_results : [:class:`AcquisitionResult`]
     List of :class:`AcquisitionResult` objects loaded from the file.
-
+  sol_metadata
+    Dict of metadata from the solver (optional)
   """
+
+  metadata = {}
+
   if hasattr(prior_trajectory, '__call__'):
     prior_traj_func = True
     prior_traj = prior_trajectory
@@ -532,7 +595,7 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings,
     prior_traj_func = False
     prior_traj = lambda t: prior_trajectory
 
-  sig_len_ms = len(signal) / settings.samplingFreq / 1E-3
+  sig_len_ms = len(signal) / settings.freq_profile['sampling_freq'] / 1E-3
   print "Signal is %.2f ms long." % sig_len_ms
 
   r_prior, v_prior = prior_traj(t_prior)
@@ -553,6 +616,18 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings,
                                    ephem, settings,
                                    n_codes_integrate)
 
+  # If acquisition failed, update the cache accordingly and abort
+  if len(acqed)==0:
+    logger.error("Acquisition failed, aborting processing of this capture")
+    if settings.useCache:
+      if not os.path.exists(obs_cache_dir):
+        os.makedirs(obs_cache_dir)
+      with open(obs_cache_file, 'wb') as f:
+        cPickle.dump(([], None, None), f,
+                     protocol=cPickle.HIGHEST_PROTOCOL)
+      logger.error("Marked failed acquisition in cache")
+    return
+
   # Rearrange to put sat with smallest range-rate first.
   # This makes graphs a bit less hairy.
   acqed.sort(key = lambda a: abs(a.doppler))
@@ -569,6 +644,8 @@
       cPickle.dump((acqed_prns, obs_cp, obs_dopp), f,
                    protocol=cPickle.HIGHEST_PROTOCOL)
 
+  metadata['acqed_prns'] = len(acqed_prns)
+
   # Check whether we have enough satellites
   if len(acqed_prns) < 5:
     logger.error(("Acquired %d SVs; need at least 5 for a solution" +
@@ -583,18 +660,18 @@
 
   # Improve the time part of the prior estimate by minimizing doppler residuals
   pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_prior,
                                                           acqed_prns, ephem,
-                                                          30)
+                                                          30, settings)
   i, t_better = minimize_doppler_error(obs_dopp, times, pred_dopplers, plot = plot)
 
   # Revise the prior state vector based on this new estimate of capture time
   r_better, v_better = prior_traj(t_better)
-  delta_t = t_better.second - t_prior.second + (t_better.microsecond - t_prior.microsecond)*1e-6
+  delta_t = (t_better - t_prior).total_seconds()
   delta_r = np.linalg.norm(np.array(r_better) - r_prior)
-  print "By minimizing doppler residuals, adjusted the prior time and position by %s seconds, %.1f km" % (
+  print "By minimizing doppler residuals, adjusted the prior time and position by %.6s seconds, %.3f km" % (
     delta_t, delta_r/ 1e3)
 
   pred_ranges, pred_dopplers, times = predict_observables(
-    prior_traj, t_better, acqed_prns, ephem, 1e-9)
+    prior_traj, t_better, acqed_prns, ephem, 1e-9, settings)
 
   pred_pr_t_better = {prn: pred_ranges[prn][0] for prn in acqed_prns}
 
@@ -604,12 +681,15 @@
 
   if plot:
     plot_expected_vs_measured(acqed_prns, prn_ref, obs_pr, obs_dopp,
-                              prior_traj, t_better, ephem)
+                              prior_traj, t_better, ephem, settings)
 
   # Perform PVT navigation solution
   r_sol, t_sol, los, tot, residuals = pt_solve(r_better, t_better, obs_pr, ephem)
   resid_norm_norm = norm(residuals) / len(acqed_prns)
+  print "PVT solution residuals:", residuals
+  print "PVT solution residual norm (meters per SV):", resid_norm_norm
+
   if resid_norm_norm > settings.navSanityMaxResid:
     logger.error("PVT solution not satisfactorily converged: %.0f > %.0f" % (
       resid_norm_norm, settings.navSanityMaxResid))
@@ -621,9 +701,17 @@
 
   v_sol, rx_freq_err = vel_solve(r_sol, t_sol, ephem, obs_dopp, los, tot)
   print "Velocity: %s (%.1f m/s)" % (v_sol, norm(v_sol))
+  metadata['rx_freq_err'] = rx_freq_err
+
+  # Compute DOPs
+  metadata["dops"] = compute_dops(acqed_prns, ephem, r_sol, t_sol, disp=True)
+
   # How accurate is the time component of the solution?
   if plot:
     plot_t_recv_sensitivity(r_sol, t_sol, obs_pr, ephem, spread = 0.1, step = 0.01)
 
-  return r_sol, v_sol, t_sol
+  if return_metadata:
+    return r_sol, v_sol, t_sol, metadata
+  else:
+    return r_sol, v_sol, t_sol
diff --git a/peregrine/warm_start.py b/peregrine/warm_start.py
index b5f74c6..2c2accc 100755
--- a/peregrine/warm_start.py
+++ b/peregrine/warm_start.py
@@ -28,17 +28,26 @@ def horizon_dip(r):
   r_e = norm(swiftnav.coord_system.wgsllh2ecef_(lat, lon, 0))
   return degrees(-acos(r_e / norm(r_e + height)))
 
-def whatsup(ephem, r, t, mask = None):
+def whatsup(ephem, r, t, mask = None, disp = False):
+  hd = horizon_dip(r)
   if mask is None:
-    mask = horizon_dip(r)
+    mask = hd
   wk, tow = datetime_to_tow(t)
   satsup = []
+
+  if disp:
+    print "Approximate horizon dip angle: %+4.1f deg"%hd
+    print "Satellite sky positions from prior (above mask and healthy):"
+    print "PRN\tAz\tEl\t"
+
   for prn in ephem:
     pos, _, _, _ = ephemeris.calc_sat_pos(ephem[prn], tow, wk, warn_stale = False)
     az, el = swiftnav.coord_system.wgsecef2azel_(pos, r)
     if ephem[prn]['healthy'] and degrees(el) > mask:
       satsup.append(prn)
+      if disp:
+        print "%2d\t%5.1f\t%+5.1f" % (prn + 1, degrees(az), degrees(el))
   return satsup
 
 def whatsdown(ephem, r, t, mask = -45):
@@ -63,7 +72,9 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings,
   receiver's position, velocity and time.
 
   """
-  pred = whatsup(ephem, r_prior, t_prior)
+  pred = whatsup(ephem, r_prior, t_prior,
+                 mask = settings.elevMask,
+                 disp=True)
   pred_dopp = ephemeris.pred_dopplers(pred, ephem, r_prior, v_prior, t_prior)
   if settings.acqSanityCheck:
     notup = whatsdown(ephem, r_prior, t_prior)
@@ -78,9 +89,14 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings,
   if os.path.isfile("/etc/fftw/wisdom"):
     shutil.copy("/etc/fftw/wisdom", wiz_file)
 
-  a = acquisition.Acquisition(signal, settings.samplingFreq,
-                              settings.IF,
-                              settings.samplingFreq * gps.code_period,
+  samplingFreq = settings.freq_profile['sampling_freq']
+
+  a = acquisition.Acquisition(gps.L1CA,
+                              signal,
+                              samplingFreq,
+                              settings.freq_profile['GPS_L1_IF'],
+                              samplingFreq * settings.code_period,
+                              settings.code_length,
                               n_codes_integrate=n_codes_integrate,
                               wisdom_file = wiz_file)
   # Attempt to acquire both the sats we predict are visible
@@ -89,7 +105,7 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings,
                              prns = pred + notup,
                              doppler_priors = pred_dopp + [0 for p in notup],
                              doppler_search = settings.rxFreqTol * gps.l1,
-                             show_progress = True, multi = True)
+                             progress_bar_output='stderr', multi = True)
   nacq_results = acq_results[len(pred):]
   acq_results = acq_results[:len(pred)]
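
The three sketches below are editorial usage notes appended after the patch; they are illustrative only and not part of the diff itself. This first one exercises the new table-driven leap second helpers in peregrine/gps_time.py. It assumes a leap-seconds.list file is installed at the default path read by load_leapsecond_table(), and it relies on the GPST-UTC offset having been 16 s between 2012-07-01 and 2015-07-01, which is the same constant the old hard-coded conversion used.

  # Sketch: UTC <-> GPST round trip with the new helpers (Python 2, like the repo).
  import datetime
  from peregrine.gps_time import utc_to_gpst, gpst_to_utc

  t_utc = datetime.datetime(2014, 3, 1, 12, 0, 0)
  t_gpst = utc_to_gpst(t_utc)   # adds the 16 leap seconds applicable in 2014
  assert t_gpst - t_utc == datetime.timedelta(seconds=16)
  assert gpst_to_utc(t_gpst) == t_utc   # exact away from a leap second boundary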
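
Second, a hypothetical caller of the extended postprocess_short_samples() could pick up the new solver metadata as sketched here. The names signal, prior_traj, t_prior and settings stand for whatever the caller already has; the None check matters because the patched function now returns nothing when acquisition fails.

  from peregrine.short_set import postprocess_short_samples

  # Sketch: request the metadata dict added by this patch.
  result = postprocess_short_samples(signal, prior_traj, t_prior, settings,
                                     plot=False, return_metadata=True)
  if result is not None:
    r_sol, v_sol, t_sol, metadata = result
    print "Acquired SVs:", metadata['acqed_prns']
    print "Rx clock frequency error [Hz]:", metadata['rx_freq_err']
    print "GDOP:", metadata['dops']['gdop']

One design note on the new compute_dops(): its geometry matrix is built from ECEF line-of-sight unit vectors, so the HDOP/VDOP split it reports is frame-dependent and only indicative until the matrix is rotated into a local-level frame, while PDOP, TDOP and GDOP are rotation-invariant.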
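
Finally, the new elevMask default interacts with whatsup() as sketched below. With the default of None the mask falls back to the geometric horizon dip for the prior position; any value set in the settings is interpreted in degrees, and the 5.0 used here is just an arbitrary example (ephem, r_prior and t_prior are again placeholders for the caller's own data).

  from peregrine.warm_start import whatsup

  # Sketch: apply an explicit elevation mask and print the visibility table.
  settings.elevMask = 5.0   # degrees; leave as None to use the horizon dip
  visible = whatsup(ephem, r_prior, t_prior, mask=settings.elevMask, disp=True)
  print "%d satellites predicted above the mask" % len(visible)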