From 10038ab09e568f8b5d83ae538561ab153bc243c8 Mon Sep 17 00:00:00 2001
From: Ryan Kingsbury
Date: Thu, 28 Apr 2016 15:26:59 +0000
Subject: [PATCH 01/13] EPHEMERIS: Use OS tzdata for leap second calculation.

The tzdata database, also known as the Olson database, seems to be the
definitive source for leap second data. It is maintained regularly by
folks who follow this stuff.

The suggestion to use this database came from:
https://stackoverflow.com/questions/19332902/extract-historic-leap-seconds-from-tzdata

An alternate approach would be to include the tz repository as a
submodule, but this would quickly go stale. Therefore, I opted to pull
the file from tzdata, which should be regularly updated by the OS. The
current location is suitable for Ubuntu, not sure about other OSes.
---
 peregrine/gps_time.py | 90 +++++++++++++++++++++++++++++++++++++------
 1 file changed, 78 insertions(+), 12 deletions(-)

diff --git a/peregrine/gps_time.py b/peregrine/gps_time.py
index 9649fa0..7aac347 100644
--- a/peregrine/gps_time.py
+++ b/peregrine/gps_time.py
@@ -1,5 +1,78 @@
 import datetime
 
+# NTP timestamps are in units of seconds since the NTP epoch
+# See leap-seconds.list for further details.
+NTP_EPOCH = datetime.datetime(1900,1,1,0,0,0)
+
+def sec_since_NTP_epoch(dt):
+    """Return the number of seconds since the NTP epoch."""
+    return (dt-NTP_EPOCH).total_seconds()
+
+def load_leapsecond_table(f="/usr/share/zoneinfo/leap-seconds.list"):
+    """
+    Loads leap second table from system.
+
+    The default file location is appropriate for Ubuntu and is provided by
+    the tzdata package. Refer to the leap-seconds.list file for formatting
+    information.
+
+    Parameters
+    ----------
+    f : string
+      Path to a leap-seconds.list file
+
+    Returns: List of tuples in chronological order, each containing an
+    epoch start time and number of leap seconds applicable for that epoch.
+    """
+
+    # Check the expiration date in the file
+    with open(f,'r') as fp:
+        for line in fp:
+            if line[0:2] != "#@":
+                continue
+
+            expiration_time = int(line.split('\t')[-1])
+            now = sec_since_NTP_epoch(datetime.datetime.now())
+
+            if expiration_time=epoch_start:
+        ls = leapseconds
+
+    # Raise warning if specified time is after expiry date of leap second table
+    if ls==None:
+        raise UserWarning("Specified datetime is after expiry time of leap second table.")
+
+    # GPS leap seconds relative to Jan 6 1980, when TAI-UTC was 19 seconds.
+    gps_leap_seconds = ls - 19
+
+    return datetime.timedelta(seconds = gps_leap_seconds)
+
 def datetime_to_tow(t, mod1024 = True):
   """
   Convert a Python datetime object to GPS Week and Time Of Week.
@@ -39,12 +112,7 @@ def gpst_to_utc(t_gpst):
   -------
   t_utc : datetime
   """
-  t_utc = t_gpst - datetime.timedelta(seconds = 16)
-  if t_utc <= datetime.datetime(2012, 7, 1) or \
-     t_utc >= datetime.datetime(2015, 7, 1):
-    raise ValueError("Don't know how many leap seconds to use. " +
-                     "Please implement something better in gpst_to_utc()")
-  return t_utc
+  return t_gpst - get_gpst_leap_seconds(t_gpst)
 
 def utc_to_gpst(t_utc):
   """
@@ -59,9 +127,7 @@ def utc_to_gpst(t_utc):
   -------
   t_gpst : datetime
   """
-  t_gpst = t_utc + datetime.timedelta(seconds = 16)
-  if t_utc <= datetime.datetime(2012, 7, 1) or \
-     t_utc >= datetime.datetime(2015, 7, 1):
-    raise ValueError("Don't know how many leap seconds to use. " +
-                     "Please implement something better in utc_to_gpst()")
-  return t_gpst
+  # TODO: there may be an unhandled corner case here for conversions
+  # where the GPST-to-UTC interval spans a leap second.
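For readers unfamiliar with the leap-seconds.list format, the lookup this patch performs can be sketched standalone as below. Each data line in the file is an NTP timestamp followed by the TAI-UTC offset in force from that instant onward; gps_utc_offset() is a hypothetical helper name used only for this illustration, not part of the patch.

    import datetime

    NTP_EPOCH = datetime.datetime(1900, 1, 1)

    def gps_utc_offset(t, path="/usr/share/zoneinfo/leap-seconds.list"):
        """Return the GPS-UTC offset (integer seconds) for a UTC datetime t."""
        # Seconds since the NTP epoch (1900-01-01) for the requested time
        t_ntp = (t - NTP_EPOCH).total_seconds()
        tai_minus_utc = None
        with open(path) as fp:
            for line in fp:
                fields = line.split()
                if not fields or fields[0].startswith("#"):
                    continue  # blank lines and comments, incl. the '#@' expiry line
                if t_ntp >= int(fields[0]):
                    tai_minus_utc = int(fields[1])
        if tai_minus_utc is None:
            raise ValueError("time predates the leap second table")
        # GPS time matched UTC at the GPS epoch (1980-01-06), when TAI-UTC was 19 s
        return tai_minus_utc - 19

With a current table, gps_utc_offset(datetime.datetime(2016, 4, 28)) evaluates to 17, matching the 17-second GPS-UTC offset in effect at the time of this commit.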
+ + return t_utc + get_gpst_leap_seconds(t_utc) From 370fd726b8645a7e1f8db7345cf2e79c5dd4d215 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Mon, 2 May 2016 21:34:41 +0000 Subject: [PATCH 02/13] SHORT_SET: fixes libswiftnav API issues * track_correlate now accepts sample_freq directly, does not deal with settings objects * track_correlate returned complex-typed results track_correlate not found in libswiftnav, changed name per Cython config, probably not the appropriate fix Peregrine wasn't able to find the track_correlate function in libswiftnav. As I understand it, "*_" function names are generally not exposed to the user. --- peregrine/short_set.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index 58a241b..7c792eb 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -77,7 +77,7 @@ def fill_remainder(n_ms): return [k for k,v in itertools.groupby(sorted(hs))] def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False, coherent = 0, nav_bit_hypoth = None): - from swiftnav.correlate import track_correlate + from swiftnav.correlate import track_correlate_ code_freq_shift = (doppler / gps.l1) * gps.chip_rate samples_per_chip = settings.samplingFreq / (gps.chip_rate + code_freq_shift) samples_per_code = samples_per_chip * gps.chips_per_code @@ -93,12 +93,20 @@ def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False, costas_q = 0.0 for loopCnt in range(n_ms): rawSignal = signal[numSamplesToSkip:]#[:blksize_] - I_E, Q_E, I_P, Q_P, I_L, Q_L, blksize, remCodePhase, remCarrPhase = track_correlate_( + E, P, L, blksize, remCodePhase, remCarrPhase = track_correlate_( rawSignal, code_freq_shift + gps.chip_rate, remCodePhase, doppler + settings.IF, - remCarrPhase, ca_code, settings) + remCarrPhase, ca_code, settings.samplingFreq) + + I_E = E.real + Q_E = E.imag + I_P = P.real + Q_P = P.imag + I_L = L.real + Q_L = L.imag + numSamplesToSkip += blksize #print "@ %d, I_P = %.0f, Q_P = %.0f" % (loopCnt, I_P, Q_P) i_p.append(I_P) From 7b95104a44db7304a8258399d4dffd984792438b Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Thu, 5 May 2016 23:41:41 +0000 Subject: [PATCH 03/13] SHORT_SET: abort processing if acquisition fails and (settings.abortIfInsane==True) --- peregrine/short_set.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index 7c792eb..cb49c9f 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -561,6 +561,18 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, ephem, settings, n_codes_integrate) + # If acquisition failed, update the cache accordingly and abort + if len(acqed)==0: + logger.error("Acquisition failed, aborting processing of this capture") + if settings.useCache: + if not os.path.exists(obs_cache_dir): + os.makedirs(obs_cache_dir) + with open(obs_cache_file, 'wb') as f: + cPickle.dump(([], None, None), f, + protocol=cPickle.HIGHEST_PROTOCOL) + logger.error("Marked failed acquisition in cache") + return + # Rearrange to put sat with smallest range-rate first. # This makes graphs a bit less hairy. 
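For code that later consults this cache, a minimal helper along the following lines (hypothetical name, assuming the ([], None, None) marker written above and the (prns, code phases, dopplers) tuple layout used elsewhere for cached observations) can tell a previously failed acquisition apart from a missing cache entry:

    import cPickle
    import os

    def cached_acquisition_failed(obs_cache_file):
        """Return True if the cache marks this capture as a failed acquisition."""
        if not os.path.isfile(obs_cache_file):
            return False             # never processed, not a recorded failure
        with open(obs_cache_file, 'rb') as f:
            acqed_prns, obs_cp, obs_dopp = cPickle.load(f)
        return len(acqed_prns) == 0  # empty PRN list is the failure marker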
acqed.sort(key = lambda a: abs(a.doppler)) From ea81d8472e9f4f135a6e74e4002bf1804834a1e8 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Wed, 18 May 2016 22:31:10 +0000 Subject: [PATCH 04/13] SHORT_SET: fixes time calculation in doppler optimization output --- peregrine/short_set.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index cb49c9f..a71bf84 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -609,9 +609,9 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, # Revise the prior state vector based on this new estimate of capture time r_better, v_better = prior_traj(t_better) - delta_t = t_better.second - t_prior.second + (t_better.microsecond - t_prior.microsecond)*1e-6 + delta_t = (t_better - t_prior).total_seconds() delta_r = np.linalg.norm(np.array(r_better) - r_prior) - print "By minimizing doppler residuals, adjusted the prior time and position by %s seconds, %.1f km" % ( + print "By minimizing doppler residuals, adjusted the prior time and position by %.6s seconds, %.3f km" % ( delta_t, delta_r/ 1e3) pred_ranges, pred_dopplers, times = predict_observables( prior_traj, t_better, acqed_prns, ephem, 1e-9) From 2086de24ab424334151187697598c2a780d00a6a Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Wed, 18 May 2016 23:13:16 +0000 Subject: [PATCH 05/13] SHORT_SET: one nav bit hypothesis was missing Nav bit hypothesis generator was missing a hypothesis possibility. Here is an example of the erroneous behavior that this commit fixes: In [34]: nbh_old = peregrine.short_set.nav_bit_hypotheses(32) In [35]: for i,n in enumerate(nbh_old): print "%i\t"%i,"".join(["1" if a<0 else "0" for a in n]) ....: 0 01111111111111111111111111111111 1 01111111111111111111100000000000 2 00111111111111111111111111111111 3 00111111111111111111110000000000 4 00011111111111111111111111111111 5 00011111111111111111111000000000 6 00001111111111111111111111111111 7 00001111111111111111111100000000 8 00000111111111111111111111111111 9 00000111111111111111111110000000 10 00000011111111111111111111111111 11 00000011111111111111111111000000 12 00000001111111111111111111111111 13 00000001111111111111111111100000 14 00000000111111111111111111111111 15 00000000111111111111111111110000 16 00000000011111111111111111111111 17 00000000011111111111111111111000 18 00000000001111111111111111111111 19 00000000001111111111111111111100 20 00000000000111111111111111111111 21 00000000000111111111111111111110 22 00000000000011111111111111111111 23 00000000000001111111111111111111 24 00000000000000111111111111111111 25 00000000000000011111111111111111 26 00000000000000001111111111111111 27 00000000000000000111111111111111 28 00000000000000000011111111111111 29 00000000000000000001111111111111 <--- Missing hypothesis 30 00000000000000000000011111111111 <--- between these two 31 00000000000000000000001111111111 32 00000000000000000000000111111111 33 00000000000000000000000011111111 34 00000000000000000000000001111111 35 00000000000000000000000000111111 36 00000000000000000000000000011111 37 00000000000000000000000000001111 38 00000000000000000000000000000111 39 00000000000000000000000000000011 40 00000000000000000000000000000001 41 00000000000000000000000000000000 --- peregrine/short_set.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index a71bf84..3855cd6 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -71,7 +71,7 @@ def 
fill_remainder(n_ms): return [ [1]*n_ms, [-1]*n_ms] return [b + f for b in [[1]*20, [-1]*20] for f in fill_remainder(n_ms - 20)] hs = [] - for nav_edge_phase in range(1,min(n_ms,20)): + for nav_edge_phase in range(1,min(n_ms,21)): h = [([1]*nav_edge_phase) + f for f in fill_remainder(n_ms - nav_edge_phase)] hs += h return [k for k,v in itertools.groupby(sorted(hs))] From c49a9043962769db6aafdd9c731fef60249f25c5 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Mon, 23 May 2016 15:25:24 +0000 Subject: [PATCH 06/13] SHORT_SET: added residuals stdout statements --- peregrine/short_set.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index 3855cd6..bc0b4a4 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -630,6 +630,9 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, r_sol, t_sol, los, tot, residuals = pt_solve(r_better, t_better, obs_pr, ephem) resid_norm_norm = norm(residuals) / len(acqed_prns) + print "PVT solution residuals:",residuals + print "PVT solution residual norm (meters per SV):",resid_norm_norm + if resid_norm_norm > settings.navSanityMaxResid: logger.error("PVT solution not satisfactorily converged: %.0f > %.0f" % ( resid_norm_norm, settings.navSanityMaxResid)) From 6d85879e483fa00c1bbb8cb144a39c33bbf37ca1 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Mon, 30 May 2016 12:48:43 -0700 Subject: [PATCH 07/13] WARM_START: adds table showing GNSS satellite sky positions --- peregrine/warm_start.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/peregrine/warm_start.py b/peregrine/warm_start.py index b5f74c6..afa08b4 100755 --- a/peregrine/warm_start.py +++ b/peregrine/warm_start.py @@ -28,17 +28,26 @@ def horizon_dip(r): r_e = norm(swiftnav.coord_system.wgsllh2ecef_(lat, lon, 0)) return degrees(-acos(r_e / norm(r_e + height))) -def whatsup(ephem, r, t, mask = None): +def whatsup(ephem, r, t, mask = None, disp = False): + hd = horizon_dip(r) if mask is None: - mask = horizon_dip(r) + mask = hd wk, tow = datetime_to_tow(t) satsup = [] + + if disp: + print "Approximate horizon dip angle: %+4.1f deg"%hd + print "Satellite sky positions from prior (above mask and healthy):" + print "PRN\tAz\tEl\t" + for prn in ephem: pos, _, _, _ = ephemeris.calc_sat_pos(ephem[prn], tow, wk, warn_stale = False) az, el = swiftnav.coord_system.wgsecef2azel_(pos, r) if ephem[prn]['healthy'] and degrees(el) > mask: satsup.append(prn) + if disp: + print "%2d\t%5.1f\t%+5.1f" % (prn + 1, degrees(az), degrees(el)) return satsup def whatsdown(ephem, r, t, mask = -45): @@ -63,7 +72,7 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings, receiver's position, velocity and time. 
""" - pred = whatsup(ephem, r_prior, t_prior) + pred = whatsup(ephem, r_prior, t_prior, disp=True) pred_dopp = ephemeris.pred_dopplers(pred, ephem, r_prior, v_prior, t_prior) if settings.acqSanityCheck: notup = whatsdown(ephem, r_prior, t_prior) From 7fa8f0407617cfb23aef6c9f5c9034a4d6ee00f8 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Mon, 30 May 2016 13:03:57 -0700 Subject: [PATCH 08/13] DEFAULTS/SHORT_SET: configurable elevation mask for short-set --- peregrine/defaults.py | 1 + peregrine/warm_start.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/peregrine/defaults.py b/peregrine/defaults.py index 3620e9c..d871d0c 100644 --- a/peregrine/defaults.py +++ b/peregrine/defaults.py @@ -15,6 +15,7 @@ useCache = True cacheDir = 'cache' ephemMaxAge = 4 * 3600.0 # Reject an ephemeris entry if older than this +elevMask = None # Optional elevation mask (degrees), None to disable # the size of the sample data block processed at a time processing_block_size = int(20e6) # [samples] diff --git a/peregrine/warm_start.py b/peregrine/warm_start.py index afa08b4..0ccbf4a 100755 --- a/peregrine/warm_start.py +++ b/peregrine/warm_start.py @@ -72,7 +72,9 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings, receiver's position, velocity and time. """ - pred = whatsup(ephem, r_prior, t_prior, disp=True) + pred = whatsup(ephem, r_prior, t_prior, + mask = settings.elevMask, + disp=True) pred_dopp = ephemeris.pred_dopplers(pred, ephem, r_prior, v_prior, t_prior) if settings.acqSanityCheck: notup = whatsdown(ephem, r_prior, t_prior) From 932973bc8ef05dba9624f539c599598cd4a5334e Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Mon, 30 May 2016 17:22:06 -0700 Subject: [PATCH 09/13] SHORT_SET: add dilution of precision calculation and other metadata to microsample solver SHORT_SET: adds dilution of precision calculation SHORT_SET: adds option to return DOP values for solution SHORT_SET: adds option to return general metadata from processing attempt --- peregrine/short_set.py | 65 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 62 insertions(+), 3 deletions(-) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index bc0b4a4..14a7788 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -13,6 +13,7 @@ from datetime import datetime, timedelta from numpy import dot from numpy.linalg import norm +from numpy.linalg import inv from peregrine.ephemeris import calc_sat_pos, obtain_ephemeris from peregrine.gps_time import datetime_to_tow from scipy.optimize import fmin, fmin_powell @@ -505,8 +506,51 @@ def vel_solve(r_sol, t_sol, ephem, obs_pseudodopp, los, tot): print "Receiver clock frequency error: %+6.1f Hz" % f_sol return v_sol, f_sol +def unit_vector(v): + mag = norm(v) + if mag==0: + print "unit_vector(): warning, zero magnitude vector!" 
+ return v + return v/mag + +def compute_dops(prns, ephem, r, t, disp=False): + """ Compute dilution of precision parameters """ + + A = [] + for prn in prns: + wk, tow = datetime_to_tow(t) + gps_r, gps_v, clock_err, clock_rate_err = calc_sat_pos(ephem[prn], + tow, + week = wk, + warn_stale=False) + r_rx_to_gps = unit_vector(gps_r-r) + r_rx_to_gps = np.append(r_rx_to_gps,[1.0]) + A.append(r_rx_to_gps) + + A = np.array(A) + Q = inv(dot(A.T,A)) + qd = Q.diagonal() + + pdop = math.sqrt(qd[:3].sum()) + hdop = math.sqrt(qd[:2].sum()) + vdop = math.sqrt(qd[2]) + tdop = math.sqrt(qd[3]) + gdop = math.sqrt(pdop**2 + tdop**2) + + if disp: + print "Dilution of precision:" + print " PDOP: %.3f"%pdop + print " TDOP: %.3f"%tdop + print " GDOP: %.3f"%gdop + + return {"pdop":pdop, + "hdop":hdop, + "vdop":vdop, + "tdop":tdop, + "gdop":gdop} + def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, - plot = True): + plot = True, return_metadata = False): """ Postprocess a short baseband sample record into a navigation solution. @@ -525,14 +569,19 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, e.g. from peregrine.initSettings.initSettings() plot : bool Make pretty graphs. + return_metadata : bool + Return dict of metadata from the solver Returns ------- acq_results : [:class:`AcquisitionResult`] List of :class:`AcquisitionResult` objects loaded from the file. - + sol_metadata + Dict of metadata from the solver (optional) """ + metadata = {} + if hasattr(prior_trajectory, '__call__'): prior_traj_func = True prior_traj = prior_trajectory @@ -589,6 +638,8 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, cPickle.dump((acqed_prns, obs_cp, obs_dopp), f, protocol=cPickle.HIGHEST_PROTOCOL) + metadata['acqed_prns'] = len(acqed_prns) + # Check whether we have enough satellites if len(acqed_prns) < 5: logger.error(("Acquired %d SVs; need at least 5 for a solution" + @@ -644,9 +695,17 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, v_sol, rx_freq_err = vel_solve(r_sol, t_sol, ephem, obs_dopp, los, tot) print "Velocity: %s (%.1f m/s)" % (v_sol, norm(v_sol)) + metadata['rx_freq_err'] = rx_freq_err + + # Compute DOPs + metadata["dops"] = compute_dops(acqed_prns, ephem, r_sol, t_sol, disp=True) + # How accurate is the time component of the solution? 
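A usage sketch of the return_metadata flag added above; signal, prior_trajectory, t_prior and settings are assumed to be prepared as elsewhere in this series, and the keys printed are the ones populated in this patch:

    from peregrine.short_set import postprocess_short_samples

    r_sol, v_sol, t_sol, meta = postprocess_short_samples(
        signal, prior_trajectory, t_prior, settings,
        plot=False, return_metadata=True)

    print "Acquired SVs:             %d" % meta['acqed_prns']
    print "Rx clock frequency error: %+.1f Hz" % meta['rx_freq_err']
    print "GDOP:                     %.2f" % meta['dops']['gdop']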
   if plot:
     plot_t_recv_sensitivity(r_sol, t_sol, obs_pr, ephem, spread = 0.1, step = 0.01)
 
-  return r_sol, v_sol, t_sol
+  if return_metadata:
+    return r_sol, v_sol, t_sol, metadata
+  else:
+    return r_sol, v_sol, t_sol

From a21f87a0fecb6b0dd4336b914603e0771144f17e Mon Sep 17 00:00:00 2001
From: Ryan Kingsbury
Date: Thu, 16 Jun 2016 21:19:08 +0000
Subject: [PATCH 10/13] EPHEMERIS: improves logic to deal with stale ephemeris in cache

---
 peregrine/ephemeris.py | 39 +++++++++++++++++++++++++++++++++++++--
 1 file changed, 37 insertions(+), 2 deletions(-)

diff --git a/peregrine/ephemeris.py b/peregrine/ephemeris.py
index 580fcec..830993c 100644
--- a/peregrine/ephemeris.py
+++ b/peregrine/ephemeris.py
@@ -98,6 +98,27 @@ def read4(f):
                 filename, count_healthy)
   return ephem
 
+def rinex_creation_date(filename):
+    """ Read RINEX file creation time
+
+    Returns:
+      datetime object
+    """
+    dt = None
+    with open(filename,'r') as f:
+        while True:
+            line = f.readline()[:-1]
+            if not line:
+                break
+            if "PGM / RUN BY / DATE" not in line:
+                continue
+
+            # Extract creation time
+            timestring = '-'.join(line.split()[2:4])
+            dt = datetime.strptime(timestring,"%Y%m%d-%H%M%S")
+
+    return dt
+
 def obtain_ephemeris(t, settings):
   """
   Finds an appropriate GNSS ephemeris file for a certain time,
@@ -121,17 +142,31 @@ def obtain_ephemeris(t, settings):
 
   """
   print "Obtaining ephemeris file for ", t
-  #TODO: If it's today and more than 1 hr old, check for an update
-
   filename = t.strftime("brdc%j0.%yn")
   filedir = os.path.join(settings.cacheDir, "ephem")
   filepath = os.path.join(filedir, filename)
   url = t.strftime(
     'http://qz-vision.jaxa.jp/USE/archives/ephemeris/%Y/') + filename
+
+  # If not in cache, then fetch it
   if not os.path.isfile(filepath):
     if not os.path.exists(filedir):
       os.makedirs(filedir)
     _, hdrs = urllib.urlretrieve(url, filepath)
+
+  # Otherwise, use the cache file if it isn't stale
+  else:
+    rinex_gen_date = rinex_creation_date(filepath)
+    if rinex_gen_date is None:
+      # Something wrong with RINEX format, update cache
+      _, hdrs = urllib.urlretrieve(url, filepath)
+    else:
+      # If the RINEX file was generated more than an hour before the
+      # requested time, fetch it again
+      if (t-rinex_gen_date) > timedelta(hours=1):
+        _, hdrs = urllib.urlretrieve(url, filepath)
+
+      rinex_gen_date = rinex_creation_date(filepath)
+
   ephem = load_rinex_nav_msg(filepath, t, settings)
   if len(ephem) < 22:
     raise ValueError(

From 90e16cd8e5ac687ee55c023c83ecb64a692dc0ee Mon Sep 17 00:00:00 2001
From: Ryan Kingsbury
Date: Thu, 16 Jun 2016 23:41:07 +0000
Subject: [PATCH 11/13] SHORT_SET: makes velocity solver respect possibility of TOE week rollover

---
 peregrine/short_set.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/peregrine/short_set.py b/peregrine/short_set.py
index 14a7788..61927b9 100644
--- a/peregrine/short_set.py
+++ b/peregrine/short_set.py
@@ -485,8 +485,14 @@ def vel_solve(r_sol, t_sol, ephem, obs_pseudodopp, los, tot):
   prns = los.keys()
   pred_prr = {}
+
+  # TODO: does this break if times of transmission for the different sats
+  # straddle a GPS week rollover?
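To make the TODO above concrete, here is a small illustration of the ambiguity (the epoch and flight time are hypothetical; GPS weeks roll over at Sunday 00:00:00 GPST, and TOW wraps at 604800 s):

    import datetime
    from peregrine.gps_time import datetime_to_tow

    t_rx = datetime.datetime(2016, 6, 19, 0, 0, 0, 20000)  # 20 ms after rollover (GPST)
    t_tx = t_rx - datetime.timedelta(milliseconds=75)       # ~75 ms signal flight time
    wk_rx, tow_rx = datetime_to_tow(t_rx)                   # (wk,     ~0.02 s)
    wk_tx, tow_tx = datetime_to_tow(t_tx)                   # (wk - 1, ~604799.945 s)
    # Evaluating the ephemeris with tow_tx but the receive-time week would place
    # the satellite a full week off; passing week=wk to calc_sat_pos pins it.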
+ t_sol_gpst = utc_to_gpst(t_sol) + wk, tow = datetime_to_tow(t_sol_gpst) + for prn in prns: - _, gps_v, _, clock_rate_err = calc_sat_pos(ephem[prn], tot[prn]) + _, gps_v, _, clock_rate_err = calc_sat_pos(ephem[prn], tot[prn], week=wk) pred_prr[prn] = -dot(gps_v, los[prn]) + clock_rate_err * gps.c los = np.array(los.values()) From f65d55154b4a71416fbb4bfc1414225005d12e1a Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Sun, 19 Jun 2016 18:39:32 -0700 Subject: [PATCH 12/13] SHORT_SET: uses new signal/settings storage conventions --- peregrine/short_set.py | 2 +- peregrine/warm_start.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/peregrine/short_set.py b/peregrine/short_set.py index 61927b9..bfa9f2a 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -595,7 +595,7 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, prior_traj_func = False prior_traj = lambda t: prior_trajectory - sig_len_ms = len(signal) / settings.samplingFreq / 1E-3 + sig_len_ms = len(signal) / settings.freq_profile['sampling_freq'] / 1E-3 print "Signal is %.2f ms long." % sig_len_ms r_prior, v_prior = prior_traj(t_prior) diff --git a/peregrine/warm_start.py b/peregrine/warm_start.py index 0ccbf4a..be2c005 100755 --- a/peregrine/warm_start.py +++ b/peregrine/warm_start.py @@ -89,9 +89,11 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings, if os.path.isfile("/etc/fftw/wisdom"): shutil.copy("/etc/fftw/wisdom", wiz_file) - a = acquisition.Acquisition(signal, settings.samplingFreq, - settings.IF, - settings.samplingFreq * gps.code_period, + samplingFreq = settings.freq_profile['sampling_freq'] + + a = acquisition.Acquisition(signal, samplingFreq, + settings.freq_profile['GPS_L1_IF'], + samplingFreq * settings.code_period, n_codes_integrate=n_codes_integrate, wisdom_file = wiz_file) # Attempt to acquire both the sats we predict are visible From f0f94b31fecd20ffa33d849ff09c6573e9a80ae3 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Tue, 21 Jun 2016 17:32:09 -0700 Subject: [PATCH 13/13] Rebase updates mostly related to short_set and warm_start --- peregrine/acquisition.py | 10 ++++++---- peregrine/short_set.py | 34 +++++++++++++++++----------------- peregrine/warm_start.py | 7 +++++-- 3 files changed, 28 insertions(+), 23 deletions(-) diff --git a/peregrine/acquisition.py b/peregrine/acquisition.py index 9d4cc56..72cc9e8 100644 --- a/peregrine/acquisition.py +++ b/peregrine/acquisition.py @@ -54,14 +54,16 @@ class Acquisition: Array of samples to use for acquisition. Can be `None` but in this case `init_samples` *must* be called with an array of samples before any other acquisition functions are used. - sampling_freq : float, optional + sampling_freq : float The sampling frequency of the samples in Hz. - IF : float, optional + IF : float The receiver intermediate frequency used when capturing the samples. - samples_per_code : float, optional + samples_per_code : float The number of samples corresponding to one code length. - code_length : int, optional + code_length : int The number of chips in the chipping code. + n_codes_integrate : int, optional + The number of codes to integrate. offsets : int, optional Offsets, in units of code length (1ms), to use when performing long integrations to avoid clobbering by nav bit edges. 
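As a worked example of how the documented acquisition parameters relate for GPS L1 C/A (the sampling rate and IF below are illustrative values, not requirements of the code):

    sampling_freq = 16.368e6         # samples per second (example front-end rate)
    IF = 4.092e6                     # intermediate frequency, Hz (example)
    code_length = 1023               # chips in one C/A code
    code_period = 1e-3               # one C/A code lasts 1 ms
    samples_per_code = sampling_freq * code_period   # = 16368.0 samples
    n_codes_integrate = 8            # integrate over 8 code periods (8 ms)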
diff --git a/peregrine/short_set.py b/peregrine/short_set.py index bfa9f2a..eb22e5f 100644 --- a/peregrine/short_set.py +++ b/peregrine/short_set.py @@ -48,12 +48,12 @@ def resolve_ms_integers(obs_pr, pred_pr, prn_ref, disp = True): print "Resolving millisecond integers:" for prn, pr in obs_pr.iteritems(): - pr_int_est = (pred_pr[prn] - pr) / gps.code_wavelength + pr_int_est = (pred_pr[prn] - pr) / gps.l1ca_code_wavelength pr_int = round(pr_int_est) if abs(pr_int - pr_int_est) > 0.15: logger.warn("Pseudorange integer for PRN %2d is %.4f" % ( prn + 1, pr_int_est) + ", which isn't very close to an integer.") - pr += pr_int * gps.code_wavelength + pr += pr_int * gps.l1ca_code_wavelength obs_pr[prn] = pr if disp: print ("PRN %2d: pred pseudorange = %9.3f km, obs = %9.3f, " + \ @@ -78,9 +78,9 @@ def fill_remainder(n_ms): return [k for k,v in itertools.groupby(sorted(hs))] def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False, coherent = 0, nav_bit_hypoth = None): - from swiftnav.correlate import track_correlate_ + from swiftnav.correlate import track_correlate code_freq_shift = (doppler / gps.l1) * gps.chip_rate - samples_per_chip = settings.samplingFreq / (gps.chip_rate + code_freq_shift) + samples_per_chip = settings.freq_profile['sampling_freq'] / (gps.chip_rate + code_freq_shift) samples_per_code = samples_per_chip * gps.chips_per_code numSamplesToSkip = round(code_phase * samples_per_chip) remCodePhase = (1.0 * numSamplesToSkip / samples_per_chip) - code_phase @@ -94,12 +94,14 @@ def long_correlation(signal, ca_code, code_phase, doppler, settings, plot=False, costas_q = 0.0 for loopCnt in range(n_ms): rawSignal = signal[numSamplesToSkip:]#[:blksize_] - E, P, L, blksize, remCodePhase, remCarrPhase = track_correlate_( + E, P, L, blksize, remCodePhase, remCarrPhase = track_correlate( rawSignal, + 1023, # Chips to correlate code_freq_shift + gps.chip_rate, remCodePhase, - doppler + settings.IF, - remCarrPhase, ca_code, settings.samplingFreq) + doppler + settings.freq_profile['GPS_L1_IF'], + remCarrPhase, ca_code, settings.freq_profile['sampling_freq'], + gps.L1CA) I_E = E.real Q_E = E.imag @@ -149,12 +151,10 @@ def refine_ob(signal, acq_result, settings, print_results = True, return_sweeps # TODO: Fit code phase results for better resolution from peregrine.include.generateCAcode import caCodes from scipy import optimize as opt - samples_per_chip = settings.samplingFreq / gps.chip_rate + samples_per_chip = settings.freq_profile['sampling_freq'] / gps.chip_rate samples_per_code = samples_per_chip * gps.chips_per_code # Get a vector with the C/A code sampled 1x/chip ca_code = caCodes[acq_result.prn] - # Add wrapping to either end to be able to do early/late - ca_code = np.concatenate(([ca_code[1022]],ca_code,[ca_code[0]])) dopp_offset_search = 100 # Hz away from acquisition code_offsets = np.arange(-1,1, 1.0 / 16 / 2) @@ -296,14 +296,14 @@ def refine_obs(signal, acq_results, settings, return obs_cp, obs_dopp -def predict_observables(prior_traj, prior_datetime, prns, ephem, window): +def predict_observables(prior_traj, prior_datetime, prns, ephem, window, settings): from datetime import timedelta from numpy.linalg import norm from numpy import dot """Given a list of PRNs, a set of ephemerides, a nominal capture time (datetime) and a and a time window (seconds), compute the ranges and dopplers for each satellite at 1ms shifts.""" - timeres = 50 * gps.code_period # Might be important to keep this an integer number of code periods + timeres = 50 * settings.code_period # 
Might be important to keep this an integer number of code periods t0 = prior_datetime - timedelta(seconds=window / 2.0) ranges = {} dopplers = {} @@ -360,11 +360,11 @@ def minimize_doppler_error(obs_dopp, times, pred_dopp, plot = False): def plot_expected_vs_measured(acqed_prns, prn_ref, obs_pr, obs_dopp, prior_traj, t_better, - ephem): + ephem, settings): import matplotlib.pyplot as plt # Compute predicted observables around this new estimate of capture time - pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_better, acqed_prns, ephem, 20) + pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_better, acqed_prns, ephem, 20, settings) pred_pr = pseudoranges_from_ranges(pred_ranges, prn_ref) ax = plt.figure(figsize=(12,6)).gca() @@ -660,7 +660,7 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, # Improve the time part of the prior estimate by minimizing doppler residuals pred_ranges, pred_dopplers, times = predict_observables(prior_traj, t_prior, acqed_prns, ephem, - 30) + 30, settings) i, t_better = minimize_doppler_error(obs_dopp, times, pred_dopplers, plot = plot) @@ -671,7 +671,7 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, print "By minimizing doppler residuals, adjusted the prior time and position by %.6s seconds, %.3f km" % ( delta_t, delta_r/ 1e3) pred_ranges, pred_dopplers, times = predict_observables( - prior_traj, t_better, acqed_prns, ephem, 1e-9) + prior_traj, t_better, acqed_prns, ephem, 1e-9, settings) pred_pr_t_better = {prn: pred_ranges[prn][0] for prn in acqed_prns} @@ -681,7 +681,7 @@ def postprocess_short_samples(signal, prior_trajectory, t_prior, settings, if plot: plot_expected_vs_measured(acqed_prns, prn_ref, obs_pr, obs_dopp, - prior_traj, t_better, ephem) + prior_traj, t_better, ephem, settings) # Perform PVT navigation solution r_sol, t_sol, los, tot, residuals = pt_solve(r_better, t_better, obs_pr, diff --git a/peregrine/warm_start.py b/peregrine/warm_start.py index be2c005..2c2accc 100755 --- a/peregrine/warm_start.py +++ b/peregrine/warm_start.py @@ -91,9 +91,12 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings, samplingFreq = settings.freq_profile['sampling_freq'] - a = acquisition.Acquisition(signal, samplingFreq, + a = acquisition.Acquisition(gps.L1CA, + signal, + samplingFreq, settings.freq_profile['GPS_L1_IF'], samplingFreq * settings.code_period, + settings.code_length, n_codes_integrate=n_codes_integrate, wisdom_file = wiz_file) # Attempt to acquire both the sats we predict are visible @@ -102,7 +105,7 @@ def warm_start(signal, t_prior, r_prior, v_prior, ephem, settings, prns = pred + notup, doppler_priors = pred_dopp + [0 for p in notup], doppler_search = settings.rxFreqTol * gps.l1, - show_progress = True, multi = True) + progress_bar_output='stderr', multi = True) nacq_results = acq_results[len(pred):] acq_results = acq_results[:len(pred)]
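As a worked note on the millisecond-integer resolution used by resolve_ms_integers above (gps.l1ca_code_wavelength is the distance light travels in one 1 ms code period; the pseudorange values below are hypothetical):

    c = 299792458.0                              # m/s
    l1ca_code_wavelength = c * 1e-3              # ~299792.458 m per code repetition
    pred_pr = 21513678.0                         # predicted pseudorange, m
    obs_pr = 213456.7                            # observed sub-millisecond part, m
    pr_int = round((pred_pr - obs_pr) / l1ca_code_wavelength)   # 71.0
    full_pr = obs_pr + pr_int * l1ca_code_wavelength            # ~21498721 m
    # full_pr now agrees with pred_pr to well within one code length (~300 km),
    # so the integer number of code repetitions has been recovered.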