Commit

Remove russian crap, and simplify downloading
haraschax committed Feb 6, 2025
1 parent 28cb5d9 commit 8a8e876
Showing 2 changed files with 17 additions and 106 deletions.
24 changes: 7 additions & 17 deletions laika/astro_dog.py
@@ -8,7 +8,7 @@
from .helpers import ConstellationId, get_constellation, get_closest, get_el_az, TimeRangeHolder
from .ephemeris import Ephemeris, EphemerisType, GLONASSEphemeris, GPSEphemeris, PolyEphemeris, parse_sp3_orbits, parse_rinex_nav_msg_gps, \
parse_rinex_nav_msg_glonass
from .downloader import download_orbits_gps, download_orbits_russia_src, download_nav, download_ionex, download_dcb, download_prediction_orbits_russia_src
from .downloader import download_orbits_gps, download_nav, download_ionex, download_dcb
from .downloader import download_cors_station
from .trop import saast
from .iono import IonexMap, parse_ionex, get_slant_delay
@@ -212,27 +212,17 @@ def download_parse_orbit(self, gps_time: GPSTime, skip_before_epoch=None) -> dic
# Download multiple days to be able to polyfit at the start-end of the day
time_steps = [gps_time - SECS_IN_DAY, gps_time, gps_time + SECS_IN_DAY]
with ThreadPoolExecutor() as executor:
futures_other = [executor.submit(download_orbits_russia_src, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]
futures_gps = None
if ConstellationId.GPS in self.valid_const:
futures_gps = [executor.submit(download_orbits_gps, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]

files_other = [self.fetch_count(f.result()) for f in futures_other if f.result()]
ephems_other = parse_sp3_orbits(files_other, self.valid_const, skip_before_epoch)
files_gps = [self.fetch_count(f.result()) for f in futures_gps if f.result()] if futures_gps else []
ephems_us = parse_sp3_orbits(files_gps, self.valid_const, skip_before_epoch)

return {k: ephems_other.get(k, []) + ephems_us.get(k, []) for k in set(list(ephems_other.keys()) + list(ephems_us.keys()))}
futures = [executor.submit(download_orbits_gps, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]
files = [self.fetch_count(f.result()) for f in futures if f.result()] if futures else []
ephems = parse_sp3_orbits(files, self.valid_const, skip_before_epoch)
return ephems
#{k: ephems_us.get(k, []) for k in set(list([]) + list(ephems_us.keys()))}

def download_parse_prediction_orbit(self, gps_time: GPSTime):
assert EphemerisType.ULTRA_RAPID_ORBIT in self.valid_ephem_types
skip_until_epoch = gps_time - 2 * SECS_IN_HR

result = self.fetch_count(download_prediction_orbits_russia_src(gps_time, self.cache_dir))
if result is not None:
result = [result]
elif ConstellationId.GPS in self.valid_const:
# Slower fallback. Russia src prediction orbits are published from 2022
if ConstellationId.GPS in self.valid_const:
result = [self.fetch_count(download_orbits_gps(t, self.cache_dir, self.valid_ephem_types)) for t in [gps_time - SECS_IN_DAY, gps_time]]
if result is None:
return {}
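For reference, a minimal usage sketch of how both orbit paths above are reached through the public AstroDog interface. This is an illustration only: it assumes the constructor still accepts valid_ephem_types with EphemerisType.all_orbits() as its default, plus network access to the CDDIS mirrors.

from datetime import datetime
from laika import AstroDog
from laika.ephemeris import EphemerisType
from laika.gps_time import GPSTime

# Sketch only: both orbit paths now resolve to download_orbits_gps (CDDIS products).
dog = AstroDog(valid_ephem_types=EphemerisType.all_orbits())
gps_time = GPSTime.from_datetime(datetime(2025, 2, 1))

orbits = dog.download_parse_orbit(gps_time)                  # SP3 files for day-1, day, day+1
predictions = dog.download_parse_prediction_orbit(gps_time)  # ultra-rapid predictions, now via download_orbits_gps only

Both calls are expected to return dicts of ephemerides keyed by satellite PRN, as produced by parse_sp3_orbits.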
99 changes: 10 additions & 89 deletions laika/downloader.py
@@ -16,7 +16,7 @@

from laika.ephemeris import EphemerisType
from .constants import SECS_IN_HR, SECS_IN_DAY, SECS_IN_WEEK
from .gps_time import GPSTime, tow_to_datetime
from .gps_time import GPSTime
from .helpers import ConstellationId

dir_path = os.path.dirname(os.path.realpath(__file__))
@@ -322,24 +322,6 @@ def download_nav(time: GPSTime, cache_dir, constellation: ConstellationId):
folder_and_filenames, cache_dir+'hourly_nav/', compression, overwrite=True)


def download_orbits_gps_cod0(time, cache_dir, ephem_types):
url_bases = (
mirror_url(CDDIS_BASE_URL, '/gnss/products/'),
)

if EphemerisType.ULTRA_RAPID_ORBIT not in ephem_types:
# TODO: raise error here
return None

tm = tow_to_datetime(time.tow, time.week).timetuple()
doy = str(tm.tm_yday).zfill(3)
filename = f"COD0OPSULT_{tm.tm_year}{doy}0000_02D_05M_ORB.SP3"
# TODO: add hour management

folder_path = "%i/" % time.week
folder_file_names = [(folder_path, filename)]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'cddis_products/', compression='.gz')

def download_orbits_gps(time, cache_dir, ephem_types):
url_bases = (
mirror_url(CDDIS_BASE_URL, '/gnss/products/'),
@@ -369,74 +351,14 @@ def download_orbits_gps(time, cache_dir, ephem_types):
# Download filenames in order of quality. Final -> Rapid -> Ultra-Rapid(newest first)
if EphemerisType.FINAL_ORBIT in ephem_types and GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_WEEK:
filenames.extend(ephem_strs[EphemerisType.FINAL_ORBIT])
if EphemerisType.RAPID_ORBIT in ephem_types:
if EphemerisType.RAPID_ORBIT in ephem_types and GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_DAY:
filenames.extend(ephem_strs[EphemerisType.RAPID_ORBIT])
if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
filenames.extend(ephem_strs[EphemerisType.ULTRA_RAPID_ORBIT])

folder_file_names = [(folder_path, filename) for filename in filenames]
ret = download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'cddis_products/', compression=compression)
if ret is not None:
return ret

# fallback to COD0 Ultra Rapid Orbits
return download_orbits_gps_cod0(time, cache_dir, ephem_types)


def download_prediction_orbits_russia_src(gps_time, cache_dir):
# Download single file that contains Ultra_Rapid predictions for GPS, GLONASS and other constellations
t = gps_time.as_datetime()
# Files exist starting at 29-01-2022
if t < datetime(2022, 1, 29):
return None
url_bases = (
mirror_url(GLONAS_IAC_BASE_URL, '/MCC/PRODUCTS/'),
)
folder_path = t.strftime('%y%j/ultra/')
file_prefix = "Stark_1D_" + t.strftime('%y%m%d')

# Predictions are 24H so previous day can also be used.
prev_day = (t - timedelta(days=1))
file_prefix_prev = "Stark_1D_" + prev_day.strftime('%y%m%d')
folder_path_prev = prev_day.strftime('%y%j/ultra/')

current_day = GPSTime.from_datetime(datetime(t.year, t.month, t.day))
# Ultra-Orbit is published in gnss-data-alt every 10th minute past the 5,11,17,23 hour.
# Predictions published are delayed by around 10 hours.
# Download latest file that includes gps_time with 20 minutes margin.:
if gps_time > current_day + 23.5 * SECS_IN_HR:
prev_day, current_day = [], [6, 12]
elif gps_time > current_day + 17.5 * SECS_IN_HR:
prev_day, current_day = [], [0, 6]
elif gps_time > current_day + 11.5 * SECS_IN_HR:
prev_day, current_day = [18], [0]
elif gps_time > current_day + 5.5 * SECS_IN_HR:
prev_day, current_day = [12, 18], []
else:
prev_day, current_day = [6, 12], []
# Example: Stark_1D_22060100.sp3
folder_and_file_names = [(folder_path, file_prefix + f"{h:02}.sp3") for h in reversed(current_day)] + \
[(folder_path_prev, file_prefix_prev + f"{h:02}.sp3") for h in reversed(prev_day)]
return download_and_cache_file_return_first_success(url_bases, folder_and_file_names, cache_dir+'russian_products/', raise_error=True)


def download_orbits_russia_src(time, cache_dir, ephem_types):
# Orbits from russian source. Contains GPS, GLONASS, GALILEO, BEIDOU
url_bases = (
mirror_url(GLONAS_IAC_BASE_URL, '/MCC/PRODUCTS/'),
)
t = time.as_datetime()
folder_paths = []
current_gps_time = GPSTime.from_datetime(datetime.utcnow())
filename = "Sta%i%i.sp3" % (time.week, time.dow)
if EphemerisType.FINAL_ORBIT in ephem_types and current_gps_time - time > 2 * SECS_IN_WEEK:
folder_paths.append(t.strftime('%y%j/final/'))
if EphemerisType.RAPID_ORBIT in ephem_types:
folder_paths.append(t.strftime('%y%j/rapid/'))
if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
folder_paths.append(t.strftime('%y%j/ultra/'))
folder_file_names = [(folder_path, filename) for folder_path in folder_paths]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'russian_products/')
return ret
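The downloader can also be driven directly. A minimal sketch, assuming CDDIS access is available and using /tmp/gnss/ as a scratch cache directory; the epoch is set one week back so the 3-day rapid-orbit age check above is satisfied.

from datetime import datetime
from laika.constants import SECS_IN_DAY
from laika.downloader import download_orbits_gps
from laika.ephemeris import EphemerisType
from laika.gps_time import GPSTime

# Sketch only: request rapid/ultra-rapid products for an epoch one week in the past.
t = GPSTime.from_datetime(datetime.utcnow()) - 7 * SECS_IN_DAY
sp3_path = download_orbits_gps(t, '/tmp/gnss/', [EphemerisType.RAPID_ORBIT, EphemerisType.ULTRA_RAPID_ORBIT])
print(sp3_path)  # path of the first product that downloads successfully, or None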


def download_ionex(time, cache_dir):
@@ -447,17 +369,16 @@ def download_ionex(time, cache_dir):
folder_path = t.strftime('%Y/%j/')
# Format date change
if time >= GPSTime(2238, 0.0):
filenames = [t.strftime('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX'),
t.strftime('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX')]
compression = '.gz'
filenames = [t.strftime('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX.gz'),
t.strftime('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX.gz'),
t.strftime("c2pg%j0.%yi.Z")]
else:
filenames = [t.strftime("codg%j0.%yi"),
t.strftime("c1pg%j0.%yi"),
t.strftime("c2pg%j0.%yi")]
compression = '.Z'
filenames = [t.strftime("codg%j0.%yi.Z"),
t.strftime("c1pg%j0.%yi.Z"),
t.strftime("c2pg%j0.%yi.Z")]

folder_file_names = [(folder_path, f) for f in filenames]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'ionex/', compression=compression, raise_error=True)
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'ionex/', raise_error=True)
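As a worked example of the new filename handling, where the compression suffix is now part of each candidate name instead of a separate compression argument, these are the candidates generated for a date after the GPS week 2238 format change (plain strftime; the date is chosen only for illustration).

from datetime import datetime

# Sketch: IONEX filename candidates for 2023-06-01 (day of year 152), per the code above.
t = datetime(2023, 6, 1)
print(t.strftime('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX.gz'))  # COD0OPSFIN_20231520000_01D_01H_GIM.INX.gz
print(t.strftime('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX.gz'))  # COD0OPSRAP_20231520000_01D_01H_GIM.INX.gz
print(t.strftime('c2pg%j0.%yi.Z'))                           # c2pg1520.23i.Z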


def download_dcb(time, cache_dir):
