From 7b76f3c7012334362d49812e822429edf059bfad Mon Sep 17 00:00:00 2001 From: Salim Maosumi Date: Tue, 22 Oct 2024 23:48:06 +0000 Subject: [PATCH] commit all files --- .gitignore | 57 + LICENSE | 202 +++ README.md | 104 ++ inp/config.yaml | 196 ++ inp/config_IGS2R03FIN_05M.yaml | 206 +++ inp/config_test_final.yaml | 231 +++ inp/config_test_ultra.yaml | 231 +++ logging.yaml | 40 + requirements.txt | 5 + rocs/__init__.py | 0 rocs/__main__.py | 75 + rocs/checkutils.py | 112 ++ rocs/combine_orbits.py | 780 ++++++++ rocs/coordinates.py | 195 ++ rocs/eclipse.py | 291 +++ rocs/formatters.py | 26 + rocs/gpscal.py | 359 ++++ rocs/helmert.py | 1340 ++++++++++++++ rocs/iau.py | 725 ++++++++ rocs/io_data.py | 1895 ++++++++++++++++++++ rocs/orbits.py | 3040 ++++++++++++++++++++++++++++++++ rocs/planets.py | 264 +++ rocs/report.py | 1305 ++++++++++++++ rocs/rotation.py | 115 ++ rocs/settings.py | 163 ++ rocs/setup_logging.py | 28 + setup.py | 43 + 27 files changed, 12028 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 inp/config.yaml create mode 100644 inp/config_IGS2R03FIN_05M.yaml create mode 100644 inp/config_test_final.yaml create mode 100644 inp/config_test_ultra.yaml create mode 100644 logging.yaml create mode 100644 requirements.txt create mode 100755 rocs/__init__.py create mode 100755 rocs/__main__.py create mode 100755 rocs/checkutils.py create mode 100755 rocs/combine_orbits.py create mode 100755 rocs/coordinates.py create mode 100755 rocs/eclipse.py create mode 100755 rocs/formatters.py create mode 100755 rocs/gpscal.py create mode 100755 rocs/helmert.py create mode 100755 rocs/iau.py create mode 100755 rocs/io_data.py create mode 100755 rocs/orbits.py create mode 100755 rocs/planets.py create mode 100755 rocs/report.py create mode 100755 rocs/rotation.py create mode 100755 rocs/settings.py create mode 100755 rocs/setup_logging.py create mode 100644 setup.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4240eb9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,57 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +venv/ +ENV/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Logs +logs/ +*.log + +# Local environment variables +.env +*.env + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# System files +.DS_Store +Thumbs.db + +# Backup files +*.bak +*.swp +*.swo + diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..8bf246d --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 Geoscience Australia + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c9c60f2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,104 @@
+# ROCS: Robust Orbit Combination Software
+#### ROCS version 1.0
+
+## Overview
+
+The *Robust Orbit Combination Software - ROCS* is a processing package developed in Python 3 to combine orbits (and, in future, clocks) from different analysis centres. The algorithm used by the software is mainly based on the legacy version of the IGS ACC combination software, which was developed in Fortran, Perl and shell scripts by the Astronomical Institute of the University of Bern, Delft University of Technology, and the Geodetic Survey of Canada, NRCan (see Beutler, Kouba and Springer: Combining the orbits of the IGS Analysis Centers; Bulletin Geodesique; 1995).
+
+This modern implementation of the combination software enables the combination of multi-GNSS orbits, and incorporates more complex techniques for weighting the Analysis Centre solutions for different constellations/blocks/satellites. For details of the processing algorithms, please refer to: Zajdel, R., Masoumi, S., Sośnica, K. et al. Combination and SLR validation of IGS Repro3 orbits for ITRF2020. J Geod 97, 87 (2023). https://doi.org/10.1007/s00190-023-01777-3.
+
+
+## Configuration
+Most of the configuration can be controlled by a YAML file. Examples of config.yaml files are provided in the ```inp/``` directory.
+
+## Directory structure
+
+The tree structure of the modules is as below:
+
+```
++-- rocs
+|   +-- __main__.py       --> main module executed when package is run
+|   +-- combine_orbits.py --> main module to execute orbit combination
+|   +-- orbits.py         --> orbit combination module
+|   +-- helmert.py        --> forward and inverse Helmert transformation
+|   +-- checkutils.py     --> consistency checks for sizes and types
+|   +-- coordinates.py    --> coordinate transformations
+|   +-- eclipse.py        --> satellite eclipse calculations
+|   +-- formatters.py     --> custom formatters for logging
+|   +-- gpscal.py         --> time conversions
+|   +-- iau.py            --> International Astronomical Union models
+|   +-- io_data.py        --> input/output of data files
+|   +-- planets.py        --> planetary position calculations
+|   +-- report.py         --> reporting and creating summary files
+|   +-- rotation.py       --> calculation of rotation matrix and its derivative
+|   +-- settings.py       --> settings and configurations
+|   +-- setup_logging.py  --> setup logging information
++-- inp                   --> directory including examples of configuration YAML files
++-- README.md             --> this readme file
++-- requirements.txt      --> list of software dependencies
++-- setup.py              --> package and distribution management
++-- logging.yaml          --> YAML file for logging
++-- LICENSE               --> software license
+```
+
+## How to install
+To install the software on Linux, simply navigate to where you would like to install the software on your machine, and clone the repository:
+
+```
+git clone https://github.com/yourusername/rocs.git
+cd rocs
+```
+
+Then install the required packages:
+
+```
+pip3 install -r requirements.txt
+```
+
+To be able to run the software from anywhere on your system, add the package to your ```$PYTHONPATH```:
+
+```
+export PYTHONPATH=/path/to/your/ROCS/:$PYTHONPATH
+```
+
+It is recommended that you add the above line to your shell initialization file (```~/.bashrc```, ```~/.cshrc```, etc.) so you do not need to run it every time you start a shell.
+
+
+## How to run
+
+After installing the software, you need to prepare your campaign before running the combination. To do this, follow these steps:
+
+- Create a configuration YAML file. You can choose any name and location for this file, as you will point to it in the command that you run. Examples of the configuration YAML file are given in the ```inp``` directory. Use one of these examples, and modify it to suit your preferences and environment (i.e. directory and file locations).
+- Create a directory for the submissions, and copy all the individual (Analysis Centre) solutions there. The path to this directory should be given in the ```subm_rootdir``` entry of the configuration YAML file created above.
+- Ensure the metadata and other required files indicated in the configuration YAML file exist in the indicated locations. These may include, e.g., the satellite metadata file, the EOP file, and the NANU summary file.
+- Ensure that the ```prod_rootdir``` entry given in the configuration file exists and is where you would like the products to be written to. Previous files in that location may be overwritten by the output files from running the software.
+
+Once you have prepared the campaign, you can run the program by simply executing the command below:
+
+```
+python3 -m rocs <gpsweek> <dow> [hr] -c <config_yaml>
+```
+
+where ```<gpsweek>``` is the GPS week, ```<dow>``` is the day of week (for the starting epoch of the combination), ```<hr>``` is the hour of the day (optional; only required for ultra-rapid combination), and ```<config_yaml>``` is the full path to your configuration YAML file.
+
+Running the package with the ```--help``` argument will show a simple help message:
+
+```
+python3 -m rocs --help
+
+usage: rocs [-h] [-c CONFIG_YAML] gpsweek dow [hr]
+
+Robust combination of orbit solutions
+
+positional arguments:
+  gpsweek               GPS week for processing
+  dow                   Day of week for processing
+  hr                    Starting hour for ultra-rapid combination (default:
+                        None)
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -c CONFIG_YAML, --config CONFIG_YAML
+                        YAML file containing configurations for combination
+                        (default: None)
+```
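+The combination can also be driven from Python rather than the shell, mirroring what ```rocs/__main__.py``` does. A minimal sketch — the week/day/hour values and the configuration path are hypothetical examples, not defaults:
+
+```
+from rocs.combine_orbits import combine_orbits
+from rocs.setup_logging import setup_logging
+import rocs.settings as settings
+
+# configure logging, then load a configuration YAML (hypothetical path)
+setup_logging()
+config = settings.Config('/path/to/your/config.yaml').config
+
+# GPS week 2290, day of week 3, starting hour 0 (example values)
+combine_orbits(2290, 3, 0, config)
+```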
diff --git a/inp/config.yaml b/inp/config.yaml
new file mode 100644
index 0000000..3f96356
--- /dev/null
+++ b/inp/config.yaml
@@ -0,0 +1,196 @@
+# Configuration file in YAML format
+#
+#
+# Processing configs
+process:
+
+    # verbose mode (normal/debugger)
+    verbose: normal
+
+# administrative information
+admin:
+
+    author: 'IGS AC Coordinator'
+    contact: 'acc@igs.org'
+
+# Orbit combination configs
+orbits:
+
+    # Analysis center contributions
+    ac_contribs:
+
+        # weighted centers:
+        weighted:
+            systems:
+                COD: [G,R,E]
+                ESA: [G,R,E]
+                GFZ: [G,R,E]
+                GRG: [G,R,E]
+                JPL: [G,R,E]
+                MIT: [G,R,E]
+                NGS: [G,R,E]
+                TUG: [G,R,E]
+                WHU: [G,R,E]
+            prns:
+            svns:
+
+        # unweighted centers (for comparison only)
+        unweighted:
+            systems:
+                IG2: [G]
+                IG3: [G]
+            prns:
+            svns:
+
+        # excluded centers
+        excluded:
+            systems:
+            prns:
+            svns:
+
+    # solution (FIN/RAP/ULT)
+    #solution: FIN
+
+    # sampling rate (None or an integer in seconds)
+    sampling: 900
+
+    # root directory for orbit submissions
+    subm_rootdir: /data/repro3/ac_subm_repro3
+
+    # root directory for saving the products
+    prod_rootdir: /data/repro3/products/IGS2R03GRE_15M
+
+    # path to satellite metadata file
+    sat_metadata_file: /data/repro3/metadata/igs_metadata_2114.snx
+
+    # Earth orientation parameters (EOP) source format
+    # (IERS_EOP14_C04/IERS_EOP_rapid/IGS_ERP2/None)
+    eop_format: IGS_ERP2
+
+    # path to the EOP data
+    eop_file: /data/repro3/metadata/igs_final.erp
+
+    # center weighting method (global/by_constellation/by_block/by_sat)
+    cen_wht_method: by_sat
+
+    # 
satellite weighting method (RMS_L1) + sat_wht_method: RMS_L1 + + # Transform orbits to a same reference frame? + # [Translations,Rotations,Scale] + rf_align: [False,True,False] + + # Root directory for reference frame combination summary files + # Needed for reference frame alignment + rf_rootdir: /data/repro3/products/sinex + + # Reference frame summary file name identifier + rf_name: IGS0R03SNX + + # Combination name/abbreviation (3 characters) + cmb_name: IGS + + # Combination version/solution identifier (0 to 9) + vid: 2 + + # Combination campaign specification + cmb_camp: R03 + + # Combination solution type identifier + cmb_sol: GRE + + # outlier removal (assess) settings + assess: + + # satellite rms test (auto/manual/strict) + sat_rms_tst: auto + + # coefficient for auto satellite rms test + coef_sat: 470.0 + + # threshold for manual satellite rms test for each satellite system (mm) + thresh_sat: + G: 15.0 + R: 25.0 + E: 15.0 + C: 50.0 + J: 100.0 + + # maximum number of outlier satellite rms for a center in order to keep + # being weighted + max_high_satrms: 5 + + # transformation parameters test (auto/manual/strict) + trn_tst: strict + + # manual transformation thresholds for translation (mm), rotation (mas) + # and scale (ppb) + thresh_trn: [null,0.3,null] + + # test for minimum number of centers for each satellite (strict/eased) + numcen_tst: eased + + # minimum number of centers for each satellite + min_numcen: 1 + + # maximum number of iterations for outlier removal + max_iter: 100 + + +# clock combination configs +clocks: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + COM: [G,C,J] + GFM: [G,C,J] + JAM: [ R,E,C,J] + SHM: [G, E, J] + TUM: [G,R,E,C,J] + WUM: [G,R,E ] + EMR: [G ] + EMX: [ R ] + ESA: [G,R ] + IAC: [ R ] + JPL: [G ] + MCC: [ R ] + MIT: [G ] + NGS: [G ] + SIO: [G ] + prns: + JAM: [G04] + GFM: [E01] + svns: + SHM: [R701,G048] + GFM: [E211] + + # unweighted centers (for comparison only) + unweighted: + systems: + COD: [G ] + COX: [ R ] + GFZ: [G,R ] + GFM: [R] + IGL: [R] + IGS: [G,E] + IGR: [G] + prns: + JAM: [G04,G28] + GFM: [E03] + svns: + GFM: [E214] + + # excluded centers + excluded: + systems: + COX: [R] + GFM: [C] + prns: + GFM: [J01,J02] + svns: + GFM: [E211] + diff --git a/inp/config_IGS2R03FIN_05M.yaml b/inp/config_IGS2R03FIN_05M.yaml new file mode 100644 index 0000000..6454999 --- /dev/null +++ b/inp/config_IGS2R03FIN_05M.yaml @@ -0,0 +1,206 @@ +# Configuration file in YAML format +# +# +# Processing configs +process: + + # verbose mode (normal/debugger) + verbose: normal + +# Orbit combination configs +orbits: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + COD: [G,R,E] + ESA: [G,R,E] + GFZ: [G,R,E] + GRG: [G,R,E] + JPL: [G,R,E] + MIT: [G,R,E] + NGS: [G,R,E] + TUG: [G,R,E] + WHU: [G,R,E] + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + IG2: [G] + IG3: [G] + IGF: [G] + IGQ: [G,R,E] + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + + ac_acronyms: + COD: "Center for Orbit Determination in Europe (CODE)" + ESA: "European Space Agency" + GFZ: "GeoForschungsZentrum Potsdam" + GRG: "Centre National d'Etudes Spatiales (CNES/CLS)" + JPL: "Jet Propulsion Laboratory" + MIT: "Massachusetts Institute of Technology" + NGS: "NOAA/National Geodetic Survey" + TUG: "Graz University of Technology" + WHU: "Wuhan University" + IGF: "IGS final operational orbit product" + IG2: "IGS second reprocessing 
(repro2) orbit product" + IG3: "IGS repro3 GPS-only orbit product by legacy software" + IGQ: "IGS repro3 multi-GNSS 15-minute orbit product" + + # solution (ULT/RAP/FIN) + sol_id: FIN + + # Campaign/Project specification + camp_id: R03 + + # Combination name/abbreviation (3 characters) + cmb_name: IGS + + # Combination version/solution identifier (0 to 9) + vid: 2 + + # sampling rate (None or an integer in seconds) + sampling: 300 + + # root directory for orbit submissions + subm_rootdir: /data/repro3/ac_subm_repro3 + + # root directory for saving the products + prod_rootdir: /data/repro3/products/IGS2R03FIN_05M + + # path to satellite metadata file + sat_metadata_file: /data/repro3/metadata/igs_satellite_metadata.snx + + # Earth orientation paramaters (EOP) source format + # (IERS_EOP14_C04/IERS_EOP_rapid/IGS_ERP2/None) + eop_format: IGS_ERP2 + + # path to the EOP data + eop_file: /data/repro3/metadata/igs_repro3.erp + + # center weighting method (global/by_constellation/by_block/by_sat) + cen_wht_method: by_sat + + # satellite weighting method (RMS_L1) + sat_wht_method: RMS_L1 + + # Transform orbits to a same reference frame? + # [Translations,Rotations,Scale] + rf_align: [False,True,False] + + # Root directory for reference frame combination summary files + # Needed for reference frame alignment + rf_rootdir: /data/repro3/sinex + + # Reference frame summary file name identifier + rf_name: IGS1R03SNX + + # UT1 differences applied as Z rotations + # The ERP apriori and observed files for any center listed below must be + # presented at rf_rootdir specified above with the long filename formats + # {CEN}{V}{SOL}APR_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP and + # {CEN}{V}{SOL}OBS_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP + ut1_rot: [MIT] + # format of the centers EOP file used for the ut1 corrections + ut1_eop_format : IGS_ERP2 + + # remove DV maneuvering satellites + rm_dv: True + + # nanu summary file + nanu_sumfile: /data/repro3/metadata/nanus_sum_1997_2020 + + # exception for removing DV maneuvering satellites + no_rm_dv: [COD] + + # outlier removal (assess) settings + assess: + + # satellite rms test (auto/manual/strict) + sat_rms_tst: auto + + # coefficient for auto satellite rms test + coef_sat: 470.0 + + # threshold for manual satellite rms test for each satellite system (mm) + thresh_sat: + G: 15.0 + R: 25.0 + E: 15.0 + C: 50.0 + J: 100.0 + + # maximum number of outlier satellite rms for a center in order to keep + # being weighted + max_high_satrms: 5 + + # transformation parameters test (auto/manual/strict) + trn_tst: strict + + # manual transformation thresholds for translation (mm), rotation (mas) + # and scale (ppb) + thresh_trn: [null,0.3,null] + + # test for minimum number of centers for each satellite (strict/eased) + numcen_tst: eased + + # minimum number of centers for each satellite + min_numcen: 1 + + # maximum number of iterations for outlier removal + max_iter: 100 + + # SP3 header information + sp3_header: + + # coordinate system + coord_sys: IGSR3 + + # information to put into the comments + + # combination type + cmb_type: REPRO3 + + # antex file + antex: igsR3_2135 + + # ocean tide model + oload: FES2014b + + +# clock combination configs +clocks: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + diff --git a/inp/config_test_final.yaml b/inp/config_test_final.yaml new 
file mode 100644 index 0000000..cd76ddc --- /dev/null +++ b/inp/config_test_final.yaml @@ -0,0 +1,231 @@ +# Configuration file in YAML format +# +# +# Processing configs +process: + + # verbose mode (INFO/DEBUG) + verbose: INFO + +# campaign specifications +campaign: + + author: 'IGS AC Coordinator' + contact: 'acc@igs.org' + + # solution (ULT/RAP/FIN) + sol_id: FIN + + # Campaign/Project specification + camp_id: TST + + # Combination name/abbreviation (3 characters) + cmb_name: IGS + + # Combination version/solution identifier (0 to 9) + vid : 0 + + # root directory for AC submissions + subm_rootdir: /data/acc_data/ac_subm/final + + # root directory for saving the products + prod_rootdir: /data/acc_combi/test/products + + # path to satellite metadata file + sat_metadata_file: /data/acc_data/metadata/igs_satellite_metadata.snx + + # Earth orientation paramaters (EOP) source format + # (IERS_EOP14_C04/IERS_EOP_rapid/IGS_ERP2) + eop_format: IERS_EOP_rapid + + # path to the EOP data + eop_file: /data/acc_data/erp/finals.data_2024 + + # Root directory for reference frame combination summary files + # Needed for reference frame alignment + rf_rootdir: /data/acc_data/metadata/sinex + + # Reference frame summary file name identifier + rf_name: IGS0OPSSNX + + # nanu summary file + nanu_sumfile: /data/acc_data/metadata/nanus_sum.2024 + + ac_acronyms: + COD: "Center for Orbit Determination in Europe (CODE)" + EMR: "Natural Resources Canada (NRC)" + ESA: "European Space Agency" + GFZ: "GeoForschungsZentrum Potsdam" + GRG: "Centre National d'Etudes Spatiales (CNES/CLS)" + JGX: "Geospatial Information Authority of Japan (GSI) and the Japan Aerospace Exploration Agency (JAXA)" + JPL: "Jet Propulsion Laboratory" + MIT: "Massachusetts Institute of Technology" + NGS: "NOAA/National Geodetic Survey" + SIO: "Scripps Institution of Oceanography (SIO)" + TUG: "Graz University of Technology" + USN: "The United States Naval Observatory (USNO)" + UXV: "USNO experimental GPS+GLONASS" + WHU: "Wuhan University" + BRD: "IGS Broadcast products" + IGC: "24-hour set of 4-10 hour predictions from the IGS Real-Time Service" + IGU: "IGS (GPS-only) ultra-rapid operational product" + IGV: "IGS (GPS+GLONASS) ultra-rapid experimental product, with GPS sourced from the IGS operational combination" + IGR: "IGS (GPS-only) rapid operational product" + IGF: "IGS final operational orbit product" + IG2: "IGS second reprocessing (repro2) orbit product" + IG3: "IGS repro3 GPS-only orbit product by legacy software" + IGQ: "IGS repro3 multi-GNSS 15-minute orbit product" + +# Orbit combination configs +orbits: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + COD: [G,R,E] + EMR: [G] + ESA: [G,R,E] + GFZ: [G,R,E] + GRG: [G,R,E] + JGX: [G,R,E] + MIT: [G,E] + NGS: [G] + SIO: [G] + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + JPL: [G] + ESC: [G,R,E] + IGF: [G] + IGL: [R] + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + + # sampling rate (integer in seconds) + sampling: 300 + + # center weighting method (global/by_constellation/by_block/by_sat) + cen_wht_method: by_sat + + # satellite weighting method (RMS_L1) + sat_wht_method: RMS_L1 + + # Transform orbits to a same reference frame? 
+ # [Translations,Rotations,Scale] + rf_align: [False,True,False] + + # UT1 differences applied as Z rotations + # The ERP apriori and observed files for any center listed below must be + # presented at rf_rootdir specified above with the long filename formats + # {CEN}{V}{SOL}APR_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP and + # {CEN}{V}{SOL}OBS_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP + ut1_rot: [] + + # remove DV maneuvering satellites + rm_dv: True + + # exception for removing DV maneuvering satellites + no_rm_dv: [COD] + + # outlier removal (assess) settings + assess: + + # satellite rms test (auto/manual/strict) + sat_rms_tst: auto + + # satellite rms test for unweighted centers (auto/manual/strict) + # Note: depending on how bad the unweighted centers are, having + # sat_rms_tst_unweighted on could result in long running time + # (e.g. if all the satellite solutions for an unweighted center + # are very bad, which means at least 5 iterations for removing + # the top five sats, assuming max_high_satrms is 5) + #sat_rms_tst_unweighted : auto + + # coefficient for auto satellite rms test + coef_sat: 470.0 + + # threshold for manual satellite rms test for each satellite system (mm) + thresh_sat: + G: 15.0 + R: 25.0 + E: 15.0 + C: 50.0 + J: 100.0 + + # maximum number of outlier satellite rms for a center in order to keep + # being weighted + max_high_satrms: 5 + + # transformation parameters test (auto/manual/strict) + trn_tst: strict + + # manual transformation thresholds for translation (mm), rotation (mas) + # and scale (ppb) + thresh_trn: [null,0.3,null] + + # test for minimum number of centers for each satellite (strict/eased) + numcen_tst: eased + + # minimum number of centers for each satellite + min_numcen: 1 + + # maximum number of iterations for outlier removal + max_iter: 100 + + # SP3 header information + sp3_header: + + # coordinate system + coord_sys: IGS20 + + # information to put into the comments + + # combination type + cmb_type: FINAL + + # clock source + clk_src: cmb + + # antex file + antex: IGS20_2309 + + # ocean tide model + oload: FES2014b + + +# clock combination configs +clocks: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + diff --git a/inp/config_test_ultra.yaml b/inp/config_test_ultra.yaml new file mode 100644 index 0000000..b200b99 --- /dev/null +++ b/inp/config_test_ultra.yaml @@ -0,0 +1,231 @@ +# Configuration file in YAML format +# +# +# Processing configs +process: + + # verbose mode (INFO/DEBUG) + verbose: INFO + +# campaign specifications +campaign: + + author: 'IGS AC Coordinator' + contact: 'acc@igs.org' + + # solution (ULT/RAP/FIN) + sol_id: ULT + + # Campaign/Project specification + camp_id: TST + + # Combination name/abbreviation (3 characters) + cmb_name: IGS + + # Combination version/solution identifier (0 to 9) + vid : 0 + + # root directory for AC submissions + subm_rootdir: /data/acc_data/ac_subm/ultra + + # root directory for saving the products + prod_rootdir: /data/acc_combi/test/products + + # path to satellite metadata file + sat_metadata_file: /data/acc_data/metadata/igs_satellite_metadata.snx + + # Earth orientation paramaters (EOP) source format + # (IERS_EOP14_C04/IERS_EOP_rapid/IGS_ERP2) + eop_format: IERS_EOP_rapid + + # path to the EOP data + eop_file: /data/acc_data/erp/finals.daily + + # Root directory for reference frame 
combination summary files + # Needed for reference frame alignment + #rf_rootdir: /data/repro3/sinex + + # Reference frame summary file name identifier + #rf_name: IGS1R03SNX + + # nanu summary file + nanu_sumfile: /data/acc_data/metadata/nanus_sum.2024 + + ac_acronyms: + COD: "Center for Orbit Determination in Europe (CODE)" + EMR: "Natural Resources Canada (NRC)" + ESA: "European Space Agency" + GFZ: "GeoForschungsZentrum Potsdam" + GRG: "Centre National d'Etudes Spatiales (CNES/CLS)" + JGX: "Geospatial Information Authority of Japan (GSI) and the Japan Aerospace Exploration Agency (JAXA)" + JPL: "Jet Propulsion Laboratory" + MIT: "Massachusetts Institute of Technology" + NGS: "NOAA/National Geodetic Survey" + SIO: "Scripps Institution of Oceanography (SIO)" + TUG: "Graz University of Technology" + USN: "The United States Naval Observatory (USNO)" + UXV: "USNO experimental GPS+GLONASS" + WHU: "Wuhan University" + BRD: "IGS Broadcast products" + IGC: "24-hour set of 4-10 hour predictions from the IGS Real-Time Service" + IGU: "IGS (GPS-only) ultra-rapid operational product" + IGV: "IGS (GPS+GLONASS) ultra-rapid experimental product, with GPS sourced from the IGS operational combination" + IGR: "IGS (GPS-only) rapid operational product" + IGF: "IGS final operational orbit product" + IG2: "IGS second reprocessing (repro2) orbit product" + IG3: "IGS repro3 GPS-only orbit product by legacy software" + IGQ: "IGS repro3 multi-GNSS 15-minute orbit product" + +# Orbit combination configs +orbits: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + COD: [G,R,E] + EMR: [G,R] + ESA: [G,R] + GFZ: [G,R,E] + GRG: [E] + WHU: [G,R,E] + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + #BRD: [G] + GRG: [G] + JGX: [G,R,E] + SIO: [G] + USN: [G] + #WHU: [E] + IGV: [G,R] + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + + # sampling rate (integer in seconds) + sampling: 300 + + # center weighting method (global/by_constellation/by_block/by_sat) + cen_wht_method: by_sat + + # satellite weighting method (RMS_L1) + sat_wht_method: RMS_L1 + + # Transform orbits to a same reference frame? + # [Translations,Rotations,Scale] + rf_align: [False,False,False] + + # UT1 differences applied as Z rotations + # The ERP apriori and observed files for any center listed below must be + # presented at rf_rootdir specified above with the long filename formats + # {CEN}{V}{SOL}APR_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP and + # {CEN}{V}{SOL}OBS_{YYYYDDDHHMM}_{LEN}_{SMP}_ERP.ERP + ut1_rot: [] + + # remove DV maneuvering satellites + rm_dv: True + + # exception for removing DV maneuvering satellites + no_rm_dv: [IGV] + + # outlier removal (assess) settings + assess: + + # satellite rms test (auto/manual/strict) + sat_rms_tst: auto + + # satellite rms test for unweighted centers (auto/manual/strict) + # Note: depending on how bad the unweighted centers are, having + # sat_rms_tst_unweighted on could result in long running time + # (e.g. 
if all the satellite solutions for an unweighted center + # are very bad, which means at least 5 iterations for removing + # the top five sats, assuming max_high_satrms is 5) + #sat_rms_tst_unweighted : auto + + # coefficient for auto satellite rms test + coef_sat: 470.0 + + # threshold for manual satellite rms test for each satellite system (mm) + thresh_sat: + G: 15.0 + R: 25.0 + E: 15.0 + C: 50.0 + J: 100.0 + + # maximum number of outlier satellite rms for a center in order to keep + # being weighted + max_high_satrms: 5 + + # transformation parameters test (auto/manual/strict) + trn_tst: strict + + # manual transformation thresholds for translation (mm), rotation (mas) + # and scale (ppb) + thresh_trn: [null,0.3,null] + + # test for minimum number of centers for each satellite (strict/eased) + numcen_tst: eased + + # minimum number of centers for each satellite + min_numcen: 1 + + # maximum number of iterations for outlier removal + max_iter: 100 + + # SP3 header information + sp3_header: + + # coordinate system + coord_sys: IGS20 + + # information to put into the comments + + # combination type + cmb_type: ULTRA RAPID + + # clock source + clk_src: COD + + # antex file + antex: IGS20_2290 + + # ocean tide model + oload: FES2014b + + +# clock combination configs +clocks: + + # Analysis center contributions + ac_contribs: + + # weighted centers: + weighted: + systems: + prns: + svns: + + # unweighted centers (for comparison only) + unweighted: + systems: + prns: + svns: + + # excluded centers + excluded: + systems: + prns: + svns: + diff --git a/logging.yaml b/logging.yaml new file mode 100644 index 0000000..5e97c19 --- /dev/null +++ b/logging.yaml @@ -0,0 +1,40 @@ +version: 1 + +disable_existing_loggers: False + +formatters: + Brief: + format: '%(message)s' + Detailed: + format: "%(asctime)s - [%(levelname)s] In %(filename)s at line %(lineno)s:\n%(message)s\n" + Custom: + (): rocs.formatters.MixedFormatter + + +handlers: + console: + class: logging.StreamHandler + level: DEBUG + formatter: Custom + stream: ext://sys.stdout + + info_file_handler: + class: logging.FileHandler + level: DEBUG + formatter: Custom + filename: info.log + encoding: utf8 + mode: w + + error_file_handler: + class: logging.FileHandler + level: ERROR + formatter: Detailed + filename: errors.log + encoding: utf8 + mode: w + +root: + level: INFO + handlers: [error_file_handler, console, info_file_handler] + propagate: no diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..468810a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +scipy +argparse +datetime +numpy +pyyaml diff --git a/rocs/__init__.py b/rocs/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/rocs/__main__.py b/rocs/__main__.py new file mode 100755 index 0000000..f1d677b --- /dev/null +++ b/rocs/__main__.py @@ -0,0 +1,75 @@ +import argparse +import logging +import time +from rocs.combine_orbits import combine_orbits +from rocs.setup_logging import setup_logging +import rocs.settings as settings + + +def main(): + + # Parse command line arguments + parser = argparse.ArgumentParser(prog='rocs', description = + "Robust combination of orbit solutions", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('gpsweek',metavar='gpsweek',nargs=1, + help='GPS week for processing') + parser.add_argument('dow',metavar='dow',nargs=1, + help='Day of week for processing') + parser.add_argument('hr',metavar='hr',nargs='?', + help='Starting hour for ultr-rapid combination') + 
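    # Example invocations (the GPS week/day values and config paths below are
    # illustrative only):
+    #   python3 -m rocs 2290 3 -c inp/config_test_final.yaml
+    #   python3 -m rocs 2290 3 18 -c inp/config_test_ultra.yaml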
parser.add_argument('-c','--config',nargs=1, dest = 'config_yaml',
+            help = ('YAML file containing configurations for combination'))
+    args = parser.parse_args()
+
+    # GPS week, day of week and hour
+    gpsweek = int(args.gpsweek[0])
+    dow = int(args.dow[0])
+    if args.hr is not None:
+        hr = int(args.hr)
+    else:
+        hr = 0
+
+    # Setup the configurations for combination by reading the yaml file
+    if args.config_yaml is not None:
+        config_yaml = str(args.config_yaml[0])
+        config = settings.Config(config_yaml).config
+    else:
+        # if no config file specified, use the defaults
+        config_yaml = None
+        config = settings.Config().config
+
+    # Setup the logging using logging.yaml before any messages are logged
+    setup_logging()
+    logger = logging.getLogger()
+
+    # verbose mode
+    verbose = config['process']['verbose']
+    allowed_verbose = ['INFO','DEBUG']
+    if verbose not in allowed_verbose:
+        logger.error(f"\nVerbose mode must be one of {allowed_verbose}\n",
+                     stack_info=True)
+        raise ValueError(f"Verbose mode {verbose} not recognized!")
+    logger.setLevel(verbose)
+
+    logger.debug("\nthis is debug from main")
+    logger.info("\nthis is info from main")
+
+    # time measurements
+    pc0 = time.perf_counter()
+    pt0 = time.process_time()
+
+    # call orbit combination
+    combine_orbits(gpsweek,dow,hr,config)
+
+    # time measurements
+    pc1 = time.perf_counter() - pc0
+    pt1 = time.process_time() - pt0
+    logger.info(f"Performance counter spent: {pc1}")
+    logger.info(f"Process time spent: {pt1}")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/rocs/checkutils.py b/rocs/checkutils.py
new file mode 100755
index 0000000..55040df
--- /dev/null
+++ b/rocs/checkutils.py
@@ -0,0 +1,112 @@
+# Module for checking utilities
+
+import numpy as np
+import numbers
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+def check_coords(coords,ncol=3,minrows=3):
+
+    # Check the input coordinate system
+    #
+    # Input:
+    #   coords: supposed to be an m by 3 array-like; where m is the number of
+    #           points,
+    #           and the three columns are for x,y and z.
+ # ncol: number of column expected (default 3 for x,y,z) + # minrows: minimum number of rows expected (default 3) + + if (np.ndim(coords) != 2): + logger.error("The input array must be m by 3 where m is the " + "number of points, and the columns relate to " + "x,y and z",stack_info=True) + raise ValueError("The input array must be m by 3 where m is the " + "number of points, and the columns relate to " + "x,y and z") + + if (np.shape(coords)[1] != ncol): + logger.error(f"The input array must have exactly {ncol} columns", + stack_info=True) + raise ValueError(f"The input array must have exactly {ncol} columns") + + if (np.shape(coords)[0] < minrows): + logger.error(f"The input array must contain at least {minrows} rows", + stack_info=True) + raise ValueError(f"The input array must contain at least {minrows} " + "rows") + + +def check_scalar(c): + + # Check the input scalar + + # Input: + # c: supposed to be a real scalar + + if np.ndim(c) != 0: + logger.error("The input attribute must be a scalar",stack_info=True) + raise ValueError("The input attribute must be a scalar") + if not isinstance(c,numbers.Number): + logger.error("The input attribute must be a real number", + stack_info=True) + raise TypeError("The input attribute must be a real number") + if isinstance(c,complex): + logger.error("The input attribute cannot be a complex number", + stack_info=True) + raise TypeError("The input attribute cannot be a complex number") + + +def check_array(coords,m,n=None): + + # check the input 1- or 2-d array + # + # Input: + # coords: supposed to be a 1- or 2-d array of real numbers + # if n is specifed: + # m,n: dimensions of coords + # if n is not specified: + # m: length of coords + + # Check the size + if n is None: + if np.shape(coords) != (m,): + logger.error("The input attribute must be an array of length " + + str(m),stack_info=True) + raise ValueError("The input attribute must be an array of length " + + str(m)) + + # Check the type + for item in coords: + if not isinstance(item,numbers.Number): + logger.error("The input attribute can only contain numbers", + stack_info=True) + raise TypeError("The input attribute can only contain numbers") + if isinstance(item,complex): + logger.error("The input attribute cannot contain complex " + "numbers",stack_info=True) + raise TypeError("The input attribute cannot contain complex " + "numbers") + else: + if np.shape(coords) != (m,n): + logger.error("The input attribute must be a " + + str(m) + " by " + str(n) + " array",stack_info=True) + raise ValueError("The input attribute must be a " + + str(m) + " by " + str(n) + " array") + + # Check the type + for row in coords: + for item in row: + if not isinstance(item,numbers.Number): + logger.error("The input attribute can only contain " + "numbers",stack_info=True) + raise TypeError("The input attribute can only contain " + "numbers") + if isinstance(item,complex): + logger.error("The input attribute cannot contain complex " + "numbers",stack_info=True) + raise TypeError("The input attribute cannot contain " + "complex numbers") + + diff --git a/rocs/combine_orbits.py b/rocs/combine_orbits.py new file mode 100755 index 0000000..9ddcdc5 --- /dev/null +++ b/rocs/combine_orbits.py @@ -0,0 +1,780 @@ +# Full orbit combination + +import glob +import argparse +import logging +import os +import sys +import numpy as np +import datetime +import rocs.orbits as orbits +from rocs.gpscal import gpsCal +import rocs.settings as settings +import rocs.io_data as io_data +import rocs.checkutils as checkutils +from 
rocs.report import OrbitReport
+
+logger = logging.getLogger(__name__)
+
+def combine_orbits(gpsweek,dow,hr,config):
+
+    ## common campaign specifications
+
+    # author
+    author = config['campaign']['author']
+    if not isinstance(author,str):
+        logger.error("\nAuthor must be a string\n",stack_info=True)
+        raise TypeError(f"{author} not a string")
+
+    # contact
+    contact = config['campaign']['contact']
+    if not isinstance(contact,str):
+        logger.error("\nContact must be a string\n",stack_info=True)
+        raise TypeError(f"{contact} not a string")
+
+    # solution type identifier
+    sol_id = config['campaign']['sol_id']
+    allowed_sol = ['ULT','RAP','FIN']
+    if sol_id not in allowed_sol:
+        logger.error("\nSolution type identifier must be one of "
+                     f"{allowed_sol}\n",stack_info=True)
+        raise ValueError(f"{sol_id} not in {allowed_sol}")
+
+    if sol_id == 'ULT':
+        solution = 'ultra-rapid'
+    elif sol_id == 'RAP':
+        solution = 'rapid'
+    elif sol_id == 'FIN':
+        solution = 'final'
+
+    if solution == 'ultra-rapid':
+        len_data = '02D'
+    else:
+        len_data = '01D'
+
+    # campaign/project specification
+    camp_id = config['campaign']['camp_id']
+    allowed_camp = ['DEM','MGX','OPS','TST']
+    if camp_id not in allowed_camp and camp_id[0:1] != 'R':
+        logger.error("\nCampaign specification must be one of "
+                     f"{allowed_camp} or Rnn for Repro\n")
+        raise ValueError(f"{camp_id} not recognized!")
+
+    # combination name
+    cmb_name = config['campaign']['cmb_name']
+    if not isinstance(cmb_name,str):
+        logger.error("\nCombination name abbreviation "
+                     "must be a string\n",stack_info=True)
+        raise TypeError(f"{cmb_name} is not a string!")
+    if len(cmb_name) != 3:
+        logger.error("\nCombination name abbreviation must be a "
+                     "3-character string\n",stack_info=True)
+        raise ValueError(f"{cmb_name} has length {len(cmb_name)}!")
+
+    # version identifier for the combined orbit
+    vid = config['campaign']['vid']
+    if not isinstance(vid,int):
+        logger.error("\nCombination version identifier must be an integer\n",
+                     stack_info=True)
+        raise TypeError(f"Combination version identifier is of type "
+                        f"{type(vid)}!")
+    if vid < 0 or vid > 9:
+        logger.error("\nCombination version identifier must be in the "
+                     "range 0-9\n",stack_info=True)
+        raise ValueError(f"Combination version identifier {vid} is not in the "
+                         f"range 0-9!")
+
+    # information on whether to cut solutions at the start or end
+    cut_start = config['campaign']['cut_start']
+    if not isinstance(cut_start,int):
+        logger.error("\ncut_start must be an integer\n",
+                     stack_info=True)
+        raise TypeError(f"{cut_start} is of type {type(cut_start)}!")
+
+    cut_end = config['campaign']['cut_end']
+    if not isinstance(cut_end,int):
+        logger.error("\ncut_end must be an integer\n",
+                     stack_info=True)
+        raise TypeError(f"{cut_end} is of type {type(cut_end)}!")
+
+    # submissions root directory
+    subm_rootdir = config['campaign']['subm_rootdir']
+    if not isinstance(subm_rootdir,str):
+        logger.error("\nSubmission root directory must be specified "
+                     "as a string\n",stack_info=True)
+        raise TypeError(f"Submission root directory {subm_rootdir} is not "
+                        "a string!")
+
+    # products root directory
+    prod_rootdir = config['campaign']['prod_rootdir']
+    if not isinstance(prod_rootdir,str):
+        logger.error("\nProducts root directory must be specified as "
+                     "a string\n",stack_info=True)
+        raise TypeError(f"Products root directory {prod_rootdir} is not "
+                        "a string!")
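+    # For reference, the 'campaign' section consumed by the checks above and
+    # below corresponds to YAML of the following (abridged) shape; most values
+    # are taken from the sample configs in inp/, while cut_start/cut_end are
+    # assumed examples (integer seconds trimmed from the solution edges):
+    #   campaign:
+    #       author: 'IGS AC Coordinator'
+    #       contact: 'acc@igs.org'
+    #       sol_id: FIN
+    #       camp_id: TST
+    #       cmb_name: IGS
+    #       vid: 0
+    #       cut_start: 0      # assumed example
+    #       cut_end: 0        # assumed example
+    #       subm_rootdir: /data/acc_data/ac_subm/final
+    #       prod_rootdir: /data/acc_combi/test/products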
+    # satellite metadata file
+    sat_metadata_file = config['campaign']['sat_metadata_file']
+    if sat_metadata_file is not None and not isinstance(sat_metadata_file,str):
+        logger.error("\nPath to satellite metadata file must be specified as "
+                     "a string\n",stack_info=True)
+        raise TypeError(f"satellite metadata {sat_metadata_file} is not "
+                        "a string!")
+
+    # EOP format
+    eop_format = config['campaign']['eop_format']
+    allowed_eop_format = ['IERS_EOP14_C04','IERS_EOP_rapid','IGS_ERP2']
+    if eop_format is not None and eop_format not in allowed_eop_format:
+        logger.error(f"\neop_format must be one of {allowed_eop_format}\n",
+                     stack_info=True)
+        raise ValueError(f"eop_format {eop_format} not recognized!")
+
+    # EOP file
+    eop_file = config['campaign']['eop_file']
+    if eop_file is not None:
+        if not isinstance(eop_file,str):
+            logger.error("\nPath to EOP data filename "
+                         "must be a string\n",stack_info=True)
+            raise TypeError(f"{eop_file} is not a string! {type(eop_file)}")
+
+    # reference frame combination summary location
+    rf_rootdir = config['campaign']['rf_rootdir']
+    if rf_rootdir is not None:
+        if not isinstance(rf_rootdir,str):
+            logger.error("\nPath to reference frame combination summaries "
+                         "must be a string\n",stack_info=True)
+            raise TypeError(f"{rf_rootdir} is not a string!")
+
+    # reference frame combination summary filename
+    rf_name = config['campaign']['rf_name']
+    if rf_name is not None:
+        if not isinstance(rf_name,str):
+            logger.error("\nReference frame combination summary filename "
+                         "must be a string\n",stack_info=True)
+            raise TypeError(f"{rf_name} is not a string!")
+
+    # NANU summary file
+    nanu_sumfile = config['campaign']['nanu_sumfile']
+    if nanu_sumfile is not None:
+        if not isinstance(nanu_sumfile,str):
+            logger.error("\nNANU summary filename "
+                         "must be a string\n",stack_info=True)
+            raise TypeError(f"{nanu_sumfile} is not a string!")
+
+    # AC acronyms
+    ac_acronyms = config['campaign']['ac_acronyms']
+    if not isinstance(ac_acronyms,dict):
+        logger.error("\nac_acronyms must be a dict\n",
+                     stack_info=True)
+        raise TypeError("ac_acronyms is not of type dict")
+    for key in ac_acronyms:
+        if not isinstance(key,str):
+            logger.error("\nAC acronyms must be "
+                         "of type str\n",stack_info=True)
+            raise TypeError(f"AC acronym {key} is not a string!")
+        if not isinstance(ac_acronyms[key],str):
+            logger.error("\nAC acronym descriptions must be "
+                         "of type str\n",stack_info=True)
+            raise TypeError(f"AC acronym description {ac_acronyms[key]}"
+                            " is not a string!")
+
+    ## orbit combination options
+
+    # contributions to the orbit combination
+    ac_contribs_orbs = config['orbits']['ac_contribs']
+    if not isinstance(ac_contribs_orbs,dict):
+        logger.error("\nac_contribs_orbs must be a dict\n",
+                     stack_info=True)
+        raise TypeError(f"{ac_contribs_orbs} is not of type dict")
+    allowed_ac_contribs_keys = ['weighted','unweighted','excluded']
+    for key in ac_contribs_orbs:
+        if key not in allowed_ac_contribs_keys:
+            logger.error("\nThe keys in ac_contribs_orbs must be "
+                         f"one of {allowed_ac_contribs_keys}\n",stack_info=True)
+            raise ValueError(f"The key {key} not recognized!")
+        if not isinstance(ac_contribs_orbs[key],dict):
+            logger.error("\nThe values in ac_contribs_orbs must be "
+                         "of type dict\n",stack_info=True)
+            raise TypeError("There are non-dict values in "
+                            f"{ac_contribs_orbs}")
+        allowed_ac_contribs_keys1 = ['systems','prns','svns']
+        for key1 in ac_contribs_orbs[key]:
+            if key1 not in allowed_ac_contribs_keys1:
+                logger.error(f"\nThe keys in {ac_contribs_orbs[key]} "
+                             f"must be one of {allowed_ac_contribs_keys1}\n",
+                             stack_info=True)
+                raise ValueError(f"The key {key1} not recognized!")
+            if ac_contribs_orbs[key][key1] is not None:
+                for key2 in ac_contribs_orbs[key][key1]:
+                    if not isinstance(key2,str):
+                        logger.error("\nCenter names must be of type str",
+                                     stack_info=True)
+                        raise TypeError(f"The key {key2} not str!")
+                    if len(key2) != 3:
+                        logger.error("\nCenter names in "
+                                     f"{ac_contribs_orbs[key][key1]} must be "
+                                     "3-character strings\n",stack_info=True)
+                        raise ValueError(f"Center name {key2} not 3 "
+                                         "characters!")
+                    if not isinstance(ac_contribs_orbs[key][key1][key2],list):
+                        logger.error("\nCenter name contributions "
+                                     "must be of type list\n",stack_info=True)
+                        raise TypeError(f"{ac_contribs_orbs[key][key1][key2]}"
+                                        " is not a list!")
+                    for item in ac_contribs_orbs[key][key1][key2]:
+                        if not isinstance(item,str):
+                            logger.error("\nConstellation codes must be "
+                                         "of type str\n",stack_info=True)
+                            raise TypeError(f"{item} in "
+                                            f"{ac_contribs_orbs[key][key1][key2]}"
+                                            " is not a string!")
+                        if len(item) != 1:
+                            logger.error("\nConstellation codes must be "
+                                         "1-character strings\n",
+                                         stack_info=True)
+                            raise ValueError(f"{item} in "
+                                             f"{ac_contribs_orbs[key][key1][key2]}"
+                                             " is not a 1-character string")
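+    # A valid ac_contribs dict, as parsed from the YAML configs, thus looks
+    # like the following (centre names are 3-character strings, constellation
+    # codes single characters; example values drawn from the sample configs):
+    #   {'weighted':   {'systems': {'COD': ['G','R','E']},
+    #                   'prns': None, 'svns': None},
+    #    'unweighted': {'systems': {'IG2': ['G']}, 'prns': None, 'svns': None},
+    #    'excluded':   {'systems': None, 'prns': None, 'svns': None}}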
") + if ac_contribs_orbs[key][key1] is not None: + for key2 in ac_contribs_orbs[key][key1]: + if not isinstance(key2,str): + logger.error("\nCenter names must be of type str", + stack_info=True) + raise TypeError(f"The key {key2} not str!") + if len(key2) != 3: + logger.error("\n Center names in " + f"{ac_contribs_orbs[key][key1]} must be " + "3-character strings\n",stack_info=True) + raise ValueError(f"Center name {key2} not 3 " + "characters!") + if not isinstance(ac_contribs_orbs[key][key1][key2],list): + logger.error(f"\nCenter name contributions " + "must be of type list\n",stack_info=True) + raise TypeError(f"{ac_contribs_orbs[key][key1][key2]}" + " is not a list!") + for item in ac_contribs_orbs[key][key1][key2]: + if not isinstance(item,str): + logger.error("\nConstellation codes must be " + " of type str\n",stack_info=True) + raise TypeError(f"{item} in " + f"{ac_contribs_orbs[key][key1][key2]}" + " is not a string!") + if len(item) != 1: + logger.error("\nConstellation codes must be " + " 1-character strings\n" + ,stack_info=True) + raise ValueError(f"{item} in " + f"{ac_contribs_orbs[key][key1][key2]}" + " is not a 3-character string") + + # orbit sampling + orbit_sampling = config['orbits']['sampling'] + if orbit_sampling is not None and not isinstance(orbit_sampling,int): + logger.error("\nOrbit sampling must be an integer\n", stack_info=True) + raise TypeError(f"Orbit sampling {orbit_sampling} is not an integer!") + + # center weighting method + # In case of weighting by constellation/block/sat, check if metadata file is + # specified; if not revert to global weighting and issue a warning + if (config['orbits']['cen_wht_method'] in + ['by_constellation','by_block','by_sat'] + and sat_metadata_file is None): + logger.warning(f"\nCenter weighting {cen_wht_method} is requested but " + f"there is no satellite metadata file\nspecified. 
" + f"Satellite metadata is required to identify satellite " + f"blocks.\nSetting the centre weighting method to the " + f"default global method.\n") + config['orbits']['cen_wht_method'] = 'global' + + cen_wht_method = config['orbits']['cen_wht_method'] + allowed_cen_wht_method = ['global','by_constellation','by_block','by_sat'] + if cen_wht_method not in allowed_cen_wht_method: + logger.error("\nCenter weighting method must be one of " + f"{allowed_cen_wht_method}\n", stack_info=True) + raise ValueError(f"Center weighting method {cen_wht_method} is not in" + f"{allowed_cen_wht_method}") + + # satellite weighting method + sat_wht_method = config['orbits']['sat_wht_method'] + if sat_wht_method not in ['RMS_L1']: + logger.error("\nSatellite weighting method can only be RMS_L1\n", + stack_info=True) + raise ValueError(f"Satellite weighting method {sat_wht_method} is not " + f"recognised!") + + # reference frame alignment options + rf_align = config['orbits']['rf_align'] + if not isinstance(rf_align,list): + logger.error("\nrf_align must be a list\n",stack_info=True) + raise TypeError(f"{rf_align} is not a list!") + for item in rf_align: + if not isinstance(item,bool): + logger.error("\nrf_align items must be booleans\n",stack_info=True) + raise TypeError(f"{item} is not a boolean!") + + # list of centres for which UT1 differences should be applied + # as corrections to Z rotations + ut1_rot = config['orbits']['ut1_rot'] + if ut1_rot is not None: + if not isinstance(ut1_rot,list): + logger.error("\nut1_rot must be a list\n ",stack_info=True) + raise TypeError(f"{ut1_rot} is not a list!") + for item in ut1_rot: + if not isinstance(item,str): + logger.error("\nCenter names in ut1_rot must be strings\n", + stack_info=True) + raise TypeError(f"{item} not a string!") + if len(item) != 3: + logger.error("\nCenter names in ut1_rot must be 3-character " + "strings\n",stack_info=True) + raise ValueError(f"Center name {item} not a 3-character " + "string!") + + # EOP format for the ut1 correction centers + ut1_eop_format = config['orbits']['ut1_eop_format'] + allowed_eop_format = ['IERS_EOP14_C04','IERS_EOP_rapid','IGS_ERP2'] + if ut1_eop_format not in allowed_eop_format: + logger.error(f"\nut1_eop_format must be one of {allowed_eop_format}\n", + stack_info=True) + raise ValueError(f"eop_format {epo_format} not recognized!") + + # maneuvering satellites options + rm_dv = config['orbits']['rm_dv'] + if not isinstance(rm_dv,bool): + logger.error("\nrm_dv must be boolean\n",stack_info=True) + raise TypeError(f"{rm_dv} is not of type boolean!") + + no_rm_dv = config['orbits']['no_rm_dv'] + if no_rm_dv is not None: + if not isinstance(no_rm_dv,list): + logger.error("\nno_rm_dv must be a list\n ",stack_info=True) + raise TypeError(f"{no_rm_dv} is not a list!") + for item in no_rm_dv: + if not isinstance(item,str): + logger.error("\nCenter names in no_rm_dv must be strings\n", + stack_info=True) + raise TypeError(f"{item} not a string!") + if len(item) != 3: + logger.error("\nCenter names in no_rm_dv must be 3-character " + "strings\n",stack_info=True) + raise ValueError(f"Center name {item} not a 3-character " + "string!") + + # outlier detection (assess) options + allowed_sat_rms_tst = ['auto','manual','strict'] + sat_rms_tst = config['orbits']['assess']['sat_rms_tst'] + if (sat_rms_tst is not None + and sat_rms_tst not in allowed_sat_rms_tst): + logger.error("\nsat_rms_tst must be one of " + f"{allowed_sat_rms_tst}\n", stack_info=True) + raise ValueError(f"The given sat_rms_tst {sat_rms_tst} is not in " + 
f"{allowed_sat_rms_tst}") + + sat_rms_tst_unweighted = (config['orbits']['assess'] + ['sat_rms_tst_unweighted']) + if (sat_rms_tst_unweighted is not None + and sat_rms_tst_unweighted not in allowed_sat_rms_tst): + logger.error("\nsat_rms_tst_unweighted must be one of " + f"{allowed_sat_rms_tst}\n", stack_info=True) + raise ValueError("The given sat_rms_tst_unweighted " + f"{sat_rms_tst_unweighted} is not in " + f"{allowed_sat_rms_tst}") + + coef_sat = config['orbits']['assess']['coef_sat'] + checkutils.check_scalar(coef_sat) + + thresh_sat = config['orbits']['assess']['thresh_sat'] + if sat_rms_tst == 'manual' and thresh_sat is None: + logger.error("\nthresh_sat must be specified when sat_rms_tst is " + "manual\n", stack_info=True) + raise ValueError("sat_rms_tst is manual but thresh_sat not " + "specified!") + if thresh_sat is not None: + if not isinstance(thresh_sat,dict): + logger.error("\nThe given thresh_sat must be a dict\n", + stack_info=True) + raise TypeError("The given thresh_sat is not of type dict") + for key in thresh_sat: + if not isinstance(key,str): + logger.error("\nThe keys in the given thresh_sat must be " + "of type str\n",stack_info=True) + raise TypeError("There are non-str keys in the given " + "thresh_sat") + checkutils.check_scalar(thresh_sat[key]) + + max_high_satrms = config['orbits']['assess']['max_high_satrms'] + if max_high_satrms is not None: + if not isinstance(max_high_satrms,int): + logger.error("\nThe given max_high_satrms must be an " + "integer\n", stack_info=True) + raise TypeError("The given max_high_satrms is not of type int") + + trn_tst = config['orbits']['assess']['trn_tst'] + if trn_tst is not None: + allowed_trn_tst = ['auto','manual','strict'] + if trn_tst not in allowed_trn_tst: + logger.error(f"\ntrn_tst must be one of {allowed_trn_tst}\n", + stack_info=True) + raise ValueError("The given trn_tst is not in " + "{allowed_trn_tst}") + + thresh_trn = config['orbits']['assess']['thresh_trn'] + if not isinstance(thresh_trn,list): + logger.error("\nthresh_trn must be a list of three items for " + "translation, rotation and scale thresholds\n", + stack_info=True) + raise TypeError("The given thresh_trn is not of type list") + if len(thresh_trn) != 3: + logger.error("\nthresh_trn must be a list of three items for " + "translation, rotation and scale thresholds\n", + stack_info=True) + raise TypeError(f"The given thresh_trn is of length " + f"{len(thresh_trn)}") + for item in thresh_trn: + if item is not None: + checkutils.check_scalar(item) + + numcen_tst = config['orbits']['assess']['numcen_tst'] + if numcen_tst is not None: + allowed_numcen_tst = ['strict','eased'] + if numcen_tst not in allowed_numcen_tst: + logger.error("\nnumcen_tst must be one of " + f"{allowed_numcen_tst}\n", stack_info=True) + raise ValueError("The given numcen_tst is not in " + "{allowed_numcen_tst}") + + min_numcen = config['orbits']['assess']['min_numcen'] + if min_numcen is not None: + if not isinstance(min_numcen,int): + logger.error("\nThe given min_numcen must be an " + "integer\n", stack_info=True) + raise TypeError("The given min_numcen is not of type int") + + max_iter = config['orbits']['assess']['max_iter'] + if not isinstance(max_iter,int): + logger.error("\nmax_iter must be an integer\n", stack_info=True) + raise TypeError(f"max_iter {max_iter} is not an integer!") + if max_iter < 1: + logger.error("\nmax_iter must be a positive number\n", stack_info=True) + raise TypeError(f"max_iter {max_iter} is not a positive number!") + + # SP3 header information + 
+
+    # SP3 header information
+    sp3_header = config['orbits']['sp3_header']
+    if not isinstance(sp3_header,dict):
+        logger.error("\nSP3 header information must be specified "
+                     "as a dictionary\n",stack_info=True)
+        raise TypeError(f"SP3 header {sp3_header} is not of type dict!")
+    allowed_sp3_header_keys = ['coord_sys','cmb_type','clk_src','antex',
+                               'oload']
+    for key in sp3_header:
+        if key not in allowed_sp3_header_keys:
+            logger.error("\nSP3 header keys must be one of "
+                         f"{allowed_sp3_header_keys}\n",stack_info=True)
+            raise ValueError(f"SP3 header item {key} not recognized!")
+        if not isinstance(sp3_header[key],str):
+            logger.error("\nSP3 header items must be strings\n",
+                         stack_info=True)
+            raise TypeError(f"SP3 header {key} : {sp3_header[key]} is "
+                            "not a string!")
+
+    # print out the command line
+    command = " ".join(sys.argv)
+    logger.info("\nStarted the combination program\nCommand line:\n"
+                f"{command}\n")
+
+    if solution == 'ultra-rapid':
+        dowhr_line = f"day of week {dow}, hour {str(hr).zfill(2)}.\n"
+    else:
+        dowhr_line = f"day of week {dow}.\n"
+
+    logger.info("\nStarted the orbit combination for GPS week "
+                f"{str(gpsweek).zfill(4)} {dowhr_line}\n"
+                f"Configuration file\n"
+                f"Solution: {solution}\nSampling interval for orbit "
+                f"combination (seconds): {orbit_sampling}\nRoot directory "
+                f"for submissions: {subm_rootdir}\nRoot directory for "
+                f"combination products: {prod_rootdir}\nSatellite "
+                f"metadata file: {sat_metadata_file}\nCenter weighting "
+                f"method: {cen_wht_method}\nSatellite weighting method: "
+                f"{sat_wht_method}\n")
+
+    logger.debug(f"\nconfig {config}\n")
+
+    # Determine some directories
+    subm_weekdir = subm_rootdir + '/w' + str(gpsweek).zfill(4)
+    prod_weekdir = prod_rootdir + '/w' + str(gpsweek).zfill(4)
+    rf_weekdir = rf_rootdir + '/w' + str(gpsweek).zfill(4)
+
+    # Make directories if they do not exist
+    if (not os.path.isdir(prod_rootdir)):
+        os.mkdir(prod_rootdir)
+    if (not os.path.isdir(prod_weekdir)):
+        os.mkdir(prod_weekdir)
+
+    # satellite metadata file
+    if sat_metadata_file is not None:
+        sat_metadata = io_data.SatelliteMetadata(sat_metadata_file)
+
+    # Read the nanu summary file if required
+    if rm_dv:
+        nanu = io_data.NANU_sum(nanu_sumfile)
+        nanu.get_dv(solution)
+
+    # Determine year and day of year
+    gc = gpsCal()
+    gc.set_wwww_dow(gpsweek,dow)
+    year = gc.yyyy()
+    doy = gc.ddd()
+    month = gc.MM()
+    dom = gc.dom()
+    start_epoch = (datetime.datetime(year,month,dom,hr,0,0)
+                   + datetime.timedelta(seconds=cut_start))
+    if solution != 'ultra-rapid':
+        end_epoch = (datetime.datetime(year,month,dom,23,59,59)
+                     - datetime.timedelta(seconds=cut_end))
+    else:
+        end_epoch = (start_epoch + datetime.timedelta(days=2)
+                     - datetime.timedelta(seconds=1)
+                     - datetime.timedelta(seconds=cut_end))
+
+    # Look into the submission directory for available submissions
+    sp3_subm_all = glob.glob(subm_weekdir+'/???????'+sol_id+'_'+str(year)
+                             +str(doy).zfill(3)+str(hr).zfill(2)+'00_'
+                             +len_data+'_???_ORB.SP3')
+
+    # Get the list of SP3 files to be read
+    # If a configuration file is used, use all the weighted and
+    # unweighted centers (for at least one satellite); otherwise, use all
+    # available orbit solutions
+    if ac_contribs_orbs is not None:
+
+        # set of contributing centers
+        contributing_acs = set()
+
+        weighted = ac_contribs_orbs['weighted']
+        for item in weighted:
+            if weighted[item] is not None:
+                for acname in weighted[item]:
+                    contributing_acs.add(acname)
+        unweighted = ac_contribs_orbs['unweighted']
+        for item in unweighted:
+            if unweighted[item] is not None:
+                for acname in unweighted[item]:
+                    contributing_acs.add(acname)
+        if contributing_acs:
+            sp3_subm_list = []
+            for sp3_subm in sp3_subm_all:
+                acname = sp3_subm[-38:-35]
+                if acname in contributing_acs:
+                    sp3_subm_list.append(sp3_subm)
+        else:
+            logger.error("There is no center to be used (weighted or "
+                         "unweighted) in the config file")
+            raise ValueError("contributing_acs is empty")
+    else:
+        logger.info("There is no configuration file given.\nUsing all "
+                    "available products.\n")
+        sp3_subm_list = sp3_subm_all
+
+    sp3_subm_list.sort()
+
+    logger.info("Orbit files used for orbit combination:\n")
+    for item in sp3_subm_list:
+        logger.info(f"{item}")
+    logger.info("")
+
+    # Read the orbit sp3 files
+    sp3_dict = {}
+    for sp3_subm in sp3_subm_list:
+        acname = sp3_subm[-38:-35]
+        sp3_ac = io_data.sp3(sp3_subm)
+        sp3_ac.parse(start_epoch,end_epoch)
+        sp3_dict[acname] = sp3_ac.sp3dict
+
+    ## Preprocessing of the orbits
+
+    # Initialize the class instance
+    if ac_contribs_orbs is None:
+        if sat_metadata_file is None:
+            orbs = orbits.OrbitPrep(sp3all=sp3_dict)
+        else:
+            orbs = orbits.OrbitPrep(sp3all=sp3_dict,
+                                    sat_metadata=sat_metadata)
+    else:
+        if sat_metadata_file is None:
+            orbs = orbits.OrbitPrep(sp3all=sp3_dict,
+                                    ac_contribs=ac_contribs_orbs)
+        else:
+            orbs = orbits.OrbitPrep(sp3all=sp3_dict,
+                                    ac_contribs=ac_contribs_orbs,
+                                    sat_metadata=sat_metadata)
+    orbs.filter_contribs()
+
+    # Resample if requested
+    if orbit_sampling is not None:
+        orbs.resample(orbit_sampling)
+
+    # Remove DV maneuvering satellites if needed
+    if rm_dv:
+        orbs.rm_dv(nanu.dv,no_rm_dv)
+        ind = np.where(((nanu.dvfull[:,0]>=start_epoch) &
+                        (nanu.dvfull[:,0]<=end_epoch)) |
+                       ((nanu.dvfull[:,1]>=start_epoch) &
+                        (nanu.dvfull[:,1]<=end_epoch)))
+        dvsats = nanu.dvfull[ind]
+        dvsats_new = []
+        for row in dvsats:
+            sys_id = row[2][0:1]
+            prn = int(row[2][1:])
+            ep = row[0]
+            svn_no = sat_metadata.get_svn(sys_id,prn,ep)
+            svn = sys_id + str(svn_no).zfill(3)
+            dvsats_new.append([row[0],row[1],row[2],svn])
+        dvsats = dvsats_new
+
+    # Convert orbit dictionaries to arrays
+    orbs.to_arrays()
+
+    # Check if there is any orbit solution to be included
+    if not orbs.orbits:
+        if ac_contribs_orbs is not None:
+            logger.error("\nNo orbit solution is included as weighted or "
+                         "unweighted.\nCheck the config file "
+                         f"and the orbit directory {subm_weekdir}\n",
+                         stack_info=True)
+            raise ValueError("orbits is empty")
+        else:
+            logger.error("\nNo orbit solution is included as weighted or "
+                         "unweighted.\nCheck the orbit directory "
+                         f"{subm_weekdir}\n",stack_info=True)
+            raise ValueError("orbits is empty")
+
+    logger.debug(f"epochs:\n{orbs.epochs} {np.shape(orbs.epochs)}\n")
+    logger.debug(f"orbits:\n{orbs.orbits} {np.shape(orbs.orbits)}\n")
+    logger.debug(f"satinfo:\n{orbs.satinfo} {np.shape(orbs.satinfo)}\n")
+
+    ## Orbit combination
+
+    # Initialize the class instance
+    # For ultra-rapid, read clocks so an AC clock (clk_src) can be
+    # reported along with the combined orbits
+    if solution == 'ultra-rapid':
+        orbcmb = orbits.OrbitComb(orbits=orbs.orbits,epochs=orbs.epochs,
+                    satinfo=orbs.satinfo,cenflags=orbs.cenflags,
+                    weighted_cens_by_sys=orbs.weighted_cens_by_sys,
+                    unweighted_cens_by_sys=orbs.unweighted_cens_by_sys,
+                    weighted_sats=orbs.weighted_sats,
+                    unweighted_sats=orbs.unweighted_sats,
+                    clocks=orbs.clocks)
+    else:
+        orbcmb = orbits.OrbitComb(orbits=orbs.orbits,epochs=orbs.epochs,
+                    satinfo=orbs.satinfo,cenflags=orbs.cenflags,
+                    weighted_cens_by_sys=orbs.weighted_cens_by_sys,
+                    unweighted_cens_by_sys=orbs.unweighted_cens_by_sys,
+                    weighted_sats=orbs.weighted_sats,
+                    unweighted_sats=orbs.unweighted_sats)
+
+    logger.debug(f"orbits original: {orbcmb.orbits}")
+
+    # If reference frame alignment is requested, read the rf combination
+    # summary yaml files to get the transformation parameters, and
+    # transform the orbits
+    transformations = {}
+    if any(rf_align):
+
+        # First day of the week
+        gc = gpsCal()
+        gc.set_wwww_dow(gpsweek,0)
+        year_firstdow = gc.yyyy()
+        doy_firstdow = gc.ddd()
+
+        # reference frame combination summary file
+        rf_summary = (rf_weekdir + '/' + rf_name + '_'
+                      + str(year_firstdow).zfill(4)
+                      + str(doy_firstdow).zfill(3)
+                      + '0000_07D_07D_SUM.YML')
+        ref_sum = io_data.Ref_sum(rf_summary)
+        ref_sum.transfo(rf_align=rf_align)
+
+        # If the UT1 rotation is requested for any center, read the
+        # apriori and the observed ERP files for that center, and apply
+        # the Z rotation
+        if ut1_rot is not None:
+
+            logger.debug(f"ut1_rot: {ut1_rot}")
+            logger.debug("transformations before ut1 correction:\n"
+                         f"{ref_sum.transformations}")
+
+            for acname in ut1_rot:
+
+                erp_aprfile = glob.glob(rf_rootdir+'/'+acname.upper()
+                                +'????'+'APR_???????????_???_???_ERP.ERP')
+                erp_obsfile = glob.glob(rf_rootdir+'/'+acname.upper()
+                                +'????'+'OBS_???????????_???_???_ERP.ERP')
+                logger.debug(f"erp_aprfile: {erp_aprfile}")
+                if len(erp_aprfile) > 1:
+                    raise ValueError("\nThere must be only one erp "
+                                     "apriori file for a center. The "
+                                     f"files found:\n{erp_aprfile}")
+                if len(erp_obsfile) > 1:
+                    raise ValueError("\nThere must be only one erp "
+                                     "observed file for a center. The "
+                                     f"files found:\n{erp_obsfile}")
+
+                ref_sum.ut1_rot(acname,erp_aprfile,erp_obsfile,
+                                ut1_eop_format)
+
+            logger.debug("transformations after ut1 correction:\n"
+                         f"{ref_sum.transformations}")
+
+        transformations = ref_sum.transformations
+
+        orbcmb.transform(transformations[dow])
+
+        logger.debug(f"transformations: {transformations[dow]}")
+        logger.debug(f"orbits transformed: {orbcmb.orbits}")
+
+    # Set redo to True so the combination runs the first time
+    redo = True
+
+    iter = 0
+
+    # Loop until no outlier is detected or maximum iteration exceeded
+    while redo and iter <= max_iter:
+
+        iter += 1
+
+        # Perform the weighting
+        orbcmb.weight(cen_wht_method=cen_wht_method,
+                      sat_wht_method=sat_wht_method)
+
+        logger.debug(f"centre weights: {orbcmb.cen_weights}")
+
+        # Perform the combination
+        orbcmb.combine()
+
+        logger.debug(f"\ncentre RMS's: {orbcmb.cen_rms}\n")
+        logger.debug(f"\ncentre abdev's: {orbcmb.cen_abdev}\n")
+
+        logger.debug(f"config {config}\n")
+        logger.debug(f"thresh_sat: {thresh_sat}")
+        orbcmb.assess(sat_rms_tst=sat_rms_tst,
+                      sat_rms_tst_unweighted=sat_rms_tst_unweighted,
+                      coef_sat=coef_sat,thresh_sat=thresh_sat,
+                      max_high_satrms=max_high_satrms,trn_tst=trn_tst,
+                      thresh_trn=thresh_trn,numcen_tst=numcen_tst,
+                      min_numcen=min_numcen)
+
+        orbcmb.flags()
+
+        redo = orbcmb.rejection
+
+        logger.info(f"redo: {redo}")
+
+    # Convert the combined orbit to a sp3 dictionary
+    orbcmb.to_sp3dict(sample_rate=orbit_sampling,sp3_header=sp3_header)
+
+    # Write the sp3 dictionary into a sp3 file
+    start_epoch = orbcmb.sp3_combined['data']['epochs'][0]
+    start_year = start_epoch.year
+    start_month = start_epoch.month
+    start_day = start_epoch.day
+    start_hour = start_epoch.hour
+    start_minute = start_epoch.minute
+    start_second = start_epoch.second
+    start_gc = gpsCal()
+    start_gc.set_yyyy_MM_dd_hh_mm_ss(start_year,start_month,start_day,
+                                     start_hour,start_minute,start_second)
+    start_doy = start_gc.ddd()
+    orb_smp = int(orbit_sampling/60)
+    smp = str(orb_smp).zfill(2) + 'M'
+    cmb_sp3_filename = (cmb_name + str(vid) + camp_id + sol_id + '_'
+                        + str(start_year).zfill(4)
+                        + str(start_doy).zfill(3)
+                        + str(start_hour).zfill(2)
+                        + str(start_minute).zfill(2)
+                        + '_' + len_data + '_' + smp + '_' + 'ORB'
+                        + '.SP3')
+
+    orb = io_data.sp3(sp3file=prod_weekdir+'/'+cmb_sp3_filename,
+                      sp3dict=orbcmb.sp3_combined)
+    orb.write()
+
+
+    # Write out the summary file
+    orbrep = OrbitReport(orbcmb,sp3_subm_list,cmb_sp3_filename,
+                prod_rootdir,cmb_name,vid,camp_id,sol_id,author,contact,
+                ac_acronyms,rm_dv,dvsats,rf_align,transformations,
+                sat_metadata_file)
+
+    orbrep.eclipse(eop_file,eop_format)
+    orbrep.summary()
+
+    if solution == 'ultra-rapid':
+        dowhr_line = f"day of week {dow}, hour {hr}.\n"
+    else:
+        dowhr_line = f"day of week {dow}.\n"
+
+    logger.info("\nFinished the combination for GPS week "
+                f"{str(gpsweek).zfill(4)} {dowhr_line}\n"
+                "The combined orbit is written into:\n"
+                f"{prod_weekdir}/{cmb_sp3_filename}\n")
diff --git a/rocs/coordinates.py b/rocs/coordinates.py
new file mode 100755
index 0000000..098ebf9
--- /dev/null
+++ b/rocs/coordinates.py
@@ -0,0 +1,195 @@
+# Coordinate transformations module
+
+import numpy as np
+import datetime
+import numbers
+import rocs.checkutils as checkutils
+from rocs.rotation import Rotation
+from rocs.iau import IAU
+
+
+def diag_block_mat_boolindex(L):
+    """
+    create a block-diagonal matrix from any number of same-size matrices
+    """
+    shp = L[0].shape
+    mask = np.kron(np.eye(len(L)), np.ones(shp))==1
+    out = np.zeros(np.asarray(shp)*len(L),dtype=float)
+    out[mask] = np.concatenate(L).ravel()
+    return out
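+
+# For example (illustrative only), two 2x2 matrices are stacked into a
+# 4x4 block-diagonal matrix:
+#
+#   >>> a = np.array([[1.,2.],[3.,4.]])
+#   >>> b = np.array([[5.,6.],[7.,8.]])
+#   >>> diag_block_mat_boolindex((a,b))
+#   array([[1., 2., 0., 0.],
+#          [3., 4., 0., 0.],
+#          [0., 0., 5., 6.],
+#          [0., 0., 7., 8.]])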
+
+
+class Rectangular:
+
+    """
+    Class for a rectangular (Cartesian) reference frame
+
+    """
+
+    def __init__(self,coords,ref_frame,time_utc):
+
+        """
+        Initialize Rectangular class
+
+        Keyword arguments:
+            coords [list or numpy array] : 3-column array/list where the
+                    columns represent the x,y,z coordinates in the
+                    desired cartesian reference frame, and the rows are
+                    for different points in the reference frame
+            ref_frame [str] : the reference frame for the given
+                    coordinates
+            time_utc [datetime or list/array of datetimes] : UTC time(s)
+                    corresponding to the coordinates
+
+        Updates:
+            self.coords [numpy array]
+            self.ref_frame [str]
+            self.time_utc [array of datetimes]
+
+        """
+
+        # Check the given arguments and set the attributes
+        checkutils.check_coords(coords,ncol=3,minrows=1)
+        c_arr = np.array(coords)
+        checkutils.check_array(c_arr,np.shape(c_arr)[0],
+                               np.shape(c_arr)[1])
+        self.coords = c_arr
+
+        if not isinstance(ref_frame,str):
+            raise TypeError("The input ref_frame needs to be a string")
+        allowed_ref_frames = ['ECI','ECEF']
+        if ref_frame not in allowed_ref_frames:
+            raise ValueError(f"The given reference frame {ref_frame} not "
+                             f"recognized!\nAllowed reference frames: "
+                             f"{allowed_ref_frames}")
+        self.ref_frame = ref_frame
+
+        if not isinstance(time_utc,(list,np.ndarray,datetime.datetime)):
+            raise TypeError("The given time_utc needs to be either a "
+                            "datetime object or a list/array of datetime "
+                            "objects")
+        if not all(isinstance(item,datetime.datetime)
+                   for item in np.atleast_1d(time_utc)):
+            raise TypeError("There are non-datetime items in time_utc")
+        if np.shape(np.atleast_1d(time_utc)) == (1,):
+            self.time_utc = np.full(
+                    len(self.coords),np.atleast_1d(time_utc)[0])
+        else:
+            if np.shape(time_utc) != (len(self.coords),):
+                raise ValueError("The given time_utc must be either a "
+                                 "datetime object or a 1d array/list "
+                                 "with the same length as the given "
+                                 "coords.")
+            self.time_utc = np.atleast_1d(time_utc)
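+
+    # Illustrative construction (the values are hypothetical): a single
+    # ECI position tagged with one UTC epoch, which is broadcast to all
+    # coordinate rows:
+    #
+    #   >>> import datetime, numpy as np
+    #   >>> from rocs.coordinates import Rectangular
+    #   >>> rec = Rectangular([[7000e3,0.0,0.0]],'ECI',
+    #   ...                   datetime.datetime(2024,10,22))
+    #   >>> rec.ref_frame
+    #   'ECI'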
+
+
+    def ToECEF(self,iau_model='IAU2006/2000A',
+               transformation_method='CIO',evaluation_method='series',
+               ut1_utc=None,xp=None,yp=None):
+
+        """
+        convert the coordinates to Earth-Centered Earth-Fixed (ECEF)
+        system
+
+        Keyword arguments:
+            iau_model [str] : the IAU model for transformation:
+                                IAU76/80/82/94
+                                IAU2000A
+                                IAU2006/2000A
+            transformation_method [str] : the transformation method:
+                                            CIO
+                                            equinox
+            evaluation_method [str] : the evaluation method:
+                                        classical_angles
+                                        series
+            ut1_utc [scalar or list/array] : UT1-UTC in seconds
+                    corresponding to the time_utc attribute of the class
+                    object
+            xp [scalar or list/array] : polar x motion in radians
+                    corresponding to the time_utc attribute of the class
+                    object
+            yp [scalar or list/array] : polar y motion in radians
+                    corresponding to the time_utc attribute of the class
+                    object
+
+        Updates:
+            self.iau_model [str]
+            self.transformation_method [str]
+            self.evaluation_method [str]
+            self.coords [numpy array]
+            self.ref_frame [str]
+
+        """
+
+        # Check the given arguments and set the attributes
+        allowed_iau_models = ['IAU76/80/82/94','IAU2000A','IAU2006/2000A']
+        if iau_model not in allowed_iau_models:
+            raise ValueError(f"The given IAU model {iau_model} not "
+                             f"recognized!\nAllowed IAU models: "
+                             f"{allowed_iau_models}")
+
+        # Get the original reference frame
+        ref_frame_orig = self.ref_frame
+
+        if self.ref_frame == 'ECI':
+
+            # Initialize an IAU class for calculations
+            iau = IAU(self.time_utc)
+
+            if iau_model == 'IAU76/80/82/94':
+                # this model only supports the equinox-based
+                # transformation evaluated via classical angles
+                transformation_method = 'equinox'
+                evaluation_method = 'classical_angles'
+
+                # we need xp,yp and ut1_utc
+                if (ut1_utc is None or xp is None or yp is None):
+                    raise ValueError("ut1_utc, xp and yp must be given "
+                                     "for ECI to ECEF conversion")
+
+                if not isinstance(ut1_utc,
+                                  (list,np.ndarray,numbers.Number)):
+                    raise TypeError("The given ut1_utc needs to be "
+                                    "either a number or a list/array of "
+                                    "numbers")
+                if not all(isinstance(item,numbers.Number)
+                           for item in np.atleast_1d(ut1_utc)):
+                    raise TypeError("There are non-number items in "
+                                    "ut1_utc")
+                if (np.shape(np.atleast_1d(ut1_utc))
+                        != np.shape(self.time_utc)):
+                    raise ValueError("Shape mismatch between "
+                            f"self.time_utc {np.shape(self.time_utc)} "
+                            "and ut1_utc "
+                            f"{np.shape(np.atleast_1d(ut1_utc))}")
+                self.ut1_utc = np.atleast_1d(ut1_utc)
+
+                if not isinstance(xp,(list,np.ndarray,numbers.Number)):
+                    raise TypeError("The given xp needs to be either a "
+                                    "number or a list/array of numbers")
+                if not all(isinstance(item,numbers.Number)
+                           for item in np.atleast_1d(xp)):
+                    raise TypeError("There are non-number items in xp")
+                if (np.shape(np.atleast_1d(xp))
+                        != np.shape(self.time_utc)):
+                    raise ValueError("Shape mismatch between "
+                            f"self.time_utc {np.shape(self.time_utc)} "
+                            f"and xp {np.shape(np.atleast_1d(xp))}")
+                self.xp = np.atleast_1d(xp)
+
+                if not isinstance(yp,(list,np.ndarray,numbers.Number)):
+                    raise TypeError("The given yp needs to be either a "
+                                    "number or a list/array of numbers")
+                if not all(isinstance(item,numbers.Number)
+                           for item in np.atleast_1d(yp)):
+                    raise TypeError("There are non-number items in yp")
+                if (np.shape(np.atleast_1d(yp))
+                        != np.shape(self.time_utc)):
+                    raise ValueError("Shape mismatch between "
+                            f"self.time_utc {np.shape(self.time_utc)} "
+                            f"and yp {np.shape(np.atleast_1d(yp))}")
+                self.yp = np.atleast_1d(yp)
+
+                # call celestial_to_terrestrial to get the rotation
+                # matrix
+                iau.celestial_to_terrestrial(precession_model='iau1976',
+                        nutation_model='iau1980',gast_model='iau1994',
+                        ut1_utc=self.ut1_utc,xp=self.xp,yp=self.yp)
+
+            # Convert the coordinates from ECI to ECEF
+            c2t = diag_block_mat_boolindex(tuple(iau.c2t.values()))
+            coords_c = np.reshape(self.coords,(3*len(self.coords),))
+            coords_t = np.matmul(c2t,coords_c)
+            self.coords = np.reshape(coords_t,(len(self.coords),3))
+            self.ref_frame = 'ECEF'
+
+        else:
+            raise ValueError(f"The conversion from {ref_frame_orig} to "
+                             "ECEF is not implemented!")
diff --git a/rocs/eclipse.py b/rocs/eclipse.py
new file mode 100755
index 0000000..f9a0bea
--- /dev/null
+++ b/rocs/eclipse.py
@@ -0,0 +1,291 @@
+# Satellite eclipse calculations module
+
+import numpy as np
+import numbers
+import datetime
+import rocs.checkutils as checkutils
+
+
+class Eclipse:
+
+    """
+    Class for eclipsing satellite calculations
+
+    """
+
+    def __init__(self,r_sat,r_sun,eclipsing_body,radius_eclbody,
+                 r_eclbody=None):
+
+        """
+        Initialize Eclipse class
+
+        Keyword arguments:
+            r_sat [list or numpy array] : 3-column array/list where the
+                    columns represent the x,y,z coordinates of the
+                    satellite in an ECEF frame
+            r_sun [list or numpy array] : 3-column array/list where the
+                    columns represent the x,y,z coordinates of the Sun
+                    in an ECEF frame
+            eclipsing_body [str] : name of the eclipsing body
+                    (Earth, Moon, etc.)
+            radius_eclbody [number] : radius of the eclipsing body in
+                    meters
+            r_eclbody [list or numpy array] : 3-column array/list where
+                    the columns represent the x,y,z coordinates of the
+                    eclipsing body in an ECEF frame. If the eclipsing
+                    body is Earth, not required as it will be zeros
+
+        Updates:
+            self.r_sat [numpy array]
+            self.r_sun [numpy array]
+            self.eclipsing_body [str]
+            self.r_eclbody [numpy array]
+            self.l [numpy array] : fraction of solar disk seen by
+                    satellite
+                    l = 1.0 : no eclipse
+                    l = 0.0 : full eclipse
+                    0.0 < l < 1.0 : partial eclipse
+        """
+
+        # Check the given arguments and set the attributes
+        checkutils.check_coords(r_sat,ncol=3,minrows=1)
+        r_sat = np.array(r_sat)
+        self.r_sat = r_sat
+
+        if np.shape(np.array(r_sun)) != np.shape(np.array(r_sat)):
+            raise ValueError("r_sat and r_sun must be the same shape!")
+        r_sun = np.array(r_sun)
+        self.r_sun = r_sun
+
+        if not isinstance(eclipsing_body,str):
+            raise TypeError("The given eclipsing_body must be of string "
+                            "type!")
+        if eclipsing_body == 'Earth' or eclipsing_body == 'earth':
+            r_eclbody = np.zeros_like(r_sun)
+        else:
+            if r_eclbody is None:
+                raise ValueError("For an eclipsing body other than the "
+                                 "Earth, r_eclbody must be given!")
+            elif (np.shape(np.array(r_eclbody))
+                    != np.shape(np.array(r_sat))):
+                raise ValueError("r_eclbody must be the same shape as "
+                                 "r_sat and r_sun!")
+            r_eclbody = np.array(r_eclbody)
+        self.r_eclbody = r_eclbody
+
+        if not isinstance(radius_eclbody,numbers.Number):
+            raise TypeError("The given radius_eclbody must be a number")
+        self.radius_eclbody = radius_eclbody
+
+        # radius of the Sun in meters
+        radius_sun = 696340000
+
+        # Perform preliminary calculations
+
+        # vector of eclipsing body -> Sun
+        r_eclbody_sun = r_sun - r_eclbody
+
+        # vector of Sun -> satellite
+        r_sun_sat = r_sat - r_sun
+
+        # distance between the eclipsing body and the Sun
+        d_eclbody_sun = np.linalg.norm(r_eclbody_sun,axis=1)
+
+        # distance between the satellite and the Sun
+        d_sun_sat = np.linalg.norm(r_sun_sat,axis=1)
+
+        # vector of eclipsing body -> satellite
+        r_eclbody_sat = r_sat - r_eclbody
+
+        # unit vector of Sun -> satellite
+        u_sun_sat = r_sun_sat/d_sun_sat[:,None]
+
+        # projection of eclipsing_body --> satellite vector onto the
+        # Sun --> satellite vector (dot product of the two vectors)
+        proj = np.sum(r_eclbody_sat*u_sun_sat,axis=1)
+
+        # cross product of the eclipsing_body --> satellite vector and
+        # the Sun --> satellite unit vector
+        cr = np.cross(r_eclbody_sat,u_sun_sat)
+
+        # apparent separation of the centers of the Sun and the
+        # eclipsing body
+        sep = np.linalg.norm(cr,axis=1)/proj
+
+        # apparent radii of the Sun and the eclipsing body as seen from
+        # the satellite
+        r_sun_apparent = radius_sun/d_sun_sat
+        r_eclbody_apparent = radius_eclbody/proj
+
+        # Calculate lambda, the fraction of the Sun disk visible from
+        # the satellite
+
+        # Set lambda to 1.0 initially (i.e. no eclipse)
+        l = np.ones(len(r_sat))
+
+        # Go through where an eclipse is possible to check if there is
+        # an eclipse
+
+        # eclipse impossible (satellite is closer to the Sun than the
+        # eclipsing body); lambda remains 1
+        no_eclipse = (d_sun_sat <= d_eclbody_sun)
+
+        # possible eclipse
+        possible_eclipse = (r_sun_apparent + r_eclbody_apparent) > sep
+        possible_eclipse = np.logical_and(possible_eclipse,~no_eclipse)
+
+        # full eclipse (the apparent disk of the eclipsing body fully
+        # covers the apparent disk of the Sun); lambda = 0
+        full_eclipse = (r_eclbody_apparent - r_sun_apparent) >= sep
+        full_eclipse = np.logical_and(full_eclipse,possible_eclipse)
+        ind = np.where(full_eclipse)
+        l[ind] = 0.0
+
+        # partial eclipse; lambda needs to be calculated
+        partial_eclipse = np.logical_and(possible_eclipse,~full_eclipse)
+
+        # eclipsing body lies in the Sun's disk
+        ecl_lies_in_sun = (r_sun_apparent - r_eclbody_apparent) >= sep
+        ecl_lies_in_sun = np.logical_and(ecl_lies_in_sun,partial_eclipse)
+        ind = np.where(ecl_lies_in_sun)
+        l[ind] = ((r_sun_apparent[ind]**2-r_eclbody_apparent[ind]**2)
+                  /r_sun_apparent[ind]**2)
+
+        # Otherwise, the eclipsing body and the Sun disks intersect
+        intersecting_disks = np.logical_and(partial_eclipse,
+                                            ~ecl_lies_in_sun)
+        ind_insc = np.where(intersecting_disks)
+
+        # r of smaller disk and larger disk
+        r_small_disk = np.array([min(l1, l2) for l1, l2 in
+                                 zip(r_sun_apparent,r_eclbody_apparent)])
+        r_large_disk = np.array([max(l1, l2) for l1, l2 in
+                                 zip(r_sun_apparent,r_eclbody_apparent)])
+
+        # one disk much larger than the other one
+        much_larger_disk = r_large_disk/r_small_disk > 5.0
+        much_larger_disk = np.logical_and(much_larger_disk,
+                                          intersecting_disks)
+        close_disk_size = np.logical_and(~much_larger_disk,
+                                         intersecting_disks)
+
+        # half of the angle subtended in the smaller disk by arc of
+        # intersection
+        phi = np.full_like(r_small_disk,np.nan)
+        phi[ind_insc] = np.arccos(
+                (r_small_disk[ind_insc]**2 + sep[ind_insc]**2
+                 - r_large_disk[ind_insc]**2)
+                /(2.0*r_small_disk[ind_insc]*sep[ind_insc]))
+
+        area1 = (np.pi-phi)*r_small_disk**2
+        area2 = np.full_like(area1,np.nan)
+        area3 = np.full_like(area1,np.nan)
+        hgt = np.full_like(area1,np.nan)
+        theta = np.full_like(area1,np.nan)
+
+        ind = np.where(much_larger_disk)
+        hgt[ind] = np.sqrt(
+                r_small_disk[ind]**2-(sep[ind]-r_large_disk[ind])**2)
+        area2[ind] = hgt[ind]*(sep[ind]-r_large_disk[ind])
+        area3[ind] = 0.0
+
+        ind = np.where(close_disk_size)
+        hgt[ind] = r_small_disk[ind]*np.sin(phi[ind])
+        theta[ind] = np.arcsin(hgt[ind]/r_large_disk[ind])
+        area2[ind] = sep[ind]*hgt[ind]
+        area3[ind] = theta[ind]*r_large_disk[ind]**2
+
+        # area of non-overlapped portion of the small disk
+        area = area1 + area2 + area3
+
+        # redefine area1 and area2 based on which disk is the smaller
+        # one
+        area1 = np.pi*r_sun_apparent**2
+
+        # eclipsing body is the smaller disk
+        ecl_smaller = r_sun_apparent > r_eclbody_apparent
+        ecl_smaller = np.logical_and(ecl_smaller,intersecting_disks)
+        ind = np.where(ecl_smaller)
+        area2[ind] = np.pi*r_eclbody_apparent[ind]**2
+        l[ind] = (area1[ind]+area[ind]-area2[ind])/area1[ind]
+
+        # Sun is the smaller disk
+        sun_smaller = r_sun_apparent <= r_eclbody_apparent
+        sun_smaller = np.logical_and(sun_smaller,intersecting_disks)
+        ind = np.where(sun_smaller)
+        l[ind] = area[ind]/area1[ind]
+
+        self.l = l
+
+
+    def get_ecl_times(self,time):
+
+        """
+        Get the eclipse times in a from-to format
+
+        Keyword arguments:
+            time [list or numpy array] : array/list of datetime objects
+                    corresponding to the positions given in
+                    initialization
+
+        Updates:
+            self.eclipsing [str] : flag showing if the given satellite
+                    experiences any eclipsing by the given body
+                    'full'    : experiences full eclipse
+                    'partial' : experiences only partial eclipse
+                                (but not full eclipse)
+                    'none'    : does not experience any eclipse
+            self.ecl_times [numpy array] : 3-column array containing
+                    eclipse times where columns represent:
+                    [time_from,time_to,eclipse_type]
+        """
+
+        # Check the given time
+        if not isinstance(time,(list,np.ndarray,datetime.datetime)):
+            raise TypeError("The given time needs to be either a "
+                            "datetime object or a list/array of "
+                            "datetime objects")
+        if not all(isinstance(item,datetime.datetime)
+                   for item in np.atleast_1d(time)):
+            raise TypeError("There are non-datetime items in time")
+        if np.shape(time) != (len(self.r_sat),):
+            raise ValueError("The given time must be either a datetime"
+                             " object or a 1d array/list with the same "
+                             "length as the given r_sat.")
+        self.time = np.atleast_1d(time)
+
+        # time resolution
+        differences = ([abs(t2 - t1).seconds for t1,t2 in
+                        zip(self.time[:-1], self.time[1:])])
+        tres = max(differences)
+
+        ecl_times = []
+        eclipsing = 'none'
+
+        # Look for full eclipses
+        ind = np.where(self.l==0)
+        time_eclipse = self.time[ind]
+        if time_eclipse.size:
+            eclipsing = 'full'
+            time_from = time_eclipse[0]
+            for c,t in enumerate(time_eclipse):
+                # close an eclipse interval wherever the gap to the next
+                # eclipse epoch exceeds the sampling interval
+                if (c < len(time_eclipse)-1):
+                    next_time = time_eclipse[c+1]
+                    if ((next_time - t).seconds > tres):
+                        time_to = t
+                        ecl_times.append([time_from,time_to,'full'])
+                        time_from = next_time
+            time_to = t
+            ecl_times.append([time_from,time_to,'full'])
+
+        # Look for partial eclipses
+        ind = np.where((self.l>0) & (self.l<1))
+        time_eclipse = self.time[ind]
+        if time_eclipse.size:
+            if eclipsing == 'none':
+                eclipsing = 'partial'
+            time_from = time_eclipse[0]
+            for c,t in enumerate(time_eclipse):
+                # close an eclipse interval wherever the gap to the next
+                # eclipse epoch exceeds the sampling interval
+                if (c < len(time_eclipse)-1):
+                    next_time = time_eclipse[c+1]
+                    if ((next_time - t).seconds > tres):
+                        time_to = t
+                        ecl_times.append([time_from,time_to,'partial'])
+                        time_from = next_time
+            time_to = t
+            ecl_times.append([time_from,time_to,'partial'])
+
+        self.ecl_times = ecl_times
+        self.eclipsing = eclipsing
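+
+
+# Illustrative usage sketch (the positions are made up, in meters, and
+# not a real geometry): a satellite on the Sun side of the Earth sees
+# the full solar disk, so the visible fraction is 1:
+#
+#   >>> import numpy as np
+#   >>> from rocs.eclipse import Eclipse
+#   >>> r_sat = np.array([[26559e3, 0.0, 0.0]])   # ~GPS altitude, +X
+#   >>> r_sun = np.array([[1.496e11, 0.0, 0.0]])  # ~1 au, also +X
+#   >>> Eclipse(r_sat,r_sun,'Earth',6378137.0).l
+#   array([1.])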
diff --git a/rocs/formatters.py b/rocs/formatters.py
new file mode 100755
index 0000000..ed26bfd
--- /dev/null
+++ b/rocs/formatters.py
@@ -0,0 +1,26 @@
+# Custom formatters for logging
+
+import logging
+
+class MixedFormatter(logging.Formatter):
+
+    def format(self, record):
+        if record.levelno == logging.WARNING:
+            # avoid adding duplicates to the record.msg
+            if 'in function' not in record.msg:
+                record.msg = "[%s] In %s in function %s:\n %s\n" % (
+                        record.levelname, record.filename,
+                        record.funcName, record.msg)
+        elif record.levelno in (logging.DEBUG,
+                                logging.ERROR,
+                                logging.CRITICAL):
+            # avoid adding duplicates to the record.msg
+            if 'at line' not in record.msg:
+                record.msg = "[%s] In %s at line %s :\n %s\n" % (
+                        record.levelname, record.filename,
+                        record.lineno, record.msg)
+
+        return super().format(record)
diff --git a/rocs/gpscal.py b/rocs/gpscal.py
new file mode 100755
index 0000000..0f660a8
--- /dev/null
+++ b/rocs/gpscal.py
@@ -0,0 +1,359 @@
+# GPS time calculations
+
+import numpy as np
+import datetime as dt
+import math
+
+class gpsCal:
+
+    # First GPS epoch
+    # GPS week 0000 => 1980, 01, 06
+    gpsE0 = dt.datetime(int(1980),int(1),int(6))
+    oned = dt.timedelta(days=1)
+
+
+    def __init__(self):
+        self.dto = dt.datetime.utcnow()
+
+
+    def get_dto(self):
+        # accessor for the underlying datetime object (the instance
+        # attribute self.dto set in __init__ would shadow a method of
+        # the same name, so the accessor needs a different name)
+        return self.dto
+
+
+    def calendar(self):
+        # e.g. Sun Aug 20.08.2017 1963.0 57985 232
+        format = "%a %b %d %H:%M:%S %Y %j"
+        for i in range(0,8):
+            s = self.dto.strftime(format)
+            s = s + " "+str(self.wwww())+" "+str(self.dow())
+            s = s + " "+str(self.mjd())
+            print(s)
+            self.dto = self.dto + self.oned
+
+
+    def dec_day(self,ndays=1):
+        """
+        dec_day(ndays=1)
+        decrement the time by ndays (default is 1)
+        """
+        self.dto = self.dto - (self.oned * ndays)
+
+
+    def yyyy(self):
+        return int(self.dto.strftime("%Y"))
+
+
+    def yy(self):
+        yy = int(str(self.yyyy())[2:4])
+        return yy
+
+
+    def ddd(self):
+        return int(self.dto.strftime("%j"))
+
+
+    def MM(self):
+        return int(self.dto.strftime("%m"))
+
+
+    def dom(self):
+        return int(self.dto.strftime("%d"))
+
+
+    def hh(self):
+        """
+        Return the hour (int)
+        """
+        return int(self.dto.strftime("%H"))
+
+
+    def mm(self):
+        """
+        Return the minutes (int)
+        """
+        return int(self.dto.strftime("%M"))
+
+
+    def ss(self):
+        """
+        Return the seconds (int)
+        """
+        return int(self.dto.strftime("%S"))
+
+
+    def ms(self):
+        """
+        Return the microseconds (int)
+        """
+        return int(self.dto.strftime("%f"))
+
+
+    def wwww(self):
+        """
+        Work out the GPS week
+        """
+        diff = self.dto - self.gpsE0
+        diff_days = diff.days
+        week = int(diff_days/7.)
+        return week
+
+
+    def dow(self):
+        """
+        Work out the GPS day-of-week
+        """
+        diff = self.dto - self.gpsE0
+        diff_days = diff.days
+        week = int(diff_days/7.)
+        dow = int(diff_days - (week * 7.))
+        return dow
+
+
+    def sow(self):
+        """
+        Work out the seconds-of-week
+        """
+        diff = self.dto - self.gpsE0
+        diff_days = diff.days
+        week = int(diff_days/7.)
+        dow = int(diff_days - (week * 7.))
+        # ms() returns microseconds (strftime %f), hence the 1e6 divisor
+        sod = self.hh()*3600 + self.mm()*60 + self.ss() + self.ms()/1e6
+        sow = (dow*86400)+sod
+        return sow
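+
+    # For example, 2024-10-22 falls in GPS week 2337 with day-of-week 2
+    # (days of week are counted from Sunday = 0).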
+
+    def jd(self):
+        """
+        Convert Gregorian Calendar date to Julian Date based on
+        the US Navy's Astronomical Equation
+        Script adapted from:
+        https://stackoverflow.com/a/52431241
+        Only valid for 1801 to 2099
+        """
+
+        year = self.yyyy()
+        month = self.MM()
+        day = self.dom()
+        hour = self.hh()
+        minute = self.mm()
+        second = self.ss()
+
+        jd = (367*year
+              - int((7 * (year + int((month + 9) / 12.0))) / 4.0)
+              + int((275 * month) / 9.0) + day + 1721013.5
+              + (hour + minute / 60.0 + second / math.pow(60,2)) / 24.0
+              - 0.5 * math.copysign(1, 100 * year + month - 190002.5)
+              + 0.5)
+        return jd
+
+
+    def mjd(self):
+        return self.jd() - 2400000.5
+
+
+    def mdt(self):
+        """
+        mdt: convert the date time object into a matplotlib date.
+        All matplotlib date plotting is done by converting date
+        instances into days since 0001-01-01 UTC
+
+        Usage: mp_ts = mdt()
+
+        Output: 'mp_ts' (float)
+                a matplotlib time stamp which is days from 0001-01-01
+
+        """
+
+        mp_epoch = dt.datetime(1, 1, 1)
+        DAY = 86400
+        td = self.dto - mp_epoch
+        mp_ts = (td.days + 1
+                 + (1000000 * td.seconds + td.microseconds) / 1e6 / DAY)
+        return mp_ts
+
+
+    def tai(self):
+        """
+        Calculate TAI (which does not implement leap seconds)
+        """
+        leap_dates = np.array([
+            dt.datetime(1972,1,1),
+            dt.datetime(1972,7,1),
+            dt.datetime(1973,1,1),
+            dt.datetime(1974,1,1),
+            dt.datetime(1975,1,1),
+            dt.datetime(1976,1,1),
+            dt.datetime(1977,1,1),
+            dt.datetime(1978,1,1),
+            dt.datetime(1979,1,1),
+            dt.datetime(1980,1,1),
+            dt.datetime(1981,7,1),
+            dt.datetime(1982,7,1),
+            dt.datetime(1983,7,1),
+            dt.datetime(1985,7,1),
+            dt.datetime(1988,1,1),
+            dt.datetime(1990,1,1),
+            dt.datetime(1991,1,1),
+            dt.datetime(1992,7,1),
+            dt.datetime(1993,7,1),
+            dt.datetime(1994,7,1),
+            dt.datetime(1996,1,1),
+            dt.datetime(1997,7,1),
+            dt.datetime(1999,1,1),
+            dt.datetime(2006,1,1),
+            dt.datetime(2009,1,1),
+            dt.datetime(2012,7,1),
+            dt.datetime(2015,7,1),
+            dt.datetime(2017,1,1),
+            ])
+        ind = np.where(leap_dates < self.dto)
+        leap_seconds = len(ind[0]) + 9
+        tai = self.dto + dt.timedelta(0,leap_seconds)
+        return tai
+
+
+    def ut1(self,ut1_utc):
+        """
+        Calculate UT1
+        ut1_utc: UT1-UTC in seconds
+        """
+        ut1 = self.dto + dt.timedelta(0,ut1_utc)
+        return ut1
+
+
+    def gpstime(self):
+        """
+        Calculate GPS time
+        """
+        return self.tai() - dt.timedelta(0,19)
+
+
+    def set_mdt(self,mdt):
+        """
+        Set the time using the matplotlib date time stamp
+        """
+        mp_epoch = dt.datetime(1,1,1)
+        stamp = mp_epoch + dt.timedelta(days=(int(mdt)-1))
+        self.dto = stamp
+        return self
+
+
+    def set_yyyy_ddd(self,yyyy,ddd):
+        """
+        Set the time using the format YYYY DDD
+        """
+        dto = dt.datetime(int(yyyy),1,1,0,0,0,0)
+        dto = dto + dt.timedelta(days=(int(ddd) - 1))
+        self.dto = dto
+        return self
+
+
+    def set_yyyy_ddd_sod(self,yyyy,ddd,sod):
+        """
+        Set the time using the format YYYY DDD SOD
+        ddd: day of year
+        sod: seconds of day
+        """
+        dto = dt.datetime(int(yyyy),1,1,0,0,0,0)
+        dto = dto + dt.timedelta(days=(int(ddd) - 1 + sod/86400.0))
+        self.dto = dto
+        return self
+
+
+    def yy2yyyy(self,yy):
+        """
+        Convert YY to YYYY
+        """
+        if yy >= 80 and yy < 100:
+            yyyy = yy + 1900
+        elif yy >= 0 and yy < 80:
+            yyyy = yy + 2000
+        else:
+            raise ValueError("yy value should be between 0 and 99")
+        return yyyy
+
+
+    def set_yy_ddd(self,yy,ddd):
+        """
+        Set the time using the format YY DDD
+        """
+        yyyy = self.yy2yyyy(yy)
+        dto = dt.datetime(int(yyyy),1,1,0,0,0,0)
+        dto = dto + dt.timedelta(days=(int(ddd) - 1))
+        self.dto = dto
+
+
+    def set_yyyy_MM_dd_hh_mm_ss(self,yyyy,MM,dd,hh,mm,ss):
+        """
+        Set the time using the format YYYY MM DD hh mm ss
+        """
+        dto = dt.datetime(int(yyyy),int(MM),int(dd),int(hh),int(mm),
+                          int(ss),0)
+        self.dto = dto
+
+
+    def set_wwww(self,wwww):
+        """
+        set the time to the start of the given GPS week
+        """
+        self.dto = ( (wwww * 7) * self.oned ) + self.gpsE0
+
+
+    def set_wwww_dow(self,wwww,dow):
+        """
+        set the time to the given GPS week and day of week
+        """
+        self.dto = ( (wwww * 7) * self.oned ) + self.gpsE0 + (dow*self.oned)
+
+
+    def yyyy_MM_dd_mm_ss_ms(self):
+        """
+        yyyy_MM_dd_mm_ss_ms(dto)
+        Return the values needed to form a valid time string
+        from a date time object
+
+        """
+        rtn = []
+        rtn.append( self.yyyy() )
+        rtn.append( self.MM() )
+        rtn.append( self.dom() )
+        rtn.append( self.hh() )
+        rtn.append( self.mm() )
+        rtn.append( self.ss() )
+        rtn.append( self.ms() )
+        return rtn
+
+#=========================
+
+if __name__ == "__main__":
+
+    gc = gpsCal()
+    gc.calendar()
+    print("yy;",gc.yy())
+    print("ms;",gc.ms())
+    gc.set_yyyy_ddd(2017,1)
+    print("yy",gc.yy())
+    print("ddd",gc.ddd())
+    gc.set_yy_ddd(17,151)
+    print("yyyy",gc.yyyy())
+    print("ddd",gc.ddd())
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    gc.set_wwww(1963)
+    print("yyyy",gc.yyyy())
+    print("ddd",gc.ddd())
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    gc.dec_day(3)
+    print("yyyy",gc.yyyy())
+    print("ddd",gc.ddd())
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    print("====================================================================")
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    print("mdt",gc.mdt())
+    mdt = gc.mdt()
+    print("gc.set_mdt(mdt)")
+    gc.set_mdt(mdt)
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    print("====================================================================")
+    print("Using a mdt of 736550.0 ")
+    gc.set_mdt(736550.0)
+    print("mdt",gc.mdt())
+    print("yyyy_MM_dd_mm_ss_ms:",gc.yyyy_MM_dd_mm_ss_ms())
+    print("====================================================================")
diff --git a/rocs/helmert.py b/rocs/helmert.py
new file mode 100755
index 0000000..193413d
--- /dev/null
+++ b/rocs/helmert.py
@@ -0,0 +1,1340 @@
+# 7-parameter similarity Helmert transformation
+
+import numpy as np
+from scipy import optimize
+import warnings
+import logging
+import time
+import rocs.checkutils as checkutils
+from rocs.rotation import Rotation
+
+logger = logging.getLogger(__name__)
+
+# Toggle between a version compatible with old software and the new
+# version
+old_version = True
+
+class Helmert:
+
+
+    def __init__(self,helmert=None,sighelmert=None,coords0=None,
+                 coords1=None,sigmas0=None,sigmas1=None,satinfo=None,
+                 orbflags=None,weighted_center=True,acname=None):
+
+        # Helmert transformation vector:
+        # self.helmert = [Tx,Ty,Tz,theta1,theta2,theta3,s]
+
+        if helmert is None:
+
+            # Default helmert = [0,0,0,0,0,0,1]
+            self.helmert = np.zeros(7)
+            self.helmert[6] = 1.0
+
+        else:
+
+            # Check the given helmert
+            checkutils.check_array(helmert,7)
+
+            # After the above check, set Helmert parameters
+            self.helmert = np.array(helmert)
+
+        if sighelmert is None:
+
+            # Default sighelmert = [1,1,1,1,1,1,1]
+            self.sighelmert = np.ones_like(self.helmert)
+
+        else:
+
+            # Check the given sighelmert
+            checkutils.check_array(sighelmert,7)
+
+            # After the above check, set sigma of Helmert parameters
+            self.sighelmert = np.array(sighelmert)
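+
+        # For example (an illustrative sketch, not a real solution):
+        #     helmert = [0.01, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]
+        # translates by 0.01 in X (in the units of the input
+        # coordinates), applies no rotation, and keeps unit scale.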
+
+        # coords0 and coords1 are the two coordinate systems
+        if coords0 is None and coords1 is None:
+            # both coords0 and coords1 None:
+            # default both to 3*3 zeros
+            self.coords0 = np.zeros((3,3))
+            self.coords1 = np.zeros((3,3))
+
+        elif coords1 is None:
+            # coords0 defined but coords1 None:
+            # default coords1 to zeros of the same size as coords0
+            checkutils.check_coords(coords0)
+            self.coords0 = np.array(coords0)
+            self.coords1 = np.zeros_like(coords0)
+
+        elif coords0 is None:
+            # coords0 None but coords1 defined:
+            # default coords0 to zeros of the same size as coords1
+            checkutils.check_coords(coords1)
+            self.coords1 = np.array(coords1)
+            self.coords0 = np.zeros_like(coords1)
+
+        else:
+            # both coords0 and coords1 defined
+            checkutils.check_coords(coords0)
+            checkutils.check_coords(coords1)
+
+            # Also, check if coords0 and coords1 have the same
+            # dimensions
+            if (np.shape(coords0) != np.shape(coords1)):
+                logger.error("The two input coordinates must be of the "
+                             "same size",stack_info=True)
+                raise ValueError("The two input coordinates must be of "
+                                 "the same size")
+
+            self.coords0 = np.array(coords0)
+            self.coords1 = np.array(coords1)
+
+        # sigmas0 is an array containing the standard deviations of the
+        # coords0
+        if sigmas0 is None:
+
+            self.sigmas0 = np.ones_like(self.coords0)
+
+        else:
+
+            # check the size
+            checkutils.check_coords(sigmas0)
+            if (np.shape(sigmas0) != np.shape(coords0)):
+                logger.error("The input coordinate sigmas must be the "
+                             "same size as the input coordinates",
+                             stack_info=True)
+                raise ValueError("The input coordinate sigmas must be "
+                                 "the same size as the input "
+                                 "coordinates")
+            self.sigmas0 = np.array(sigmas0)
+
+        # sigmas1 is an array containing the standard deviations of the
+        # coords1
+        if sigmas1 is None:
+
+            self.sigmas1 = np.ones_like(self.coords1)
+
+        else:
+
+            # check the size
+            checkutils.check_coords(sigmas1)
+            if (np.shape(sigmas1) != np.shape(coords1)):
+                logger.error("The input coordinate sigmas must be the "
+                             "same size as the input coordinates",
+                             stack_info=True)
+                raise ValueError("The input coordinate sigmas must be "
+                                 "the same size as the input "
+                                 "coordinates")
+            self.sigmas1 = np.array(sigmas1)
+
+        if satinfo is not None:
+
+            # Check the given satinfo
+            if np.shape(satinfo) != (np.shape(self.coords0)[0],4):
+                logger.error(f"\nThe given satinfo must be a "
+                             f"{np.shape(self.coords0)[0]} by 4 array\n"
+                             f"Shape of the given satinfo: "
+                             f"{np.shape(satinfo)}\n",
+                             stack_info=True)
+                raise ValueError(f"The input satinfo must be a "
+                                 f"{np.shape(self.coords0)[0]} by 4 "
+                                 "array")
+
+            for row in satinfo:
+
+                if not isinstance(row[0],str):
+                    logger.error("\nThe first column of satinfo "
+                                 "(constellation ID) must be strings\n",
+                                 stack_info=True)
+                    raise TypeError("The first column of satinfo "
+                                    "(constellation ID) must be strings")
+
+                if not isinstance(row[1],int):
+                    logger.error("\nThe second column of satinfo (PRN "
+                                 "number) must be integers\n",
+                                 stack_info=True)
+                    raise TypeError("The second column of satinfo (PRN "
+                                    "number) must be integers")
+
+                if not isinstance(row[2],int) and not np.isnan(row[2]):
+                    logger.error("\nThe third column of satinfo (SVN "
+                                 "number) must be integers or nans\n",
+                                 stack_info=True)
+                    raise TypeError("The third column of satinfo (SVN "
+                                    "number) must be integers or nans")
+
+                if not isinstance(row[3],str):
+                    logger.error("\nThe fourth column of satinfo "
+                                 "(satellite block) must be strings\n",
+                                 stack_info=True)
+                    raise TypeError("The fourth column of satinfo "
+                                    "(satellite block) must be strings")
+
+            if any(np.isnan(svn) for svn in satinfo[:,2]):
logger.warning("\nThere are unknown SVN numbers in satinfo.\n") + + + # After the above checks, set satinfo + self.satinfo = np.array(satinfo) + + if orbflags is not None: + + # check the given orbflags + if np.shape(orbflags) != np.shape(self.coords0): + logger.error(f"\nThe given orbflags must be the same shape as " + f"the given coordinates", stack_info=True) + raise ValueError(f"The input orbflags must be a " + f"{np.shape(self.coords0)[0]} by 3 array") + allowed_flags = (['okay','missing_val_other','excluded_sat_other', + 'missing_sys_other','missing_blk_other','missing_sat_other', + 'missing_val','excluded_sat','missing_sys','missing_blk', + 'missing_sat','excluded_sat_all','unweighted_sys','unweighted_sat']) + for row in orbflags: + for flag in row: + if flag not in allowed_flags: + logger.error("\nFlags in the orbflags can only be " + f"one of the following:\n " + f"{allowed_flags} \n", stack_info=True) + raise ValueError(f"Flag {flag} not recognized!") + else: + + # If not specified, set a default orbflags of 'okay' for all data + orbflags = np.full_like(coords0,'okay',dtype=object) + + # After the above checks, set orbflags attribute + self.orbflags = orbflags + + # Check the given weighted_center flag + if not isinstance(weighted_center,bool): + logger.error("\nThe given weighted_center must be a True/False " + "boolean flag\n", stack_info=True) + raise TypeError("weighted_center is not of type bool!") + + # After the above check, set weighted_center attribute + self.weighted_center = weighted_center + + # filtered versions for the calculations + self.coords0_flt = self.coords0 + self.coords1_flt = self.coords1 + self.sigmas0_flt = self.sigmas0 + self.sigmas1_flt = self.sigmas1 + if hasattr(self,'satinfo'): + self.satinfo_flt = self.satinfo + self.acname = acname + + + def printInfo(self): + + # print out the Helmert parameters + + logger.info("") + logger.info("Helmert transformation parameters: ") + logger.info(f"Tx : {self.helmert[0]} +- {self.sighelmert[0]}") + logger.info(f"Ty : {self.helmert[1]} +- {self.sighelmert[1]}") + logger.info(f"Tz : {self.helmert[2]} +- {self.sighelmert[2]}") + logger.info(f"theta1: {self.helmert[3]} +- {self.sighelmert[3]}") + logger.info(f"theta2: {self.helmert[4]} +- {self.sighelmert[4]}") + logger.info(f"theta3: {self.helmert[5]} +- {self.sighelmert[5]}") + logger.info(f"scale : {self.helmert[6]} +- {self.sighelmert[6]}") + logger.info("") + logger.info("First coordinate system coords0: ") + logger.info(self.coords0) + logger.info(np.shape(self.coords0)) + logger.info("") + logger.info("Second coordinate system coords1: ") + logger.info(self.coords1) + logger.info(np.shape(self.coords1)) + logger.info("") + logger.info("sigmas0: ") + logger.info(self.sigmas0) + logger.info("") + logger.info("sigmas1: ") + logger.info(self.sigmas1) + logger.info("") + + + def transform(self): + + # Forward transform the coordinate system coords0 to coords1 + + # Parameters of the transformation + Tx = self.helmert[0] + Ty = self.helmert[1] + Tz = self.helmert[2] + theta1 = self.helmert[3] + theta2 = self.helmert[4] + theta3 = self.helmert[5] + scale = self.helmert[6] + + # Rotation matrix R = R3.R2.R1 + R1 = Rotation(theta1,1).rot + R2 = Rotation(theta2,2).rot + R3 = Rotation(theta3,3).rot + R = np.matmul(R3,np.matmul(R2,R1)) + + # Translation vector + T = np.array([Tx,Ty,Tz]) + + # Transform the coordinates: X1 = s.R.(X0+T) + self.coords1 = np.transpose( + scale*np.matmul(R,np.transpose(self.coords0+T))) + + # Error propagation to determine the sigmas on 
+
+        # Error propagation to determine the sigmas on coords1:
+        # sigmas1^2 = s^2.R^2.sigmas0^2
+        self.sigmas1 = np.sqrt(np.transpose(
+                scale**2*np.matmul(np.power(R,2),
+                                   np.transpose(self.sigmas0**2))))
+
+
+    def Jacobian(self):
+
+        # Create the Jacobian matrix containing derivatives of
+        # X1 = scale*R*(X0+T) with respect to the Helmert parameters
+        # Another implementation would be X1 = scale*R*X0 + T
+        # However, the first implementation, which is chosen here,
+        # avoids having ones in the Jacobian matrix, which further
+        # avoids getting zeros in the double-exponential function
+        # derivative (which has the sign function in it; the sum of a
+        # set of signs could yield zero).
+        # Also, create the weight matrix
+
+        # Parameters of the transformation
+        Tx = self.helmert[0]
+        Ty = self.helmert[1]
+        Tz = self.helmert[2]
+        theta1 = self.helmert[3]
+        theta2 = self.helmert[4]
+        theta3 = self.helmert[5]
+        scale = self.helmert[6]
+
+        T = np.array([Tx,Ty,Tz])
+
+        # Rotation matrix R = R3.R2.R1
+        R1 = Rotation(theta1,1).rot
+        R2 = Rotation(theta2,2).rot
+        R3 = Rotation(theta3,3).rot
+        R = np.matmul(R3,np.matmul(R2,R1))
+
+        # derivative of rotation matrix with respect to the three
+        # rotations
+        dR1dtheta1 = Rotation(theta1,1).drot
+        dR2dtheta2 = Rotation(theta2,2).drot
+        dR3dtheta3 = Rotation(theta3,3).drot
+
+        # dRdtheta1 = R3.R2.dR1dtheta1
+        # dRdtheta2 = R3.dR2dtheta2.R1
+        # dRdtheta3 = dR3dtheta3.R2.R1
+        dRdtheta1 = np.matmul(R3,np.matmul(R2,dR1dtheta1))
+        dRdtheta2 = np.matmul(R3,np.matmul(dR2dtheta2,R1))
+        dRdtheta3 = np.matmul(dR3dtheta3,np.matmul(R2,R1))
+
+        # get the number of points m
+        m = np.shape(self.coords0_flt)[0]
+
+        # Initialize the Jacobian matrix A
+        A = np.zeros((3*m,7))
+
+        # Initialize the weight matrix W
+        # To preserve memory, only store the diagonal elements.
+        # Non-diagonal elements are zero, thus not needed to be stored
+        W = np.zeros((3*m))
+
+        # Derivative wrt translations
+        # dX1dT = scale*R
+        dX1dT = scale*R
+        dX1dT = np.tile(dX1dT,[m,1])
+
+        # Derivative wrt rotations
+        dX1dtheta1 = np.array(np.transpose(scale*np.matmul(
+                dRdtheta1,np.transpose(self.coords0_flt+T))).flatten())
+        dX1dtheta2 = np.array(np.transpose(scale*np.matmul(
+                dRdtheta2,np.transpose(self.coords0_flt+T))).flatten())
+        dX1dtheta3 = np.array(np.transpose(scale*np.matmul(
+                dRdtheta3,np.transpose(self.coords0_flt+T))).flatten())
+
+        # Derivative wrt scale
+        dX1dscale = np.array(np.transpose(np.matmul(
+                R,np.transpose(self.coords0_flt+T))).flatten())
+
+        # Jacobian matrix A
+        A = np.transpose(np.vstack(
+                (dX1dT[:,0],dX1dT[:,1],dX1dT[:,2],dX1dtheta1,
+                 dX1dtheta2,dX1dtheta3,dX1dscale)))
+
+        # Weight matrix W
+        W = (1.0/(self.sigmas1_flt**2)).flatten()
+
+        self.A = A
+        self.W = W
+
+
+    def l2norm(self,dx_threshold=1e-8,maxiter=1):
+
+        # Use L2 norm (least-squares) to estimate Helmert parameters
+        # between the two coordinate systems coords0 and coords1. The
+        # current self.helmert parameters will be used as initial
+        # values, iterations of least-squares will be performed, and the
+        # estimated corrections will be applied; so, self.helmert will
+        # be updated. Also, at the end of this function,
+        # self.minim_funcs() is called so the rms, as well as abdev and
+        # the robust functions for l1 norm minimization, are
+        # calculated/updated.
+
+        # Check the given attributes
+        checkutils.check_scalar(dx_threshold)
+        checkutils.check_scalar(maxiter)
+        if not isinstance(maxiter,int):
+            logger.error("The given value for maxiter must be an "
+                         "integer",stack_info=True)
+            raise TypeError("The given value for maxiter must be an "
+                            "integer")
+
+        # Find 'okay' data; I think we can change this to all 'okay',
+        # 'missing_val_other' and 'missing_sat_other' for all centers,
+        # weighted or unweighted
+        okay_rows = np.where(
+                (self.orbflags=='okay').all(axis=1) &
+                (~np.isnan(self.coords0)).all(axis=1) &
+                (~np.isnan(self.coords1)).all(axis=1))[0]
+        if self.weighted_center is False:
+            okay_rows = np.where(
+                    (self.orbflags!='missing_val').all(axis=1) &
+                    (self.orbflags!='missing_sys').all(axis=1) &
+                    (self.orbflags!='missing_blk').all(axis=1) &
+                    (self.orbflags!='missing_sat').all(axis=1) &
+                    (self.orbflags!='excluded_sat_all').all(axis=1) &
+                    (~np.isnan(self.coords0)).all(axis=1) &
+                    (~np.isnan(self.coords1)).all(axis=1))[0]
+
+        # Only perform l2norm if there is at least 1 'okay' data point
+        if len(okay_rows) > 0:
+
+            # Exclude if sat/epoch data is missing from any weighted
+            # center (only 'okay' data is used)
+            self.coords0_flt = self.coords0[okay_rows,:]
+            self.coords1_flt = self.coords1[okay_rows,:]
+            self.sigmas0_flt = self.sigmas0[okay_rows,:]
+            self.sigmas1_flt = self.sigmas1[okay_rows,:]
+            if hasattr(self,'satinfo'):
+                self.satinfo_flt = self.satinfo[okay_rows,:]
+
+            iter = 0
+            deltaXcap = 9999.0*np.ones(len(self.helmert))
+
+            # Reiterate until the deltaXcap threshold is reached or the
+            # maximum number of iterations have been performed
+            while (any(item >= dx_threshold for item in deltaXcap) and
+                   iter < maxiter):
+
+                # Determine the Jacobian matrix A
+                self.Jacobian()
+
+                # get the number of points m
+                m = np.shape(self.coords0_flt)[0]
+
+                # Create the observations vector l, which is the
+                # flattened coords1_flt array
+                l = self.coords1_flt.flatten()
+
+                # Create the computational observations vector
+                # (lc = A*params)
+                lc = np.matmul(self.A,self.helmert)
+
+                # Calculate the normal matrix N = A'WA
+                N = np.matmul(np.multiply(np.transpose(self.A),self.W),
+                              self.A)
+
+                # Calculate C = A'W(l-lc)
+                C = np.matmul(np.multiply(np.transpose(self.A),self.W),
+                              l-lc)
+
+                # Determine the estimated adjustments to the parameters
+                # deltaXcap = inv(N).C = inv(A'WA).A'W(l-lc)
+                deltaXcap = np.matmul(np.linalg.inv(N),C)
+
+                # Update self.helmert
+                self.helmert = self.helmert + deltaXcap
+
+                # Calculate the residuals
+                vcap = l-np.matmul(self.A,self.helmert)
+                self.residuals = vcap.reshape(self.coords1_flt.shape)
+
+                # Calculate rms = sqrt(vcap'vcap/df) where df = No. of
+                # observations minus No. of parameters
+                rms = np.sqrt(np.matmul(np.transpose(vcap),vcap)
+                              /(len(vcap)-len(self.helmert)))
+
+                # Sigma of the Helmert parameters
+                # sigXcap = rms.sqrt(inv(N))
+                # We are only interested in the diagonal elements for
+                # sigmas of the estimated Helmert parameters.
+                sigXcap = np.zeros(len(self.helmert))
+                for i in range(len(sigXcap)):
+                    sigXcap[i] = rms * np.sqrt(np.linalg.inv(N)[i,i])
+                self.sighelmert = sigXcap
+
+                iter += 1
+                self.vcap = vcap
+
+            logger.debug(f"Number of iterations for L2 norm solution: "
+                         f"{iter}")
+            logger.debug(f"L2 norm solution:\nhelmert: {self.helmert}")
+
+        else:
+            logger.warning("There are no 'okay' data; skipping l2 norm")
+
+
+    def minim_funcs(self):
+
+        # Calculate functions for the minimization problem. This
+        # includes l2 norm minimization (rms) as well as l1 norm
+        # (robust) functions.
+        #
+        # For the l1 norm, define the set of robust maximum likelihood
+        # functions (M-estimates) to be solved.
+        #
+        # The (local) M-estimate to be minimized is in general:
+        #
+        #   N-1                        N-1
+        #   ---                        ---
+        #   \       yi - y(xi|a)       \                    yi - y(xi|a)
+        #    | rho(-------------)  =    | rho(z)  ,    z = -------------
+        #   /           sigi           /                        sigi
+        #   ---                        ---
+        #   i=0                        i=0
+        #
+        # The above function needs to be minimized over a (i.e. the set
+        # of k elements of vector a that minimize the above function).
+        # rho is the negative logarithm of the probability density of
+        # the distribution
+        #
+        # if we define the derivative of rho(z) as psi(z):
+        #
+        #   psi(z) = drho(z)/dz
+        #
+        # the above minimization can be written as the following set of
+        # M equations:
+        #
+        #        N-1
+        #        ---
+        #        \     1       yi - y(xi)     dy(xi|a)
+        #   0 =   | ----*psi(------------)*(----------) , k = 0,...,M-1
+        #        /   sigi        sigi           dak
+        #        ---
+        #        i=0
+        #
+        # Therefore, the minimization problem turns into a root finding
+        # problem.
+        #
+        # The psi function acts as a weighting function for the
+        # individual data points.
+        # Depending on the assumed distribution of errors, we can use
+        # different psi functions (a normal distribution gives the l2
+        # norm: more deviated points get more weight:
+        # rho(z) = (1/2)z^2 ; psi(z) = z)
+        #
+        # Here, we use a double (two-sided) exponential function:
+        #   rho(z) = |z| ; psi(z) = sign(z)
+        # so all points get the same weight
+        #
+        # So our choice of robust functions is:
+        #
+        #        N-1
+        #        ---
+        #        \     1        yi - y(xi)     dy(xi|a)
+        #   F =   | ----*sign(------------)*(----------) , k = 0,...,6
+        #        /   sigi         sigi           dak
+        #        ---
+        #        i=0
+        #
+        # The minimization algorithm is based on:
+        # Press WH, Teukolsky SA, Vetterling WT, Flannery BP (2007)
+        # Numerical recipes 3rd edition: the art of scientific
+        # computing. Cambridge University Press, Cambridge
+        # The same reference contains other psi functions that could be
+        # used
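+        #
+        # As a standalone illustration of the rho(z) = |z| choice (a
+        # sketch, not part of the combination itself): for a single
+        # location parameter a and equal sigmas, rooting
+        # sum(sign(yi - a)) = 0 gives the median, whereas the l2 norm
+        # gives the mean, which is far more sensitive to outliers:
+        #
+        #   >>> import numpy as np
+        #   >>> y = np.array([1.0, 1.2, 0.9, 5.0])   # one outlier
+        #   >>> np.mean(y)                           # l2 estimate
+        #   2.025
+        #   >>> np.median(y)                         # l1 estimate
+        #   1.1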
+        #
+        # We also calculate some other minimization functions.
+        #
+        # The list of functions calculated:
+        #   Fexp  : robust function to be rooted for maximum likelihood
+        #           estimate using a 2-sided exponential distribution of
+        #           errors
+        #   abdev : absolute deviation
+        #   rms   : root mean square error
+
+        # Find 'okay' rows
+        okay_rows = np.where(
+                (self.orbflags != 'excluded_sat_other').all(axis=1) &
+                (self.orbflags != 'missing_sys_other').all(axis=1) &
+                (self.orbflags != 'missing_blk_other').all(axis=1) &
+                (self.orbflags != 'missing_sat_other').all(axis=1) &
+                (self.orbflags != 'unweighted_sat').all(axis=1) &
+                (self.orbflags != 'excluded_sat').all(axis=1) &
+                (~np.isnan(self.coords0)).all(axis=1) &
+                (~np.isnan(self.coords1)).all(axis=1))[0]
+        logger.debug(f"orbflags {self.orbflags}")
+        logger.debug(f"okay_rows {okay_rows} {len(okay_rows)}")
+        logger.debug(f"coords0 {np.shape(self.coords0)}")
+        self.coords0_flt = self.coords0[okay_rows,:]
+        self.coords1_flt = self.coords1[okay_rows,:]
+        self.sigmas0_flt = self.sigmas0[okay_rows,:]
+        self.sigmas1_flt = self.sigmas1[okay_rows,:]
+        logger.debug(f"coords0 {self.coords0} {np.shape(self.coords0)}")
+        logger.debug(f"coords0_flt {self.coords0_flt} "
+                     f"{np.shape(self.coords0_flt)}")
+
+        # 'fine' rows are more relaxed than 'okay' rows
+        fine_rows = np.where(
+                (self.orbflags!='missing_val').all(axis=1) &
+                (self.orbflags!='missing_sys').all(axis=1) &
+                (self.orbflags!='missing_blk').all(axis=1) &
+                (self.orbflags!='missing_sat').all(axis=1) &
+                (self.orbflags!='excluded_sat_all').all(axis=1) &
+                (~np.isnan(self.coords0)).all(axis=1) &
+                (~np.isnan(self.coords1)).all(axis=1))[0]
+
+        coords0_fine = self.coords0[fine_rows,:]
+        coords1_fine = self.coords1[fine_rows,:]
+        sigmas0_fine = self.sigmas0[fine_rows,:]
+        sigmas1_fine = self.sigmas1[fine_rows,:]
+        orbflags_fine = self.orbflags[fine_rows,:]
+        if hasattr(self,'satinfo'):
+            satinfo_fine = self.satinfo[fine_rows,:]
+        logger.debug(f"coords1_fine {coords1_fine} "
+                     f"{np.shape(coords1_fine)} "
+                     f"{type(coords1_fine)}")
+
+        # Determine the modelled data (computed observations vector)
+        # We do this by creating another instance of the Helmert class
+        helmert_modeled = Helmert(helmert=self.helmert,
+                                  coords0=coords0_fine,
+                                  sigmas0=sigmas0_fine)
+        t0 = time.process_time()
+        helmert_modeled.transform()
+        t1 = time.process_time() - t0
+        logger.debug(f"time transform: {t1}")
+
+        # get the number of points m
+        m_okay = np.shape(self.coords0_flt)[0]
+        m_fine = np.shape(coords0_fine)[0]
+        m_all = np.shape(self.coords0)[0]
+        logger.debug(f"m_all={m_all}, m_fine={m_fine}, m_okay={m_okay}")
+        logger.debug(f"helmert_modeled.coords1: "
+                     f"{helmert_modeled.coords1}")
+
+        # Initialize sign vector and 1/sig vector (both 1 by 3*m)
+        sign = np.zeros((1,3*m_okay))
+        oneover_sig = np.zeros((1,3*m_okay))
+
+        # Initialize some minimization functions
+        abdev = 0.0
+        rms = 0.0
+        c_okay = 0
+
+        # if satinfo exists, also initialize satellite-specific,
+        # block-specific and constellation-specific minimization
+        # functions
+        if hasattr(self,'satinfo'):
+
+            # Satellite specific minimization functions
+            sat_abdev = {}
+            sat_rms = {}
+
+            # Block specific minimization functions
+            blk_abdev = {}
+            blk_rms = {}
+
+            # Constellation specific minimization functions
+            sys_abdev = {}
+            sys_rms = {}
only calculate center abdev and rms if all the
+        # weighted centers have data
+        D = abs(coords1_fine - helmert_modeled.coords1)
+        okay_rows_D = np.where(
+                (orbflags_fine != 'excluded_sat_other').all(axis=1) &
+                (orbflags_fine != 'missing_sys_other').all(axis=1) &
+                (orbflags_fine != 'missing_blk_other').all(axis=1) &
+                (orbflags_fine != 'missing_sat_other').all(axis=1) &
+                (orbflags_fine != 'unweighted_sat').all(axis=1) &
+                (orbflags_fine != 'excluded_sat').all(axis=1))[0]
+        D_okay = D[okay_rows_D]
+        logger.debug(f"D_okay: {D_okay}")
+        if hasattr(self,'satinfo'):
+            satinfo_okay = satinfo_fine[okay_rows_D]
+        logger.debug(f"len(D_okay): {len(D_okay)}")
+        self.abdev = np.nansum(D_okay)/(3*(len(D_okay))-len(self.helmert))
+        self.rms = np.sqrt(np.nansum(D_okay**2)/(3*m_okay-len(self.helmert)))
+        sign = (np.sign((coords1_fine - helmert_modeled.coords1)/sigmas1_fine))
+        sign_okay = sign[okay_rows_D].flatten()
+        oneover_sig = 1.0/sigmas1_fine
+        oneover_sig_okay = oneover_sig[okay_rows_D].flatten()
+        logger.debug(f"oneover_sig_okay: {oneover_sig_okay}")
+
+        wht_rows_D = np.where(
+                (orbflags_fine != 'excluded_sat_other').all(axis=1) &
+                (orbflags_fine != 'missing_sys_other').all(axis=1) &
+                (orbflags_fine != 'missing_blk_other').all(axis=1) &
+                (orbflags_fine != 'missing_sat_other').all(axis=1) &
+                (orbflags_fine != 'unweighted_sat').all(axis=1) &
+                (orbflags_fine != 'unweighted_sys').all(axis=1) &
+                (orbflags_fine != 'excluded_sat').all(axis=1))[0]
+        D_wht = D[wht_rows_D]
+        self.abdev_wht = np.nansum(D_wht)/(3*(len(D_wht))-len(self.helmert))
+
+        if hasattr(self,'satinfo'):
+
+            sats = np.unique(satinfo_fine.astype("<U20"),axis=0)
+            for sat in sats:
+                sat_rows = np.where((satinfo_fine[:,0]==sat[0]) &
+                        (satinfo_fine[:,1]==sat[1]) &
+                        (satinfo_fine[:,2]==sat[2]) &
+                        (orbflags_fine != 'excluded_sat_other').all(axis=1) &
+                        (orbflags_fine != 'missing_sat_other').all(axis=1) &
+                        (orbflags_fine != 'unweighted_sat').all(axis=1) &
+                        (orbflags_fine != 'excluded_sat').all(axis=1))[0]
+                if len(sat_rows) > 0:
+                    Dsat = D[sat_rows]
+                    sat_abdev[sat[0],sat[1],sat[2]] = (
+                        np.nansum(Dsat)/(3*len(sat_rows)-len(self.helmert)))
+                    sat_rms[sat[0],sat[1],sat[2]] = np.sqrt(
+                        np.nansum(Dsat**2)/(3*len(sat_rows)-len(self.helmert)))
+                    logger.debug(f"coords1_fine: {coords1_fine[sat_rows]}")
+                    logger.debug(f"helmert_modeled.coords1: {helmert_modeled.coords1[sat_rows]}")
+                    logger.debug(f"Dsat: {Dsat}")
+                    logger.debug(f"TSTHEL: {self.acname} {sat[1]} {3*len(sat_rows)} "
+                            f"{np.nansum(Dsat**2)} {sat_rms[sat[0],sat[1],sat[2]]*100.0}")
+
+            blocks = np.unique(sats[:,3])
+            blocks.sort()
+            for blk in blocks:
+                blk_rows = np.where((satinfo_fine[:,3]==blk) &
+                        (orbflags_fine != 'excluded_sat_other').all(axis=1) &
+                        (orbflags_fine != 'missing_sat_other').all(axis=1) &
+                        (orbflags_fine != 'unweighted_sat').all(axis=1) &
+                        (orbflags_fine != 'excluded_sat').all(axis=1))[0]
+                if len(blk_rows) > 0:
+                    Dblk = D[blk_rows]
+                    blk_abdev[blk] = (
+                        np.nansum(Dblk)/(3*len(blk_rows)-len(self.helmert)))
+                    blk_rms[blk] = np.sqrt(
+                        np.nansum(Dblk**2)/(3*len(blk_rows)-len(self.helmert)))
+
+            systems = np.unique(sats[:,0])
+            systems.sort()
+            for sys_id in systems:
+                sys_rows = np.where((satinfo_fine[:,0]==sys_id) &
+                        (orbflags_fine != 'excluded_sat_other').all(axis=1) &
+                        (orbflags_fine != 'missing_blk_other').all(axis=1) &
+                        (orbflags_fine != 'missing_sat_other').all(axis=1) &
+                        (orbflags_fine != 'unweighted_sat').all(axis=1) &
+                        (orbflags_fine != 'excluded_sat').all(axis=1))[0]
+                if len(sys_rows) > 0:
+                    Dsys = D[sys_rows]
+                    sys_abdev[sys_id] = (
+                        np.nansum(Dsys)/(3*len(sys_rows)-len(self.helmert)))
+                    sys_rms[sys_id] = np.sqrt(
+                        np.nansum(Dsys**2)/(3*len(sys_rows)-len(self.helmert)))
+
+            self.sat_abdev = sat_abdev
+            self.sat_rms = sat_rms
+            self.blk_abdev = blk_abdev
+            self.blk_rms = blk_rms
+            self.sys_abdev = sys_abdev
+            self.sys_rms = sys_rms
+
+        # Consider sign of zero to be 1
+        sign_okay[sign_okay == 0] = 1.0
+
+        # Create 
the Jacobian matrix + self.Jacobian() + + # Now form the robust function by performing the summation through + # multiplying the sign and Jacobian matrices: + # Fexp = (1/sigma).sign * A + self.Fexp = (np.matmul(oneover_sig_okay*sign_okay,self.A). + reshape(len(self.helmert))) + + if any(item ==0.0 for item in self.Fexp): + logger.debug(f"There are zeros in Fexp\n" + f"self.Fexp: {self.Fexp}\n" + f"oneover_sig_okay: {oneover_sig_okay}\n" + f"sign_okay: {sign_okay}\n" + f"self.A column 2:\n{self.A[:,2]}") + + + def bracket(self,interval=None,sigscale=3,maxiter=100): + + # Performs the bracketing: starting from the current state of + # self.helmert as the opening bracket, tries to find two sets of + # Helmert parameters so the robust functions Fexp have a root between + # the two sets of Helmert parameters + + # Inputs: + # interval: the vector defining the interval for moving the brackets + # sigscale: the scale given to Helmert sigmas to determine interval + # Note: + # One of the interval or sigscale is required + # sigscale is ignored if interval is given + # + # maxiter: maximum number of iterations for bracketing + + # Check the input arguments + + # interval/sigscale + if interval is None: + if hasattr(self,'sighelmert'): + checkutils.check_scalar(sigscale) + interval = sigscale*self.sighelmert + else: + logger.error("Input argument interval is not given, and there" + " is no sighelmert attribute! Try calling " + " helmert.l2norm() first or specifying interval!", + stack_info=True) + raise TypeError("bracket() missing argument: 'interval'") + else: + checkutils.check_array(interval,7) + + logger.debug(f"interval: {interval}") + + # maxiter + checkutils.check_scalar(maxiter) + if not isinstance(maxiter,int): + logger.error("The given value for maxiter must be an integer", + stack_info=True) + raise TypeError("The given value for maxiter must be an integer") + + + # Create two instances of helmert class for the bracketing + logger.debug(f"coords0 bracket {self.coords0} {np.shape(self.coords0)}") + logger.debug(f"orbflags bracket {self.orbflags} {np.shape(self.orbflags)}") + + # The first is the same as the current one + helmert1 = Helmert(helmert=self.helmert,sighelmert=self.sighelmert, + coords0=self.coords0,sigmas0=self.sigmas0, + coords1=self.coords1,sigmas1=self.sigmas1, + orbflags=self.orbflags, + weighted_center=self.weighted_center) + helmert1.minim_funcs() + helm1 = helmert1.helmert + sighelm1 = helmert1.sighelmert + F1 = helmert1.Fexp + + # The second instance of the helmert class as prescribed below + helm2 = helm1 + interval*np.sign(F1) + helmert2 = Helmert(helmert=helm2,sighelmert=sighelm1, + coords0=self.coords0,sigmas0=self.sigmas0, + coords1=self.coords1,sigmas1=self.sigmas1, + orbflags=self.orbflags, + weighted_center=self.weighted_center) + helmert2.minim_funcs() + helm2 = helmert2.helmert + sighelm2 = helmert2.sighelmert + F2 = helmert2.Fexp + + logger.debug("Initial brackets from L2 norm:") + logger.debug(f"helm1 {helm1}") + logger.debug(f"helm2 {helm2}") + logger.debug(f"rms1 F1 {helmert1.rms} {F1}") + logger.debug(f"rms2 F2 {helmert2.rms} {F2}") + + # Find the brackets that have different signs, so we + # are sure that there is a root between them + c = 0 + while (any(item > 0 for item in F1*F2) and c < maxiter): + for i in range(0,len(self.helmert)): + if (F1[i]*F2[i] > 0): + helmnew = 2.0*helm2[i]-helm1[i] + helm1[i] = helm2[i] + helm2[i] = helmnew + if old_version is True: + F1[i] = F2[i] # the old way where correlations + # are not considered. 
we now recalculate
+                                  # both F1 and F2
+
+            helmert1 = Helmert(helmert=helm1,sighelmert=sighelm1,
+                    coords0=self.coords0,sigmas0=self.sigmas0,
+                    coords1=self.coords1,sigmas1=self.sigmas1,
+                    orbflags=self.orbflags,
+                    weighted_center=self.weighted_center)
+
+            helmert2 = Helmert(helmert=helm2,sighelmert=sighelm2,
+                    coords0=self.coords0,sigmas0=self.sigmas0,
+                    coords1=self.coords1,sigmas1=self.sigmas1,
+                    orbflags=self.orbflags,
+                    weighted_center=self.weighted_center)
+
+            # call minim_funcs
+            helmert1.minim_funcs()
+            helmert2.minim_funcs()
+            if old_version is False:
+                F1 = helmert1.Fexp
+                F2 = helmert2.Fexp
+            logger.debug(f"brackets after iteration {c+1}")
+            logger.debug(f"helm1 {helm1}")
+            logger.debug(f"helm2 {helm2}")
+            logger.debug(f"rms1 F1 {(3*len(self.coords0)-7)*helmert1.rms**2} "
+                    f"{F1}")
+            logger.debug(f"rms2 F2 {(3*len(self.coords0)-7)*helmert2.rms**2} "
+                    f"{F2}")
+            c += 1
+
+        logger.debug(f'No. of iterations for bracketing: {c}\n')
+        if (c == maxiter):
+            logger.warning(f"Number of iterations in bracketing for L1 norm "
+                    f"solution reached maxiter ({maxiter}). Use the "
+                    f"results with caution!")
+
+        self.helmert1 = helm1
+        self.helmert2 = helm2
+        self.F1 = F1
+        self.F2 = F2
+
+
+    def bisection(self,helm1=None,helm2=None,precision_level=None,
+                  sigscale=0.1,precision_limits=[1e-15,1e-13],maxiter=100):
+
+        # Performs the bisection method using the bracketed Helmert parameters
+        #
+        # Inputs:
+        #
+        # helm1 and helm2: the initial brackets for Helmert parameters
+        # precision_level: vector defining the target precision levels for
+        #                  the Helmert parameters
+        # sigscale: the scale given to Helmert sigmas to determine precision
+        #           level
+        #           Note:
+        #           One of precision_level or sigscale is required
+        #           sigscale is ignored if precision_level is given
+        #
+        # precision_limits: array-like of length 2 giving the allowed
+        #                   minimum and maximum limits of precision levels for
+        #                   all the parameters: [min_prec,max_prec]
+        # maxiter: maximum number of iterations for bisection
+
+
+        # Check the input arguments
+
+        # helm1 and helm2
+        if helm1 is None:
+            if hasattr(self,'helmert1'):
+                helm1 = self.helmert1
+            else:
+                logger.error("The first bracket is not given, and there is no"
+                    " helmert1 attribute! Try calling helmert.bracket() "
+                    "first!", stack_info=True)
+                raise TypeError("bisection() missing argument: 'helm1'")
+        else:
+            checkutils.check_array(helm1,7)
+
+        if helm2 is None:
+            if hasattr(self,'helmert2'):
+                helm2 = self.helmert2
+            else:
+                logger.error("The second bracket is not given, and there is no"
+                    " helmert2 attribute! Try calling helmert.bracket() "
+                    "first!", stack_info=True)
+                raise TypeError("bisection() missing argument: 'helm2'")
+        else:
+            checkutils.check_array(helm2,7)
+
+        # precision_level/sigscale
+        if precision_level is None:
+            if hasattr(self,'sighelmert'):
+                checkutils.check_scalar(sigscale)
+                precision_level = sigscale*self.sighelmert
+            else:
+                logger.error("Input argument precision_level is not given, "
+                    "and there is no sighelmert attribute! Try "
+                    "calling helmert.l2norm() first or specifying "
+                    "precision_level!", stack_info=True)
+                raise TypeError("bisection() missing argument: "
+                    "'precision_level'")
+        else:
+            checkutils.check_array(precision_level,7)
+
+        logger.debug(f"precision_level before checking limits:\n "
+                f"{precision_level}")
+
+        # precision_limits:
+        checkutils.check_array(precision_limits,2)
+
+        # Change precision_level based on precision_limits if required
+        for i,item in enumerate(precision_level):
+            if i in [0,1,2]: # translations
+                if item < 1e-7:
+                    logger.warning(f"The precision_level for Helmert parameter"
+                                   f" {i} ({precision_level[i]}) too"
+                                   f" small; set to 1e-07 to"
+                                   f" avoid too many iterations")
+                    precision_level[i] = 1e-7
+                if item > 1e-5:
+                    logger.warning(f"The precision_level for Helmert parameter"
+                                   f" {i} ({precision_level[i]}) too"
+                                   f" large; set to 1e-5 to"
+                                   f" converge to mm level")
+                    precision_level[i] = 1e-5
+            else: # rotations and scale
+                if item < precision_limits[0]:
+                    logger.warning(f"The precision_level for Helmert parameter"
+                                   f" {i} ({precision_level[i]}) too"
+                                   f" small; set to {precision_limits[0]} to"
+                                   f" avoid too many iterations")
+                    precision_level[i] = precision_limits[0]
+                if item > precision_limits[1]:
+                    logger.warning(f"The precision_level for Helmert parameter"
+                                   f" {i} ({precision_level[i]}) too"
+                                   f" large; set to {precision_limits[1]} to"
+                                   f" converge to mm level")
+                    precision_level[i] = precision_limits[1]
+
+        logger.debug(f"precision levels for Helmert parameters: "
+                f"{precision_level}")
+
+        # maxiter
+        checkutils.check_scalar(maxiter)
+        if not isinstance(maxiter,int):
+            logger.error("The given value for maxiter must be an integer",
+                    stack_info=True)
+            raise TypeError("The given value for maxiter must be an integer")
+
+
+        if old_version is True:
+            if hasattr(self,'F1'):
+                F1 = self.F1
+            else:
+                logger.error("The old_version flag is True but there is no F1 "
+                    "attribute! Try calling helmert.bracket() first!",
+                    stack_info=True)
+                raise AttributeError("object has no attribute 'F1'")
+            if hasattr(self,'F2'):
+                F2 = self.F2
+            else:
+                logger.error("The old_version flag is True but there is no F2 "
+                    "attribute! Try calling helmert.bracket() first!",
+                    stack_info=True)
+                raise AttributeError("object has no attribute 'F2'")
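+        # Typical call sequence leading here (a sketch only; 'ref_coords' and
+        # 'ac_coords' are hypothetical arrays of shape (m,3)):
+        #
+        #     h = Helmert(helmert=[0,0,0,0,0,0,1],
+        #                 coords0=ref_coords,coords1=ac_coords)
+        #     h.l2norm()     # initial estimate; also sets sighelmert
+        #     h.bracket()    # sets helmert1/helmert2 bracketing a root
+        #     h.bisection()  # this method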
+        # Perform the bisection while the difference between two brackets is
+        # larger than the precision levels specified for all the Helmert
+        # parameters (or the number of iterations reaches the maximum number
+        # specified)
+
+        # Initialize helm_mid (essential for the case of coords0 == coords1)
+        helm_mid = (helm1+helm2)/2.0
+        helmert_mid = Helmert(helmert=helm_mid,sighelmert=self.sighelmert,
+                coords0=self.coords0,sigmas0=self.sigmas0,
+                coords1=self.coords1,sigmas1=self.sigmas1,
+                orbflags=self.orbflags,
+                weighted_center=self.weighted_center)
+        helmert_mid.minim_funcs()
+        Fmid = helmert_mid.Fexp
+
+        c = 0
+        while (any(item < 0 for item in (precision_level-abs(helm1-helm2)))
+                and c < maxiter):
+
+            # Calculate the mid-point of the brackets
+            helm_mid = (helm1+helm2)/2.0
+
+            t0_helmert = time.process_time()
+            helmert_mid = Helmert(helmert=helm_mid,sighelmert=self.sighelmert,
+                    coords0=self.coords0,sigmas0=self.sigmas0,
+                    coords1=self.coords1,sigmas1=self.sigmas1,
+                    orbflags=self.orbflags,
+                    weighted_center=self.weighted_center)
+            t1_helmert = time.process_time() - t0_helmert
+            t0_minim = time.process_time()
+            helmert_mid.minim_funcs()
+            t1_minim = time.process_time() - t0_minim
+            t0_Fexp = time.process_time()
+            Fmid = helmert_mid.Fexp
+            t1_Fexp = time.process_time() - t0_Fexp
+            logger.debug(f"t1_helmert, t1_minim, t1_Fexp: "
+                    f"{t1_helmert} {t1_minim} {t1_Fexp}")
+
+            if old_version is False:
+                helmert1 = Helmert(helmert=helm1,sighelmert=self.sighelmert,
+                        coords0=self.coords0,sigmas0=self.sigmas0,
+                        coords1=self.coords1,sigmas1=self.sigmas1,
+                        orbflags=self.orbflags,
+                        weighted_center=self.weighted_center)
+
+                helmert2 = Helmert(helmert=helm2,sighelmert=self.sighelmert,
+                        coords0=self.coords0,sigmas0=self.sigmas0,
+                        coords1=self.coords1,sigmas1=self.sigmas1,
+                        orbflags=self.orbflags,
+                        weighted_center=self.weighted_center)
+
+                helmert1.minim_funcs()
+                F1 = helmert1.Fexp
+                helmert2.minim_funcs()
+                F2 = helmert2.Fexp
+
+            logger.debug(f"iteration {c}")
+            logger.debug(f"helm1: {helm1}\nhelm2: {helm2}\n"
+                    f"helm_mid: {helm_mid}")
+            logger.debug(f"F1: {F1}\nF2: {F2}\nFmid: {Fmid}")
+            logger.debug(f"|helm1-helm2|:\n {abs(helm1-helm2)}")
+
+            # Replace the bracket on the same-sign side with the mid-point to
+            # shorten the bisection window
+            for i in range(len(helm1)):
+
+                # check if F1 and F2 happened to have the same sign
+                # This is why bisection is not designed for multi dimensions.
+                # Other algorithms like Simplex could be considered
+                if F1[i]*F2[i] >= 0.0:
+
+                    # in this case, replace the bracket whose function value
+                    # is closest to Fmid by helm_mid
+                    d1 = abs(Fmid[i]-F1[i])
+                    d2 = abs(Fmid[i]-F2[i])
+                    if d1 < d2:
+                        helm1[i] = helm_mid[i]
+                        if old_version is True:
+                            F1[i] = Fmid[i]
+                    else:
+                        helm2[i] = helm_mid[i]
+                        if old_version is True:
+                            F2[i] = Fmid[i]
+
+                # normal case: replace the bracket whose function value has
+                # the same sign as Fmid
+                elif Fmid[i]*F1[i] >= 0.0:
+                    helm1[i] = helm_mid[i]
+                    if old_version is True:
+                        F1[i] = Fmid[i]
+                else:
+                    helm2[i] = helm_mid[i]
+                    if old_version is True:
+                        F2[i] = Fmid[i]
+
+            c += 1
+
+        logger.debug(f'No. of iterations for bisection: {c}\n')
+        if (c == maxiter):
+            logger.warning(f"Number of iterations in bisection for L1 norm "
+                    f"solution reached maxiter ({maxiter}). Use the "
+                    f"results with caution!")
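+        # Note on convergence: each pass halves |helm1-helm2| for every
+        # parameter, so e.g. an initial 1 m translation bracket needs about
+        # 17 iterations to reach the 1e-5 m precision cap above
+        # (2**-17 ~ 7.6e-6), well within the default maxiter of 100.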
+
+        logger.debug(f"bisection results after {c} iterations:\n"
+                f"helm: {helm_mid}\n"
+                f"Fexp: {Fmid}\n"
+                f"|helm1-helm2|:\n {abs(helm1-helm2)}")
+
+        logger.debug(f"Bisection solution after {c} iterations:")
+        logger.debug(f"helm: {helm_mid}")
+
+        self.helmert = helm_mid
+        self.Fexp = Fmid
+
+        # update minimization functions
+        self.minim_funcs()
+
+
+    def CalAbdev(self,helm):
+
+        # Function to compute the absolute deviation given a set of Helmert
+        # parameters helm; used as input for scipy minimization (l1norm)
+
+        # Create an instance of helmert class using the given helm
+        helm_instance = Helmert(helmert=helm,sighelmert=self.sighelmert,
+                coords0=self.coords0,sigmas0=self.sigmas0,
+                coords1=self.coords1,sigmas1=self.sigmas1,
+                orbflags=self.orbflags,
+                weighted_center=self.weighted_center)
+
+        # Run minim_funcs method on the instance to get abdev
+        helm_instance.minim_funcs()
+
+        # Return abdev
+        return helm_instance.abdev
+
+
+    def CalRes(self,helm):
+
+        # Function to compute the residuals vector given a set of Helmert
+        # parameters helm; used as input for scipy least squares (LS)
+
+        # Create an instance of helmert class using the given helm
+        helm_instance = Helmert(helmert=helm,sighelmert=self.sighelmert,
+                coords0=self.coords0,sigmas0=self.sigmas0,
+                coords1=self.coords1,sigmas1=self.sigmas1,
+                orbflags=self.orbflags,
+                weighted_center=self.weighted_center)
+
+        # Determine the Jacobian matrix A for the instance
+        helm_instance.Jacobian()
+
+        # Create the observations vector l, which is the flattened coords1
+        # array
+        l = helm_instance.coords1.flatten()
+
+        # Create the computed observations vector (lc = A*params)
+        lc = np.matmul(helm_instance.A,helm_instance.helmert)
+
+        # Calculate the residuals
+        vcap = l - lc
+
+        # return the residuals
+        return vcap
+
+
+    def l1norm(self,helm0=None,method='Nelder-Mead',maxiter=100):
+
+        # using the scipy optimize module, estimate the Helmert parameters by
+        # minimizing the mean absolute deviation (i.e. assuming a
+        # double-exponential distribution of errors, for which the negative
+        # log-density is the absolute deviation; see the minim_funcs() method)
+
+        # Check the given initial Helmert parameters
+        # (default is self.helmert)
+        if helm0 is None:
+            helm0 = self.helmert
+        else:
+            checkutils.check_array(helm0,7)
+
+        # Check the given method of minimization
+        if method not in ['Nelder-Mead','Powell','CG','BFGS','Newton-CG',
+                          'L-BFGS-B','TNC','COBYLA','SLSQP',
+                          'trust-constr','dogleg','trust-ncg',
+                          'trust-exact','trust-krylov']:
+            logger.error(f"The input method {method} is not a recognized "
+                         f"solver by scipy.optimize.minimize")
+            raise ValueError(f"Unknown solver {method}")
+
+        # Check the given maxiter
+        checkutils.check_scalar(maxiter)
+        if not isinstance(maxiter,int):
+            logger.error("The given value for maxiter must be an integer",
+                    stack_info=True)
+            raise TypeError("The given value for maxiter must be an integer")
+
+        # Run the scipy minimize
+        # There are several options that can be used for minimization,
+        # including tolerances for x and f. As many of these options are
+        # method-specific, they are not passed as arguments to this method.
+        # Different methods can be assessed to find the best, or the script
+        # can be made more generic so as to pass the correct option based on
+        # the method chosen. For the options refer to the scipy documentation.
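+        # For example, method-specific tolerances could be passed like this
+        # (a sketch only; xatol/fatol apply to Nelder-Mead, and these values
+        # are placeholders rather than tuned choices):
+        #
+        #     opts = {'maxiter': maxiter, 'xatol': 1e-10, 'fatol': 1e-12}
+        #     optimize.minimize(self.CalAbdev, helm0,
+        #                       method='Nelder-Mead', options=opts)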
+        # TBD if necessary
+        l1norm_solution = optimize.minimize(self.CalAbdev,helm0,method=method,
+                options={'maxiter':maxiter})
+
+        # update Helmert parameters
+        self.helmert = l1norm_solution.x
+        self.abdev = l1norm_solution.fun
+
+        logger.debug(f"l1norm_solution: {l1norm_solution}")
+
+        if l1norm_solution.success is False:
+            logger.warning("L1 norm minimization problem has not been "
+                           "successful! Use the results with caution!")
+
+        logger.debug(f"L1 norm minimization solution after "
+                     f"{l1norm_solution.nit} iterations:\n"
+                     f"helm: {l1norm_solution.x}\n"
+                     f"success: {l1norm_solution.success}\n"
+                     f"status: {l1norm_solution.status}\n"
+                     f"Number of function evaluations: "
+                     f"{l1norm_solution.nfev}\n"
+                     f"Solver message: {l1norm_solution.message}\n"
+                     f"abdev: {l1norm_solution.fun}\n")
+
+        # update minimization functions (abdev is already updated but will do
+        # this anyway for the other functions)
+        self.minim_funcs()
+
+
+    def l2norm_scipy(self,helm0=None,method='trf',ftol=1e-8,xtol=1e-8):
+
+        # uses the scipy optimize least_squares method to solve for a
+        # least-squares solution of the Helmert parameters
+
+        # Check the given initial Helmert parameters
+        # (default is self.helmert)
+        if helm0 is None:
+            helm0 = self.helmert
+        else:
+            checkutils.check_array(helm0,7)
+
+        # Check the given method of minimization
+        if method not in ['trf','dogbox','lm']:
+            logger.error(f"The input method {method} is not a recognized "
+                         f"solver by scipy.optimize.least_squares")
+            raise ValueError(f"Unknown solver {method}")
+
+        # Check the given tolerances
+        checkutils.check_scalar(ftol)
+        checkutils.check_scalar(xtol)
+
+        # Run the scipy optimize least_squares
+        # There are several other options that could be used. Refer to the
+        # scipy documentation. TBD if necessary
+        l2norm_solution = optimize.least_squares(self.CalRes,helm0,
+                method=method,ftol=ftol,xtol=xtol)
+
+        # update Helmert parameters
+        self.helmert = l2norm_solution.x
+
+        logger.debug(f"l2norm_solution: {l2norm_solution}")
+
+        if l2norm_solution.success is False:
+            logger.warning("L2 norm minimization problem has not been "
+                           "successful! Use the results with caution!")
+
+        logger.debug(f"L2 norm minimization solution after "
+                     f"{l2norm_solution.nfev} function evaluations:\n"
+                     f"helm: {l2norm_solution.x}\n"
+                     f"success: {l2norm_solution.success}\n"
+                     f"status: {l2norm_solution.status}\n"
+                     f"Solver message: {l2norm_solution.message}\n"
+                     f"cost function value: {l2norm_solution.cost}\n"
+                     f"1st-order optimality measure: "
+                     f"{l2norm_solution.optimality}\n")
+
+        # update minimization functions
+        self.minim_funcs()
+
+
+
+#----------------------------------------------------------------------
+
+if __name__ == "__main__":
+
+    #helmert1 = Helmert([2,3,4,0,0,0,2],coords0=[[1,1,1],[2.5,2.4,2.2],[3.6,3.7,5],[3,4,5]],sigmas0=[[0.2,0.3,0.25],[0.24,0.32,0.57],[0.78,1.15,0.57],[0.54,2.1,1.2]])
+    coords=[[12908.438637, -10025.115840, 20508.373977],
+            [15079.968827, -2630.253926, 22211.246286],
+            [23313.808954, -10932.861931, 6118.858775],
+            [20721.531368, -2122.508558, -16544.439968],
+            [-21032.861002, 1830.470014, -16349.972246],
+            [-6551.363943, -24529.143018, -7545.001977],
+            [6145.944042, -22903.241294, -10980.933964],
+            [24787.542561, 3766.698126, 9291.839227],
+            [10459.030800, -11836.885227, -21446.749065],
+            [-2003.840373, 17518.668479, 19954.907803],
+            [-11918.525418, -17026.839834, -16540.090875],
+            [-24034.134364, 9383.795236, 5861.320085],
+            [-22826.376648, -13657.462694, 1852.531329],
+            [6060.582427, -18936.839408, 17545.130199],
+            [-25088.447006, -1194.003253, 8463.170260],
+            [17704.951928, 5584.874095, -19488.460051],
+            [-3396.674852, -17552.616593, 20026.492178],
+            [-5597.088199, 23742.610593, -10264.639359],
+            [-12814.443316, -18896.440759, 13673.661409],
+            [-12454.294791, -9241.513176, -21493.998357],
+            [15860.682874, 2932.783763, 21730.111035],
+            [ -961.884088, -16463.460832, 21312.144959],
+            [-13869.071128, 19517.036235, 11310.168972],
+            [-15536.066519, 1716.492700, 21075.943816],
+            [-19260.987560, 17577.551005, -4529.192943],
+            [10034.802446, 12333.836317, -21413.360007],
+            [23056.216185, 13331.450416, -3198.777892],
+            [6046.656384, 25457.765360, -4592.672112],
+            [-12220.685797, 9825.111943, -21498.041633],
+            [-877.604021, -26355.777864, -1405.678417],
+            [10397.743912, 20557.783342, -13057.252264],
+            [ 8382.452652, 17852.750536, 18077.240947]]
+    coords0 = [[x * 1000 for x in row] for row in coords]
+    #helm_params = [-0.0002,-0.0005,0.005,2.96e-10,2.38e-10,2.18e-10,1-5e-11]
+    helm_params = [-2,-0.0005,0.005,2.96e-10,2.38e-10,2.18e-10,1-5e-11]
+    helmert1 = Helmert(helmert=helm_params,coords0=coords0)
+    helmert1.transform()
+    print("Helmert parameters applied to coords0 are written to coords1:")
+    helmert1.printInfo()
+    coords1_orig = helmert1.coords1
+    helmert1.l2norm()
+    helmert1.bracket()
+    helmert1.bisection()
+    helmert2 = Helmert(helmert=helmert1.helmert,coords0=coords0)
+    helmert2.transform()
+    print("Helmert parameters estimated between coords0 and coords1\n"
+          "and then applied to coords0 to derive new coords1\n"
+          "(which should be close to the original parameters):")
+    helmert2.printInfo()
+    coords1_new = helmert2.coords1
+    print(f"residuals:\n{coords1_new - coords1_orig}")
diff --git a/rocs/iau.py b/rocs/iau.py
new file mode 100755
index 0000000..d0eddc0
--- /dev/null
+++ b/rocs/iau.py
@@ -0,0 +1,725 @@
+# International Astronomical Union models module
+
+import numpy as np
+import datetime
+import numbers
+from rocs.gpscal import gpsCal
+from rocs.rotation import Rotation
+
+
+class IAU:
+
+    """
+    Class of IAU models
+    Based on IERS conventions and SOFA package
+    References:
+    International Astronomical Union 
(2020): SOFA tools for Earth Attitude + Petit and Luzum eds. (2010): IERS Conventions (2010) (IERS Technical + Note; No. 36), International Earth Rotation and Reference Systems + Service (IERS) Central Bureau + + """ + def __init__(self,time_utc): + + """ + Initialize iau model class + + Keyword arguments: + time_utc [datetime or list/array of datetimes] : UTC time(s) + + Updates: + self.time_utc [array of datetimes] + + """ + + # Check the given arguments and set the attributes + if not isinstance(time_utc,(list,np.ndarray,datetime.datetime)): + raise TypeError("The given time_utc needs to be either a datetime " + "object or a list/array of datetime objects") + if not all(isinstance(item,datetime.datetime) + for item in np.atleast_1d(time_utc)): + raise TypeError("There are non-datetime items in time_utc") + self.time_utc = np.atleast_1d(time_utc) + + # time calculations + # Calculate t, which is time from 2000/01/01 12:00 UTC in centuries + # measured in international atomic time (no leap seconds) + tt = [] + for utc in self.time_utc: + gc = gpsCal() + gc.set_yyyy_MM_dd_hh_mm_ss(utc.year,utc.month,utc.day,utc.hour, + utc.minute,utc.second) + tai = gc.tai() + tt.append(tai + datetime.timedelta(0,32.184)) + jd2000 = datetime.datetime(2000,1,1,12,0,0) + t = np.array([(time-jd2000).total_seconds()/86400.0/36525.0 + for time in tt]) + self.t = t + + + def fundamental_fk5(self): + """ + Calculate fundamental arguments in the FK5 reference frame + + Updates: + self.fundamental_args [array] : array of fundamental arguments + where the columns correspond to + time epochs, and the rows are as + below: + row 0: el : mean longitude of the Moon minus mean longitude of + the Moon's perigee [radians] + row 1: elp : mean longitude of the Sun minus mean longitude of + the Sun's perigee [radians] + row 2: f : mean longitude of the Moon minus mean longitude of + the Moon's node [radians] + row 3: d : mean elongation of the Moon from the Sun [radians] + row 4: om : longitude of the mean ascending node of the lunar + orbit on the ecliptic, measured from the mean + equinox of date [radians] + """ + t = self.t + + # mean longitude of the Moon minus mean longitude of the Moon's + # perigee [radians] + el = (((485866.733 + 715922.633*t + 31.310*t**2 + 0.064*t**3)/3600.0) + *np.pi/180.0 + + ((1325.0*t)%1.0)*2*np.pi) + el = el%(2*np.pi) + + # mean longitude of the Sun minus mean longitude of the Sun's perigee + # [radians] + elp = (((1287099.804 + 1292581.224*t - 0.577*t**2 - 0.012*t**3)/3600.0) + *np.pi/180.0 + + ((99.0*t)%1.0)*2*np.pi) + elp = elp%(2*np.pi) + + # mean longitude of the Moon minus mean longitude of the Moon's node + # [radians] + f = (((335778.877 + 295263.137*t - 13.257*t**2 + 0.011*t**3)/3600.0) + *np.pi/180.0 + + ((1342.0*t)%1.0)*2*np.pi) + f = f%(2*np.pi) + + # mean elongation of the Moon from the Sun [radians] + d = (((1072261.307 + 1105601.328*t - 6.891*t**2 + 0.019*t**3)/3600.0) + *np.pi/180.0 + + ((1236.0*t)%1.0)*2*np.pi) + d = d%(2*np.pi) + + # longitude of the mean ascending node of the lunar orbit on the + # ecliptic, measured from the mean equinox of date [radians] + om = (((450160.280 - 482890.539*t + 7.455*t**2 + 0.008*t**3)/3600.0) + *np.pi/180.0 + + ((-5.0*t)%1.0)*2*np.pi) + om = om%(2*np.pi) + + # Create an array of fundamental arguments + self.fundamental_args = np.column_stack((el,elp,f,d,om)).T + + + def precession_iau1976(self): + + """ + Create the precession matrices based on the IAU1976 model + + Updates: + self.precession [dict] : precession matrix for each UTC time given 
+ self.precession_model [str] : precession model + + """ + + t = self.t + + # Euler angles [arc degrees] + zeta = (2306.2181*t + 0.30188*t**2 + 0.017998*t**3)/3600.0 + zeta = np.array([item - int(item/360.0)*360.0 + for item in zeta]) + z = (2306.2181*t + 1.09468*t**2 + 0.018203*t**3)/3600.0 + z = np.array([item - int(item/360.0)*360.0 for item in z]) + theta = (2004.3109*t - 0.42665*t**2 - 0.041833*t**3)/3600.0 + theta = np.array( + [item - int(item/360.0)*360.0 for item in theta]) + + # Go epochwise (may need to optimize in future) + precession = {} + for c,epoch in enumerate(t): + + # Precession rotation matrix precession = r3.r2.r1 + r1 = Rotation(-np.deg2rad(zeta[c]),3).rot + r2 = Rotation(np.deg2rad(theta[c]),2).rot + r3 = Rotation(-np.deg2rad(z[c]),3).rot + precession[epoch] = np.matmul(r3,np.matmul(r2,r1)) + + self.precession = precession + self.precession_model = "iau1976" + + def nutation_iau1980(self): + + """ + Create the nutation matrices based on the IAU1980 model + + Updates: + self.fundamental_args [array] : see fundamental_fk5 + self.nutation [dict] : nutation matrix for each UTC time given + self.nutation_model [str] : nutation model + self.dpsi [array] : nutation in longitude for each UTC time + self.deps [array] : nutation in obliquity for each UTC time + self.eps0 [array] : mean obliquity of the ecliptic for each UTC + time + + """ + + # Table of multiples of arguments and coefficients for nutation + # The columns are: + # Multiple of Longitude Obliquity + # L L' F D Omega coeff. of sin coeff. of cos + # 1 t 1 t + # units are 0.1 mas fir coefficients and mas per Julian millennium for + # rates of change + nut = np.array(( + [ 0.0, 0.0, 0.0, 0.0, 1.0, -171996.0, -1742.0, 92025.0, 89.0], + [ 0.0, 0.0, 0.0, 0.0, 2.0, 2062.0, 2.0, -895.0, 5.0], + [-2.0, 0.0, 2.0, 0.0, 1.0, 46.0, 0.0, -24.0, 0.0], + [ 2.0, 0.0, -2.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0], + [-2.0, 0.0, 2.0, 0.0, 2.0, -3.0, 0.0, 1.0, 0.0], + [ 1.0, -1.0, 0.0, -1.0, 0.0, -3.0, 0.0, 0.0, 0.0], + [ 0.0, -2.0, 2.0, -2.0, 1.0, -2.0, 0.0, 1.0, 0.0], + [ 2.0, 0.0, -2.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 2.0, -2.0, 2.0, -13187.0, -16.0, 5736.0, -31.0], + [ 0.0, 1.0, 0.0, 0.0, 0.0, 1426.0, -34.0, 54.0, -1.0], + [ 0.0, 1.0, 2.0, -2.0, 2.0, -517.0, 12.0, 224.0, -6.0], + [ 0.0, -1.0, 2.0, -2.0, 2.0, 217.0, -5.0, -95.0, 3.0], + [ 0.0, 0.0, 2.0, -2.0, 1.0, 129.0, 1.0, -70.0, 0.0], + [ 2.0, 0.0, 0.0, -2.0, 0.0, 48.0, 0.0, 1.0, 0.0], + [ 0.0, 0.0, 2.0, -2.0, 0.0, -22.0, 0.0, 0.0, 0.0], + [ 0.0, 2.0, 0.0, 0.0, 0.0, 17.0, -1.0, 0.0, 0.0], + [ 0.0, 1.0, 0.0, 0.0, 1.0, -15.0, 0.0, 9.0, 0.0], + [ 0.0, 2.0, 2.0, -2.0, 2.0, -16.0, 1.0, 7.0, 0.0], + [ 0.0, -1.0, 0.0, 0.0, 1.0, -12.0, 0.0, 6.0, 0.0], + [-2.0, 0.0, 0.0, 2.0, 1.0, -6.0, 0.0, 3.0, 0.0], + [ 0.0, -1.0, 2.0, -2.0, 1.0, -5.0, 0.0, 3.0, 0.0], + [ 2.0, 0.0, 0.0, -2.0, 1.0, 4.0, 0.0, -2.0, 0.0], + [ 0.0, 1.0, 2.0, -2.0, 1.0, 4.0, 0.0, -2.0, 0.0], + [ 1.0, 0.0, 0.0, -1.0, 0.0, -4.0, 0.0, 0.0, 0.0], + [ 2.0, 1.0, 0.0, -2.0, 0.0, 1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, -2.0, 2.0, 1.0, 1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, -2.0, 2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 0.0, 0.0, 2.0, 1.0, 0.0, 0.0, 0.0], + [-1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 2.0, -2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 2.0, 0.0, 2.0, -2274.0, -2.0, 977.0, -5.0], + [ 1.0, 0.0, 0.0, 0.0, 0.0, 712.0, 1.0, -7.0, 0.0], + [ 0.0, 0.0, 2.0, 0.0, 1.0, -386.0, -4.0, 200.0, 0.0], + [ 1.0, 0.0, 2.0, 0.0, 2.0, -301.0, 0.0, 129.0, -1.0], + [ 1.0, 0.0, 0.0, -2.0, 0.0, -158.0, 0.0, -1.0, 0.0], 
+ [-1.0, 0.0, 2.0, 0.0, 2.0, 123.0, 0.0, -53.0, 0.0], + [ 0.0, 0.0, 0.0, 2.0, 0.0, 63.0, 0.0, -2.0, 0.0], + [ 1.0, 0.0, 0.0, 0.0, 1.0, 63.0, 1.0, -33.0, 0.0], + [-1.0, 0.0, 0.0, 0.0, 1.0, -58.0, -1.0, 32.0, 0.0], + [-1.0, 0.0, 2.0, 2.0, 2.0, -59.0, 0.0, 26.0, 0.0], + [ 1.0, 0.0, 2.0, 0.0, 1.0, -51.0, 0.0, 27.0, 0.0], + [ 0.0, 0.0, 2.0, 2.0, 2.0, -38.0, 0.0, 16.0, 0.0], + [ 2.0, 0.0, 0.0, 0.0, 0.0, 29.0, 0.0, -1.0, 0.0], + [ 1.0, 0.0, 2.0, -2.0, 2.0, 29.0, 0.0, -12.0, 0.0], + [ 2.0, 0.0, 2.0, 0.0, 2.0, -31.0, 0.0, 13.0, 0.0], + [ 0.0, 0.0, 2.0, 0.0, 0.0, 26.0, 0.0, -1.0, 0.0], + [-1.0, 0.0, 2.0, 0.0, 1.0, 21.0, 0.0, -10.0, 0.0], + [-1.0, 0.0, 0.0, 2.0, 1.0, 16.0, 0.0, -8.0, 0.0], + [ 1.0, 0.0, 0.0, -2.0, 1.0, -13.0, 0.0, 7.0, 0.0], + [-1.0, 0.0, 2.0, 2.0, 1.0, -10.0, 0.0, 5.0, 0.0], + [ 1.0, 1.0, 0.0, -2.0, 0.0, -7.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 2.0, 0.0, 2.0, 7.0, 0.0, -3.0, 0.0], + [ 0.0, -1.0, 2.0, 0.0, 2.0, -7.0, 0.0, 3.0, 0.0], + [ 1.0, 0.0, 2.0, 2.0, 2.0, -8.0, 0.0, 3.0, 0.0], + [ 1.0, 0.0, 0.0, 2.0, 0.0, 6.0, 0.0, 0.0, 0.0], + [ 2.0, 0.0, 2.0, -2.0, 2.0, 6.0, 0.0, -3.0, 0.0], + [ 0.0, 0.0, 0.0, 2.0, 1.0, -6.0, 0.0, 3.0, 0.0], + [ 0.0, 0.0, 2.0, 2.0, 1.0, -7.0, 0.0, 3.0, 0.0], + [ 1.0, 0.0, 2.0, -2.0, 1.0, 6.0, 0.0, -3.0, 0.0], + [ 0.0, 0.0, 0.0, -2.0, 1.0, -5.0, 0.0, 3.0, 0.0], + [ 1.0, -1.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0], + [ 2.0, 0.0, 2.0, 0.0, 1.0, -5.0, 0.0, 3.0, 0.0], + [ 0.0, 1.0, 0.0, -2.0, 0.0, -4.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, -2.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 0.0, 1.0, 0.0, -4.0, 0.0, 0.0, 0.0], + [ 1.0, 1.0, 0.0, 0.0, 0.0, -3.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, 2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0], + [ 1.0, -1.0, 2.0, 0.0, 2.0, -3.0, 0.0, 1.0, 0.0], + [-1.0, -1.0, 2.0, 2.0, 2.0, -3.0, 0.0, 1.0, 0.0], + [-2.0, 0.0, 0.0, 0.0, 1.0, -2.0, 0.0, 1.0, 0.0], + [ 3.0, 0.0, 2.0, 0.0, 2.0, -3.0, 0.0, 1.0, 0.0], + [ 0.0, -1.0, 2.0, 2.0, 2.0, -3.0, 0.0, 1.0, 0.0], + [ 1.0, 1.0, 2.0, 0.0, 2.0, 2.0, 0.0, -1.0, 0.0], + [-1.0, 0.0, 2.0, -2.0, 1.0, -2.0, 0.0, 1.0, 0.0], + [ 2.0, 0.0, 0.0, 0.0, 1.0, 2.0, 0.0, -1.0, 0.0], + [ 1.0, 0.0, 0.0, 0.0, 2.0, -2.0, 0.0, 1.0, 0.0], + [ 3.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 2.0, 1.0, 2.0, 2.0, 0.0, -1.0, 0.0], + [-1.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, -1.0, 0.0], + [ 1.0, 0.0, 0.0, -4.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [-2.0, 0.0, 2.0, 2.0, 2.0, 1.0, 0.0, -1.0, 0.0], + [-1.0, 0.0, 2.0, 4.0, 2.0, -2.0, 0.0, 1.0, 0.0], + [ 2.0, 0.0, 0.0, -4.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 1.0, 1.0, 2.0, -2.0, 2.0, 1.0, 0.0, -1.0, 0.0], + [ 1.0, 0.0, 2.0, 2.0, 1.0, -1.0, 0.0, 1.0, 0.0], + [-2.0, 0.0, 2.0, 4.0, 2.0, -1.0, 0.0, 1.0, 0.0], + [-1.0, 0.0, 4.0, 0.0, 2.0, 1.0, 0.0, 0.0, 0.0], + [ 1.0, -1.0, 0.0, -2.0, 0.0, 1.0, 0.0, 0.0, 0.0], + [ 2.0, 0.0, 2.0, -2.0, 1.0, 1.0, 0.0, -1.0, 0.0], + [ 2.0, 0.0, 2.0, 2.0, 2.0, -1.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, 0.0, 2.0, 1.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 4.0, -2.0, 2.0, 1.0, 0.0, 0.0, 0.0], + [ 3.0, 0.0, 2.0, -2.0, 2.0, 1.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, 2.0, -2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 2.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0], + [-1.0, -1.0, 0.0, 2.0, 1.0, 1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, -2.0, 0.0, 1.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 0.0, 2.0, -1.0, 2.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 0.0, 2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, -2.0, -2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, -1.0, 2.0, 0.0, 1.0, -1.0, 0.0, 0.0, 0.0], + [ 1.0, 1.0, 0.0, -2.0, 1.0, -1.0, 0.0, 0.0, 0.0], + [ 1.0, 0.0, -2.0, 2.0, 0.0, -1.0, 0.0, 0.0, 0.0], + [ 2.0, 0.0, 0.0, 2.0, 0.0, 1.0, 0.0, 
0.0, 0.0], + [ 0.0, 0.0, 2.0, 4.0, 2.0, -1.0, 0.0, 0.0, 0.0], + [ 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0] + )) + + t = self.t + + # Calculate fundamental arguments in the FK5 reference frame + self.fundamental_fk5() + + # create the array of arguments for input into sin/cos + arg = np.matmul(nut[:,0:5],self.fundamental_args) + + # create the array of sine and cosine coefficients (t should be in + # millenia instead of centuries) + v = np.column_stack((np.ones(len(t)),t/10.0)).T + #v = np.array([1,t/10.0]) + s = np.matmul(nut[:,5:7],v) + c = np.matmul(nut[:,7:9],v) + + # create the array of sine and cosine terms + dp = s*np.sin(arg) + de = c*np.cos(arg) + + # Sum over all the sines and cosines, and convert from 0.1 mas to + # radians + dpsi = (dp.sum(axis=0)*1e-4/3600.0)*np.pi/180.0 # nutation in longitude + deps = (de.sum(axis=0)*1e-4/3600.0)*np.pi/180.0 # nutation in obliquity + + # Add corrections of frame bias, precession-rates and geophysical with + # respect to IAU1976/1980 [mas --> radians] + ddp80 = (-55.0655*1e-3/3600.0)*np.pi/180.0 + dde80 = (-6.358*1e-3/3600.0)*np.pi/180.0 + + dpsi = dpsi + ddp80 + deps = deps + dde80 + + # mean obliquity of the ecliptic based on IAU1980 model [radians] + eps0 = (84381.448 - 46.8150*t - 0.00059*t**2 + 0.001813*t**3)/3600.0 + eps0 = eps0*np.pi/180.0 + + # Go epochwise (may need to optimize in future) + nutation = {} + for c,epoch in enumerate(t): + + # Nutation rotation matrix nutation = r3.r2.r1 + r1 = Rotation(eps0[c],1).rot + r2 = Rotation(-dpsi[c],3).rot + r3 = Rotation(-(eps0[c]+deps[c]),1).rot + nutation[epoch] = np.matmul(r3,np.matmul(r2,r1)) + + self.nutation = nutation + self.nutation_model = "iau1980" + self.dpsi = dpsi + self.deps = deps + self.eps0 = eps0 + + + def gmst_iau1982(self,ut1_utc): + + """ + Calculate Greenwich mean sidereal time (GMST) based on IAU 1982 model + + Keyword arguments: + ut1_utc [scalar or list/array] : UT1-UTC in seconds corresponding + to the t attribute of the class object + Updates: + self.ut1_utc [array] + self.time_ut1 [array] + self.gmst [array] + self.gmst_model [array] + + """ + # Check the given arguments and set the attributes + if not isinstance(ut1_utc,(list,np.ndarray,numbers.Number)): + raise TypeError("The given ut1_utc needs to be either a number " + "or a list/array of numbers") + if not all(isinstance(item,numbers.Number) + for item in np.atleast_1d(ut1_utc)): + raise TypeError("There are non-number items in ut1_utc") + if np.shape(np.atleast_1d(ut1_utc)) != np.shape(self.time_utc): + raise ValueError("Shape mismatch between self.time_utc " + f"{np.shape(self.time_utc)} and ut1_utc " + f"{np.shape(np.atleast_1d(ut1_utc))}") + self.ut1_utc = np.atleast_1d(ut1_utc) + + # Calculate UT1 times + time_ut1 = [] + time_ut1_0 = [] + seconds = [] + for c,utc in enumerate(self.time_utc): + gc = gpsCal() + gc.set_yyyy_MM_dd_hh_mm_ss(utc.year,utc.month,utc.day,utc.hour, + utc.minute,utc.second) + # UT1 time + ut1 = gc.ut1(self.ut1_utc[c]) + + # UT1 at 00:00 + ut1_0 = ut1.replace(hour=0,minute=0,second=0,microsecond=0) + + # Number of seconds within the day + sec = (ut1 - ut1_0).total_seconds() + + time_ut1.append(ut1) + time_ut1_0.append(ut1_0) + seconds.append(sec) + + self.time_ut1 = np.atleast_1d(time_ut1) + time_ut1_0 = np.atleast_1d(time_ut1_0) + seconds = np.atleast_1d(seconds) + + # JD2000 + jd2000 = datetime.datetime(2000,1,1,12,0,0) + + # Number of Julian centuries since JD2000 (2000/01/01 12:00) to the + # calculation day at hour zero + t = 
np.array([(time-jd2000).total_seconds()/86400.0/36525.0 + for time in time_ut1_0]) + + # GMST at 00:00 + gmst0 = 24110.54841 + 8640184.812866*t + 0.093104*t**2 - 6.2e-6*t**3 + + # add the remainder of the day to get GMST in seconds + gmst = gmst0 + 1.002737909350795*seconds + + # Normalize to the part of the day in seconds + gmst = np.array([item%86400.0 for item in gmst]) + + # convert to radians + gmst = gmst*np.pi/43200.0 + + self.gmst = gmst + self.gmst_model = "iau1982" + + + def gast_iau1994(self,ut1_utc): + + """ + Calculate Greenwich apparent sidereal time (GAST), and its rotation + matrix based on IAU 1982 model for GMST and IAU1994 model for equation + of equinoxes + + Keyword arguments: + ut1_utc [scalar or list/array] : UT1-UTC in seconds corresponding + to the t attribute of the class object + Updates: + self.ut1_utc [array] + self.time_ut1 [array] + self.fundamental_args [array] : see fundamental_fk5 + self.nutation [dict] : nutation matrix for each UTC time given + self.nutation_model [str] : nutation model + self.dpsi [array] : nutation in longitude for each UTC time + self.deps [array] : nutation in obliquity for each UTC time + self.eps0 [array] : mean obliquity of the ecliptic for each UTC + time + self.gmst [array] + self.gmst_model [array] + self.gast [array] + self.gast_matrix [dict] + self.gast_model [array] + + """ + # Check the given arguments and set the attributes + if not isinstance(ut1_utc,(list,np.ndarray,numbers.Number)): + raise TypeError("The given ut1_utc needs to be either a number " + "or a list/array of numbers") + if not all(isinstance(item,numbers.Number) + for item in np.atleast_1d(ut1_utc)): + raise TypeError("There are non-number items in ut1_utc") + if np.shape(np.atleast_1d(ut1_utc)) != np.shape(self.time_utc): + raise ValueError("Shape mismatch between self.time_utc " + f"{np.shape(self.time_utc)} and ut1_utc " + f"{np.shape(np.atleast_1d(ut1_utc))}") + self.ut1_utc = np.atleast_1d(ut1_utc) + + # For the correction to GMST, we need to calculate the equation of + # equinoxes from the following: + # om : longitude of the mean ascending node of the lunar orbit on the + # ecliptic, measured from the mean equinox of date + # dpsi : nutation in longitude + # eps0 : mean obliquity of the ecliptic + # we get om from fundamental_fk5, and dpsi and eps0 from + # nutation_iau1980 + + self.fundamental_fk5() + om = np.array(self.fundamental_args[4,:]) + self.nutation_iau1980() + dpsi = self.dpsi + eps0 = self.eps0 + + # equation of equinoxes based on IAU1994 model + eqeq94 = (dpsi*np.cos(eps0) + (0.00264*np.sin(om) + + 0.000063*np.sin(2.0*om))*np.pi/180.0/3600.0) + + # GMST from 1982 model + self.gmst_iau1982(self.ut1_utc) + gmst = self.gmst + + # Add the correction to get GAST + gast = gmst + eqeq94 + + # Create the rotation matrix + # Go epochwise (may need to optimize in future) + t = self.t + gast_matrix = {} + for c,epoch in enumerate(t): + r = Rotation(gast[c],3).rot + gast_matrix[epoch] = r + + self.gast = gast + self.gast_matrix = gast_matrix + self.gast_model = "iau1994" + + + def polar_motion(self,xp,yp): + + """ + Calculate polar motion matrices + + Keyword arguments: + xp [scalar or list/array] : polar x motion in radians corresponding + to the t attribute of the class object + yp [scalar or list/array] : polar y motion in radians corresponding + to the t attribute of the class object + + Updates: + self.xp [array] : polar x motion for each UTC time + self.yp [array] : polar y motion for each UTC time + self.polar [dict] : polar motion matrix for 
each UTC time given
+
+        """
+        # Check the given arguments and set the attributes
+        if not isinstance(xp,(list,np.ndarray,numbers.Number)):
+            raise TypeError("The given xp needs to be either a number "
+                            "or a list/array of numbers")
+        if not all(isinstance(item,numbers.Number)
+                for item in np.atleast_1d(xp)):
+            raise TypeError("There are non-number items in xp")
+        if np.shape(np.atleast_1d(xp)) != np.shape(self.time_utc):
+            raise ValueError("Shape mismatch between self.time_utc "
+                             f"{np.shape(self.time_utc)} and xp "
+                             f"{np.shape(np.atleast_1d(xp))}")
+        self.xp = np.atleast_1d(xp)
+
+        if not isinstance(yp,(list,np.ndarray,numbers.Number)):
+            raise TypeError("The given yp needs to be either a number "
+                            "or a list/array of numbers")
+        if not all(isinstance(item,numbers.Number)
+                for item in np.atleast_1d(yp)):
+            raise TypeError("There are non-number items in yp")
+        if np.shape(np.atleast_1d(yp)) != np.shape(self.time_utc):
+            raise ValueError("Shape mismatch between self.time_utc "
+                             f"{np.shape(self.time_utc)} and yp "
+                             f"{np.shape(np.atleast_1d(yp))}")
+        self.yp = np.atleast_1d(yp)
+
+        t = self.t
+
+        # Go epochwise (may need to optimize in future)
+        polar = {}
+        for c,epoch in enumerate(t):
+
+            # polar motion rotation matrix polar = r2.r1
+            r1 = Rotation(-yp[c],1).rot
+            r2 = Rotation(-xp[c],2).rot
+            polar[epoch] = np.matmul(r2,r1)
+
+        self.polar = polar
+
+
+    def celestial_to_terrestrial(self,precession_model=None,
+                                 nutation_model=None,gast_model=None,
+                                 polar_motion=None,ut1_utc=None,
+                                 xp=None,yp=None):
+
+        """
+        Calculate the full celestial-to-terrestrial rotation matrices
+
+        Keyword arguments:
+        precession_model [str] : precession model used
+        nutation_model [str] : nutation model used
+        gast_model [str] : GAST model used
+        polar_motion [bool] : Apply polar motion?
+        ut1_utc [scalar or list/array] : UT1-UTC in seconds corresponding
+                to the t attribute of the class object
+        xp [scalar or list/array] : polar x motion in radians corresponding
+                to the t attribute of the class object
+        yp [scalar or list/array] : polar y motion in radians corresponding
+                to the t attribute of the class object
+
+        Updates:
+        self.nutation_precession [dict] : nutation-precession rotation
+                matrix for each UTC time
+        self.c2t_nopolar [dict] : celestial to terrestrial rotation matrix
+                with no polar motion applied for each
+                UTC time
+        self.c2t [dict] : full celestial to terrestrial rotation matrix for
+                each UTC time
+        plus all the attributes updated over the calling of models
+
+        """
+
+        # Check the given arguments and set the attributes
+        allowed_precession = ['iau1976']
+        allowed_nutation = ['iau1980']
+        allowed_gast = ['iau1994']
+
+        if (precession_model is not None
+                and precession_model not in allowed_precession):
+            raise ValueError(f"The given precession model {precession_model} "
+                             f"not recognized! allowed precession models: "
+                             f"{allowed_precession}")
+
+        if (nutation_model is not None
+                and nutation_model not in allowed_nutation):
+            raise ValueError(f"The given nutation model {nutation_model} not "
+                             f"recognized! allowed nutation models: "
+                             f"{allowed_nutation}")
+
+        if (gast_model is not None and gast_model not in allowed_gast):
+            raise ValueError(f"The given gast model {gast_model} not "
+                             f"recognized! 
allowed gast models: " + f"{allowed_gast}") + + if polar_motion is not None: + if not isinstance(polar_motion,bool): + raise TypeError("The given polar_motion needs to be of " + "boolean type") + else: + polar_motion = False + + if (gast_model is not None and ut1_utc is None): + raise ValueError("ut1_utc must be given to calculate gast") + + if (polar_motion is True and (xp is None or yp is None) ): + raise ValueError("xp and yp must be given to calculate polar " + "motion rotation matrix") + + if ut1_utc is not None: + if not isinstance(ut1_utc,(list,np.ndarray,numbers.Number)): + raise TypeError("The given ut1_utc needs to be either a " + "number or a list/array of numbers") + if not all(isinstance(item,numbers.Number) + for item in np.atleast_1d(ut1_utc)): + raise TypeError("There are non-number items in ut1_utc") + if np.shape(np.atleast_1d(ut1_utc)) != np.shape(self.time_utc): + raise ValueError("Shape mismatch between self.time_utc " + f"{np.shape(self.time_utc)} and ut1_utc " + f"{np.shape(np.atleast_1d(ut1_utc))}") + self.ut1_utc = np.atleast_1d(ut1_utc) + + if xp is not None: + if not isinstance(xp,(list,np.ndarray,numbers.Number)): + raise TypeError("The given xp needs to be either a number " + "or a list/array of numbers") + if not all(isinstance(item,numbers.Number) + for item in np.atleast_1d(xp)): + raise TypeError("There are non-number items in xp") + if np.shape(np.atleast_1d(xp)) != np.shape(self.time_utc): + raise ValueError("Shape mismatch between self.time_utc " + f"{np.shape(self.time_utc)} and xp " + f"{np.shape(np.atleast_1d(xp))}") + self.xp = np.atleast_1d(xp) + + if yp is not None: + if not isinstance(yp,(list,np.ndarray,numbers.Number)): + raise TypeError("The given yp needs to be either a number " + "or a list/array of numbers") + if not all(isinstance(item,numbers.Number) + for item in np.atleast_1d(yp)): + raise TypeError("There are non-number items in yp") + if np.shape(np.atleast_1d(yp)) != np.shape(self.time_utc): + raise ValueError("Shape mismatch between self.time_utc " + f"{np.shape(self.time_utc)} and yp " + f"{np.shape(np.atleast_1d(yp))}") + self.yp = np.atleast_1d(yp) + + # call each model and calculate the rotation matrix + t = self.t + if precession_model == 'iau1976': + self.precession_iau1976() + elif precession_model is None: + precession = {} + for c,epoch in enumerate(t): + precession[epoch] = np.identity(3) + self.precession = precession + self.precession_model = None + else: + raise ValueError(f"precession model {precession_model} not " + "recognized!") + + if nutation_model == 'iau1980': + self.nutation_iau1980() + elif nutation_model is None: + nutation = {} + for c,epoch in enumerate(t): + nutation[epoch] = np.identity(3) + self.nutation = nutation + self.nutation_model = None + else: + raise ValueError(f"nutation model {nutation_model} not " + "recognized!") + + if gast_model == 'iau1994': + self.gast_iau1994(self.ut1_utc) + elif gast_model is None: + gast_matrix = {} + for c,epoch in enumerate(t): + gast_matrix[epoch] = np.identity(3) + self.gast_matrix = gast_matrix + self.gast_model = None + else: + raise ValueError(f"GAST model {gast_model} not recognized!") + + if polar_motion is True: + self.polar_motion(self.xp,self.yp) + else: + polar = {} + for c,epoch in enumerate(t): + polar[epoch] = np.identity(3) + self.polar = polar + + nutation_precession = {} + c2t_nopolar = {} + c2t = {} + for c,epoch in enumerate(t): + np_matrix = np.matmul(self.nutation[epoch], + self.precession[epoch]) + c2t_nopol = np.matmul(self.gast_matrix[epoch], 
+ np_matrix) + c2t_matrix = np.matmul(self.polar[epoch],c2t_nopol) + + nutation_precession[epoch] = np_matrix + c2t_nopolar[epoch] = c2t_nopol + c2t[epoch] = c2t_matrix + + self.nutation_precession = nutation_precession + self.c2t_nopolar = c2t_nopolar + self.c2t = c2t + + + + diff --git a/rocs/io_data.py b/rocs/io_data.py new file mode 100755 index 0000000..b392243 --- /dev/null +++ b/rocs/io_data.py @@ -0,0 +1,1895 @@ +# Module for input and output of data + +import logging +import re +import numpy as np +import datetime +from collections import namedtuple +import pathlib +from scipy.interpolate import lagrange +import yaml +from rocs.gpscal import gpsCal + + +logger = logging.getLogger(__name__) + +class sp3: + + """ + Class of sp3 orbit data + Reads/writes a standard product 3 orbit format (sp3) file + Capable of reading/writing the extended sp3 (sp3-d) format + + """ + + def __init__(self,sp3file,sp3dict=None): + + """ + Initialize sp3 class + + Keyword arguments: + sp3file [str] : name of a sp3 file for input or output + sp3dict [dict], optional : a sp3 dictionary for output + + Updates: + self.sp3file [str] + self.sp3dict [dict] + + """ + + # Check and assign the given sp3 file + if not isinstance(sp3file,str): + logger.error("\nThe keyword argument sp3file must be string", + stack_info=True) + raise TypeError("sp3file is not a string!") + + self.sp3file = sp3file + + # Check and assign the given sp3dict + if sp3dict is not None: + if not isinstance(sp3dict,dict): + logger.error("\nThe keyword argument sp3dict must be a " + "dictionary",stack_info=True) + raise TypeError("sp3dict is not a dictionary!") + + if not sp3dict: + logger.error("\nThe keyword argument sp3dict is empty") + raise ValueError("sp3dict is empty!") + + self.sp3dict = sp3dict + + + def parse(self,start_epoch=None,end_epoch=None): + + """ + parse self.sp3file into self.sp3dict dictionary + + Keyword arguments: + start_epoch (datetime.datetime): start epoch to read data + end_epoch (datetime.datetime) : last epoch to read data + + Updates: + self.sp3dict + """ + + # Check the type of start_epoch and end_epoch + if (start_epoch is not None and + not isinstance(start_epoch,datetime.datetime)): + logger.error("\nThe keyword argument start_epoch must be a " + "datetime.datetime object",stack_info=True) + raise TypeError("start_epoch not datetime.datetime!") + + if (end_epoch is not None and + not isinstance(end_epoch,datetime.datetime)): + logger.error("\nThe keyword argument end_epoch must be a " + "datetime.datetime object",stack_info=True) + raise TypeError("end_epoch not datetime.datetime!") + if start_epoch is None: + start_epoch = datetime.datetime(1,1,1,0,0) + if end_epoch is None: + end_epoch = datetime.datetime(9999, 12, 31, 0, 0) + + # Create some RGX patterns + epoch_hdr_rgx = re.compile('^\*') + eof_rgx = re.compile('^EOF') + sat_rgx = re.compile('^\+ ') + accu_rgx = re.compile('^\+\+') + c_rgx = re.compile('^\%c') + f_rgx = re.compile('^\%f') + i_rgx = re.compile('^\%i') + comment_rgx = re.compile('^\/\*') + + flag = 0 + line_num = 0 + sat_counter = 0 + accu_counter = 0 + c_counter = 0 + f_counter = 0 + nout = 0 + sats = [] + sat_accuracy = [] + sp3dict = {} + sp3dict['data'] = {} + sp3dict['data']['epochs'] = [] + comments = [] + + # Try to open sp3file + try: + sp3_fid = open(self.sp3file,'r') + except IOError: + logger.error(f"\nThe specified sp3 file {self.sp3file} is not " + f"accessible\n",stack_info=True) + raise IOError(f"File {self.sp3file} not accessible!") + else: + with sp3_fid: + for line in 
sp3_fid: + line_num = line_num + 1 + + if line_num == 1: + line = line.ljust(60) + sp3dict['header'] = {} + sp3dict['header']['version'] = line[0:2] + sp3dict['header']['pvflag'] = line[2] + sp3dict['header']['start_year'] = int(line[3:8]) + sp3dict['header']['start_month']= int(line[8:10]) + sp3dict['header']['start_day'] = int(line[11:13]) + sp3dict['header']['start_hour'] = int(line[14:16]) + sp3dict['header']['start_min'] = int(line[17:19]) + sp3dict['header']['start_sec'] = float(line[20:31]) + sp3dict['header']['num_epochs'] = int(line[32:39]) + sp3dict['header']['data_used'] = line[40:45] + sp3dict['header']['coord_sys'] = line[46:51] + sp3dict['header']['orbit_type'] = line[52:55] + sp3dict['header']['agency'] = line[56:60] + sp3dict['header']['sats'] = [] + sp3dict['header']['sat_accuracy'] = [] + + elif line_num == 2: + line = line.ljust(60) + sp3dict['header']['gpsweek'] = int(line[3:7]) + sp3dict['header']['sow'] = float(line[8:23]) + sp3dict['header']['epoch_int'] = float(line[24:38]) + sp3dict['header']['modjul'] = int(line[39:44]) + sp3dict['header']['frac'] = float(line[45:60]) + + elif line_num == 3: + line = line.ljust(60) + sp3dict['header']['numsats'] = int(line[3:6]) + for i,c in enumerate(range(9,60,3)): + sat_counter += 1 + if sat_counter > sp3dict['header']['numsats']: + break + sp3dict['header']['sats'].append(line[c:c+3]) + + elif sat_rgx.search(line): + line = line.ljust(60) + for i,c in enumerate(range(9,60,3)): + sat_counter += 1 + if sat_counter > sp3dict['header']['numsats']: + break + sp3dict['header']['sats'].append(line[c:c+3]) + + elif accu_rgx.search(line): + line = line.ljust(60) + for i,c in enumerate(range(9,60,3)): + accu_counter += 1 + if accu_counter > sp3dict['header']['numsats']: + break + sp3dict['header']['sat_accuracy'].append( + int(line[c:c+3])) + + elif c_rgx.search(line): + line = line.ljust(60) + if c_counter == 0: + sp3dict['header']['file_type'] = line[3:5] + sp3dict['header']['time_system'] = line[9:12] + c_counter += 1 + else: + c_counter = 0 + + elif f_rgx.search(line): + line = line.ljust(60) + if f_counter == 0: + sp3dict['header']['base_pos'] = float(line[3:13]) + sp3dict['header']['base_clk'] = float(line[14:26]) + f_counter += 1 + else: + f_counter = 0 + + elif comment_rgx.search(line): + line = line.ljust(80) + comments.append(line[3:80].rstrip()) + + elif epoch_hdr_rgx.search(line): + line = line.ljust(31) + flag = 1 + year = int(line[2:7]) + month = int(line[8:10]) + dom = int(line[11:13]) + hh = int(line[14:16]) + mm = int(line[17:19]) + sec = float(line[20:31]) + gc = gpsCal() + gc.set_yyyy_MM_dd_hh_mm_ss(year,month,dom,hh,mm,sec) + epoch = gc.dto + if epoch < start_epoch or epoch > end_epoch: + nout += 1 + flag = 0 + else: + sp3dict['data']['epochs'].append(epoch) + for sat in sp3dict['header']['sats']: + sp3dict['data'][(sat,epoch,'Pflag')] = 0 + sp3dict['data'][(sat,epoch,'EPflag')] = 0 + sp3dict['data'][(sat,epoch,'Vflag')] = 0 + sp3dict['data'][(sat,epoch,'EVflag')] = 0 + + elif eof_rgx.search(line): + pass + + elif flag == 1: + # this should be one of the last elif statements + + # Symbol P EP V EV + if line[0] == 'P': # Position and clock record + + line = line.ljust(80) + + # Vehicle ID + sat = line[1:4] + sp3dict['data'][(sat,epoch,'Pflag')] = 1 + + # x - coordinate(km) + sp3dict['data'][(sat,epoch,'xcoord')] = float( + line[4:18]) + # y - coordinate(km) + sp3dict['data'][(sat,epoch,'ycoord')] = float( + line[18:32]) + # z - coordinate(km) + sp3dict['data'][(sat,epoch,'zcoord')] = float( + line[32:46]) + # clock 
(microsec) + sp3dict['data'][(sat,epoch,'clock')] = float( + line[46:60]) + + # Note: We have to read the following as strings + # instead of integers, because sometimes we are + # missing them in sp3 files + + # x-sdev (b**n mm) + sp3dict['data'][(sat,epoch,'xsdev')] = line[61:63] + + # y-sdev (b**n mm) + sp3dict['data'][(sat,epoch,'ysdev')] = line[64:66] + + # z-sdev (b**n mm) + sp3dict['data'][(sat,epoch,'zsdev')] = line[67:69] + + # c-sdev (b**n psec) + sp3dict['data'][(sat,epoch,'csdev')] = line[70:73] + + # clock event flag E + sp3dict['data'][(sat,epoch,'clk_event')] = ( + line[74]) + + # clock pred flag P + sp3dict['data'][(sat,epoch,'clk_pred')] = line[75] + + # maneuver flag + sp3dict['data'][sat,epoch,'maneuver'] = line[78] + + # orbit predict flag P + sp3dict['data'][(sat,epoch,'orbit_pred')] = ( + line[79]) + + # Position and clock correlation record + elif line[0:2] == 'EP': + + line = line.ljust(80) + sp3dict['data'][(sat,epoch,'EPflag')] = 1 + + # high-resolution x-sdev (mm) + sp3dict['data'][(sat,epoch,'xsdev-hres')] = ( + line[4:8]) + + # high-resolution y-sdev (mm) + sp3dict['data'][(sat,epoch,'ysdev-hres')] = ( + line[9:13]) + + # high-resolution z-sdev (mm) + sp3dict['data'][(sat,epoch,'zsdev-hres')] = ( + line[14:18]) + + # high-resolution c-sdev (psec) + sp3dict['data'][(sat,epoch,'csdev-hres')] = ( + line[19:26]) + # xy-correlation + sp3dict['data'][(sat,epoch,'xy-corr')] = ( + line[27:35]) + + # xz-correlation + sp3dict['data'][(sat,epoch,'xz-corr')] = ( + line[36:44]) + + # xc-correlation + sp3dict['data'][(sat,epoch,'xc-corr')] = ( + line[45:53]) + + # yz-correlation + sp3dict['data'][(sat,epoch,'yz-corr')] = ( + line[54:62]) + + # yc-correlation + sp3dict['data'][(sat,epoch,'yc-corr')] = ( + line[63:71]) + + # zc-correlation + sp3dict['data'][(sat,epoch,'zc-corr')] = ( + line[72:80]) + + # Velocity and clock rate-of-change record + elif line[0] == 'V': + + line = line.ljust(80) + + # Vehicle ID + sat = line[1:4] + sp3dict['data'][(sat,epoch,'Vflag')] = 1 + + # x - velocity (dm/s) + sp3dict['data'][(sat,epoch,'xvel')] = float( + line[4:18]) + + # y - velocity (dm/s) + sp3dict['data'][(sat,epoch,'yvel')] = float( + line[18:32]) + + # z - velocity(dm/s) + sp3dict['data'][(sat,epoch,'zvel')] = float( + line[32:46]) + + # clock rate-of-change (10**-4 microseconds/second) + sp3dict['data'][(sat,epoch,'clkrate')] = float( + line[46:60]) + + # xvel-sdev (b**n 10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'xvel-sdev')] = ( + line[61:63]) + + # yvel-sdev (b**n 10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'yvel-sdev')] = ( + line[64:66]) + + # zvel-sdev (b**n 10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'zvel-sdev')] = ( + line[67:69]) + + # clkrate-sdev (b**n 10**-4 psec/sec) + sp3dict['data'][(sat,epoch,'clkrate-sdev')] = ( + line[70:73]) + + # Velocity and clock rate correlation record + elif line[0:2] == 'EV': + + line = line.ljust(80) + sp3dict['data'][(sat,epoch,'EVflag')] = 1 + + # high-resolution xvel-sdev (10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'xvelsdev-hres')] = ( + line[4:8]) + + # high-resolution yvel-sdev (10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'yvelsdev-hres')] = ( + line[9:13]) + + # high-resolution zvel-sdev (10**-4 mm/sec) + sp3dict['data'][(sat,epoch,'zvelsdev-hres')] = ( + line[14:18]) + + # high-resolution clkrate-sdev (10**-4 psec/sec) + (sp3dict['data'] + [(sat,epoch,'clkrate-sdev-hres')]) = ( + line[19:26]) + + # xy-correlation + sp3dict['data'][(sat,epoch,'xy-vel-corr')] = ( + line[27:35]) + + # xz-correlation + 
sp3dict['data'][(sat,epoch,'xz-vel-corr')] = (
+                            line[36:44])
+
+                        # xc-correlation
+                        (sp3dict['data']
+                            [(sat,epoch,'xvel-clkrate-corr')]) = (
+                            line[45:53])
+
+                        # yz-correlation
+                        sp3dict['data'][(sat,epoch,'yz-vel-corr')] = (
+                            line[54:62])
+
+                        # yc-correlation
+                        (sp3dict['data']
+                            [(sat,epoch,'yvel-clkrate-corr')]) = (
+                            line[63:71])
+
+                        # zc-correlation
+                        (sp3dict['data']
+                            [(sat,epoch,'zvel-clkrate-corr')]) = (
+                            line[72:80])
+
+        sp3dict['header']['comments'] = comments
+        sp3dict['header']['num_epochs'] -= nout
+
+        self.sp3dict = sp3dict
+
+
+    def write(self):
+
+        """
+        Write self.sp3dict to a file named self.sp3file
+
+        """
+
+        sp3dict = self.sp3dict
+
+        # Try to get header information from sp3dict
+        try:
+            sp3_header = sp3dict['header']
+        except KeyError:
+            logger.error("Keyword argument sp3dict lacks 'header' key",
+                         stack_info=True)
+            raise KeyError("'header'")
+
+        try:
+            version = sp3_header['version']
+            pvflag = sp3_header['pvflag']
+            start_year = sp3_header['start_year']
+            start_month = sp3_header['start_month']
+            start_day = sp3_header['start_day']
+            start_hour = sp3_header['start_hour']
+            start_min = sp3_header['start_min']
+            start_sec = sp3_header['start_sec']
+            num_epochs = sp3_header['num_epochs']
+            data_used = sp3_header['data_used']
+            coord_sys = sp3_header['coord_sys']
+            orbit_type = sp3_header['orbit_type']
+            agency = sp3_header['agency']
+            sats = sp3_header['sats']
+            sat_accuracy = sp3_header['sat_accuracy']
+            gpsweek = sp3_header['gpsweek']
+            sow = sp3_header['sow']
+            epoch_int = sp3_header['epoch_int']
+            modjul = sp3_header['modjul']
+            frac = sp3_header['frac']
+            numsats = sp3_header['numsats']
+            file_type = sp3_header['file_type']
+            time_system = sp3_header['time_system']
+            base_pos = sp3_header['base_pos']
+            base_clk = sp3_header['base_clk']
+            comments = sp3_header['comments']
+        except KeyError as e:
+            key = e.args[0]
+            logger.error(f"Keyword argument sp3dict lacks 'header':'{key}' "
+                         f"key",stack_info=True)
+            raise KeyError(f"'header':'{key}'")
+
+        # Try to get data epochs from sp3dict
+        try:
+            sp3_data = sp3dict['data']
+        except KeyError:
+            logger.error("Keyword argument sp3dict lacks 'data' key",
+                         stack_info=True)
+            raise KeyError("'data'")
+
+        try:
+            epochs = sp3_data['epochs']
+        except KeyError:
+            logger.error("Keyword argument sp3dict lacks 'data':'epochs' "
+                         "key",stack_info=True)
+            raise KeyError("'data':'epochs'")
+
+        # Raise a warning if a file with name self.sp3file already exists
+        path = pathlib.Path(self.sp3file)
+        if path.is_file():
+            logger.warning(f"\nOverwriting the file {self.sp3file}, which "
+                           f"already exists.")
+
+        # Try to open sp3file for writing
+        try:
+            f = open(self.sp3file,'w')
+        except IOError:
+            logger.error(f"\nThe specified sp3 file {self.sp3file} cannot be "
+                         f"opened for writing\n",stack_info=True)
+            raise IOError(f"File {self.sp3file} cannot be opened for writing!")
+        else:
+            with f:
+
+                # Line 1
+                f.write('{0:2s}{1:1s}{2:4d} {3:2d} {4:2d} {5:2d} {6:2d} '
+                    '{7:11.8f} {8:7d} {9:5s} {10:5s} {11:3s} {12:4s}\n'.format(
+                    version,pvflag,start_year,start_month,start_day,start_hour,
+                    start_min,start_sec,num_epochs,data_used,coord_sys,
+                    orbit_type,agency))
+
+                # Line 2
+                f.write('{0:2s} {1:4d} {2:15.8f} {3:14.8f} {4:5d} '
+                    '{5:15.13f}\n'.format('##',gpsweek,sow,epoch_int,modjul,frac))
+
+                # Line 3 first part
+                f.write('{0:2s} {1:3d} '.format('+',numsats))
+
+                # Satellites
+                sat_ctr = 0
+                line_ctr = 1
+                for sat in sats:
+                    sat_ctr += 1
+                    if (sat_ctr > 17):
+                        line_ctr += 1
+                        f.write('\n{0:2s} '.format('+'))
+                        sat_ctr = 1
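+                    # The satellite IDs go 17 per line on the '+ '
+                    # continuation lines of the SP3 header; the padding
+                    # loops below fill unused slots with ' 0' entries and
+                    # pad short constellations out to the minimum of five
+                    # '+' lines of the classic SP3a/c layout.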
f.write('{0:3s}'.format(sat)) + numZeros = 17-sat_ctr + for i in range(0,numZeros): + f.write(' {0:2d}'.format(0)) + numZeroLines = 5 - line_ctr + for i in range(0,numZeroLines): + f.write('\n{0:2s} '.format('+')) + f.write((' {0:2d}'*17).format(0)) + + # Satellite accuracies + f.write('\n{0:2s} '.format('++')) + sat_ctr = 0 + line_ctr = 1 + for acc in sat_accuracy: + sat_ctr += 1 + if (sat_ctr > 17): + line_ctr += 1 + f.write('\n{0:2s} '.format('++')) + sat_ctr = 1 + f.write('{0:3d}'.format(acc)) + numZeros = 17-sat_ctr + for i in range(0,numZeros): + f.write('{0:3d}'.format(0)) + numZeroLines = 5 - line_ctr + for i in range(0,numZeroLines): + f.write('\n{0:2s} '.format('++')) + f.write(('{0:3d}'*17).format(0)) + + # c-lines + f.write('\n{0:2s} {1:2s} {2:2s} {3:3s} {4:3s}'.format( + '%c',file_type,'cc',time_system,'ccc')) + f.write((' {0:4s}'*4).format('cccc')) + f.write((' {0:5s}'*4).format('ccccc')) + f.write('\n{0:2s} {1:2s} {2:2s} {3:3s} {4:3s}'. + format('%c','cc','cc','ccc','ccc')) + f.write((' {0:4s}'*4).format('cccc')) + f.write((' {0:5s}'*4).format('ccccc')) + + # f-lines + f.write('\n{0:2s} {1:10.7f} {2:12.9f} {3:14.11f} {4:18.15f}' + .format('%f',base_pos,base_clk,0,0)) + f.write('\n{0:2s} {1:10.7f} {2:12.9f} {3:14.11f} {4:18.15f}' + .format('%f',0,0,0,0)) + + # i-lines + f.write('\n{0:2s}'.format('%i')) + f.write((' {0:4d}'*4).format(0)) + f.write((' {0:6d}'*4).format(0)) + f.write(' {0:9d}'.format(0)) + + f.write('\n{0:2s}'.format('%i')) + f.write((' {0:4d}'*4).format(0)) + f.write((' {0:6d}'*4).format(0)) + f.write(' {0:9d}'.format(0)) + + # comments + for comment in comments: + line = '\n{0:2s} {1:77s}'.format('/*',comment) + f.write(line.rstrip()) + + # we need at least 4 comment lines + numFillerComments = 4 - len(comments) + for i in range(0,numFillerComments): + f.write('\n{0:2s} '.format('/*')) + + # data lines + for i,epoch in enumerate(epochs): + f.write('\n{0:2s} {1:4d} {2:2d} {3:2d} {4:2d} {5:2d} ' + '{6:11.8f}'.format('*',epoch.year,epoch.month,epoch.day, + epoch.hour,epoch.minute,epoch.second)) + for sat in sats: + if sp3_data[(sat,epoch,'Pflag')] == 1: + if ((sat,epoch,'xcoord') in sp3_data + and ~np.isnan(sp3_data[(sat,epoch,'xcoord')])): + xcoord = sp3_data[(sat,epoch,'xcoord')] + else: + xcoord = 0.0 + if ((sat,epoch,'ycoord') in sp3_data + and ~np.isnan(sp3_data[(sat,epoch,'ycoord')])): + ycoord = sp3_data[(sat,epoch,'ycoord')] + else: + ycoord = 0.0 + if ((sat,epoch,'zcoord') in sp3_data + and ~np.isnan(sp3_data[(sat,epoch,'zcoord')])): + zcoord = sp3_data[(sat,epoch,'zcoord')] + else: + zcoord = 0.0 + if ((sat,epoch,'clock') in sp3_data + and ~np.isnan(sp3_data[(sat,epoch,'clock')])): + clock = sp3_data[(sat,epoch,'clock')] + else: + clock = 999999.999999 + if (sat,epoch,'xsdev') in sp3_data: + xsdev = str(sp3_data[(sat,epoch,'xsdev')]) + else: + xsdev = ' ' + if (sat,epoch,'ysdev') in sp3_data: + ysdev = str(sp3_data[(sat,epoch,'ysdev')]) + else: + ysdev = ' ' + if (sat,epoch,'zsdev') in sp3_data: + zsdev = str(sp3_data[(sat,epoch,'zsdev')]) + else: + zsdev = ' ' + if ((sat,epoch,'csdev') in sp3_data + and ~np.isnan(sp3_data[(sat,epoch,'csdev')])): + csdev = str(sp3_data[(sat,epoch,'csdev')]) + else: + csdev = ' ' + if (sat,epoch,'clk_event') in sp3_data: + clk_event = sp3_data[(sat,epoch,'clk_event')] + else: + clk_event = ' ' + if (sat,epoch,'clk_pred') in sp3_data: + clk_pred = sp3_data[(sat,epoch,'clk_pred')] + else: + clk_pred = ' ' + if (sat,epoch,'maneuver') in sp3_data: + maneuver = sp3_data[(sat,epoch,'maneuver')] + else: + maneuver = ' ' + if 
(sat,epoch,'orbit_pred') in sp3_data:
+                            orbit_pred = sp3_data[(sat,epoch,'orbit_pred')]
+                        else:
+                            orbit_pred = ' '
+                        str1 = '\n{0:1s}{1:3s}'.format('P',sat)
+                        str2 = ('{0:14.6f}{1:14.6f}{2:14.6f}{3:14.6f}'
+                            .format(xcoord,ycoord,zcoord,clock))
+                        str3 = (' {0:>2s} {1:>2s} {2:>2s} {3:>3s} {4:1s}'
+                            '{5:1s} {6:1s}{7:1s}'.format(xsdev,ysdev,zsdev,
+                            csdev,clk_event,clk_pred,maneuver,orbit_pred))
+                        line = str1 + str2 + str3
+                        f.write(line.rstrip())
+
+                    if sp3_data[(sat,epoch,'EPflag')] == 1:
+                        if ((sat,epoch,'xsdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xsdev-hres')])):
+                            xsdev_hres = sp3_data[(sat,epoch,'xsdev-hres')]
+                        else:
+                            xsdev_hres = ' '
+                        if ((sat,epoch,'ysdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'ysdev-hres')])):
+                            ysdev_hres = sp3_data[(sat,epoch,'ysdev-hres')]
+                        else:
+                            ysdev_hres = ' '
+                        if ((sat,epoch,'zsdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'zsdev-hres')])):
+                            zsdev_hres = sp3_data[(sat,epoch,'zsdev-hres')]
+                        else:
+                            zsdev_hres = ' '
+                        if ((sat,epoch,'csdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'csdev-hres')])):
+                            csdev_hres = sp3_data[(sat,epoch,'csdev-hres')]
+                        else:
+                            csdev_hres = ' '
+                        if ((sat,epoch,'xy-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xy-corr')])):
+                            xy_corr = sp3_data[(sat,epoch,'xy-corr')]
+                        else:
+                            xy_corr = ' '
+                        if ((sat,epoch,'xz-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xz-corr')])):
+                            xz_corr = sp3_data[(sat,epoch,'xz-corr')]
+                        else:
+                            xz_corr = ' '
+                        if ((sat,epoch,'xc-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xc-corr')])):
+                            xc_corr = sp3_data[(sat,epoch,'xc-corr')]
+                        else:
+                            xc_corr = ' '
+                        if ((sat,epoch,'yz-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'yz-corr')])):
+                            yz_corr = sp3_data[(sat,epoch,'yz-corr')]
+                        else:
+                            yz_corr = ' '
+                        if ((sat,epoch,'yc-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'yc-corr')])):
+                            yc_corr = sp3_data[(sat,epoch,'yc-corr')]
+                        else:
+                            yc_corr = ' '
+                        if ((sat,epoch,'zc-corr') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'zc-corr')])):
+                            zc_corr = sp3_data[(sat,epoch,'zc-corr')]
+                        else:
+                            zc_corr = ' '
+                        str1 = ('\n{0:2s} {1:4s} {2:4s} {3:4s} {4:7s}'
+                            .format('EP',xsdev_hres,ysdev_hres,
+                            zsdev_hres,csdev_hres))
+                        str2 = (' {0:8s} {1:8s} {2:8s} {3:8s} {4:8s} '
+                            '{5:8s}'.format(xy_corr,xz_corr,xc_corr,
+                            yz_corr,yc_corr,zc_corr))
+                        line = str1 + str2
+                        f.write(line.rstrip())
+
+                    if sp3_data[(sat,epoch,'Vflag')] == 1:
+                        if ((sat,epoch,'xvel') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xvel')])):
+                            xvel = sp3_data[(sat,epoch,'xvel')]
+                        else:
+                            xvel = 0.0
+                        if ((sat,epoch,'yvel') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'yvel')])):
+                            yvel = sp3_data[(sat,epoch,'yvel')]
+                        else:
+                            yvel = 0.0
+                        if ((sat,epoch,'zvel') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'zvel')])):
+                            zvel = sp3_data[(sat,epoch,'zvel')]
+                        else:
+                            zvel = 0.0
+                        if ((sat,epoch,'clkrate') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'clkrate')])):
+                            clkrate = sp3_data[(sat,epoch,'clkrate')]
+                        else:
+                            clkrate = 999999.999999
+                        if ((sat,epoch,'xvel-sdev') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'xvel-sdev')])):
+                            xvel_sdev = sp3_data[(sat,epoch,'xvel-sdev')]
+                        else:
+                            xvel_sdev = ' '
+                        if ((sat,epoch,'yvel-sdev') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'yvel-sdev')])):
+                            yvel_sdev = sp3_data[(sat,epoch,'yvel-sdev')]
+                        else:
+                            yvel_sdev = ' '
+                        if ((sat,epoch,'zvel-sdev') in sp3_data
+                                and ~np.isnan(sp3_data[(sat,epoch,'zvel-sdev')])):
+                            zvel_sdev = sp3_data[(sat,epoch,'zvel-sdev')]
+                        else:
+                            zvel_sdev = ' '
+                        if ((sat,epoch,'clkrate-sdev') in sp3_data
+                                and ~np.isnan(sp3_data[
+                                (sat,epoch,'clkrate-sdev')])):
+                            clkrate_sdev = (sp3_data
+                                [(sat,epoch,'clkrate-sdev')])
+                        else:
+                            clkrate_sdev = ' '
+                        str1 = '\n{0:1s}{1:3s}'.format('V',sat)
+                        str2 = ('{0:14.6f}{1:14.6f}{2:14.6f}{3:14.6f}'
+                            .format(xvel,yvel,zvel,clkrate))
+                        str3 = (' {0:2s} {1:2s} {2:2s} {3:3s} '
+                            .format(xvel_sdev,yvel_sdev,zvel_sdev,
+                            clkrate_sdev))
+                        line = str1 + str2 + str3
+                        f.write(line.rstrip())
+
+                    if sp3_data[(sat,epoch,'EVflag')] == 1:
+                        if ((sat,epoch,'xvelsdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data[
+                                (sat,epoch,'xvelsdev-hres')])):
+                            xvelsdev_hres = (sp3_data
+                                [(sat,epoch,'xvelsdev-hres')])
+                        else:
+                            xvelsdev_hres = ' '
+                        if ((sat,epoch,'yvelsdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'yvelsdev-hres')])):
+                            yvelsdev_hres = (sp3_data
+                                [(sat,epoch,'yvelsdev-hres')])
+                        else:
+                            yvelsdev_hres = ' '
+                        if ((sat,epoch,'zvelsdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'zvelsdev-hres')])):
+                            zvelsdev_hres = (sp3_data
+                                [(sat,epoch,'zvelsdev-hres')])
+                        else:
+                            zvelsdev_hres = ' '
+                        if ((sat,epoch,'clkrate-sdev-hres') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'clkrate-sdev-hres')])):
+                            clkrate_sdev_hres = (sp3_data
+                                [(sat,epoch,'clkrate-sdev-hres')])
+                        else:
+                            clkrate_sdev_hres = ' '
+                        if ((sat,epoch,'xy-vel-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'xy-vel-corr')])):
+                            xy_vel_corr = (sp3_data
+                                [(sat,epoch,'xy-vel-corr')])
+                        else:
+                            xy_vel_corr = ' '
+                        if ((sat,epoch,'xz-vel-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'xz-vel-corr')])):
+                            xz_vel_corr = (sp3_data
+                                [(sat,epoch,'xz-vel-corr')])
+                        else:
+                            xz_vel_corr = ' '
+                        if ((sat,epoch,'yz-vel-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'yz-vel-corr')])):
+                            yz_vel_corr = (sp3_data
+                                [(sat,epoch,'yz-vel-corr')])
+                        else:
+                            yz_vel_corr = ' '
+                        if ((sat,epoch,'xvel-clkrate-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'xvel-clkrate-corr')])):
+                            xvel_clkrate_corr = (sp3_data
+                                [(sat,epoch,'xvel-clkrate-corr')])
+                        else:
+                            xvel_clkrate_corr = ' '
+                        if ((sat,epoch,'yvel-clkrate-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'yvel-clkrate-corr')])):
+                            yvel_clkrate_corr = (sp3_data
+                                [(sat,epoch,'yvel-clkrate-corr')])
+                        else:
+                            yvel_clkrate_corr = ' '
+                        if ((sat,epoch,'zvel-clkrate-corr') in sp3_data
+                                and ~np.isnan(sp3_data
+                                [(sat,epoch,'zvel-clkrate-corr')])):
+                            zvel_clkrate_corr = (sp3_data
+                                [(sat,epoch,'zvel-clkrate-corr')])
+                        else:
+                            zvel_clkrate_corr = ' '
+                        str1 = ('\n{0:2s} {1:4s} {2:4s} {3:4s} {4:7s}'
+                            .format('EV',xvelsdev_hres,yvelsdev_hres,
+                            zvelsdev_hres,clkrate_sdev_hres))
+                        str2 = (' {0:8s} {1:8s} {2:8s} {3:8s} {4:8s} '
+                            '{5:8s}'.format(xy_vel_corr,xz_vel_corr,
+                            xvel_clkrate_corr,yz_vel_corr,
+                            yvel_clkrate_corr,zvel_clkrate_corr))
+                        line = str1 + str2
+                        f.write(line.rstrip())
+
+                # EOF line
+                f.write('\n{0:3s}\n'.format('EOF'))
+
+
+
+class SatelliteMetadata:
+
+    """
+    Class of satellite metadata
+    Reads an IGS-standard satellite sinex metadata file
+
+    """
+
+    def __init__(self,metadata_file):
+
+        """
+        Initialize SatelliteMetadata class
+
+        Keyword arguments:
+            metadata_file [str] : filename of the metadata sinex file
+
+        Updates:
+            self.metadata_file [str]
+            self.svn_to_prn [dict]: dictionary containing information to map
+                                    from svn to prn
+            self.prn_to_svn [dict]: dictionary containing information to map
+                                    from prn to svn
+            self.sat_identifier [dict]: dictionary containing satellite
+                                        identifier information
self.freq_channel [dict]: dictionary containing frequency channel + information + """ + + # Check the given satellite metadata file + if not isinstance(metadata_file,str): + logger.error("The input metadata_file needs to be a string", + stack_info=True) + raise TypeError("The input metadata_file needs to be a string") + + # Set metadata_file attribute + self.metadata_file = metadata_file + + # dictionaries containing information to map between svn and prn + svn_to_prn = {} + prn_to_svn = {} + + # dictionary containing satellite identifier information + sat_identifier = {} + + # dictionary containing frequency channel information + freq_channel = {} + + # Regex + satid_hdr_rgx = re.compile("^\+SATELLITE\/IDENTIFIER") + svnprn_hdr_rgx = re.compile("^\+SATELLITE\/PRN") + freqch_hdr_rgx = re.compile("^\+SATELLITE\/FREQUENCY_CHANNEL") + comment_rgx = re.compile("^\*") + end_rgx = re.compile("^\-") + + # Try to open the metadata file and fill in the attributes + try: + meta_fid = open(self.metadata_file,'r',encoding='ISO-8859-1') + except IOError: + logger.error(f"Metadata file {self.metadata_file} is not " + f"accessible!", stack_info=True) + raise IOError(f"File {self.metadata_file} not found!") + else: + with meta_fid: + for line in meta_fid: + if satid_hdr_rgx.search(line): + for line in meta_fid: + if end_rgx.search(line): + break + if not comment_rgx.search(line): + stuff = line.strip().split() + system_id = stuff[0][0] + svn = int(stuff[0][1:]) + cospar_id = stuff[1] + sat_cat = stuff[2] + block = stuff[3] + comment = ' '.join(stuff[4:]) + + svn_full = stuff[0] + + # Fill in sat_identifier dictionary + sat_identifier[system_id,svn] = ( + [cospar_id,sat_cat,block,comment]) + + if svnprn_hdr_rgx.search(line): + for line in meta_fid: + if end_rgx.search(line): + break + if not comment_rgx.search(line): + stuff = line.strip().split() + system_id = stuff[0][0] + svn = int(stuff[0][1:]) + valid_from = stuff[1].split(":") + year_from = int(valid_from[0]) + doy_from = int(valid_from[1]) + sod_from = int(valid_from[2]) + valid_to = stuff[2].split(":") + year_to = int(valid_to[0]) + doy_to = int(valid_to[1]) + sod_to = int(valid_to[2]) + if year_to == 0: + year_to = 9999 + if doy_to == 0: + doy_to = 365 + gc_from = gpsCal() + gc_from.set_yyyy_ddd_sod(year_from, + doy_from,sod_from) + epoch_from = gc_from.dto + gc_to = gpsCal() + gc_to.set_yyyy_ddd_sod(year_to,doy_to,sod_to) + epoch_to = gc_to.dto + prn = int(stuff[3][1:]) + + # Fill in svn_to_prn dictionary + if ( (system_id,svn) not in + svn_to_prn.keys()): + svn_to_prn[system_id,svn] = ( + [[epoch_from,epoch_to,prn]]) + else: + svn_to_prn[system_id,svn] = np.vstack( + [svn_to_prn[system_id,svn], + [epoch_from,epoch_to,prn]]) + + # Fill in prn_to_svn dictionary + if ( (system_id,prn) not in + prn_to_svn.keys()): + prn_to_svn[system_id,prn] = ( + [[epoch_from,epoch_to,svn]]) + else: + prn_to_svn[system_id,prn] = np.vstack( + [prn_to_svn[system_id,prn], + [epoch_from,epoch_to,svn]]) + + + if freqch_hdr_rgx.search(line): + for line in meta_fid: + if end_rgx.search(line): + break + if not comment_rgx.search(line): + stuff = line.strip().split() + system_id = stuff[0][0] + svn = int(stuff[0][1:]) + valid_from = stuff[1].split(":") + year_from = int(valid_from[0]) + doy_from = int(valid_from[1]) + sod_from = int(valid_from[2]) + valid_to = stuff[2].split(":") + year_to = int(valid_to[0]) + doy_to = int(valid_to[1]) + sod_to = int(valid_to[2]) + if year_to == 0: + year_to = 9999 + if doy_to == 0: + doy_to = 365 + gc_from = gpsCal() + 
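+                                # The validity fields parsed above are
+                                # yyyy:ddd:sssss strings; an end time of
+                                # 0000:000:00000 means "still valid" in the
+                                # SINEX block and has just been remapped to
+                                # year 9999 / doy 365. An illustrative
+                                # SATELLITE/PRN entry (hypothetical values):
+                                #   G001 1978:053:00000 1985:199:00000 G04
+                                # The gpsCal calls below convert both ends
+                                # of the interval to datetime objects.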
gc_from.set_yyyy_ddd_sod(year_from, + doy_from,sod_from) + epoch_from = gc_from.dto + gc_to = gpsCal() + gc_to.set_yyyy_ddd_sod(year_to,doy_to,sod_to) + epoch_to = gc_to.dto + chn = int(stuff[3]) + comment = stuff[4] + + # Fill in freq_channel dictionary + if ( (system_id,svn) not in + freq_channel.keys()): + freq_channel[system_id,svn] = ( + [[epoch_from,epoch_to,chn,comment]]) + else: + freq_channel[system_id,svn] = np.vstack( + [freq_channel[system_id,svn], + [epoch_from,epoch_to,chn,comment]]) + + + # Update attributes + self.svn_to_prn = svn_to_prn + self.prn_to_svn = prn_to_svn + self.sat_identifier = sat_identifier + self.freq_channel = freq_channel + + + def get_prn(self,system_id,svn,epoch): + """ + map svn to prn + + Input arguments: + system_id [str]: satellite constellation ID (e.g. 'G','R', etc.) + svn [int]: satellite svn number + epoch [datetime]: epoch in datetime format + """ + + # Check the type of given arguments + + if not isinstance(system_id,str): + logger.error("\nThe argument system_id must be a string\n") + raise TypeError("The argument system_id must be a string") + + if not isinstance(svn,int): + logger.error("\nThe argument svn must be an integer\n") + raise TypeError("The argument svn must be an integer") + + if not isinstance(epoch,datetime.date): + logger.error("\nThe argument epoch must be a datetime instance\n") + raise TypeError("The argument epoch must be a datetime instance") + + found = False + year = epoch.strftime("%Y").zfill(4) + doy = epoch.strftime("%j").zfill(3) + sod = str(epoch.hour*3600 + epoch.minute*60 + epoch.second).zfill(5) + + # Check if svn key exists + if (system_id,svn) not in self.svn_to_prn.keys(): + logger.error(f"\nSVN {system_id}{str(svn).zfill(3)} not found!\n" + f"Check the metadata file {self.metadata_file}\n", + stack_info=True) + raise KeyError(f"('{system_id}', {svn})") + + + # Search in svn_to_prn for the svn key + for row in self.svn_to_prn[system_id,svn]: + if (epoch >= row[0] and epoch <= row[1]): + + # if found flag already True, it means that we had found + # a match previously; issue a warning and use the last match + if found == True: + logger.warning(f"\nPossible duplicate PRN matches in " + f"metadata for SVN and epoch " + f"{system_id}{str(svn).zfill(3)} " + f"{year}:{doy}:{sod}\nCheck the metadata " + f"file {self.metadata_file}\n") + + # prn found; set found flag as True + prn = row[2] + found = True + + # if after searching the whole array, found is still False, we haven't + # found a match; raise error + if found is False: + logger.error(f"\nNo PRN match found for SVN and epoch " + f"{system_id}{str(svn).zfill(3)} {year}:{doy}:{sod} " + f"\nCheck the metadata file {self.metadata_file}\n", + stack_info=True) + raise ValueError(f"No match in self.svn_to_prn for SVN and epoch " + f"{system_id}{str(svn).zfill(3)} {epoch}") + + + return prn + + + def get_svn(self,system_id,prn,epoch): + """ + map prn to svn + + Input arguments: + system_id [str] : satellite constellation ID (e.g. 'G','R', etc.) 
+ prn [int] : satellite prn number + epoch [datetime]: epoch in datetime format + """ + + # Check the type of given arguments + + if not isinstance(system_id,str): + logger.error("\nThe argument system_id must be a string\n") + raise TypeError("The argument system_id must be a string") + + if not isinstance(prn,int): + logger.error("\nThe argument prn must be an integer\n") + raise TypeError("The argument prn must be an integer") + + if not isinstance(epoch,datetime.date): + logger.error("\nThe argument epoch must be a datetime instance\n") + raise TypeError("The argument epoch must be a datetime instance") + + found = False + year = epoch.strftime("%Y").zfill(4) + doy = epoch.strftime("%j").zfill(3) + sod = str(epoch.hour*3600 + epoch.minute*60 + epoch.second).zfill(5) + + # Check if prn key exists + if (system_id,prn) not in self.prn_to_svn.keys(): + logger.error(f"\nPRN {system_id}{str(prn).zfill(2)} not found!\n" + f"Check the metadata file {self.metadata_file}\n", + stack_info=True) + raise KeyError(f"('{system_id}', {prn})") + + + # Search in prn_to_svn for the prn key + for row in self.prn_to_svn[system_id,prn]: + if (epoch >= row[0] and epoch <= row[1]): + + # if found flag already True, it means that we had found + # a match previously; issue a warning and use the last match + if found == True: + logger.warning(f"\nPossible duplicate SVN matches in " + f"metadata for epoch and PRN " + f"{year}:{doy}:{sod} " + f"{system_id}{str(prn).zfill(2)}" + f"\nCheck the metadata file " + f"{self.metadata_file}\n") + + # svn found; set found flag as True + svn = row[2] + found = True + + # if after searching the whole array, found is still False, we haven't + # found a match; raise error + if found is False: + logger.error(f"\nNo SVN match found for epoch and PRN " + f"{year}:{doy}:{sod} {system_id}{str(prn).zfill(2)}" + f"\nCheck the metadata file {self.metadata_file}\n", + stack_info=True) + raise ValueError(f"No match in self.prn_to_svn for PRN and epoch " + f"{system_id}{str(prn).zfill(2)} {epoch}") + + + return svn + + + def get_sat_identifier(self,system_id,svn): + """ + get satellite identifier information + + Input arguments: + system_id [str]: satellite constellation ID (e.g. 'G','R', etc.) 
+ svn [int]: satellite svn number + + Returns: + requested_satID [namedtuple]: satellite identifier information + from metadata file, including block, + cospar_id, sat_cat, and comments + + """ + + # Check the given argument types + if not isinstance(system_id,str): + logger.error("\nThe argument system_id must be a string\n") + raise TypeError("The argument system_id must be a string") + + if not isinstance(svn,int): + logger.error("\nThe argument svn must be an integer\n") + raise TypeError("The argument svn must be an integer") + + + satID = namedtuple('satID', + 'system_id svn block cospar_id sat_cat comment') + + # search in self.sat_identifier for svn + if (system_id,svn) in self.sat_identifier.keys(): + + cospar_id = self.sat_identifier[system_id,svn][0] + sat_cat = self.sat_identifier[system_id,svn][1] + block = self.sat_identifier[system_id,svn][2] + comment = self.sat_identifier[system_id,svn][3] + + requested_satID = satID(system_id,svn,block,cospar_id,sat_cat, + comment) + + else: + + logger.error(f"\nSVN {system_id}{str(svn).zfill(3)} not found!\n" + f"Check the metadata file {self.metadata_file}\n", + stack_info=True) + raise KeyError(f"('{system_id}', {svn})") + + return requested_satID + + + def get_freq_ch(self,system_id,svn,epoch): + """ + get frequency channel + + Input arguments: + system_id [str]: satellite constellation ID (e.g. 'G','R', etc.) + svn [int]: satellite svn number + epoch [datetime]: epoch in datetime format + """ + + # Check the type of given arguments + + if not isinstance(system_id,str): + logger.error("\nThe argument system_id must be a string\n") + raise TypeError("The argument system_id must be a string") + + if not isinstance(svn,int): + logger.error("\nThe argument svn must be an integer\n") + raise TypeError("The argument svn must be an integer") + + if not isinstance(epoch,datetime.date): + logger.error("\nThe argument epoch must be a datetime instance\n") + raise TypeError("The argument epoch must be a datetime instance") + + found = False + year = epoch.strftime("%Y").zfill(4) + doy = epoch.strftime("%j").zfill(3) + sod = str(epoch.hour*3600 + epoch.minute*60 + epoch.second).zfill(5) + + # Check if svn key exists + if (system_id,svn) not in self.freq_channel.keys(): + logger.error(f"\nSVN {system_id}{str(svn).zfill(3)} not found!\n" + f"Check the metadata file {self.metadata_file}\n", + stack_info=True) + raise KeyError(f"('{system_id}', {svn})") + + + # Search in freq_channel for the svn key + for row in self.freq_channel[system_id,svn]: + if (epoch >= row[0] and epoch <= row[1]): + + # if found flag already True, it means that we had found + # a match previously; issue a warning and use the last match + if found == True: + logger.warning(f"\nPossible duplicate frequency channel " + f"in metadata for SVN and epoch " + f"{system_id}{str(svn).zfill(3)} " + f"{year}:{doy}:{sod}\nCheck the metadata " + f"file {self.metadata_file}\n") + + # channel found; set found flag as True + chn = row[2] + found = True + + # if after searching the whole array, found is still False, we haven't + # found a match; raise error + if found is False: + logger.error(f"\nNo frequency channel match found for SVN and " + f"epoch {system_id}{str(svn).zfill(3)} " + f"{year}:{doy}:{sod}\nCheck the metadata file " + f"{self.metadata_file}\n",stack_info=True) + raise ValueError(f"No match in self.freq_channel for SVN and " + f"epoch {system_id}{str(svn).zfill(3)} {epoch}") + + return chn + + + +class EOPdata: + + """ + Class of earth orientation parameters (EOP) data + Reads 
EOP parameters from different formats + + """ + + def __init__(self,eop_file,eop_format): + + """ + Initialize EOPdata class + + Keyword arguments: + eop_file [str] : filename of the EOP data + eop_format [str] : format of the EOP data + + Updates: + self.eop_file [str] + self.eop_format [str] + self.eop_data [numpy.ndarray]: array containing eop information + The columns of self.eop_data are: + [mjd,xp,yp,ut1_utc,lod,xprate,yprate, + xp_sig,yp_sig,ut1_utc_sig,lod_sig,xprate_sig,yprate_sig] + units are: + [days,rad,rad,sec,sec/day,rad,rad, + rad,rad,sec,sec/day,rad,rad] + """ + + # Check the given EOP file + if not isinstance(eop_file,str): + logger.error("The input eop_file needs to be a string", + stack_info=True) + raise TypeError("The input eop_file needs to be a string") + + # Set eop_file attribute + self.eop_file = eop_file + + # Check the given EOP format + if not isinstance(eop_format,str): + logger.error("The input eop_format needs to be a string", + stack_info=True) + raise TypeError("The input eop_format needs to be a string") + + allowed_formats = ['IERS_EOP14_C04','IERS_EOP_rapid','IGS_ERP2'] + if eop_format not in allowed_formats: + logger.error("The input eop_format needs to be in " + f"{allowed_formats}",stack_info=True) + raise TypeError("The input eop_format not recognized!") + self.eop_format = eop_format + + # Initialize EOP array + eop_data = np.empty((0,13)) + + # Regular expressions + # header lines of c04 format start with space or # + c04_header_rgx = re.compile("^(\s|\#)") + + # Try to open the EOP file and fill in the eop_data array + try: + eop_fid = open(self.eop_file,'r') + except IOError: + logger.error(f"EOP file {self.eop_file} is not " + f"accessible!", stack_info=True) + raise IOError(f"File {self.eop_file} not found!") + else: + with eop_fid: + for line in eop_fid: + + # Based on the source format, read EOP values + if self.eop_format == 'IERS_EOP14_C04': + if not c04_header_rgx.search(line): + mjd = float(line[12:19]) + # for xp and yp, convert arc seconds to radians + xp = (float(line[19:30])/3600.0)*np.pi/180.0 + yp = (float(line[30:41])/3600.0)*np.pi/180.0 + ut1_utc = float(line[41:53]) + lod = float(line[53:65]) + xprate = (float(line[65:76])/3600.0)*np.pi/180.0 + yprate = (float(line[76:87])/3600.0)*np.pi/180.0 + xp_sig = (float(line[87:98])/3600.0)*np.pi/180.0 + yp_sig = (float(line[98:109])/3600.0)*np.pi/180.0 + ut1_utc_sig = float(line[109:120]) + lod_sig = float(line[120:131]) + xprate_sig = ((float(line[131:143])/3600.0) + *np.pi/180.0) + yprate_sig = ((float(line[143:155])/3600.0) + *np.pi/180.0) + eop_data = np.append(eop_data,np.array( + [[mjd,xp,yp,ut1_utc,lod,xprate,yprate, + xp_sig,yp_sig,ut1_utc_sig,lod_sig, + xprate_sig,yprate_sig]]),axis=0) + + elif self.eop_format == 'IERS_EOP_rapid': + # Note: Bulletin A values are extracted + mjd = float(line[7:15]) + xp = (float(line[18:27])/3600.0)*np.pi/180.0 + xp_sig = (float(line[27:36])/3600.0)*np.pi/180.0 + yp = (float(line[37:46])/3600.0)*np.pi/180.0 + yp_sig = (float(line[46:55])/3600.0)*np.pi/180.0 + ut1_utc = float(line[58:68]) + ut1_utc_sig = float(line[68:78]) + try: + lod = float(line[79:86])*1e-3 + except: + lod = np.nan + try: + lod_sig = float(line[86:93])*1e-3 + except: + lod_sig = np.nan + try: + xprate = ((float(line[97:106])*1e-3/3600.0) + *np.pi/180.0) + except: + xprate = np.nan + try: + xprate_sig = ((float(line[106:115])*1e-3/3600.0) + *np.pi/180.0) + except: + xprate_sig = np.nan + try: + yprate = ((float(line[116:125])*1e-3/3600.0) + *np.pi/180.0) + except: + yprate = 
np.nan + try: + yprate_sig = ((float(line[125:134])*1e-3/3600.0) + *np.pi/180.0) + except: + yprate_sig = np.nan + eop_data = np.append(eop_data,np.array( + [[mjd,xp,yp,ut1_utc,lod,xprate,yprate, + xp_sig,yp_sig,ut1_utc_sig,lod_sig, + xprate_sig,yprate_sig]]),axis=0) + + elif self.eop_format == 'IGS_ERP2': + stuff = line.strip().split() + if (stuff and stuff[0] == 'MJD'): + # Column orders + clm_order = stuff + try: + mjd = float(stuff[0]) + except: # line is header/does not contain values + continue + xp = (float(stuff[1])*1e-6/3600.0)*np.pi/180.0 + yp = (float(stuff[2])*1e-6/3600.0)*np.pi/180.0 + ut1_utc = float(stuff[3])*1e-7 + lod = float(stuff[4])*1e-7 + xp_sig = (float(stuff[5])*1e-6/3600.0)*np.pi/180.0 + yp_sig = (float(stuff[6])*1e-6/3600.0)*np.pi/180.0 + ut1_utc_sig = float(stuff[7])*1e-7 + lod_sig = float(stuff[8])*1e-7 + try: + xprate = ((float(stuff[clm_order.index('Xrt')]) + *1e-6/3600.0)*np.pi/180.0) + except: + xprate = np.nan + try: + yprate = ((float(stuff[clm_order.index('Yrt')]) + *1e-6/3600.0)*np.pi/180.0) + except: + yprate = np.nan + try: + xprate_sig = ((float(stuff[clm_order. + index('Xrtsig')])*1e-6/3600.0)*np.pi/180.0) + except: + xprate_sig = np.nan + try: + yprate_sig = ((float(stuff[clm_order. + index('Yrtsig')])*1e-6/3600.0)*np.pi/180.0) + except: + yprate_sig = np.nan + eop_data = np.append(eop_data,np.array( + [[mjd,xp,yp,ut1_utc,lod,xprate,yprate, + xp_sig,yp_sig,ut1_utc_sig,lod_sig, + xprate_sig,yprate_sig]]),axis=0) + + # Update attributes + self.eop_data = eop_data + + def get_eop(self,time_utc,interp_window=4.0): + + """ + Get Earth Orientation Parameters for a given set of UTC times + + Keyword arguments: + time_utc [datetime or list/array of datetimes] : UTC time(s) + interp_window [float] : the window of data used around each UTC + time to be used for interpolation [days] + + Updates: + self.time_utc [array of datetimes] + self.eop_interp [numpy.ndarray]: array containing eop information + for the requested UTC times + The columns of self.eop_interp are: + [time_utc,xp,yp,ut1_utc,lod,xprate,yprate, + xp_sig,yp_sig,ut1_utc_sig,lod_sig,xprate_sig,yprate_sig] + units are: + [UTC datetime,rad,rad,sec,sec/day,rad,rad, + rad,rad,sec,sec/day,rad,rad] + """ + + # Check the given arguments and set the attributes + if not isinstance(time_utc,(list,np.ndarray,datetime.datetime)): + raise TypeError("The given time_utc needs to be either a datetime " + "object or a list/array of datetime objects") + if not all(isinstance(item,datetime.datetime) + for item in np.atleast_1d(time_utc)): + raise TypeError("There are non-datetime items in time_utc") + self.time_utc = np.atleast_1d(time_utc) + + # Perform a lagrange interpolation to get the parameters at requested + # epochs + # The scipy lagrange interpolation is numerically unstable and should + # not be used for more than 20 points; therefore, we cut only four + # points around the desired point of interpolation; we have to do + # this in a loop because the window for each point is different + eop_interp = np.empty((0,7)) + for t in self.time_utc: + year = t.year + month = t.month + dom = t.day + hh = t.hour + mm = t.minute + sec = t.second + gc = gpsCal() + gc.set_yyyy_MM_dd_hh_mm_ss(year,month,dom,hh,mm,sec) + mjd = gc.mjd() + + # Slice the eop_data to only around the requested time + ind = np.where(abs(self.eop_data[:,0]-mjd) <= interp_window/2.0) + if len(ind[0]) > 20: + logger.warning("\nThe number of data points for lagrange " + f"interpolation = {len(ind[0])} > 20.\nThe " + "interpolation could be numerically 
unstable.\n"
+                               "Consider setting a smaller interpolation window "
+                               "(interp_window).")
+            eop_ref = self.eop_data[ind]
+
+            # Perform the lagrange interpolation with normalization
+            mjd_mean = np.mean(eop_ref[:,0])
+            mjd_scale = np.std(eop_ref[:,0])
+            mjd_data = (eop_ref[:,0] - mjd_mean)/mjd_scale
+            xp_mean = np.mean(eop_ref[:,1])
+            xp_scale = np.std(eop_ref[:,1])
+            xp_data = (eop_ref[:,1] - xp_mean)/xp_scale
+            yp_mean = np.mean(eop_ref[:,2])
+            yp_scale = np.std(eop_ref[:,2])
+            yp_data = (eop_ref[:,2] - yp_mean)/yp_scale
+            ut1_utc_mean = np.mean(eop_ref[:,3])
+            ut1_utc_scale = np.std(eop_ref[:,3])
+            ut1_utc_data = (eop_ref[:,3] - ut1_utc_mean)/ut1_utc_scale
+            lod_mean = np.mean(eop_ref[:,4])
+            lod_scale = np.std(eop_ref[:,4])
+            lod_data = (eop_ref[:,4] - lod_mean)/lod_scale
+            xprate_mean = np.mean(eop_ref[:,5])
+            xprate_scale = np.std(eop_ref[:,5])
+            xprate_data = (eop_ref[:,5] - xprate_mean)/xprate_scale
+            yprate_mean = np.mean(eop_ref[:,6])
+            yprate_scale = np.std(eop_ref[:,6])
+            yprate_data = (eop_ref[:,6] - yprate_mean)/yprate_scale
+
+            xp_poly = lagrange(mjd_data,xp_data)
+            yp_poly = lagrange(mjd_data,yp_data)
+            ut1_utc_poly = lagrange(mjd_data,ut1_utc_data)
+            lod_poly = lagrange(mjd_data,lod_data)
+            xprate_poly = lagrange(mjd_data,xprate_data)
+            yprate_poly = lagrange(mjd_data,yprate_data)
+
+            xp = xp_poly((mjd - mjd_mean)/mjd_scale)*xp_scale + xp_mean
+            yp = yp_poly((mjd - mjd_mean)/mjd_scale)*yp_scale + yp_mean
+            ut1_utc = (ut1_utc_poly((mjd - mjd_mean)/mjd_scale)*ut1_utc_scale
+                       + ut1_utc_mean)
+            lod = lod_poly((mjd - mjd_mean)/mjd_scale)*lod_scale + lod_mean
+            xprate = (xprate_poly((mjd - mjd_mean)/mjd_scale)*xprate_scale
+                      + xprate_mean)
+            yprate = (yprate_poly((mjd - mjd_mean)/mjd_scale)*yprate_scale
+                      + yprate_mean)
+
+            eop_interp = np.append(eop_interp,np.array(
+                [[mjd,xp,yp,ut1_utc,lod,xprate,yprate]]),axis=0)
+
+        self.eop_interp = eop_interp
+
+
+
+class Ref_sum:
+
+    """
+    Class of reference frame combination summary file
+    Reads the summary file in yaml format
+
+    """
+
+    def __init__(self,rfsum_yaml):
+
+        """
+        Read the reference frame summary file
+
+        Keyword arguments:
+            rfsum_yaml [str] : filename of the reference frame summary file
+
+        Updates:
+            self.rf_sum [dict] : contents of the rf summary file
+        """
+
+        # Check the given summary filename
+        if not isinstance(rfsum_yaml,str):
+            logger.error("The input rfsum_yaml file needs to be a string",
+                         stack_info=True)
+            raise TypeError("The input rfsum_yaml needs to be a string")
+
+        # Set the rfsum_yaml attribute
+        self.rfsum_yaml = rfsum_yaml
+
+        # Try to open the summary file
+        try:
+            stream = open(self.rfsum_yaml,'r')
+        except IOError:
+            raise IOError(f"File {self.rfsum_yaml} not accessible!")
+        else:
+            rf_sum = yaml.safe_load(stream)
+
+        # Update the attribute
+        self.rf_sum = rf_sum
+
+
+    def transfo(self,rf_align=[True,True,True]):
+
+        """
+        Get transformation parameters into a more friendly dictionary type
+
+        Keyword arguments:
+            rf_align [list] : list of booleans showing if we want to keep
+                              the transformation parameters for
+                              [Translations,Rotations,Scale]
+
+        Updates:
+            self.transformations [dict] : transformation parameters
+        """
+
+        transformations = {}
+
+        for ac_item in self.rf_sum['transfo']:
+
+            for item in ac_item:
+
+                acname = item['ac'].upper()
+                dow = item['day']
+                helmert = np.zeros(7)
+                helmert[6] = 1.0
+                if rf_align[0]: # mm to meters
+                    helmert[0:3] = np.array(item['T'])/1000.0
+                # the rotation angle signs should be reversed because the
+                # SINEX combination uses a left-handed coordinate system
+                if rf_align[1]: # mas to radians
+                    helmert[3:6] = (
+                        -np.array(item['R'])/1000.0/3600.0*np.pi/180.0)
+                if rf_align[2]: # ppb to scale
+                    helmert[6] = 1.0 + item['S']*1e-9
+
+                if dow not in transformations:
+                    transformations[dow] = {}
+                transformations[dow][acname] = helmert
+
+        self.transformations = transformations
+
+
+    def ut1_rot(self,acname,eop_aprfile,eop_obsfile,eop_format,gpsweek):
+
+        """
+        Correct transformation parameters for UT1 rotation
+
+        Keyword arguments:
+            acname [str] : 3-character name of the AC for which UT1 rotation
+                           is to be applied
+            eop_aprfile [list] : A-priori EOP file(s); only the first item
+                                 is used
+            eop_obsfile [list] : Observed EOP file(s); only the first item
+                                 is used
+            eop_format [str] : format of the EOP files (assumes the
+                               same format for a-priori and observed)
+            gpsweek [int] : GPS week of the combination
+
+        Updates:
+            self.transformations [dict] : transformation parameters
+        """
+
+        for dow in range(0,7):
+            if acname in self.transformations[dow]:
+
+                eop_apr = EOPdata(eop_aprfile[0],eop_format).eop_data
+                eop_obs = EOPdata(eop_obsfile[0],eop_format).eop_data
+
+                if np.shape(eop_apr) != np.shape(eop_obs):
+                    raise ValueError(f"\nSizes of the apriori and observed"
+                                     f" EOP files {eop_aprfile[0]} and "
+                                     f"{eop_obsfile[0]} must be the same!")
+
+                # correction to Z rotations (apriori UT1 - observed UT1)
+                mjd_cen = eop_apr[:,0]
+                zrot = -(eop_apr[:,3] - eop_obs[:,3]) # time seconds
+                zrot_dict = {}
+                for i,mjd in enumerate(mjd_cen):
+                    zrot_dict[mjd] = (zrot[i]*((365.25/366.25)*15/3600)
+                        *np.pi/180.0) # time seconds to arc-seconds to radians
+
+                gc = gpsCal()
+                gc.set_wwww_dow(gpsweek,dow)
+                year = gc.yyyy()
+                doy = gc.ddd()
+                yr_frac = (doy+0.5)/365.25
+                yrfloat = year + yr_frac
+                mjd = gc.mjd() + 0.5
+
+                # add ut1 correction to Z rotation
+                logger.debug(f"{acname} {mjd} dUT1: {zrot_dict[mjd]}")
+                logger.debug(f"trn {acname} before: {self.transformations[dow][acname][5]}")
+                self.transformations[dow][acname][5] += zrot_dict[mjd]
+                logger.debug(f"trn {acname} after: {self.transformations[dow][acname][5]}")
+
+
+
+class NANU_sum:
+
+    """
+    Class of nanu summary file
+    Reads the nanu summary file and looks for DV events
+
+    """
+
+    def __init__(self,nanusum):
+
+        """
+        Read the nanu summary file
+
+        Keyword arguments:
+            nanusum [str] : filename of the nanu summary file
+
+        Updates:
+            self.nanu_sum [dict] : dict containing info on DV events
+        """
+
+        # Check the given summary filename
+        if not isinstance(nanusum,str):
+            logger.error("The input nanusum file needs to be a string",
+                         stack_info=True)
+            raise TypeError("The input nanusum needs to be a string")
+
+        # Set the nanusum_file attribute
+        self.nanusum_file = nanusum
+
+        # Initialize nanu dictionary
+        nanu_sum = {}
+
+        # Try to open the nanu file and fill in the nanu_sum dict
+        try:
+            nanu_fid = open(self.nanusum_file,'r')
+        except IOError:
+            logger.error(f"NANU summary file {self.nanusum_file} is not "
+                         f"accessible!", stack_info=True)
+            raise IOError(f"File {self.nanusum_file} not found!")
+        else:
+            with nanu_fid:
+                read_flag = False
+                for line in nanu_fid:
+                    if "+nanu_sum" in line:
+                        read_flag = True
+                        continue
+                    elif "-nanu_sum" in line:
+                        read_flag = False
+                    if read_flag and line.strip():
+                        if ("nanu_sum" not in line and line[0]!="*"):
+                            nanu_number = int(line[1:8])
+                            nanu_type = line[9:19].strip()
+                            if "-" not in line[20:23]:
+                                prn = int(line[20:23])
+                            else:
+                                prn = 0
+                            if "-" not in line[25:32]:
+                                ref_nanu = int(line[25:32])
+                            else:
+                                ref_nanu = 0
+                            if "-" not in line[34:47]:
+                                year_start = int(line[34:38])
+                                doy_start = int(line[39:42])
+                                hr_start = int(line[43:45])
+                                min_start = int(line[45:47])
+                            else:
+                                year_start = 9999
+                                doy_start = 999
+ hr_start = 99 + min_start = 99 + if "-" not in line[49:62]: + year_end = int(line[49:53]) + doy_end = int(line[54:57]) + hr_end = int(line[58:60]) + min_end = int(line[60:62]) + else: + year_end = 9999 + doy_end = 999 + hr_end = 99 + min_end = 99 + nanu_sum[nanu_number] = ( + [nanu_type,prn,ref_nanu, + year_start,doy_start,hr_start,min_start, + year_end,doy_end,hr_end,min_end]) + self.nanu_sum = nanu_sum + + + def get_dv(self,solution): + + """ + Get DV maneuver epochs + + Input: + solution [str] : solution: ultra-rapid/rapid/final + + Updates: + self.dv [array] : array of year,doy, start epoch and PRN for the + satellites experiencing maneuvers + self.dvfull [array] : array of year,doy, start and end epoch and PRN + for the maneuvering satellites + """ + + dv = np.empty((0,2)) + dvfull = np.empty((0,3)) + nanu_sum = self.nanu_sum + for nanu in nanu_sum: + nanu_type = nanu_sum[nanu][0] + logger.debug(f"RM_DV solution: {solution}") + if solution != "ultra-rapid": + if nanu_type == 'FCSTSUMM': + ref_nanu = nanu_sum[nanu][2] + if ref_nanu in nanu_sum: + nanu_type_ref = nanu_sum[ref_nanu][0] + if nanu_type_ref == 'FCSTDV': + prn = 'G'+str(nanu_sum[nanu][1]).zfill(2) + year_start = nanu_sum[nanu][3] + doy_start = nanu_sum[nanu][4] + hr_start = nanu_sum[nanu][5] + min_start = nanu_sum[nanu][6] + year_end = nanu_sum[nanu][7] + doy_end = nanu_sum[nanu][8] + hr_end = nanu_sum[nanu][9] + min_end = nanu_sum[nanu][10] + gc = gpsCal() + gc.set_yyyy_ddd(year_start,doy_start) + month_start = gc.MM() + dom_start = gc.dom() + dt_start = datetime.datetime( + year_start,month_start,dom_start,hr_start,min_start) + gc = gpsCal() + gc.set_yyyy_ddd(year_end,doy_end) + month_end = gc.MM() + dom_end = gc.dom() + dt_end = datetime.datetime( + year_end,month_end,dom_end,hr_end,min_end) + dv = np.append(dv,np.array([[dt_start,prn]]),axis=0) + dvfull = np.append(dvfull,np.array([[dt_start,dt_end,prn]]),axis=0) + delta = datetime.timedelta(days=1) + dt = dt_start + delta + dt = dt.replace(hour=0,minute=0) + while dt <= dt_end: + dv = np.append(dv,np.array([[dt,prn]]),axis=0) + dt += delta + else: + logger.debug(f"RM_DV solution is {solution}") + if nanu_type == 'FCSTDV': + prn = 'G'+str(nanu_sum[nanu][1]).zfill(2) + year_start = nanu_sum[nanu][3] + doy_start = nanu_sum[nanu][4] + hr_start = nanu_sum[nanu][5] + min_start = nanu_sum[nanu][6] + year_end = nanu_sum[nanu][7] + doy_end = nanu_sum[nanu][8] + hr_end = nanu_sum[nanu][9] + min_end = nanu_sum[nanu][10] + gc = gpsCal() + gc.set_yyyy_ddd(year_start,doy_start) + month_start = gc.MM() + dom_start = gc.dom() + dt_start = datetime.datetime( + year_start,month_start,dom_start,hr_start,min_start) + gc = gpsCal() + gc.set_yyyy_ddd(year_end,doy_end) + month_end = gc.MM() + dom_end = gc.dom() + dt_end = datetime.datetime( + year_end,month_end,dom_end,hr_end,min_end) + dv = np.append(dv,np.array([[dt_start,prn]]),axis=0) + dvfull = np.append(dvfull,np.array([[dt_start,dt_end,prn]]),axis=0) + delta = datetime.timedelta(days=1) + dt = dt_start + delta + dt = dt.replace(hour=0,minute=0) + while dt <= dt_end: + dv = np.append(dv,np.array([[dt,prn]]),axis=0) + dt += delta + logger.debug(f"RM_DV dv: {dv}") + logger.debug(f"RM_DV dvfull: {dvfull}") + self.dv = dv + self.dvfull = dvfull + diff --git a/rocs/orbits.py b/rocs/orbits.py new file mode 100755 index 0000000..391f115 --- /dev/null +++ b/rocs/orbits.py @@ -0,0 +1,3040 @@ +# Orbit combination module; includes preprocessing and combination classes + +import numpy as np +import logging +import datetime +from scipy.interpolate 
import lagrange,InterpolatedUnivariateSpline +from rocs.io_data import SatelliteMetadata +import rocs.checkutils as checkutils +from rocs.helmert import Helmert +from rocs.gpscal import gpsCal + + +logger = logging.getLogger(__name__) + +# Toggle between a version compatible with old software and the new version +old_version = True + + +class Data: + + # To represent a data point corresponding to x and y = f(x) + + def __init__(self, x, y): + self.x = x + self.y = y + + +def interpolate(f: list, xi: int, n: int) -> float: + + # function to interpolate the given data points using Lagrange's formula + # This function implements Lagrange's interpolation based on + # the example from GeeksforGeeks. + # Source: GeeksforGeeks, https://www.geeksforgeeks.org/lagranges-interpolation/ + # + # xi -> corresponds to the new data point whose value is to be obtained + # n -> represents the number of known data points + + # Initialize result + result = 0.0 + for i in range(n): + + # Compute individual terms of above formula + term = f[i].y + for j in range(n): + if j != i: + term = term * (xi - f[j].x) / (f[i].x - f[j].x) + + # Add current term to result + result += term + + return result + +def extract_windows_vectorized(array, clearing_time_index, max_time, sub_window_size): + start = clearing_time_index + 1 - sub_window_size + 1 + + sub_windows = ( + start + + # expand_dims are used to convert a 1D array to 2D array. + np.expand_dims(np.arange(sub_window_size), 0) + + np.expand_dims(np.arange(max_time + 1), 0).T + ) + + return array[sub_windows] + + + +class OrbitPrep: + + """ + class of individual orbits with preprocessing methods + + """ + + def __init__(self,sp3all,ac_contribs=None,sat_metadata=None): + + """ + initialize OrbitPrep class + + Keyword arguments: + sp3all [dict] : dictionary containing + all individual sp3 orbit + dictionaries + ac_contribs [dict], optional : center contributions to + the combination + sat_metadata + [class 'io_data.SatelliteMetadata'], + optional : an instance of + input SatelliteMetadata + class + Updates: + self.sp3all [dict] + self.ac_contribs [dict] + self.sat_metadata [class 'io_data.SatelliteMetadata'] + + """ + + # Check the given sp3all + if not isinstance(sp3all,dict): + logger.error("\nThe given sp3all must be a dictionary\n", + stack_info=True) + raise TypeError("sp3all is not a dictionary!") + + if not all(isinstance(key,str) for key in sp3all.keys()): + logger.error("\nKeys of sp3all must all be strings\n", + stack_info=True) + raise TypeError("sp3all keys must be strings!") + + if not all(isinstance(item,dict) for item in sp3all.values()): + logger.error("\nValues of sp3all must all be dictionaries\n", + stack_info=True) + raise TypeError("sp3all values must be dictionaries!") + + # After the above checks, assign sp3all to a class attribute + self.sp3all = sp3all + + # Check the given ac_contribs + if ac_contribs is not None: + if not isinstance(ac_contribs,dict): + logger.error("\nThe given argument ac_contribs must be a " + "dictionary\n") + raise TypeError("ac_contribs must be of type dict") + if not ac_contribs: + logger.error("\nThere must be at least one dictionary item in " + "ac_contribs\n") + raise ValueError("ac_contibs is empty!") + + # Assign the attribute + self.ac_contribs = ac_contribs + + # Check the given sat_metadata + if sat_metadata is not None: + + if not isinstance(sat_metadata,SatelliteMetadata): + logger.error("\nsat_metadata must be an instance of " + "SatelliteMetadata class\n") + raise TypeError("sat_metadata must an instance 
of " + "SatelliteMetadata class") + + # Assign the attribute + self.sat_metadata = sat_metadata + + + def filter_contribs(self): + + """ + Filter sp3all attribute based on ac_contribs attribute so each center + only contains weighted and unweighted data, and not excluded data + + Keyword arguments: + + Updates: + self.sp3all [dict] : filtered sp3all dictionary + self.weighted_centers [list] : list of weighted centers + self.unweighted_centers [list] : list of unweighted centers + self.filtered [bool] : boolean flag to show if sp3all has + been filtered + """ + + # Check if ac_contribs attribute exists + if not hasattr(self,'ac_contribs'): + logger.error(f"\nNo ac_contribs attribute exists!\n") + raise AttributeError(f"No ac_contribs attribute") + + sp3all_filtered = {} + weighted_centers = [] + unweighted_centers = [] + weighted_sats = {} + unweighted_sats = {} + weighted_cens_by_sys = {} + unweighted_cens_by_sys = {} + + # Loop over all ACs + for acname in self.sp3all.keys(): + + # Initialize the list of filtered satellites + sats = self.sp3all[acname]['header']['sats'] + sat_accuracy = self.sp3all[acname]['header']['sat_accuracy'] + epochs = self.sp3all[acname]['data']['epochs'] + sats_weighted = [] + sat_accuracy_weighted = [] + sats_unweighted = [] + sat_accuracy_unweighted = [] + sats_wu = [] # weighted and unweighted sats (not excluded) + sat_accuracy_wu = [] + + + # weighted centers + if 'weighted' in self.ac_contribs: + + weighted = self.ac_contribs['weighted'] + + if weighted is not None: + + # weighted systems + if ('systems' in weighted + and weighted['systems'] is not None + and acname in weighted['systems'] + and weighted['systems'][acname] is not None): + systems = weighted['systems'][acname] + + # Add sat to sats_weighted, if weighted + for c,sat in enumerate(sats): + if sat[0] in systems and sat not in sats_weighted: + sats_weighted.append(sat) + sat_accuracy_weighted.append(sat_accuracy[c]) + if sat[0] in systems and sat not in sats_wu: + sats_wu.append(sat) + sat_accuracy_wu.append(sat_accuracy[c]) + + # weighted prns + if ('prns' in weighted and weighted['prns'] is not None + and acname in weighted['prns'] + and weighted['prns'][acname] is not None): + prns = weighted['prns'][acname] + + # Add sat to sats_weighted, if weighted + for c,sat in enumerate(sats): + if sat in prns and sat not in sats_weighted: + sats_weighted.append(sat) + sat_accuracy_weighted.append(sat_accuracy[c]) + if sat in prns and sat not in sats_wu: + sats_wu.append(sat) + sat_accuracy_wu.append(sat_accuracy[c]) + + # weighted svns + if ('svns' in weighted and weighted['svns'] is not None + and acname in weighted['svns'] + and weighted['svns'][acname] is not None): + svns = weighted['svns'][acname] + + # Add sat to sats_weighted, if weighted + if hasattr(self,'sat_metadata'): + for c,sat in enumerate(sats): + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + if (svn_str in svns + and sat not in sats_weighted): + sats_weighted.append(sat) + sat_accuracy_weighted.append( + sat_accuracy[c]) + if (svn_str in svns + and sat not in sats_wu): + sats_wu.append(sat) + sat_accuracy_wu.append( + sat_accuracy[c]) + else: + logger.warning("Weighted svns exist in ac_contribs" + " but there is no satellite " + "metadata information.\n Ignoring " + f"weighted svns: {svns}\n") + + # unweighted centers + if 'unweighted' in self.ac_contribs: + + unweighted = self.ac_contribs['unweighted'] + + if unweighted is not None: + + # unweighted systems + 
if ('systems' in unweighted + and unweighted['systems'] is not None + and acname in unweighted['systems'] + and unweighted['systems'][acname] is not None): + systems = unweighted['systems'][acname] + + # Add sat to sats_unweighted, if unweighted + for c,sat in enumerate(sats): + if (sat[0] in systems + and sat not in sats_unweighted): + sats_unweighted.append(sat) + sat_accuracy_unweighted.append(sat_accuracy[c]) + if (sat[0] in systems + and sat not in sats_wu): + sats_wu.append(sat) + sat_accuracy_wu.append(sat_accuracy[c]) + + # Remove sat from sats_weighted, if unweighted + ind_rm = ([c for c,sat in enumerate(sats_weighted) + if sat[0] in systems]) + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + + # unweighted prns + if ('prns' in unweighted and unweighted['prns'] is not None + and acname in unweighted['prns'] + and unweighted['prns'][acname] is not None): + prns = unweighted['prns'][acname] + + # Add sat to sats_unweighted, if unweighted + for c,sat in enumerate(sats): + if sat in prns and sat not in sats_unweighted: + sats_unweighted.append(sat) + sat_accuracy_unweighted.append(sat_accuracy[c]) + if sat in prns and sat not in sats_wu: + sats_wu.append(sat) + sat_accuracy_wu.append(sat_accuracy[c]) + + # Remove sat from sats_weighted, if unweighted + ind_rm = ([c for c,sat in enumerate(sats_weighted) + if sat in prns]) + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + + # unweighted svns + if ('svns' in unweighted and unweighted['svns'] is not None + and acname in unweighted['svns'] + and unweighted['svns'][acname] is not None): + svns = unweighted['svns'][acname] + + if hasattr(self,'sat_metadata'): + + # Add sat to sats_unweighted, if unweighted + for c,sat in enumerate(sats): + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + + if (svn_str in svns + and sat not in sats_unweighted): + sats_unweighted.append(sat) + sat_accuracy_unweighted.append( + sat_accuracy[c]) + if (svn_str in svns + and sat not in sats_wu): + sats_wu.append(sat) + sat_accuracy_wu.append( + sat_accuracy[c]) + + # Remove sat from sats_weighted, if unweighted + ind_rm = [] + for c,sat in enumerate(sats_weighted): + remove_sat = True + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + if svn_str not in svns: + remove_sat = False + if remove_sat: + ind_rm.append(c) + if ind_rm: + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + else: + logger.warning("Unweighted svns exist in " + "ac_contribs but there is no " + "satellite metadata information.\n " + "Ignoring unweighted svns: " + f"{svns}\n") + + # remove excluded centers + if 'excluded' in self.ac_contribs: + + excluded = self.ac_contribs['excluded'] + + if excluded is not None: + + # excluded systems + if ('systems' in excluded + and excluded['systems'] is not None + and acname in excluded['systems'] + and excluded['systems'][acname] is not None): + systems = excluded['systems'][acname] + + # Remove sat from sats_weighted, if excluded + ind_rm = ([c for c,sat in enumerate(sats_weighted) + if sat[0] in systems]) + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + + # Remove sat from sats_unweighted, if excluded + ind_rm = ([c for c,sat in enumerate(sats_unweighted) + if sat[0] in systems]) + for i in sorted(ind_rm, reverse=True): 
+ del sats_unweighted[i] + del sat_accuracy_unweighted[i] + + # Remove sat from sats_wu, if excluded + ind_rm = ([c for c,sat in enumerate(sats_wu) + if sat[0] in systems]) + for i in sorted(ind_rm, reverse=True): + del sats_wu[i] + del sat_accuracy_wu[i] + + # excluded prns + if ('prns' in excluded and excluded['prns'] is not None + and acname in excluded['prns'] + and excluded['prns'][acname] is not None): + prns = excluded['prns'][acname] + + # Remove sat from sats_weighted, if excluded + ind_rm = ([c for c,sat in enumerate(sats_weighted) + if sat in prns]) + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + + # Remove sat from sats_unweighted, if excluded + ind_rm = ([c for c,sat in enumerate(sats_unweighted) + if sat in prns]) + for i in sorted(ind_rm, reverse=True): + del sats_unweighted[i] + del sat_accuracy_unweighted[i] + + # Remove sat from sats_wu, if excluded + ind_rm = ([c for c,sat in enumerate(sats_wu) + if sat in prns]) + for i in sorted(ind_rm, reverse=True): + del sats_wu[i] + del sat_accuracy_wu[i] + + # excluded svns + if ('svns' in excluded and excluded['svns'] is not None + and acname in excluded['svns'] + and excluded['svns'][acname] is not None): + svns = excluded['svns'][acname] + + if hasattr(self,'sat_metadata'): + + # Remove sat from sats_weighted, if excluded + ind_rm = [] + for c,sat in enumerate(sats_weighted): + remove_sat = True + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + if svn_str not in svns: + remove_sat = False + if remove_sat: + ind_rm.append(c) + if ind_rm: + for i in sorted(ind_rm, reverse=True): + del sats_weighted[i] + del sat_accuracy_weighted[i] + + # Remove sat from sats_unweighted, if excluded + ind_rm = [] + for c,sat in enumerate(sats_unweighted): + remove_sat = True + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + if svn_str not in svns: + remove_sat = False + if remove_sat: + ind_rm.append(c) + if ind_rm: + for i in sorted(ind_rm, reverse=True): + del sats_unweighted[i] + del sat_accuracy_unweighted[i] + + # Remove sat from sats_wu, if excluded + ind_rm = [] + for c,sat in enumerate(sats_wu): + remove_sat = True + for epoch in epochs: + svn = self.sat_metadata.get_svn( + sat[0],int(sat[1:]),epoch) + svn_str = sat[0] + str(svn).zfill(3) + if svn_str not in svns: + remove_sat = False + if remove_sat: + ind_rm.append(c) + if ind_rm: + for i in sorted(ind_rm, reverse=True): + del sats_wu[i] + del sat_accuracy_wu[i] + + else: + logger.warning("Excluded svns exist in ac_contribs" + " but there is no satellite " + "metadata information.\n Ignoring " + f"svn exclusions for: {svns}\n") + + # Update the main weighted_sats and unweighted_sats dictionary + weighted_sats[acname] = sats_weighted + unweighted_sats[acname] = sats_unweighted + + # if sats_weighted is not empty, we will have a sp3 dict for the + # weighted data of this center + if sats_weighted: + + # list of systems for this center + sys_ids_weighted = [] + for sat in sats_weighted: + if sat[0] not in sys_ids_weighted: + sys_ids_weighted.append(sat[0]) + + # list of variables in the original sp3 dictionary of this center + varnames = [] + for key in self.sp3all[acname]['data']: + if (type(key) is tuple): + if (key[2] not in varnames): + varnames.append(key[2]) + + # The weighted sp3 dictionary for this AC + sp3_weighted = {} + + # The header is the same as the original header except 
for + # numsats, sats, sat_accuracy, and file_type + sp3_weighted['header'] = {} + for key in self.sp3all[acname]['header'].keys(): + sp3_weighted['header'][key] = (self.sp3all[acname] + ['header'][key]) + sp3_weighted['header']['numsats'] = len(sats_weighted) + sp3_weighted['header']['sats'] = sats_weighted + sp3_weighted['header']['sat_accuracy'] \ + = sat_accuracy_weighted + if len(sys_ids_weighted) > 1: + sp3_weighted['header']['file_type'] = 'M ' + else: + sp3_weighted['header']['file_type'] \ + = sys_ids_weighted[0] + + # Data epochs are the same + sp3_weighted['data'] = {} + sp3_weighted['data']['epochs'] = (self.sp3all[acname] + ['data']['epochs']) + + # Only store data for sats_weighted + for epoch in sp3_weighted['data']['epochs']: + for sat in sats_weighted: + for varname in varnames: + key = (sat,epoch,varname) + if key in self.sp3all[acname]['data']: + sp3_weighted['data'][key] = (self.sp3all + [acname]['data'][key]) + + # add ac to weighted_centers + weighted_centers.append(acname) + + weighted_cens_by_sys[acname] = sys_ids_weighted + + # if sats_unweighted is not empty, we will have a sp3 dict for the + # unweighted data of this center + if sats_unweighted: + + # list of systems for this center + sys_ids_unweighted = [] + for sat in sats_unweighted: + if sat[0] not in sys_ids_unweighted: + sys_ids_unweighted.append(sat[0]) + + # list of variables in the original sp3 dictionary of this center + varnames = [] + for key in self.sp3all[acname]['data']: + if (type(key) is tuple): + if (key[2] not in varnames): + varnames.append(key[2]) + + # The unweighted sp3 dictionary for this AC + sp3_unweighted = {} + + # The header is the same as the original header except for + # numsats, sats, sat_accuracy, and file_type + sp3_unweighted['header'] = {} + for key in self.sp3all[acname]['header'].keys(): + sp3_unweighted['header'][key] = (self.sp3all[acname] + ['header'][key]) + sp3_unweighted['header']['numsats'] = len(sats_unweighted) + sp3_unweighted['header']['sats'] = sats_unweighted + sp3_unweighted['header']['sat_accuracy'] \ + = sat_accuracy_unweighted + if len(sys_ids_unweighted) > 1: + sp3_unweighted['header']['file_type'] = 'M ' + else: + sp3_unweighted['header']['file_type'] \ + = sys_ids_unweighted[0] + + # Data epochs are the same + sp3_unweighted['data'] = {} + sp3_unweighted['data']['epochs'] = (self.sp3all[acname] + ['data']['epochs']) + + # Only store data for sats_unweighted + for epoch in sp3_unweighted['data']['epochs']: + for sat in sats_unweighted: + for varname in varnames: + key = (sat,epoch,varname) + if key in self.sp3all[acname]['data']: + sp3_unweighted['data'][key] = (self.sp3all + [acname]['data'][key]) + + # add ac to unweighted_centers + unweighted_centers.append(acname) + unweighted_cens_by_sys[acname] = sys_ids_unweighted + + # if sats_wu is not empty, we will have a sp3 dict for the + # weighted/unweighted data of this center + if sats_wu: + + # list of systems for this center + sys_ids_wu = [] + for sat in sats_wu: + if sat[0] not in sys_ids_wu: + sys_ids_wu.append(sat[0]) + + # list of variables in the original sp3 dictionary of this + # center + varnames = [] + for key in self.sp3all[acname]['data']: + if (type(key) is tuple): + if (key[2] not in varnames): + varnames.append(key[2]) + + # The weighted/unweighted sp3 dictionary for this AC + sp3_wu = {} + + # The header is the same as the original header except for + # numsats, sats, sat_accuracy, and file_type + sp3_wu['header'] = {} + for key in self.sp3all[acname]['header'].keys(): + 
sp3_wu['header'][key] = (self.sp3all[acname]
+                                            ['header'][key])
+                sp3_wu['header']['numsats'] = len(sats_wu)
+                sp3_wu['header']['sats'] = sats_wu
+                sp3_wu['header']['sat_accuracy'] \
+                        = sat_accuracy_wu
+                if len(sys_ids_wu) > 1:
+                    sp3_wu['header']['file_type'] = 'M '
+                else:
+                    sp3_wu['header']['file_type'] \
+                            = sys_ids_wu[0]
+
+                # Data epochs are the same
+                sp3_wu['data'] = {}
+                sp3_wu['data']['epochs'] = (self.sp3all[acname]
+                                                ['data']['epochs'])
+
+                # Only store data for sats_wu
+                for epoch in sp3_wu['data']['epochs']:
+                    for sat in sats_wu:
+                        for varname in varnames:
+                            key = (sat,epoch,varname)
+                            if key in self.sp3all[acname]['data']:
+                                sp3_wu['data'][key] = (self.sp3all
+                                        [acname]['data'][key])
+
+                sp3all_filtered[acname] = sp3_wu
+
+        # Update sp3all with the filtered version
+        self.sp3all = sp3all_filtered
+
+        # Re-generate weighted_centers and unweighted_centers using
+        # weighted_sats and unweighted_sats. Note that these lists should
+        # only be used to indicate which centres have at least one
+        # satellite contributing
+        weighted_centers = []
+        unweighted_centers = []
+        for acname in weighted_sats:
+            if (len(weighted_sats[acname]) > 0
+                    and acname not in weighted_centers):
+                weighted_centers.append(acname)
+        for acname in unweighted_sats:
+            if (len(unweighted_sats[acname]) > 0
+                    and acname not in weighted_centers
+                    and acname not in unweighted_centers):
+                unweighted_centers.append(acname)
+        weighted_centers.sort()
+        unweighted_centers.sort()
+
+        logger.debug(f"weighted_sats: {weighted_sats}")
+        logger.debug(f"unweighted_sats: {unweighted_sats}")
+        logger.debug(f"weighted_centers: {weighted_centers}")
+        logger.debug(f"unweighted_centers: {unweighted_centers}")
+        logger.debug(f"weighted_cens_by_sys: {weighted_cens_by_sys}")
+        logger.debug(f"unweighted_cens_by_sys: {unweighted_cens_by_sys}")
+
+        # Assign to attributes
+        self.weighted_centers = weighted_centers
+        self.unweighted_centers = unweighted_centers
+        self.filtered = True
+        self.weighted_sats = weighted_sats
+        self.unweighted_sats = unweighted_sats
+        self.weighted_cens_by_sys = weighted_cens_by_sys
+        self.unweighted_cens_by_sys = unweighted_cens_by_sys
+
+
+    def resample(self,sample_rate):
+
+        """
+        Resample the sp3all attribute to the given sampling rate,
+        interpolating or downsampling as needed
+
+        Keyword arguments:
+        sample_rate [int]: the sampling rate of the resampled sp3all in
+                           seconds
+
+        Updates:
+        self.sp3all [dict]
+        """
+
+        # Check the given sampling rate
+        if not isinstance(sample_rate,int):
+            logger.error("The given sample_rate must be an integer",
+                         stack_info=True)
+            raise TypeError(f"The given sample_rate {sample_rate} is not of "
+                            f"type integer")
+
+        # Initialize the resampled sp3 dictionary
+        sp3all_rs = {}
+
+        for acname in self.sp3all:
+
+            # Copy the header items to the resampled sp3 dictionary
+            sp3all_rs[acname] = {}
+            sp3all_rs[acname]['header'] = {}
+            for key in self.sp3all[acname]['header'].keys():
+                sp3all_rs[acname]['header'][key] = (self.sp3all[acname]
+                                                        ['header'][key])
+            sp3all_rs[acname]['data'] = {}
+
+            # read the original sample rate
+            sample_rate_orig = self.sp3all[acname]['header']['epoch_int']
+            epochs_orig = list(self.sp3all[acname]['data']['epochs'])
+
+            # Check if we need to interpolate or to downsample
+            if sample_rate < sample_rate_orig:
+
+                # if we know that the center is unweighted, do not interpolate
+                if (hasattr(self,'weighted_centers') and
+                        acname not in self.weighted_centers):
+                    sp3all_rs[acname]['data'] = self.sp3all[acname]['data']
+                    epochs_rs = list(epochs_orig)
+
+                else:
+
+                    # Interpolate
+                    epoch_start = epochs_orig[0]
+                    epoch_end = epochs_orig[-1]
+                    epochs_interp = []
+
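+                    # Interpolation scheme (summary, added for clarity): for
+                    # each satellite, the x/y/z series are cut into 11-epoch
+                    # sliding windows, and a polynomial through each window
+                    # is evaluated at the new sample times, conceptually:
+                    #     for each 11-point window w centred on epoch e:
+                    #         for t in arange(0, sample_rate_orig, sample_rate):
+                    #             value(e + t) = interpolate(w, offset(e) + t, 11)
+                    # The first and last few epochs are copied unchanged so
+                    # that no value is extrapolated beyond the original data
+                    # span.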
for sat in self.sp3all[acname]['header']['sats']: + xcoords = {} + ycoords = {} + zcoords = {} + epochs = list(epochs_orig) + xcoords_interp = {} + ycoords_interp = {} + zcoords_interp = {} + + for epoch in epochs: + if ((sat,epoch,'xcoord') in + self.sp3all[acname]['data'] + and self.sp3all[acname]['data'] + [(sat,epoch,'xcoord')] != 0): + xcoords[epoch] = (self.sp3all[acname]['data'] + [(sat,epoch,'xcoord')]) + else: + xcoords[epoch] = np.nan + if ((sat,epoch,'ycoord') in + self.sp3all[acname]['data'] + and self.sp3all[acname]['data'] + [(sat,epoch,'ycoord')] != 0): + ycoords[epoch] = (self.sp3all[acname]['data'] + [(sat,epoch,'ycoord')]) + else: + ycoords[epoch] = np.nan + if ((sat,epoch,'zcoord') in + self.sp3all[acname]['data'] + and self.sp3all[acname]['data'] + [(sat,epoch,'zcoord')] != 0): + zcoords[epoch] = (self.sp3all[acname]['data'] + [(sat,epoch,'zcoord')]) + else: + zcoords[epoch] = np.nan + + xlist = [] + ylist = [] + zlist = [] + epochs.sort() + for epoch in epochs: + xlist.append(xcoords[epoch]) + ylist.append(ycoords[epoch]) + zlist.append(zcoords[epoch]) + + tsec = np.arange(0,11*sample_rate_orig,sample_rate_orig) + epochs_window = epochs[5:-5] + xwindows = extract_windows_vectorized( + np.array(xlist),9,len(xlist)-11,11) + ywindows = extract_windows_vectorized( + np.array(ylist),9,len(ylist)-11,11) + zwindows = extract_windows_vectorized( + np.array(zlist),9,len(zlist)-11,11) + xpoly = [] + ypoly = [] + zpoly = [] + for i in range(len(xwindows)): + xpoly.append([Data(tsec[k],xwindows[i][k]) + for k in range(len(tsec))]) + ypoly.append([Data(tsec[k],ywindows[i][k]) + for k in range(len(tsec))]) + zpoly.append([Data(tsec[k],zwindows[i][k]) + for k in range(len(tsec))]) + epochs_interp = [] + x_interp = [] + y_interp = [] + z_interp = [] + + # Interpolation for the first 4 epochs + for k in range(3,5): + for t in np.arange(0,sample_rate_orig,sample_rate): + epochs_interp.append( + epochs[k]+datetime.timedelta(seconds=t)) + x_interp.append(interpolate( + xpoly[0],k*sample_rate_orig+t,11)) + y_interp.append(interpolate( + ypoly[0],k*sample_rate_orig+t,11)) + z_interp.append(interpolate( + zpoly[0],k*sample_rate_orig+t,11)) + + for i in range(len(xpoly)): + for t in np.arange(0,sample_rate_orig,sample_rate): + epochs_interp.append( + epochs_window[i] + + datetime.timedelta(seconds=t)) + x_interp.append(interpolate( + xpoly[i],5*sample_rate_orig+t,11)) + y_interp.append(interpolate( + ypoly[i],5*sample_rate_orig+t,11)) + z_interp.append(interpolate( + zpoly[i],5*sample_rate_orig+t,11)) + + # Interpolation for the last 4 epochs (up to when there + # is original data; no extrapolation!) 
+ for k in range(0,1): + for t in np.arange(0,sample_rate_orig,sample_rate): + epochs_interp.append( + epochs[len(epochs)-5+k] + + datetime.timedelta(seconds=t)) + x_interp.append(interpolate( + xpoly[-1],(k+6)*sample_rate_orig+t,11)) + y_interp.append(interpolate( + ypoly[-1],(k+6)*sample_rate_orig+t,11)) + z_interp.append(interpolate( + zpoly[-1],(k+6)*sample_rate_orig+t,11)) + + # First and last few epochs - no interpolation + epochs_interp.insert(0,epochs[2]) + x_interp.insert(0,xlist[2]) + y_interp.insert(0,ylist[2]) + z_interp.insert(0,zlist[2]) + epochs_interp.insert(0,epochs[1]) + x_interp.insert(0,xlist[1]) + y_interp.insert(0,ylist[1]) + z_interp.insert(0,zlist[1]) + epochs_interp.insert(0,epochs[0]) + x_interp.insert(0,xlist[0]) + y_interp.insert(0,ylist[0]) + z_interp.insert(0,zlist[0]) + epochs_interp.append(epochs[-4]) + x_interp.append(xlist[-4]) + y_interp.append(ylist[-4]) + z_interp.append(zlist[-4]) + epochs_interp.append(epochs[-3]) + x_interp.append(xlist[-3]) + y_interp.append(ylist[-3]) + z_interp.append(zlist[-3]) + epochs_interp.append(epochs[-2]) + x_interp.append(xlist[-2]) + y_interp.append(ylist[-2]) + z_interp.append(zlist[-2]) + epochs_interp.append(epochs[-1]) + x_interp.append(xlist[-1]) + y_interp.append(ylist[-1]) + z_interp.append(zlist[-1]) + + for i,epoch in enumerate(epochs_interp): + sp3all_rs[acname]['data'][(sat,epoch,'xcoord')] = ( + x_interp[i]) + sp3all_rs[acname]['data'][(sat,epoch,'ycoord')] = ( + y_interp[i]) + sp3all_rs[acname]['data'][(sat,epoch,'zcoord')] = ( + z_interp[i]) + + epochs_interp.sort() + + epochs_rs = list(epochs_interp) + + for sat in self.sp3all[acname]['header']['sats']: + for epoch in epochs_interp: + if ((sat,epoch,'xcoord') in + sp3all_rs[acname]['data']): + sp3all_rs[acname]['data'][(sat,epoch,'Pflag')] = 1 + else: + sp3all_rs[acname]['data'][(sat,epoch,'Pflag')] = 0 + sp3all_rs[acname]['data'][(sat,epoch,'EPflag')] = 0 + sp3all_rs[acname]['data'][(sat,epoch,'Vflag')] = 0 + sp3all_rs[acname]['data'][(sat,epoch,'EVflag')] = 0 + else: + + # Get the downsample rate + downsample_rate = sample_rate/sample_rate_orig + + # Check that downsampling rate is integer + if int(downsample_rate) != downsample_rate: + logger.error(f"The given sample_rate must be an integer " + f"multiple of all of the original sampling " + f"rates.\nsample_rate is {sample_rate} but " + f"original sample rate for {acname} is " + f"{sample_rate_orig}", stack_info=True) + raise ValueError(f"Downsampling is not possible because " + f"the sampling rate is not an integer " + f"multiple of the original sampling rate " + f"for {acname}") + + + # downsampled epochs + epochs_downsampled = epochs_orig[::int(downsample_rate)] + epochs_rs = list(epochs_downsampled) + + # Copy data only at the sampling rate + for key in self.sp3all[acname]['data'].keys(): + if (key != 'epochs' and key[1] in epochs_rs): + sp3all_rs[acname]['data'][key] = (self.sp3all[acname] + ['data'][key]) + + # Update the resmapled sp3 dictionary + sp3all_rs[acname]['data']['epochs'] = epochs_rs + + # Update header information + sp3all_rs[acname]['header']['start_year'] = (epochs_rs[0].year) + sp3all_rs[acname]['header']['start_month'] = (epochs_rs[0].month) + sp3all_rs[acname]['header']['start_day'] = (epochs_rs[0].day) + sp3all_rs[acname]['header']['start_hour'] = (epochs_rs[0].hour) + sp3all_rs[acname]['header']['start_min'] = (epochs_rs[0].minute) + sp3all_rs[acname]['header']['start_sec'] = (epochs_rs[0].second) + sp3all_rs[acname]['header']['num_epochs'] = len(epochs_rs) + 
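+            # Note (added for clarity): the nominal epoch interval is set to
+            # the requested sample rate below; for unweighted centres, whose
+            # data were deliberately not interpolated above, the original
+            # interval is restored instead.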
sp3all_rs[acname]['header']['epoch_int'] = float(sample_rate)
+            if (hasattr(self,'weighted_centers') and
+                    acname not in self.weighted_centers):
+                sp3all_rs[acname]['header']['epoch_int'] = float(
+                        sample_rate_orig)
+
+        # Update sp3all with the downsampled version
+        self.sp3all = sp3all_rs
+        self.sample_rate = sample_rate
+
+
+    def rm_dv(self,dv,no_rm_dv):
+
+        """
+        Remove maneuvered satellites from AC solutions
+
+        Keyword arguments:
+        dv [array]: array of year,doy,hour,minute,prn for DV events
+        no_rm_dv [list]: list of ACs for which we want to keep maneuvered
+                         satellites; these should normally be ACs that use
+                         some sort of modelling for maneuver events
+        Updates:
+        self.sp3all [dict]
+        """
+
+        # convert center names in no_rm_dv to upper case in case they are not
+        no_rm_dv = [item.upper() for item in no_rm_dv]
+
+        # Initialize the DV-removed sp3 dictionary
+        sp3all_nodv = {}
+
+        # Loop over centres
+        for acname in self.sp3all:
+
+            if acname not in no_rm_dv:
+
+                sp3all_nodv[acname] = {}
+                sp3all_nodv[acname]['data'] = {}
+                sp3all_nodv[acname]['header'] = {}
+
+                # copy all epochs from the original dict to the new
+                # DV-removed dict
+                epochs = list(self.sp3all[acname]['data']['epochs'])
+                sp3all_nodv[acname]['data']['epochs'] = epochs
+
+                # If there is a maneuver for a satellite, remove any epochs
+                # after the maneuver
+                sats = list(self.sp3all[acname]['header']['sats'])
+                sat_accuracy = list(self.sp3all[acname]['header']['sat_accuracy'])
+                numsats = int(self.sp3all[acname]['header']['numsats'])
+                remsat = {}
+                for row in dv:
+                    if (row[0] >= min(epochs) and row[0] <= max(epochs)
+                            and row[1] in sats):
+                        logger.info(f"{row[1]} removed from {acname} solution "
+                                    f"from {row[0]} due to maneuver")
+                        remsat[row[1]] = []
+                        for epoch in epochs:
+                            if epoch >= row[0]:
+                                remsat[row[1]].append(epoch)
+
+                # if more than 50% of epochs are to be removed for this
+                # satellite, remove the satellite for the whole period
+                for sat in remsat:
+                    if len(remsat[sat])/len(epochs) > 0.5:
+                        remsat[sat] = epochs
+                        ind = sats.index(sat)
+                        del sats[ind]
+                        del sat_accuracy[ind]
+                        numsats -= 1
+                        logger.info(f"{sat} removed fully from {acname} "
+                                    f"solution because more than 50% of "
+                                    f"epochs were removed")
+
+                # copy data except for maneuvering sats
+                for key in self.sp3all[acname]['data']:
+                    if key != 'epochs':
+                        sp3all_nodv[acname]['data'][key] = (self.sp3all[acname]
+                                                                ['data'][key])
+                        if (key[0] in remsat and key[1] in remsat[key[0]]):
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'xcoord')] = 0.0
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'ycoord')] = 0.0
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'zcoord')] = 0.0
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'clock')] = 999999.999999
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'xsdev')] = ' '
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'ysdev')] = ' '
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'zsdev')] = ' '
+                            sp3all_nodv[acname]['data'][(key[0],key[1],'csdev')] = ' '
+
+                # Copy the header items to the DV-removed sp3 dictionary
+                for key in self.sp3all[acname]['header']:
+                    if key not in ['sats','sat_accuracy','numsats']:
+                        sp3all_nodv[acname]['header'][key] = (self.sp3all[acname]
+                                                                ['header'][key])
+                sp3all_nodv[acname]['header']['sats'] = sats
+                sp3all_nodv[acname]['header']['sat_accuracy'] = sat_accuracy
+                sp3all_nodv[acname]['header']['numsats'] = numsats
+
+            else:
+                sp3all_nodv[acname] = self.sp3all[acname]
+
+        self.sp3all = sp3all_nodv
+
+
+    def to_arrays(self):
+
+        """
+        Convert sp3all and sat_metadata attributes to orbits, epoch and
+        satinfo arrays, so
they can be used by OrbitComb class + + Updates: + self.weighted_centers + [list] : list of weighted centers + self.unweighted_centers + [list] : list of unweighted centers + self.orbits [dict] : orbit arrays (x,y,z) of each AC (shape number + of observations by 3) + self.epochs [array] : epochs corresponding to rows of each orbits + array (same length as each of the arrays in + orbits) + self.satinfo [array]: full satellite information corresponding to + each orbits array (same length as epochs but + with four columns for constellation ID, PRN, + SVN and satellite block) + self.cenflags [dict]: center flags to indicate whether a center is + weighted or unweighted + self.clocks [dict]: clocks/clock sdev of each AC from the sp3 file + (shape number of observations by 2) + """ + + # If not previously filtered, and there is an ac_contribs attribute, + # filter sp3all to only include weighted and unweighted data, + # and update list of weighted and unweighted centers + if (not hasattr(self,'filtered') or + (hasattr(self,'filtered') and not self.filtered) ): + + if hasattr(self,'ac_contribs'): + + # filter sp3all attribute + self.filter_contribs() + + else: + + # assign weighted_centers to all centers, and unweighted centers + # to no center + self.weighted_centers = list(self.sp3all.keys()) + self.unweighted_centers = [] + + # Get list of all epochs and satellites across all the sp3 dicts from + # different weighted centers + epochs = [] + sats = [] + for acname in self.weighted_centers: + for epoch in self.sp3all[acname]['data']['epochs']: + if epoch not in epochs: + epochs.append(epoch) + for sat in self.weighted_sats[acname]: + if sat not in sats: + sats.append(sat) + epochs.sort() + sats.sort() + + # Initializations + orbits = {} + clocks = {} + all_epochs = [] + satinfo = np.zeros((len(epochs)*len(sats),4),dtype=object) + satinfo[:,0] = np.zeros(len(epochs)*len(sats),dtype=str) + satinfo[:,3] = np.zeros(len(epochs)*len(sats),dtype=str) + + # loop through centers, epochs and sats, and fill in orbits, + # all_epochs and satinfo + for ac_counter,acname in enumerate(self.sp3all.keys()): + + # Initialize orbits[acname]; missing values will be nan + orbits[acname] = np.full((len(epochs)*len(sats),3),np.nan) + clocks[acname] = np.full((len(epochs)*len(sats),2),np.nan) + + c = 0 + for epoch in epochs: + for sat in sats: + + # Check if there is data at this center/sat/epoch + if (sat,epoch,'xcoord') in self.sp3all[acname]['data']: + + xcoord = (self.sp3all[acname] + ['data'][(sat,epoch,'xcoord')]) + if xcoord != 0.0 and xcoord < 999999: # missing/bad values + orbits[acname][c,0] = 1000.0*xcoord + + if (sat,epoch,'ycoord') in self.sp3all[acname]['data']: + + ycoord = (self.sp3all[acname] + ['data'][(sat,epoch,'ycoord')]) + if ycoord != 0.0 and ycoord < 999999: # missing/bad values + orbits[acname][c,1] = 1000.0*ycoord + + if (sat,epoch,'zcoord') in self.sp3all[acname]['data']: + + zcoord = (self.sp3all[acname] + ['data'][(sat,epoch,'zcoord')]) + if zcoord != 0.0 and zcoord < 999999: # missing/bad values + orbits[acname][c,2] = 1000.0*zcoord + + if (sat,epoch,'clock') in self.sp3all[acname]['data']: + + clock = (self.sp3all[acname] + ['data'][(sat,epoch,'clock')]) + if clock != 0.0 and clock < 999999: # missing/bad values + clocks[acname][c,0] = clock + + if (sat,epoch,'csdev') in self.sp3all[acname]['data']: + + csdev = (self.sp3all[acname] + ['data'][(sat,epoch,'csdev')]) + try: + if float(csdev) != 0.0: # missing/bad values + clocks[acname][c,1] = float(csdev) + except: + pass + + # Only fill in 
self.epochs and satinfo once (as they are
+                    # the same for all ACs)
+                    if ac_counter == 0:
+
+                        all_epochs.append(epoch)
+
+                        # Constellation ID
+                        satinfo[c,0] = sat[0]
+
+                        # PRN
+                        satinfo[c,1] = int(sat[1:])
+
+                        if hasattr(self,'sat_metadata'):
+
+                            # svn
+                            satinfo[c,2] = (self.sat_metadata.get_svn(sat[0],
+                                                int(sat[1:]),epoch))
+
+                            # satellite block
+                            satinfo[c,3] = (self.sat_metadata.
+                                    get_sat_identifier(sat[0],satinfo[c,2]).
+                                    block)
+
+                        else:
+
+                            # If no metadata, set SVN as NaN, and block as
+                            # 'Unknown'
+                            satinfo[c,2] = np.nan
+                            satinfo[c,3] = 'Unknown'
+                    c += 1
+
+        # Create center flags
+        cenflags = {}
+        for acname in orbits:
+            if acname in self.weighted_centers:
+                cenflags[acname] = 'weighted'
+            elif acname in self.unweighted_centers:
+                cenflags[acname] = 'unweighted'
+            else:
+                raise ValueError(f"center {acname} not in weighted_centers "
+                                 "nor in unweighted_centers")
+
+        # Update attributes
+        self.orbits = orbits
+        self.clocks = clocks
+        self.epochs = all_epochs
+        self.satinfo = satinfo
+        self.cenflags = cenflags
+
+
+
+class OrbitComb:
+
+    """
+    class of orbit combination
+
+    """
+
+    def __init__(self,orbits,epochs,satinfo,cenflags,weighted_cens_by_sys,
+                 unweighted_cens_by_sys,weighted_sats,unweighted_sats,
+                 clocks=None):
+
+        """
+        initialize OrbitComb class
+
+        Input arguments:
+        orbits [dict]  : dictionary containing all individual orbit arrays
+                         which must be all the same size (Number of
+                         observations by 3 for x,y,z)
+        epochs [array] : an array of the same length as each of
+                         the arrays in orbits
+        satinfo [array]: an array of the same length as epochs but with 4
+                         columns for constellation ID, PRN, SVN and
+                         satellite block
+        cenflags [dict]: center flags to indicate whether a center is
+                         weighted or unweighted
+        clocks [dict]  : dictionary containing all individual clocks and
+                         their standard deviations directly from sp3 files
+                         of the individual ACs (all the same size: Number
+                         of observations by 2 for clock,csdev)
+
+        Updates:
+        self.orbits [dict]
+        self.epochs [array]
+        self.satinfo [array]
+        self.cenflags [dict]
+        self.weighted_centers [list]
+        self.unweighted_centers [list]
+        self.cen_weights [dict]
+        self.sat_sigmas [dict]
+        self.sat_weights [dict]
+        self.exclude_highrms [list of tuples]
+        self.unweighted_max_high_satrms [list]
+        self.unweighted_high_tra [list of tuples]
+        self.exclude_lowcen [list]
+        self.clocks [dict]
+
+        """
+
+        # Check the given orbits and assign to attribute
+        if not isinstance(orbits,dict):
+            logger.error("\nThe given orbits must be a dictionary\n",
+                         stack_info=True)
+            raise TypeError("orbits is not of type dictionary!")
+        if not orbits:
+            logger.error("\nThere must be at least one dictionary item in "
+                         "orbits\n", stack_info=True)
+            raise ValueError("orbits is empty!")
+        if not all(isinstance(key,str) for key in orbits.keys()):
+            logger.error("\nKeys of orbits dictionary must be strings\n",
+                         stack_info=True)
+            raise TypeError("orbits keys must be strings!")
+        if not all(item.shape == list(orbits.values())[0].shape
+                   for item in orbits.values()):
+            logger.error("\nArrays in orbits dictionary must be all of the "
+                         "same shape\n", stack_info=True)
+            raise ValueError("Arrays in orbits dictionary are not of the same "
+                             "shape!")
+        for item in orbits.values():
+            checkutils.check_coords(item,minrows=1)
+
+        self.orbits = orbits
+
+        # Check the given epochs and assign to attribute
+        if len(epochs) != len(list(orbits.values())[0]):
+            logger.error(f"\nThe given epochs must be of length "
+                         f"{len(list(orbits.values())[0])}\nLength of "
+                         f"the given epochs is
{len(epochs)}\n", + stack_info=True) + raise ValueError(f"The given epochs must be of length " + f"{len(list(orbits.values())[0])}") + if not all(isinstance(item,datetime.datetime) for item in epochs): + logger.error("\nThe given epoch can only contain " + "datetime.datetime objects\n", stack_info=True) + raise TypeError("There are non-datetime items in epochs") + + self.epochs = epochs + + # Check the given satinfo and assign to attribute + if np.shape(satinfo) != (list(orbits.values())[0].shape[0],4): + logger.error(f"\nThe given satinfo must be a " + f"{list(orbits.values())[0].shape[0]} by 4 " + f"array\nShape of the given satinfo: " + f"{np.shape(satinfo)}\n", stack_info=True) + raise ValueError(f"The given satinfo must be a " + f"{list(orbits.values())[0].shape[0]} by 4 " + f"array") + for row in satinfo: + if not isinstance(row[0],str): + logger.error(f"\nThe first column of satinfo " + f"(constellation ID) must be strings\n", + stack_info=True) + raise TypeError(f"The first column of satinfo " + f"(constellation ID) must be strings") + if not isinstance(row[1],int): + logger.error(f"\nThe second column of satinfo (PRN number) " + f"must be integers\n",stack_info=True) + raise TypeError(f"The second column of satinfo (PRN " + f"number) must be integers") + if not isinstance(row[2],int) and not np.isnan(row[2]): + logger.error(f"\nThe third column of satinfo (SVN number) " + f"must be integers or nans\n", stack_info=True) + raise TypeError(f"The third column of satinfo (SVN number " + f") must be integers or nans") + if not isinstance(row[3],str): + logger.error(f"\nThe fourth column of satinfo (satellite " + f"block) must be strings\n", stack_info=True) + raise TypeError(f"The fourth column of satinfo (satellite " + f"block) must be strings") + if any(np.isnan(svn) for svn in satinfo[:,2]): + logger.warning("\nThere are unknown SVN numbers in satinfo. 
Only "
+                           "PRN information is used for identifying "
+                           "satellites.\n")
+        if 'Unknown' in satinfo[:,3]:
+            logger.warning("\nThere are unknown satellite blocks in "
+                           "satinfo.\n")
+        self.satinfo = satinfo
+
+        # Check the given cenflags
+        if not isinstance(cenflags,dict):
+            logger.error("\nThe given cenflags must be a dictionary\n",
+                         stack_info=True)
+            raise TypeError("cenflags is not of type dictionary!")
+        for acname in orbits:
+            if acname not in cenflags:
+                logger.error(f"\ncenter {acname} has not been assigned a "
+                             "flag\n", stack_info=True)
+                raise ValueError(f"center {acname} present in orbits but "
+                                 "not in cenflags")
+            if cenflags[acname] not in ['weighted','unweighted']:
+                logger.error("\ncenflags items can only be either 'weighted' "
+                             "or 'unweighted'\n")
+                raise ValueError(f"center flag for {acname}: "
+                                 f"{cenflags[acname]} not recognized!")
+        self.cenflags = cenflags
+
+        self.weighted_cens_by_sys = weighted_cens_by_sys
+        self.unweighted_cens_by_sys = unweighted_cens_by_sys
+        self.weighted_sats = weighted_sats
+        self.unweighted_sats = unweighted_sats
+
+        # Get a list of weighted and unweighted centers
+        weighted_centers = []
+        unweighted_centers = []
+        for acname in self.orbits:
+            if self.cenflags[acname] == 'weighted':
+                weighted_centers.append(acname)
+            elif self.cenflags[acname] == 'unweighted':
+                unweighted_centers.append(acname)
+            else:
+                raise ValueError(f"center flag {self.cenflags[acname]} for "
+                                 f"center {acname} not recognised!")
+        self.weighted_centers = weighted_centers
+        self.unweighted_centers = unweighted_centers
+
+        # Initialize cen_weights
+        cen_weights = {}
+        for acname in orbits:
+            cen_weights[acname] = 1.0
+        self.cen_weights = cen_weights
+
+        # Initialize sat_sigmas and sat_weights
+        sat_sigmas = {}
+        sat_weights = {}
+        for row in satinfo:
+            system_id = row[0]
+            prn = row[1]
+            svn = row[2]
+            if (system_id,prn,svn) not in sat_sigmas:
+                sat_sigmas[system_id,prn,svn] = 1.0
+                sat_weights[system_id,prn,svn] = 1.0
+        self.sat_sigmas = sat_sigmas
+        self.sat_weights = sat_weights
+
+        # Initialize exclusions, and satflags and orbflags by calling flags
+        self.exclude_highrms = []
+        self.unweighted_max_high_satrms = []
+        self.unweighted_high_tra = []
+        self.exclude_lowcen = []
+        self.flags()
+
+        self.clocks = clocks
+
+    def flags(self):
+
+        """
+        Create orbflags and satflags
+
+        Updates:
+        self.orbflags [dict]: orbit data flags for each AC (same shape as
+                              self.orbits)
+        self.satflags [dict]: overall satellite flags for each AC (dict
+                              with keys as (ac,sat))
+        self.ngood [dict]   : number of good (used) data for each satellite
+                              for each ac (ac,sat)
+        """
+
+        # Get a list of all satellites
+        sats = []
+        systems = []
+        blocks = []
+        sats_full = []
+        for row in self.satinfo:
+            sat = (row[0],row[1],row[2])
+            satfull = (row[0],row[1],row[2],row[3])
+            sys = row[0]
+            blk = row[3]
+            if sat not in sats:
+                sats.append(sat)
+            if sys not in systems:
+                systems.append(sys)
+            if blk not in blocks:
+                blocks.append(blk)
+            if satfull not in sats_full:
+                sats_full.append(satfull)
+        sats.sort()
+        systems.sort()
+        blocks.sort()
+        sats_full.sort()
+
+        logger.debug("sats %s", sats)
+
+        # Initialize
+        orbflags = {}
+        satflags = {}
+        ngood = {}
+
+        logger.debug(f"acnames: {list(self.orbits.keys())}")
+        # Loop through centers, and fill in orbflags and satflags
+        for acname in self.orbits:
+
+            # Initialize dicts needed for inspection on satflags
+            missing_val = {}
+            missing_sat = {}
+            missing_sys = {}
+            missing_blk = {}
+            unweighted_sys = {}
+            for sat in sats:
+                missing_val[acname,sat] = False
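+                # (flag semantics, added for clarity: missing_val marks a
+                # satellite that has some missing epochs, while missing_sat /
+                # missing_sys / missing_blk start as True and are cleared as
+                # soon as any value is found for that satellite, system or
+                # block in the data loop below)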
+                missing_sat[acname,sat] = True
+            for sys in systems:
+                missing_sys[acname,sys] = True
+            for blk in blocks:
+                missing_blk[acname,blk] = True
+
+            # Initialize orbflags[acname]; default to missing_val
+            orbflags[acname] = np.full_like(self.orbits[acname],'missing_val',
+                                            dtype=object)
+
+            # Loop through all data rows, and determine the flags
+            for r,row in enumerate(self.orbits[acname]):
+
+                sat = (self.satinfo[r,0],self.satinfo[r,1],
+                       self.satinfo[r,2])
+                sys = self.satinfo[r,0]
+                blk = self.satinfo[r,3]
+                prn = sat[0]+str(sat[1]).zfill(2)
+                for c in range(0,3):
+                    if not np.isnan(row[c]):
+                        orbflags[acname][r,c] = 'okay'
+                        missing_sat[acname,sat] = False
+                        missing_sys[acname,sys] = False
+                        missing_blk[acname,blk] = False
+                        if (acname in self.unweighted_sats
+                                and prn in self.unweighted_sats[acname]):
+                            orbflags[acname][r,c] = 'unweighted_sat'
+                        if (acname in self.unweighted_cens_by_sys
+                                and sys in self.unweighted_cens_by_sys[acname]):
+                            orbflags[acname][r,c] = 'unweighted_sys'
+                        if (acname,sat) in self.exclude_highrms:
+                            orbflags[acname][r,c] = 'excluded_sat'
+                        if sat in self.exclude_lowcen:
+                            orbflags[acname][r,c] = 'excluded_sat_all'
+                    else:
+                        missing_val[acname,sat] = True
+
+            # Fill in satflags from the inspections performed
+            for i,sat in enumerate(sats):
+                sys = sat[0]
+                blk = sats_full[i][3]
+                prn = sat[0]+str(sat[1]).zfill(2)
+                if sat in self.exclude_lowcen:
+                    satflags[acname,sat] = 'excluded_sat_all'
+                elif missing_sys[acname,sys]:
+                    satflags[acname,sat] = 'missing_sys'
+                elif missing_blk[acname,blk]:
+                    satflags[acname,sat] = 'missing_blk'
+                elif missing_sat[acname,sat]:
+                    satflags[acname,sat] = 'missing_sat'
+                elif (acname,sat) in self.exclude_highrms:
+                    satflags[acname,sat] = 'excluded_sat'
+                elif (acname in self.unweighted_cens_by_sys
+                        and sys in self.unweighted_cens_by_sys[acname]):
+                    satflags[acname,sat] = 'unweighted_sys'
+                elif (acname in self.unweighted_sats
+                        and prn in self.unweighted_sats[acname]):
+                    satflags[acname,sat] = 'unweighted_sat'
+                elif missing_val[acname,sat]:
+                    satflags[acname,sat] = 'missing_val'
+                else:
+                    satflags[acname,sat] = 'okay'
+
+            # Revise the orbflags so if there is a missing sat, change
+            # missing_val flags to missing_sat flags for that satellite
+            for r,row in enumerate(self.orbits[acname]):
+                sat = (self.satinfo[r,0],self.satinfo[r,1],self.satinfo[r,2])
+                if satflags[acname,sat] == 'missing_sys':
+                    for c in range(0,3):
+                        orbflags[acname][r,c] = 'missing_sys'
+                if satflags[acname,sat] == 'missing_blk':
+                    for c in range(0,3):
+                        orbflags[acname][r,c] = 'missing_blk'
+                if satflags[acname,sat] == 'missing_sat':
+                    for c in range(0,3):
+                        orbflags[acname][r,c] = 'missing_sat'
+
+        # Revise orbflags and satflags to add missing_val_other,
+        # missing_sat_other and excluded_sat_other flags
+        for acname in orbflags:
+            for r,row in enumerate(orbflags[acname]):
+                sat = (self.satinfo[r,0],self.satinfo[r,1],self.satinfo[r,2])
+                if (acname,sat) not in ngood:
+                    ngood[acname,sat] = 0
+                for c in range(0,3):
+                    if orbflags[acname][r,c] == 'okay':
+                        ac_others = list(self.weighted_centers)
+                        if acname in ac_others:
+                            ac_others.remove(acname)
+                        other_flags = []
+                        for ac_other in ac_others:
+                            other_flags.append(orbflags[ac_other][r,c])
+                        if 'missing_sat' in other_flags:
+                            worst_flag = 'missing_sat'
+                        elif 'missing_blk' in other_flags:
+                            worst_flag = 'missing_blk'
+                        elif 'missing_sys' in other_flags:
+                            worst_flag = 'missing_sys'
+                        elif 'excluded_sat' in other_flags:
+                            worst_flag = 'excluded_sat'
+                        elif 'missing_val' in other_flags:
+                            worst_flag = 'missing_val'
+                        else:
+ worst_flag = 'okay' + if worst_flag != 'okay': + orbflags[acname][r,c] = worst_flag + '_other' + if (orbflags[acname][r,c] not in + ['missing_val','excluded_sat','missing_sat', + 'missing_blk','missing_sys','excluded_sat_all']): + ngood[acname,sat] += 1 + + for acname in orbflags: + for sat in sats: + if satflags[acname,sat] in ['okay','missing_val']: + ac_others = list(self.weighted_centers) + if acname in ac_others: + ac_others.remove(acname) + other_flags = [] + for ac_other in ac_others: + other_flags.append(satflags[ac_other,sat]) + if 'missing_sat' in other_flags: + worst_flag = 'missing_sat' + elif 'missing_blk' in other_flags: + worst_flag = 'missing_blk' + elif 'missing_sys' in other_flags: + worst_flag = 'missing_sys' + elif 'excluded_sat' in other_flags: + worst_flag = 'excluded_sat' + elif 'missing_val' in other_flags: + worst_flag = 'missing_val' + else: + worst_flag = 'okay' + if satflags[acname,sat] == 'okay': + if worst_flag != 'okay': + satflags[acname,sat] = worst_flag + '_other' + elif satflags[acname,sat] == 'missing_val': + if worst_flag not in ['okay','missing_val']: + satflags[acname,sat] = worst_flag + '_other' + + # Update attributes + self.orbflags = orbflags + self.satflags = satflags + self.ngood = ngood + + + def transform(self,transformations): + + """ + Transform the orbits using the given transformation parameters + + Keyword arguments: + transformations [dict] : transformation parameters for each ac + + Updates: + self.orbits [dict] + + """ + + orbits_transformed = {} + for acname in self.orbits: + + if acname[0:3] in transformations: + + helm = Helmert(helmert=transformations[acname[0:3]], + coords0=self.orbits[acname]) + helm.transform() + orbits_transformed[acname] = helm.coords1 + else: + orbits_transformed[acname] =self.orbits[acname] + + self.orbits = orbits_transformed + + + def weight(self,cen_wht_method='global',sat_wht_method='RMS_L1', + l2_dx_threshold=1e-8,l2_maxiter=1,bracket_sigscale=3, + bracket_interval=None,bracket_maxiter=100, + bisection_precision_level=None,bisection_sigscale=0.1, + bisection_precision_limits=[1e-15,1e-13], + bisection_maxiter=100): + + """ + Perform the orbit weighting + + Input arguments: + cen_wht_method [str], optional : method for weighting + the centres + options: + 'global', default : weight each centre + based on the whole + constellations being + combined + 'by_constellation': weight each centre + by constellation + 'by_block' : weight each centre by + satellite block + 'by_sat' : weight each centre by + satellite + sat_wht_method [str], optional : method for weighting + the satellites + options: + 'RMS_L1', default : satellite weights + from sat-specific RMS + of a L1-norm solution + of helmert + transformation between + each orbit and the + mean orbit, averaged + over the centres + Other optional arguments (refer to the documentations of the + relevant Helmert class methods for full + descriptions): + l2_dx_threshold, l2_maxiter : dx_threshold and + l2_maxiter options + passed to + Helmert.l2norm + bracket_interval, + bracket_sigscale, bracket_maxiter : interval, sigscale + and maxiter options + passed to + Helmert.bracket + bisection_precision_level, + bisection_sigscale, + bisection_precision_limits, + bisection_maxiter : precision_level, + sigscale, + precision_limits, and + maxiter options + passed to + Helmert.bisection + + Updates: + self.cen_wht_method [str] : center weighting + method + self.sat_wht_method [str] : satellite weighting + method + self.cen_weights [dict] : centre weights + 
self.sat_sigmas [dict]              : satellite-specific
+                                             sigmas
+        self.sat_weights [dict]             : satellite weights
+
+        """
+
+        # Check the given cen_wht_method and sat_wht_method
+        if cen_wht_method not in (['global','by_constellation','by_block',
+                                   'by_sat']):
+            logger.error(f"\nCentre weighting method {cen_wht_method} not "
+                         f"recognized!\n", stack_info=True)
+            raise ValueError(f"Centre weighting method {cen_wht_method} not "
+                             f"recognized!")
+
+        if sat_wht_method not in (['RMS_L1']):
+            logger.error(f"\nSatellite weighting method {sat_wht_method} not "
+                         f"recognized!\n", stack_info=True)
+            raise ValueError(f"Satellite weighting method {sat_wht_method} "
+                             f"not recognized!")
+
+        # Take a simple mean of the orbits from weighted centres
+        orbits_tuple = ()
+        masks_tuple = ()
+        logger.debug(f"weighted: {self.weighted_centers}")
+        logger.debug(f"unweighted: {self.unweighted_centers}")
+        for acname in self.weighted_centers:
+
+            # Exclude if sat/epoch data is missing/excluded for this center
+            okay_rows = np.where(
+                (self.orbflags[acname]!='missing_val').all(axis=1) &
+                (self.orbflags[acname]!='unweighted_sys').all(axis=1) &
+                (self.orbflags[acname]!='unweighted_sat').all(axis=1) &
+                (self.orbflags[acname]!='excluded_sat').all(axis=1) &
+                (self.orbflags[acname]!='excluded_sat_all').all(axis=1) &
+                (self.orbflags[acname]!='missing_sys').all(axis=1) &
+                (self.orbflags[acname]!='missing_blk').all(axis=1) &
+                (self.orbflags[acname]!='missing_sat').all(axis=1) )[0]
+            mask = np.full_like(self.orbflags[acname],True,dtype=bool)
+            mask[okay_rows,:] = False
+            orbits_tuple = orbits_tuple + (self.orbits[acname],)
+            masks_tuple = masks_tuple + (mask,)
+        orbits_masked = np.ma.masked_array(orbits_tuple,masks_tuple)
+        orbitmean = np.ma.average(orbits_masked,axis=0)
+        orbitmean = orbitmean.filled(np.nan)
+        logger.debug(f"orbits_masked: {orbits_masked} {np.shape(orbits_masked)}")
+        logger.debug(f"orbitmean: {orbitmean} {np.shape(orbitmean)}")
+        logger.debug(f"orbitmean first epoch: {orbitmean[0:26,:]}")
+
+        # Special case of only one weighted orbit (used for comparisons)
+        if len(self.weighted_centers) == 1:
+            self.cen_wht_method = 'global'
+            cen_weights = {}
+            for acname in self.weighted_centers:
+                cen_weights[acname] = 1.0
+            self.cen_weights = cen_weights
+
+        else:
+
+            # Loop through centres, estimate helmert parameters, and
+            # calculate weights
+            cen_weights = {}
+            satsig2 = {} # satellite-specific sigma^2s for each AC
+            for acname in self.orbits:
+
+                # weighted_center flag
+                if acname in self.weighted_centers:
+                    weighted_center = True
+                elif acname in self.unweighted_centers:
+                    weighted_center = False
+                else:
+                    raise ValueError(f"center {acname} not in weighted_centers"
+                                     " nor in unweighted_centers")
+
+                logger.debug(f"orbits {acname} {self.orbits[acname]} "
+                             f"{np.shape(self.orbits[acname])}")
+                logger.debug(f"orbflags {acname} {self.orbflags[acname]}")
+
+                # Exclude if sat/epoch data is missing for this center
+                okay_rows = np.where(
+                    (self.orbflags[acname]!='missing_val').all(axis=1) &
+                    (self.orbflags[acname]!='missing_sys').all(axis=1) &
+                    (self.orbflags[acname]!='missing_blk').all(axis=1) &
+                    (self.orbflags[acname]!='missing_sat').all(axis=1) )[0]
+                mask = np.full_like(self.orbflags[acname],True,dtype=bool)
+                mask[okay_rows,:] = False
+                orbit_masked = np.ma.masked_array(self.orbits[acname],mask)
+
+                logger.debug(f"orbit_masked {acname}: {orbit_masked}")
+
+                # Create an instance of Helmert class between the centre orbit
+                # and the mean orbit
+                helm = Helmert(coords0=orbit_masked,coords1=orbitmean,
+                               satinfo=self.satinfo,
+                               orbflags=self.orbflags[acname],
+                               weighted_center=weighted_center,acname=acname)
+
+                # Perform L2 norm (to get the a priori helmert parameters)
+                helm.l2norm(dx_threshold=l2_dx_threshold,maxiter=l2_maxiter)
+
+                # Perform L1 norm solution
+                helm.bracket(interval=bracket_interval,
+                             sigscale=bracket_sigscale,maxiter=bracket_maxiter)
+                helm.bisection(precision_level=bisection_precision_level,
+                               sigscale=bisection_sigscale,
+                               precision_limits=bisection_precision_limits,
+                               maxiter=bisection_maxiter)
+
+                # Calculate center weight
+                if acname in self.weighted_centers:
+                    if cen_wht_method == 'global':
+                        logger.debug(f"abdev {acname}: {helm.abdev}")
+                        cen_weights[acname] = 1/helm.abdev_wht**2
+                        logger.debug(f"cen_weight {acname}: "
+                                     f"{cen_weights[acname]}")
+                    elif cen_wht_method == 'by_constellation':
+                        for sys_id in helm.sys_abdev.keys():
+                            if sys_id in self.weighted_cens_by_sys[acname]:
+                                cen_weights[acname,sys_id] = (
+                                        1/helm.sys_abdev[sys_id]**2)
+                    elif cen_wht_method == 'by_block':
+                        for block in helm.blk_abdev.keys():
+                            cen_weights[acname,block] = (1/
+                                    helm.blk_abdev[block]**2)
+                    elif cen_wht_method == 'by_sat':
+                        for (sys_id,prn,svn) in helm.sat_abdev.keys():
+                            sat = (sys_id,prn,svn)
+                            if (self.satflags[acname,sat] != 'excluded_sat'
+                                    and self.satflags[acname,sat] != 'unweighted_sys'
+                                    and self.satflags[acname,sat] != 'unweighted_sat'):
+                                cen_weights[acname,sys_id,prn,svn] = (
+                                        1/helm.sat_abdev[sys_id,prn,svn]**2)
+
+                # Calculate satellite-specific sigma^2's
+                logger.debug(f"sat_rms {acname}: {helm.sat_rms}")
+                for (system_id,prn,svn) in helm.sat_rms:
+
+                    # Only include data if the center is weighted, and the
+                    # satellite is not missing/excluded from any weighted center
+                    sat = (system_id,prn,svn)
+                    logger.debug(f"acname, sat: {acname} {sat}")
+                    logger.debug(f"satflags: {self.satflags[acname,sat]}")
+                    logger.debug(f"weighted_centers: {self.weighted_centers}")
+                    if (acname in self.weighted_centers
+                            and self.satflags[acname,sat] in
+                            ['okay','missing_val_other','missing_val']):
+                        if sat_wht_method == 'RMS_L1':
+                            if (system_id,prn,svn) not in satsig2:
+                                satsig2[system_id,prn,svn] = {}
+                            satsig2[system_id,prn,svn][acname] = (
+                                    helm.sat_rms[system_id,prn,svn]**2)
+
+            # End of centers loop
+
+            logger.debug(f"satsig2: {satsig2}")
+            # Take the average of satellite sigmas over all the weighted centres
+            sat_sigmas = {}
+            sat_weights = {}
+            for (system_id,prn,svn) in satsig2.keys():
+                sat_sigmas[system_id,prn,svn] = np.sqrt(
+                        np.mean(list(satsig2[system_id,prn,svn].values())))
+
+                # the legacy software way of doing this
+                if old_version is True:
+                    sat_sigmas[system_id,prn,svn] = np.sqrt(
+                            np.mean(list(satsig2[system_id,prn,svn].values()))
+                            + 1/len(list(satsig2[system_id,prn,svn].values())))
+
+                # Satellite weights (for statistics only)
+                sat_weights[system_id,prn,svn] = (
+                        1/sat_sigmas[system_id,prn,svn]**2)
+
+            # Update attributes
+            self.cen_weights = cen_weights
+            self.sat_sigmas = sat_sigmas
+            self.sat_weights =
sat_weights + self.cen_wht_method = cen_wht_method + + self.sat_wht_method = sat_wht_method + + logger.debug(f"cen_weights: {self.cen_weights}") + logger.debug(f"sat_sigmas: {self.sat_sigmas}") + logger.debug(f"sat_weights: {self.sat_weights}") + + + def combine(self,l2_dx_threshold=1e-8,l2_maxiter=1,bracket_sigscale=3, + bracket_interval=None,bracket_maxiter=100, + bisection_precision_level=None,bisection_sigscale=0.1, + bisection_precision_limits=[1e-15,1e-13], + bisection_maxiter=100): + + """ + Perform the orbit weighting and combination + + Optional arguments (refer to the documentations of the relevant + Helmert class methods for full descriptions): + l2_dx_threshold, l2_maxiter : dx_threshold and l2_maxiter + options passed to Helmert.l2norm + bracket_interval, + bracket_sigscale, bracket_maxiter: interval, sigscale and maxiter + options passed to + Helmert.bracket + bisection_precision_level, + bisection_sigscale, + bisection_precision_limits, + bisection_maxiter : precision_level, sigscale, + precision_limits, and maxiter + options passed to + Helmert.bisection + + Updates: + self.combined_orbit [array] : combined orbit + self.cen_rms [dict] : RMS of centers + self.cen_abdev [dict] : absolute deviation of centres + self.sat_rms [dict] : overal sat-specific rms's + self.sat_abdev [dict] : overal sat-specific abdev's + + """ + + # Take a weighted mean of the orbits from weighted centres + orbits_tuple = () + masks_tuple = () + for acname in self.weighted_centers: + + # Exclude if sat/epoch data is missing/excluded/unweighted for this center + okay_rows = np.where( + (self.orbflags[acname]!='missing_val').all(axis=1) & + (self.orbflags[acname]!='unweighted_sys').all(axis=1) & + (self.orbflags[acname]!='unweighted_sat').all(axis=1) & + (self.orbflags[acname]!='excluded_sat').all(axis=1) & + (self.orbflags[acname]!='excluded_sat_all').all(axis=1) & + (self.orbflags[acname]!='missing_sys').all(axis=1) & + (self.orbflags[acname]!='missing_blk').all(axis=1) & + (self.orbflags[acname]!='missing_sat').all(axis=1) )[0] + mask = np.full_like(self.orbflags[acname],True,dtype=bool) + mask[okay_rows,:] = False + orbits_tuple = orbits_tuple + (self.orbits[acname],) + masks_tuple = masks_tuple + (mask,) + orbits_masked = np.ma.masked_array(orbits_tuple,masks_tuple) + logger.debug(f"orbits_masked: {orbits_masked}") + + if self.cen_wht_method == 'global': + + wcen = [] + for acname in self.weighted_centers: + wcen.append(self.cen_weights[acname]) + orbitmean = np.ma.average(orbits_masked,axis=0,weights=wcen) + + elif self.cen_wht_method == 'by_constellation': + + m = np.shape(orbits_masked)[1] + orbitmean = np.ma.array(np.full((m,3),np.nan)) + for c,row in enumerate(self.satinfo): + wcen = [] + sys_id = row[0] + for acname in self.weighted_centers: + if (acname,sys_id) in self.cen_weights.keys(): + wcen.append(self.cen_weights[acname,sys_id]) + else: + wcen.append(0) + orbitmean[c] = np.ma.average( + orbits_masked[:,c],axis=0,weights=wcen) + + elif self.cen_wht_method == 'by_block': + + m = np.shape(orbits_masked)[1] + orbitmean = np.ma.array(np.full((m,3),np.nan)) + for c,row in enumerate(self.satinfo): + wcen = [] + block = row[3] + for acname in self.weighted_centers: + if (acname,block) in self.cen_weights.keys(): + wcen.append(self.cen_weights[acname,block]) + else: + wcen.append(0) + orbitmean[c] = np.ma.average( + orbits_masked[:,c],axis=0,weights=wcen) + + elif self.cen_wht_method == 'by_sat': + + m = np.shape(orbits_masked)[1] + orbitmean = np.ma.array(np.full((m,3),np.nan)) + for c,row in 
enumerate(self.satinfo):
+                wcen = []
+                sys_id = row[0]
+                prn = row[1]
+                svn = row[2]
+                for acname in self.weighted_centers:
+                    if (acname,sys_id,prn,svn) in self.cen_weights.keys():
+                        wcen.append(self.cen_weights[acname,sys_id,prn,svn])
+                    else:
+                        wcen.append(0)
+                orbitmean[c] = np.ma.average(
+                        orbits_masked[:,c],axis=0,weights=wcen)
+
+        orbitmean = orbitmean.filled(np.nan)
+        logger.debug(f"orbitmean: {orbitmean}")
+
+
+
+        # Initialize transformed orbits
+        transformed_orbits = {}
+        transform_params = {}
+
+        # Initialize center rms and abdev
+        cen_rms = {}
+        cen_rms_wht = {}
+        cen_abdev = {}
+        cen_abdev_wht = {}
+
+        # Loop through centres and estimate helmert parameters
+        for acname in self.orbits:
+
+            # weighted_center flag
+            if acname in self.weighted_centers:
+                weighted_center = True
+            elif acname in self.unweighted_centers:
+                weighted_center = False
+            else:
+                raise ValueError(f"center {acname} not in weighted_centers "
+                                 "nor in unweighted_centers")
+
+            # using satellite sigmas, create sigmas for weighted least-squares
+            sigmas = np.ones_like(self.orbits[acname])
+            logger.debug(f"sat_sigmas: {self.sat_sigmas}")
+            for r in range(len(self.orbits[acname])):
+                system_id = self.satinfo[r,0]
+                prn = self.satinfo[r,1]
+                svn = self.satinfo[r,2]
+                if (system_id,prn,svn) in self.sat_sigmas:
+                    sigmas[r,0] = self.sat_sigmas[system_id,prn,svn]
+                    sigmas[r,1] = self.sat_sigmas[system_id,prn,svn]
+                    sigmas[r,2] = self.sat_sigmas[system_id,prn,svn]
+                else:
+                    logger.debug(f"({system_id},{prn},{svn}) not in "
+                                 "sat_sigmas. The satellite is likely "
+                                 "missing from at least one center. Setting"
+                                 " the sigma as 1.")
+
+            # Exclude if sat/epoch data is missing for this center
+            okay_rows = np.where(
+                (self.orbflags[acname]!='missing_val').all(axis=1) &
+                (self.orbflags[acname]!='missing_sys').all(axis=1) &
+                (self.orbflags[acname]!='missing_blk').all(axis=1) &
+                (self.orbflags[acname]!='missing_sat').all(axis=1) &
+                (self.orbflags[acname]!='excluded_sat_all').all(axis=1))[0]
+            mask = np.full_like(self.orbflags[acname],True,dtype=bool)
+            mask[okay_rows,:] = False
+            orbit_masked = np.ma.masked_array(self.orbits[acname],mask)
+
+            # Create an instance of Helmert class between the centre orbit and
+            # the mean orbit
+            helm = Helmert(coords0=orbit_masked,coords1=orbitmean,
+                           sigmas1=sigmas,satinfo=self.satinfo,
+                           orbflags=self.orbflags[acname],
+                           weighted_center=weighted_center)
+
+            # Perform L2 norm (to get the a priori helmert parameters)
+            helm.l2norm(dx_threshold=l2_dx_threshold,maxiter=l2_maxiter)
+
+            # Reset sigmas (and hence satellite weights) to one
+            helm.sigmas1 = np.ones_like(sigmas)
+            helm.sigmas1_flt = np.ones_like(helm.sigmas1_flt)
+
+            # Perform L1 norm solution
+
+            # Special case of only one weighted center; avoid unnecessary
+            # iterations for the weighted center
+            if (len(self.weighted_centers) == 1 and weighted_center):
+                helm.bracket(interval=bracket_interval,
+                             sigscale=bracket_sigscale,maxiter=1)
+                helm.bisection(precision_level=bisection_precision_level,
+                               sigscale=bisection_sigscale,
+                               precision_limits=bisection_precision_limits,
+                               maxiter=1)
+            else:
+
+                helm.bracket(interval=bracket_interval,
+                             sigscale=bracket_sigscale,maxiter=bracket_maxiter)
+                helm.bisection(precision_level=bisection_precision_level,
+                               sigscale=bisection_sigscale,
+                               precision_limits=bisection_precision_limits,
+                               maxiter=bisection_maxiter)
+
+            # Store rms and abdev statistics
+            cen_rms[acname] = helm.rms
+            cen_abdev[acname] = helm.abdev
+            cen_abdev_wht[acname] = helm.abdev_wht
+            for key in helm.sys_rms:
cen_rms[acname,key] = helm.sys_rms[key] + cen_abdev[acname,key] = helm.sys_abdev[key] + for key in helm.blk_rms: + cen_rms[acname,key] = helm.blk_rms[key] + cen_abdev[acname,key] = helm.blk_abdev[key] + for key in helm.sat_rms: + cen_rms[acname,key] = helm.sat_rms[key] + cen_abdev[acname,key] = helm.sat_abdev[key] + + # Perform forward transform for the center + helm.transform() + transformed_orbits[acname] = helm.coords1 + + # Get the helmert parameters; + # convert rotations from radians into arc seconds + # convert scale to part-per-million deviation from 1 + tr_pars = np.zeros(7) + for i in range(0,3): + tr_pars[i] = helm.helmert[i] + for i in range(3,6): + tr_pars[i] = helm.helmert[i]*180.0*3600.0/np.pi + tr_pars[6] = (helm.helmert[6]-1.0)*1e6 + transform_params[acname] = tr_pars + logger.debug(f"transformed_orbits[{acname}]: " + f"{transformed_orbits[acname]}") + logger.debug(f"helmert parameters {acname}: {helm.helmert}") + + # End of centers loop + + # Calculate satellite-specific rms over all the centers + # This will be the weighted mean of the satellite-specific rms over + # the centers with weights being 1/cen_abdev**2 + + # Create a list of all the satellites + sats = [] + for key in cen_rms: + if not isinstance(key,str): + if isinstance(key[1],tuple): + if key[1] not in sats: + sats.append(key[1]) + + # Loop over satellites and calculate sat-specific rms (sat_rms) + # and satellite accuracy exponent codes (sat_accuracy) + logger.debug(f"center {acname}") + logger.debug(f"cen_rms: {cen_rms}") + logger.debug(f"cen_abdev: {cen_abdev}") + sat_rms = {} + sat_accuracy = {} + rms_all_sats = [] + rms_sats_by_sys = {} + for sat in sats: + wcen = [] + rmscen = [] + sys = sat[0] + ng = 0 + if sys not in rms_sats_by_sys: + rms_sats_by_sys[sys] = [] + for acname in self.weighted_centers: + if ( (acname,sat) in cen_rms + and self.satflags[acname,sat] != 'excluded_sat' + and self.satflags[acname,sat] != 'unweighted_sys' + and self.satflags[acname,sat] != 'unweighted_sat'): + if self.cen_wht_method == 'global': + wcen.append(1/cen_abdev_wht[acname]**2) + elif self.cen_wht_method == 'by_constellation': + wcen.append(1/cen_abdev[acname,sys]**2) + elif self.cen_wht_method == 'by_sat': + wcen.append(1/cen_abdev[acname,sat]**2) + rmscen.append(cen_rms[acname,sat]**2) + ng += self.ngood[acname,sat] + + # For outlier removal, we now have missing_sys and missing_sys_other + # so if a constellation is fully missing, the satellites will get a + # total rms + if ( (acname,sat) in cen_rms + and self.satflags[acname,sat] not in + ['excluded_sat','missing_sat_other','missing_blk_other', + 'unweighted_sys','unweighted_sat']): + rms_all_sats.append(cen_rms[acname,sat]**2) + rms_sats_by_sys[sys].append(cen_rms[acname,sat]**2) + + # check if the satellite data comes from at least two centers + # with at least 90 percent overlap of x,y,z data; + # if not, set the satellite accuracy code 0 as unknown + if (len(rmscen) <= 1 or ng < 0.9*2*3*len(set(self.epochs))): + sat_rms[sat] = 0.0 + sat_accuracy[sat] = 0 + else: + sat_rms[sat] = np.sqrt( + np.ma.average(rmscen,weights=wcen)/(len(rmscen)-1)) + sat_accuracy[sat] = round(np.log2(1000.0*sat_rms[sat])) + + sat_rms_mean = np.sqrt(np.mean(rms_all_sats)) + sat_rms_sys = {} + for sys in rms_sats_by_sys: + sat_rms_sys[sys] = np.sqrt(np.mean(rms_sats_by_sys[sys])) + logger.debug(f"rms_sats_by_sys: {rms_sats_by_sys}") + logger.debug(f"sat_rms: {sat_rms}") + logger.debug(f"sat_rms_mean: {len(rms_all_sats)} {sat_rms_mean}") + logger.debug(f"sat_rms_sys: {sat_rms_sys}") + + 
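+        # Worked sketch of the combination above (illustrative, hypothetical
+        # numbers only): with two contributing centres whose sat-specific RMS
+        # values are r1 and r2 and whose abdev-based weights are w1 and w2,
+        # the values computed above reduce to
+        #     sat_rms = sqrt( ((w1*r1**2 + w2*r2**2)/(w1 + w2)) / (ncen - 1) )
+        #     sat_accuracy = round(log2(1000.0 * sat_rms))
+        # where ncen is the number of contributing centres (here 2); with
+        # fewer than two centres, or less than 90% x/y/z overlap, the
+        # accuracy code is left as 0 (unknown) as guarded above.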
# Loop over satellites and calculate sat-specific abdev
+        sat_abdev = {}
+        for sat in sats:
+            abdevcen = []
+            for acname in self.weighted_centers:
+                if ( (acname,sat) in cen_abdev
+                        and self.satflags[acname,sat] != 'excluded_sat'
+                        and self.satflags[acname,sat] != 'unweighted_sys'
+                        and self.satflags[acname,sat] != 'unweighted_sat'):
+                    abdevcen.append(cen_abdev[acname,sat])
+            if len(abdevcen) <= 1:
+                sat_abdev[sat] = 0.0
+            else:
+                sat_abdev[sat] = np.ma.average(abdevcen)
+        logger.debug(f"sat_abdev: {sat_abdev}")
+
+        orbits_tuple = ()
+        masks_tuple = ()
+        for acname in self.weighted_centers:
+
+            # Exclude if sat/epoch data is missing/excluded/unweighted for
+            # this center
+            okay_rows = np.where(
+                    (self.orbflags[acname]!='missing_val').all(axis=1) &
+                    (self.orbflags[acname]!='unweighted_sys').all(axis=1) &
+                    (self.orbflags[acname]!='unweighted_sat').all(axis=1) &
+                    (self.orbflags[acname]!='excluded_sat').all(axis=1) &
+                    (self.orbflags[acname]!='missing_sys').all(axis=1) &
+                    (self.orbflags[acname]!='missing_blk').all(axis=1) &
+                    (self.orbflags[acname]!='missing_sat').all(axis=1) )[0]
+            logger.debug(f"okay_rows {acname} {np.shape(okay_rows)}")
+            mask = np.full_like(self.orbflags[acname],True,dtype=bool)
+            mask[okay_rows,:] = False
+            orbits_tuple = orbits_tuple + (transformed_orbits[acname],)
+            masks_tuple = masks_tuple + (mask,)
+        orbits_masked = np.ma.masked_array(orbits_tuple,masks_tuple)
+
+        if self.cen_wht_method == 'global':
+
+            wcen = []
+            for acname in self.weighted_centers:
+                wcen.append(self.cen_weights[acname])
+            orbitmean = np.ma.average(orbits_masked,axis=0,weights=wcen)
+            m = np.shape(orbits_masked)[1]
+            sdev = np.ma.array(np.full((m,3),np.nan))
+            for c,row in enumerate(self.satinfo):
+                n = (orbits_masked[:,c].count())/3
+                if n==1:
+                    for j in range(0,3):
+                        sdev[c,j] = 0
+                else:
+                    sdev[c] = np.sqrt(
+                        np.ma.average((orbits_masked[:,c]-orbitmean[c])**2,
+                                      axis=0,weights=wcen)/(n-1))
+
+        elif self.cen_wht_method == 'by_constellation':
+
+            m = np.shape(orbits_masked)[1]
+            orbitmean = np.ma.array(np.full((m,3),np.nan))
+            sdev = np.ma.array(np.full((m,3),np.nan))
+            for c,row in enumerate(self.satinfo):
+                wcen = []
+                sys_id = row[0]
+                n = 0
+                for acname in self.weighted_centers:
+                    if (acname,sys_id) in self.cen_weights.keys():
+                        wcen.append(self.cen_weights[acname,sys_id])
+                        n += 1
+                    else:
+                        wcen.append(0)
+                orbitmean[c] = np.ma.average(
+                        orbits_masked[:,c],axis=0,weights=wcen)
+                if n==1:
+                    for j in range(0,3):
+                        sdev[c,j] = 0
+                else:
+                    sdev[c] = np.sqrt(
+                        np.ma.average((orbits_masked[:,c]-orbitmean[c])**2,
+                                      axis=0,weights=wcen)/(n-1))
+
+        elif self.cen_wht_method == 'by_block':
+
+            m = np.shape(orbits_masked)[1]
+            orbitmean = np.ma.array(np.full((m,3),np.nan))
+            sdev = np.ma.array(np.full((m,3),np.nan))
+            for c,row in enumerate(self.satinfo):
+                wcen = []
+                block = row[3]
+                n = 0
+                for acname in self.weighted_centers:
+                    if (acname,block) in self.cen_weights.keys():
+                        wcen.append(self.cen_weights[acname,block])
+                        n += 1
+                    else:
+                        wcen.append(0)
+                orbitmean[c] = np.ma.average(
+                        orbits_masked[:,c],axis=0,weights=wcen)
+                # sdev was left uncomputed for by_block weighting, leaving it
+                # undefined when filled below; compute it as in the other
+                # weighting methods
+                if n==1:
+                    for j in range(0,3):
+                        sdev[c,j] = 0
+                else:
+                    sdev[c] = np.sqrt(
+                        np.ma.average((orbits_masked[:,c]-orbitmean[c])**2,
+                                      axis=0,weights=wcen)/(n-1))
+
+        elif self.cen_wht_method == 'by_sat':
+
+            m = np.shape(orbits_masked)[1]
+            orbitmean = np.ma.array(np.full((m,3),np.nan))
+            sdev = np.ma.array(np.full((m,3),np.nan))
+            for c,row in enumerate(self.satinfo):
+                wcen = []
+                sys_id = row[0]
+                prn = row[1]
+                svn = row[2]
+                n = 0
+                for acname in self.weighted_centers:
+                    if (acname,sys_id,prn,svn) in self.cen_weights.keys():
+                        wcen.append(self.cen_weights[acname,sys_id,prn,svn])
+                        n += 1
+                    else:
+                        wcen.append(0)
+                orbitmean[c] = np.ma.average(
+                        orbits_masked[:,c],axis=0,weights=wcen)
+                if n==1:
+                    for j in range(0,3):
+                        sdev[c,j] = 0
+                else:
+                    sdev[c] = np.sqrt(
+                        np.ma.average((orbits_masked[:,c]-orbitmean[c])**2,
+                                      axis=0,weights=wcen)/(n-1))
+
+        orbitmean = orbitmean.filled(np.nan)
+        sdev = sdev.filled(np.nan)
+
+        # Update attributes
+        self.combined_orbit = orbitmean
+        self.sdev = sdev
+        self.cen_rms = cen_rms
+        self.cen_abdev = cen_abdev
+        self.cen_abdev_wht = cen_abdev_wht
+        self.sat_rms = sat_rms
+        self.sat_accuracy = sat_accuracy
+        self.sat_abdev = sat_abdev
+        self.sat_rms_mean = sat_rms_mean
+        self.sat_rms_sys = sat_rms_sys
+        self.transform_params = transform_params
+
+
+    def assess(self,sat_rms_tst=None,sat_rms_tst_unweighted=None,coef_sat=470.0,
+               thresh_sat=None,max_high_satrms=None,
+               trn_tst=None,thresh_trn=[None,None,None],
+               numcen_tst=None,min_numcen=None):
+        """
+        Assess the quality of combination results against the given thresholds
+
+        Keyword arguments:
+        sat_rms_tst [str] : option for testing of satellite rms
+                            options: 'auto'   : thresholds set by multiplying a
+                                                coefficient by the mean rms over
+                                                each constellation
+                                     'manual' : thresholds set by user
+                                     'strict' : thresholds will be the lower of
+                                                user-defined and automatic
+                                     None     : no satellite rms test
+        sat_rms_tst_unweighted [str] : similar to sat_rms_tst but for
+                                       unweighted centers
+        coef_sat [float] : coefficient for sat rms auto approach
+        thresh_sat [dict] : threshold for satellite rms (cm) for each
+                            satellite system for manual approach
+        max_high_satrms [int] : maximum number of high-rms satellites for
+                                a center
+
+        trn_tst [str] : option for testing of transformation parameters
+                        options: 'auto'   : thresholds automatically set
+                                 'manual' : thresholds set by user
+                                 'strict' : thresholds will be the lower of
+                                            user-defined and automatic
+                                 None     : no transformation test
+        thresh_trn [list] : transformation thresholds for translation (mm),
+                            rotation (mas) and scale (ppb)
+
+        numcen_tst [str] : option for testing of minimum number of centers
+                           for each satellite
+                           options: 'strict' : minimum number set by user
+                                    'eased'  : minimum number set by user but
+                                               may be eased depending on the
+                                               number of centers
+                                    None     : no test for minimum number of
+                                               centers
+        min_numcen [int] : minimum number of centers for each satellite
+
+        Updates:
+        self.rejection [bool] : whether rejections are found or not
+                                (True/False)
+        self.exclude_highrms [list of tuples] : list of cen/sat exclusions
+                                                due to high rms
+        self.unweighted_max_high_satrms [list] : list of centers unweighted
+                                                 due to a high number of sats
+                                                 excluded because of high rms
+        self.unweighted_high_tra [list of tuples] : list of centers unweighted
+                                                    due to high transformation
+                                                    parameters, along with the
+                                                    index and value of the
+                                                    outlier transformation
+                                                    parameter
+        self.exclude_lowcen [list] : list of sat exclusions due to a low
+                                     number of centers contributing
+        """
+
+        # Check the given arguments
+        if sat_rms_tst is not None:
+            allowed_sat_rms_tst = ['auto','manual','strict']
+            if sat_rms_tst not in allowed_sat_rms_tst:
+                logger.error("\nsat_rms_tst must be one of "
+                             f"{allowed_sat_rms_tst}\n", stack_info=True)
+                raise ValueError(f"The given sat_rms_tst {sat_rms_tst} is not "
+                                 f"in {allowed_sat_rms_tst}")
+
+        if sat_rms_tst_unweighted is not None:
+            allowed_sat_rms_tst = ['auto','manual','strict']
+            if sat_rms_tst_unweighted not in allowed_sat_rms_tst:
+                logger.error("\nsat_rms_tst_unweighted must be one of "
+                             f"{allowed_sat_rms_tst}\n", stack_info=True)
+                raise ValueError(f"The given sat_rms_tst_unweighted {sat_rms_tst_unweighted} is not in "
+                                 
f"{allowed_sat_rms_tst}") + + checkutils.check_scalar(coef_sat) + + if len(self.weighted_centers) == 1: + logger.info("One weighted center only; sat_rms_tst set to manual") + sat_rms_tst = 'manual' + + if sat_rms_tst == 'manual' and thresh_sat is None: + logger.error("\nthresh_sat must be specified when sat_rms_tst is " + "manual\n", stack_info=True) + raise ValueError("sat_rms_tst is manual but thresh_sat not " + "specified!") + + if thresh_sat is not None: + if not isinstance(thresh_sat,dict): + logger.error("\nThe given thresh_sat must be a dict\n", + stack_info=True) + raise TypeError("The given thresh_sat is not of type dict") + for key in thresh_sat: + if not isinstance(key,str): + logger.error("\nThe keys in the given thresh_sat must be " + "of type str\n",stack_info=True) + raise TypeError("There are non-str keys in the given " + "thresh_sat") + checkutils.check_scalar(thresh_sat[key]) + + if max_high_satrms is not None: + if not isinstance(max_high_satrms,int): + logger.error("\nThe given max_high_satrms must be an " + "integer\n", stack_info=True) + raise TypeError("The given max_high_satrms is not of type int") + self.max_high_satrms = max_high_satrms + + if trn_tst is not None: + allowed_trn_tst = ['auto','manual','strict'] + if trn_tst not in allowed_trn_tst: + logger.error(f"\ntrn_tst must be one of {allowed_trn_tst}\n", + stack_info=True) + raise ValueError("The given trn_tst is not in " + "{allowed_trn_tst}") + + if not isinstance(thresh_trn,list): + logger.error("\nthresh_trn must be a list of three items for " + "translation, rotation and scale thresholds\n", + stack_info=True) + raise TypeError("The given thresh_trn is not of type list") + if len(thresh_trn) != 3: + logger.error("\nthresh_trn must be a list of three items for " + "translation, rotation and scale thresholds\n", + stack_info=True) + raise TypeError(f"The given thresh_trn is of length " + f"{len(thresh_trn)}") + for item in thresh_trn: + if item is not None: + checkutils.check_scalar(item) + + if numcen_tst is not None: + allowed_numcen_tst = ['strict','eased'] + if numcen_tst not in allowed_numcen_tst: + logger.error("\nnumcen_tst must be one of " + f"{allowed_numcen_tst}\n", stack_info=True) + raise ValueError("The given numcen_tst is not in " + "{allowed_numcen_tst}") + + if min_numcen is not None: + if not isinstance(min_numcen,int): + logger.error("\nThe given min_numcen must be an " + "integer\n", stack_info=True) + raise TypeError("The given min_numcen is not of type int") + + # Set the default rejection to False + rejection = False + + # Print out transformation values + logger.debug("Transformation values (mm, mas, ppb)") + for acname in self.transform_params: + logger.debug(f"{acname}: " + f"{[item*1000.0 for item in self.transform_params[acname]]}") + + # Get a list of all centers + centers_str = [] + for ac in self.weighted_centers: + centers_str.append(ac + ' ') + centers_str = centers_str + self.unweighted_centers + + # Create a list of all satellites + sats = [] + for key in self.cen_rms: + if not isinstance(key,str): + if isinstance(key[1],tuple): + if key[1] not in sats: + sats.append(key[1]) + + # Print out satellite rms statistics + logger.info("Sat-specific rms statistics (cm)") + logger.info(f" PRN | SVN | " + f"{' | '.join([ac for ac in centers_str])} | IGS") + logger.info(f"------------{'--------'*(len(centers_str)+1)}") + for sat in sats: + rms = [] + for acname in self.weighted_centers + self.unweighted_centers: + if (acname,sat) in self.cen_rms: + rms_str = 
f"{self.cen_rms[acname,sat]*100:^5.2f}" + else: + rms_str = " " + rms.append(rms_str) + if sat in self.sat_rms: + rms_str = f"{self.sat_rms[sat]*100:^5.2f}" + else: + rms_str = " " + rms.append(rms_str) + prn = sat[0] + str(sat[1]).zfill(2) + svn = sat[0] + str(sat[2]).zfill(3) + logger.info(f" {prn} | {svn} | " + f"{' | '.join([item for item in rms])}") + logger.info("\n") + + # Print out satellite flags + logger.info("Satellite flags") + logger.info(f" PRN | SVN | " + f"{' | '.join([ac.center(19) for ac in centers_str])}") + logger.info(f"------------" + f"{'----------------------'*(len(centers_str))}") + for sat in sats: + prn = sat[0] + str(sat[1]).zfill(2) + svn = sat[0] + str(sat[2]).zfill(3) + flags = [] + for acname in self.weighted_centers + self.unweighted_centers: + if (acname,sat) in self.satflags: + flag = self.satflags[acname,sat].center(19) + else: + flag = " " + flags.append(flag) + logger.info(f" {prn} | {svn} | " + f"{' | '.join([item for item in flags])}") + logger.info("\n") + + + logger.debug(f"thresh_sat={thresh_sat}, thresh_trn={thresh_trn}, " + f"min_numcen={min_numcen}") + + # Test 1 - sat-specific rms (remove one sat/cen pair at once) + + # Only do the test if requested, and any previous tests have been + # successfully passed + if (sat_rms_tst is not None and not rejection): + + # Reset outlier thresholds if auto or strict + thresh_sat_used = {} + if sat_rms_tst == 'auto': + for sys in self.sat_rms_sys: + thresh_sat_used[sys] = coef_sat*self.sat_rms_sys[sys] + elif sat_rms_tst == 'strict': + for sys in self.sat_rms_sys: + if (sys not in thresh_sat + or thresh_sat[sys] is None): + thresh_sat_used[sys] = coef_sat*self.sat_rms_sys[sys] + else: + thresh_sat_used[sys] = min(thresh_sat[sys]/100.0, + coef_sat*self.sat_rms_sys[sys]) + elif sat_rms_tst == 'manual': + for sys in self.sat_rms_sys: + if (sys in thresh_sat + and thresh_sat[sys] is not None): + thresh_sat_used[sys] = thresh_sat[sys]/100.0 + else: + raise ValueError(f"sat_rms_tst {sat_rms_tst} is not recognized!") + + logger.info(f"thresh_sat_used: {thresh_sat_used}") + # Look only into weighted centers used for combination (not excluded) + sat_rms = {} + for acname in self.weighted_centers: + for sat in sats: + sys = sat[0] + if sys not in sat_rms: + sat_rms[sys] = {} + if ( (acname,sat) in self.cen_rms + and self.satflags[acname,sat] != 'excluded_sat' + and self.satflags[acname,sat] != 'unweighted_sys' + and self.satflags[acname,sat] != 'unweighted_sat'): + sat_rms[sys][acname,sat] = self.cen_rms[acname,sat] + + # Find the largest outlier rms + outlier_found = False + maxrms = 0.0 + for sys in sat_rms: + if ( sys in thresh_sat_used + and max(sat_rms[sys].values()) > thresh_sat_used[sys]/100.0 ): + + logger.debug(f"max sat_rms weighted: {max(sat_rms[sys].values())*100}") + + outlier_found = True + + # Find the highest rms + if max(sat_rms[sys].values()) > maxrms: + maxrms = max(sat_rms[sys].values()) + sys_out = sys + outlier_censat = max(sat_rms[sys],key=sat_rms[sys].get) + + # If outlier found, proceed to the exclusion + if outlier_found: + + # There is at least one sat rms larger than the threshold + rejection = True + + # Add the corresponding cen/sat pair to exclude_highrms + cen_outlier = outlier_censat[0] + sat_outlier = outlier_censat[1] + logger.info("Exclusion due to outlier satellite RMS: " + f"{cen_outlier} {sat_outlier}") + + # Update exclude_highrms + if (cen_outlier,sat_outlier) not in self.exclude_highrms: + self.exclude_highrms.append((cen_outlier,sat_outlier)) + + # If maximum number of 
outlier satellites is reached for a center,
+            # unweight that center
+            if max_high_satrms is not None:
+                n_outlier = 0
+                for key in self.exclude_highrms:
+                    if key[0] == cen_outlier:
+                        n_outlier += 1
+                if n_outlier > max_high_satrms:
+                    self.unweighted_max_high_satrms.append(cen_outlier)
+                    self.cenflags[cen_outlier] = 'unweighted'
+                    self.weighted_centers.remove(cen_outlier)
+                    self.unweighted_centers.append(cen_outlier)
+                    logger.info(f"Maximum number of satellite outliers "
+                                f"exceeded for center {cen_outlier}: "
+                                f"{n_outlier} > {max_high_satrms}\n"
+                                f"Center {cen_outlier} unweighted.\n")
+
+        # Test 2 - transformation parameters test
+        # If any transformation parameter of a center exceeds a threshold,
+        # unweight that center. If exceeded for more than one center, only
+        # unweight the center with the largest transformation
+
+        # Only do the test if requested, there is more than one weighted
+        # center, and any previous test has been successfully passed
+        ncen = len(self.weighted_centers)
+        if (trn_tst is not None
+                and not all(thresh is not None for thresh in thresh_trn)
+                and ncen > 1
+                and not rejection):
+
+            # User-defined thresholds
+            rms_tra_usr = thresh_trn[0]
+            rms_rot_usr = thresh_trn[1]
+            rms_sca_usr = thresh_trn[2]
+            if rms_tra_usr is None:
+                rms_tra_usr = 9999.0
+            if rms_rot_usr is None:
+                rms_rot_usr = 9999.0
+            if rms_sca_usr is None:
+                rms_sca_usr = 9999.0
+
+            if trn_tst == 'manual':
+
+                # Get the outlier levels
+                rms_tra = rms_tra_usr
+                rms_rot = rms_rot_usr
+                rms_sca = rms_sca_usr
+
+            elif trn_tst == 'auto' or trn_tst == 'strict':
+
+                # Calculate the outlier levels
+                tra2 = []
+                rot2 = []
+                sca2 = []
+                for acname in self.weighted_centers:
+                    for i in range(0,3):
+                        tra2.append((self.transform_params[acname][i])**2)
+                    for i in range(3,6):
+                        rot2.append((self.transform_params[acname][i])**2)
+                    sca2.append((self.transform_params[acname][6])**2)
+
+                fact1 = 950.0*np.sqrt(3*ncen)
+                fact2 = 980.0*np.sqrt(ncen)
+                if ncen < 5:
+                    fact1 = 4700.0
+                    fact2 = 4700.0
+                rms_tra = fact1*np.sqrt(np.mean(tra2))
+                rms_rot = fact1*np.sqrt(np.mean(rot2))
+                rms_sca = fact2*np.sqrt(np.mean(sca2))
+                logger.debug(f"fact1={fact1} fact2={fact2}")
+                logger.debug(f"rms_tra={rms_tra}, rms_rot={rms_rot}, "
+                             f"rms_sca={rms_sca}")
+                if trn_tst == 'strict':
+                    rms_tra = min(rms_tra,rms_tra_usr)
+                    rms_rot = min(rms_rot,rms_rot_usr)
+                    rms_sca = min(rms_sca,rms_sca_usr)
+
+            else:
+                logger.error("Transformation test approach must be manual, "
+                             "auto or strict!", stack_info=True)
+                raise ValueError(f"Transformation test approach {trn_tst} "
+                                 "not recognized!")
+
+            if rms_sca < 1.0:
+                logger.warning(f"rms_sca {rms_sca} < 1.0. "
+                               "Setting rms_sca to 1.0")
+                rms_sca = 1.0
+
+            logger.debug(f"rms_tra={rms_tra}, rms_rot={rms_rot}, "
+                         f"rms_sca={rms_sca}")
+
+            # vector of transformation thresholds
+            trn_thresh = np.zeros(7)
+            for i in range(0,3):
+                trn_thresh[i] = rms_tra/1000.0
+            for i in range(3,6):
+                trn_thresh[i] = rms_rot/1000.0
+            trn_thresh[6] = rms_sca/1000.0
+
+            # Search for the highest outlier
+            trn_out_flg = False
+            max_fact = 0.0
+            fact = {}
+            logger.debug(f"trn_thresh: {trn_thresh}")
+            for acname in self.weighted_centers:
+                logger.debug(f"tr {acname}: {self.transform_params[acname]}")
+                fact[acname] = abs(self.transform_params[acname]/trn_thresh)
+                if any(fact[acname] > 1):
+                    trn_out_flg = True
+                    if any(fact[acname] > max_fact):
+                        max_fact = max(fact[acname])
+                        ind_max_fact = list(fact[acname]).index(max_fact)
+                        cen_outlier = acname
+
+            # any rejection found?
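+            # (A quick illustration of the ratio test above, with
+            # hypothetical numbers: a center showing an 8 mm x-translation
+            # against a 5 mm threshold, i.e. trn_thresh[0] = 0.005, gets
+            # fact = 1.6 > 1 and is flagged; only the center with the single
+            # largest ratio is unweighted in each iteration.)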
+
+            if trn_out_flg:
+
+                rejection = True
+
+                # Unweight the center with the largest transformation outlier
+                self.unweighted_high_tra.append((cen_outlier,ind_max_fact,
+                                                 max_fact))
+                self.cenflags[cen_outlier] = 'unweighted'
+                self.weighted_centers.remove(cen_outlier)
+                self.unweighted_centers.append(cen_outlier)
+                logger.info(f"Transformation parameter too large for center "
+                            f"{cen_outlier}:\n"
+                            f"{self.transform_params[cen_outlier]}\n"
+                            f"Thresholds: {thresh_trn}\n"
+                            f"Center {cen_outlier} unweighted.\n")
+
+        # Test 3 - high center rms : this is the same as test 1 except that
+        # it is performed for unweighted centers
+        logger.debug(f"rejection: {rejection}")
+        if (sat_rms_tst_unweighted and thresh_sat is not None and not rejection):
+
+            # Look only into unweighted centers (and sats not already excluded)
+            sat_rms = {}
+            for acname in self.weighted_centers + self.unweighted_centers:
+                n_outlier = 0
+                for key in self.exclude_highrms:
+                    if key[0] == acname:
+                        n_outlier += 1
+                for sat in sats:
+                    sys = sat[0]
+                    if ( (acname,sat) in self.cen_rms
+                            and self.satflags[acname,sat] in
+                                ('unweighted_sys','unweighted_sat')
+                            and acname not in self.unweighted_max_high_satrms
+                            and n_outlier <= max_high_satrms ):
+                        if sys not in sat_rms:
+                            sat_rms[sys] = {}
+                        sat_rms[sys][acname,sat] = self.cen_rms[acname,sat]
+
+            # Find the largest outlier rms
+            outlier_found = False
+            maxrms = 0.0
+            for sys in sat_rms:
+                logger.debug(f"max sat_rms unweighted: {max(sat_rms[sys].values())*100}")
+                if ( sys in thresh_sat_used
+                        and max(sat_rms[sys].values()) > thresh_sat_used[sys]/100.0 ):
+
+                    outlier_found = True
+
+                    # Find the highest rms
+                    if max(sat_rms[sys].values()) > maxrms:
+                        maxrms = max(sat_rms[sys].values())
+                        sys_out = sys
+                        outlier_censat = max(sat_rms[sys],key=sat_rms[sys].get)
+
+            # If outlier found, proceed to the exclusion
+            if outlier_found:
+
+                # There is at least one sat rms larger than the threshold
+                rejection = True
+
+                # Add the corresponding cen/sat pair to exclude_highrms
+                cen_outlier = outlier_censat[0]
+                sat_outlier = outlier_censat[1]
+                logger.info("Exclusion due to outlier satellite RMS: "
+                            f"{cen_outlier} {sat_outlier} "
+                            f"{sat_rms[sys_out][cen_outlier,sat_outlier]}")
+
+                # Update exclude_highrms
+                if (cen_outlier,sat_outlier) not in self.exclude_highrms:
+                    self.exclude_highrms.append((cen_outlier,sat_outlier))
+
+                # If the maximum number of outlier satellites is reached for
+                # a center, issue a warning
+                if max_high_satrms is not None:
+                    n_outlier = 0
+                    for key in self.exclude_highrms:
+                        if key[0] == cen_outlier:
+                            n_outlier += 1
+                    if n_outlier > max_high_satrms:
+                        logger.info(f"Number of satellite outliers higher than"
+                                    f" the maximum for the unweighted center "
+                                    f"{cen_outlier}: "
+                                    f"{n_outlier} > {max_high_satrms}\n")
+
+        # Test 4 - minimum number of centers for each satellite to be included
+
+        # Only perform this test if requested, and any previous test has been
+        # successfully passed
+        if (numcen_tst is not None and min_numcen is not None
+                and not rejection):
+
+            # Get the number of centers for the satellite which has the
+            # highest number of contributors
+            max_num_ac = 0
+            ncen = {}
+            for sat in sats:
+                ncen[sat] = 0
+                for acname in self.weighted_centers:
+                    if ((acname,sat) in self.satflags
+                            and self.satflags[acname,sat] != 'missing_sat'
+                            and self.satflags[acname,sat] != 'missing_blk'
+                            and self.satflags[acname,sat] != 'missing_sys'
+                            and self.satflags[acname,sat] != 'unweighted_sys'
+                            and self.satflags[acname,sat] != 'unweighted_sat'
+                            and self.satflags[acname,sat] != 'excluded_sat'):
+                        ncen[sat] += 1
+                if ncen[sat] > max_num_ac:
+                    max_num_ac = ncen[sat]
+
+            # If the max number of acs for a satellite is smaller than the
+            # requested number of acs, set that max as the min number of acs
+            if min_numcen > max_num_ac:
+                min_numcen = max_num_ac
+
+            # If eased, some other special circumstances
+            if numcen_tst == 'eased':
+                if (max_num_ac <= 4 and min_numcen > 2):
+                    min_numcen = 2
+                if (max_num_ac <= 5 and min_numcen > 3):
+                    min_numcen = 3
+
+            # Now look for any satellite which has a lower number of acs than
+            # the minimum specified; exclude that satellite from all the
+            # centers if not already excluded
+            for sat in sats:
+                if ncen[sat] < min_numcen:
+
+                    if sat not in self.exclude_lowcen:
+                        logger.info(f"There are only {ncen[sat]} centers for "
+                                    f"{sat} < {min_numcen}. Excluding the "
+                                    f"satellite from combination.\n")
+                        rejection = True
+                        self.exclude_lowcen.append(sat)
+
+            self.min_numcen = min_numcen
+
+        # Update rejection attribute
+        self.rejection = rejection
+
+
+    def to_sp3dict(self,sample_rate,sp3_header):
+
+        """
+        Convert combined_orbit, epochs and satinfo attributes to a sp3
+        dictionary, so it can be used by the io_data module for writing to a
+        sp3 file
+
+        Updates:
+        self.sp3_combined [dict] : combined sp3 dictionary
+
+        """
+
+        # Create a list of all satellites
+        sats = []
+        sat_accuracy = []
+        sys_list = []
+        for key in self.cen_rms:
+            if not isinstance(key,str):
+                if isinstance(key[1],tuple):
+                    sys_id = key[1][0]
+                    prn = key[1][1]
+                    sat = sys_id + str(prn).zfill(2)
+                    if sat not in sats:
+                        sats.append(sat)
+                        sat_accuracy.append(self.sat_accuracy[
+                                (key[1][0],key[1][1],key[1][2])])
+                        if sys_id not in sys_list:
+                            sys_list.append(sys_id)
+        # sort accuracies consistently with the sorted satellite list
+        sat_accuracy = [x for _, x in sorted(zip(sats, sat_accuracy))]
+        sats.sort()
+
+        # weighted centers
+        acnames = []
+        acnames_0 = []
+        ac_ctr = 0
+        ctr = 0
+        for ac in self.weighted_centers:
+            ac_ctr += 1
+            ctr += 1
+            acnames_0.append(ac)
+            if ctr == 14 or ac_ctr == len(self.weighted_centers):
+                acnames.append(' '.join([item for item in acnames_0]))
+                ctr = 0
+                acnames_0 = []
+
+        # Initialize the sp3 dictionary
+        sp3_combined = {}
+        sp3_combined['header'] = {}
+        sp3_combined['data'] = {}
+
+        # Header lines
+        sp3_combined['header']['version'] = '#d'
+        sp3_combined['header']['pvflag'] = 'P'
+        sp3_combined['header']['data_used'] = 'ORBIT'
+        sp3_combined['header']['coord_sys'] = sp3_header['coord_sys']
+        sp3_combined['header']['orbit_type'] = 'HLM'
+        sp3_combined['header']['agency'] = 'IGS'
+        if len(sys_list) > 1:
+            sp3_combined['header']['file_type'] = 'M'
+        else:
+            sp3_combined['header']['file_type'] = sys_list[0]
+        sp3_combined['header']['time_system'] = 'GPS'
+        sp3_combined['header']['base_pos'] = 1.25
+        sp3_combined['header']['base_clk'] = 1.025
+        if sp3_header['cmb_type'] == "REPRO3":
+            sp3_combined['header']['comments'] = [
+                    f"{sp3_header['cmb_type']} ORBIT COMBINATION FROM WEIGHTED "
+                    "AVERAGE OF:"]
+        else:
+            sp3_combined['header']['comments'] = [
+                    "STRICTLY EXPERIMENTAL MULTI-GNSS COMBINATION",
+                    f"{sp3_header['cmb_type']} ORBIT COMBINATION FROM WEIGHTED "
+                    "AVERAGE OF:"]
+        sp3_combined['header']['comments'].extend(acnames)
+        sp3_combined['header']['comments'].extend([
+                "REFERENCED TO GPS CLOCK AND TO WEIGHTED MEAN POLE:",
+                f"PCV:{sp3_header['antex']} OL/AL:{sp3_header['oload']} "
+                f"NONE Y ORB:CMB CLK:{sp3_header['clk_src']}"])
+
+        # should include other highly eccentric satellites in the future
+        # if any
+        if (sample_rate > 300 and 'E' in sys_list):
+            
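# sample_rate is in seconds, so this warning is issued for sampling
+            # coarser than 5 minutes whenever Galileo is present (presumably
+            # aimed at the highly eccentric E14/E18 satellites)
+            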
sp3_combined['header']['comments'].extend([
+                "WARNING: The highly eccentric satellites could have",
+                "errors of up to ~10 mm due to the 15-minute sampling rates"])
+
+        sp3_combined['header']['start_year'] = self.epochs[0].year
+        sp3_combined['header']['start_month'] = self.epochs[0].month
+        sp3_combined['header']['start_day'] = self.epochs[0].day
+        sp3_combined['header']['start_hour'] = self.epochs[0].hour
+        sp3_combined['header']['start_min'] = self.epochs[0].minute
+        sp3_combined['header']['start_sec'] = float(self.epochs[0].second)
+        gc = gpsCal()
+        gc.set_yyyy_MM_dd_hh_mm_ss(self.epochs[0].year,self.epochs[0].month,
+                                   self.epochs[0].day,self.epochs[0].hour,
+                                   self.epochs[0].minute,self.epochs[0].second)
+        sp3_combined['header']['gpsweek'] = gc.wwww()
+        sp3_combined['header']['sow'] = gc.sow()
+        sp3_combined['header']['epoch_int'] = sample_rate
+        sp3_combined['header']['modjul'] = int(gc.mjd())
+        sp3_combined['header']['frac'] = gc.mjd() - int(gc.mjd())
+
+        sp3_combined['header']['sats'] = sats
+        sp3_combined['header']['numsats'] = len(sats)
+        sp3_combined['header']['sat_accuracy'] = sat_accuracy
+
+        # Get a unique list of epochs
+        eps = []
+        for ep in self.epochs:
+            if ep not in eps:
+                eps.append(ep)
+        sp3_combined['data']['epochs'] = eps
+        sp3_combined['header']['num_epochs'] = len(eps)
+
+        pred_start_ultra = self.epochs[0] + datetime.timedelta(days=1)
+
+        # Loop over all orbit rows
+        for c,row in enumerate(self.combined_orbit):
+
+            sat = self.satinfo[c,0]+str(self.satinfo[c,1]).zfill(2)
+            epoch = self.epochs[c]
+
+            sp3_combined['data'][(sat,epoch,'Pflag')] = 1
+            sp3_combined['data'][(sat,epoch,'EPflag')] = 0
+            sp3_combined['data'][(sat,epoch,'Vflag')] = 0
+            sp3_combined['data'][(sat,epoch,'EVflag')] = 0
+            sp3_combined['data'][(sat,epoch,'xcoord')] = row[0]/1000.0 # to km
+            sp3_combined['data'][(sat,epoch,'ycoord')] = row[1]/1000.0 # to km
+            sp3_combined['data'][(sat,epoch,'zcoord')] = row[2]/1000.0 # to km
+            if self.clocks is not None:
+                sp3_combined['data'][(sat,epoch,'clock')] = self.clocks[sp3_header['clk_src']][c,0]
+                sp3_combined['data'][(sat,epoch,'csdev')] = self.clocks[sp3_header['clk_src']][c,1]
+            base_pos = sp3_combined['header']['base_pos']
+            if (sp3_header['cmb_type'] == "ULTRA RAPID" and epoch >= pred_start_ultra):
+                sp3_combined['data'][(sat,epoch,'orbit_pred')] = 'P'
+                sp3_combined['data'][(sat,epoch,'clk_pred')] = 'P'
+            # Encode the x/y/z standard deviations as exponents of base_pos
+            # (in mm), capped at 99
+            for j,axis in enumerate(('x','y','z')):
+                if np.isnan(self.sdev[c,j]):
+                    continue
+                sdev_mm = self.sdev[c,j]*1000.0
+                if sdev_mm > base_pos:
+                    expo = int(round(np.log(sdev_mm)/np.log(base_pos)))
+                    sp3_combined['data'][(sat,epoch,axis+'sdev')] = min(expo,99)
+                elif sdev_mm > 0.0:
+                    sp3_combined['data'][(sat,epoch,axis+'sdev')] = 1
+
+        # Update attributes
+        self.sample_rate = sample_rate
+        self.sp3_combined = sp3_combined
+
diff --git a/rocs/planets.py b/rocs/planets.py
new file mode 100755
index 0000000..9452b6b
--- /dev/null
+++ b/rocs/planets.py
@@ -0,0 +1,264 @@
+# Planetary position calculations module
+
+import numpy as np
+import datetime
+import numbers
+from rocs.gpscal import gpsCal
+import rocs.coordinates as coordinates
+
+
+class AnalyticalPosition:
+
+    """
+    Class for calculating the position of a planet based on an analytical
+    solution, valid between 1950 and 2050
+
+    """
+
+    def __init__(self,planet,ref_frame,time_utc,ut1_utc,xp=None,yp=None):
+
+        """
+        Initialize AnalyticalPosition class
+
+        Keyword arguments:
+        planet [str] : planet for which the position is to be calculated
+        ref_frame [str] : the reference frame for the position of the
+                          planet
+        time_utc [datetime or list/array of datetimes] : UTC time(s) for
+                                                         calculating the
+                                                         planetary positions
+        ut1_utc [scalar or list/array] : UT1-UTC in seconds corresponding
+                                         to the time_utc attribute of the
+                                         class object
+        xp [scalar or list/array] : polar x motion in radians
+                                    corresponding to the time_utc
+                                    attribute of the class object
+        yp [scalar or list/array] : polar y motion in radians
+                                    corresponding to the time_utc
+                                    attribute of the class object
+
+        Updates:
+        self.planet [str]
+        self.ref_frame [str]
+        self.time_utc [array]
+        self.ut1_utc [array]
+        self.time_ut1 [array]
+        self.xp [array]
+        self.yp [array]
+        self.r [numpy array] : 3-column array where the columns represent
+                               the x,y,z of the planet position in the
+                               specified reference frame, and the rows
+                               correspond to the given time_utc epochs
+
+        References:
+        - U.S. Nautical Almanac Office and U.S. Naval Observatory (2007)
+          The astronomical almanac for the year 2007.
+        - Ferrao (2013) Positioning with combined GPS and GLONASS
+          observations, MSc thesis, Tecnico Lisboa.
+        - Michalsky (1988) The Astronomical almanac's algorithm for
+          approximate solar position (1950-2050). Solar Energy.
+        - Seidelmann (2007) Explanatory supplement to the astronomical
+          almanac, US Naval Observatory.
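+
+        Example (an illustrative sketch; the UT1-UTC value is only an
+        approximate figure for early 2020):
+            sun = AnalyticalPosition('sun','ECI',
+                                     datetime.datetime(2020,1,1,12,0,0),-0.2)
+            sun.r   # 1x3 array: ECI position of the Sun in meters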
+
+        """
+
+        # Check the given arguments and set the attributes
+        if not isinstance(planet,str):
+            raise TypeError("The input planet needs to be a string")
+        allowed_planets = ['sun','moon']
+        if planet not in allowed_planets:
+            raise TypeError(f"The input planet {planet} not recognized!\n"
+                            f"Allowed planets: {allowed_planets}")
+        self.planet = planet
+
+        if not isinstance(ref_frame,str):
+            raise TypeError("The input ref_frame needs to be a string")
+        allowed_ref_frames = ['ECI','ECEF']
+        if ref_frame not in allowed_ref_frames:
+            raise TypeError(f"The input reference frame {ref_frame} not "
+                            f"recognized!\nAllowed reference frames: "
+                            f"{allowed_ref_frames}")
+        self.ref_frame = ref_frame
+
+        if not isinstance(time_utc,(list,np.ndarray,datetime.datetime)):
+            raise TypeError("The input time_utc needs to be either a datetime "
+                            "object or a list/array of datetime objects")
+        if not all(isinstance(item,datetime.datetime)
+                   for item in np.atleast_1d(time_utc)):
+            raise TypeError("There are non-datetime items in time_utc")
+        if (any(item < datetime.datetime(1950,1,1,0,0)
+                for item in np.atleast_1d(time_utc))
+                or any(item >= datetime.datetime(2050,1,1,0,0)
+                       for item in np.atleast_1d(time_utc))):
+            raise ValueError("The analytical equations for calculating "
+                             "the approximate almanac positions are only "
+                             "valid between 1950-2050!")
+        self.time_utc = np.atleast_1d(time_utc)
+
+        if not isinstance(ut1_utc,(list,np.ndarray,numbers.Number)):
+            raise TypeError("The given ut1_utc needs to be either a number "
+                            "or a list/array of numbers")
+        if not all(isinstance(item,numbers.Number)
+                   for item in np.atleast_1d(ut1_utc)):
+            raise TypeError("There are non-number items in ut1_utc")
+        if np.shape(np.atleast_1d(ut1_utc)) != np.shape(self.time_utc):
+            raise ValueError("Shape mismatch between self.time_utc "
+                             f"{np.shape(self.time_utc)} and ut1_utc "
+                             f"{np.shape(np.atleast_1d(ut1_utc))}")
+        self.ut1_utc = np.atleast_1d(ut1_utc)
+
+        # For conversion to ECEF, we need xp and yp
+        if (ref_frame == 'ECEF'):
+            if (xp is None or yp is None):
+                raise ValueError("xp and yp must be given for "
+                                 "ECI to ECEF conversion")
+
+            if not isinstance(xp,(list,np.ndarray,numbers.Number)):
+                raise TypeError("The given xp needs to be either a "
+                                "number or a list/array of numbers")
+            if not all(isinstance(item,numbers.Number)
+                       for item in np.atleast_1d(xp)):
+                raise TypeError("There are non-number items in xp")
+            if np.shape(np.atleast_1d(xp)) != np.shape(self.time_utc):
+                raise ValueError("Shape mismatch between self.time_utc "
+                                 f"{np.shape(self.time_utc)} and xp "
+                                 f"{np.shape(np.atleast_1d(xp))}")
+            self.xp = np.atleast_1d(xp)
+
+            if not isinstance(yp,(list,np.ndarray,numbers.Number)):
+                raise TypeError("The given yp needs to be either a "
+                                "number or a list/array of numbers")
+            if not all(isinstance(item,numbers.Number)
+                       for item in np.atleast_1d(yp)):
+                raise TypeError("There are non-number items in yp")
+            if np.shape(np.atleast_1d(yp)) != np.shape(self.time_utc):
+                raise ValueError("Shape mismatch between self.time_utc "
+                                 f"{np.shape(self.time_utc)} and yp "
+                                 f"{np.shape(np.atleast_1d(yp))}")
+            self.yp = np.atleast_1d(yp)
+
+        # convert the UTC time to UT1
+        time_ut1 = []
+        for c,utc in enumerate(self.time_utc):
+            gc = gpsCal()
+            gc.set_yyyy_MM_dd_hh_mm_ss(utc.year,utc.month,utc.day,utc.hour,
+                                       utc.minute,utc.second)
+            ut1 = gc.ut1(self.ut1_utc[c])
+            time_ut1.append(ut1)
+        self.time_ut1 = np.atleast_1d(time_ut1)
+
+        # Calculate the number of Julian centuries since JD2000
+        # (2000/01/01 at 12:00)
+        
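# For example, 2025-01-01 00:00 UT1 lies 9131.5 days after J2000.0,
+        # so t = 9131.5/36525, roughly 0.2500 Julian centuries
+        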
jd2000 = datetime.datetime(2000,1,1,12,0,0)
+        t = np.array([(time-jd2000).total_seconds()/86400.0/36525.0
+                      for time in self.time_ut1])
+
+        # Calculate the positions in ECI, then convert to ECEF if required
+        if self.planet == 'sun':
+
+            # obliquity of the ecliptic [degrees]
+            eps = 23.439291 - 0.0130042*t
+
+            # mean anomaly [degrees]
+            m = 357.5277233 + 35999.05034*t
+            m = np.array([item%360.0 for item in m])
+
+            # ecliptic longitude [degrees]
+            l = (280.460 + 36000.770*t + 1.914666471*np.sin(np.deg2rad(m))
+                 + 0.000139589*np.cos(np.deg2rad(2*m)))
+            l = np.array([item%360.0 for item in l])
+
+            # astronomical unit (average distance between the Earth and the
+            # Sun) [meters]
+            au = 149597870691.0
+
+            # distance between the Earth and the Sun [meters]
+            r_magnitude = au*(1.000140612 - 0.016708617*np.cos(np.deg2rad(m))
+                              - 0.000139589*np.cos(np.deg2rad(2*m)))
+
+            # vector of the Sun position in ECI [meters]
+            r0 = r_magnitude*np.cos(np.deg2rad(l))
+            r1 = r_magnitude*np.cos(np.deg2rad(eps))*np.sin(np.deg2rad(l))
+            r2 = r_magnitude*np.sin(np.deg2rad(eps))*np.sin(np.deg2rad(l))
+            r = np.transpose(np.vstack((r0,r1,r2)))
+
+        elif self.planet == 'moon':
+
+            # perturbing factors from the Earth's nutation [radians]
+            f0 = (3600.0*134.96340251 + 1717915923.2178*t + 31.8792*t**2
+                  + 0.051635*t**3 - 0.00024470*t**4)/3600.0
+            f0 = np.array([item%360.0 for item in f0])
+            f0 = np.deg2rad(f0)
+
+            f1 = (3600.0*357.52910918 + 129596581.0481*t - 0.5532*t**2
+                  + 0.000136*t**3 - 0.00001149*t**4)/3600.0
+            f1 = np.array([item%360.0 for item in f1])
+            f1 = np.deg2rad(f1)
+
+            f2 = (3600.0*93.27209062 + 1739527262.8478*t - 12.7512*t**2
+                  - 0.001037*t**3 + 0.00000417*t**4)/3600.0
+            f2 = np.array([item%360.0 for item in f2])
+            f2 = np.deg2rad(f2)
+
+            f3 = (3600.0*297.85019547 + 1602961601.2090*t - 6.3706*t**2
+                  + 0.006593*t**3 - 0.00003169*t**4)/3600.0
+            f3 = np.array([item%360.0 for item in f3])
+            f3 = np.deg2rad(f3)
+
+            # obliquity of the ecliptic [degrees]
+            eps = 23.439291 - 0.0130042*t
+
+            # ecliptic longitude [degrees]
+            l = (218.32 + 481267.883*t + 6.29*np.sin(f0)
+                 - 1.27*np.sin(f0-2*f3) + 0.66*np.sin(2*f3)
+                 + 0.21*np.sin(2*f0) - 0.19*np.sin(f1)
+                 - 0.11*np.sin(2*f2))
+            l = np.array([item%360.0 for item in l])
+
+            # ecliptic latitude [degrees]
+            beta = (5.13*np.sin(f2) + 0.28*np.sin(f0+f2)
+                    - 0.28*np.sin(f2-f0) - 0.17*np.sin(f2-2*f3))
+            beta = np.array([item%360.0 for item in beta])
+
+            # equatorial horizontal parallax of the Moon 
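(sanity check:
+            # with the leading term pi = 0.9508 deg, r = a/sin(pi)
+            # = 6378.1 km / 0.016595, about 384,400 km, the familiar mean
+            # Earth-Moon distance); units: 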
[degrees]
+            pi = (0.9508 + 0.0518*np.cos(f0) + 0.0095*np.cos(f0-2*f3)
+                  + 0.0078*np.cos(2*f3) + 0.0028*np.cos(2*f0))
+            pi = np.array([item%360.0 for item in pi])
+
+            # Earth's equatorial radius (IERS2003) [meters]
+            a = 6378136.6
+
+            # distance between the Moon and the Earth [meters]
+            r_magnitude = a/np.sin(np.deg2rad(pi))
+
+            # vector of the Moon position in ECI [meters]
+            r0 = r_magnitude*np.cos(np.deg2rad(beta))*np.cos(np.deg2rad(l))
+            r1 = r_magnitude*(np.cos(np.deg2rad(eps))*np.cos(np.deg2rad(beta))
+                              *np.sin(np.deg2rad(l))
+                              - np.sin(np.deg2rad(eps))*np.sin(np.deg2rad(beta)) )
+            r2 = r_magnitude*(np.sin(np.deg2rad(eps))*np.cos(np.deg2rad(beta))
+                              *np.sin(np.deg2rad(l))
+                              + np.cos(np.deg2rad(eps))*np.sin(np.deg2rad(beta)) )
+            r = np.transpose(np.vstack((r0,r1,r2)))
+
+        else:
+            raise TypeError(f"The input planet {self.planet} not recognized!")
+
+        if self.ref_frame == 'ECI':
+
+            pass
+
+        elif self.ref_frame == 'ECEF':
+
+            # convert to ECEF
+            coords = coordinates.Rectangular(r,'ECI',self.time_utc)
+
+            coords.ToECEF(iau_model='IAU76/80/82/94',
+                          transformation_method='equinox',
+                          evaluation_method='classical_angles',
+                          ut1_utc=self.ut1_utc,xp=self.xp,yp=self.yp)
+            r = coords.coords
+
+        self.r = r
+
diff --git a/rocs/report.py b/rocs/report.py
new file mode 100755
index 0000000..bb64e43
--- /dev/null
+++ b/rocs/report.py
@@ -0,0 +1,1305 @@
+# Module for reporting and creating summary files
+
+import logging
+import numpy as np
+import json
+from rocs.gpscal import gpsCal
+import rocs.io_data as io_data
+import rocs.planets as planets
+from rocs.eclipse import Eclipse
+
+logger = logging.getLogger(__name__)
+
+class OrbitReport:
+
+    """
+    Class for reporting on orbit combinations
+    """
+
+    def __init__(self,orbcmb,sp3_subm_list,cmb_sp3_filename,prod_rootdir,
+                 cmb_name,vid,camp_id,sol_id,author,contact,ac_acronyms,
+                 rm_dv,dvsats,rf_align,rf_transfo,sat_metadata_file):
+
+        """
+        Initialize OrbitReport class
+
+        Keyword arguments:
+        orbcmb [object of class OrbitComb] : combined orbit
+        sp3_subm_list [list] : list of used sp3 files for the combination
+        cmb_sp3_filename [str] : filename for the combined sp3 file
+        prod_rootdir [str] : root directory for products
+        cmb_name [str] : 3-character combination name
+        vid [int] : version identifier for the combination
+        camp_id [str] : 3-character campaign/project specification
+        sol_id [str] : 3-character solution identifier
+        author [str] : creator of the combination and the report
+        contact [str] : contact information of the author
+        ac_acronyms [dict] : dictionary of AC acronyms
+        rm_dv [bool] : whether DV maneuvering satellites are to be removed
+        dvsats [list] : list of DV maneuvering satellites
+        rf_align [list] : reference frame alignment options
+        rf_transfo [dict] : dictionary of reference frame alignment
+                            transformations
+        sat_metadata_file [str] : used satellite metadata file
+
+        """
+
+        self.orbcmb = orbcmb
+        self.sp3_subm_list = sp3_subm_list
+        self.cmb_sp3_filename = cmb_sp3_filename
+        self.cmb_name = cmb_name
+        self.author = author
+        self.contact = contact
+        self.ac_acronyms = ac_acronyms
+        self.rm_dv = rm_dv
+        self.dvsats = dvsats
+        self.rf_align = rf_align
+        self.rf_transfo = rf_transfo
+        self.sat_metadata_file = sat_metadata_file
+
+        if sat_metadata_file is not None:
+            self.sat_metadata = io_data.SatelliteMetadata(sat_metadata_file)
+
+        # solution specifications
+        if sol_id == 'ULT':
+            solution = 'ultra-rapid'
+        elif sol_id == 'RAP':
+            solution = 'rapid'
+        elif sol_id == 'FIN':
+            solution = 'final'
+
+        self.solution = solution
+
+        if 
solution == 'ultra-rapid': + len_data = '02D' + else: + len_data = '01D' + + if camp_id == 'DEM': + campaign = 'demonstration' + elif camp_id == 'MGX': + campaign = 'Multi-GNSS Experiment' + elif camp_id == 'OPS': + campaign = 'operational' + elif camp_id == 'TST': + campaign = 'test' + elif camp_id[0:1] == 'R': + campaign = 'repro' + str(int(camp_id[1:])) + + self.campaign = campaign + + # datetime specifications + start_epoch = orbcmb.sp3_combined['data']['epochs'][0] + year = start_epoch.year + month = start_epoch.month + day = start_epoch.day + hr = start_epoch.hour + minute = start_epoch.minute + second = start_epoch.second + gc = gpsCal() + gc.set_yyyy_MM_dd_hh_mm_ss(year,month,day,hr,minute,second) + doy = gc.ddd() + gpsweek = gc.wwww() + dow = gc.dow() + + self.gpsweek = gpsweek + self.dow = dow + self.year = year + self.doy = doy + self.hr = hr + + prod_weekdir = prod_rootdir + '/w' + str(gpsweek).zfill(4) + + # Full path and name of the eclipse report file + self.ecl_fname = (prod_weekdir + '/' + 'eclipse_' + str(year).zfill(4) + + str(doy).zfill(3)) + + # Full path and name of the SUM summary file + self.sum_fname = (prod_weekdir + '/' + cmb_name + str(vid) + camp_id + + sol_id + '_' + str(year).zfill(4) + str(doy).zfill(3) + + str(hr).zfill(2) + str(minute).zfill(2) + '_' + len_data + + '_' + len_data + '_' + 'SUM' + '.SUM') + + # Full path and name of the JSON summary file + self.sumjson_fname = (prod_weekdir+'/'+cmb_name + str(vid) + camp_id + + sol_id + '_' + str(year).zfill(4) + str(doy).zfill(3) + + str(hr).zfill(2) + str(minute).zfill(2) + + '_' + len_data + '_' + len_data + '_' + 'SUM' + '.JSON') + + if solution == 'ultra-rapid': + end_epoch = orbcmb.sp3_combined['data']['epochs'][-1] + year_end = end_epoch.year + month_end = end_epoch.month + day_end = end_epoch.day + hr_end = end_epoch.hour + minute_end = end_epoch.minute + second_end = end_epoch.second + gc_end = gpsCal() + gc_end.set_yyyy_MM_dd_hh_mm_ss(year_end,month_end,day_end,hr_end, + minute_end,second_end) + wwww_end = gc_end.wwww() + dow_end = gc_end.dow() + doy_end = gc_end.ddd() + + self.wwww_end = wwww_end + self.dow_end = dow_end + self.year_end = year_end + self.doy_end = doy_end + self.hr_end = hr_end + + # create list of satellite systems, blocks, satellites + systems = [] + blocks = [] + sats = [] + + for key in orbcmb.cen_rms: + if isinstance(key,tuple): + if isinstance(key[1],str): + if len(key[1]) == 1: + if key[1] not in systems: + systems.append(key[1]) + else: + if key[1] not in blocks: + blocks.append(key[1]) + elif isinstance(key[1],tuple): + if key[1] not in sats: + sats.append(key[1]) + + # Sort the systems list + all_known_sys = ['G','R','E','C','J'] + sys_known = [] + sys_unknown = [] + for sys in systems: + if sys in all_known_sys: + sys_known.append(sys) + else: + sys_unknown.append(sys) + sys_known.sort(key=lambda x: all_known_sys.index(x)) + sys_unknown.sort() + systems = sys_known + systems.extend(sys_unknown) + + # Sort the blocks list + all_known_blk = ['GPS','GLO','GAL','BDS','QZS'] + blk_known = [] + blk_unknown = [] + for blk in blocks: + if blk[0:3] in all_known_blk: + blk_known.append(blk) + else: + blk_unknown.append(blk) + blk_known.sort(key=lambda x: x[3:]) + blk_known.sort(key=lambda x: all_known_blk.index(x[0:3])) + blk_unknown.sort() + blocks = blk_known + blocks.extend(blk_unknown) + + # Sort the satellites list + sat_known = [] + sat_unknown = [] + for sat in sats: + if sat[0] in all_known_sys: + sat_known.append(sat) + else: + sat_unknown.append(sat) + 
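# Python's list.sort is stable, so sorting by PRN first and then by
+        # system yields satellites ordered by system and, within each
+        # system, by PRN
+        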
sat_known.sort(key=lambda x: x[1])
+        sat_known.sort(key=lambda x: all_known_sys.index(x[0]))
+        sat_unknown.sort(key=lambda x: x[1])
+        sat_unknown.sort(key=lambda x: x[0])
+        sats = sat_known
+        sats.extend(sat_unknown)
+
+        self.systems = systems
+        self.blocks = blocks
+        self.sats = sats
+
+        # Detailed info on weighted and unweighted centers
+        # per constellation/satellite
+        weighted_cens = {}
+        unweighted_cens = {}
+
+        # For each AC, classify each constellation as mostly weighted or
+        # mostly unweighted, and list any individual satellites that differ
+        # from that classification
+        satflags = orbcmb.satflags
+        for acname in orbcmb.weighted_sats:
+
+            # constellations that are fully weighted for this ac
+            for sys_id in systems:
+                sats_weighted = []
+                sats_unweighted = []
+                nsat_total = 0
+                nsat_weighted = 0
+                nsat_unweighted = 0
+                for sat in sats:
+                    if sat[0] == sys_id:
+                        nsat_total += 1
+                        if (satflags[acname,sat] not in ["missing_sat",
+                                "missing_blk","missing_sys","excluded_sat",
+                                "excluded_sat_all","unweighted_sys",
+                                "unweighted_sat"]):
+                            sats_weighted.append(sat)
+                            nsat_weighted += 1
+                        elif (satflags[acname,sat] in
+                                ["unweighted_sat","unweighted_sys"]):
+                            sats_unweighted.append(sat)
+                            nsat_unweighted += 1
+                if (nsat_weighted/nsat_total >= 0.5):
+                    if acname not in weighted_cens:
+                        weighted_cens[acname] = []
+                    weighted_cens[acname].append(sys_id)
+                    for sat in sats_unweighted:
+                        if acname not in unweighted_cens:
+                            unweighted_cens[acname] = []
+                        unweighted_cens[acname].append(sat)
+                elif (nsat_unweighted/nsat_total >= 0.5):
+                    if acname not in unweighted_cens:
+                        unweighted_cens[acname] = []
+                    unweighted_cens[acname].append(sys_id)
+                    for sat in sats_weighted:
+                        if acname not in weighted_cens:
+                            weighted_cens[acname] = []
+                        weighted_cens[acname].append(sat)
+
+        logger.debug(f"\nweighted_cens: {weighted_cens}")
+        logger.debug(f"\nunweighted_cens: {unweighted_cens}")
+
+        self.weighted_cens = weighted_cens
+        self.unweighted_cens = unweighted_cens
+
+
+    def eclipse(self,eop_file,eop_format):
+
+        """
+        create a report of eclipsing satellites
+
+        """
+
+        orbcmb = self.orbcmb
+
+        # constants
+        radius_earth = 6371000.0
+        radius_moon = 1737100.0
+
+        # Determine the satellites experiencing eclipses (needs an EOP
+        # file for converting planetary coordinates from ECI to ECEF)
+        ecl_earth = {}
+        ecl_moon = {}
+
+        if eop_format is not None:
+
+            # Read EOP data
+            time_utc = np.unique(orbcmb.epochs)
+            eopdata = io_data.EOPdata(eop_file,eop_format)
+            eopdata.get_eop(time_utc)
+            eop = eopdata.eop_interp
+            xp = eop[:,1]
+            yp = eop[:,2]
+            ut1_utc = eop[:,3]
+
+            # calculate the Sun positions
+            sun = planets.AnalyticalPosition(planet='sun',ref_frame='ECEF',
+                                             time_utc=time_utc,ut1_utc=ut1_utc,
+                                             xp=xp,yp=yp)
+            r_sun = sun.r
+
+            # calculate the Moon positions
+            moon = planets.AnalyticalPosition(planet='moon',ref_frame='ECEF',
+                                              time_utc=time_utc,ut1_utc=ut1_utc,
+                                              xp=xp,yp=yp)
+            r_moon = moon.r
+
+        # eclipse report file
+        ecl_file = open(self.ecl_fname,'w')
+        header = ("Eclipse events experienced by satellites for week "
+                  + str(self.gpsweek).zfill(4) + " day " + str(self.dow))
+        ecl_file.write(f"{'-'*len(header)}\n")
+        ecl_file.write(f"{header}\n")
+        ecl_file.write(f"{'-'*len(header)}\n\n\n")
+
+        # Earth-caused eclipses
+        header = ("1) Earth-caused eclipse events")
+        ecl_file.write(f" {header}\n")
+        ecl_file.write(f" {'-'*len(header)}\n\n")
+        header = ("  PRN   SVN   DUR.(MIN)        ENTERING             "
+                  "EXITING         Type  ")
+        ecl_file.write(f" {header}\n")
+        ecl_file.write(f" {'-'*len(header)}\n")
+        for sat in self.sats:
+            ind = np.where((orbcmb.satinfo[:,0]==sat[0])
+                           & (orbcmb.satinfo[:,1]==sat[1]))
+            r_sat = orbcmb.combined_orbit[ind]
+            t_sat = 
np.array(orbcmb.epochs)[ind]
+            ecl = Eclipse(r_sat,r_sun,'earth',radius_earth)
+            ecl.get_ecl_times(t_sat)
+            ecl_earth[sat] = ecl.eclipsing
+            if ecl.eclipsing != 'none':
+                for item in ecl.ecl_times:
+                    tfrom = item[0]
+                    tto = item[1]
+                    duration = ((tto-tfrom).seconds)/60.0
+                    durmin = int(duration)
+                    # seconds part of the duration (the original modulo by
+                    # durmin gave fractional minutes rather than seconds)
+                    dursec = int((duration - durmin)*60.0)
+                    line = (f" {sat[0]}{sat[1]:02}  {sat[0]}{sat[2]:03}   "
+                            f"{durmin:>3}:{dursec:02}    "
+                            f"{tfrom.year:>4}-{tfrom.month:02}-{tfrom.day:02} "
+                            f"{tfrom.hour:02}:{tfrom.minute:02}:{tfrom.second:02}"
+                            f"   "
+                            f"{tto.year:>4}-{tto.month:02}-{tto.day:02} "
+                            f"{tto.hour:02}:{tto.minute:02}:{tto.second:02}"
+                            f"   {ecl.eclipsing}")
+                    ecl_file.write(f" {line}\n")
+
+        # Moon-caused eclipses
+        ecl_file.write("\n\n")
+        header = ("2) Moon-caused eclipse events")
+        ecl_file.write(f" {header}\n")
+        ecl_file.write(f" {'-'*len(header)}\n\n")
+        header = ("  PRN   SVN   DUR.(MIN)        ENTERING             "
+                  "EXITING         Type  ")
+        ecl_file.write(f" {header}\n")
+        ecl_file.write(f" {'-'*len(header)}\n")
+        for sat in self.sats:
+            ind = np.where((orbcmb.satinfo[:,0]==sat[0])
+                           & (orbcmb.satinfo[:,1]==sat[1]))
+            r_sat = orbcmb.combined_orbit[ind]
+            t_sat = np.array(orbcmb.epochs)[ind]
+            ecl = Eclipse(r_sat,r_sun,'moon',radius_moon,r_moon)
+            ecl.get_ecl_times(t_sat)
+            ecl_moon[sat] = ecl.eclipsing
+            if ecl.eclipsing != 'none':
+                for item in ecl.ecl_times:
+                    tfrom = item[0]
+                    tto = item[1]
+                    duration = ((tto-tfrom).seconds)/60.0
+                    durmin = int(duration)
+                    # seconds part of the duration, as above
+                    dursec = int((duration - durmin)*60.0)
+                    line = (f" {sat[0]}{sat[1]:02}  {sat[0]}{sat[2]:03}   "
+                            f"{durmin:>3}:{dursec:02}    "
+                            f"{tfrom.year:>4}-{tfrom.month:02}-{tfrom.day:02} "
+                            f"{tfrom.hour:02}:{tfrom.minute:02}:{tfrom.second:02}"
+                            f"   "
+                            f"{tto.year:>4}-{tto.month:02}-{tto.day:02} "
+                            f"{tto.hour:02}:{tto.minute:02}:{tto.second:02}"
+                            f"   {ecl.eclipsing}")
+                    ecl_file.write(f" {line}\n")
+
+        self.ecl_earth = ecl_earth
+        self.ecl_moon = ecl_moon
+
+
+    def summary(self):
+
+        """
+        generate daily reports (or 2-day for ultra-rapid combinations)
+
+        """
+
+        orbcmb = self.orbcmb
+
+        # initiate summary file
+        sumfull = open(self.sum_fname,'w')
+
+        # dictionary for creating json file
+        sumdict = {}
+
+        header1 = (self.cmb_name + " " + self.campaign + " " + self.solution
+                   + " orbit combination for:")
+        header2 = ("week " + str(self.gpsweek).zfill(4) + " day "
+                   + str(self.dow) + " (year " + str(self.year)
+                   + " doy " + str(self.doy).zfill(3) + ") ")
+
+        if self.solution == 'ultra-rapid':
+            header3 = ("hour " + str(self.hr).zfill(2))
+            header4 = (" to week " + str(self.wwww_end).zfill(4) + " day "
+                       + str(self.dow_end).zfill(2) + " (year "
+                       + str(self.year_end) + " doy "
+                       + str(self.doy_end).zfill(3) + ") ")
+            header5 = ("hour " + str(self.hr_end).zfill(2))
+            header6 = ("The first 24 hours are observed, but the last 24 "
+                       "hours are predicted orbits")
+
+        if self.solution == 'ultra-rapid':
+            lenmax = max(len(header1),len(header2),len(header3),len(header4),
+                         len(header5),len(header6))
+            sumfull.write(f"{'-'*lenmax}\n")
+            sumfull.write(f"{header1}\n")
+            sumfull.write(f"{header2}")
+            sumfull.write(f"{header3}")
+            sumfull.write(f"{header4}")
+            sumfull.write(f"{header5}\n")
+            sumfull.write(f"{header6}\n")
+            sumfull.write(f"{'-'*lenmax}\n")
+        else:
+            lenmax = max(len(header1),len(header2))
+            sumfull.write(f"{'-'*lenmax}\n")
+            sumfull.write(f"{header1}\n")
+            sumfull.write(f"{header2}\n")
+            sumfull.write(f"{'-'*lenmax}\n")
+
+        if self.campaign == 'demonstration':
+            expt_comment = 
("* * * THIS COMBINATION IS STRICTLY EXPERIMENTAL " + "-- USE WITH CAUTION * * *") + sumfull.write(f"{expt_comment}\n\n") + + sumfull.write(f" Author: {self.author}\n") + sumfull.write(f" Contact: {self.contact}\n\n") + + sumdict['header'] = {} + sumdict['header']['title'] = (self.cmb_name + " " + self.campaign + " " + + self.solution + " orbit combination") + sumdict['header']['gps week'] = self.gpsweek + sumdict['header']['day of week'] = self.dow + sumdict['header']['year'] = self.year + sumdict['header']['day of year'] = self.doy + sumdict['header']['author'] = self.author + sumdict['header']['contact'] = self.contact + + # solution/centers list + solution_names = {} + for sp3file in self.sp3_subm_list: + acname = sp3file[-38:-35] + solution_names[acname] = sp3file[-38:] + centers = [] + for acname in solution_names: + centers.append(acname) + centers.sort() + if "IGV" in centers: + centers.remove("IGV") + centers.append("IGV") + + sumfull.write(" All AC solutions:\n") + if solution_names: + for ac in centers: + if ac == "IGV": + sumfull.write(f" - {ac}") + else: + sumfull.write(f" - {ac} = {solution_names[ac]}") + if ac in self.ac_acronyms: + sumfull.write(f" : {self.ac_acronyms[ac]}\n") + else: + sumfull.write("\n") + else: + sumfull.write(" - None\n") + sumfull.write("\n\n") + + sumfull.write(" AC solutions used in the combination:\n\n") + if self.weighted_cens: + header = " AC | Sat. System or PRN/SVN " + sumfull.write(f" {header}\n") + lines = [] + len_lines = [] + aclist = [key for key, value in self.weighted_cens.items() + if isinstance(value, list) and len(value) > 0] + aclist.sort() + lines.append("-") + len_lines.append(len(header)) + for acname in aclist: + line = " " + acname + " |" + c = 0 + for item in self.weighted_cens[acname]: + c += 1 + if isinstance(item,str): + line = line + " " + item + else: + line = (line + " " + item[0] + str(item[1]).zfill(2) + + "/" + item[0] + str(item[2]).zfill(3)) + if c == 7: + lines.append(line) + len_lines.append(len(line)) + line = " " + acname + " " + c = 0 + if c > 0: + lines.append(line) + len_lines.append(len(line)) + lenmax = max(len_lines) + for line in lines: + if '-' in line: + sumfull.write(f" {'-'*lenmax}\n") + else: + sumfull.write(f" {line}\n") + else: + sumfull.write(" No weighted center!\n") + sumfull.write("\n\n") + + sumfull.write(" AC solutions not used in the combination (for comparison):\n\n") + if self.unweighted_cens: + header = " AC | Sat. 
System or PRN/SVN " + sumfull.write(f" {header}\n") + lines = [] + len_lines = [] + aclist = [key for key, value in self.unweighted_cens.items() + if isinstance(value, list) and len(value) > 0] + aclist.sort() + if "IGV" in aclist: + aclist.remove("IGV") + aclist.append("IGV") + lines.append("-") + len_lines.append(len(header)) + for acname in aclist: + line = " " + acname + " |" + c = 0 + for item in self.unweighted_cens[acname]: + c += 1 + if isinstance(item,str): + line = line + " " + item + else: + line = (line + " " + item[0] + str(item[1]).zfill(2) + + "/" + item[0] + str(item[2]).zfill(3)) + if c == 7: + lines.append(line) + len_lines.append(len(line)) + line = " " + acname + " " + c = 0 + if c > 0: + lines.append(line) + len_lines.append(len(line)) + lenmax = max(len_lines) + for line in lines: + if '-' in line: + sumfull.write(f" {'-'*lenmax}\n") + else: + sumfull.write(f" {line}\n") + else: + sumfull.write(" None\n") + sumfull.write("\n\n") + + sumfull.write(" Combined solution:\n") + sumfull.write(f" - {self.cmb_sp3_filename[0:3]} = {self.cmb_sp3_filename}\n\n") + + sumfull.write(f" IGS satellite metadata file used:\n") + sumfull.write(f" - {self.sat_metadata_file.split('/')[-1]}\n\n") + sumfull.write(f" AC weighting method:\n") + sumfull.write(f" - {orbcmb.cen_wht_method}\n\n") + sumfull.write(f" Orbit sampling for combination:\n") + orb_smp = int(orbcmb.sample_rate/60) + sumfull.write(f" - {orb_smp} minutes\n\n\n\n") + + sumdict['header']['weighted solutions'] = self.weighted_cens + sumdict['header']['unweighted solutions for comparison'] = self.unweighted_cens + sumdict['header']['combined solution'] = {} + sumdict['header']['combined solution'][self.cmb_sp3_filename[0:3]] = self.cmb_sp3_filename + sumdict['header']['ac acronyms'] = self.ac_acronyms + sumdict['header']['satellite metadata'] = self.sat_metadata_file.split('/')[-1] + sumdict['header']['AC weighting method'] = orbcmb.cen_wht_method + sumdict['header']['orbit interval'] = orb_smp + + # Outliers, exclusions and pre-processing + sumdict['preprocess'] = {} + sumdict['preprocess']['excluded due to high rms sats'] = [] + sumdict['preprocess']['unweighted due to too many high rms sats'] = [] + sumdict['preprocess']['unweighted due to high transformations'] = {} + sumdict['preprocess']['excluded sats due to low number of centers'] = [] + sumdict['preprocess']['DV maneuvering satellites'] = [] + + sumdict['events'] = {} + sumdict['events']['E'] = [] + sumdict['events']['M'] = [] + sumdict['events']['V'] = [] + + header = "1) Outliers, exclusions and pre-processing:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n\n") + + # Sat/cen excluded due to high rms + header = "1.1) Satellites excluded from AC solutions due to high rms:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + if orbcmb.exclude_highrms: + header = " AC | PRN | SVN " + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n") + for item in orbcmb.exclude_highrms: + line = (" " + item[0][0:3] + " | " + item[1][0] + + str(item[1][1]).zfill(2) + " | " + item[1][0] + + str(item[1][2]).zfill(3)) + sumfull.write(f" {line}\n") + sumdict['preprocess']['excluded due to high rms sats'].append((item[0][0:3], + item[1][0]+str(item[1][1]).zfill(2), + item[1][0]+str(item[1][2]).zfill(3))) + else: + sumfull.write(" No exclusions!\n") + sumfull.write("\n\n") + + # Centers unweighted due to too many sat exclusions + header = ("1.2) AC solutions unweighted due to too many satellite " + + f"exclusions (more 
than {orbcmb.max_high_satrms}):") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + if orbcmb.unweighted_max_high_satrms: + for item in orbcmb.unweighted_max_high_satrms: + line = item[0:3] + sumfull.write(f" {line}\n") + sumdict['preprocess']['unweighted due to too many high rms sats'].append(line) + else: + sumfull.write(" No AC solution unweighted!\n") + sumfull.write("\n\n") + + # Centers unweighted due to high transformation parameters + header1 = ("1.3) AC solutions unweighted due to high Helmert " + + f"transformation parameters with") + header2 = " respect to the combined orbit:" + sumfull.write(f" {header1}\n{header2}\n") + sumfull.write(f" {'-'*len(header1)}\n\n") + trn_params = ['Tx','TY','TZ','RX','RY','RZ','SC'] + trn_units = ['mm','mm','mm','uas','uas','uas','ppb'] + if orbcmb.unweighted_high_tra: + for item in orbcmb.unweighted_high_tra: + acname = item[0] + trn_param = trn_params[item[1]] + trn_unit = trn_units[item[1]] + trn_val = orbcmb.transform_params[acname][item[1]] + line = (f"{acname[0:3]} : {trn_param} = {trn_val*1e6:6.2f}" + f" {trn_unit}") + sumfull.write(f" {line}\n") + sumdict['preprocess']['unweighted due to high transformations'][ + str((acname[0:3],trn_param)) + ] = trn_val*1000.0 + else: + sumfull.write(" No AC solution unweighted!\n") + sumfull.write("\n\n") + + # Sat excluded due to low number of centers + header = ("1.4) Satellites excluded due to low number of AC solutions " + + f"(lower than {orbcmb.min_numcen}):") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + if orbcmb.exclude_lowcen: + header = " PRN | SVN " + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n") + for item in orbcmb.exclude_lowcen: + line = (" " + item[0] + str(item[1]).zfill(2) + " | " + item[0] + + str(item[2]).zfill(3)) + sumfull.write(f" {line}\n") + sumdict['preprocess']['excluded sats due to low number of centers'].append( + (item[0] + str(item[1]).zfill(2), + item[0] + str(item[2]).zfill(3))) + else: + sumfull.write(" No exclusions!\n") + sumfull.write("\n\n") + + # maneuvering satellites + header = ("1.5) Satellites experiencing Delta-V maneuvers: ") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + if self.year >= 2004: + comment = ("Only COD solutions, which model maneuvers, are" + + " used after a Delta-V maneuver\n" + + " until the end of day, if available.") + else: + comment = ("Solutions are removed after a Delta-V maneuver\n" + + "until the end of day.") + sumfull.write(f" {comment}\n\n") + if self.rm_dv and self.dvsats: + header = " PRN | SVN | from | to " + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n") + for item in self.dvsats: + line = (" " + item[2] + " | " + item[3] + " | " + + str(item[0].year) + "_" + str(item[0].month).zfill(2) + "_" + + str(item[0].day).zfill(2) + " " + str(item[0].hour).zfill(2) + + ":" + str(item[0].minute).zfill(2) + " | " + + str(item[1].year) + "_" + str(item[1].month).zfill(2) + "_" + + str(item[1].day).zfill(2) + " " + str(item[1].hour).zfill(2) + + ":" + str(item[1].minute).zfill(2)) + sumfull.write(f" {line}\n") + sumdict['preprocess']['DV maneuvering satellites'].append( + [item[2],item[3],item[0],item[1]]) + else: + sumfull.write(" No Delta-V maneuvers!\n") + sumfull.write("\n\n") + + # pre-alignment of the orbits + if any(self.rf_align): + header = ("1.6) Pre-alignment of the orbits: ") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + comment = ("transformation parameters 
applied to orbit solutions prior" + + " to the combination.\n\n" + + " The transformation parameters are estimated during the" + + " SINEX combination process,\n" + + " performed by the IGS Reference Frame Coordinator.") + sumfull.write(f" {comment}\n\n") + if self.year < 2017 and self.campaign == 'repro3': + comment = ("Additional corrections were added to the Z rotations" + + " (RZ) applied to the MIT\n" + + " orbit solutions, based on the differences between their" + + " a priori and observed\n" + + " values of UT1.") + sumfull.write(f" {comment}\n\n") + trn_header1 = ['Tx','TY','TZ','RX','RY','RZ','SC'] + trn_header1_aln = [] + sumdict['preprocess']['pre-alignment'] = {} + for item in trn_header1: + trn_header1_aln.append(item.center(8)) + trn_header2 = [' [mm]',' [mm]',' [mm]','[uas]','[uas]','[uas]','[ppb]'] + trn_header2_aln = [] + for item in trn_header2: + trn_header2_aln.append(item.center(8)) + header1 = f" {'|'.join([item for item in trn_header1_aln])}" + header2 = f" AC |{'|'.join([item for item in trn_header2_aln])}" + sumfull.write(f" {header1}\n") + sumfull.write(f" {header2}\n") + sumfull.write(f" {'-'*len(header2)}\n") + + for acname in self.rf_transfo[self.dow]: + trn = [] + sumdict['preprocess']['pre-alignment'][acname] = {} + sumdict['preprocess']['pre-alignment'][acname]['T'] = [] + sumdict['preprocess']['pre-alignment'][acname]['R'] = [] + sumdict['preprocess']['pre-alignment'][acname]['S'] = [] + for c,item in enumerate(self.rf_transfo[self.dow][acname]): + if c < 3: + trn_str = f"{item*1000:6.1f}" + if abs(item*1000) > 999.9: + trn_str = f"{999:6.0f}" + sumdict['preprocess']['pre-alignment'][acname]['T'].append(item*1000) + elif c < 6: + trn_str = f"{item*1e6*3600*180/np.pi:6.0f}" + if abs(item*1e6*3600*180/np.pi) > 99999: + trn_str = f"{99999:6.0f}" + sumdict['preprocess']['pre-alignment'][acname]['R'].append( + item*1000*3600*180/np.pi) + else: + trn_str = f"{(item-1.0)*1e9:6.2f}" + if abs((item-1.0)*1e9) > 999.99: + trn_str = f"{999:6.0f}" + sumdict['preprocess']['pre-alignment'][acname]['S'].append( + (item-1.0)*1e9) + trn.append(trn_str) + line = f" {acname} | {' | '.join([item for item in trn])}" + sumfull.write(f" {line}\n") + sumfull.write("\n\n") + sumfull.write("\n") + + # center weights + weighted_centers = orbcmb.weighted_centers + unweighted_centers = orbcmb.unweighted_centers + weighted_sats = orbcmb.weighted_sats + unweighted_sats = orbcmb.unweighted_sats + cen_wht_method = orbcmb.cen_wht_method + + centers_str = [] + for ac in weighted_centers: + centers_str.append(ac+ ' ') + centers_str = centers_str + unweighted_centers + + sumdict['AC weights'] = {} + header = f"2) AC weights [%] -- {cen_wht_method} weighting:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + + if cen_wht_method == 'global': + logger.info(f"\nCenter weights ({cen_wht_method} weighting):\n") + logger.info(f" | {' | '.join([ac for ac in centers_str])}") + logger.info(f"-----------{'-------'*len(centers_str)}") + header = f" | {' | '.join([ac for ac in centers])}" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*(len(header)+3)}\n") + + wht = [] + whtfull = [] + sumwht = 0 + for acname in centers: + if acname in weighted_centers: + sumwht += orbcmb.cen_weights[acname] + for acname in centers: + if acname in weighted_centers: + wht_percent = 100.0*orbcmb.cen_weights[acname]/sumwht + else: + wht_percent = 0.0 + sumdict['AC weights'][acname] = wht_percent + wht_str = f"{wht_percent:^4.0f}" + wht.append(wht_str) + whtfull_str = f"{wht_percent:^7.3f}" + 
+
+        # center weights
+        weighted_centers = orbcmb.weighted_centers
+        unweighted_centers = orbcmb.unweighted_centers
+        weighted_sats = orbcmb.weighted_sats
+        unweighted_sats = orbcmb.unweighted_sats
+        cen_wht_method = orbcmb.cen_wht_method
+
+        centers_str = []
+        for ac in weighted_centers:
+            centers_str.append(ac + ' ')
+        centers_str = centers_str + unweighted_centers
+
+        sumdict['AC weights'] = {}
+        header = f"2) AC weights [%] -- {cen_wht_method} weighting:"
+        sumfull.write(f" {header}\n")
+        sumfull.write(f" {'-'*len(header)}\n\n")
+
+        if cen_wht_method == 'global':
+            logger.info(f"\nCenter weights ({cen_wht_method} weighting):\n")
+            logger.info(f" | {' | '.join(centers_str)}")
+            logger.info(f"-----------{'-------'*len(centers_str)}")
+            header = f" | {' | '.join(centers)}"
+            sumfull.write(f" {header}\n")
+            sumfull.write(f" {'-'*(len(header)+3)}\n")
+
+            wht = []
+            whtfull = []
+            sumwht = 0
+            for acname in centers:
+                if acname in weighted_centers:
+                    sumwht += orbcmb.cen_weights[acname]
+            for acname in centers:
+                if acname in weighted_centers:
+                    wht_percent = 100.0*orbcmb.cen_weights[acname]/sumwht
+                else:
+                    wht_percent = 0.0
+                sumdict['AC weights'][acname] = wht_percent
+                wht_str = f"{wht_percent:^4.0f}"
+                wht.append(wht_str)
+                whtfull_str = f"{wht_percent:^7.3f}"
+                whtfull.append(whtfull_str)
+            logger.info(f" Weight | {' | '.join(wht)}")
+            line = f" Weight | {' | '.join(whtfull)}"
+            sumfull.write(f" {line}\n\n\n")
+
+        elif cen_wht_method == 'by_constellation':
+            logger.info(f"\nCenter weights (%) ({cen_wht_method} weighting):\n")
+            logger.info(f" Sat. System | {' | '.join(centers)}")
+            logger.info(f"--------------{'------'*len(centers)}")
+            header = " Sat."
+            sumfull.write(f" {header}\n")
+            header = f" System | {' | '.join(centers)}"
+            sumfull.write(f" {header}\n")
+            sumfull.write(f" {'-'*(len(header)+3)}\n")
+
+            for sys in self.systems:
+                wht = []
+                whtfull = []
+                sumwht = 0
+                sumdict['AC weights'][sys] = {}
+                for acname in centers:
+                    if (acname,sys) in orbcmb.cen_weights:
+                        sumwht += orbcmb.cen_weights[acname,sys]
+                for acname in centers:
+                    if (acname,sys) in orbcmb.cen_weights:
+                        wht_percent = 100.0*orbcmb.cen_weights[acname,sys]/sumwht
+                    else:
+                        wht_percent = 0.0
+                    sumdict['AC weights'][sys][acname] = wht_percent
+                    wht_str = f"{wht_percent:^4.0f}"
+                    whtfull_str = f"{wht_percent:7.3f}"
+                    wht.append(wht_str)
+                    whtfull.append(whtfull_str)
+                logger.info(f" {sys} | "
+                            f"{' | '.join(wht)}")
+                line = (f" {sys} | " +
+                        f"{' | '.join(whtfull)}")
+                sumfull.write(f" {line}\n")
+
+        elif cen_wht_method == 'by_block':
+            logger.info(f"\nCenter weights (%) ({cen_wht_method} weighting):\n")
+            logger.info(f" Block | {' | '.join(centers)}")
+            logger.info(f"-----------------{'------'*len(centers)}")
+            header = f" Block | {' | '.join(centers)}"
+            sumfull.write(f" {header}\n")
+            sumfull.write(f" {'-'*(len(header)+3)}\n")
+            for blk in self.blocks:
+                wht = []
+                whtfull = []
+                sumwht = 0
+                sumdict['AC weights'][blk] = {}
+                for acname in centers:
+                    if (acname,blk) in orbcmb.cen_weights:
+                        sumwht += orbcmb.cen_weights[acname,blk]
+                for acname in centers:
+                    if (acname,blk) in orbcmb.cen_weights:
+                        wht_percent = 100.0*orbcmb.cen_weights[acname,blk]/sumwht
+                    else:
+                        wht_percent = 0.0
+                    sumdict['AC weights'][blk][acname] = wht_percent
+                    wht_str = f"{wht_percent:^4.0f}"
+                    whtfull_str = f"{wht_percent:7.3f}"
+                    wht.append(wht_str)
+                    whtfull.append(whtfull_str)
+                logger.info(f"{blk:15} | {' | '.join(wht)}")
+                line = (f"{blk:15} | " +
+                        f"{' | '.join(whtfull)}")
+                sumfull.write(f" {line}\n")
+
+        elif cen_wht_method == 'by_sat':
+            logger.info("\n\nCenter weight tables separated for each block\n")
+            for blk in self.blocks:
+                logger.info(f"\n\nCenter weights (%) for block {blk} "
+                            f"({cen_wht_method} weighting):\n")
+                logger.info(f" PRN | SVN | {' | '.join(centers)}")
+                logger.info(f"-------------{'------'*len(centers)}")
+                for sat in self.sats:
+                    sat_id = self.sat_metadata.get_sat_identifier(sat[0],sat[2])
+                    if sat_id.block == blk:
+                        wht = []
+                        whtfull = []
+                        sumwht = 0
+                        for acname in centers:
+                            if (acname,sat[0],sat[1],sat[2]) in orbcmb.cen_weights:
+                                sumwht += orbcmb.cen_weights[acname,sat[0],sat[1],sat[2]]
+                        for acname in centers:
+                            if (acname,sat[0],sat[1],sat[2]) in orbcmb.cen_weights:
+                                wht_percent = (100.0*orbcmb.cen_weights[acname,sat[0],sat[1],sat[2]]
+                                               /sumwht)
+                                wht_str = f"{wht_percent:^4.0f}"
+                                whtfull_str = f"{wht_percent:7.3f}"
+                            else:
+                                wht_str = "    "
+                                whtfull_str = "       "
+                            wht.append(wht_str)
+                            whtfull.append(whtfull_str)
+                        prn = sat[0] + str(sat[1]).zfill(2)
+                        svn = sat[0] + str(sat[2]).zfill(3)
+                        logger.info(f" {prn} | {svn} | "
+                                    f"{' | '.join(wht)}")
logger.info(f"\n\nCenter weights (%) for all satellites " + f"({cen_wht_method} weighting):\n") + logger.info(f" PRN | SVN | {' | '.join([ac for ac in centers])}") + logger.info(f"-------------{'------'*len(centers)}") + header = f" PRN | SVN | {' | '.join([ac for ac in centers])}" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*(len(header)+2)}\n") + + for sat in self.sats: + wht = [] + whtfull = [] + sumwht = 0 + prn = sat[0] + str(sat[1]).zfill(2) + svn = sat[0] + str(sat[2]).zfill(3) + sumdict['AC weights'][str((prn,svn))] = {} + for acname in centers: + if (acname,sat[0],sat[1],sat[2]) in orbcmb.cen_weights: + sumwht += orbcmb.cen_weights[acname,sat[0],sat[1],sat[2]] + for acname in centers: + if (acname,sat[0],sat[1],sat[2]) in orbcmb.cen_weights: + wht_percent = (100.0*orbcmb.cen_weights + [acname,sat[0],sat[1],sat[2]]/sumwht) + else: + wht_percent = 0.0 + sumdict['AC weights'][str((prn,svn))][acname] = wht_percent + wht_str = f"{wht_percent:^4.0f}" + whtfull_str = f"{wht_percent:6.2f}" + wht.append(wht_str) + whtfull.append(whtfull_str) + logger.info(f" {prn} | {svn} | " + f"{' | '.join([item for item in wht])}") + line = (f" {prn} | {svn} | " + + f"{' | '.join([item for item in whtfull])}") + sumfull.write(f" {line}\n") + sumfull.write("\n\n") + + logger.info("\n") + sumfull.write("\n") + + # RMS statistics + header = f"3) Orbit combination RMS statistics:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n\n") + sumdict['RMS'] = {} + + logger.info("\nOveral RMS statistics of the centers:\n") + logger.info(f" | {' | '.join([ac for ac in centers_str])}") + logger.info(f"---------{'---------'*len(centers_str)}") + header = "3.1) Overall RMS [mm] of the ACs:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + header = f" | {' | '.join([ac for ac in centers])}" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*(len(header)+2)}\n") + + rms = [] + rmsfull = [] + sumdict['RMS'] = {} + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + sumdict['RMS'][acname] = orbcmb.cen_rms[ac]*1000 + rms_str = f"{orbcmb.cen_rms[ac]*1000:^6.0f}" + rms.append(rms_str) + rmsfull_str = f"{orbcmb.cen_rms[ac]*1000:6.1f}" + if orbcmb.cen_rms[ac]*1000 > 9999.9: + rmsfull_str = f"{9999:6.0f}" + rmsfull.append(rmsfull_str) + logger.info(f"RMS(mm) | {' | '.join([item for item in rms])}") + line = f" RMS | {' | '.join([item for item in rmsfull])}" + sumfull.write(f" {line}\n\n\n") + + logger.info("\n\nCenter RMS statistics (mm) by constellation:\n") + logger.info(f" Sat. System | {' | '.join([ac for ac in centers_str])}") + logger.info(f"-------------{'---------'*len(centers_str)}") + header = "3.2) AC RMS [mm] by constellation:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + header = f" Sat." 
+ sumfull.write(f" {header}\n") + header = f" System | {' | '.join([ac for ac in centers])}" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*(len(header)+2)}\n") + + for sys in self.systems: + rms = [] + rmsfull = [] + sumdict['RMS'][sys] = {} + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,sys) in orbcmb.cen_rms: + rms_str = f"{orbcmb.cen_rms[ac,sys]*1000:^6.0f}" + rmsfull_str = f"{orbcmb.cen_rms[ac,sys]*1000:6.1f}" + if orbcmb.cen_rms[ac,sys]*1000 > 9999.9: + rmsfull_str = f"{9999:6.0f}" + sumdict['RMS'][sys][acname] = orbcmb.cen_rms[ac,sys]*1000 + else: + rms_str = " " + rmsfull_str = " " + rms.append(rms_str) + rmsfull.append(rmsfull_str) + logger.info(f" {sys} | " + f"{' | '.join([item for item in rms])}") + line = (f" {sys} | " + + f"{' | '.join([item for item in rmsfull])}") + sumfull.write(f" {line}\n") + sumfull.write("\n\n") + + logger.info("\n\nCenter RMS statistics (mm) by block:\n") + logger.info( + f" Block | {' | '.join([ac for ac in centers_str])}") + logger.info(f"----------------{'---------'*len(centers_str)}") + header = "3.3) AC RMS [mm] by block:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + header = f" Block | {' | '.join([ac for ac in centers])}" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*(len(header)+2)}\n") + + for blk in self.blocks: + rms = [] + rmsfull = [] + sumdict['RMS'][blk] = {} + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,blk) in orbcmb.cen_rms: + rms_str = f"{orbcmb.cen_rms[ac,blk]*1000:^6.0f}" + rmsfull_str = f"{orbcmb.cen_rms[ac,blk]*1000:6.1f}" + if orbcmb.cen_rms[ac,blk]*1000 > 9999.9: + rmsfull_str = f"{9999:6.0f}" + sumdict['RMS'][blk][acname] = orbcmb.cen_rms[ac,blk]*1000 + else: + rms_str = " " + rmsfull_str = " " + rms.append(rms_str) + rmsfull.append(rmsfull_str) + logger.info(f"{blk:15} | {' | '.join([item for item in rms])}") + line = (f"{blk:15}| " + + f"{' | '.join([item for item in rmsfull])}") + sumfull.write(f" {line}\n") + sumfull.write("\n\n") + + logger.info("\n\nCenter RMS statistics (mm) for all satellites:\n") + logger.info(f" PRN | SVN | " + f"{' | '.join([ac for ac in centers_str])} | IGS") + logger.info(f"------------{'---------'*(len(centers_str)+1)}") + header = "3.4) AC RMS [mm] by satellite:" + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + comment = ("Event codes:\n" + + " E: satellite eclipsing caused by Earth\n" + + " M: satellite eclipsing caused by Moon\n" + + " V: satellite maneuvering (Delta-V)") + sumfull.write(f" {comment}\n\n") + header = (f" PRN | SVN | {' | '.join([ac for ac in centers])}" + + f" | Overall | event ") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n") + + for sat in self.sats: + rms = [] + rmsfull = [] + prn = sat[0] + str(sat[1]).zfill(2) + svn = sat[0] + str(sat[2]).zfill(3) + sumdict['RMS'][str((prn,svn))] = {} + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,sat) in orbcmb.cen_rms: + rms_str = f"{orbcmb.cen_rms[ac,sat]*1000:^6.0f}" + rmsfull_str = f"{orbcmb.cen_rms[ac,sat]*1000:6.1f}" + if orbcmb.cen_rms[ac,sat]*1000 > 9999.9: + rmsfull_str = f"{9999:6.0f}" + sumdict['RMS'][str((prn,svn))][acname] = orbcmb.cen_rms[ac,sat]*1000 + else: + rms_str = " " + rmsfull_str = " " + rms.append(rms_str) + rmsfull.append(rmsfull_str) + if sat in orbcmb.sat_rms: + rms_str = f"{orbcmb.sat_rms[sat]*1000:^6.0f}" + rmsfull_str = 
f"{orbcmb.sat_rms[sat]*1000:7.1f}" + if orbcmb.sat_rms[sat]*1000 > 99999.9: + rmsfull_str = f"{99999:7.0f}" + sumdict['RMS'][str((prn,svn))]["Overall"] = orbcmb.sat_rms[sat]*1000 + else: + rms_str = " " + rmsfull_str = " " + rms.append(rms_str) + rmsfull.append(rmsfull_str) + + event = list(" ") + if (hasattr(self,'ecl_earth') and sat in self.ecl_earth + and self.ecl_earth[sat] in ['full','partial']): + event[1] = "E" + sumdict['events']['E'].append(str((prn,svn))) + if (hasattr(self,'ecl_moon') and sat in self.ecl_moon + and self.ecl_moon[sat] in ['full','partial']): + event[2] = "M" + sumdict['events']['M'].append(str((prn,svn))) + if self.rm_dv and self.dvsats: + for item in self.dvsats: + if (item[2] == prn): + event[3] = "V" + sumdict['events']['V'].append(str((prn,svn))) + event_str = "".join(event) + logger.info(f" {prn} | {svn} | {' | '.join([item for item in rms])}") + line = (f" {prn} | {svn} | " + + f"{' | '.join([item for item in rmsfull])}" + + f" | {event_str}") + sumfull.write(f" {line}\n") + + logger.info("\n") + + logger.info("\n") + sumfull.write("\n") + + logger.info("\nOveral absolute deviation statistics of the centers:\n") + logger.info(f" | {' | '.join([ac for ac in centers_str])}") + logger.info(f"---------{'---------'*len(centers_str)}") + abdev = [] + abdevfull = [] + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + abdev_str = f"{orbcmb.cen_abdev[ac]*1000:^6.0f}" + abdev.append(abdev_str) + abdevfull_str = f"{orbcmb.cen_abdev[ac]*1000:^9.3f}" + abdevfull.append(abdevfull_str) + logger.info(f"ABDEV(mm)| {' | '.join([item for item in abdev])}") + + logger.info("\n\nCenter absolute deviation statistics (mm) by constellation:\n") + logger.info(f" Sat. System | {' | '.join([ac for ac in centers_str])}") + logger.info(f"-------------{'---------'*len(centers_str)}") + for sys in self.systems: + abdev = [] + abdevfull = [] + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,sys) in orbcmb.cen_abdev: + abdev_str = f"{orbcmb.cen_abdev[ac,sys]*1000:^6.0f}" + abdevfull_str = f"{orbcmb.cen_abdev[ac,sys]*1000:^9.3f}" + else: + abdev_str = " " + abdevfull_str = " " + abdev.append(abdev_str) + abdevfull.append(abdevfull_str) + logger.info(f" {sys} | " + f"{' | '.join([item for item in abdev])}") + + logger.info("\n\nCenter absolute deviation statistics (mm) by block:\n") + logger.info( + f" Block | {' | '.join([ac for ac in centers_str])}") + logger.info(f"----------------{'---------'*len(centers_str)}") + for blk in self.blocks: + abdev = [] + abdevfull = [] + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,blk) in orbcmb.cen_abdev: + abdev_str = f"{orbcmb.cen_abdev[ac,blk]*1000:^6.0f}" + abdevfull_str = f"{orbcmb.cen_abdev[ac,blk]*1000:^9.3f}" + else: + abdev_str = " " + abdevfull_str = " " + abdev.append(abdev_str) + abdevfull.append(abdevfull_str) + logger.info(f"{blk:15} | {' | '.join([item for item in abdev])}") + + logger.info("\n\nCenter absolute deviation statistics (mm) for all " + "satellites:\n") + logger.info( + f" PRN | SVN | {' | '.join([ac for ac in centers_str])} | IGS") + logger.info(f"------------{'---------'*(len(centers_str)+1)}") + for sat in self.sats: + abdev = [] + abdevfull = [] + for acname in centers: + if acname in orbcmb.cen_rms: + ac = acname + else: + ac = acname + "c" + if (ac,sat) in orbcmb.cen_abdev: + abdev_str = f"{orbcmb.cen_abdev[ac,sat]*1000:^6.0f}" + abdevfull_str = 
f"{orbcmb.cen_abdev[ac,sat]*1000:^9.3f}" + else: + abdev_str = " " + abdevfull_str = " " + abdev.append(abdev_str) + abdevfull.append(abdevfull_str) + if sat in orbcmb.sat_abdev: + abdev_str = f"{orbcmb.sat_abdev[sat]*1000:^6.0f}" + abdevfull_str = f"{orbcmb.sat_abdev[sat]*1000:^9.3f}" + else: + abdev_str = " " + abdevfull_str = " " + abdev.append(abdev_str) + abdevfull.append(abdevfull_str) + prn = sat[0] + str(sat[1]).zfill(2) + svn = sat[0] + str(sat[2]).zfill(3) + logger.info(f" {prn} | {svn} | {' | '.join([item for item in abdev])}") + logger.info("\n") + + sumfull.write("\n\n") + + # Helmert parameter estimates + header = ("4) 7-parameter transformations of ACs with respect to the " + "combined orbit:") + sumfull.write(f" {header}\n") + sumfull.write(f" {'-'*len(header)}\n\n") + trn_header1 = ['Tx','TY','TZ','RX','RY','RZ','SC'] + trn_header1_aln = [] + sumdict['transformation parameters'] = {} + for item in trn_header1: + trn_header1_aln.append(item.center(8)) + trn_header2 = [' [mm]',' [mm]',' [mm]','[uas]','[uas]','[uas]','[ppb]'] + trn_header2_aln = [] + for item in trn_header2: + trn_header2_aln.append(item.center(8)) + header1 = f" {'|'.join([item for item in trn_header1_aln])}" + header2 = f" AC |{'|'.join([item for item in trn_header2_aln])}" + sumfull.write(f" {header1}\n") + sumfull.write(f" {header2}\n") + sumfull.write(f" {'-'*len(header2)}\n") + + for acname in centers: + if acname in orbcmb.transform_params: + ac = acname + else: + ac = acname + "c" + trn = [] + sumdict['transformation parameters'][acname] = {} + sumdict['transformation parameters'][acname]['T'] = [] + sumdict['transformation parameters'][acname]['R'] = [] + sumdict['transformation parameters'][acname]['S'] = [] + c = 0 + for item in orbcmb.transform_params[ac]: + c += 1 + if c<4: + trn_str = f"{item*1000:6.1f}" + if abs(item*1000) > 999.9: + trn_str = f"{999:6.0f}" + trn.append(trn_str) + sumdict['transformation parameters'][acname]['T'].append(item*1000) + elif c<7: + trn_str = f"{item*1e6:6.0f}" + if abs(item*1e6) > 99999: + trn_str = f"{99999:6.0f}" + trn.append(trn_str) + sumdict['transformation parameters'][acname]['R'].append(item*1000) + else: + trn_str = f"{item*1000:6.2f}" + if abs(item*1000) > 999.99: + trn_str = f"{999:6.0f}" + trn.append(trn_str) + sumdict['transformation parameters'][acname]['S'].append(item*1000) + + line = f" {acname} | {' | '.join([item for item in trn])}" + sumfull.write(f" {line}\n") + + sumfull.write("\n") + sumfull.close() + + # write out summary in json format + with open(self.sumjson_fname,'w',encoding='utf-8') as outfile: + json.dump(sumdict,outfile,ensure_ascii=False, indent=4,default=str) + diff --git a/rocs/rotation.py b/rocs/rotation.py new file mode 100755 index 0000000..f846c49 --- /dev/null +++ b/rocs/rotation.py @@ -0,0 +1,115 @@ +# Rotation matrix of angle theta around the given axis and derivative of it + +import numpy as np +import logging +import rocs.checkutils as checkutils + + +logger = logging.getLogger(__name__) + + +class Rotation: + + def __init__(self,theta,axis): + + # theta: rotation angle in radians + # axis: 1, 2 or 3 + + # Check the given attributes + checkutils.check_scalar(theta) + + if axis not in ([1,2,3]): + logger.error("The rotation axis rotation.axis can only " + "be 1, 2 or 3",stack_info=True) + raise ValueError("The rotation axis rotation.axis can only " + "be 1, 2 or 3") + + self.theta = theta + self.axis = axis + + + # Create rotation matrix along axis + # + # R: a 3-by-3 rotation matrix + + # |1 0 0 | + # R1 = |0 cos(theta1) 
diff --git a/rocs/settings.py b/rocs/settings.py
new file mode 100755
index 0000000..381bf22
--- /dev/null
+++ b/rocs/settings.py
@@ -0,0 +1,163 @@
+# Module for setting up the configurations
+
+import logging
+import yaml
+import collections.abc
+
+
+logger = logging.getLogger(__name__)
+
+
+# function to recursively update a nested dictionary with another dictionary
+def update(d, u):
+    """
+    Recursively update a nested dictionary with another dictionary
+
+    Keyword arguments:
+        d [dict] : dictionary to be updated
+        u [dict] : dictionary used to update the items in d
+
+    Returns:
+        d [dict] : updated dictionary d
+    """
+    if u is not None:
+        for k, v in u.items():
+            if isinstance(d, collections.abc.Mapping):
+                if isinstance(v, collections.abc.Mapping):
+                    r = update(d.get(k, {}), v)
+                    d[k] = r
+                else:
+                    d[k] = u[k]
+            else:
+                d = {k: u[k]}
+    return d
+
+
+class Config:
+
+    def __init__(self,config_yaml=None):
+        """
+        Set up the configurations for combination
+
+        Keyword arguments:
+            config_yaml [str], optional : YAML configuration file
+
+        Updates:
+            self.config [dict] : configurations for combination
+        """
+        # Set the defaults
+        config = {
+            'process': {
+                'verbose': 'normal'
+            },
+            'campaign': {
+                'author': '',
+                'contact': '',
+                'sol_id': 'FIN',
+                'camp_id': 'OPS',
+                'cmb_name': 'IGS',
+                'vid': 0,
+                'cut_start': 0,
+                'cut_end': 0,
+                'subm_rootdir': '/data/combination/test_data',
+                'prod_rootdir': '/data/combination/test_results',
+                'sat_metadata_file': None,
+                'eop_format': None,
+                'eop_file': None,
+                'rf_rootdir': '/data/repro3/products/sinex',
+                'rf_name': 'IGS0R03SNX',
+                'nanu_sumfile': '/data/repro3/metadata/nanus_sum_1999_2017',
+                'ac_acronyms': {}
+            },
+            'orbits': {
+                'ac_contribs': {
+                    'weighted': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    },
+                    'unweighted': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    },
+                    'excluded': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    }
+                },
+                'sampling': None,
+                'cen_wht_method': 'global',
+                'sat_wht_method': 'RMS_L1',
+                'rf_align': [False,False,False],
+                'ut1_rot': None,
+                'ut1_eop_format': 'IGS_ERP2',
+                'rm_dv': False,
+                'no_rm_dv': [],
+                'assess': {
+                    'sat_rms_tst': None,
+                    'sat_rms_tst_unweighted': None,
+                    'coef_sat': 470.0,
+                    'thresh_sat': None,
+                    'max_high_satrms': 5,
+                    'trn_tst': None,
+                    'thresh_trn': [None,None,None],
+                    'numcen_tst': None,
+                    'min_numcen': None,
+                    'max_iter': 100
+                },
+                'sp3_header': {
+                    'coord_sys': 'IGb14',
+                    'cmb_type': 'FINAL',
+                    'clk_src': 'CMB',
+                    'antex': 'IGS14_2163',
+                    'oload': 'FES2004'
+                }
+            },
+            'clocks': {
+                'ac_contribs': {
+                    'weighted': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    },
+                    'unweighted': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    },
+                    'excluded': {
+                        'systems': None,
+                        'prns': None,
+                        'svns': None
+                    }
+                }
+            }
+        }
+
+        # If a configuration yaml file is given, update the configurations
+        if config_yaml is not None:
+
+            # Check the given config_yaml filename
+            if not isinstance(config_yaml,str):
+                logger.error("The given yaml config file must be a string",
+                             stack_info=True)
+                raise TypeError("The given yaml config file name must be a "
+                                "string")
+
+            # Try to open the yaml file
+            try:
+                stream = open(config_yaml,'r')
+            except IOError:
+                logger.error(f"The configuration yaml file {config_yaml} is"
+                             f" not accessible!",stack_info=True)
+                raise IOError(f"File {config_yaml} not accessible!")
+            else:
+                # safe_load avoids executing arbitrary tags and does not
+                # require an explicit Loader argument
+                yaml_parsed = yaml.safe_load(stream)
+                stream.close()
+
+            # update the configurations
+            config = update(config,yaml_parsed)
+
+        # Update the attribute
+        self.config = config
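A small sketch of how update() merges a user-supplied YAML mapping over the
defaults used by Config; the keys below are a subset of the real defaults:

    from rocs.settings import update

    defaults = {'orbits': {'sampling': None, 'cen_wht_method': 'global'}}
    user = {'orbits': {'sampling': 300}}

    merged = update(defaults, user)
    print(merged)
    # {'orbits': {'sampling': 300, 'cen_wht_method': 'global'}}
    # nested keys absent from the user mapping keep their default values
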
diff --git a/rocs/setup_logging.py b/rocs/setup_logging.py
new file mode 100755
index 0000000..8a32417
--- /dev/null
+++ b/rocs/setup_logging.py
@@ -0,0 +1,28 @@
+import os
+import logging.config
+import yaml
+
+
+def setup_logging(
+        default_path=None,
+        default_level=logging.INFO,
+        env_key='LOG_CFG'
+):
+    """Set up the logging configuration."""
+
+    # If no default path is provided, use the project-relative logging.yaml
+    if not default_path:
+        # directory of setup_logging.py
+        current_dir = os.path.dirname(os.path.abspath(__file__))
+        # one level up to the project root
+        project_dir = os.path.abspath(os.path.join(current_dir, '..'))
+        # logging.yaml is assumed to be in the project root
+        default_path = os.path.join(project_dir, 'logging.yaml')
+
+    # Check for the environment variable or use the default path
+    path = os.getenv(env_key, default_path)
+
+    # Try to load the logging config
+    if os.path.exists(path):
+        with open(path, 'rt') as f:
+            config = yaml.safe_load(f.read())
+        logging.config.dictConfig(config)
+    else:
+        logging.basicConfig(level=default_level)
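Typical start-up usage of setup_logging(); the LOG_CFG environment variable,
read by the function above, may point at an alternative YAML configuration:

    import logging
    from rocs.setup_logging import setup_logging

    setup_logging()  # falls back to <project root>/logging.yaml, else basicConfig
    logger = logging.getLogger(__name__)
    logger.info("logging configured")
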
description="Orbit combination software", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/your-repo/your-package", + packages=find_packages(), # Automatically find all packages + classifiers=[ + "Programming Language :: Python :: 3", + f"License :: OSI Approved :: {license}", + "Operating System :: OS Independent", + ], + python_requires=">=3.6", # Python version compatibility + install_requires=requirements, + entry_points={ + "console_scripts": [ + "rocs =rocs.__main__:main", + ], + }, + include_package_data=True, + package_data={ + "": ["*.yaml"], + }, +) +