From 783c2aa6f4e77603552177cc868dbc2e275b73a6 Mon Sep 17 00:00:00 2001 From: gwbres Date: Sun, 28 Jul 2024 17:33:46 +0200 Subject: [PATCH] SBAS + Qc (#259) SBAS and BRDC Navigation Improved QC capabilities and reporting features Integrated plot Breaking changes in the command line opts Breaking changes: Graph mode is removed and integrated into analysis mode --------- Signed-off-by: Guillaume W. Bres --- .github/ISSUE_TEMPLATE.md | 4 +- .github/workflows/rust.yml | 56 +- .gitignore | 1 + README.md | 5 +- logs/.gitkeep | 0 qc-traits/Cargo.toml | 11 +- qc-traits/README.md | 15 + qc-traits/src/html.rs | 11 - qc-traits/src/lib.rs | 12 +- qc-traits/src/merge.rs | 35 + qc-traits/src/processing/averaging.rs | 43 + qc-traits/src/processing/decim.rs | 96 ++ qc-traits/src/processing/derivative.rs | 38 + .../src/processing}/filters/decim.rs | 0 qc-traits/src/processing/filters/interp.rs | 43 + qc-traits/src/processing/filters/smoothing.rs | 113 +++ qc-traits/src/processing/item.rs | 298 ++++++ qc-traits/src/processing/mask.rs | 407 ++++++++ qc-traits/src/processing/mod.rs | 218 +++++ rinex-cli/Cargo.toml | 30 +- rinex-cli/config/qc/gnss_snr30db.json | 4 - rinex-cli/config/qc/sv_manual_gap.json | 7 - .../{gpst_cpp_kf.json => gpst_ppp_kf.json} | 2 +- rinex-cli/src/analysis/mod.rs | 4 - rinex-cli/src/analysis/sampling.rs | 26 - rinex-cli/src/analysis/sv_epoch.rs | 104 --- .../src/cli/fops/{substract.rs => diff.rs} | 7 +- rinex-cli/src/cli/fops/mod.rs | 15 +- rinex-cli/src/cli/fops/split.rs | 2 - rinex-cli/src/cli/fops/time_binning.rs | 3 +- rinex-cli/src/cli/graph.rs | 237 ----- rinex-cli/src/cli/mod.rs | 174 ++-- rinex-cli/src/cli/positioning.rs | 15 +- rinex-cli/src/cli/qc.rs | 29 - rinex-cli/src/cli/workspace.rs | 90 ++ rinex-cli/src/fops.rs | 99 +- rinex-cli/src/graph/combination.rs | 104 --- rinex-cli/src/graph/context.rs | 74 -- rinex-cli/src/graph/csv.rs | 62 +- rinex-cli/src/graph/mod.rs | 651 ------------- rinex-cli/src/graph/naviplot.rs | 22 - 
rinex-cli/src/graph/record/ionex.rs | 87 -- rinex-cli/src/graph/record/ionosphere.rs | 6 +- rinex-cli/src/graph/record/meteo.rs | 124 --- rinex-cli/src/graph/record/mod.rs | 34 - rinex-cli/src/graph/record/navigation.rs | 5 +- rinex-cli/src/graph/record/observation.rs | 392 -------- rinex-cli/src/graph/record/sp3_plot.rs | 7 +- rinex-cli/src/graph/skyplot.rs | 56 -- rinex-cli/src/identification.rs | 283 ------ rinex-cli/src/main.rs | 117 +-- rinex-cli/src/positioning/cggtts/mod.rs | 73 +- .../src/positioning/cggtts/post_process.rs | 28 +- rinex-cli/src/positioning/cggtts/report.rs | 395 ++++++++ rinex-cli/src/positioning/mod.rs | 96 +- rinex-cli/src/positioning/orbit/mod.rs | 1 - rinex-cli/src/positioning/orbit/nav.rs | 95 +- rinex-cli/src/positioning/ppp/mod.rs | 4 +- rinex-cli/src/positioning/ppp/post_process.rs | 89 +- rinex-cli/src/positioning/ppp/report.rs | 870 ++++++++++++++++++ rinex-cli/src/preprocessing.rs | 591 +----------- rinex-cli/src/qc.rs | 52 -- rinex-cli/src/report/mod.rs | 140 +++ rinex-qc/Cargo.toml | 34 +- rinex-qc/README.md | 121 +++ rinex-qc/html/index.html | 378 ++++++++ rinex-qc/src/analysis/mod.rs | 104 --- rinex-qc/src/analysis/obs.rs | 634 ------------- rinex-qc/src/analysis/sampling.rs | 140 --- rinex-qc/src/analysis/sv.rs | 45 - rinex-qc/src/cfg.rs | 96 ++ {rinex => rinex-qc}/src/context.rs | 304 ++++-- rinex-qc/src/lib.rs | 265 +----- rinex-qc/src/opts.rs | 254 ----- rinex-qc/src/plot.rs | 352 +++++++ rinex-qc/src/report/combined.rs | 1 + rinex-qc/src/report/mod.rs | 423 +++++++++ rinex-qc/src/report/navi.rs | 183 ++++ rinex-qc/src/report/orbit.rs | 131 +++ rinex-qc/src/report/rinex/clock.rs | 311 +++++++ rinex-qc/src/report/rinex/doris.rs | 99 ++ rinex-qc/src/report/rinex/ionex.rs | 228 +++++ rinex-qc/src/report/rinex/meteo.rs | 325 +++++++ rinex-qc/src/report/rinex/mod.rs | 57 ++ rinex-qc/src/report/rinex/nav.rs | 104 +++ rinex-qc/src/report/rinex/obs.rs | 536 +++++++++++ rinex-qc/src/report/shared/mod.rs | 29 + 
rinex-qc/src/report/shared/sampling.rs | 200 ++++ rinex-qc/src/report/sp3.rs | 229 +++++ rinex-qc/src/report/summary/bias.rs | 106 +++ rinex-qc/src/report/summary/mod.rs | 135 +++ rinex-qc/src/report/summary/nav_post.rs | 106 +++ rinex/Cargo.toml | 58 +- rinex/src/algorithm/mod.rs | 3 - rinex/src/algorithm/target.rs | 507 ---------- rinex/src/bibliography.rs | 2 - rinex/src/clock/mod.rs | 12 +- rinex/src/clock/record.rs | 151 +-- rinex/src/constants.rs | 10 +- rinex/src/cospar.rs | 83 -- rinex/src/domes.rs | 117 --- rinex/src/doris/mod.rs | 51 +- rinex/src/doris/record.rs | 275 ++---- rinex/src/doris/station.rs | 16 +- rinex/src/epoch.rs | 58 ++ rinex/src/ground_position.rs | 101 +- rinex/src/hardware.rs | 163 ++-- rinex/src/header.rs | 132 +-- rinex/src/ionex/mod.rs | 92 +- rinex/src/ionex/record.rs | 123 ++- rinex/src/lib.rs | 301 +++--- rinex/src/macros.rs | 9 - rinex/src/meteo/mod.rs | 55 +- rinex/src/meteo/record.rs | 265 ++---- rinex/src/meteo/sensor.rs | 71 +- rinex/src/navigation/ephemeris.rs | 71 +- rinex/src/navigation/record.rs | 560 ++++------- rinex/src/observable.rs | 25 +- rinex/src/observation/mod.rs | 160 ++++ rinex/src/observation/record.rs | 750 +++++---------- rinex/src/record.rs | 88 -- rinex/src/tests/clock.rs | 21 +- rinex/src/tests/doris.rs | 1 - rinex/src/tests/filename.rs | 1 + rinex/src/tests/masking.rs | 48 - rinex/src/tests/mod.rs | 27 +- rinex/src/tests/obs.rs | 14 - rinex/src/tests/parsing.rs | 94 +- .../src/tests/{ => processing}/decimation.rs | 40 +- rinex/src/tests/processing/masking.rs | 228 +++++ rinex/src/tests/processing/mod.rs | 2 + rinex/src/tests/{ => processing}/sampling.rs | 0 rinex/src/tests/{ => processing}/smoothing.rs | 0 sinex/Cargo.toml | 4 +- sp3/Cargo.toml | 16 +- sp3/src/lib.rs | 520 +++++++---- sp3/src/merge.rs | 27 - sp3/src/position.rs | 16 +- sp3/src/tests/mod.rs | 4 +- sp3/src/tests/parser_3d.rs | 2 +- sp3/src/velocity.rs | 16 +- .../COD0MGXFIN_20241280000_01D_30S_CLK.CLK.gz | Bin 0 -> 5199466 bytes 
.../V3/BRUX00BEL_R_20240920000_01D_EN.rnx.gz | Bin 0 -> 158493 bytes .../V3/HERT00GBR_R_20240920000_01D_GN.rnx.gz | Bin 0 -> 34801 bytes test_resources/OBS/V3/GEOP092I.24o.gz | Bin 0 -> 412424 bytes .../SP3/CORD00ARG_R_20240920000_01D_MN.rnx.gz | Bin 0 -> 227840 bytes test_resources/SP3/Sta21114.sp3.gz | Bin 0 -> 324941 bytes tools/README.md | 27 + tools/ci-ppp.sh | 27 - tools/download.sh | 12 + tools/doy.py | 20 + tutorials/BDS-GEO/README.md | 5 + tutorials/BDS-GEO/esbjerg.sh | 13 + tutorials/BDS-GEO/mojdnk.sh | 26 + tutorials/BDS/README.md | 5 + tutorials/BDS/esbjerg-brdc.sh | 13 + tutorials/BDS/esbjerg.sh | 32 + tutorials/DIFF/README.md | 4 + tutorials/DIFF/esbjrg-mojn.sh | 27 + tutorials/DORIS/README.md | 6 + tutorials/DORIS/cs2-2018164.sh | 8 + tutorials/GAL+BDS-GEO/README.md | 5 + tutorials/GAL+BDS-GEO/mojdnk.sh | 27 + tutorials/GAL+BDS/README.md | 5 + tutorials/GAL+BDS/mojdnk.sh | 36 + tutorials/GAL+SBAS/README.md | 4 + tutorials/GAL+SBAS/esbjerg.sh | 25 + tutorials/GAL+SBAS/mojn.sh | 27 + tutorials/GAL/README.md | 4 + tutorials/GAL/esbjerg-brdc.sh | 34 + tutorials/GAL/esbjerg.sh | 33 + tutorials/GAL/mojdnk-brdc.sh | 34 + tutorials/GAL/mojdnk.sh | 38 + tutorials/GPS/README.md | 4 + tutorials/GPS/esbjerg-brdc.sh | 32 + tutorials/GPS/esbjerg.sh | 37 + tutorials/GPS/mojdnk-brdc.sh | 32 + tutorials/GPS/mojdnk.sh | 37 + tutorials/IONEX/README.md | 6 + tutorials/IONEX/ckmg2022020.sh | 7 + tutorials/IONEX/postdeu-2024254.sh | 12 + tutorials/JMF/2024_092_PARIS/cpp_kf.json | 18 + tutorials/JMF/2024_092_PARIS/galileo-brdc.sh | 19 + tutorials/JMF/2024_092_PARIS/galileo.sh | 48 + tutorials/JMF/2024_092_PARIS/gps-brdc.sh | 13 + tutorials/JMF/2024_092_PARIS/gps.sh | 43 + tutorials/JMF/README.md | 7 + tutorials/MERGE/README.md | 4 + tutorials/MERGE/clock.sh | 13 + tutorials/MERGE/crinex.sh | 13 + tutorials/MERGE/ionex.sh | 11 + tutorials/MERGE/meteo.sh | 11 + tutorials/METEO/README.md | 7 + tutorials/METEO/abvi0010.sh | 11 + tutorials/METEO/postdeu-2024254.sh | 12 + 
tutorials/QC/README.md | 4 + tutorials/QC/mojdnk.sh | 20 + tutorials/README.md | 66 ++ tutorials/SPLIT/README.md | 6 + tutorials/SPLIT/esbjrg.sh | 15 + tutorials/SPLIT/sp3-tbin8x3.sh | 12 + tutorials/TBIN/README.md | 6 + tutorials/TBIN/esbjrg-tbin6x4.sh | 10 + tutorials/TBIN/sp3-tbin8x3.sh | 12 + tutorials/config/README.md | 26 + tutorials/config/survey/README.md | 11 + tutorials/config/survey/cpp_kf.json | 18 + .../config/survey/cpp_lsq.json | 0 tutorials/config/survey/ppp_kf.json | 18 + tutorials/config/survey/ppp_lsq.json | 18 + .../config/survey/spp_lsq.json | 2 +- ublox-rnx/Cargo.toml | 2 +- 212 files changed, 11314 insertions(+), 7858 deletions(-) create mode 100644 logs/.gitkeep create mode 100644 qc-traits/README.md delete mode 100644 qc-traits/src/html.rs create mode 100644 qc-traits/src/merge.rs create mode 100644 qc-traits/src/processing/averaging.rs create mode 100644 qc-traits/src/processing/decim.rs create mode 100644 qc-traits/src/processing/derivative.rs rename {rinex/src/algorithm => qc-traits/src/processing}/filters/decim.rs (100%) create mode 100644 qc-traits/src/processing/filters/interp.rs create mode 100644 qc-traits/src/processing/filters/smoothing.rs create mode 100644 qc-traits/src/processing/item.rs create mode 100644 qc-traits/src/processing/mask.rs create mode 100644 qc-traits/src/processing/mod.rs delete mode 100644 rinex-cli/config/qc/gnss_snr30db.json delete mode 100644 rinex-cli/config/qc/sv_manual_gap.json rename rinex-cli/config/rtk/{gpst_cpp_kf.json => gpst_ppp_kf.json} (94%) delete mode 100644 rinex-cli/src/analysis/mod.rs delete mode 100644 rinex-cli/src/analysis/sampling.rs delete mode 100644 rinex-cli/src/analysis/sv_epoch.rs rename rinex-cli/src/cli/fops/{substract.rs => diff.rs} (80%) delete mode 100644 rinex-cli/src/cli/qc.rs create mode 100644 rinex-cli/src/cli/workspace.rs delete mode 100644 rinex-cli/src/graph/combination.rs delete mode 100644 rinex-cli/src/graph/context.rs delete mode 100644 rinex-cli/src/graph/mod.rs 
delete mode 100644 rinex-cli/src/graph/naviplot.rs delete mode 100644 rinex-cli/src/graph/record/ionex.rs delete mode 100644 rinex-cli/src/graph/record/meteo.rs delete mode 100644 rinex-cli/src/graph/record/mod.rs delete mode 100644 rinex-cli/src/graph/record/observation.rs delete mode 100644 rinex-cli/src/graph/skyplot.rs delete mode 100644 rinex-cli/src/identification.rs create mode 100644 rinex-cli/src/positioning/cggtts/report.rs create mode 100644 rinex-cli/src/positioning/ppp/report.rs delete mode 100644 rinex-cli/src/qc.rs create mode 100644 rinex-cli/src/report/mod.rs create mode 100644 rinex-qc/README.md create mode 100644 rinex-qc/html/index.html delete mode 100644 rinex-qc/src/analysis/mod.rs delete mode 100644 rinex-qc/src/analysis/obs.rs delete mode 100644 rinex-qc/src/analysis/sampling.rs delete mode 100644 rinex-qc/src/analysis/sv.rs create mode 100644 rinex-qc/src/cfg.rs rename {rinex => rinex-qc}/src/context.rs (66%) delete mode 100644 rinex-qc/src/opts.rs create mode 100644 rinex-qc/src/plot.rs create mode 100644 rinex-qc/src/report/combined.rs create mode 100644 rinex-qc/src/report/mod.rs create mode 100644 rinex-qc/src/report/navi.rs create mode 100644 rinex-qc/src/report/orbit.rs create mode 100644 rinex-qc/src/report/rinex/clock.rs create mode 100644 rinex-qc/src/report/rinex/doris.rs create mode 100644 rinex-qc/src/report/rinex/ionex.rs create mode 100644 rinex-qc/src/report/rinex/meteo.rs create mode 100644 rinex-qc/src/report/rinex/mod.rs create mode 100644 rinex-qc/src/report/rinex/nav.rs create mode 100644 rinex-qc/src/report/rinex/obs.rs create mode 100644 rinex-qc/src/report/shared/mod.rs create mode 100644 rinex-qc/src/report/shared/sampling.rs create mode 100644 rinex-qc/src/report/sp3.rs create mode 100644 rinex-qc/src/report/summary/bias.rs create mode 100644 rinex-qc/src/report/summary/mod.rs create mode 100644 rinex-qc/src/report/summary/nav_post.rs delete mode 100644 rinex/src/algorithm/target.rs delete mode 100644 
rinex/src/cospar.rs delete mode 100644 rinex/src/domes.rs delete mode 100644 rinex/src/tests/masking.rs rename rinex/src/tests/{ => processing}/decimation.rs (68%) create mode 100644 rinex/src/tests/processing/masking.rs create mode 100644 rinex/src/tests/processing/mod.rs rename rinex/src/tests/{ => processing}/sampling.rs (100%) rename rinex/src/tests/{ => processing}/smoothing.rs (100%) delete mode 100644 sp3/src/merge.rs create mode 100644 test_resources/CLK/V3/COD0MGXFIN_20241280000_01D_30S_CLK.CLK.gz create mode 100644 test_resources/NAV/V3/BRUX00BEL_R_20240920000_01D_EN.rnx.gz create mode 100644 test_resources/NAV/V3/HERT00GBR_R_20240920000_01D_GN.rnx.gz create mode 100644 test_resources/OBS/V3/GEOP092I.24o.gz create mode 100644 test_resources/SP3/CORD00ARG_R_20240920000_01D_MN.rnx.gz create mode 100644 test_resources/SP3/Sta21114.sp3.gz create mode 100644 tools/README.md delete mode 100755 tools/ci-ppp.sh create mode 100755 tools/download.sh create mode 100755 tools/doy.py create mode 100644 tutorials/BDS-GEO/README.md create mode 100755 tutorials/BDS-GEO/esbjerg.sh create mode 100755 tutorials/BDS-GEO/mojdnk.sh create mode 100644 tutorials/BDS/README.md create mode 100755 tutorials/BDS/esbjerg-brdc.sh create mode 100755 tutorials/BDS/esbjerg.sh create mode 100644 tutorials/DIFF/README.md create mode 100755 tutorials/DIFF/esbjrg-mojn.sh create mode 100644 tutorials/DORIS/README.md create mode 100755 tutorials/DORIS/cs2-2018164.sh create mode 100644 tutorials/GAL+BDS-GEO/README.md create mode 100755 tutorials/GAL+BDS-GEO/mojdnk.sh create mode 100644 tutorials/GAL+BDS/README.md create mode 100755 tutorials/GAL+BDS/mojdnk.sh create mode 100644 tutorials/GAL+SBAS/README.md create mode 100755 tutorials/GAL+SBAS/esbjerg.sh create mode 100755 tutorials/GAL+SBAS/mojn.sh create mode 100644 tutorials/GAL/README.md create mode 100755 tutorials/GAL/esbjerg-brdc.sh create mode 100755 tutorials/GAL/esbjerg.sh create mode 100755 tutorials/GAL/mojdnk-brdc.sh create mode 
100755 tutorials/GAL/mojdnk.sh create mode 100644 tutorials/GPS/README.md create mode 100755 tutorials/GPS/esbjerg-brdc.sh create mode 100755 tutorials/GPS/esbjerg.sh create mode 100755 tutorials/GPS/mojdnk-brdc.sh create mode 100755 tutorials/GPS/mojdnk.sh create mode 100644 tutorials/IONEX/README.md create mode 100755 tutorials/IONEX/ckmg2022020.sh create mode 100755 tutorials/IONEX/postdeu-2024254.sh create mode 100644 tutorials/JMF/2024_092_PARIS/cpp_kf.json create mode 100755 tutorials/JMF/2024_092_PARIS/galileo-brdc.sh create mode 100755 tutorials/JMF/2024_092_PARIS/galileo.sh create mode 100755 tutorials/JMF/2024_092_PARIS/gps-brdc.sh create mode 100755 tutorials/JMF/2024_092_PARIS/gps.sh create mode 100644 tutorials/JMF/README.md create mode 100644 tutorials/MERGE/README.md create mode 100755 tutorials/MERGE/clock.sh create mode 100755 tutorials/MERGE/crinex.sh create mode 100755 tutorials/MERGE/ionex.sh create mode 100755 tutorials/MERGE/meteo.sh create mode 100644 tutorials/METEO/README.md create mode 100755 tutorials/METEO/abvi0010.sh create mode 100755 tutorials/METEO/postdeu-2024254.sh create mode 100644 tutorials/QC/README.md create mode 100755 tutorials/QC/mojdnk.sh create mode 100644 tutorials/README.md create mode 100644 tutorials/SPLIT/README.md create mode 100755 tutorials/SPLIT/esbjrg.sh create mode 100755 tutorials/SPLIT/sp3-tbin8x3.sh create mode 100644 tutorials/TBIN/README.md create mode 100755 tutorials/TBIN/esbjrg-tbin6x4.sh create mode 100755 tutorials/TBIN/sp3-tbin8x3.sh create mode 100644 tutorials/config/README.md create mode 100644 tutorials/config/survey/README.md create mode 100644 tutorials/config/survey/cpp_kf.json rename rinex-cli/config/rtk/gpst_cpp_basic.json => tutorials/config/survey/cpp_lsq.json (100%) create mode 100644 tutorials/config/survey/ppp_kf.json create mode 100644 tutorials/config/survey/ppp_lsq.json rename rinex-cli/config/rtk/gpst_spp_basic.json => tutorials/config/survey/spp_lsq.json (92%) diff --git 
a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 59488882a..bb521d15f 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -2,8 +2,8 @@ Thank you for using our toolbox and trying to make it better 🛰️ 🌍 -If you are reporting an error related to one of the applications, [follow these guidelines](#applications-error-reporting). -Otherwise, [use those](#core-library-error-reporting) to report core library issues +When reporting applications issues, please follow the first paragraph. +When reporting errors in the core libraries, refer to the second paragraph. ## Applications error reporting diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index e75a68ba5..aae02e68b 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -13,29 +13,6 @@ env: CARGO_TERM_COLOR: always jobs: - - lint: - name: Linter - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions-rs/toolchain@v1 - name: Install Rust - with: - toolchain: stable - override: true - components: rustfmt, clippy - - uses: actions-rs/cargo@v1 - name: Linter - with: - command: fmt - args: --all -- --check - - name: Audit - continue-on-error: true - run: | - cargo install cargo-audit - cargo audit - build: name: Build runs-on: ubuntu-latest @@ -51,6 +28,23 @@ jobs: run: | sudo apt-get update sudo apt-get install -y libudev-dev + + - uses: actions-rs/cargo@v1 + name: Cargo update + with: + command: update + + - uses: actions-rs/cargo@v1 + name: Linter + with: + command: fmt + args: --all -- --check + + - uses: actions-rs/cargo@v1 + name: Build (all features) + with: + command: build + args: --all-features --release - uses: actions-rs/cargo@v1 name: Test @@ -64,12 +58,6 @@ jobs: command: test args: --verbose --all-features - - uses: actions-rs/cargo@v1 - name: Build (all features) - with: - command: build - args: --all-features --release - - uses: actions-rs/toolchain@v1 name: Install nightly with: @@ -90,6 +78,11 @@ jobs: 
toolchain: stable override: true + - uses: actions-rs/cargo@v1 + name: Cargo update + with: + command: update + - uses: actions-rs/cargo@v1 name: Build (default) with: @@ -112,6 +105,11 @@ jobs: toolchain: stable override: true + - uses: actions-rs/cargo@v1 + name: Cargo update + with: + command: update + - uses: actions-rs/cargo@v1 name: Build (default) with: diff --git a/.gitignore b/.gitignore index 32952d498..eb2e8853b 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ Cargo.lock *.swp *.swo *.patch +*.txt **/*.rs.bk .DS_Store diff --git a/README.md b/README.md index be207d626..1360e011b 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,8 @@ It is growing as some sort of Anubis/Teqc/Glab combination. No GUI currently ava It integrates a PVT and CGGTTS solutions solver. The application is auto-generated for a few architectures, you can directly [download it from Github.com](https://github.com/georust/rinex/releases) - +* [`tutorials`](tutorials/) is a superset of scripts (Linux/MacOS compatible) +to get started quickly. The examples span pretty much everything our applications allow. * [`sp3`](sp3/) High Precision Orbits (by IGS) * [`rnx2crx`](rnx2crx/) is a RINEX compressor (RINEX to Compact RINEX) * [`crx2rnx`](crx2rnx/) is a CRINEX decompresor (Compact RINEX to RINEX) @@ -67,6 +68,8 @@ The application is auto-generated for a few architectures, you can directly * [`sinex`](sinex/) SNX dedicated core library * [`ublox-rnx`](ublox-rnx/) is an application to generate RINEX files from Ublox receivers. This application is currently work in progress +* [`tools`](tools/) are utility scripts and development tools +* [`logs`](logs/) is dedicated to store session logs, if you work within this workspace directly. 
## Relevant Ecosystem diff --git a/logs/.gitkeep b/logs/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/qc-traits/Cargo.toml b/qc-traits/Cargo.toml index bc2f06c23..bbc6e1fc5 100644 --- a/qc-traits/Cargo.toml +++ b/qc-traits/Cargo.toml @@ -13,9 +13,18 @@ edition = "2021" [features] default = [] # no features by default +# Unlock processing features +# - Filter designer: design and apply filter ops to GNSS datasets +processing = [ + "dep:gnss-rs", + "dep:hifitime", +] + [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docrs", "--generate-link-to-definition"] [dependencies] -horrorshow = "0.8" +thiserror = "1" +hifitime = { version = "4.0.0-alpha", optional = true } +gnss-rs = { version = "2.2", features = ["serde", "domes", "cospar"], optional = true } diff --git a/qc-traits/README.md b/qc-traits/README.md new file mode 100644 index 000000000..6e4b49ef8 --- /dev/null +++ b/qc-traits/README.md @@ -0,0 +1,15 @@ +# RINEX/GNSS QC and processing + +This crate is a small library to share and implement in other libraries +to form a coherent ecosystem to process and analyze GNSS data. + +As an example, this crate is implemented in the RINEX and SP3 libraries, the RINEX-QC +library and the RINEX-Cli application and allows the synthesis of analysis reports +and the processing on GNSS down to navigation. + +## Existing Modules + +- html: HTML report rendition +- merge: describes how we stack data into an already existing context +- processing: available on crate feature only, +describes a filter designer and processing ops diff --git a/qc-traits/src/html.rs b/qc-traits/src/html.rs deleted file mode 100644 index 23ee568bc..000000000 --- a/qc-traits/src/html.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! RINEX QC reports in HTML -use horrorshow::RenderBox; - -pub trait HtmlReport { - /// Renders self to plain HTML. - /// Generates a whole HTML entity. 
- fn to_html(&self) -> String; - /// Renders self as an HTML node - /// to embed within external HTML. - fn to_inline_html(&self) -> Box; -} diff --git a/qc-traits/src/lib.rs b/qc-traits/src/lib.rs index c8c562e05..610fa8874 100644 --- a/qc-traits/src/lib.rs +++ b/qc-traits/src/lib.rs @@ -1,4 +1,10 @@ -//! Specific traits to generate RINEX quality reports. +#![doc(html_logo_url = "https://raw.githubusercontent.com/georust/meta/master/logo/logo.png")] +#![doc = include_str!("../README.md")] +#![cfg_attr(docrs, feature(doc_cfg))] -mod html; -pub use html::HtmlReport; +pub mod merge; +pub use merge::{Error as MergeError, Merge}; + +#[cfg(feature = "processing")] +#[cfg_attr(docrs, doc(cfg(feature = "processing")))] +pub mod processing; diff --git a/qc-traits/src/merge.rs b/qc-traits/src/merge.rs new file mode 100644 index 000000000..b46e9d588 --- /dev/null +++ b/qc-traits/src/merge.rs @@ -0,0 +1,35 @@ +//! Merge traits to extend data contexts +use thiserror::Error; + +/// [Merge] specific Errors. +#[derive(Error, Debug)] +pub enum Error { + /// When merging B into A, both types should match + /// otherwise operation in invalid. + #[error("file type mismatch")] + FileTypeMismatch, + /// Some file formats, to remain valid, require that + /// B and A be expressed in the same Timescale to remain valid + #[error("timescale mismatch")] + TimescaleMismatch, + /// Some file formats, to remain valid, require that coordinates + /// from B and A be expressed in the same Reference Frame to remain valid + #[error("reference frame mismatch")] + ReferenceFrameMismatch, + /// Some file formats, to remain valid, require that they are + /// published by the same publisher/agency to be merged to into one another + #[error("data provider (agency) mismatch")] + DataProviderAgencyMismatch, +} + +/// Merge Trait is impleted to extend Data Contexts. +pub trait Merge { + /// Merge "rhs" dataset into self, to form extend dataset. 
+ /// We use this for example to extend 24h RINEX to 1week RINEX. + /// When merging File A and B types must match otherwise operation is invalid. + fn merge(&self, rhs: &Self) -> Result + where + Self: Sized; + /// [Self::merge] mutable implementation. + fn merge_mut(&mut self, rhs: &Self) -> Result<(), Error>; +} diff --git a/qc-traits/src/processing/averaging.rs b/qc-traits/src/processing/averaging.rs new file mode 100644 index 000000000..e59d0ece0 --- /dev/null +++ b/qc-traits/src/processing/averaging.rs @@ -0,0 +1,43 @@ +use hifitime::{Duration, Epoch}; + +fn moving_average( + data: Vec<(Epoch, T)>, + window: Duration, +) -> Vec<(Epoch, T)> { + let mut acc = T::default(); + let mut prev_epoch: Option = None; + let mut ret: Vec<(Epoch, T)> = Vec::new(); + for (epoch, value) in data {} + ret +} + +#[derive(Debug, Clone, Copy)] +pub enum Averager { + MovingAverage(Duration), +} + +impl Default for Averager { + fn default() -> Self { + Self::MovingAverage(Duration::from_seconds(600.0_f64)) + } +} + +impl Averager { + pub fn mov(window: Duration) -> Self { + Self::MovingAverage(window) + } + pub fn eval(&self, input: Vec<(Epoch, T)>) -> Vec<(Epoch, T)> { + match self { + Self::MovingAverage(dt) => moving_average(input, *dt), + } + } +} + +#[cfg(test)] +mod test { + use super::*; + #[test] + fn test_moving_average() { + let mov = Averager::mov(Duration::from_seconds(10.0_f64)); + } +} diff --git a/qc-traits/src/processing/decim.rs b/qc-traits/src/processing/decim.rs new file mode 100644 index 000000000..dd2390cf7 --- /dev/null +++ b/qc-traits/src/processing/decim.rs @@ -0,0 +1,96 @@ +use crate::processing::{FilterItem, ItemError}; +use hifitime::Duration; +use thiserror::Error; + +/// Decimation filter parsing error +#[derive(Error, Debug)] +pub enum Error { + #[error("invalid decimated item")] + InvalidDecimItem(#[from] ItemError), + #[error("failed to parse decimation attribute \"{0}\"")] + AttributeParsingError(String), +} + +/// Type of decimation filter 
+#[derive(Clone, Debug, PartialEq)] +pub enum DecimationFilterType { + /// Simple modulo decimation + Modulo(u32), + /// Duration decimation + Duration(Duration), +} + +#[derive(Clone, Debug, PartialEq)] +pub struct DecimationFilter { + /// Type of decimation filter + pub filter: DecimationFilterType, + /// Optional decimated item. + /// When item is None, all data is to be decimated. + /// When item is specified, only that subset is to be decimated. + pub item: Option, +} + +impl DecimationFilter { + /// Builds a new Duration decimation filter + pub fn duration(dt: Duration) -> Self { + Self { + item: None, + filter: DecimationFilterType::Duration(dt), + } + } + /// Builds new Modulo decimation filter + pub fn modulo(modulo: u32) -> Self { + Self { + item: None, + filter: DecimationFilterType::Modulo(modulo), + } + } + /// Adds targetted item to be decimated + pub fn with_item(&self, item: FilterItem) -> Self { + let mut s = self.clone(); + s.item = Some(item.clone()); + s + } +} + +/// The [Decimate] trait is implemented to reduce data rate prior analysis. 
+pub trait Decimate { + /// Immutable decimation + fn decimate(&self, f: &DecimationFilter) -> Self; + /// Mutable decimation + fn decimate_mut(&mut self, f: &DecimationFilter); +} + +impl std::str::FromStr for DecimationFilter { + type Err = Error; + fn from_str(content: &str) -> Result { + let items: Vec<&str> = content.trim().split(':').collect(); + if let Ok(dt) = Duration::from_str(items[0].trim()) { + Ok(Self { + item: { + if items.len() > 1 { + let item = FilterItem::from_str(items[1].trim())?; + Some(item) + } else { + None // no subset description + } + }, + filter: DecimationFilterType::Duration(dt), + }) + } else if let Ok(r) = items[0].trim().parse::() { + Ok(Self { + item: { + if items.len() > 1 { + let item = FilterItem::from_str(items[1].trim())?; + Some(item) + } else { + None + } + }, + filter: DecimationFilterType::Modulo(r), + }) + } else { + Err(Error::AttributeParsingError(items[0].to_string())) + } + } +} diff --git a/qc-traits/src/processing/derivative.rs b/qc-traits/src/processing/derivative.rs new file mode 100644 index 000000000..5de98965d --- /dev/null +++ b/qc-traits/src/processing/derivative.rs @@ -0,0 +1,38 @@ +use hifitime::Epoch; + +pub struct Derivative { + order: usize, +} + +/* + * Derivative of an number of array sorted by chronological Epoch + */ +pub(crate) fn derivative(input: Vec<(Epoch, f64)>, buf: &mut Vec<(Epoch, f64)>) { + let mut prev: Option<(Epoch, f64)> = None; + for (e, value) in input { + if let Some((prev_e, prev_v)) = prev { + let dt = e - prev_e; + let dy = (value - prev_v) / dt.to_seconds(); + buf.push((e, dy)); + } + prev = Some((e, value)); + } +} + +/* + * Derivative^2 of an number of array sorted by chronological Epoch + */ +impl Derivative { + pub fn new(order: usize) -> Self { + Self { order } + } + pub fn eval(&self, input: Vec<(Epoch, f64)>) -> Vec<(Epoch, f64)> { + let mut buf: Vec<(Epoch, f64)> = Vec::with_capacity(input.len()); + derivative(input, &mut buf); + //for i in 1..self.order { + // 
derivative(&ret, &mut ret); + //} + //ret + buf + } +} diff --git a/rinex/src/algorithm/filters/decim.rs b/qc-traits/src/processing/filters/decim.rs similarity index 100% rename from rinex/src/algorithm/filters/decim.rs rename to qc-traits/src/processing/filters/decim.rs diff --git a/qc-traits/src/processing/filters/interp.rs b/qc-traits/src/processing/filters/interp.rs new file mode 100644 index 000000000..d6892bb3d --- /dev/null +++ b/qc-traits/src/processing/filters/interp.rs @@ -0,0 +1,43 @@ +use super::TargetItem; +use crate::TimeSeries; + +#[derive(Debug, Clone, PartialEq)] +pub enum InterpMethod { + Linear, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct InterpFilter { + pub series: TimeSeries, + pub method: InterpMethod, + pub target: Option, +} + +/* + * Interpolates yp at xp + * +pub(crate) fn lerp(x0y0: (f64, f64), x1y1: (f64, f64), xp: f64) -> f64 { + let (x0, y0) = x0y0; + let (x1, y1) = x1y1; + y0 * (x1 - xp) + y1 * (xp - x0) / (x1 - x0) +} + */ + +/* +impl InterpFilter { + fn linear_interp(y: Vec) -> Vec ; + fn approx_linear_interp(y: Vec) -> Vec ; + /// Applies self to given vector + pub fn interp(y: Vec) -> Vec { + + } + /// Applies self in place to input vector + pub fn interp_mut(&mut y: Vec) { + + } +}*/ + +pub trait Interpolate { + fn interpolate(&self, series: TimeSeries) -> Self; + fn interpolate_mut(&mut self, series: TimeSeries); +} diff --git a/qc-traits/src/processing/filters/smoothing.rs b/qc-traits/src/processing/filters/smoothing.rs new file mode 100644 index 000000000..abf9d475c --- /dev/null +++ b/qc-traits/src/processing/filters/smoothing.rs @@ -0,0 +1,113 @@ +use crate::{preprocessing::TargetItem, Duration}; +use hifitime::EpochError; +use thiserror::Error; + +/// Supported Smoothing Filters +#[derive(Debug, Clone, PartialEq)] +pub enum SmoothingType { + /// Moving average filter + MovingAverage(Duration), + /// Hatch filter: Pseudo range specific smoothing method + Hatch, +} + +/// Smoothing Filter to smooth data subsets 
+#[derive(Debug, Clone, PartialEq)] +pub struct SmoothingFilter { + /// Possible targeted subset to narrow down filter's application. + /// When undefined, the filter applies to entire dataset + pub target: Option, + /// Type of smoothing to apply + pub stype: SmoothingType, +} + +#[derive(Error, Debug)] +pub enum Error { + #[error("invalid description \"{0}\"")] + InvalidDescription(String), + #[error("unknown smoothing filter \"{0}\"")] + UnknownFilter(String), + #[error("invalid target")] + InvalidTarget(#[from] crate::algorithm::target::Error), + #[error("failed to parse duration")] + DurationParsing(#[from] EpochError), +} + +impl std::str::FromStr for SmoothingFilter { + type Err = Error; + fn from_str(content: &str) -> Result { + let items: Vec<&str> = content.trim().split(':').collect(); + if items[0].trim().eq("hatch") { + Ok(Self { + target: { + if items.len() > 1 { + let target = TargetItem::from_str(items[1].trim())?; + Some(target) + } else { + None // no subset description + } + }, + stype: SmoothingType::Hatch, + }) + } else if items[0].trim().eq("mov") { + if items.len() < 2 { + return Err(Error::InvalidDescription(format!("{:?}", items))); + } + let dt = Duration::from_str(items[1].trim())?; + Ok(Self { + target: { + if items.len() > 2 { + let target = TargetItem::from_str(items[2].trim())?; + Some(target) + } else { + None // no data subset + } + }, + stype: SmoothingType::MovingAverage(dt), + }) + } else { + Err(Error::UnknownFilter(items[0].to_string())) + } + } +} + +pub trait Smooth { + /// Applies mov average filter to self + fn moving_average(&self, window: Duration) -> Self; + /// Moving average mutable implementation + fn moving_average_mut(&mut self, window: Duration); + /// Applies a Hatch smoothing filter to Pseudo Range observations + fn hatch_smoothing(&self) -> Self; + /// Hatch filter mutable implementation + fn hatch_smoothing_mut(&mut self); +} + +#[cfg(test)] +mod test { + use super::*; + use std::str::FromStr; + #[test] + fn 
from_str() { + for desc in ["hatch", "hatch:C1C", "hatch:c1c,c2p"] { + let filter = SmoothingFilter::from_str(desc); + assert!( + filter.is_ok(), + "smoothing_filter::from_str() failed on \"{}\"", + desc + ); + } + for desc in [ + "mov:10 min", + "mov:1 hour", + "mov:10 min:clk", + "mov:10 hour:clk", + ] { + let filter = SmoothingFilter::from_str(desc); + assert!( + filter.is_ok(), + "smoothing_filter::from_str() failed on \"{}\"", + desc + ); + } + } +} diff --git a/qc-traits/src/processing/item.rs b/qc-traits/src/processing/item.rs new file mode 100644 index 000000000..9578c3164 --- /dev/null +++ b/qc-traits/src/processing/item.rs @@ -0,0 +1,298 @@ +use std::{num::ParseFloatError, str::FromStr}; +use thiserror::Error; + +use gnss_rs::{ + constellation::ParsingError as ConstellationParsingError, + prelude::{Constellation, SV}, + sv::ParsingError as SVParsingError, +}; + +use hifitime::{Duration, Epoch, ParsingError as EpochParsingError}; + +#[derive(Debug, Error)] +pub enum ItemError { + #[error("unknown filter item \"{0}\"")] + UnknownItem(String), + #[error("item guessing error: {0}")] + TypeGuessingError(String), + #[error("two valid epochs are required to describe a duration")] + InvalidDuration, + #[error("invalid epoch description")] + InvalidEpoch, + #[error("invalid SNR description")] + InvalidSNR, + #[error("invalid elevation angle (0 <= e <= 90)")] + InvalidElevationAngle, + #[error("invalid azimuth angle description (0 <= a <= 360)")] + InvalidAzimuthAngle, + #[error("invalid float number")] + FloatParsing(#[from] ParseFloatError), + #[error("sv item parsing")] + SVParsing(#[from] SVParsingError), + #[error("constellation item parsing")] + ConstellationParing(#[from] ConstellationParsingError), + #[error("duration item parsing")] + InvalidDurationItem(#[from] EpochParsingError), +} + +/// [FilterItem] represents items that filters or other +/// GNSS processing ops may apply to. 
+#[derive(Clone, Debug, PartialEq, PartialOrd)] +pub enum FilterItem { + /// Epoch Item + EpochItem(Epoch), + /// Duration Item + DurationItem(Duration), + /// SNR value, expressed in [dB] + SNRItem(f64), + /// Elevation Angle Item in degrees, 0 <= e <= 90° + ElevationItem(f64), + /// Azimuth Angle Item in degrees, 0 <= a <= 360° + AzimuthItem(f64), + /// List of spacecrafts described as [SV] + SvItem(Vec), + /// List of [Constellation]s + ConstellationItem(Vec), + /// Clock Offset Item + ClockItem, + /// List of complex items originally described as Strings + ComplexItem(Vec), +} + +impl std::ops::BitOrAssign for FilterItem { + fn bitor_assign(&mut self, rhs: Self) { + *self = self.clone() | rhs; + } +} + +impl std::ops::BitOr for FilterItem { + type Output = Self; + fn bitor(self, rhs: Self) -> Self { + match self { + Self::SvItem(ref lhs) => match rhs { + Self::SvItem(rhs) => { + let mut lhs = lhs.clone(); + for r in rhs { + lhs.push(r); + } + Self::SvItem(lhs) + }, + _ => self.clone(), + }, + Self::ConstellationItem(ref lhs) => match rhs { + Self::ConstellationItem(rhs) => { + let mut lhs = lhs.clone(); + for r in rhs { + lhs.push(r); + } + Self::ConstellationItem(lhs) + }, + _ => self.clone(), + }, + _ => self.clone(), + } + } +} + +pub(crate) fn parse_sv_list(items: Vec<&str>) -> Result, SVParsingError> { + let mut ret: Vec = Vec::with_capacity(items.len()); + for item in items { + let sv = SV::from_str(item.trim())?; + ret.push(sv); + } + Ok(ret) +} + +pub(crate) fn parse_gnss_list( + items: Vec<&str>, +) -> Result, ConstellationParsingError> { + let mut ret: Vec = Vec::with_capacity(items.len()); + for item in items { + let c = Constellation::from_str(item.trim())?; + ret.push(c); + } + Ok(ret) +} + +fn parse_float_payload(content: &str) -> Result { + f64::from_str(content.trim()) +} + +impl FilterItem { + pub(crate) fn from_elevation(content: &str) -> Result { + if let Ok(float) = parse_float_payload(content) { + if float >= 0.0 && float <= 90.0 { + return 
Ok(Self::ElevationItem(float)); + } + } + Err(ItemError::InvalidElevationAngle) + } + pub(crate) fn from_azimuth(content: &str) -> Result { + if let Ok(float) = parse_float_payload(content) { + if float >= 0.0 && float <= 360.0 { + return Ok(Self::AzimuthItem(float)); + } + } + Err(ItemError::InvalidAzimuthAngle) + } + pub(crate) fn from_snr(content: &str) -> Result { + if let Ok(float) = parse_float_payload(content) { + Ok(Self::SNRItem(float)) + } else { + Err(ItemError::InvalidSNR) + } + } +} + +// use itertools::Itertools; + +impl std::str::FromStr for FilterItem { + type Err = ItemError; + fn from_str(content: &str) -> Result { + /* + * Type guessing + */ + let c = content.trim(); + let items: Vec<&str> = c.split(',').collect(); + /* + * Epoch and Durations + */ + if let Ok(start) = Epoch::from_str(items[0].trim()) { + if items.len() == 1 { + Ok(Self::EpochItem(start)) + } else if items.len() == 2 { + if let Ok(end) = Epoch::from_str(items[1].trim()) { + Ok(Self::DurationItem(end - start)) + } else { + Err(ItemError::InvalidEpoch) + } + } else { + Err(ItemError::InvalidDuration) + } + /* + * SV + */ + } else if SV::from_str(items[0].trim()).is_ok() { + //TODO improve this: + // do not test 1st entry only but all possible content + Ok(Self::SvItem(parse_sv_list(items)?)) + /* + * GNSS Constellation + */ + } else if Constellation::from_str(items[0].trim()).is_ok() { + //TODO improve this: + // do not test 1st entry only but all possible content + Ok(Self::ConstellationItem(parse_gnss_list(items)?)) + } else { + // define this item as "complex" + Ok(Self::ComplexItem( + items.iter().map(|s| s.to_string()).collect(), + )) + } + } +} + +impl From for FilterItem { + fn from(e: Epoch) -> Self { + Self::EpochItem(e) + } +} + +impl From for FilterItem { + fn from(dt: Duration) -> Self { + Self::DurationItem(dt) + } +} + +impl From for FilterItem { + fn from(sv: SV) -> Self { + Self::SvItem(vec![sv]) + } +} + +impl From> for FilterItem { + fn from(sv: Vec) -> Self {
Self::SvItem(sv.clone()) + } +} + +impl From for FilterItem { + fn from(c: Constellation) -> Self { + Self::ConstellationItem(vec![c]) + } +} + +impl From> for FilterItem { + fn from(c: Vec) -> Self { + Self::ConstellationItem(c.clone()) + } +} + +impl std::fmt::Display for FilterItem { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::ConstellationItem(gnss) => { + write!(f, "gnss: {:?}", gnss) + }, + Self::SvItem(svs) => { + write!(f, "sv: {:?}", svs) + }, + _ => Ok(()), + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use gnss_rs::prelude::{Constellation, SV}; + use std::str::FromStr; + #[test] + fn algo_target_item() { + let e = Epoch::default(); + let target: FilterItem = e.into(); + assert_eq!(target, FilterItem::EpochItem(e)); + + assert_eq!( + FilterItem::from_str("g08,g09,R03").unwrap(), + FilterItem::SvItem(vec![ + SV::from_str("G08").unwrap(), + SV::from_str("G09").unwrap(), + SV::from_str("R03").unwrap() + ]) + ); + + assert_eq!( + FilterItem::from_str("GPS , BDS").unwrap(), + FilterItem::ConstellationItem(vec![Constellation::GPS, Constellation::BeiDou]) + ); + + let dt = Duration::from_str("1 d").unwrap(); + let target: FilterItem = dt.into(); + assert_eq!(target, FilterItem::DurationItem(dt)); + } + #[test] + fn test_from_elevation() { + let desc = "90"; + assert!( + FilterItem::from_elevation(desc).is_ok(), + "Failed to parse Elevation Target Item" + ); + } + #[test] + fn test_from_azimuth() { + let desc = " 12.34 "; + assert!( + FilterItem::from_azimuth(desc).is_ok(), + "Failed to parse Azimuth Target Item" + ); + } + #[test] + fn test_from_snr() { + let desc = " 12.34 "; + assert!( + FilterItem::from_snr(desc).is_ok(), + "Failed to parse SNR Target Item" + ); + } +} diff --git a/qc-traits/src/processing/mask.rs b/qc-traits/src/processing/mask.rs new file mode 100644 index 000000000..9eb31f0d9 --- /dev/null +++ b/qc-traits/src/processing/mask.rs @@ -0,0 +1,407 @@ +use crate::processing::{FilterItem, 
ItemError}; +use thiserror::Error; + +/// Mask filter parsing errors +#[derive(Error, Debug)] +pub enum Error { + #[error("invalid mask item")] + InvalidMaskitem(#[from] ItemError), + #[error("missing mask operand")] + MissingOperand, + #[error("invalid mask operand")] + InvalidOperand, + #[error("invalid mask target \"{0}\"")] + NonSupportedTarget(String), + #[error("invalid mask description")] + InvalidDescriptor, +} + +/// Masking trait, to retain specific GNSS data subsets. +/// This can be used to retain specific signals or [Constellation]s. +pub trait Masking { + /// Apply [MaskFilter] to mutable self. + fn mask_mut(&mut self, mask: &MaskFilter); + /// Apply [MaskFilter] to immutable self. + fn mask(&self, mask: &MaskFilter) -> Self; +} + +/// MaskOperand describes how to apply a given mask +#[derive(Debug, Clone, PartialEq)] +pub enum MaskOperand { + /// Greater than, symbolized by ">". + GreaterThan, + /// Greater Equals, symbolized by ">=". + GreaterEquals, + /// Lower than, symbolized by "<". + LowerThan, + /// Lower Equals, symbolized by "<=". + LowerEquals, + /// Equals, symbolized by "=". + /// Equals operand is implied anytime the operand is omitted in the description. + Equals, + /// Not Equals, symbolized by "!=".
+ NotEquals, +} + +impl std::str::FromStr for MaskOperand { + type Err = Error; + fn from_str(content: &str) -> Result { + let c = content.trim(); + if c.starts_with(">=") { + Ok(Self::GreaterEquals) + } else if c.starts_with('>') { + Ok(Self::GreaterThan) + } else if c.starts_with("<=") { + Ok(Self::LowerEquals) + } else if c.starts_with('<') { + Ok(Self::LowerThan) + } else if c.starts_with('=') { + Ok(Self::Equals) + } else if c.starts_with("!=") { + Ok(Self::NotEquals) + } else { + Err(Error::InvalidOperand) + } + } +} + +impl MaskOperand { + pub(crate) const fn formatted_len(&self) -> usize { + match &self { + Self::Equals | Self::GreaterThan | Self::LowerThan => 1, + Self::NotEquals | Self::LowerEquals | Self::GreaterEquals => 2, + } + } +} + +impl std::ops::Not for MaskOperand { + type Output = Self; + fn not(self) -> Self { + match self { + Self::Equals => Self::NotEquals, + Self::NotEquals => Self::Equals, + Self::GreaterEquals => Self::LowerEquals, + Self::GreaterThan => Self::LowerThan, + Self::LowerThan => Self::GreaterThan, + Self::LowerEquals => Self::GreaterEquals, + } + } +} + +/// Apply MaskFilters to focus on data subsets you're interested in. +#[derive(Debug, Clone, PartialEq)] +pub struct MaskFilter { + /// Item describes what subset this [MaskFilter] applies to.
+ pub item: FilterItem, + /// Operand describes how to apply this [MaskFilter] + pub operand: MaskOperand, +} + +impl std::ops::Not for MaskFilter { + type Output = MaskFilter; + fn not(self) -> Self { + Self { + operand: !self.operand, + item: self.item, + } + } +} + +impl std::ops::BitOr for MaskFilter { + type Output = Self; + fn bitor(self, rhs: Self) -> Self { + if self.operand == rhs.operand { + Self { + operand: self.operand, + item: self.item | rhs.item, + } + } else { + // not permitted on operand mismatch + self.clone() + } + } +} + +impl std::ops::BitOrAssign for MaskFilter { + fn bitor_assign(&mut self, rhs: Self) { + self.item = self.item.clone() | rhs.item; + } +} + +impl std::str::FromStr for MaskFilter { + type Err = Error; + fn from_str(content: &str) -> Result { + let cleanedup = content.trim_start(); + if cleanedup.len() < 2 { + /* + * we're most likely unable to parse both + * an operand and a filter payload + */ + return Err(Error::InvalidDescriptor); + } + + let mut operand: Option = None; + let mut operand_offset: Option = None; + // In some cases, the target item comes first. + // This allows more "human readable" descriptions, + // but makes parsing a little harder.
+ + // Try to locate a mask operand within given content + for i in 0..cleanedup.len() - 1 { + if i < cleanedup.len() - 2 { + if let Ok(op) = MaskOperand::from_str(&cleanedup[i..i + 2]) { + operand = Some(op.clone()); + operand_offset = Some(i); + break; + } + } else if let Ok(op) = MaskOperand::from_str(&cleanedup[i..i + 1]) { + operand = Some(op.clone()); + operand_offset = Some(i); + break; + } + } + + let operand_omitted = operand_offset.is_none(); + + let (operand, operand_offset): (MaskOperand, usize) = match operand_offset.is_some() { + true => (operand.unwrap(), operand_offset.unwrap()), + false => { + /* + * Operand was not found, it's either omitted and Eq() is implied, + * or this parser will soon fail due to faulty content + */ + (MaskOperand::Equals, 0) + }, + }; + + if operand_offset > 0 { + // Some characters exist between .start() and identified operand. + // Type guessing for filter target will not work. + // This only exists for Elevation Angle, Azimuth Angle and SNR masks at the moment.
+ + // Simply due to the fact that the operand is located + // after the identifier, in those cases + + let start = &cleanedup[..operand_offset]; + if start[0..1].eq("e") { + // --> Elevation Mask case + let float_offset = operand_offset + operand.formatted_len() + 2; + Ok(Self { + operand, + item: FilterItem::from_elevation(cleanedup[float_offset..].trim())?, + }) + } else if start[0..1].eq("a") { + // --> Azimuth Mask case + let float_offset = operand_offset + operand.formatted_len() + 2; + Ok(Self { + operand, + item: FilterItem::from_azimuth(cleanedup[float_offset..].trim())?, + }) + } else { + // We're only left with SNR mask case + let float_offset = operand_offset + operand.formatted_len() + 2; + if cleanedup[0..3].eq("snr") { + Ok(Self { + operand, + item: FilterItem::from_snr(cleanedup[float_offset..].trim())?, + }) + } else { + Err(Error::NonSupportedTarget( + cleanedup[..operand_offset].to_string(), + )) + } + } + } else { + // Descriptor starts with mask operand. + // Filter target type guessing is possible.
+ let offset: usize = match operand_omitted { + false => operand_offset + operand.formatted_len(), + true => 0, + }; + + Ok(Self { + operand, + item: FilterItem::from_str(cleanedup[offset..].trim_start())?, + }) + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use gnss_rs::prelude::{Constellation, SV}; + use hifitime::Epoch; + use std::str::FromStr; + #[test] + fn mask_operand() { + for (descriptor, opposite_desc) in [ + (">=", "<="), + (">", "<"), + ("=", "!="), + ("<", ">"), + ("<=", ">="), + ] { + let operand = MaskOperand::from_str(descriptor); + assert!( + operand.is_ok(), + "{} \"{}\"", + "Failed to parse MaskOperand from", + descriptor + ); + let opposite = MaskOperand::from_str(opposite_desc); + assert!( + opposite.is_ok(), + "{} \"{}\"", + "Failed to parse MaskOperand from", + opposite_desc + ); + assert_eq!(!operand.unwrap(), opposite.unwrap(), "MaskOperand::Not()"); + } + + let operand = MaskOperand::from_str("a"); + assert!( + operand.is_err(), + "Parsed unexpectedly \"{}\" MaskOperand correctly", + "a" + ); + } + #[test] + fn mask_epoch() { + let mask = MaskFilter::from_str(">2020-01-14T00:31:55 UTC").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::GreaterThan, + item: FilterItem::EpochItem(Epoch::from_str("2020-01-14T00:31:55 UTC").unwrap()), + } + ); + let mask = MaskFilter::from_str(">JD 2452312.500372511 TAI"); + assert!(mask.is_ok()); + } + #[test] + fn mask_elev() { + for (desc, valid) in [ + ("e>1.0", true), + ("e< 40.0", true), + ("e != 30", true), + (" e<40.0", true), + (" e < 40.0", true), + (" e > 120", false), + (" e >= 120", false), + (" e = 30", true), + ] { + let mask = MaskFilter::from_str(desc); + assert_eq!( + mask.is_ok(), + valid, + "failed to parse elevation mask filter \"{}\"", + desc + ); + } + } + #[test] + fn mask_gnss() { + for (descriptor, opposite_desc) in [ + (" = GPS", "!= GPS"), + ("= GAL,GPS", "!= GAL,GPS"), + (" =GLO,GAL", "!= GLO,GAL"), + ] { + let mask = MaskFilter::from_str(descriptor); 
+ assert!( + mask.is_ok(), + "Unable to parse MaskFilter from \"{}\"", + descriptor + ); + let opposite = MaskFilter::from_str(opposite_desc); + assert!( + opposite.is_ok(), + "Unable to parse MaskFilter from \"{}\"", + opposite_desc + ); + assert_eq!(!mask.unwrap(), opposite.unwrap(), "{}", "MaskFilter::Not()"); + } + + let mask = MaskFilter::from_str("=GPS,GAL,GLO").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::Equals, + item: FilterItem::ConstellationItem(vec![ + Constellation::GPS, + Constellation::Galileo, + Constellation::Glonass + ]), + } + ); + + let mask = MaskFilter::from_str("!=BDS").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::NotEquals, + item: FilterItem::ConstellationItem(vec![Constellation::BeiDou]), + } + ); + } + #[test] + fn mask_sv() { + for (descriptor, opposite_desc) in [(" = G01", "!= G01"), ("= R03, G31", "!= R03, G31")] { + let mask = MaskFilter::from_str(descriptor); + assert!( + mask.is_ok(), + "Unable to parse MaskFilter from \"{}\"", + descriptor + ); + let opposite = MaskFilter::from_str(opposite_desc); + assert!( + opposite.is_ok(), + "Unable to parse MaskFilter from \"{}\"", + opposite_desc + ); + assert_eq!(!mask.unwrap(), opposite.unwrap(), "{}", "MaskFilter::Not()"); + } + + let mask = MaskFilter::from_str("=G08, G09, R03").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::Equals, + item: FilterItem::SvItem(vec![ + SV::from_str("G08").unwrap(), + SV::from_str("G09").unwrap(), + SV::from_str("R03").unwrap(), + ]), + } + ); + let m2 = MaskFilter::from_str("G08,G09,R03").unwrap(); + assert_eq!(mask, m2); + + let mask = MaskFilter::from_str("!=G31").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::NotEquals, + item: FilterItem::SvItem(vec![SV::from_str("G31").unwrap(),]), + } + ); + let m2 = MaskFilter::from_str("!=G31").unwrap(); + assert_eq!(mask, m2); + } + #[test] + fn mask_complex() { + let mask = 
MaskFilter::from_str("=L1C,S1C,D1P,C1W").unwrap(); + assert_eq!( + mask, + MaskFilter { + operand: MaskOperand::Equals, + item: FilterItem::ComplexItem(vec![ + "L1C".to_string(), + "S1C".to_string(), + "D1P".to_string(), + "C1W".to_string() + ]) + } + ); + } +} diff --git a/qc-traits/src/processing/mod.rs b/qc-traits/src/processing/mod.rs new file mode 100644 index 000000000..6d7d0b6c3 --- /dev/null +++ b/qc-traits/src/processing/mod.rs @@ -0,0 +1,218 @@ +//! Processing toolkit, including filter designer. +use std::str::FromStr; +use thiserror::Error; + +mod item; +pub use item::{FilterItem, ItemError}; + +mod mask; +pub use mask::{Error as MaskError, MaskFilter, MaskOperand, Masking}; + +mod decim; +pub use decim::{Decimate, DecimationFilter, DecimationFilterType, Error as DecimationError}; + +/// Preprocessing Trait is usually implemented by GNSS data +/// to preprocess prior further analysis. +pub trait Preprocessing: Masking + Decimate { + /// Apply [Filter] algorithm on immutable dataset. + fn filter(&self, filter: &Filter) -> Self + where + Self: Sized, + { + match filter { + Filter::Mask(f) => self.mask(f), + Filter::Decimation(f) => self.decimate(f), + } + } + /// Apply [Filter] algorithm on mutable dataset. 
+ fn filter_mut(&mut self, filter: &Filter) { + match filter { + Filter::Mask(f) => self.mask_mut(f), + Filter::Decimation(f) => self.decimate_mut(f), + } + } +} + +// pub use filters::{ +// Decimate, DecimationFilter, DecimationType, Filter, InterpFilter, InterpMethod, Interpolate, +// Mask, MaskFilter, MaskOperand, Preprocessing, Smooth, SmoothingFilter, SmoothingType, +// }; + +//pub use averaging::Averager; +//pub use derivative::Derivative; + +#[derive(Error, Debug)] +pub enum Error { + #[error("invalid filter")] + InvalidFilter, + #[error("unknown filter type \"{0}\"")] + UnknownFilterType(String), + #[error("invalid mask filter")] + MaskFilterParsing(#[from] MaskError), + #[error("invalid filter item")] + FilterItemError(#[from] ItemError), + #[error("invalid decimation filter")] + DecimationFilterParsing(#[from] DecimationError), +} + +/// Preprocessing filters, to preprocess RINEX data prior to further analysis. +/// Filters can apply either on entire RINEX or subsets. +/// Refer to [FilterItem] definition to understand which data subsets exist.
+#[derive(Debug, Clone, PartialEq)] +pub enum Filter { + /// Mask filter, to focus on specific data subsets + Mask(MaskFilter), + /// Decimation filter, filters to reduce sample rate + Decimation(DecimationFilter), + // /// Interpolation filter is work in progress and cannot be used at the moment + // Interp(InterpFilter), +} + +impl Filter { + /// Builds new [MaskFilter] from given specs + pub fn mask(operand: MaskOperand, item: FilterItem) -> Self { + Self::Mask(MaskFilter { operand, item }) + } + /// Builds new [MaskFilter] with Equals operand + /// from following [FilterItem] description + pub fn equals(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::Equals, item)) + } + /// Builds new [MaskFilter] with !Equals operand + /// from following [FilterItem] description + pub fn not_equals(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::NotEquals, item)) + } + /// Builds new [MaskFilter] with GreaterThan operand + /// from following [FilterItem] description + pub fn greater_than(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::GreaterThan, item)) + } + /// Builds new [MaskFilter] with GreaterEquals operand + /// from following [FilterItem] description + pub fn greater_equals(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::GreaterEquals, item)) + } + /// Builds new [MaskFilter] with LowerEquals operand + /// from following [FilterItem] description + pub fn lower_equals(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::LowerEquals, item)) + } + /// Builds new [MaskFilter] with LowerThan operand + /// from following [FilterItem] description + pub fn lower_than(item: &str) -> Result { + let item = FilterItem::from_str(item)?; + Ok(Self::mask(MaskOperand::LowerThan, item)) + } +} + +impl From for Filter { + fn from(mask: MaskFilter) -> Self { + 
Self::Mask(mask) + } +} + +impl std::ops::Not for Filter { + type Output = Self; + fn not(self) -> Self { + match self { + Self::Mask(f) => Self::Mask(!f), + _ => self.clone(), // does not apply + } + } +} + +impl From for Filter { + fn from(decim: decim::DecimationFilter) -> Self { + Self::Decimation(decim) + } +} + +impl std::str::FromStr for Filter { + type Err = Error; + fn from_str(content: &str) -> Result { + let items: Vec<&str> = content.split(':').collect(); + + let identifier = items[0].trim(); + if identifier.eq("decim") { + let offset = 6; //"decim:" + Ok(Self::Decimation(DecimationFilter::from_str( + content[offset..].trim(), + )?)) + } else if identifier.eq("mask") { + let offset = 5; //"mask:" + Ok(Self::Mask(MaskFilter::from_str(content[offset..].trim())?)) + } else { + // assume Mask (omitted identifier) + if let Ok(f) = MaskFilter::from_str(content.trim()) { + Ok(Self::Mask(f)) + } else { + Err(Error::UnknownFilterType(content.to_string())) + } + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use std::str::FromStr; + #[test] + fn from_str() { + /* + * MASK FILTER description + */ + for descriptor in [ + "GPS", + "=GPS", + " != GPS", + "G08, G09, G10", + "=G08, G09, G10", + "!= GPS, GAL", + ">G08, G09", + "iode", + "iode,gps", + "iode,crs,gps", + "iode,crs", + ">2020-01-14T00:31:55 UTC", + ] { + assert!( + Filter::from_str(descriptor).is_ok(), + "Filter::from_str failed on \"{}\"", + descriptor + ); + } + /* + * DECIMATION FILTER description + */ + for desc in [ + "decim:10", + "decim:10 min", + "decim:1 hour", + "decim:10 min:l1c", + "decim:1 hour:L1C,L2C,L3C", + ] { + let filt = Filter::from_str(desc); + assert!(filt.is_ok(), "Filter::from_str failed on \"{}\"", desc); + } + /* + * SMOOTHING FILTER description + */ + for desc in [ + "smooth:mov:10 min", + "smooth:mov:1 hour", + "smooth:mov:1 hour:l1c", + "smooth:mov:10 min:clk", + "smooth:hatch", + "smooth:hatch:l1c", + ] { + let filt = Filter::from_str(desc); + assert!(filt.is_ok(), 
"Filter::from_str failed on \"{}\"", desc); + } + } +} diff --git a/rinex-cli/Cargo.toml b/rinex-cli/Cargo.toml index 1ea3bc3c9..d0a371e70 100644 --- a/rinex-cli/Cargo.toml +++ b/rinex-cli/Cargo.toml @@ -21,38 +21,36 @@ rust-version = "1.64" log = "0.4" gpx = "0.10" kml = "0.8" -statrs = "0.16" walkdir = "2.4.0" geo-types = "0.7.11" env_logger = "0.11" -rand = "0.8.4" serde_json = "1" lazy_static = "1.4" thiserror = "1" itertools = "0.13" map_3d = "0.1.5" -# ndarray = "0.15" -colorous = "1.0" -horrorshow = "0.8" +maud = "0.26" + clap = { version = "4.4.13", features = ["derive", "color"] } -plotly = { git = "https://github.com/plotly/plotly.rs", branch = "main" } +serde = { version = "1.0", default-features = false, features = ["derive"] } + +# plotly = "0.9" +# plotly = { path = "../../plotly-rs/plotly" } +plotly = { git = "https://github.com/gwbres/plotly", branch = "scattergeo" } anise = { version = "0.4.2", features = ["embed_ephem"] } -gnss-rs = { version = "2.2.0", features = ["serde"] } hifitime = { version = "4.0.0-alpha", features = ["serde", "std"] } -rinex = { path = "../rinex", version = "=0.16.1", features = ["full"] } -rinex-qc = { path = "../rinex-qc", version = "=0.1.14", features = ["serde"] } -sp3 = { path = "../sp3", version = "=1.0.8", features = ["serde", "flate2"] } -serde = { version = "1.0", default-features = false, features = ["derive"] } +gnss-rs = { version = "2.2.0", features = ["serde"] } # gnss-rtk = { version = "0.4.5", features = ["serde"] } -# gnss-rtk = { path = "../../gnss-rtk", features = ["serde"] } +# gnss-rtk = { path = "../../rtk-rs/gnss-rtk", features = ["serde"] } gnss-rtk = { git = "https://github.com/rtk-rs/gnss-rtk", branch = "main", features = ["serde"] } # cggtts = { version = "4.1.4", features = ["serde", "scheduler"] } # cggtts = { path = "../../cggtts/cggtts", features = ["serde", "scheduler"] } -cggtts = { git = "https://github.com/gwbres/cggtts", branch = "main", features = [ - "serde", - "scheduler", -] } 
+cggtts = { git = "https://github.com/gwbres/cggtts", branch = "main", features = ["serde", "scheduler"] } + +rinex = { path = "../rinex", version = "=0.16.1", features = ["full"] } +sp3 = { path = "../sp3", version = "=1.0.8", features = ["serde", "flate2"] } +rinex-qc = { path = "../rinex-qc", version = "=0.1.14", features = ["serde", "sp3"] } diff --git a/rinex-cli/config/qc/gnss_snr30db.json b/rinex-cli/config/qc/gnss_snr30db.json deleted file mode 100644 index cd123eafb..000000000 --- a/rinex-cli/config/qc/gnss_snr30db.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "classification": "GNSS", - "min_snr_db": 30.0 -} diff --git a/rinex-cli/config/qc/sv_manual_gap.json b/rinex-cli/config/qc/sv_manual_gap.json deleted file mode 100644 index d7b4f68f6..000000000 --- a/rinex-cli/config/qc/sv_manual_gap.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "classification": "Sv", - "gap_tolerance": { - "centuries": 0, - "nanoseconds": 7200000000000 - } -} diff --git a/rinex-cli/config/rtk/gpst_cpp_kf.json b/rinex-cli/config/rtk/gpst_ppp_kf.json similarity index 94% rename from rinex-cli/config/rtk/gpst_cpp_kf.json rename to rinex-cli/config/rtk/gpst_ppp_kf.json index fd915184e..bb686faf8 100644 --- a/rinex-cli/config/rtk/gpst_cpp_kf.json +++ b/rinex-cli/config/rtk/gpst_ppp_kf.json @@ -1,5 +1,5 @@ { - "method": "CPP", + "method": "PPP", "timescale": "GPST", "interp_order": 17, "min_sv_elev": 5.0, diff --git a/rinex-cli/src/analysis/mod.rs b/rinex-cli/src/analysis/mod.rs deleted file mode 100644 index faf9b3a50..000000000 --- a/rinex-cli/src/analysis/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -mod sv_epoch; -// pub use sv_epoch::sv_epoch; - -pub mod sampling; diff --git a/rinex-cli/src/analysis/sampling.rs b/rinex-cli/src/analysis/sampling.rs deleted file mode 100644 index 7d01c9bfb..000000000 --- a/rinex-cli/src/analysis/sampling.rs +++ /dev/null @@ -1,26 +0,0 @@ -use crate::graph::PlotContext; -use itertools::Itertools; -use plotly::Histogram; -use rinex::prelude::{ProductType, RnxContext}; - 
-/* - * Sampling histogram - */ -pub fn histogram(ctx: &RnxContext, plot_ctx: &mut PlotContext) { - plot_ctx.add_timedomain_plot("Sampling Histogram", "Count"); - for product in [ - ProductType::Observation, - ProductType::MeteoObservation, - ProductType::BroadcastNavigation, - ProductType::HighPrecisionClock, - ProductType::Ionex, - ] { - if let Some(data) = ctx.rinex(product) { - let histogram = data.sampling_histogram().sorted(); - let durations: Vec<_> = histogram.clone().map(|(dt, _)| dt.to_string()).collect(); - let populations: Vec<_> = histogram.clone().map(|(_, pop)| pop.to_string()).collect(); - let histogram = Histogram::new_xy(durations, populations).name(&format!("{}", product)); - plot_ctx.add_trace(histogram); - } - } -} diff --git a/rinex-cli/src/analysis/sv_epoch.rs b/rinex-cli/src/analysis/sv_epoch.rs deleted file mode 100644 index cef604df0..000000000 --- a/rinex-cli/src/analysis/sv_epoch.rs +++ /dev/null @@ -1,104 +0,0 @@ -// use crate::plot::{build_chart_epoch_axis, generate_markers, PlotContext}; -// use ndarray::Array; -// use plotly::common::{Marker, Mode, Title, Visible}; -// use plotly::layout::Axis; -// use rinex::prelude::RnxContext; -// use rinex::prelude::*; - -/* - * Sv periepoch analysis -pub fn sv_epoch(ctx: &RnxContext, plot_ctx: &mut PlotContext) { - plot_ctx.add_cartesian2d_plot("Sv per Epoch", "Sv(PRN#)"); - * plot customization - * We're plotting PRN#, set dy to +/- 1 - * for nicer rendition - let plot_item = plot_ctx.plot_item_mut().unwrap(); - let layout = plot_item.layout().clone().y_axis( - Axis::new() - .title(Title::new("PRN#")) - .zero_line(false) - .dtick(1.0), - ); - plot_item.set_layout(layout); - - // Design markers / symbols - // one per constellation system - let constellations: Vec<_> = ctx.primary_data().constellation().collect(); - let mut nb_markers = constellations.len(); - - if let Some(nav) = ctx.navigation_data() { - nb_markers += nav.constellation().count(); - } - - let markers = 
generate_markers(nb_markers); - - let data: Vec<_> = ctx.primary_data().sv_epoch().collect(); - - for (sv_index, sv) in ctx.primary_data().sv().enumerate() { - let epochs: Vec = data - .iter() - .filter_map(|(epoch, ssv)| { - if ssv.contains(&sv) { - Some(*epoch) - } else { - None - } - }) - .collect(); - let constell_index = constellations - .iter() - .position(|c| *c == sv.constellation) - .unwrap(); - let prn = Array::linspace(0.0, 1.0, epochs.len()); - let prn: Vec = prn.iter().map(|_| sv.prn as f64).collect(); - let marker = &markers[constell_index]; - let trace = build_chart_epoch_axis(&sv.to_string(), Mode::Markers, epochs, prn) - .marker(Marker::new().symbol(marker.clone())) - .visible({ - // improves plot generation speed, on large files - if sv_index < 4 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - - if let Some(nav) = &ctx.navigation_data() { - let data: Vec<_> = nav.sv_epoch().collect(); - let nav_constell: Vec<_> = nav.constellation().collect(); - - for (sv_index, sv) in nav.sv().enumerate() { - let epochs: Vec = data - .iter() - .filter_map(|(epoch, ssv)| { - if ssv.contains(&sv) { - Some(*epoch) - } else { - None - } - }) - .collect(); - let constell_index = nav_constell - .iter() - .position(|c| *c == sv.constellation) - .unwrap(); - let prn = Array::linspace(0.0, 1.0, epochs.len()); - let prn: Vec = prn.iter().map(|_| sv.prn as f64).collect(); - let marker = &markers[constell_index]; - let trace = build_chart_epoch_axis(&format!("{}(NAV)", sv), Mode::Markers, epochs, prn) - .marker(Marker::new().symbol(marker.clone())) - .visible({ - // improves plot generation speed, on large files - if sv_index < 4 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - } -} - */ diff --git a/rinex-cli/src/cli/fops/substract.rs b/rinex-cli/src/cli/fops/diff.rs similarity index 80% rename from rinex-cli/src/cli/fops/substract.rs rename to rinex-cli/src/cli/fops/diff.rs index 
8579835b0..7b6017083 100644 --- a/rinex-cli/src/cli/fops/substract.rs +++ b/rinex-cli/src/cli/fops/diff.rs @@ -5,12 +5,11 @@ use std::path::PathBuf; use super::{SHARED_DATA_ARGS, SHARED_GENERAL_ARGS}; pub fn subcommand() -> Command { - Command::new("sub") - .long_flag("sub") + Command::new("diff") .arg_required_else_help(true) .about( - "RINEX(A)-RINEX(B) substraction operation. -This is typically used to compare two GNSS receivers together.", + "RINEX(A)-RINEX(B) substraction operation. This is a simple mean +to compare two GNSS receivers to one another, by direct PR or Phase data comparison.", ) .arg( Arg::new("file") diff --git a/rinex-cli/src/cli/fops/mod.rs b/rinex-cli/src/cli/fops/mod.rs index fecc690fe..c0eb88f50 100644 --- a/rinex-cli/src/cli/fops/mod.rs +++ b/rinex-cli/src/cli/fops/mod.rs @@ -1,7 +1,7 @@ +pub mod diff; pub mod filegen; pub mod merge; pub mod split; -pub mod substract; pub mod time_binning; use lazy_static::lazy_static; @@ -31,7 +31,18 @@ Otherwise, this ecosystem prefers modern (longer) filenames that contain more in Arg::new("gzip") .long("gzip") .action(ArgAction::SetTrue) - .help("Append .gz suffix and perform seamless Gzip compression."), + .help("Force .gzip compressed file generation, even if input data is not."), + Arg::new("unzip") + .long("unzip") + .action(ArgAction::SetTrue) + .help("Force plain/readable file generation. By default, if input data is gzip compressed, we will preserve +the input compression. Use this to bypass."), + Arg::new("csv") + .long("csv") + .action(ArgAction::SetTrue) + .help("Extract dataset and generate as CSV instead of RINEX. +Use this when targetting third party tools. 
+Only applies to Observation / Meteo RINEX files"), Arg::new("agency") .short('a') .long("agency") diff --git a/rinex-cli/src/cli/fops/split.rs b/rinex-cli/src/cli/fops/split.rs index c7bf2b434..1d4e4b8ac 100644 --- a/rinex-cli/src/cli/fops/split.rs +++ b/rinex-cli/src/cli/fops/split.rs @@ -6,8 +6,6 @@ use super::{SHARED_DATA_ARGS, SHARED_GENERAL_ARGS}; pub fn subcommand() -> Command { Command::new("split") - .short_flag('s') - .long_flag("split") .arg_required_else_help(true) .about("Split input file(s) at specified Epoch") .arg( diff --git a/rinex-cli/src/cli/fops/time_binning.rs b/rinex-cli/src/cli/fops/time_binning.rs index 043dfd590..efcc82b32 100644 --- a/rinex-cli/src/cli/fops/time_binning.rs +++ b/rinex-cli/src/cli/fops/time_binning.rs @@ -6,9 +6,8 @@ use super::{SHARED_DATA_ARGS, SHARED_GENERAL_ARGS}; pub fn subcommand() -> Command { Command::new("tbin") - .long_flag("tbin") .arg_required_else_help(true) - .about("Time binning. Split RINEX files into a batch of equal duration.") + .about("Time binning. Split files into a batch of equal duration.") .arg( Arg::new("interval") .value_parser(value_parser!(Duration)) diff --git a/rinex-cli/src/cli/graph.rs b/rinex-cli/src/cli/graph.rs index 74c9a9b17..e69de29bb 100644 --- a/rinex-cli/src/cli/graph.rs +++ b/rinex-cli/src/cli/graph.rs @@ -1,237 +0,0 @@ -use clap::{Arg, ArgAction, Command}; - -pub fn subcommand() -> Command { - Command::new("graph") - .short_flag('g') - .long_flag("graph") - .arg_required_else_help(true) - .about( - "RINEX data analysis and visualization, rendered as HTML or CSV in the workspace. See -g --help.", - ) - .long_about("Analysis and plots (in HTML). -When Observations are present, whether they come from Observation RINEX, Meteo or DORIS RINEX, -we can export the results as CSV too. This is particularly useful to export the results of the analysis -to other tools.") - .arg( - Arg::new("csv") - .long("csv") - .action(ArgAction::SetTrue) - .help("Extract Data as CSV along HTML plots. 
See --help.") - .long_help("This is particularly helpful if you are interested in -using our toolbox as data parser and preprocessor and inject the results to third party programs.") - ) - .next_help_heading( - "RINEX dependent visualizations. - Will only generate graphs if related dataset is present.", - ) - .next_help_heading("Observations rendering (OBS, Meteo, DORIS)") - .arg( - Arg::new("obs") - .short('o') - .long("obs") - .action(ArgAction::SetTrue) - .help("Plot all observables described in either Observation, Meteo or DORIS RINEX. See --help") - .long_help("Use this option to plot all observations. -OBS RINEX gives GNSS signals observations, but we also support Meteo RINEX and DORIS (special observation) RINEX. - -Example (1): render GNSS signals (all of them, whether it be Phase or PR) for GPS. -Extract as CSV at the same time: - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P GPS -g --obs --csv - -Example (2): render meteo sensor observations similary. - -./target/release/rinex-cli \\ - -f test_resources/MET/V3/POTS00DEU_R_20232540000_01D_05M_MM.rnx.gz \\ - -g --obs --csv - -Example (3): render DORIS observations similarly. - -./target/release/rinex-cli \\ - -f test_resources/OR/V3/cs2rx18164.gz -g --obs --csv - -Example (4): render OBS + Meteo combination at once. -RINEX-Cli allows loading OBS and Meteo in one session. -In graph mode, this means we can render both in a single run. 
- -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -f test_resources/MET/V3/POTS00DEU_R_20232540000_01D_05M_MM.rnx.gz \\ - -g --obs --csv -") - - ) - .next_help_heading("GNSS signals (requires OBS and/or DORIS RINEX)") - .arg( - Arg::new("dcb") - .long("dcb") - .action(ArgAction::SetTrue) - .help("Plot Differential Code Bias.") - .long_help( -"Plot Differential Code bias of the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --dcb") - ) - .arg( - Arg::new("mp") - .long("mp") - .action(ArgAction::SetTrue) - .help("Plot Code Multipath.") - .long_help( -"Plot Code Multipath bias from the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --mp") - ) - .arg( - Arg::new("if") - .short('i') - .long("if") - .action(ArgAction::SetTrue) - .help("Plot Ionosphere Free (IF) signal combination.") - .long_help( -"Plot Ionosphere free signal combination, for the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --if") - ) - .arg( - Arg::new("gf") - .long("gf") - .short('g') - .action(ArgAction::SetTrue) - .help("Plot Geometry Free (GF) signal combination.") - .long_help( -"Plot Geometry free signal combination, for the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --gf") - ) - .arg( - Arg::new("wl") - .long("wl") - .short('w') - .action(ArgAction::SetTrue) - .help("Plot Wide Lane (WL) signal combination.") - .long_help( -"Plot Widelane signal combination, for the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f 
test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --wl") - ) - .arg( - Arg::new("nl") - .long("nl") - .short('n') - .action(ArgAction::SetTrue) - .help("Plot Narrow Lane (WL) signal combination.") - .long_help( -"Plot Narrowlane signal combination, for the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --nl") - ) - .arg( - Arg::new("mw") - .long("mw") - .short('m') - .action(ArgAction::SetTrue) - .help("Plot Melbourne-Wübbena (MW) signal combination.") - .long_help( -"Plot Melbourne-Wubbena signal combination for the 5 following spacecrafts - -./target/release/rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -P G06,E13,C14,G15,E31 \\ - -g --mw") - ) - .arg(Arg::new("cs").long("cs").action(ArgAction::SetTrue).help( - "Phase / Cycle Slip graph. -Plots raw phase signal with blackened sample where either CS was declared by receiver, -or we post processed determined a CS.", - )) - .next_help_heading("Navigation (requires NAV RINEX and/or SP3)") - .arg( - Arg::new("skyplot") - .short('s') - .long("sky") - .action(ArgAction::SetTrue) - .help("Skyplot: SV position in the sky, on a compass."), - ) - .arg( - Arg::new("orbit") - .long("orbit") - .action(ArgAction::SetTrue) - .help("3D projection of SV attitudes in the sky."), - ) - .arg( - Arg::new("orbit-residual") - .long("orbit-residual") - .action(ArgAction::SetTrue) - .help( - "Broadcast versus High Precision orbital product comparison |BRDC - SP3|. -Requires both NAV RINEX and SP3 that overlap in time. -It is the orbital equuivalent to |BRDC-CLK| requested with --clk-residual.")) - .arg( - Arg::new("naviplot") - .long("naviplot") - .action(ArgAction::SetTrue) - .help( - "SV orbital attitude projected in 3D. 
-Ideal for precise positioning decision making.", - ), - ) - .next_help_heading("Clock states (requires either NAV RINEX, CLK RINEX or SP3)") - .arg( - Arg::new("sv-clock") - .short('c') - .long("clk") - .action(ArgAction::SetTrue) - .help("SV clock bias (offset, drift, drift changes).") - ) - .arg( - Arg::new("clk-residual") - .long("clk-residual") - .action(ArgAction::SetTrue) - .help("Broadcast versus High Precision clock product comparison |BRDC - CLK|. -Requires both NAV RINEX and Clock RINEX that overlap in time. -It is the temporal equuivalent to |BRDC-SP3| requested with --sp3-residual.") - ) - .next_help_heading("Atmosphere conditions") - .arg( - Arg::new("tropo") - .long("tropo") - .action(ArgAction::SetTrue) - .help("Plot tropospheric delay from meteo sensors estimation. Requires METEO RINEX."), - ) - .arg( - Arg::new("tec") - .long("tec") - .action(ArgAction::SetTrue) - .help("Plot the TEC map. Requires at least one IONEX file. See --help") - .long_help("Plot the worldwide TEC map, usually presented in 24hr time frame. 
-Example: -rinex-cli -f test_resources/IONEX/V1/CKMG0080.09I.gz -g --tec") - ) - .arg( - Arg::new("ionod") - .long("ionod") - .action(ArgAction::SetTrue) - .help("Plot ionospheric delay per signal & SV, at latitude and longitude of signal sampling."), - ) -} diff --git a/rinex-cli/src/cli/mod.rs b/rinex-cli/src/cli/mod.rs index b6013ff0f..cb04416a1 100644 --- a/rinex-cli/src/cli/mod.rs +++ b/rinex-cli/src/cli/mod.rs @@ -1,29 +1,23 @@ -use log::info; use std::{ - fs::create_dir_all, - io::Write, + collections::hash_map::DefaultHasher, + hash::{Hash, Hasher}, path::{Path, PathBuf}, str::FromStr, }; -use clap::{value_parser, Arg, ArgAction, ArgMatches, ColorChoice, Command}; -use rinex::prelude::*; +use itertools::Itertools; -use crate::fops::open_with_web_browser; +use clap::{value_parser, Arg, ArgAction, ArgMatches, ColorChoice, Command}; +use rinex::prelude::GroundPosition; +use rinex_qc::prelude::{QcConfig, QcContext, QcReportType}; -// identification mode -mod identify; -// graph mode -mod graph; -// QC mode -mod qc; -// positioning mode +mod fops; mod positioning; +mod workspace; -// file operations -mod fops; +pub use workspace::Workspace; -use fops::{filegen, merge, split, substract, time_binning}; +use fops::{diff, filegen, merge, split, time_binning}; pub struct Cli { /// Arguments passed by user @@ -38,18 +32,18 @@ impl Default for Cli { /// Context defined by User. pub struct Context { - /// Data context defined by user - pub data: RnxContext, /// Quiet option pub quiet: bool, + /// Data context defined by user + pub data: QcContext, + /// Context name is derived from the primary file loaded in Self, + /// and mostly used in output products generation. + pub name: String, /// Workspace is the place where this session will generate data. /// By default it is set to $WORKSPACE/$PRIMARYFILE. /// $WORKSPACE is either manually definedd by CLI or we create it (as is). /// $PRIMARYFILE is determined from the most major file contained in the dataset. 
- pub workspace: PathBuf, - /// Context name is derived from the primary file loaded in Self, - /// and mostly used in session products generation. - pub name: String, + pub workspace: Workspace, /// (RX) reference position to be used in further analysis. /// It is either (priority order is important) /// 1. manually defined by CLI @@ -62,7 +56,7 @@ impl Context { * Utility to determine the most major filename stem, * to be used as the session workspace */ - pub fn context_stem(data: &RnxContext) -> String { + pub fn context_stem(data: &QcContext) -> String { let ctx_major_stem: &str = data .primary_path() .expect("failed to determine a context name") @@ -77,13 +71,6 @@ impl Context { let primary_stem: Vec<&str> = ctx_major_stem.split('.').collect(); primary_stem[0].to_string() } - /* - * Utility to prepare subdirectories in the session workspace - */ - pub fn create_subdir(&self, suffix: &str) { - create_dir_all(self.workspace.join(suffix)) - .unwrap_or_else(|e| panic!("failed to generate session dir {}: {:?}", suffix, e)); - } /* * Utility to create a file in this session */ @@ -92,21 +79,6 @@ impl Context { panic!("failed to create {}: {:?}", path.display(), e); }) } - /* - * Save HTML content, auto opens it if quiet (-q) is not turned on - */ - pub fn render_html(&self, filename: &str, html: String) { - let path = self.workspace.join(filename); - let mut fd = self.create_file(&path); - write!(fd, "{}", html).unwrap_or_else(|e| { - panic!("failed to render HTML content: {:?}", e); - }); - info!("html rendered in \"{}\"", path.display()); - - if !self.quiet { - open_with_web_browser(path.to_string_lossy().as_ref()); - } - } } impl Cli { @@ -118,10 +90,12 @@ impl Cli { .author("Guillaume W. Bres, ") .version(env!("CARGO_PKG_VERSION")) .about("RINEX post processing") + .long_about("RINEX-Cli is the command line interface +to operate the RINEX/SP3/RTK toolkit, until a GUI is made available. 
+Use it to analyze data, perform file operations and resolve navigation solutions.") .arg_required_else_help(true) .color(ColorChoice::Always) .arg(Arg::new("filepath") - .short('f') .long("fp") .value_name("FILE") .action(ArgAction::Append) @@ -141,14 +115,14 @@ Supported formats are: Example (1): Load a single file rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz + --fp test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz Example (2): define a PPP compliant context rinex-cli \\ - -f test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ - -f test_resources/NAV/V3/ESBC00DNK_R_20201770000_01D_MN.rnx.gz \\ - -f test_resources/CLK/V3/GRG0MGXFIN_20201770000_01D_30S_CLK.CLK.gz \\ - -f test_resources/SP3/GRG0MGXFIN_20201770000_01D_15M_ORB.SP3.gz + --fp test_resources/CRNX/V3/ESBC00DNK_R_20201770000_01D_30S_MO.crx.gz \\ + --fp test_resources/NAV/V3/ESBC00DNK_R_20201770000_01D_MN.rnx.gz \\ + --fp test_resources/CLK/V3/GRG0MGXFIN_20201770000_01D_30S_CLK.CLK.gz \\ + --fp test_resources/SP3/GRG0MGXFIN_20201770000_01D_15M_ORB.SP3.gz ")) .arg(Arg::new("directory") .short('d') @@ -178,7 +152,7 @@ but you can extend that with --depth. Refer to -f for more information.")) .short('q') .long("quiet") .action(ArgAction::SetTrue) - .help("Disable all terminal output. Also disables automatic HTML reports opening.")) + .help("Disable all terminal output. Disables automatic report opener (Web browser).")) .arg(Arg::new("workspace") .short('w') .long("workspace") @@ -189,8 +163,38 @@ but you can extend that with --depth. Refer to -f for more information.")) By default the $RINEX_WORKSPACE variable is prefered if it is defined. You can also use this flag to customize it. 
If none are defined, we will then try to create a local directory named \"WORKSPACE\" like it is possible in this very repo.")) + .next_help_heading("Report customization") + .arg( + Arg::new("report-sum") + .long("sum") + .action(ArgAction::SetTrue) + .help("Restrict report to summary header only (quicker rendition)") + ) + .arg( + Arg::new("report-force") + .short('f') + .long("force") + .action(ArgAction::SetTrue) + .help("Force report synthesis. +By default, report synthesis happens once per input set (file combnation and cli options). +Use this option to force report regeneration. +This has no effect on file operations that do not synthesize a report.")) + .arg( + Arg::new("report-brdc-sky") + .long("brdc-sky") + .action(ArgAction::SetTrue) + .help("When SP3 and/or BRDC RINEX is present, +the skyplot (compass) projection is only calculated from the SP3 coordinates (highest precision). +Use this option to also calculate it from radio messages (for comparison purposes for example).") + ) + .arg( + Arg::new("report-nostats") + .long("nostats") + .action(ArgAction::SetTrue) + .help("Hide statistical annotations that might be present in some plots. +This has no effect on applications compiled without plot and statistical options.") + ) .next_help_heading("Preprocessing") - .about("Preprocessing todo") .arg(Arg::new("gps-filter") .short('G') .action(ArgAction::SetTrue) @@ -207,6 +211,10 @@ If none are defined, we will then try to create a local directory named \"WORKSP .short('C') .action(ArgAction::SetTrue) .help("Filters out all BeiDou vehicles")) + .arg(Arg::new("bds-geo-filter") + .long("CG") + .action(ArgAction::SetTrue) + .help("Filter out all BeiDou Geo vehicles")) .arg(Arg::new("qzss-filter") .short('J') .action(ArgAction::SetTrue) @@ -222,12 +230,9 @@ If none are defined, we will then try to create a local directory named \"WORKSP .arg(Arg::new("preprocessing") .short('P') .num_args(1..) 
+ .value_delimiter(';') .action(ArgAction::Append) .help("Filter designer. Refer to [].")) - .arg(Arg::new("lli-mask") - .long("lli-mask") - .help("Applies given LLI AND() mask. -Also drops observations that did not come with an LLI flag")) .next_help_heading("Receiver Antenna") .arg(Arg::new("rx-ecef") .long("rx-ecef") @@ -241,13 +246,10 @@ Otherwise it gets automatically picked up.")) .help("Define the (RX) antenna position manualy, in decimal degrees.")) .next_help_heading("Exclusive Opmodes: you can only run one at a time.") .subcommand(filegen::subcommand()) - .subcommand(graph::subcommand()) - .subcommand(identify::subcommand()) .subcommand(merge::subcommand()) .subcommand(positioning::subcommand()) - .subcommand(qc::subcommand()) .subcommand(split::subcommand()) - .subcommand(substract::subcommand()) + .subcommand(diff::subcommand()) .subcommand(time_binning::subcommand()) .get_matches() }, @@ -288,6 +290,9 @@ Otherwise it gets automatically picked up.")) pub fn bds_filter(&self) -> bool { self.matches.get_flag("bds-filter") } + pub fn bds_geo_filter(&self) -> bool { + self.matches.get_flag("bds-geo-filter") + } pub fn qzss_filter(&self) -> bool { self.matches.get_flag("qzss-filter") } @@ -335,4 +340,51 @@ Otherwise it gets automatically picked up.")) .map(|position| GroundPosition::from_geodetic(position).to_ecef_wgs84()) } } + /// True if File Operations to generate data is being deployed + pub fn has_fops_output_product(&self) -> bool { + match self.matches.subcommand() { + Some(("filegen", _)) | Some(("merge", _)) | Some(("split", _)) | Some(("tbin", _)) + | Some(("diff", _)) => true, + _ => false, + } + } + /// True if forced report synthesis is requested + pub fn force_report_synthesis(&self) -> bool { + self.matches.get_flag("report-force") + } + /* + * We hash all vital CLI information. + * This helps in determining whether we need to update an existing report + * or not. 
+ */ + pub fn hash(&self) -> u64 { + let mut hasher = DefaultHasher::new(); + let mut string = self + .input_directories() + .into_iter() + .sorted() + .chain(self.input_files().into_iter().sorted()) + .chain(self.preprocessing().into_iter().sorted()) + .join(","); + if let Some(geo) = self.manual_geodetic() { + string.push_str(&format!("{:?}", geo)); + } + if let Some(ecef) = self.manual_ecef() { + string.push_str(&format!("{:?}", ecef)); + } + string.hash(&mut hasher); + hasher.finish() + } + /// Returns QcConfig from command line + pub fn qc_config(&self) -> QcConfig { + QcConfig { + manual_reference: None, + report: if self.matches.get_flag("report-sum") { + QcReportType::Summary + } else { + QcReportType::Full + }, + force_brdc_skyplot: self.matches.get_flag("report-brdc-sky"), + } + } } diff --git a/rinex-cli/src/cli/positioning.rs b/rinex-cli/src/cli/positioning.rs index c2278f8e8..3474329b6 100644 --- a/rinex-cli/src/cli/positioning.rs +++ b/rinex-cli/src/cli/positioning.rs @@ -3,21 +3,24 @@ use clap::{value_parser, Arg, ArgAction, Command}; use rinex::prelude::Duration; pub fn subcommand() -> Command { - Command::new("positioning") - .short_flag('p') + Command::new("ppp") .arg_required_else_help(false) - .about("Precise Positioning opmode. -Use this mode to resolve Position Velocity and Time (PVT) solutions from one GNSS context. See -p --help.") + .about("Post Processed Positioning. Use this mode to deploy the precise position solver. +The solutions are added to the final report as an extra chapter. See --help") + .long_about("Post Processed Positioning (ppp) opmode resolves +PVT solutions from RINEX data sampled by a single receiver (! This is not RTK!). +The solutions are presented in the analysis report (post processed results chapter). 
+Use --cggtts to convert solutions to CGGTTS special format.") .arg(Arg::new("cfg") .short('c') .long("cfg") .value_name("FILE") .required(false) .action(ArgAction::Append) - .help("Pass a Position Solver configuration file (JSON). See --help.") + .help("Position Solver configuration file (JSON). See --help.") .long_help(" Read the [https://github.com/georust/rinex/wiki/Positioning] tutorial. -Use [https://github.com/georust/rinex/rinex-cli/config.rtk] as a starting point. +Use [https://github.com/georust/rinex/config] as a starting point. [https://docs.rs/gnss-rtk/latest/gnss_rtk/prelude/struct.Config.html] is the structure to represent in JSON. ")) .arg(Arg::new("gpx") diff --git a/rinex-cli/src/cli/qc.rs b/rinex-cli/src/cli/qc.rs deleted file mode 100644 index 59c5975f0..000000000 --- a/rinex-cli/src/cli/qc.rs +++ /dev/null @@ -1,29 +0,0 @@ -// tbin opmode -use clap::{Arg, ArgAction, Command}; - -pub fn subcommand() -> Command { - Command::new("quality-check") - .short_flag('Q') - .long_flag("qc") - .about( - "File Quality analysis (statistical evaluation) of the dataset. -Typically used prior precise point positioning.", - ) - .arg( - Arg::new("spp") - .long("spp") - .action(ArgAction::SetTrue) - .help("Force solving method to SPP. -Otherwise we use the default Method. -See online documentations [https://docs.rs/gnss-rtk/latest/gnss_rtk/prelude/enum.Method.html#variants].")) - .arg( - Arg::new("cfg") - .short('c') - .long("cfg") - .required(false) - .value_name("FILE") - .action(ArgAction::Append) - .help("Pass a QC configuration file (JSON). -[] is the structure to represent in JSON. -See [] for meaningful examples.")) -} diff --git a/rinex-cli/src/cli/workspace.rs b/rinex-cli/src/cli/workspace.rs new file mode 100644 index 000000000..c54c7e7d6 --- /dev/null +++ b/rinex-cli/src/cli/workspace.rs @@ -0,0 +1,90 @@ +//! 
Workspace definition and helper +use std::{ + fs::{create_dir_all, File}, + path::{Path, PathBuf}, + process::Command, +}; + +use crate::cli::Cli; + +/// Workspace, describes past and current session +pub struct Workspace { + /// Root Fullpath for this session + pub root: PathBuf, +} + +impl Workspace { + /// Builds a new workspace either + /// 1. from $RINEX_WORKSPACE environment variable + /// 2. from -w workspace CLI argument + /// 3. or defaults to ./WORSPACE, that exists within this Git repo. + /// Refer to Wiki Pages. + pub fn new(session: &str, cli: &Cli) -> Self { + let root = match std::env::var("RINEX_WORKSPACE") { + Ok(path) => Path::new(&path).join(session).to_path_buf(), + _ => match cli.matches.get_one::("workspace") { + Some(path) => Path::new(path).join(session).to_path_buf(), + None => Path::new("WORKSPACE").join(session).to_path_buf(), + }, + }; + // make sure workspace does exists, otherwise create it + create_dir_all(&root).unwrap_or_else(|e| { + panic!( + "failed to create session workspace \"{}\": {}", + root.display(), + e + ) + }); + info!("session workspace is \"{}\"", root.to_string_lossy()); + Self { + root: root.to_path_buf(), + } + } + /// Creates subdirectory within self. + /// Will panic on write permission issues. + pub fn create_subdir(&self, dir: &str) { + create_dir_all(self.root.join(dir)).unwrap_or_else(|e| { + panic!("failed to create directory {} within workspace: {}", dir, e) + }); + } + /// Creates new file within this session. + /// Will panic on write permission issues. 
+ pub fn create_file(&self, filename: &str) -> File { + let fullpath = self.root.join(filename).to_string_lossy().to_string(); + let fd = File::create(&fullpath) + .unwrap_or_else(|e| panic!("failed to create new file {}: {}", filename, e)); + info!("{} has been generated", fullpath); + fd + } + /// Opens root path with prefered web browser + #[cfg(target_os = "linux")] + pub fn open_with_web_browser(&self) { + let fullpath = self.root.join("index.html").to_string_lossy().to_string(); + let web_browsers = vec!["firefox", "chromium"]; + for browser in web_browsers { + let child = Command::new(browser).arg(fullpath.clone()).spawn(); + if child.is_ok() { + return; + } + } + } + /// Opens root path with prefered web browser + #[cfg(target_os = "macos")] + pub fn open_with_web_browser(&self) { + let fullpath = self.root.join("index.html").to_string_lossy().to_string(); + Command::new("open") + .args(&[fullpath]) + .output() + .expect("open() failed, can't open HTML content automatically"); + } + /// Opens root path with prefered web browser + #[cfg(target_os = "windows")] + pub fn open_with_web_browser(&self) { + let fullpath = self.root.join("index.html").to_string_lossy().to_string(); + Command::new("cmd") + .arg("/C") + .arg(format!(r#"start {}"#, fullpath)) + .output() + .expect("failed to open generated HTML content"); + } +} diff --git a/rinex-cli/src/fops.rs b/rinex-cli/src/fops.rs index 2b3706640..9a8209700 100644 --- a/rinex-cli/src/fops.rs +++ b/rinex-cli/src/fops.rs @@ -3,16 +3,16 @@ use crate::Error; use clap::ArgMatches; use std::path::PathBuf; -use std::process::Command; -use std::str::FromStr; +//use std::str::FromStr; use rinex::{ - prelude::{Duration, Epoch, ProductType, Rinex, RinexType}, - preprocessing::*, + prelude::{Duration, Epoch, Rinex, RinexType}, prod::{DataSource, DetailedProductionAttributes, ProductionAttributes, FFU, PPU}, Merge, Split, }; +use rinex_qc::prelude::{Filter, Preprocessing, ProductType}; + /* * Parses share RINEX production 
attributes. * This helps accurate file production, @@ -102,11 +102,12 @@ pub fn filegen(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let ctx_data = &ctx.data; for product in [ - ProductType::Observation, ProductType::DORIS, + ProductType::Observation, ProductType::MeteoObservation, ProductType::BroadcastNavigation, ProductType::HighPrecisionClock, + ProductType::HighPrecisionOrbit, ProductType::IONEX, ProductType::ANTEX, ] { @@ -114,7 +115,13 @@ pub fn filegen(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let prod = custom_prod_attributes(rinex, matches); let filename = output_filename(rinex, matches, prod); - let output_path = ctx.workspace.join(filename).to_string_lossy().to_string(); + let output_path = ctx + .workspace + .root + .join("OUTPUT") + .join(filename) + .to_string_lossy() + .to_string(); rinex.to_file(&output_path).unwrap_or_else(|_| { panic!("failed to generate {} RINEX \"{}\"", product, output_path) @@ -150,6 +157,18 @@ pub fn merge(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { .ok_or(Error::MissingNavigationRinex)?; rinex_a.merge(&rinex_b)? }, + RinexType::MeteoData => { + let rinex_a = ctx_data.meteo().ok_or(Error::MissingMeteoRinex)?; + rinex_a.merge(&rinex_b)? + }, + RinexType::IonosphereMaps => { + let rinex_a = ctx_data.ionex().ok_or(Error::MissingIONEX)?; + rinex_a.merge(&rinex_b)? + }, + RinexType::ClockData => { + let rinex_a = ctx_data.clock().ok_or(Error::MissingClockRinex)?; + rinex_a.merge(&rinex_b)? 
+ }, _ => unimplemented!(), }; @@ -159,7 +178,12 @@ pub fn merge(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { .to_string_lossy() .to_string(); - let output_path = ctx.workspace.join(suffix).to_string_lossy().to_string(); + let output_path = ctx + .workspace + .root + .join(suffix) + .to_string_lossy() + .to_string(); rinex_c.to_file(&output_path)?; @@ -236,6 +260,7 @@ pub fn split(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let output = ctx .workspace + .root .join(format!("{}-{}.{}", filename, file_suffix, extension)) .to_string_lossy() .to_string(); @@ -267,6 +292,7 @@ pub fn split(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let output = ctx .workspace + .root .join(format!("{}-{}.{}", filename, file_suffix, extension)) .to_string_lossy() .to_string(); @@ -279,7 +305,7 @@ pub fn split(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { } /* - * Time reframing: subdivde a RINEX into a batch of equal duration + * Time reframing: subdivide a RINEX into a batch of equal duration */ pub fn time_binning(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let ctx_data = &ctx.data; @@ -292,11 +318,13 @@ pub fn time_binning(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { } for product in [ + ProductType::IONEX, + ProductType::DORIS, ProductType::Observation, ProductType::MeteoObservation, ProductType::BroadcastNavigation, ProductType::HighPrecisionClock, - ProductType::IONEX, + ProductType::HighPrecisionOrbit, ] { // input data determination if let Some(rinex) = ctx_data.rinex(product) { @@ -322,16 +350,24 @@ pub fn time_binning(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { // run time binning algorithm while last <= end { - let rinex = rinex - .filter(Filter::from_str(&format!("< {:?}", last)).unwrap()) - .filter(Filter::from_str(&format!(">= {:?}", first)).unwrap()); + let lower = Filter::lower_than(&last.to_string()).unwrap(); + let greater = 
Filter::greater_equals(&first.to_string()).unwrap(); - // generate standardized name - let filename = output_filename(&rinex, matches, prod.clone()); + debug!("batch: {} < {}", first, last); + let batch = rinex.filter(&lower).filter(&greater); - let output = ctx.workspace.join(&filename).to_string_lossy().to_string(); + // generate standardized name + let filename = output_filename(&batch, matches, prod.clone()); + + let output = ctx + .workspace + .root + .join("OUTPUT") + .join(&filename) + .to_string_lossy() + .to_string(); - rinex.to_file(&output)?; + batch.to_file(&output)?; info!("{} RINEX \"{}\" has been generated", product, output); first += *duration; @@ -348,7 +384,7 @@ pub fn time_binning(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { /* * Substract RINEX[A]-RINEX[B] */ -pub fn substract(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { +pub fn diff(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let ctx_data = &ctx.data; let path_a = ctx_data .files(ProductType::Observation) @@ -403,6 +439,7 @@ pub fn substract(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { let fullpath = ctx .workspace + .root .join(format!("DIFFERENCED.{}", extension)) .to_string_lossy() .to_string(); @@ -412,31 +449,3 @@ pub fn substract(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { info!("OBS RINEX \"{}\" has been generated", fullpath); Ok(()) } - -#[cfg(target_os = "linux")] -pub fn open_with_web_browser(path: &str) { - let web_browsers = vec!["firefox", "chromium"]; - for browser in web_browsers { - let child = Command::new(browser).args([path]).spawn(); - if child.is_ok() { - return; - } - } -} - -#[cfg(target_os = "macos")] -pub fn open_with_web_browser(path: &str) { - Command::new("open") - .args(&[path]) - .output() - .expect("open() failed, can't open HTML content automatically"); -} - -#[cfg(target_os = "windows")] -pub fn open_with_web_browser(path: &str) { - Command::new("cmd") - .arg("/C") - 
.arg(format!(r#"start {}"#, path)) - .output() - .expect("failed to open generated HTML content"); -} diff --git a/rinex-cli/src/graph/combination.rs b/rinex-cli/src/graph/combination.rs deleted file mode 100644 index 93640c3b0..000000000 --- a/rinex-cli/src/graph/combination.rs +++ /dev/null @@ -1,104 +0,0 @@ -use super::{build_chart_epoch_axis, generate_markers, Marker, Mode, PlotContext}; -use plotly::common::Visible; -use rinex::prelude::*; -use std::collections::{BTreeMap, HashMap}; - -pub fn plot_gnss_combination( - data: &HashMap<(Observable, Observable), BTreeMap>>, - plot_context: &mut PlotContext, - plot_title: &str, - y_title: &str, -) { - // add a plot - plot_context.add_timedomain_plot(plot_title, y_title); - - // generate 1 marker per OP - let markers = generate_markers(data.len()); - - // plot all ops - for (op_index, ((lhs_observable, ref_observable), vehicles)) in data.iter().enumerate() { - for (sv, epochs) in vehicles { - let data_x: Vec = epochs.iter().map(|((e, _flag), _v)| *e).collect(); - let data_y: Vec = epochs.iter().map(|(_, v)| *v).collect(); - let trace = build_chart_epoch_axis( - &format!("{}({}-{})", sv, lhs_observable, ref_observable), - Mode::Markers, - data_x, - data_y, - ) - .marker(Marker::new().symbol(markers[op_index].clone())) - .visible({ - if op_index < 2 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_context.add_trace(trace); - } - } -} - -/* - * Plot DCB analysis - */ -pub fn plot_gnss_dcb( - data: &HashMap>>, - plot_context: &mut PlotContext, - plot_title: &str, - y_title: &str, -) { - // add a plot - plot_context.add_timedomain_plot(plot_title, y_title); - // generate 1 marker per OP - let markers = generate_markers(data.len()); - // plot all ops - for (op_index, (op, vehicles)) in data.iter().enumerate() { - for (_sv, epochs) in vehicles { - let data_x: Vec = epochs.iter().map(|((e, _flag), _v)| *e).collect(); - let data_y: Vec = epochs.iter().map(|(_, v)| *v).collect(); - let trace = 
build_chart_epoch_axis(&op.to_string()[1..], Mode::Markers, data_x, data_y) - .marker(Marker::new().symbol(markers[op_index].clone())) - .visible({ - if op_index < 2 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_context.add_trace(trace); - } - } -} - -/* - * Plot MP analysis - */ -pub fn plot_gnss_code_mp( - data: &HashMap>>, - plot_context: &mut PlotContext, - plot_title: &str, - y_title: &str, -) { - // add a plot - plot_context.add_timedomain_plot(plot_title, y_title); - // generate 1 marker per OP - let markers = generate_markers(data.len()); - // plot all ops - for (op_index, (op, vehicles)) in data.iter().enumerate() { - for (_sv, epochs) in vehicles { - let data_x: Vec = epochs.iter().map(|((e, _flag), _v)| *e).collect(); - let data_y: Vec = epochs.iter().map(|(_, v)| *v).collect(); - let trace = build_chart_epoch_axis(&op.to_string()[1..], Mode::Markers, data_x, data_y) - .marker(Marker::new().symbol(markers[op_index].clone())) - .visible({ - if op_index < 2 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_context.add_trace(trace); - } - } -} diff --git a/rinex-cli/src/graph/context.rs b/rinex-cli/src/graph/context.rs deleted file mode 100644 index 62e453f96..000000000 --- a/rinex-cli/src/graph/context.rs +++ /dev/null @@ -1,74 +0,0 @@ -use super::{ - build_default_3d_plot, build_default_polar_plot, build_timedomain_2y_plot, - build_timedomain_plot, build_world_map, Plot, -}; -//use log::trace; -use plotly::{layout::MapboxStyle, Trace}; - -/// Plot Context -pub struct PlotContext { - plots: Vec, -} - -impl PlotContext { - pub fn new() -> Self { - Self { plots: Vec::new() } - } - /*pub fn plot_item(&self) -> Option<&Plot> { - self.plots.get(self.plots.len() - 1) - } - pub fn plot_item_mut(&mut self) -> Option<&mut Plot> { - let len = self.plots.len() - 1; - self.plots.get_mut(len) - }*/ - pub fn add_timedomain_plot(&mut self, title: &str, y_label: &str) { - self.plots.push(build_timedomain_plot(title, y_label)); - } - 
pub fn add_timedomain_2y_plot(&mut self, title: &str, y1_label: &str, y2_label: &str) { - self.plots - .push(build_timedomain_2y_plot(title, y1_label, y2_label)); - } - pub fn add_cartesian3d_plot( - &mut self, - title: &str, - x_label: &str, - y_label: &str, - z_label: &str, - ) { - self.plots - .push(build_default_3d_plot(title, x_label, y_label, z_label)); - } - pub fn add_polar2d_plot(&mut self, title: &str) { - self.plots.push(build_default_polar_plot(title)); - } - pub fn add_world_map( - &mut self, - title: &str, - show_legend: bool, - map_style: MapboxStyle, - center: (f64, f64), - zoom: u8, - ) { - self.plots - .push(build_world_map(title, show_legend, map_style, center, zoom)); - } - pub fn add_trace(&mut self, trace: Box) { - let len = self.plots.len() - 1; - self.plots[len].add_trace(trace); - } - pub fn to_html(&mut self) -> String { - let mut html = String::new(); - for (index, p) in self.plots.iter_mut().enumerate() { - /*if !tiny { - p.use_local_plotly(); - }*/ - if index == 0 { - html.push_str(&p.to_html()); - } else { - html.push_str(&p.to_inline_html(None)); - } - html.push('\n'); - } - html - } -} diff --git a/rinex-cli/src/graph/csv.rs b/rinex-cli/src/graph/csv.rs index c505df68e..1c1617ffd 100644 --- a/rinex-cli/src/graph/csv.rs +++ b/rinex-cli/src/graph/csv.rs @@ -1,10 +1,10 @@ //! helpers to export to CSV if desired, //! and not only generate HTML plots. 
+use crate::cli::Workspace; use hifitime::Epoch; use std::fs::File; use std::io::Write; -use std::path::Path; use thiserror::Error; #[derive(Error, Debug)] @@ -13,29 +13,41 @@ pub enum Error { IoError(#[from] std::io::Error), } -/* - * Use this to export Time domain plots (most widely used plot type) - */ -pub fn csv_export_timedomain( - path: &Path, - title: &str, - labels: &str, - x: &Vec, - y: &Vec, -) -> Result<(), Error> { - let mut fd = File::create(path)?; - writeln!(fd, "================================================")?; - writeln!(fd, "title : {}", title)?; - writeln!(fd, "labels : {}", labels)?; - writeln!( - fd, - "version: rinex-cli v{} - https://georust.org", - env!("CARGO_PKG_VERSION") - )?; - writeln!(fd, "================================================")?; - for (x, y) in x.iter().zip(y.iter()) { - writeln!(fd, "{:?}, {:.6E}", x, y)?; +/// Custom CSV report +pub struct CSV { + fd: File, +} + +impl CSV { + /// Creates new CSV report in this session. + /// Panics on write permission issues. 
+ pub fn new( + workspace: &Workspace, + filename: &str, + title: &str, + labels: &str, + ) -> Result { + let mut fd = workspace.create_file(filename); + writeln!(fd, "================================================")?; + writeln!(fd, "title : {}", title)?; + writeln!(fd, "labels : {}", labels)?; + writeln!( + fd, + "version: rinex-cli v{} - https://georust.org", + env!("CARGO_PKG_VERSION") + )?; + writeln!(fd, "================================================")?; + Ok(Self { fd }) + } + /// Report timedomain data as CSV + pub fn export_timedomain( + &mut self, + x: &Vec, + y: &Vec, + ) -> Result<(), Error> { + for (x, y) in x.iter().zip(y.iter()) { + writeln!(self.fd, "{:?}, {:.6E}", x, y)?; + } + Ok(()) } - writeln!(fd, "================================================")?; - Ok(()) } diff --git a/rinex-cli/src/graph/mod.rs b/rinex-cli/src/graph/mod.rs deleted file mode 100644 index 39e6ad17f..000000000 --- a/rinex-cli/src/graph/mod.rs +++ /dev/null @@ -1,651 +0,0 @@ -use crate::{cli::Context, Error}; -use clap::ArgMatches; -use rinex::observation::{Combination, Combine, Dcb}; - -use plotly::{ - common::{ - AxisSide, - //DashType, - HoverInfo, - Marker, - MarkerSymbol, - Mode, - }, - layout::{Axis, Center, DragMode, Mapbox, MapboxStyle, Margin}, - Layout, Plot, Scatter, Scatter3D, -}; - -use rand::Rng; -use serde::Serialize; - -use rinex::prelude::*; - -mod record; -use record::{ - plot_atmosphere_conditions, plot_residual_ephemeris, plot_sv_nav_clock, plot_sv_nav_orbits, -}; - -mod context; -pub use context::PlotContext; - -mod skyplot; -use skyplot::skyplot; - -mod naviplot; - -mod combination; -use combination::{plot_gnss_code_mp, plot_gnss_combination, plot_gnss_dcb}; - -mod csv; // export to CSV instead of plotting -pub use csv::csv_export_timedomain; - -/* - * Generates N marker symbols to be used - * to differentiate data - */ -pub fn generate_markers(n: usize) -> Vec { - //TODO lazy static - let pool = vec![ - "Circle", - "CircleOpen", - "CircleDot", - 
"CircleOpenDot", - "Square", - "SquareOpen", - "SquareDot", - "SquareOpenDot", - "Diamond", - "DiamondOpen", - "DiamondDot", - "DiamondOpenDot", - "Cross", - "CrossOpen", - "CrossDot", - "CrossOpenDot", - "X", - "XOpen", - "XDot", - "XOpenDot", - "TriangleUp", - "TriangleUpOpen", - "TriangleUpDot", - "TriangleUpOpenDot", - "TriangleDown", - "TriangleDownOpen", - "TriangleDownDot", - "TriangleDownOpenDot", - "TriangleLeft", - "TriangleLeftOpen", - "TriangleLeftDot", - "TriangleLeftOpenDot", - "TriangleRight", - "TriangleRightOpen", - "TriangleRightDot", - "TriangleRightOpenDot", - "TriangleNE", - "TriangleNEOpen", - "TriangleNEDot", - "TriangleNEOpenDot", - "TriangleSE", - "TriangleSEOpen", - "TriangleSEDot", - "TriangleSEOpenDot", - "TriangleSW", - "TriangleSWOpen", - "TriangleSWDot", - "TriangleSWOpenDot", - "TriangleNW", - "TriangleNWOpen", - "TriangleNWDot", - "TriangleNWOpenDot", - "Pentagon", - "PentagonOpen", - "PentagonDot", - "PentagonOpenDot", - "Hexagon", - "HexagonOpen", - "HexagonDot", - "HexagonOpenDot", - "Hexagon2", - "Hexagon2Open", - "Hexagon2Dot", - "Hexagon2OpenDot", - "Octagon", - "OctagonOpen", - "OctagonDot", - "OctagonOpenDot", - "Star", - "StarOpen", - "StarDot", - "StarOpenDot", - "Hexagram", - "HexagramOpen", - "HexagramDot", - "HexagramOpenDot", - "StarTriangleUp", - "StarTriangleUpOpen", - "StarTriangleUpDot", - "StarTriangleUpOpenDot", - "StarTriangleDown", - "StarTriangleDownOpen", - "StarTriangleDownDot", - "StarTriangleDownOpenDot", - "StarSquare", - "StarSquareOpen", - "StarSquareDot", - "StarSquareOpenDot", - "StarDiamond", - "StarDiamondOpen", - "StarDiamondDot", - "StarDiamondOpenDot", - "DiamondTall", - "DiamondTallOpen", - "DiamondTallDot", - "DiamondTallOpenDot", - "DiamondWide", - "DiamondWideOpen", - "DiamondWideDot", - "DiamondWideOpenDot", - "Hourglass", - "HourglassOpen", - "BowTie", - "BowTieOpen", - "CircleCross", - "CircleCrossOpen", - "CircleX", - "CircleXOpen", - "SquareCross", - "SquareCrossOpen", - "SquareX", - 
"SquareXOpen", - "DiamondCross", - "DiamondCrossOpen", - "DiamondX", - "DiamondXOpen", - "CrossThin", - "CrossThinOpen", - "XThin", - "XThinOpen", - "Asterisk", - "AsteriskOpen", - "Hash", - "HashOpen", - "HashDot", - "HashOpenDot", - "YUp", - "YUpOpen", - "YDown", - "YDownOpen", - "YLeft", - "YLeftOpen", - "YRight", - "YRightOpen", - "LineEW", - "LineEWOpen", - "LineNS", - "LineNSOpen", - "LineNE", - "LineNEOpen", - "LineNW", - "LineNWOpen", - ]; - let mut rng = rand::thread_rng(); - let mut ret: Vec = Vec::with_capacity(n); - for _ in 0..n { - let symbol = pool[rng.gen_range(0..25)]; - let marker = match symbol { - "Circle" => MarkerSymbol::Circle, - "CircleOpen" => MarkerSymbol::CircleOpen, - "CircleDot" => MarkerSymbol::CircleDot, - "CircleOpenDot" => MarkerSymbol::CircleOpenDot, - "Square" => MarkerSymbol::Square, - "SquareDot" => MarkerSymbol::SquareDot, - "SquareOpen" => MarkerSymbol::SquareOpen, - "SquareOpenDot" => MarkerSymbol::SquareOpenDot, - "Diamond" => MarkerSymbol::Diamond, - "DiamondOpen" => MarkerSymbol::DiamondOpen, - "DiamondDot" => MarkerSymbol::DiamondDot, - "DiamondOpenDot" => MarkerSymbol::DiamondOpenDot, - "Hash" => MarkerSymbol::Hash, - "HashDot" => MarkerSymbol::HashDot, - "HashOpen" => MarkerSymbol::HashOpen, - "HashOpenDot" => MarkerSymbol::HashOpenDot, - "Cross" => MarkerSymbol::Cross, - "CrossDot" => MarkerSymbol::CrossDot, - "CrossOpen" => MarkerSymbol::CrossOpen, - "CrossOpenDot" => MarkerSymbol::CrossOpenDot, - "TriangleUp" => MarkerSymbol::TriangleUp, - "TriangleUpDot" => MarkerSymbol::TriangleUpDot, - "TriangleUpOpen" => MarkerSymbol::TriangleUpOpen, - "TriangleUpOpenDot" => MarkerSymbol::TriangleUpOpenDot, - "TriangleDown" => MarkerSymbol::TriangleDown, - "X" => MarkerSymbol::X, - "XOpen" => MarkerSymbol::XOpen, - "XDot" => MarkerSymbol::XDot, - "XOpenDot" => MarkerSymbol::XOpenDot, - "YUp" => MarkerSymbol::YUp, - "YUpOpen" => MarkerSymbol::YUpOpen, - "YDown" => MarkerSymbol::YDown, - "YDownOpen" => MarkerSymbol::YDownOpen, - _ 
=> MarkerSymbol::Cross, - }; - ret.push(marker); - } - ret -} - -/* - * builds a standard 2D plot single Y scale, - * ready to plot data against time (`Epoch`) - */ -pub fn build_timedomain_plot(title: &str, y_title: &str) -> Plot { - build_plot( - title, - "MJD", - y_title, - (true, true), // y=0 lines - true, // show legend - true, // autosize - true, // show tick labels - 0.25, // ticks dx - "{:05}", // ticks fmt - ) -} - -/* - * builds a standard 3D plot - */ -pub fn build_default_3d_plot(title: &str, x_title: &str, y_title: &str, z_title: &str) -> Plot { - build_3d_plot( - title, - x_title, - y_title, - z_title, - (true, true, true), // x=0,y=0,z=0 bold lines - true, // show legend - true, // autosize - ) -} - -/* - * build a standard 2D plot dual Y axes, - * to plot against `Epochs` - */ -pub fn build_timedomain_2y_plot(title: &str, y1_title: &str, y2_title: &str) -> Plot { - build_plot_2y( - title, - "MJD", - y1_title, - y2_title, - (false, false), // y=0 lines - true, // show legend - true, // autosize - true, // show x tick label - 0.25, // dx tick - "{:05}", // x tick fmt - ) -} - -/* - * Builds a default Polar2D plot - */ -pub fn build_default_polar_plot(title: &str) -> Plot { - let layout = Layout::new() - .title(title) - .x_axis( - Axis::new().title("Latitude [°]").zero_line(true), //.show_tick_labels(show_tick_labels) - //.dtick(dx_tick) - //.tick_format(tick_fmt) - ) - .y_axis(Axis::new().title("Longitude [°]").zero_line(true)) - .show_legend(true) - .auto_size(true); - let mut p = Plot::new(); - p.set_layout(layout); - p -} - -/* - * Builds a world map, - * centered on given locations, in decimal degrees, - * zoom factor - */ -pub fn build_world_map( - title: &str, - show_legend: bool, - map_style: MapboxStyle, - center: (f64, f64), - zoom: u8, -) -> Plot { - let mut p = Plot::new(); - let layout = Layout::new() - .title(title) - .drag_mode(DragMode::Zoom) - .margin(Margin::new().top(0).left(0).bottom(0).right(0)) - .show_legend(show_legend) - 
.mapbox( - Mapbox::new() - .style(map_style) - .center(Center::new(center.0, center.1)) - .zoom(zoom), - ); - p.set_layout(layout); - p -} - -/* - * Builds a Plot - */ -fn build_plot( - title: &str, - x_axis_title: &str, - y_axis_title: &str, - zero_line: (bool, bool), // plots a bold line @ (x=0,y=0) - show_legend: bool, - auto_size: bool, - show_xtick_labels: bool, - dx_tick: f64, - x_tick_fmt: &str, -) -> Plot { - let layout = Layout::new() - .title(title) - .x_axis( - Axis::new() - .title(x_axis_title) - .zero_line(zero_line.0) - .show_tick_labels(show_xtick_labels) - .dtick(dx_tick) - .tick_format(x_tick_fmt), - ) - .y_axis(Axis::new().title(y_axis_title).zero_line(zero_line.0)) - .show_legend(show_legend) - .auto_size(auto_size); - let mut p = Plot::new(); - p.set_layout(layout); - p -} - -fn build_plot_2y( - title: &str, - x_title: &str, - y1_title: &str, - y2_title: &str, - zero_line: (bool, bool), // plots a bold line @ (x=0,y=0) - show_legend: bool, - auto_size: bool, - show_xtick_labels: bool, - dx_tick: f64, - xtick_fmt: &str, -) -> Plot { - let layout = Layout::new() - .title(title) - .x_axis( - Axis::new() - .title(x_title) - .zero_line(zero_line.0) - .show_tick_labels(show_xtick_labels) - .dtick(dx_tick) - .tick_format(xtick_fmt), - ) - .y_axis(Axis::new().title(y1_title).zero_line(zero_line.1)) - .y_axis2( - Axis::new() - .title(y2_title) - .overlaying("y") - .side(AxisSide::Right) - .zero_line(zero_line.1), - ) - .show_legend(show_legend) - .auto_size(auto_size); - let mut p = Plot::new(); - p.set_layout(layout); - p -} - -fn build_3d_plot( - title: &str, - x_title: &str, - y_title: &str, - z_title: &str, - zero_line: (bool, bool, bool), // plots a bold line @ (x=0,y=0,z=0) - show_legend: bool, - auto_size: bool, -) -> Plot { - let layout = Layout::new() - .title(title) - .x_axis( - Axis::new() - .title(x_title) - .zero_line(zero_line.0) - .show_tick_labels(false), - ) - .y_axis(Axis::new().title(y_title).zero_line(zero_line.1)) - 
.z_axis(Axis::new().title(z_title).zero_line(zero_line.2)) - .show_legend(show_legend) - .auto_size(auto_size); - let mut p = Plot::new(); - p.set_layout(layout); - p -} - -/* - * Builds a default chart, 2D, X = time axis - */ -pub fn build_chart_epoch_axis( - name: &str, - mode: Mode, - epochs: Vec, - data_y: Vec, -) -> Box> { - let txt: Vec = epochs.iter().map(|e| e.to_string()).collect(); - Scatter::new(epochs.iter().map(|e| e.to_mjd_utc_days()).collect(), data_y) - .mode(mode) - //.web_gl_mode(true) - .name(name) - .hover_text_array(txt) - .hover_info(HoverInfo::All) -} - -/* - * Builds a default 3D chart - */ -pub fn build_3d_chart_epoch_label( - name: &str, - mode: Mode, - epochs: Vec, - x: Vec, - y: Vec, - z: Vec, -) -> Box> { - let txt: Vec = epochs.iter().map(|e| e.to_string()).collect(); - Scatter3D::new(x, y, z) - .mode(mode) - //.web_gl_mode(true) - .name(name) - .hover_text_array(txt) - .hover_info(HoverInfo::All) -} - -/* Returns True if GNSS combination is to be plotted */ -fn gnss_combination_plot(matches: &ArgMatches) -> bool { - matches.get_flag("if") - || matches.get_flag("gf") - || matches.get_flag("wl") - || matches.get_flag("nl") - || matches.get_flag("mw") -} - -/* Returns True if Navigation plot is to be generated */ -fn navigation_plot(matches: &ArgMatches) -> bool { - matches.get_flag("skyplot") - || matches.get_flag("orbit") - || matches.get_flag("orbit-residual") - || matches.get_flag("sv-clock") -} - -/* Returns True if Atmosphere conditions is to be generated */ -fn atmosphere_plot(matches: &ArgMatches) -> bool { - matches.get_flag("tropo") || matches.get_flag("tec") || matches.get_flag("ionod") -} - -pub fn graph_opmode(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { - /* - * Prepare session: - * + HTML: (default) in this session directly - * + CSV: (option): generate a subdir - */ - let csv_export = matches.get_flag("csv"); - if csv_export { - ctx.create_subdir("CSV"); - } - /* - * Observations graphs - */ - if 
matches.get_flag("obs") { - let mut plot_ctx = PlotContext::new(); - if ctx.data.has_observation() { - record::plot_observations(ctx, &mut plot_ctx, csv_export); - } - if ctx.data.has_meteo() { - record::plot_meteo_observations(ctx, &mut plot_ctx, csv_export); - } - if ctx.data.has_doris() { - record::plot_doris_observations(ctx, &mut plot_ctx, csv_export); - } - - /* save observations */ - ctx.render_html("OBSERVATIONS.html", plot_ctx.to_html()); - } - /* - * GNSS combinations graphs - */ - if gnss_combination_plot(matches) { - let data = ctx - .data - .observation() - .ok_or(Error::MissingObservationRinex)?; - - let mut plot_ctx = PlotContext::new(); - if matches.get_flag("if") { - let combination = data.combine(Combination::IonosphereFree); - plot_gnss_combination( - &combination, - &mut plot_ctx, - "Ionosphere Free combination", - "Meters of delay", - ); - } - if matches.get_flag("gf") { - let combination = data.combine(Combination::GeometryFree); - plot_gnss_combination( - &combination, - &mut plot_ctx, - "Geometry Free combination", - "Meters of delay", - ); - } - if matches.get_flag("wl") { - let combination = data.combine(Combination::WideLane); - plot_gnss_combination( - &combination, - &mut plot_ctx, - "Wide Lane combination", - "Meters of delay", - ); - } - if matches.get_flag("nl") { - let combination = data.combine(Combination::NarrowLane); - plot_gnss_combination( - &combination, - &mut plot_ctx, - "Narrow Lane combination", - "Meters of delay", - ); - } - if matches.get_flag("mw") { - let combination = data.combine(Combination::MelbourneWubbena); - plot_gnss_combination( - &combination, - &mut plot_ctx, - "Melbourne Wubbena combination", - "Meters of delay", - ); - } - - /* save combinations */ - ctx.render_html("COMBINATIONS.html", plot_ctx.to_html()); - } - /* - * DCB visualization - */ - if matches.get_flag("dcb") { - let data = ctx - .data - .observation() - .ok_or(Error::MissingObservationRinex)?; - - let mut plot_ctx = PlotContext::new(); - let 
data = data.dcb(); - plot_gnss_dcb( - &data, - &mut plot_ctx, - "Differential Code Bias", - "Differential Code Bias [s]", - ); - - /* save DCB */ - ctx.render_html("DCB.html", plot_ctx.to_html()); - } - if matches.get_flag("mp") { - let data = ctx - .data - .observation() - .ok_or(Error::MissingObservationRinex)?; - - let mut plot_ctx = PlotContext::new(); - let data = data.code_multipath(); - plot_gnss_code_mp(&data, &mut plot_ctx, "Code Multipath", "Meters of delay"); - - /* save MP */ - ctx.render_html("MULTIPATH.html", plot_ctx.to_html()); - } - if navigation_plot(matches) { - let mut plot_ctx = PlotContext::new(); - - if matches.get_flag("skyplot") { - let rx_ecef = ctx - .rx_ecef - .expect("skyplot requires the receiver location to be defined."); - if ctx.data.sp3().is_none() && ctx.data.brdc_navigation().is_none() { - panic!("skyplot requires either BRDC or SP3."); - } - skyplot(&ctx.data, rx_ecef, &mut plot_ctx); - } - if matches.get_flag("orbit") { - plot_sv_nav_orbits(&ctx.data, &mut plot_ctx); - } - if matches.get_flag("orbit-residual") { - if ctx.data.sp3().is_none() || ctx.data.brdc_navigation().is_none() { - panic!("requires both BRDC and SP3."); - } - plot_residual_ephemeris(&ctx.data, &mut plot_ctx); - } - /* save NAV */ - ctx.render_html("NAVIGATION.html", plot_ctx.to_html()); - } - if matches.get_flag("sv-clock") { - let mut plot_ctx = PlotContext::new(); - plot_sv_nav_clock(&ctx.data, &mut plot_ctx); - - /* save CLK */ - ctx.render_html("CLOCKS.html", plot_ctx.to_html()); - } - if atmosphere_plot(matches) { - let mut plot_ctx = PlotContext::new(); - plot_atmosphere_conditions(ctx, &mut plot_ctx, matches); - - /* save ATMOSPHERE */ - ctx.render_html("ATMOSPHERE.html", plot_ctx.to_html()); - } - Ok(()) -} diff --git a/rinex-cli/src/graph/naviplot.rs b/rinex-cli/src/graph/naviplot.rs deleted file mode 100644 index 120b7754b..000000000 --- a/rinex-cli/src/graph/naviplot.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::graph::PlotContext; -// use 
plotly::{ -// common::{Mode, Visible}, -// ScatterPolar, -// }; -// use rinex::prelude::Epoch; -use rinex::prelude::RnxContext; - -/* - * NAVI plot is an advanced 3D view - * which is basically the skyplot view with observation signals - * and ionospheric conditions taken into account - */ -pub fn naviplot(_ctx: &RnxContext, plot_context: &mut PlotContext) { - plot_context.add_cartesian3d_plot("NAVI Plot", "x", "y", "z"); - - // grab NAV context - // let nav_rnx = match &ctx.navigation_data() { - // Some(nav) => nav, - // _ => ctx.primary_data(), - // }; -} diff --git a/rinex-cli/src/graph/record/ionex.rs b/rinex-cli/src/graph/record/ionex.rs deleted file mode 100644 index 0ecae3a57..000000000 --- a/rinex-cli/src/graph/record/ionex.rs +++ /dev/null @@ -1,87 +0,0 @@ -use crate::graph::PlotContext; -use plotly::{ - color::NamedColor, - common::{Marker, MarkerSymbol}, - layout::MapboxStyle, - {DensityMapbox, ScatterMapbox}, -}; -use rinex::prelude::Rinex; - -pub fn plot_tec_map(data: &Rinex, _borders: ((f64, f64), (f64, f64)), plot_ctx: &mut PlotContext) { - let _cmap = colorous::TURBO; - // let hover_text: Vec = data.epoch().map(|e| e.to_string()).collect(); - /* - * TEC map visualization - * plotly-rs has no means to animate plots at the moment - * therefore.. 
we create one plot for each individual Epoch - */ - for epoch in data.epoch() { - let lat: Vec<_> = data - .tec() - .filter_map( - |(t, lat, _, _, _)| { - if t == epoch { - Some(lat) - } else { - None - } - }, - ) - .collect(); - let lon: Vec<_> = data - .tec() - .filter_map( - |(t, _, lon, _, _)| { - if t == epoch { - Some(lon) - } else { - None - } - }, - ) - .collect(); - let tec: Vec<_> = data - .tec() - .filter_map( - |(t, _, _, _, tec)| { - if t == epoch { - Some(tec) - } else { - None - } - }, - ) - .collect(); - - plot_ctx.add_world_map( - &epoch.to_string(), - true, - MapboxStyle::StamenTerrain, - (32.5, -40.0), - 1, - ); - - /* plot the map grid */ - let grid = ScatterMapbox::new(lat.clone(), lon.clone()) - .marker( - Marker::new() - .size(3) - .symbol(MarkerSymbol::Circle) - .color(NamedColor::Black) - .opacity(0.5), - ) - .name("grid"); - plot_ctx.add_trace(grid); - - //let map = AnimatedDensityMapbox::new(lat.clone(), lon.clone(), z) - let map = DensityMapbox::new(lat.clone(), lon.clone(), tec.clone()) - //.title("TEST") - .name(epoch.to_string()) - .opacity(0.66) - //.hover_text_array(hover_text.clone()) - .zauto(true) - //.animation_frame("test") - .zoom(3); - plot_ctx.add_trace(map); - } -} diff --git a/rinex-cli/src/graph/record/ionosphere.rs b/rinex-cli/src/graph/record/ionosphere.rs index 9f8b69920..0fd500557 100644 --- a/rinex-cli/src/graph/record/ionosphere.rs +++ b/rinex-cli/src/graph/record/ionosphere.rs @@ -10,10 +10,10 @@ use plotly::common::{ use rinex::carrier::Carrier; use rinex::navigation::Ephemeris; // use rinex::navigation::KbModel; -use rinex::prelude::RnxContext; +use rinex_qc::prelude::QcContext; -pub fn plot_ionospheric_delay(ctx: &RnxContext, plot_ctx: &mut PlotContext) { - let ref_pos = ctx.ground_position().unwrap_or_default(); +pub fn plot_ionospheric_delay(ctx: &QcContext, plot_ctx: &mut PlotContext) { + let ref_pos = ctx.reference_position().unwrap_or_default(); let ref_geo = ref_pos.to_geodetic(); let lat_lon_ddeg = 
(ref_geo.0, ref_geo.1); diff --git a/rinex-cli/src/graph/record/meteo.rs b/rinex-cli/src/graph/record/meteo.rs deleted file mode 100644 index 42963b0cb..000000000 --- a/rinex-cli/src/graph/record/meteo.rs +++ /dev/null @@ -1,124 +0,0 @@ -use crate::cli::Context; -use crate::graph::{build_chart_epoch_axis, csv_export_timedomain, PlotContext}; //generate_markers}; -use plotly::common::{Marker, MarkerSymbol, Mode}; -use plotly::ScatterPolar; -use rinex::prelude::Observable; -use statrs::statistics::Statistics; - -/* - * Plots Meteo observations - */ -pub fn plot_meteo_observations(ctx: &Context, plot_context: &mut PlotContext, csv_export: bool) { - let rnx = ctx.data.meteo().unwrap(); // infaillible - /* - * 1 plot per physics - */ - for observable in rnx.observable() { - let unit = match observable { - Observable::Pressure => "hPa", - Observable::Temperature => "°C", - Observable::HumidityRate | Observable::RainIncrement => "%", - Observable::ZenithWetDelay - | Observable::ZenithDryDelay - | Observable::ZenithTotalDelay => "s", - Observable::WindDirection => "°", - Observable::WindSpeed => "m/s", - Observable::HailIndicator => "", - _ => unreachable!(), - }; - if *observable == Observable::WindDirection { - // we plot this one differently: on a compass similar to skyplot - continue; - } - plot_context.add_timedomain_plot( - &format!("{} Observations", observable), - &format!("{} [{}]", observable, unit), - ); - let data_x: Vec<_> = rnx - .meteo() - .flat_map(|(e, observations)| { - observations.iter().filter_map( - |(obs, _value)| { - if obs == observable { - Some(*e) - } else { - None - } - }, - ) - }) - .collect(); - let data_y: Vec<_> = rnx - .meteo() - .flat_map(|(_e, observations)| { - observations.iter().filter_map(|(obs, value)| { - if obs == observable { - Some(*value) - } else { - None - } - }) - }) - .collect(); - let trace = build_chart_epoch_axis( - &observable.to_string(), - Mode::LinesMarkers, - data_x.clone(), - data_y.clone(), - ) - 
.marker(Marker::new().symbol(MarkerSymbol::TriangleUp)); - plot_context.add_trace(trace); - if csv_export { - let fullpath = ctx - .workspace - .join("CSV") - .join(&format!("{}.csv", observable)); - - let title = format!("{} observations", observable); - csv_export_timedomain( - &fullpath, - &title, - &format!("Epoch, {} [{}]", observable, unit), - &data_x, - &data_y, - ) - .expect("failed to render data as CSV"); - } - } - /* - * Plot Wind Direction - */ - let wind_speed = rnx.wind_speed().map(|(_, speed)| speed).collect::>(); - let wind_speed_max = wind_speed.max(); - - let theta = rnx - .wind_direction() - .map(|(_, angle)| angle) - .collect::>(); - - let has_wind_direction = !theta.is_empty(); - - let rho = rnx - .wind_direction() - .map(|(t, _)| { - if let Some(speed) = rnx - .wind_speed() - .find(|(ts, _)| *ts == t) - .map(|(_, speed)| speed) - { - speed / wind_speed_max - } else { - 1.0_f64 - } - }) - .collect::>(); - - let trace = ScatterPolar::new(theta, rho) - .marker(Marker::new().symbol(MarkerSymbol::TriangleUp)) - .connect_gaps(false) - .name("Wind direction [°]"); - if has_wind_direction { - plot_context.add_polar2d_plot("Wind direction (r= normalized speed)"); - plot_context.add_trace(trace); - } -} diff --git a/rinex-cli/src/graph/record/mod.rs b/rinex-cli/src/graph/record/mod.rs deleted file mode 100644 index e117f6063..000000000 --- a/rinex-cli/src/graph/record/mod.rs +++ /dev/null @@ -1,34 +0,0 @@ -mod doris; -mod ionex; -mod ionosphere; -mod meteo; -mod navigation; -mod observation; -mod sp3_plot; - -pub use doris::plot_doris_observations; -pub use meteo::plot_meteo_observations; -pub use navigation::plot_sv_nav_clock; -pub use navigation::plot_sv_nav_orbits; -pub use observation::plot_observations; -pub use sp3_plot::plot_residual_ephemeris; - -use crate::cli::Context; -use crate::graph::PlotContext; -use clap::ArgMatches; - -use ionex::plot_tec_map; -use ionosphere::plot_ionospheric_delay; - -pub fn plot_atmosphere_conditions(ctx: &Context, 
plot_ctx: &mut PlotContext, matches: &ArgMatches) { - if matches.get_flag("tropo") { - let _meteo = ctx.data.meteo().expect("--tropo requires METEO RINEX"); - } - if matches.get_flag("ionod") { - plot_ionospheric_delay(&ctx.data, plot_ctx); - } - if matches.get_flag("tec") { - let ionex = ctx.data.ionex().expect("--tec required IONEX"); - plot_tec_map(ionex, ((0.0_f64, 0.0_f64), (0.0_f64, 0.0_f64)), plot_ctx); - } -} diff --git a/rinex-cli/src/graph/record/navigation.rs b/rinex-cli/src/graph/record/navigation.rs index 2f93d6586..18d0e1ba9 100644 --- a/rinex-cli/src/graph/record/navigation.rs +++ b/rinex-cli/src/graph/record/navigation.rs @@ -2,6 +2,7 @@ use crate::graph::{build_3d_chart_epoch_label, build_chart_epoch_axis, PlotConte use plotly::common::{Mode, Visible}; use rinex::navigation::Ephemeris; use rinex::prelude::*; +use rinex_qc::prelude::QcContext; use itertools::Itertools; use std::collections::{BTreeMap, HashMap}; @@ -20,7 +21,7 @@ type CtxClockStates = HashMap>>; -pub fn plot_sv_nav_clock(ctx: &RnxContext, plot_ctx: &mut PlotContext) { +pub fn plot_sv_nav_clock(ctx: &QcContext, plot_ctx: &mut PlotContext) { if let Some(nav) = ctx.brdc_navigation() { let nav_sv = nav.sv().collect::>(); let clk = ctx.clock(); @@ -366,7 +367,7 @@ fn plot_system_time( } } -pub fn plot_sv_nav_orbits(ctx: &RnxContext, plot_ctx: &mut PlotContext) { +pub fn plot_sv_nav_orbits(ctx: &QcContext, plot_ctx: &mut PlotContext) { let mut pos_plot_created = false; let mut nav_sv = Vec::::with_capacity(32); /* diff --git a/rinex-cli/src/graph/record/observation.rs b/rinex-cli/src/graph/record/observation.rs deleted file mode 100644 index 44579742a..000000000 --- a/rinex-cli/src/graph/record/observation.rs +++ /dev/null @@ -1,392 +0,0 @@ -use crate::cli::Context; -use plotly::{ - color::NamedColor, - common::{Marker, MarkerSymbol, Mode, Visible}, -}; -use std::collections::HashMap; - -use rinex::{navigation::Ephemeris, prelude::*}; - -use crate::graph::{build_chart_epoch_axis, 
csv_export_timedomain, generate_markers, PlotContext}; - -#[derive(Debug, PartialEq, Eq, Hash)] -enum Physics { - SSI, - Doppler, - Phase, - PseudoRange, -} - -impl Physics { - fn from_observable(observable: &Observable) -> Self { - if observable.is_phase_observable() { - Self::Phase - } else if observable.is_doppler_observable() { - Self::Doppler - } else if observable.is_ssi_observable() { - Self::SSI - } else { - Self::PseudoRange - } - } - fn plot_title(&self) -> String { - match self { - Self::SSI => "SSI".to_string(), - Self::Phase => "Phase".to_string(), - Self::Doppler => "Doppler".to_string(), - Self::PseudoRange => "Pseudo Range".to_string(), - } - } - fn y_axis(&self) -> String { - match self { - Self::SSI => "Power [dB]".to_string(), - Self::Phase => "Carrier Cycles".to_string(), - Self::Doppler => "Doppler Shifts".to_string(), - Self::PseudoRange => "Pseudo Range".to_string(), - } - } -} - -/* - * Plots given Observation RINEX content - */ -pub fn plot_observations(ctx: &Context, plot_ctx: &mut PlotContext, csv_export: bool) { - let obs_data = ctx.data.observation().unwrap(); // infaillible - let header = &obs_data.header; - let record = obs_data.record.as_obs().unwrap(); // infaillible - - ///////////////////////////////////////////////////// - // Gather all data, complex type but single iteration.. 
- // RX OK or ERROR - // per physics, - // per observable (symbolized) - // per vehicle (color map) - // x: sampling timestamp, - // y: observation (raw), - ///////////////////////////////////////////////////// - let mut clk_offset_good: Vec<(Epoch, f64)> = Vec::with_capacity(64); - let mut clk_offset_bad: Vec<(Epoch, f64)> = Vec::with_capacity(64); - let mut dataset_good: HashMap>>> = - HashMap::with_capacity(1024); - let mut dataset_bad: HashMap>>> = - HashMap::with_capacity(1024); - - for ((epoch, flag), (clock_offset, vehicles)) in record { - if flag.is_ok() { - if let Some(value) = clock_offset { - clk_offset_good.push((*epoch, *value)); - } - for (sv, observations) in vehicles { - for (observable, data) in observations { - let observable_code = observable.to_string(); - let physics = Physics::from_observable(observable); - let y = data.obs; - - if let Some(data) = dataset_good.get_mut(&physics) { - if let Some(data) = data.get_mut(&observable_code) { - if let Some(data) = data.get_mut(sv) { - data.push((*epoch, y)); - } else { - data.insert(*sv, vec![(*epoch, y)]); - } - } else { - let mut map: HashMap> = HashMap::new(); - map.insert(*sv, vec![(*epoch, y)]); - data.insert(observable_code, map); - } - } else { - let mut map: HashMap> = HashMap::new(); - map.insert(*sv, vec![(*epoch, y)]); - let mut mmap: HashMap>> = - HashMap::new(); - mmap.insert(observable_code, map); - dataset_good.insert(physics, mmap); - } - } - } - } else { - if let Some(value) = clock_offset { - clk_offset_bad.push((*epoch, *value)); - } - for (sv, observations) in vehicles { - for (observable, data) in observations { - let observable_code = observable.to_string(); - let physics = Physics::from_observable(observable); - let y = data.obs; - - if let Some(data) = dataset_bad.get_mut(&physics) { - if let Some(data) = data.get_mut(&observable_code) { - if let Some(data) = data.get_mut(sv) { - data.push((*epoch, y)); - } else { - data.insert(*sv, vec![(*epoch, y)]); - } - } else { - let mut 
map: HashMap> = HashMap::new(); - map.insert(*sv, vec![(*epoch, y)]); - data.insert(observable_code, map); - } - } else { - let mut map: HashMap> = HashMap::new(); - map.insert(*sv, vec![(*epoch, y)]); - let mut mmap: HashMap>> = - HashMap::new(); - mmap.insert(observable_code, map); - dataset_bad.insert(physics, mmap); - } - } - } - } - } - - ///////////////////////////// - // Plot Clock offset (if any) - ///////////////////////////// - if !clk_offset_good.is_empty() || !clk_offset_bad.is_empty() { - plot_ctx.add_timedomain_plot("Receiver Clock Offset", "Clock Offset [s]"); - let good_x: Vec = clk_offset_good.iter().map(|(x, _)| *x).collect(); - let good_y: Vec = clk_offset_good.iter().map(|(_, y)| *y).collect(); - - if csv_export { - let fullpath = ctx.workspace.join("CSV").join("clock-offset.csv"); - - let title = match header.rcvr.as_ref() { - Some(rcvr) => { - format!("{} (#{}) Clock Offset", rcvr.model, rcvr.sn) - }, - _ => "Receiver Clock Offset".to_string(), - }; - csv_export_timedomain( - &fullpath, - &title, - "Epoch, Clock Offset [s]", - &good_x, - &good_y, - ) - .expect("failed to render data as CSV"); - } - - let trace = build_chart_epoch_axis("Clk Offset", Mode::LinesMarkers, good_x, good_y) - .marker(Marker::new().symbol(MarkerSymbol::TriangleUp)); - plot_ctx.add_trace(trace); - trace!("receiver clock offsets"); - } - - //////////////////////////////// - // Generate 1 plot per physics - //////////////////////////////// - for physics in [Physics::PseudoRange, Physics::Phase, Physics::Doppler] { - if let Some(observables) = dataset_good.get(&physics) { - let title = physics.plot_title(); - let y_label = physics.y_axis(); - plot_ctx.add_timedomain_plot(&title, &y_label); - - let markers = generate_markers(observables.len()); - for (index, (observable, vehicles)) in observables.iter().enumerate() { - for (sv_index, (sv, data)) in vehicles.iter().enumerate() { - let good_x: Vec<_> = data.iter().map(|(x, _y)| *x).collect::<_>(); - let good_y: Vec<_> = 
data.iter().map(|(_x, y)| *y).collect::<_>(); - - if csv_export { - let fullpath = ctx - .workspace - .join("CSV") - .join(&format!("{}-{}.csv", sv, observable)); - csv_export_timedomain( - &fullpath, - &format!("{} observations", observable), - "Epoch, Observation", - &good_x, - &good_y, - ) - .expect("failed to render data as CSV"); - } - - let trace = build_chart_epoch_axis( - &format!("{:X}({})", sv, observable), - Mode::Markers, - good_x, - good_y, - ) - .marker(Marker::new().symbol(markers[index].clone())) - .visible({ - if index == 0 && sv_index == 0 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - } - if let Some(bad_observables) = dataset_bad.get(&physics) { - for (index, (bad_observable, bad_sv)) in bad_observables.iter().enumerate() { - for (sv_index, (sv, data)) in bad_sv.iter().enumerate() { - let bad_x: Vec<_> = data.iter().map(|(x, _y)| *x).collect::<_>(); - let bad_y: Vec<_> = data.iter().map(|(_x, y)| *y).collect::<_>(); - let trace = build_chart_epoch_axis( - &format!("{:X}({})", sv, bad_observable), - Mode::Markers, - bad_x, - bad_y, - ) - .marker( - Marker::new() - .symbol(markers[index].clone()) - .color(NamedColor::Black), - ) - .visible({ - if index == 0 && sv_index == 0 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - } - } - trace!("{} plot", title); - } - } - - //////////////////////////////////////////// - // Generate 1 plot for SSI - // that we possibly augment with NAV context - //////////////////////////////////////////// - if let Some(good_observables) = dataset_good.get(&Physics::SSI) { - let title = Physics::SSI.plot_title(); - let y_label = Physics::SSI.y_axis(); - - let augmented = ctx.data.has_brdc_navigation() || ctx.data.has_sp3(); - - if augmented { - plot_ctx.add_timedomain_2y_plot(&title, &y_label, "Elevation [Degrees]"); - } else { - plot_ctx.add_timedomain_plot(&title, &y_label); - } - - // Plot Observations - let markers = 
generate_markers(good_observables.len()); - for (index, (observable, vehicles)) in good_observables.iter().enumerate() { - for (sv_index, (sv, data)) in vehicles.iter().enumerate() { - let good_x: Vec<_> = data.iter().map(|(x, _y)| *x).collect::<_>(); - let good_y: Vec<_> = data.iter().map(|(_x, y)| *y).collect::<_>(); - - if csv_export { - let fullpath = ctx - .workspace - .join("CSV") - .join(&format!("{}-{}.csv", sv, observable)); - csv_export_timedomain( - &fullpath, - &format!("{} observations", observable), - "Epoch, Observation", - &good_x, - &good_y, - ) - .expect("failed to render data as CSV"); - } - - // Augment (if possible) - if augmented && index == 0 { - // determine SV state - let rx_ecef = ctx.rx_ecef.unwrap(); - - if let Some(nav) = ctx.data.brdc_navigation() { - let data = good_x - .iter() - .filter_map(|t| { - nav.sv_position_interpolate(*sv, *t, 5) - .map(|(x_km, y_km, z_km)| { - ( - *t, - Ephemeris::elevation_azimuth( - (x_km * 1.0E3, y_km * 1.0E3, z_km * 1.0E3), - rx_ecef, - ) - .0, - ) - }) - }) - .collect::>(); - // plot - let data_x = data.iter().map(|(x, _)| *x).collect::>(); - let data_y = data.iter().map(|(_, y)| *y).collect::>(); - let trace = build_chart_epoch_axis( - &format!("BRDC_Elev({:X})", sv), - Mode::Markers, - data_x, - data_y, - ) - .y_axis("y2") - .marker(Marker::new().symbol(markers[index].clone())) - .visible({ - if sv_index == 0 && index == 0 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - if let Some(sp3) = ctx.data.sp3() { - let data = good_x - .iter() - .filter_map(|t| { - sp3.sv_position_interpolate(*sv, *t, 5) - .map(|(x_km, y_km, z_km)| { - ( - *t, - Ephemeris::elevation_azimuth( - (x_km * 1.0E3, y_km * 1.0E3, z_km * 1.0E3), - rx_ecef, - ) - .0, - ) - }) - }) - .collect::>(); - // plot - let data_x = data.iter().map(|(x, _)| *x).collect::>(); - let data_y = data.iter().map(|(_, y)| *y).collect::>(); - let trace = build_chart_epoch_axis( - &format!("SP3_Elev({:X})", 
sv), - Mode::Markers, - data_x, - data_y, - ) - .y_axis("y2") - .marker(Marker::new().symbol(markers[index].clone())) - .visible({ - if sv_index == 0 && index == 0 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - } - - let trace = build_chart_epoch_axis( - &format!("{:X}({})", sv, observable), - Mode::Markers, - good_x, - good_y, - ) - .marker(Marker::new().symbol(markers[index].clone())) - .y_axis("y1") - .visible({ - if index == 0 && sv_index == 0 { - Visible::True - } else { - Visible::LegendOnly - } - }); - plot_ctx.add_trace(trace); - } - } - trace!("{} observations", y_label); - } -} diff --git a/rinex-cli/src/graph/record/sp3_plot.rs b/rinex-cli/src/graph/record/sp3_plot.rs index 27d665f30..f327e5745 100644 --- a/rinex-cli/src/graph/record/sp3_plot.rs +++ b/rinex-cli/src/graph/record/sp3_plot.rs @@ -1,8 +1,7 @@ use crate::graph::{build_chart_epoch_axis, PlotContext}; use plotly::common::{Mode, Visible}; //Marker, MarkerSymbol -use rinex::prelude::Epoch; -use rinex::prelude::RnxContext; -use rinex::prelude::SV; +use rinex::prelude::{Epoch, SV}; +use rinex_qc::prelude::QcContext; use std::collections::HashMap; /* @@ -10,7 +9,7 @@ use std::collections::HashMap; * compares residual error between broadcast ephemeris * and SP3 high precision orbits */ -pub fn plot_residual_ephemeris(ctx: &RnxContext, plot_ctx: &mut PlotContext) { +pub fn plot_residual_ephemeris(ctx: &QcContext, plot_ctx: &mut PlotContext) { let sp3 = ctx .sp3() // cannot fail at this point .unwrap(); diff --git a/rinex-cli/src/graph/skyplot.rs b/rinex-cli/src/graph/skyplot.rs deleted file mode 100644 index 9711beba1..000000000 --- a/rinex-cli/src/graph/skyplot.rs +++ /dev/null @@ -1,56 +0,0 @@ -use crate::graph::PlotContext; -use plotly::{ - common::{Mode, Visible}, - ScatterPolar, -}; -use rinex::prelude::{Epoch, GroundPosition, RnxContext}; - -/* - * Skyplot view - */ -pub fn skyplot(ctx: &RnxContext, rx_ecef: (f64, f64, f64), plot_context: &mut 
PlotContext) { - plot_context.add_polar2d_plot("Skyplot"); - - if let Some(rnx) = ctx.brdc_navigation() { - for (svnn_index, svnn) in rnx.sv().enumerate() { - // per sv - // grab related elevation data - // Rho = degrees(elev) - // Theta = degrees(azim) - let data: Vec<(Epoch, f64, f64)> = rnx - .sv_elevation_azimuth(Some(GroundPosition::from_ecef_wgs84(rx_ecef))) - .filter_map(|(epoch, sv, (elev, azi))| { - if sv == svnn { - let rho = elev; - let theta = azi; - Some((epoch, rho, theta)) - } else { - None - } - }) - .collect(); - - let rho: Vec = data.iter().map(|(_e, rho, _theta)| *rho).collect(); - let theta: Vec = data.iter().map(|(_e, _rho, theta)| *theta).collect(); - - //TODO: color gradient to emphasize day course - let trace = ScatterPolar::new(theta, rho) - .mode(Mode::LinesMarkers) - .web_gl_mode(true) - .visible({ - /* - * Plot only first few dataset, - * to improve performance when opening plots - */ - if svnn_index < 4 { - Visible::True - } else { - Visible::LegendOnly - } - }) - .connect_gaps(false) - .name(format!("{:X}", svnn)); - plot_context.add_trace(trace); - } - } -} diff --git a/rinex-cli/src/identification.rs b/rinex-cli/src/identification.rs deleted file mode 100644 index d00bbcba2..000000000 --- a/rinex-cli/src/identification.rs +++ /dev/null @@ -1,283 +0,0 @@ -use clap::ArgMatches; - -use rinex::{ - observation::SNR, - prelude::{Constellation, Epoch, Observable, ProductType, Rinex, RnxContext}, - preprocessing::*, -}; - -use std::str::FromStr; - -use itertools::Itertools; -use serde::Serialize; -use std::collections::HashMap; - -use map_3d::{ecef2geodetic, Ellipsoid}; - -/* - * Dataset identification operations - */ -pub fn dataset_identification(ctx: &RnxContext, matches: &ArgMatches) { - /* - * Browse all possible types of data, and apply relevant ID operation - */ - if let Some(files) = ctx.files(ProductType::Observation) { - let files = files - .iter() - .map(|p| p.file_name().unwrap().to_string_lossy().to_string()) - .collect::>(); - 
println!("\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("%%%%%%%%%%%% Observation Data %%%%%%%%%"); - println!("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("{:?}", files); - } - if let Some(data) = ctx.observation() { - if matches.get_flag("all") || matches.get_flag("epochs") { - println!("{:#?}", EpochReport::from_data(data)); - } - if matches.get_flag("all") || matches.get_flag("gnss") { - let constel = data - .constellation() - .sorted() - .map(|c| format!("{:X}", c)) - .collect::>(); - println!("Constellations: {:?}", constel); - } - if matches.get_flag("all") || matches.get_flag("sv") { - let sv = data - .sv() - .sorted() - .map(|sv| format!("{:X}", sv)) - .collect::>(); - println!("SV: {:?}", sv); - } - if matches.get_flag("all") || matches.get_flag("observables") { - let observables = data - .observable() - .sorted() - .map(|obs| obs.to_string()) - .collect::>(); - println!("Observables: {:?}", observables); - } - if matches.get_flag("all") || matches.get_flag("snr") { - let report = SNRReport::from_data(data); - println!("SNR: {:#?}", report); - } - if matches.get_flag("all") || matches.get_flag("anomalies") { - let anomalies = data.epoch_anomalies().collect::>(); - if anomalies.is_empty() { - println!("No anomalies reported."); - } else { - println!("Anomalies: {:#?}", anomalies); - } - } - } - - if let Some(files) = ctx.files(ProductType::MeteoObservation) { - let files = files - .iter() - .map(|p| p.file_name().unwrap().to_string_lossy().to_string()) - .collect::>(); - println!("\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("%%%%%%%%%%%% Meteo Data %%%%%%%%%"); - println!("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("{:?}", files); - } - if let Some(data) = ctx.meteo() { - if matches.get_flag("all") || matches.get_flag("epochs") { - println!("{:#?}", EpochReport::from_data(data)); - } - if matches.get_flag("all") || matches.get_flag("observables") { - let observables = data - .observable() - .sorted() - .map(|obs| 
obs.to_string()) - .collect::>(); - println!("Observables: {:?}", observables); - } - if let Some(header) = &data.header.meteo { - for sensor in &header.sensors { - println!("{} sensor: ", sensor.observable); - if let Some(model) = &sensor.model { - println!("model: \"{}\"", model); - } - if let Some(sensor_type) = &sensor.sensor_type { - println!("type: \"{}\"", sensor_type); - } - if let Some(ecef) = &sensor.position { - let (lat, lon, alt) = ecef2geodetic(ecef.0, ecef.1, ecef.2, Ellipsoid::WGS84); - if !lat.is_nan() && !lon.is_nan() { - println!("coordinates: lat={}°, lon={}°", lat, lon); - } - if alt.is_nan() { - println!("altitude above sea: {}m", ecef.3); - } else { - println!("altitude above sea: {}m", alt + ecef.3); - } - } - } - } - } - - if let Some(files) = ctx.files(ProductType::BroadcastNavigation) { - let files = files - .iter() - .map(|p| p.file_name().unwrap().to_string_lossy().to_string()) - .collect::>(); - println!("\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("%%%%%%%%%%%% Navigation Data (BRDC) %%%%%%%%%"); - println!("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("{:?}", files); - } - if let Some(data) = ctx.brdc_navigation() { - if matches.get_flag("all") || matches.get_flag("nav-msg") { - let msg = data.nav_msg_type().collect::>(); - println!("BRDC NAV Messages: {:?}", msg); - } - println!("BRDC Ephemerides: "); - let ephemerides = data.filter(Filter::from_str("EPH").unwrap()); - if matches.get_flag("all") || matches.get_flag("epochs") { - println!("{:#?}", EpochReport::from_data(data)); - } - if matches.get_flag("all") || matches.get_flag("gnss") { - let constel = ephemerides - .constellation() - .sorted() - .map(|c| format!("{:X}", c)) - .collect::>(); - println!("Constellations: {:?}", constel); - } - if matches.get_flag("all") || matches.get_flag("sv") { - let sv = ephemerides - .sv() - .sorted() - .map(|sv| format!("{:X}", sv)) - .collect::>(); - println!("SV: {:?}", sv); - } - } - - if let Some(files) = 
ctx.files(ProductType::HighPrecisionOrbit) { - let files = files - .iter() - .map(|p| p.file_name().unwrap().to_string_lossy().to_string()) - .collect::>(); - println!("\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("%%%%%%%%%%%% Precise Orbits (SP3) %%%%%%%%%"); - println!("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"); - println!("{:?}", files); - } - if let Some(data) = ctx.sp3() { - println!("SP3 orbits: "); - if matches.get_flag("all") || matches.get_flag("epochs") { - let report = EpochReport { - first: match data.first_epoch() { - Some(first) => first.to_string(), - None => "Undefined".to_string(), - }, - last: match data.last_epoch() { - Some(last) => last.to_string(), - None => "Undefined".to_string(), - }, - sampling: { - [( - format!("dt={}s", data.epoch_interval.to_seconds()), - data.nb_epochs(), - )] - .into() - }, - system: { - if let Some(system) = data.constellation.timescale() { - system.to_string() - } else { - "Undefined".to_string() - } - }, - }; - println!("{:#?}", report); - } - if matches.get_flag("all") || matches.get_flag("sv") { - let sv = data - .sv() - .sorted() - .map(|sv| format!("{:X}", sv)) - .collect::>(); - println!("SV: {:?}", sv); - } - } -} - -#[derive(Clone, Debug, Serialize)] -struct EpochReport { - pub first: String, - pub last: String, - pub system: String, - pub sampling: HashMap, -} - -impl EpochReport { - fn from_data(data: &Rinex) -> Self { - let first_epoch = data.first_epoch(); - Self { - first: { - if let Some(first) = first_epoch { - first.to_string() - } else { - "NONE".to_string() - } - }, - last: { - if let Some(last) = data.last_epoch() { - last.to_string() - } else { - "NONE".to_string() - } - }, - sampling: { - data.sampling_histogram() - .map(|(dt, pop)| (format!("dt={}s", dt.to_seconds()), pop)) - .collect() - }, - system: { - if data.is_observation_rinex() || data.is_meteo_rinex() { - if let Some(first) = first_epoch { - first.time_scale.to_string() - } else { - "Undefined".to_string() - } - } 
else if data.is_navigation_rinex() { - match data.header.constellation { - Some(Constellation::Mixed) => "Mixed".to_string(), - Some(c) => c.timescale().unwrap().to_string(), - None => "Undefined".to_string(), - } - } else { - "Undefined".to_string() - } - }, - } - } -} - -#[derive(Clone, Debug, Serialize)] -struct SNRReport { - pub worst: Option<(Epoch, String, Observable, SNR)>, - pub best: Option<(Epoch, String, Observable, SNR)>, -} - -impl SNRReport { - fn from_data(data: &Rinex) -> Self { - Self { - worst: { - data.snr() - .min_by(|(_, _, _, snr_a), (_, _, _, snr_b)| snr_a.cmp(snr_b)) - .map(|((t, _), sv, obs, snr)| (t, sv.to_string(), obs.clone(), snr)) - }, - best: { - data.snr() - .max_by(|(_, _, _, snr_a), (_, _, _, snr_b)| snr_a.cmp(snr_b)) - .map(|((t, _), sv, obs, snr)| (t, sv.to_string(), obs.clone(), snr)) - }, - } - } -} diff --git a/rinex-cli/src/main.rs b/rinex-cli/src/main.rs index 72dbb54ed..3e77fba74 100644 --- a/rinex-cli/src/main.rs +++ b/rinex-cli/src/main.rs @@ -5,18 +5,16 @@ //mod analysis; // basic analysis mod cli; // command line interface mod fops; -mod graph; -mod identification; // high level identification/macros mod positioning; -mod qc; // QC report generator // plotting operations // file operation helpers // graphical analysis // positioning + CGGTTS opmode mod preprocessing; use preprocessing::preprocess; -use rinex::prelude::RnxContext; +mod report; +use report::Report; -use std::fs::create_dir_all; -use std::path::{Path, PathBuf}; +use rinex_qc::prelude::{QcContext, QcExtraPage}; +use std::path::Path; use walkdir::WalkDir; extern crate gnss_rs as gnss; @@ -25,7 +23,7 @@ extern crate gnss_rtk as rtk; use rinex::prelude::Rinex; use sp3::prelude::SP3; -use cli::{Cli, Context}; +use cli::{Cli, Context, Workspace}; use map_3d::{ecef2geodetic, rad2deg, Ellipsoid}; @@ -38,18 +36,24 @@ use thiserror::Error; #[derive(Debug, Error)] pub enum Error { + #[error("i/o error")] + StdioError(#[from] std::io::Error), #[error("rinex error")] 
RinexError(#[from] rinex::Error), #[error("missing OBS RINEX")] MissingObservationRinex, #[error("missing (BRDC) NAV RINEX")] MissingNavigationRinex, + #[error("missing IONEX")] + MissingIONEX, + #[error("missing Meteo RINEX")] + MissingMeteoRinex, + #[error("missing Clock RINEX")] + MissingClockRinex, #[error("merge ops failure")] MergeError(#[from] rinex::merge::Error), #[error("split ops failure")] SplitError(#[from] rinex::split::Error), - #[error("failed to create QC report: permission denied!")] - QcReportCreationError, #[error("positioning solver error")] PositioningSolverError(#[from] positioning::Error), } @@ -57,8 +61,8 @@ pub enum Error { /* * Parses and preprepocess all files passed by User */ -fn user_data_parsing(cli: &Cli) -> RnxContext { - let mut ctx = RnxContext::default(); +fn user_data_parsing(cli: &Cli) -> QcContext { + let mut ctx = QcContext::default(); let max_depth = match cli.matches.get_one::("depth") { Some(value) => *value as usize, @@ -129,12 +133,11 @@ fn user_data_parsing(cli: &Cli) -> RnxContext { warn!("non supported file format \"{}\"", path.display()); } } - /* - * Preprocess whole context + * Preprocessing */ preprocess(&mut ctx, cli); - debug!("{:?}", ctx); + debug!("{:?}", ctx); // context visualization ctx } @@ -154,7 +157,7 @@ pub fn main() -> Result<(), Error> { // User Data parsing let mut data_ctx = user_data_parsing(&cli); - let ctx_position = data_ctx.ground_position(); + let ctx_position = data_ctx.reference_position(); let ctx_stem = Context::context_stem(&mut data_ctx); // Form context @@ -162,31 +165,7 @@ pub fn main() -> Result<(), Error> { name: ctx_stem.clone(), data: data_ctx, quiet: cli.matches.get_flag("quiet"), - workspace: { - /* - * Supports both an environment variable and - * a command line opts. Otherwise we use ./workspace directly - * but its creation must pass. - * This is documented in Wiki pages. 
- */ - let path = match std::env::var("RINEX_WORKSPACE") { - Ok(path) => Path::new(&path).join(&ctx_stem).to_path_buf(), - _ => match cli.matches.get_one::("workspace") { - Some(base_dir) => Path::new(base_dir).join(&ctx_stem).to_path_buf(), - None => Path::new("WORKSPACE").join(&ctx_stem).to_path_buf(), - }, - }; - // make sure the workspace is viable and exists, otherwise panic - create_dir_all(&path).unwrap_or_else(|e| { - panic!( - "failed to create session workspace \"{}\": {:?}", - path.display(), - e - ) - }); - info!("session workspace is \"{}\"", path.to_string_lossy()); - path - }, + workspace: Workspace::new(&ctx_stem, &cli), rx_ecef: { /* * Determine and store RX (ECEF) position @@ -238,38 +217,64 @@ pub fn main() -> Result<(), Error> { }, }; + // On File Operations (Data synthesis) + // prepare one subfolder to store the output products + if cli.has_fops_output_product() { + ctx.workspace.create_subdir("OUTPUT"); + } + /* * Exclusive opmodes */ + let mut extra_pages = Vec::::new(); + match cli.matches.subcommand() { - Some(("filegen", submatches)) => { + /* + * File operations abort here and do not windup in analysis opmode. + * Users needs to then deploy analysis mode on previously generated files. 
+ */ + Some(("generate", submatches)) => { fops::filegen(&ctx, submatches)?; - }, - Some(("graph", submatches)) => { - graph::graph_opmode(&ctx, submatches)?; - }, - Some(("identify", submatches)) => { - identification::dataset_identification(&ctx.data, submatches); + return Ok(()); }, Some(("merge", submatches)) => { fops::merge(&ctx, submatches)?; + return Ok(()); }, Some(("split", submatches)) => { fops::split(&ctx, submatches)?; - }, - Some(("quality-check", submatches)) => { - qc::qc_report(&ctx, submatches)?; - }, - Some(("positioning", submatches)) => { - positioning::precise_positioning(&ctx, submatches)?; + return Ok(()); }, Some(("tbin", submatches)) => { fops::time_binning(&ctx, submatches)?; + return Ok(()); + }, + Some(("diff", submatches)) => { + fops::diff(&ctx, submatches)?; + return Ok(()); }, - Some(("sub", submatches)) => { - fops::substract(&ctx, submatches)?; + Some(("ppp", submatches)) => { + let chapter = positioning::precise_positioning(&ctx, submatches)?; + extra_pages.push(chapter); }, - _ => error!("no opmode specified!"), + _ => {}, + } + + // report + let cfg = cli.qc_config(); + let mut report = Report::new(&cli, &ctx, cfg); + + // customization + for extra in extra_pages { + report.customize(extra); } + + // generation + report.generate(&cli, &ctx)?; + + if !ctx.quiet { + ctx.workspace.open_with_web_browser(); + } + Ok(()) } // main diff --git a/rinex-cli/src/positioning/cggtts/mod.rs b/rinex-cli/src/positioning/cggtts/mod.rs index bcb45421c..edf6c943b 100644 --- a/rinex-cli/src/positioning/cggtts/mod.rs +++ b/rinex-cli/src/positioning/cggtts/mod.rs @@ -3,7 +3,10 @@ use clap::ArgMatches; use std::collections::HashMap; mod post_process; -pub use post_process::{post_process, Error as PostProcessingError}; +pub use post_process::post_process; + +mod report; +pub use report::Report; use gnss::prelude::{Constellation, SV}; @@ -34,6 +37,8 @@ use crate::{ cast_rtk_carrier, kb_model, ng_model, //tropo_components, + rtk_carrier_cast, + 
rtk_reference_carrier, Error as PositioningError, Time, }, @@ -93,6 +98,7 @@ where .dominant_sample_rate() .expect("RNX2CGGTTS requires steady GNSS observations"); + // let mut initialized = false; // solver state let mut time = Time::from_ctx(ctx); // CGGTTS specifics @@ -161,20 +167,22 @@ where continue; } + let mut ref_observable = observable.to_string(); + let mut codes = vec![PseudoRange { carrier: rtk_carrier, snr: { data.snr.map(|snr| snr.into()) }, value: data.obs, }]; - //let mut doppler = Option::::None; - let mut phases = Vec::::with_capacity(4); - // Subsidary Pseudo Range (if needed) match solver.cfg.method { Method::CPP | Method::PPP => { - // Attach secondary PR + // locate secondary signal for (second_obs, second_data) in observations { + if !second_obs.is_pseudorange_observable() { + continue; + } let rhs_carrier = Carrier::from_observable(sv.constellation, second_obs); if rhs_carrier.is_err() { @@ -183,33 +191,46 @@ where let rhs_carrier = rhs_carrier.unwrap(); let rtk_carrier = cast_rtk_carrier(rhs_carrier); - if second_obs.is_pseudorange_observable() && rhs_carrier != carrier { + if rhs_carrier != carrier { codes.push(PseudoRange { carrier: rtk_carrier, value: second_data.obs, snr: { data.snr.map(|snr| snr.into()) }, }); } + // update ref. 
observable if this one is to serve as reference + if rtk_reference_carrier(rtk_carrier) { + ref_observable = second_obs.to_string(); + } } }, _ => {}, // not needed }; - // Subsidary Phase Range (if needed) + // Dual Phase Range (if needed) + //let mut doppler = Option::::None; + let mut phases = Vec::::with_capacity(4); + if solver.cfg.method == Method::PPP { - for (second_obs, second_data) in observations { - if second_obs.is_phase_observable() { - let rhs_carrier = - Carrier::from_observable(sv.constellation, second_obs); - if rhs_carrier.is_err() { + for code in &codes { + let target_carrier = rtk_carrier_cast(code.carrier); + for (obs, data) in observations { + if !obs.is_phase_observable() { + continue; + } + let carrier = Carrier::from_observable(sv.constellation, obs); + if carrier.is_err() { + continue; + } + let carrier = carrier.unwrap(); + + if target_carrier != carrier { continue; } - let rhs_carrier = rhs_carrier.unwrap(); - let rtk_carrier = cast_rtk_carrier(rhs_carrier); phases.push(PhaseRange { ambiguity: None, - carrier: rtk_carrier, - value: second_data.obs, + carrier: code.carrier, + value: data.obs, snr: { data.snr.map(|snr| snr.into()) }, }); } @@ -218,20 +239,6 @@ where let candidate = Candidate::new(*sv, *t, clock_corr, sv_eph.tgd(), codes, phases); - let ref_observable = Carrier::L1; - //TODO - //match solver.cfg.method { - // Method::SPP => candidate - // .prefered_pseudorange() - // .and_then(|sig| Some(sig.carrier)), - // Method::CPP => candidate - // .code_if_combination() - // .and_then(|cmb| Some(cmb.reference)), - // Method::PPP => candidate - // .phase_if_combination() - // .and_then(|cmb| Some(cmb.reference)), - //}; - match solver.resolve(*t, &vec![candidate], &iono_bias, &tropo_bias) { Ok((t, pvt_solution)) => { let pvt_data = pvt_solution.sv.get(sv).unwrap(); // infaillible @@ -337,7 +344,7 @@ where }, 0, // TODO "rcvr_channel" > 0 if known GlonassChannel::default(), //TODO - &ref_observable.to_string(), + &ref_observable, ) }, 
_ => { @@ -357,7 +364,7 @@ where _ => None, }, 0, // TODO "rcvr_channel" > 0 if known - &ref_observable.to_string(), + &ref_observable, ) }, }; // match constellation @@ -400,5 +407,7 @@ where info!("{:?} - {} until next track", t, next_release.unwrap() - *t); } //.observations() + tracks.sort_by(|a, b| a.epoch.cmp(&b.epoch)); + Ok(tracks) } diff --git a/rinex-cli/src/positioning/cggtts/post_process.rs b/rinex-cli/src/positioning/cggtts/post_process.rs index de2c29f69..276323e7a 100644 --- a/rinex-cli/src/positioning/cggtts/post_process.rs +++ b/rinex-cli/src/positioning/cggtts/post_process.rs @@ -3,26 +3,16 @@ use crate::cli::Context; use cggtts::prelude::*; use cggtts::Coordinates; use clap::ArgMatches; -use std::fs::File; use std::io::Write; -use thiserror::Error; - -#[derive(Debug, Error)] -pub enum Error { - #[error("failed to write cggtts file (permission denied)")] - IoError(#[from] std::io::Error), -} - -use crate::fops::open_with_web_browser; /* * CGGTTS file generation and solutions post processing */ pub fn post_process( ctx: &Context, - mut tracks: Vec, + tracks: &Vec, matches: &ArgMatches, -) -> Result<(), Error> { +) -> std::io::Result<()> { /* * CGGTTS formation and customization */ @@ -76,21 +66,11 @@ pub fn post_process( env!("CARGO_PKG_VERSION") )); - tracks.sort_by(|a, b| a.epoch.cmp(&b.epoch)); - for track in tracks { - cggtts.tracks.push(track); + cggtts.tracks.push(track.clone()); } - let filename = ctx.workspace.join(cggtts.filename()); - let mut fd = File::create(&filename)?; + let mut fd = ctx.workspace.create_file(&cggtts.filename()); write!(fd, "{}", cggtts)?; - info!("{} has been generated", filename.to_string_lossy()); - - if !ctx.quiet { - let path = filename.to_string_lossy().to_string(); - open_with_web_browser(&path); - } - Ok(()) } diff --git a/rinex-cli/src/positioning/cggtts/report.rs b/rinex-cli/src/positioning/cggtts/report.rs new file mode 100644 index 000000000..45b3afbce --- /dev/null +++ 
b/rinex-cli/src/positioning/cggtts/report.rs @@ -0,0 +1,395 @@ +use crate::cli::Context; +use itertools::Itertools; + +use cggtts::prelude::{CommonViewClass, Duration, Epoch, Track, SV}; +use rinex::prelude::GroundPosition; +use rinex_qc::prelude::{html, MarkerSymbol, Markup, Mode, Plot, QcExtraPage, Render}; + +struct ReportTab {} + +impl Render for ReportTab { + fn render(&self) -> Markup { + html! { + a id="menu:cggtts" { + span class="icon" { + i class="fa-solid fa-clock" {} + } + "CGGTTS Solutions" + } + } + } +} + +struct Summary { + last_epoch: Epoch, + first_epoch: Epoch, + duration: Duration, + satellites: Vec, + trk_duration: Duration, + cv_class: CommonViewClass, + ground_pos: GroundPosition, +} + +impl Summary { + fn new(ctx: &Context, solutions: &Vec) -> Self { + let mut trk_duration = Duration::default(); + let mut cv_class = CommonViewClass::default(); + let (mut first_epoch, mut last_epoch) = (Epoch::default(), Epoch::default()); + let satellites = solutions + .iter() + .map(|trk| trk.sv) + .unique() + .collect::>(); + for (trk_index, track) in solutions.iter().enumerate() { + if trk_index == 0 { + cv_class = track.class; + first_epoch = track.epoch; + trk_duration = track.duration; + } + last_epoch = track.epoch; + } + Self { + satellites, + trk_duration, + cv_class, + first_epoch, + last_epoch, + duration: last_epoch - first_epoch, + ground_pos: ctx.data.reference_position().unwrap(), + } + } +} + +impl Render for Summary { + fn render(&self) -> Markup { + html! 
{ + div class="table-container" { + table class="table is-bordered" { + tbody { + tr { + th class="is-info" { + "Common View" + } + td { + (self.cv_class.to_string()) + } + } + tr { + th class="is-info" { + "Track duration" + } + td { + (self.trk_duration.to_string()) + } + } + tr { + th class="is-info" { + "Position" + } + td { + (self.ground_pos.render()) + } + } + tr { + th class="is-info" { + "Satellites" + } + td { + (self.satellites.iter().join(", ")) + } + } + tr { + th class="is-info" { + "First Epoch" + } + td { + (self.first_epoch.to_string()) + } + } + tr { + th class="is-info" { + "Last Epoch" + } + td { + (self.last_epoch.to_string()) + } + } + tr { + th class="is-info" { + "Duration" + } + td { + (self.duration.to_string()) + } + } + } + } + } + } + } +} + +/// Solutions report +struct ReportContent { + summary: Summary, + sv_plot: Plot, + elev_plot: Plot, + sky_plot: Plot, + ionod_plot: Plot, + refsv_plot: Plot, + refsys_plot: Plot, + tropod_plot: Plot, +} + +impl ReportContent { + pub fn new(ctx: &Context, solutions: &Vec) -> Self { + //let epochs = solutions.iter().map(|trk| trk.epoch).collect::>(); + let summary = Summary::new(ctx, solutions); + Self { + sv_plot: { + let mut plot = Plot::timedomain_plot("sv_plot", "SV Plot", "PRN #", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(trk.epoch) } else { None }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(sv.prn) } else { None }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + elev_plot: { + let mut plot = + Plot::timedomain_plot("elev_plot", "Elevation", "Elevation [°]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(trk.epoch) } else { None }) + .collect::>(); + let y = solutions + 
.iter() + .filter_map(|trk| { + if trk.sv == *sv { + Some(trk.elevation) + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + ionod_plot: { + let plot = + Plot::timedomain_plot("ionod_plot", "Ionospheric Delay", "Error [m]", true); + plot + }, + tropod_plot: { + let mut plot = + Plot::timedomain_plot("tropod_plot", "Tropospheric Delay", "Error [m]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(trk.epoch) } else { None }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|trk| { + if trk.sv == *sv { + Some(trk.data.mdtr) + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + refsys_plot: { + let mut plot = Plot::timedomain_plot("refsys_plot", "REFSYS", "REFSYS [s]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(trk.epoch) } else { None }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|trk| { + if trk.sv == *sv { + Some(trk.data.refsys) + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + refsv_plot: { + let mut plot = Plot::timedomain_plot("refsv_plot", "REFSV", "SRSV [s]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|trk| if trk.sv == *sv { Some(trk.epoch) } else { None }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|trk| { + if trk.sv == *sv { + Some(trk.data.refsv) + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + 
MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + sky_plot: { + let plot = Plot::sky_plot("sky_plot", "Sky Plot", true); + plot + }, + summary, + } + } +} + +impl Render for ReportContent { + fn render(&self) -> Markup { + html! { + div class="table-container" { + table class="table is-bordered" { + tbody { + tr { + th class="is-info" { + "Summary" + } + td { + (self.summary.render()) + } + } + tr { + th class="is-info" { + "SV Plot" + } + td { + (self.sv_plot.render()) + } + } + tr { + th class="is-info" { + "Elevation" + } + td { + (self.elev_plot.render()) + } + } + tr { + th class="is-info" { + "Sky Plot" + } + td { + (self.sky_plot.render()) + } + } + tr { + th class="is-info" { + "REFSYS" + } + td { + (self.refsys_plot.render()) + } + } + tr { + th class="is-info" { + "REFSV" + } + td { + (self.refsv_plot.render()) + } + } + tr { + th class="is-info" { + "Ionosphere" + } + td { + (self.ionod_plot.render()) + } + } + tr { + th class="is-info" { + "Troposphere" + } + td { + (self.tropod_plot.render()) + } + } + } + } + } + } + } +} + +pub struct Report { + tab: ReportTab, + content: ReportContent, +} + +impl Report { + pub fn formalize(self) -> QcExtraPage { + QcExtraPage { + tab: Box::new(self.tab), + html_id: "cggtts".to_string(), + content: Box::new(self.content), + } + } + pub fn new(ctx: &Context, solutions: &Vec) -> Self { + Self { + tab: ReportTab {}, + content: ReportContent::new(ctx, solutions), + } + } +} diff --git a/rinex-cli/src/positioning/mod.rs b/rinex-cli/src/positioning/mod.rs index 6cc7c6c49..0ddae7bf5 100644 --- a/rinex-cli/src/positioning/mod.rs +++ b/rinex-cli/src/positioning/mod.rs @@ -1,24 +1,25 @@ use crate::cli::Context; +use clap::ArgMatches; use std::cell::RefCell; use std::fs::read_to_string; +// use anise::almanac::Almanac; -mod ppp; -// precise point positioning -use ppp::post_process as ppp_post_process; -use ppp::PostProcessingError as PPPPostProcessingError; +mod ppp; // precise point positioning 
+use ppp::Report as PPPReport; mod cggtts; // CGGTTS special solver -use cggtts::post_process as cggtts_post_process; -use cggtts::PostProcessingError as CGGTTSPostProcessingError; +use cggtts::{post_process as cggtts_post_process, Report as CggttsReport}; -use clap::ArgMatches; -use gnss::prelude::Constellation; // SV}; -use rinex::carrier::Carrier; -use rinex::prelude::Rinex; +use rinex::{ + carrier::Carrier, + prelude::{Constellation, Rinex}, +}; + +use rinex_qc::prelude::QcExtraPage; use rtk::prelude::{ BdModel, Carrier as RTKCarrier, Config, Duration, Epoch, Error as RTKError, KbModel, Method, - NgModel, PVTSolutionType, Solver, + NgModel, PVTSolutionType, Position, Solver, Vector3, }; use thiserror::Error; @@ -36,10 +37,33 @@ pub use interp::Buffer as BufferTrait; pub enum Error { #[error("solver error")] SolverError(#[from] RTKError), - #[error("ppp post processing error")] - PPPPostProcessingError(#[from] PPPPostProcessingError), - #[error("cggtts post processing error")] - CGGTTSPostProcessingError(#[from] CGGTTSPostProcessingError), + #[error("no solutions: check your settings or input")] + NoSolutions, + #[error("i/o error")] + StdioError(#[from] std::io::Error), +} + +/* + * Converts `RTK Carrier` into compatible struct + */ +pub fn rtk_carrier_cast(carrier: RTKCarrier) -> Carrier { + match carrier { + RTKCarrier::L2 => Carrier::L2, + RTKCarrier::L5 => Carrier::L5, + RTKCarrier::L6 => Carrier::L6, + RTKCarrier::E1 => Carrier::E1, + RTKCarrier::E5 => Carrier::E5, + RTKCarrier::E6 => Carrier::E6, + RTKCarrier::E5A => Carrier::E5a, + RTKCarrier::E5B => Carrier::E5b, + RTKCarrier::B1I => Carrier::B1I, + RTKCarrier::B2 => Carrier::B2, + RTKCarrier::B3 => Carrier::B3, + RTKCarrier::B2A => Carrier::B2A, + RTKCarrier::B2iB2b => Carrier::B2I, + RTKCarrier::B1aB1c => Carrier::B1A, + RTKCarrier::L1 => Carrier::L1, + } } /* @@ -65,6 +89,14 @@ pub fn cast_rtk_carrier(carrier: Carrier) -> RTKCarrier { } } +// helper in reference signal determination +fn 
rtk_reference_carrier(carrier: RTKCarrier) -> bool { + matches!( + carrier, + RTKCarrier::L1 | RTKCarrier::E1 | RTKCarrier::B1aB1c | RTKCarrier::B1I + ) +} + //use map_3d::{ecef2geodetic, rad2deg, Ellipsoid}; //pub fn tropo_components(meteo: Option<&Rinex>, t: Epoch, lat_ddeg: f64) -> Option<(f64, f64)> { @@ -183,7 +215,7 @@ pub fn ng_model(nav: &Rinex, t: Epoch) -> Option { .map(|(_, model)| NgModel { a: model.a }) } -pub fn precise_positioning(ctx: &Context, matches: &ArgMatches) -> Result<(), Error> { +pub fn precise_positioning(ctx: &Context, matches: &ArgMatches) -> Result { /* Load customized config script, or use defaults */ let cfg = match matches.get_one::("cfg") { Some(fp) => { @@ -263,9 +295,29 @@ pub fn precise_positioning(ctx: &Context, matches: &ArgMatches) -> Result<(), Er // print config to be used info!("Using {:?} method", cfg.method); + // The CGGTTS opmode (TimeOnly) is not designed + // to support lack of apriori knowledge + let apriori = if matches.get_flag("cggtts") { + if let Some((x, y, z)) = ctx.rx_ecef { + let apriori_ecef = Vector3::new(x, y, z); + Some(Position::from_ecef(apriori_ecef)) + } else { + panic!( + "--cggtts opmode cannot work without a priori position knowledge. 
+You either need to specify it manually (see --help), or use RINEX files that define +a static reference position" + ); + } + } else { + None + }; + + //let almanac = Almanac::until_2035() + // .unwrap_or_else(|e| panic!("failed to retrieve latest Almanac: {}", e)); + let solver = Solver::new( &cfg, - None, + apriori, /* state vector interpolator */ |t, sv, _order| orbit.borrow_mut().next_at(t, sv), )?; @@ -273,17 +325,19 @@ pub fn precise_positioning(ctx: &Context, matches: &ArgMatches) -> Result<(), Er if matches.get_flag("cggtts") { /* CGGTTS special opmode */ let tracks = cggtts::resolve(ctx, solver, matches)?; - cggtts_post_process(ctx, tracks, matches)?; + cggtts_post_process(&ctx, &tracks, matches)?; + let report = CggttsReport::new(&ctx, &tracks); + Ok(report.formalize()) } else { /* PPP */ let solutions = ppp::resolve(ctx, solver); if solutions.len() > 0 { - /* save solutions (graphs, reports..) */ - ppp_post_process(ctx, solutions, matches)?; + let report = PPPReport::new(&cfg, &ctx, &solutions); + Ok(report.formalize()) } else { error!("solver did not generate a single solution"); error!("verify your input data and configuration setup"); + Err(Error::NoSolutions) } } - Ok(()) } diff --git a/rinex-cli/src/positioning/orbit/mod.rs b/rinex-cli/src/positioning/orbit/mod.rs index c84fbd64b..a2c5bfc5b 100644 --- a/rinex-cli/src/positioning/orbit/mod.rs +++ b/rinex-cli/src/positioning/orbit/mod.rs @@ -1,7 +1,6 @@ use crate::cli::Context; use gnss_rtk::prelude::{Epoch, InterpolationResult, SV}; -// would you mind adapting the includes in ::sp3 ? 
mod sp3; use sp3::Orbit as SP3Orbit; diff --git a/rinex-cli/src/positioning/orbit/nav.rs b/rinex-cli/src/positioning/orbit/nav.rs index 024c28c17..827dd65fd 100644 --- a/rinex-cli/src/positioning/orbit/nav.rs +++ b/rinex-cli/src/positioning/orbit/nav.rs @@ -6,8 +6,8 @@ use gnss_rtk::prelude::{Epoch, InterpolationResult as RTKInterpolationResult, Ti use rinex::navigation::Ephemeris; pub struct Orbit<'a> { - buffer: HashMap>, - iter: Box + 'a>, + buffer: HashMap>, + iter: Box + 'a>, } impl<'a> Orbit<'a> { @@ -18,36 +18,39 @@ impl<'a> Orbit<'a> { .expect("BRDC navigation required"); Self { buffer: HashMap::with_capacity(64), - iter: Box::new(brdc.ephemeris().map(|(_toc, (_, sv, eph))| (sv, eph))), + iter: Box::new(brdc.ephemeris().map(|(toc, (_, sv, eph))| (sv, toc, eph))), } } fn feasible(&self, t: Epoch, sv: SV, sv_ts: TimeScale) -> bool { - let max_dtoe = Ephemeris::max_dtoe(sv.constellation).unwrap(); - if let Some(dataset) = self.buffer.get(&sv) { - let mut index = dataset.len(); - while index > 1 { - index -= 1; - let eph_i = &dataset[index]; - if let Some(toe) = eph_i.toe_gpst(sv_ts) { - if toe < t && (t - toe) < max_dtoe { - return true; + if sv.constellation.is_sbas() { + // TOE does not exist + self.buffer.get(&sv).is_some() + } else { + let max_dtoe = Ephemeris::max_dtoe(sv.constellation).unwrap(); + if let Some(dataset) = self.buffer.get(&sv) { + let mut index = dataset.len(); + while index > 1 { + index -= 1; + let eph_i = &dataset[index].1; + if let Some(toe) = eph_i.toe_gpst(sv_ts) { + if toe < t && (t - toe) < max_dtoe { + return true; + } } } } false - } else { - false } } pub fn next_at(&mut self, t: Epoch, sv: SV) -> Option { let sv_ts = sv.timescale()?; while !self.feasible(t, sv, sv_ts) { - if let Some((sv_i, eph_i)) = self.iter.next() { + if let Some((sv_i, toc_i, eph_i)) = self.iter.next() { if let Some(dataset) = self.buffer.get_mut(&sv_i) { - dataset.push(eph_i.clone()); + dataset.push((*toc_i, eph_i.clone())); } else { - 
self.buffer.insert(sv_i, vec![eph_i.clone()]); + self.buffer.insert(sv_i, vec![(*toc_i, eph_i.clone())]); } } else { // EOF @@ -57,13 +60,57 @@ impl<'a> Orbit<'a> { let output = match self.buffer.get(&sv) { Some(eph) => { - let eph_i = eph.iter().min_by_key(|eph_i| { - let toe_i = eph_i.toe_gpst(sv_ts).unwrap(); - t - toe_i - })?; - let (x_km, y_km, z_km) = eph_i.kepler2position(sv, t)?; - let (x, y, z) = (x_km * 1.0E3, y_km * 1.0E3, z_km * 1.0E3); - Some(RTKInterpolationResult::from_position((x, y, z))) + if sv.constellation.is_sbas() { + let (_toc_i, eph_i) = eph.iter().filter(|(toc_i, _)| *toc_i < t).min_by_key( + |(_toc_i, eph_i)| { + let toe_i = eph_i.toe_gpst(sv_ts).unwrap(); + t - toe_i + }, + )?; + + let (x, y, z) = ( + eph_i.get_orbit_f64("satPosX")? * 1.0E3, + eph_i.get_orbit_f64("satPosY")? * 1.0E3, + eph_i.get_orbit_f64("satPosZ")? * 1.0E3, + ); + // NAV RINEX null payload means missing field + if x == 0.0 || y == 0.0 || z == 0.0 { + return None; + } + //let (vx_kms, vy_kms, vz_kms) = ( + // eph_i.get_orbit_f64("velX")? * 1.0E3, + // eph_i.get_orbit_f64("velY")? * 1.0E3, + // eph_i.get_orbit_f64("velZ")? * 1.0E3, + //); + //let (ax_kms, ay_kms, az_kms) = ( + // eph_i.get_orbit_f64("accelX")? * 1.0E3, + // eph_i.get_orbit_f64("accelY")? * 1.0E3, + // eph_i.get_orbit_f64("accelZ")? 
* 1.0E3, + //); + //let (x, y, z) = ( + // x + // + vx_kms * dt, + // //+ ax_kms * dt * dt / 2.0, + // y + // + vy_kms * dt, + // //+ ay_kms * dt * dt / 2.0, + // z + // + vz_kms * dt, + // //+ az_kms * dt * dt / 2.0, + //); + Some(RTKInterpolationResult::from_position((x, y, z))) + } else { + let (_, eph_i) = eph.iter().filter(|(toc_i, _)| *toc_i < t).min_by_key( + |(_toc_i, eph_i)| { + let toe_i = eph_i.toe_gpst(sv_ts).unwrap(); + t - toe_i + }, + )?; + + let (x_km, y_km, z_km) = eph_i.kepler2position(sv, t)?; + let (x, y, z) = (x_km * 1.0E3, y_km * 1.0E3, z_km * 1.0E3); + Some(RTKInterpolationResult::from_position((x, y, z))) + } }, None => None, }; diff --git a/rinex-cli/src/positioning/ppp/mod.rs b/rinex-cli/src/positioning/ppp/mod.rs index 1c9d12526..2bbd6fdc0 100644 --- a/rinex-cli/src/positioning/ppp/mod.rs +++ b/rinex-cli/src/positioning/ppp/mod.rs @@ -13,8 +13,8 @@ use std::collections::BTreeMap; use rinex::{carrier::Carrier, observation::LliFlags, prelude::SV}; -mod post_process; -pub use post_process::{post_process, Error as PostProcessingError}; +mod report; +pub use report::Report; use rtk::prelude::{ Candidate, Epoch, InterpolationResult, IonosphereBias, PVTSolution, PhaseRange, PseudoRange, diff --git a/rinex-cli/src/positioning/ppp/post_process.rs b/rinex-cli/src/positioning/ppp/post_process.rs index 03fb17ae7..c37dcabc9 100644 --- a/rinex-cli/src/positioning/ppp/post_process.rs +++ b/rinex-cli/src/positioning/ppp/post_process.rs @@ -1,21 +1,17 @@ use std::{ collections::{BTreeMap, HashMap}, - fs::File, io::Write, }; use crate::{ cli::Context, - fops::open_with_web_browser, - graph::{build_3d_chart_epoch_label, build_chart_epoch_axis, PlotContext}, }; -use clap::ArgMatches; +use clap::ArgMatches; +use itertools::Itertools; +use rtk::prelude::{Carrier, Config, Epoch, Method, PVTSolution, SV}; use thiserror::Error; -use hifitime::Epoch; -use rtk::prelude::PVTSolution; - extern crate gpx; use gpx::{errors::GpxError, Gpx, GpxVersion, Waypoint}; @@ 
-125,15 +121,20 @@ fn html_add_apriori_position( pub fn post_process( ctx: &Context, + cfg: &Config, solutions: BTreeMap, matches: &ArgMatches, ) -> Result<(), Error> { // create a dedicated plot context let mut plot_ctx = PlotContext::new(); + let nb_solutions = solutions.len(); + let epochs = solutions.keys().copied().collect::>(); + let mut ambiguities: HashMap<(SV, Carrier), (Epoch, f64)> = HashMap::new(); + // Convert solutions to geodetic DDEG let (mut lat, mut lon) = (Vec::::new(), Vec::::new()); - for solution in solutions.values() { + for (t, solution) in &solutions { let (lat_rad, lon_rad, _) = ecef2geodetic( solution.position.x, solution.position.y, @@ -142,11 +143,12 @@ pub fn post_process( ); lat.push(rad2deg(lat_rad)); lon.push(rad2deg(lon_rad)); + for ((sv, carrier), amb) in &solution.ambiguities { + ambiguities.insert((*sv, *carrier), (*t, amb.n_1 as f64)); + } } - let nb_solutions = solutions.len(); let final_solution_ddeg = (lat[nb_solutions - 1], lon[nb_solutions - 1]); - let epochs = solutions.keys().copied().collect::>(); let lat0_rad = if let Some(apriori_ecef) = ctx.rx_ecef { ecef2geodetic( @@ -331,30 +333,61 @@ pub fn post_process( .y_axis("y2"); plot_ctx.add_trace(trace); + if cfg.method == Method::PPP { + // Ambiguities + plot_ctx.add_timedomain_plot("Signal Ambiguities", "Cycles"); + for (sv, carrier) in ambiguities.keys().sorted().unique() { + let epochs = ambiguities + .iter() + .filter_map(|((sv_i, sig_i), (t, _amb))| { + if sv_i == sv && sig_i == carrier { + Some(*t) + } else { + None + } + }) + .collect::>(); + let ambiguities = ambiguities + .iter() + .filter_map(|((sv_i, sig_i), (_t, amb))| { + if sv_i == sv && sig_i == carrier { + Some(*amb) + } else { + None + } + }) + .collect::>(); + let trace = build_chart_epoch_axis( + &format!("{}/{}", sv, carrier), + Mode::Markers, + epochs, + ambiguities, + ); + plot_ctx.add_trace(trace); + } + } + + // Extend report with _Apriori_ when it is known + // Serves as survey quality comparison 
and reference point. if let Some(apriori_ecef) = ctx.rx_ecef { html_add_apriori_position(&epochs, &solutions, apriori_ecef, &mut plot_ctx); } // render plots - let graphs = ctx.workspace.join("Solutions.html"); - let graphs = graphs.to_string_lossy().to_string(); - let mut fd = File::create(&graphs).unwrap_or_else(|_| panic!("failed to crate \"{}\"", graphs)); + let mut fd = ctx.workspace.create_file("Solutions.html"); write!(fd, "{}", plot_ctx.to_html()).expect("failed to render PVT solutions"); - info!("\"{}\" solutions generated", graphs); /* * Generate txt, GPX, KML.. */ - let txtpath = ctx.workspace.join("solutions.csv"); - let txtfile = txtpath.to_string_lossy().to_string(); - let mut fd = File::create(&txtfile)?; + let mut fd = ctx.workspace.create_file("Solutions.csv"); let mut gpx_track = gpx::Track::default(); let mut kml_track = Vec::::new(); writeln!( fd, - "Epoch, x_ecef, y_ecef, z_ecef, speed_x, speed_y, speed_z, hdop, vdop, rcvr_clock_bias, tdop" + "Epoch, x_ecef, y_ecef, z_ecef, speed_x, speed_y, speed_z, hdop, vdop, rx_clock_offset, tdop" )?; for (epoch, solution) in solutions { @@ -425,13 +458,9 @@ pub fn post_process( })); } } - info!("\"{}\" generated", txtfile); if matches.get_flag("gpx") { let prefix = ctx.name.clone(); - let gpxpath = ctx.workspace.join(format!("{}.gpx", prefix)); - let gpxfile = gpxpath.to_string_lossy().to_string(); - - let fd = File::create(&gpxfile)?; + let fd = ctx.workspace.create_file(&format!("{}.gpx", prefix)); let mut gpx = Gpx::default(); gpx.version = GpxVersion::Gpx11; @@ -440,14 +469,10 @@ pub fn post_process( gpx.tracks.push(gpx_track); gpx::write(&gpx, fd)?; - info!("{} gpx track generated", gpxfile); } if matches.get_flag("kml") { let prefix = ctx.name.clone(); - let kmlpath = ctx.workspace.join(format!("{}.kml", prefix)); - let kmlfile = kmlpath.to_string_lossy().to_string(); - - let mut fd = File::create(&kmlfile)?; + let mut fd = ctx.workspace.create_file(&format!("{}.kml", prefix)); let kmldoc = 
KmlDocument { version: KmlVersion::V23, @@ -466,14 +491,6 @@ pub fn post_process( }; let mut writer = KmlWriter::from_writer(&mut fd); writer.write(&Kml::KmlDocument(kmldoc))?; - info!("{} kml track generated", kmlfile); - } - - if !ctx.quiet { - let graphs = ctx.workspace.join("Solutions.html"); - let graphs = graphs.to_string_lossy().to_string(); - open_with_web_browser(&graphs); } - Ok(()) } diff --git a/rinex-cli/src/positioning/ppp/report.rs b/rinex-cli/src/positioning/ppp/report.rs new file mode 100644 index 000000000..02dffc581 --- /dev/null +++ b/rinex-cli/src/positioning/ppp/report.rs @@ -0,0 +1,870 @@ +use crate::cli::Context; +use std::collections::BTreeMap; + +use rtk::prelude::{ + Config as NaviConfig, Duration, Epoch, Filter as NaviFilter, Method as NaviMethod, PVTSolution, + TimeScale, SV, +}; + +use rinex_qc::{ + plot::{MapboxStyle, NamedColor}, + prelude::{html, MarkerSymbol, Markup, Mode, Plot, QcExtraPage, Render}, +}; + +use itertools::Itertools; + +use map_3d::{ + //ecef2enu, + ecef2geodetic, + geodetic2enu, + Ellipsoid, +}; + +struct ReportTab {} + +impl Render for ReportTab { + fn render(&self) -> Markup { + html! 
{ + a id="menu:ppp" { + span class="icon" { + i class="fa-solid fa-location-crosshairs" {} + } + "PPP Solutions" + } + } + } +} + +enum Technique { + GeodeticSurvey, +} + +impl std::fmt::Display for Technique { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::GeodeticSurvey => write!(f, "Geodetic Survey"), + } + } +} + +impl Technique { + fn tooltip(&self) -> String { + match self { + Self::GeodeticSurvey => { + "Static Geodetic survey (fixed point coordinates evaluation)".to_string() + }, + } + } +} + +struct Summary { + technique: Technique, + method: NaviMethod, + filter: NaviFilter, + orbit: String, + first_epoch: Epoch, + last_epoch: Epoch, + duration: Duration, + satellites: Vec, + timescale: TimeScale, + final_geo: (f64, f64, f64), + final_err_m: (f64, f64, f64), +} + +impl Render for Summary { + fn render(&self) -> Markup { + html! { + div class="table-container" { + table class="table is-bordered" { + tbody { + tr { + th class="is-info" { + button aria-label=(self.technique.tooltip()) data-balloon-pos="right" { + (self.technique.to_string()) + } + } + } + tr { + th class="is-info" { + "Navigation method" + } + td { + (self.method.to_string()) + } + } + tr { + th class="is-info" { + } + td { + (self.orbit) + } + } + tr { + th class="is-info" { + button aria-label="Satellites that contributed to the solutions" data-balloon-pos="right" { + "Satellites" + } + } + td { + (self.satellites.iter().join(" ,")) + } + } + tr { + th class="is-info" { + "First solution" + } + td { + (self.first_epoch.to_string()) + } + } + tr { + th class="is-info" { + "Last solution" + } + td { + (self.last_epoch.to_string()) + } + } + tr { + th class="is-info" { + "Duration" + } + td { + (self.duration.to_string()) + } + } + tr { + th class="is-info" { + "Timescale" + } + td { + (self.timescale.to_string()) + } + } + tr { + th class="is-info" { + "Navigation Filter" + } + td { + (self.filter.to_string()) + } + } + tr { + th class="is-info" { + 
"Final" + } + td { + table class="table is-bordered" { + tr { + th class="is-info" { + "WGS84" + } + td { + (format!("x={:.5}°", self.final_geo.0.to_degrees())) + } + td { + (format!("x={:.5}°", self.final_geo.1.to_degrees())) + } + td { + (format!("alt={:.3}m", self.final_geo.2)) + } + } + tr { + th class="is-info" { + "Error (m)" + } + td { + (format!("x={:.3E}", self.final_err_m.0)) + } + td { + (format!("y={:.3E}", self.final_err_m.1)) + } + td { + (format!("z={:.3E}", self.final_err_m.2)) + } + } + } + } + } + } + } + } + } + } +} + +impl Summary { + fn new( + cfg: &NaviConfig, + ctx: &Context, + solutions: &BTreeMap, + rx_ecef: (f64, f64, f64), + ) -> Self { + let (x0, y0, z0) = rx_ecef; + //let (lat0_rad, lon0_rad, alt0_m) = ecef2geodetic(x0, y0, z0, Ellipsoid::WGS84); + + let mut timescale = TimeScale::default(); + let (mut first_epoch, mut last_epoch) = (Epoch::default(), Epoch::default()); + let mut final_err_m = (0.0_f64, 0.0_f64, 0.0_f64); + let mut final_geo = (0.0_f64, 0.0_f64, 0.0_f64); + + let satellites = solutions + .values() + .map(|sol| sol.sv()) + .fold(vec![], |mut list, svnn| { + for sv in svnn { + list.push(sv); + } + list + }) + .into_iter() + .unique() + .sorted() + .collect::>(); + + for (index, (t, sol)) in solutions.iter().enumerate() { + if index == 0 { + first_epoch = *t; + } + let (err_x, err_y, err_z) = ( + sol.position.x - x0, + sol.position.y - y0, + sol.position.z - z0, + ); + final_err_m = (err_x, err_y, err_z); + final_geo = ecef2geodetic( + sol.position.x, + sol.position.y, + sol.position.z, + Ellipsoid::WGS84, + ); + + last_epoch = *t; + timescale = sol.timescale; + } + Self { + first_epoch, + last_epoch, + timescale, + satellites, + final_geo, + final_err_m, + orbit: { + if ctx.data.has_sp3() { + format!("Interpolation X{}", cfg.interp_order) + } else { + "Kepler".to_string() + } + }, + method: cfg.method, + filter: cfg.solver.filter, + duration: last_epoch - first_epoch, + technique: Technique::GeodeticSurvey, + } + } +} + 
+struct ReportContent { + /// summary + summary: Summary, + /// sv_plot + sv_plot: Plot, + /// map_proj + map_proj: Plot, + /// clk_plot + clk_plot: Plot, + /// neu_plot + neu_plot: Plot, + /// coords_err + coords_err_plot: Plot, + /// 3d_plot + coords_err3d_plot: Plot, + /// velocity_plot + vel_plot: Plot, + /// DOP + dop_plot: Plot, + /// TDOP + tdop_plot: Plot, + // /// NAVI + // navi_plot: Plot, + /// tropod + tropod_plot: Plot, + /// ionod + ionod_plot: Plot, +} + +impl ReportContent { + pub fn new(cfg: &NaviConfig, ctx: &Context, solutions: &BTreeMap) -> Self { + let nb_solutions = solutions.len(); + let epochs = solutions.keys().cloned().collect::>(); + + let (x0_ecef, y0_ecef, z0_ecef) = ctx.rx_ecef.unwrap_or_default(); + let (lat0_rad, lon0_rad, alt0_m) = + ecef2geodetic(x0_ecef, y0_ecef, z0_ecef, Ellipsoid::WGS84); + let (lat0_ddeg, lon0_ddeg) = (lat0_rad.to_degrees(), lon0_rad.to_degrees()); + + let summary = Summary::new(cfg, ctx, solutions, (x0_ecef, y0_ecef, z0_ecef)); + + Self { + map_proj: { + let mut map_proj = Plot::world_map( + "map_proj", + "Map Projection", + MapboxStyle::OpenStreetMap, + (lat0_ddeg, lon0_ddeg), + 18, + true, + ); + let apriori = Plot::mapbox( + vec![lat0_ddeg], + vec![lon0_ddeg], + "apriori", + MarkerSymbol::Circle, + NamedColor::Red, + 1.0, + true, + ); + map_proj.add_trace(apriori); + let mut prev_pct = 0; + for (index, (_, sol_i)) in solutions.iter().enumerate() { + let pct = index * 100 / nb_solutions; + if pct % 10 == 0 && index > 0 && pct != prev_pct || index == nb_solutions - 1 { + let (name, visible) = if index == nb_solutions - 1 { + ("FINAL".to_string(), true) + } else { + (format!("Solver: {:02}%", pct), false) + }; + let (lat_rad, lon_rad, _) = ecef2geodetic( + sol_i.position.x, + sol_i.position.y, + sol_i.position.z, + Ellipsoid::WGS84, + ); + let (lat_ddeg, lon_ddeg) = (lat_rad.to_degrees(), lon_rad.to_degrees()); + let scatter = Plot::mapbox( + vec![lat_ddeg], + vec![lon_ddeg], + &name, + MarkerSymbol::Circle, + 
NamedColor::Black, + 1.0, + visible, + ); + map_proj.add_trace(scatter); + } + prev_pct = pct; + } + map_proj + }, + sv_plot: { + let mut plot = Plot::timedomain_plot("sv_plot", "SV ID#", "PRN #", true); + for sv in summary.satellites.iter() { + let epochs = solutions + .iter() + .filter_map(|(t, sol)| { + if sol.sv.keys().contains(sv) { + Some(*t) + } else { + None + } + }) + .collect::>(); + let prn = epochs.iter().map(|_| sv.prn).collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + prn, + ); + plot.add_trace(trace); + } + plot + }, + neu_plot: { + let mut plot = Plot::timedomain_plot( + "neu_plot", + "North / East / Up Coordinates", + "Coordinates [m]", + true, + ); + let neu = solutions + .iter() + .map(|(_, sol)| { + let (lat_rad, lon_rad, alt_m) = ecef2geodetic( + sol.position.x, + sol.position.y, + sol.position.z, + Ellipsoid::WGS84, + ); + let enu = geodetic2enu( + lat_rad, + lon_rad, + alt_m, + lat0_rad, + lon0_rad, + alt0_m, + Ellipsoid::WGS84, + ); + (enu.1.to_degrees(), enu.0.to_degrees(), enu.2) + }) + .collect::>(); + let north = neu.iter().map(|neu| neu.0).collect::>(); + let east = neu.iter().map(|neu| neu.1).collect::>(); + let up = neu.iter().map(|neu| neu.2).collect::>(); + let trace = Plot::timedomain_chart( + "north", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + north, + ); + plot.add_trace(trace); + let trace = Plot::timedomain_chart( + "east", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + east, + ); + plot.add_trace(trace); + let trace = + Plot::timedomain_chart("upt", Mode::Markers, MarkerSymbol::Cross, &epochs, up); + plot.add_trace(trace); + plot + }, + vel_plot: { + let mut plot = + Plot::timedomain_plot("vel_plot", "Velocity", "Velocity [m/s]", true); + let x = solutions + .iter() + .map(|(_, sol)| sol.velocity.x) + .collect::>(); + let y = solutions + .iter() + .map(|(_, sol)| sol.velocity.y) + .collect::>(); + let z = solutions + .iter() + .map(|(_, 
sol)| sol.velocity.z) + .collect::>(); + let trace = + Plot::timedomain_chart("vel_x", Mode::Markers, MarkerSymbol::Cross, &epochs, x); + plot.add_trace(trace); + let trace = + Plot::timedomain_chart("vel_y", Mode::Markers, MarkerSymbol::Cross, &epochs, y); + plot.add_trace(trace); + let trace = + Plot::timedomain_chart("vel_z", Mode::Markers, MarkerSymbol::Cross, &epochs, z); + plot.add_trace(trace); + plot + }, + tropod_plot: { + let mut plot = + Plot::timedomain_plot("tropo", "Troposphere Bias", "Error [m]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|(t, sol)| { + if sol.sv.keys().contains(sv) { + Some(*t) + } else { + None + } + }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|(_, sol)| { + if let Some(value) = + sol.sv.iter().filter(|(s, _)| *s == sv).reduce(|k, _| k) + { + value.1.tropo_bias.value() + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + ionod_plot: { + let mut plot = Plot::timedomain_plot("iono", "Ionosphere Bias", "Error [m]", true); + for sv in summary.satellites.iter() { + let x = solutions + .iter() + .filter_map(|(t, sol)| { + if sol.sv.keys().contains(sv) { + Some(*t) + } else { + None + } + }) + .collect::>(); + let y = solutions + .iter() + .filter_map(|(_, sol)| { + if let Some(value) = + sol.sv.iter().filter(|(s, _)| *s == sv).reduce(|k, _| k) + { + value.1.iono_bias.value() + } else { + None + } + }) + .collect::>(); + let trace = Plot::timedomain_chart( + &sv.to_string(), + Mode::Markers, + MarkerSymbol::Cross, + &x, + y, + ); + plot.add_trace(trace); + } + plot + }, + tdop_plot: { + let mut plot = Plot::timedomain_plot( + "tdop", + "Temporal dillution of precision", + "Error [m]", + true, + ); + let tdop = solutions + .iter() + .map(|(_, sol)| sol.tdop) + .collect::>(); + + let trace = Plot::timedomain_chart( + "tdop", + 
Mode::Markers, + MarkerSymbol::Cross, + &epochs, + tdop, + ); + plot.add_trace(trace); + plot + }, + dop_plot: { + let mut plot = + Plot::timedomain_plot("dop", "Dillution of Precision", "Error [m]", true); + + let gdop = solutions + .iter() + .map(|(_, sol)| sol.gdop) + .collect::>(); + + let trace = Plot::timedomain_chart( + "gdop", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + gdop, + ); + plot.add_trace(trace); + + let vdop = solutions + .iter() + .map(|(_, sol)| sol.vdop(lat0_rad, lon0_rad)) + .collect::>(); + + let trace = Plot::timedomain_chart( + "vdop", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + vdop, + ); + plot.add_trace(trace); + + let hdop = solutions + .iter() + .map(|(_, sol)| sol.hdop(lat0_rad, lon0_rad)) + .collect::>(); + + let trace = Plot::timedomain_chart( + "hdop", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + hdop, + ); + plot.add_trace(trace); + plot + }, + clk_plot: { + let mut plot = + Plot::timedomain_plot("clk_offset", "Clock Offset", "Offset [s]", true); + + let dt = solutions + .iter() + .map(|(_, sol)| sol.dt.to_seconds()) + .collect::>(); + + let trace = Plot::timedomain_chart( + "offset", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + dt, + ); + plot.add_trace(trace); + plot + }, + coords_err_plot: { + let mut plot = Plot::timedomain_plot("xy_plot", "X/Y/Z Error", "Error [m]", true); + let trace = Plot::timedomain_chart( + "x err", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + solutions + .values() + .map(|sol| sol.position.x - x0_ecef) + .collect(), + ); + plot.add_trace(trace); + let trace = Plot::timedomain_chart( + "y err", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + solutions + .values() + .map(|sol| sol.position.y - y0_ecef) + .collect(), + ); + plot.add_trace(trace); + let trace = Plot::timedomain_chart( + "z err", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + solutions + .values() + .map(|sol| sol.position.z - z0_ecef) + .collect(), + ); + plot.add_trace(trace); + plot + 
}, + coords_err3d_plot: { + let mut plot = Plot::plot_3d( + "3d_sphere", + "3D errors", + "X error [m]", + "Y Error [m]", + "Z Error [m]", + true, + ); + let trace = Plot::chart_3d( + "Error", + Mode::Markers, + MarkerSymbol::Cross, + &epochs, + solutions + .values() + .map(|sol| sol.position.x - x0_ecef) + .collect(), + solutions + .values() + .map(|sol| sol.position.y - y0_ecef) + .collect(), + solutions + .values() + .map(|sol| sol.position.z - z0_ecef) + .collect(), + ); + plot.add_trace(trace); + plot + }, + //navi_plot: { + // let plot = Plot::timedomain_plot("navi_plot", "NAVI Plot", "Error [m]", true); + // plot + //}, + summary, + } + } +} + +impl Render for ReportContent { + fn render(&self) -> Markup { + html! { + div class="table-container" { + table class="table is-bordered" { + tbody { + tr { + th class="is-info" { + "Summary" + } + td { + (self.summary.render()) + } + } + tr { + th class="is-info" { + "Map Proj" + } + td { + (self.map_proj.render()) + } + } + //tr { + // th class="is-info" { + // "NAVI Plot" + // } + // td { + // (self.navi_plot.render()) + // } + //} + tr { + th class="is-info" { + button aria-label="SV Contribution over time" data-balloon-pos="right" { + "SV Plot" + } + } + td { + (self.sv_plot.render()) + } + } + tr { + th class="is-info" { + button aria-label="Receiver Clock Offset with respected to Timescale" data-balloon-pos="right" { + "Clock offset" + } + } + td { + (self.clk_plot.render()) + } + } + tr { + th class="is-info" { + button aria-label="Absolute North / East and Altitude coordinates" data-balloon-pos="right" { + "N/E/U coordinates" + } + } + td { + (self.neu_plot.render()) + } + } + tr { + th class="is-info" { + button aria-label="3D errors (surveying applications only)" data-balloon-pos="right" { + "Errors" + } + } + td { + table class="table is-bordered" { + tr { + th class="is-info" { + "Coordinates" + } + td { + (self.coords_err_plot.render()) + } + } + tr { + th class="is-info" { + "3D" + } + td { + 
(self.coords_err3d_plot.render()) + } + } + } + } + } + tr { + th class="is-info" { + "Velocity" + } + td { + (self.vel_plot.render()) + } + } + tr { + th class="is-info" { + "DOP" + } + td { + div class="table-container" { + table class="table is-bordered" { + tr { + th class="is-info" { + "Geometric DOP" + } + td { + (self.dop_plot.render()) + } + } + tr { + th class="is-info" { + "Temporal DOP" + } + td { + (self.tdop_plot.render()) + } + } + button aria-label="Geometric Dillution of Precision" data-balloon-pos="right" { + } + } + } + } + } + tr { + th class="is-info" { + button aria-label="Error due to Ionospheric delay" data-balloon-pos="right" { + "Ionosphere" + } + } + td { + (self.ionod_plot.render()) + } + } + tr { + th class="is-info" { + button aria-label="Error due to Tropospheric delay" data-balloon-pos="right" { + "Troposphere" + } + } + td { + (self.tropod_plot.render()) + } + } + } + } + } + } + } +} + +/// Solutions report +pub struct Report { + tab: ReportTab, + content: ReportContent, +} + +impl Report { + pub fn formalize(self) -> QcExtraPage { + QcExtraPage { + tab: Box::new(self.tab), + html_id: "ppp".to_string(), + content: Box::new(self.content), + } + } + pub fn new(cfg: &NaviConfig, ctx: &Context, solutions: &BTreeMap) -> Self { + Self { + tab: ReportTab {}, + content: ReportContent::new(cfg, ctx, solutions), + } + } +} diff --git a/rinex-cli/src/preprocessing.rs b/rinex-cli/src/preprocessing.rs index 9c9acfa26..bc2055406 100644 --- a/rinex-cli/src/preprocessing.rs +++ b/rinex-cli/src/preprocessing.rs @@ -1,600 +1,63 @@ -use itertools::Itertools; use log::error; use std::str::FromStr; use crate::Cli; -use rinex::prelude::{Epoch, RnxContext}; -use rinex::preprocessing::*; +use rinex_qc::prelude::{Filter, QcContext}; -use sp3::prelude::{DataType as SP3DataType, SP3}; - -/* - * SP3 toolkit does not implement the Processing Traits - * since they're currently defined in RINEX.. 
- * Work around this by implementing the ""typical"" preprocessing ops - * manually here. This allows to shrink the SP3 context, which - * is quite heavy, and make future Epoch iterations much quicker - */ -fn sp3_filter_mut(filter: Filter, sp3: &mut SP3) { - match filter { - Filter::Mask(mask) => sp3_mask_mut(mask, sp3), - Filter::Decimation(decim) => sp3_decimate_mut(decim, sp3), - _ => {}, // does not apply - } -} - -fn sp3_mask_mut(mask: MaskFilter, sp3: &mut SP3) { - match mask.operand { - MaskOperand::Equals => match mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t == epoch); - sp3.clock_rate.retain(|t, _| *t == epoch); - sp3.position.retain(|t, _| *t == epoch); - sp3.velocities.retain(|t, _| *t == epoch); - }, - TargetItem::ConstellationItem(constells) => { - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| constells.contains(&sv.constellation)); - !data.is_empty() - }); - }, - TargetItem::SvItem(svs) => { - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| svs.contains(sv)); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| svs.contains(sv)); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| svs.contains(sv)); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| svs.contains(sv)); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - MaskOperand::NotEquals => match mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t != epoch); - sp3.clock_rate.retain(|t, _| *t != epoch); - sp3.position.retain(|t, _| *t != epoch); - 
sp3.velocities.retain(|t, _| *t != epoch); - }, - TargetItem::ConstellationItem(constells) => { - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| !constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| !constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| !constells.contains(&sv.constellation)); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| !constells.contains(&sv.constellation)); - !data.is_empty() - }); - }, - TargetItem::SvItem(svs) => { - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| !svs.contains(sv)); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| !svs.contains(sv)); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| !svs.contains(sv)); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| !svs.contains(sv)); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - MaskOperand::GreaterEquals => match mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t >= epoch); - sp3.clock_rate.retain(|t, _| *t >= epoch); - sp3.position.retain(|t, _| *t >= epoch); - sp3.velocities.retain(|t, _| *t >= epoch); - }, - TargetItem::SvItem(svs) => { - let constells = svs - .iter() - .map(|sv| sv.constellation) - .unique() - .collect::>(); - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - >= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - >= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.position.retain(|_t, 
data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - >= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - >= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - MaskOperand::GreaterThan => match mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t > epoch); - sp3.clock_rate.retain(|t, _| *t > epoch); - sp3.position.retain(|t, _| *t > epoch); - sp3.position.retain(|t, _| *t > epoch); - }, - TargetItem::SvItem(svs) => { - let constells = svs - .iter() - .map(|sv| sv.constellation) - .unique() - .collect::>(); - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - > svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - > svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - > svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - > svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - MaskOperand::LowerThan => match 
mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t < epoch); - sp3.clock_rate.retain(|t, _| *t < epoch); - sp3.position.retain(|t, _| *t < epoch); - sp3.velocities.retain(|t, _| *t < epoch); - }, - TargetItem::SvItem(svs) => { - let constells = svs - .iter() - .map(|sv| sv.constellation) - .unique() - .collect::>(); - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - < svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - < svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.position.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - < svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.velocities.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - < svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - MaskOperand::LowerEquals => match mask.item { - TargetItem::EpochItem(epoch) => { - sp3.clock.retain(|t, _| *t <= epoch); - sp3.clock_rate.retain(|t, _| *t <= epoch); - sp3.position.retain(|t, _| *t <= epoch); - sp3.velocities.retain(|t, _| *t <= epoch); - }, - TargetItem::SvItem(svs) => { - let constells = svs - .iter() - .map(|sv| sv.constellation) - .unique() - .collect::>(); - sp3.clock.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - <= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - 
.prn - }); - !data.is_empty() - }); - sp3.clock_rate.retain(|_t, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - <= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.position.retain(|_, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - <= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - sp3.velocities.retain(|_, data| { - data.retain(|sv, _| { - constells.contains(&sv.constellation) - && sv.prn - <= svs - .iter() - .filter(|svs| svs.constellation == sv.constellation) - .reduce(|k, _| k) - .unwrap() - .prn - }); - !data.is_empty() - }); - }, - _ => {}, // does not apply - }, - } -} - -fn sp3_decimate_mut(decim: DecimationFilter, sp3: &mut SP3) { - match decim.dtype { - DecimationType::DecimByRatio(r) => { - let mut i = 0; - sp3.clock.retain(|_, _| { - let retained = (i % r) == 0; - i += 1; - retained - }); - let mut i = 0; - sp3.clock_rate.retain(|_, _| { - let retained = (i % r) == 0; - i += 1; - retained - }); - let mut i = 0; - sp3.position.retain(|_, _| { - let retained = (i % r) == 0; - i += 1; - retained - }); - let mut i = 0; - sp3.velocities.retain(|_, _| { - let retained = (i % r) == 0; - i += 1; - retained - }); - }, - DecimationType::DecimByInterval(interval) => { - let mut last_retained = Option::::None; - sp3.clock.retain(|t, _| { - if let Some(last) = last_retained { - let dt = *t - last; - if dt >= interval { - last_retained = Some(*t); - true - } else { - false - } - } else { - last_retained = Some(*t); - true // always retain 1st Epoch - } - }); - let mut last_retained = Option::::None; - sp3.clock_rate.retain(|t, _| { - if let Some(last) = last_retained { - let dt = *t - last; - if dt >= interval { - last_retained = Some(*t); - true - } else { - false - } - } else { - last_retained = 
Some(*t); - true // always retain 1st Epoch - } - }); - let mut last_retained = Option::::None; - sp3.position.retain(|t, _| { - if let Some(last) = last_retained { - let dt = *t - last; - if dt >= interval { - last_retained = Some(*t); - true - } else { - false - } - } else { - last_retained = Some(*t); - true // always retain 1st Epoch - } - }); - let mut last_retained = Option::::None; - sp3.velocities.retain(|t, _| { - if let Some(last) = last_retained { - let dt = *t - last; - if dt >= interval { - last_retained = Some(*t); - true - } else { - false - } - } else { - last_retained = Some(*t); - true // always retain 1st Epoch - } - }); - }, - } -} - -/* - * Once SP3 payload has been reworked, - * we rework its header fields to the remaining payload. - * This keeps file header consistent and allows for example - * to generate a new SP3 that is consistent and correct. - */ -pub fn sp3_rework_mut(sp3: &mut SP3) { - let svs = sp3 - .sv_position() - .map(|(_, sv, _)| sv) - .unique() - .collect::>(); - sp3.sv.retain(|sv| svs.contains(sv)); - - let epochs = sp3 - .sv_position() - .map(|(t, _, _)| t) - .unique() - .collect::>(); - sp3.epoch.retain(|t| epochs.contains(t)); - - if sp3.data_type == SP3DataType::Velocity && sp3.sv_velocities().count() == 0 { - // dropped all Velocity information - sp3.data_type = SP3DataType::Position; - } -} - -pub fn preprocess(ctx: &mut RnxContext, cli: &Cli) { +pub fn preprocess(ctx: &mut QcContext, cli: &Cli) { // GNSS filters let mut gnss_filters = Vec::<&str>::new(); - + /* + * Special teqc like filters + * Design one filter per specs + */ if cli.gps_filter() { - gnss_filters.push("!=gps"); - trace!("applying -G filter.."); + gnss_filters.push("!=GPS"); + info!("GPS filtered out"); } if cli.glo_filter() { - gnss_filters.push("!=glo"); - trace!("applying -R filter.."); + gnss_filters.push("!=GLO"); + info!("Glonass filtered out"); } if cli.gal_filter() { - gnss_filters.push("!=gal"); - trace!("applying -E filter.."); + 
gnss_filters.push("!=Gal"); + info!("Galileo filtered out"); } if cli.bds_filter() { - gnss_filters.push("!=bds"); - trace!("applying -C filter.."); + gnss_filters.push("!=BDS"); + info!("BeiDou filtered out"); + } + if cli.bds_geo_filter() { + gnss_filters.push(">C05; Result<(), Error> { - let cfg = match matches.get_one::("cfg") { - Some(fp) => { - let content = read_to_string(fp) - .unwrap_or_else(|_| panic!("failed to read QC configuration: permission denied")); - let cfg = serde_json::from_str(&content) - .unwrap_or_else(|_| panic!("failed to parse QC configuration: invalid content")); - info!("using custom QC configuration: {:#?}", cfg); - cfg - }, - None => { - let cfg = QcOpts::default(); - info!("using default QC configuration: {:#?}", cfg); - cfg - }, - }; - - /* - * print more infos - */ - info!("Classification method : {:?}", cfg.classification); - info!("Reference position : {:?}", cfg.ground_position); - info!("Minimal SNR : {:?}", cfg.min_snr_db); - info!("Elevation mask : {:?}", cfg.elev_mask); - info!("Sampling gap tolerance: {:?}", cfg.gap_tolerance); - - let html = QcReport::html(&ctx.data, cfg); - let report_path = ctx.workspace.join("QC.html"); - - let mut fd = File::create(&report_path).map_err(|_| Error::QcReportCreationError)?; - - write!(fd, "{}", html).expect("failed to render HTML report"); - - info!("QC report \"{}\" has been generated", report_path.display()); - - if !ctx.quiet { - let fullpath = report_path.to_string_lossy().to_string(); - open_with_web_browser(&fullpath); - } - Ok(()) -} diff --git a/rinex-cli/src/report/mod.rs b/rinex-cli/src/report/mod.rs new file mode 100644 index 000000000..88fd70df3 --- /dev/null +++ b/rinex-cli/src/report/mod.rs @@ -0,0 +1,140 @@ +//! 
Analysis report +use log::{error, info, warn}; + +use std::{ + fs::{read_to_string, File}, + io::Write, + //io::Read, +}; + +use crate::cli::{Cli, Context}; + +use rinex_qc::prelude::{QcConfig, QcExtraPage, QcReport, Render}; + +/// Quality check report +pub enum Report { + /// New report generation/synthesis + Pending(QcReport), + /// Report iteration (preserved past run) + Iteration(String), +} + +impl Report { + /// Create a new report + pub fn new(cli: &Cli, ctx: &Context, cfg: QcConfig) -> Self { + let report_path = ctx.workspace.root.join("index.html"); + let hash_path = ctx.workspace.root.join(".hash"); + if !cli.force_report_synthesis() && report_path.exists() && hash_path.exists() { + // determine whether we can preserve previous report or not + if let Ok(content) = read_to_string(hash_path) { + if let Ok(prev_hash) = content.parse::() { + if prev_hash == cli.hash() { + if let Ok(content) = read_to_string(report_path) { + info!("preserving previous report"); + Self::Iteration(content) + } else { + info!("generating new report"); + Self::Pending(QcReport::new(&ctx.data, cfg)) + } + } else { + info!("generating new report"); + Self::Pending(QcReport::new(&ctx.data, cfg)) + } + } else { + error!("failed to parse hashed value"); + warn!("forcing new report synthesis"); + Self::Pending(QcReport::new(&ctx.data, cfg)) + } + } else { + // new report + info!("report synthesis"); + Self::Pending(QcReport::new(&ctx.data, cfg)) + } + } else { + // new report + info!("report synthesis"); + Self::Pending(QcReport::new(&ctx.data, cfg)) + } + } + /// Customize report with extra page + pub fn customize(&mut self, page: QcExtraPage) { + match self { + Self::Pending(report) => report.add_chapter(page), + Self::Iteration(ref mut content) => { + // Render new html content + let new_tab = page.tab.render().into_string(); + let new_content = page.content.render().into_string(); + if content.find(&new_tab).is_none() { + // tab creation + let pattern = "
  • {}
  • ", new_tab,)); + } + } + let pattern = format!( + "
    ", + page.html_id + ); + if let Some(start) = content.find(&pattern) { + // overwrite with new content + let end_pat = format!( + "
    ", + page.html_id + ); + if let Some(end) = content.find(&end_pat) { + content.replace_range( + start..=end + end_pat.len(), + &format!("{}{}{}", pattern, new_content, end_pat), + ); + } + } else { + // first run + for known_chapter in ["ppp", "cggtts"] { + let pattern = format!( + "
    ", + known_chapter + ); + let intro = format!( + "
    ", + page.html_id + ); + let conclusion = format!( + "
    ", + page.html_id + ); + if let Some(start) = content.rfind(&pattern) { + content.insert_str( + start + pattern.len(), + &format!("{}{}{}", intro, new_content, conclusion), + ); + break; + } + } + } + }, + } + } + /// Render as html + fn render(&self) -> String { + match self { + Self::Pending(report) => report.render().into_string(), + Self::Iteration(report) => report.to_string(), + } + } + /// Generate (dump) report + pub fn generate(&self, cli: &Cli, ctx: &Context) -> std::io::Result<()> { + let html = self.render(); + let path = ctx.workspace.root.join("index.html"); + + let mut fd = File::create(&path)?; + write!(fd, "{}", html)?; + info!("{} report generated", path.display()); + + // store past settings + if let Ok(mut fd) = File::create(ctx.workspace.root.join(".hash")) { + let _ = write!(fd, "{}", cli.hash()); + } + + Ok(()) + } +} diff --git a/rinex-qc/Cargo.toml b/rinex-qc/Cargo.toml index 061266e02..17d2c23da 100644 --- a/rinex-qc/Cargo.toml +++ b/rinex-qc/Cargo.toml @@ -3,10 +3,10 @@ name = "rinex-qc" version = "0.1.14" license = "MIT OR Apache-2.0" authors = ["Guillaume W. Bres "] -description = "RINEX data analysis" +description = "RINEX and more broadly, GNSS data processing" homepage = "https://github.com/georust/rinex" repository = "https://github.com/georust/rinex" -keywords = ["rinex", "timing", "gps", "glonass", "galileo"] +keywords = ["rinex", "timing", "gnss", "gps", "glonass", "galileo"] categories = ["science", "science::geo", "parsing"] edition = "2021" rust-version = "1.64" @@ -14,24 +14,42 @@ rust-version = "1.64" [features] default = [] # no features by default +# Unlock support of high precision SP3 files. +# When targetting highest precision analysis and solutions, like in PPP, +# SP3 files are mandatory. When deactivated, we simply cannot load +# such files into a context, only RINEX post processing is possible. 
+sp3 = ["dep:sp3"] + +# Unlock graphical analysis (plots rendering), otherwise reports are solely text based +# plot = [ +# "dep:plotly", +# ] + [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docrs", "--generate-link-to-definition"] [dependencies] +maud = "0.26" +thiserror = "1" strum = "0.26" -statrs = "0.16" -horrorshow = "0.8" itertools = "0.13.0" strum_macros = "0.26" +serde = { version = "1.0", optional = true, default-features = false, features = ["derive"] } + +statrs = { version = "0.16", optional = true } + +# plotly = { version = "0.9", optional = true } +# plotly = { path = "../../plotly-rs/plotly", optional = true } +plotly = { git = "https://github.com/gwbres/plotly", branch = "scattergeo"} + hifitime = "4.0.0-alpha" +gnss-rs = { version = "2.2.0", features = ["serde"] } -sp3 = { path = "../sp3", version = "=1.0.8", features = ["serde"] } -rinex-qc-traits = { path = "../qc-traits", version = "=0.1.1" } rinex = { path = "../rinex", version = "=0.16.1", features = ["full"] } -serde = { version = "1.0", optional = true, default-features = false, features = ["derive"] } +rinex-qc-traits = { path = "../qc-traits", version = "=0.1.1", features = ["processing"] } -gnss-rs = { version = "2.2.0", features = ["serde"] } +sp3 = { path = "../sp3", version = "=1.0.8", features = ["qc", "processing", "serde"], optional = true } [dev-dependencies] serde_json = "1" diff --git a/rinex-qc/README.md b/rinex-qc/README.md new file mode 100644 index 000000000..292beeddd --- /dev/null +++ b/rinex-qc/README.md @@ -0,0 +1,121 @@ +RINEX / GNSS QC +=============== + +The QC library was created to analyze complex GNSS datasets. +It currently accepts RINEX (all supported formats) and/or SP3 files, which are the +basic requirements to precise navigation. + +The Qc library generates a `QcReport` (also refered to as output product), from the input context. 
+The report content depends on the provided combination of input files (also refered +to as, input products). +QC standing for Quality Control, as it is a widely spread term in preprocessing +applications, the QC may apply to navigation applications, atmosphere analysis +and timing applications. + +The `QcReport` comprises one tab per input product (dedicated tab), +may have tabs depending on the operations that the input context allows. +For example SP3 and/or BRDC RINEX will enable the `Orbit Projection tab`. + +The report is render in HTML and that is currently the only format we can render. + +`QcReport` allows customization with extra chapters, so you can append +as many chapters as you need, depending on your requirements and capabilities, +as long as you can implement the rendition Trait. + +## Create features + +- activate the `sp3` feature to support SP3 format +- activate the `plot` feature for your reports to integrate graphs analysis +- activate the `flate2` feature to directly load Gzip compressed input products + +## RINEX analysis + +Parse one or more RINEX files and render an analysis. +When built with `flate2` support, gzip compressed files can be naturally loaded: + +```rust +use rinex_qc::prelude::*; + +// Build a setup +let mut ctx = QcContext::default(); +let cfg = QcConfig::default(); // basic + +let path = Path::new( + "../test_resources/NAV/V3/ESBC00DNK_R_20201770000_01D_MN.rnx.gz" +); +let rinex = Rinex::from_path(&path) + .unwrap(); +ctx.load_rinex(&path, rinex); + +// Generate a report +let report = QcReport::new(&ctx, cfg); +let _ = report.render().into_string(); +``` + +## SP3 analysis + +The QcReport works on any file combination and any supported input product. +The resulting report solely depends on the provided product combination. 
+ +Once again, gzip compressed files are naturally supported when built with `flate2` feature: + +```rust +use rinex_qc::prelude::*; + +// Build a setup +let mut ctx = QcContext::default(); +let cfg = QcConfig::default(); // basic + +let path = Path::new("../test_resources/SP3/GRG0MGXFIN_20201770000_01D_15M_ORB.SP3.gz"); +let sp3 = SP3::from_path(&path) + .unwrap(); + +ctx.load_sp3(&path, sp3); + +// Generate a report +let report = QcReport::new(&ctx, cfg); +let _ = report.render().into_string(); +``` + +## SP3 / NAV RINEX + +When both SP3 and NAV RINEX files exist, we prefer SP3 for everything related +to Orbit states, because they provide highest accuracy. You can +force the consideration (along SP3) by using a custom `QcConfig`: + +```rust +use rinex_qc::prelude::*; + +// Build a setup +let mut ctx = QcContext::default(); +let cfg = QcConfig::default(); // basic +``` + +## PPP analysis + +PPP compliant contexts are made of RINEX files and SP3 files, for the same time frame. +The QcSummary report will let you know how compliant your input context is +and what may restrict performances: + +```rust +use rinex_qc::prelude::*; + +let mut ctx = QcContext::default(); +let cfg = QcConfig::default(); // basic setup +``` + +## Custom chapters + +Format your custom chapters as `QcExtraPage` so you can create your own report! + +```rust +use rinex_qc::prelude::*; + +let mut ctx = QcContext::default(); +let cfg = QcConfig::default(); // basic setup +``` + +## More info + +Refer to the RINEX Wiki pages hosted on Github and the tutorial scripts data base, shipped +with the RINEX library, for high level examples. diff --git a/rinex-qc/html/index.html b/rinex-qc/html/index.html new file mode 100644 index 000000000..4d87e7fdf --- /dev/null +++ b/rinex-qc/html/index.html @@ -0,0 +1,378 @@ + + + + + + + + + + + + + + + + +
    + RINEX QC +
    +
    +
    + +
    +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + +
    MOJN00DNK_R_20201770000_01D_30S_MO
    TimescaleGPST
    Reference position + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ECEF (WGS84)
    X3628427.912 mY562059.094 mZ5197872.215 m
    GEO
    Latitude54.944315°Longitude8.805378°Altitude57.129 m
    DMS54°56'39.5344"NDMS8°48'19.3625"E
    +
    Compliancy + + + + + + + + + +
    NAVI + CPPPPPPPP + (Ultra)
    +
    Bias + + + + + + + + + + + + +
    Troposphere BiasModel + optimization
    Ionosphere BiasModel + optimizationCancelling +
    +
    +
    +
    +
    + +
    +
    +
    + + + + \ No newline at end of file diff --git a/rinex-qc/src/analysis/mod.rs b/rinex-qc/src/analysis/mod.rs deleted file mode 100644 index d3948e049..000000000 --- a/rinex-qc/src/analysis/mod.rs +++ /dev/null @@ -1,104 +0,0 @@ -use crate::QcOpts; -use horrorshow::{box_html, helper::doctype, html, RenderBox}; -use rinex::prelude::*; -use rinex_qc_traits::HtmlReport; //table_lengthy_td - -mod sv; - -mod obs; -use obs::QcObsAnalysis; - -mod sampling; - -use sampling::QcSamplingAnalysis; -use sv::QcSvAnalysis; - -#[derive(Debug, Clone)] -/// RINEX File Quality analysis report -pub struct QcAnalysis { - /// RINEX file sampling analysis - /// - dominant sample rate - /// - data gaps, etc.. - sampling: QcSamplingAnalysis, - /// [crate::Sv] specific analysis - /// - identifies, PRN# versus time - /// - Rise and Fall datetime, etc.. - sv: QcSvAnalysis, - /// [crate::observation::Record] specific analysis, - /// is truly complete when both "obs" and "processing" - /// features are enabled - observ: QcObsAnalysis, -} - -impl QcAnalysis { - /// Creates a new Analysis Report from given RINEX context. - /// primary : primary file - pub fn new(primary: &Rinex, _nav: &Option, opts: &QcOpts) -> Self { - Self { - sv: QcSvAnalysis::new(primary, opts), - sampling: QcSamplingAnalysis::new(primary, opts), - observ: QcObsAnalysis::new(primary, opts), - } - } -} - -impl HtmlReport for QcAnalysis { - fn to_html(&self) -> String { - format!( - "{}", - html! { - : doctype::HTML; - html { - head { - meta(content="text/html", charset="utf-8"); - meta(name="viewport", content="width=device-width, initial-scale=1"); - link(rel="stylesheet", href="https:////cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css"); - title { - : "RINEX QC analysis" - } - } - body { - : self.to_inline_html() - } - } - } - ) - } - fn to_inline_html(&self) -> Box { - box_html! 
{ - div(id="analysis; style=\"display: flex; flex-direction: column; gap: 30px\"") { - div(id="sampling") { - table(class="table is-bordered; style=\"margin-bottom: 30px\"") { - thead { - th { - : "Sampling" - } - } - tbody { - : self.sampling.to_inline_html() - } - } - } - div(id="sv") { - table(class="table is-bordered; style=\"margin-bottom: 30px\"") { - thead { - th { - : "Sv" - } - } - tbody { - : self.sv.to_inline_html() - } - } - } - div(id="observations") { - table(class="table is-bordered; style=\"margin-bottom: 30px\"") { - tbody { - : self.observ.to_inline_html() - } - } - } - } - } - } -} diff --git a/rinex-qc/src/analysis/obs.rs b/rinex-qc/src/analysis/obs.rs deleted file mode 100644 index 63c62d768..000000000 --- a/rinex-qc/src/analysis/obs.rs +++ /dev/null @@ -1,634 +0,0 @@ -use horrorshow::{box_html, RenderBox}; -use itertools::Itertools; -use std::collections::HashMap; -use std::str::FromStr; - -extern crate gnss_rs as gnss; - -use gnss::prelude::SV; - -use crate::QcOpts; -//use rinex::carrier; -use rinex::carrier::Carrier; -use rinex::observation::SNR; -use rinex::prelude::{Epoch, EpochFlag, Observable, Rinex}; -use rinex::preprocessing::Derivative; - -use rinex_qc_traits::HtmlReport; -use statrs::statistics::Statistics; - -/* - * GNSS signal special formatting - */ -fn report_signals(list: &Vec) -> String { - let mut s = String::with_capacity(3 * list.len()); - for index in 0..list.len() - 1 { - s.push_str(&format!( - "{} ({:.3} MHz), ", - list[index], - list[index].frequency_mhz() - )); - } - s.push_str(&format!( - "{} ({:.3} MHz)", - list[list.len() - 1], - list[list.len() - 1].frequency_mhz() - )); - s -} - -/* - * Report RX Clock drift analysis - */ -fn report_clock_drift(data: &Vec<(Epoch, f64)>) -> Box { - box_html! 
{ - @ if data.is_empty() { - table(class="table is-bordered") { - tr { - th { - : "Unfeasible" - } - td { - : "Missing Data" - } - } - } - } else { - table(class="table is-bordered") { - tr { - th { - : "Epoch" - } - th { - : "Mean Clock drift [s/s]" - } - } - //@ for (epoch, drift) in data { - // tr { - // td { - // : epoch.to_string() - // } - // td { - // : format!("{:e}", drift) - // } - // } - //} - } - } - } -} - -/* - * Epoch anomalies formatter - */ -fn report_anomalies<'a>( - cs: &'a Vec, - power: &'a Vec, - other: &'a Vec<(Epoch, EpochFlag)>, -) -> Box { - box_html! { - tr { - th { - : "Power Failures" - } - @ if power.is_empty() { - td { - : "None" - } - } else { - td { - : format!("{:?}", power) - } - tr { - th { - : "Longest" - } - td { - //: power.iter().max_by(|(_, d1), (_, d2)| d1.cmp(d2)).unwrap().to_string() - : "TODO" - } - td { - : "Average Duration" - } - td { - : "TODO" - } - } - } - } - tr { - th { - : "Cycle slips" - } - @ if cs.is_empty() { - td { - : "None" - } - } else { - td { - : format!("{:?}", cs) - } - } - } - tr { - th { - : "Other anomalies" - } - @ if other.is_empty() { - td { - : "None" - } - } else { - td { - : "Epoch" - } - td { - : "Event" - } - @ for (e, event) in other { - td { - : "" - } - td { - : format!("{}", e) - } - @ if *event == EpochFlag::AntennaBeingMoved { - td { - : "Antenna Being Moved" - } - } else if *event == EpochFlag::NewSiteOccupation { - td { - : "New Site Occupation" - } - } else if *event == EpochFlag::ExternalEvent { - td { - : "External Event" - } - } else { - td { - : "Other" - } - } - } - } - } - } -} - -/* - * Epoch Epoch completion, - * defined as at least 1 SV with PR + PH observed on both L1 and - * "rhs" signal, - * also SNR condition for both signals above current mask - */ -fn report_epoch_completion( - total: usize, - total_with_obs: usize, - complete: &HashMap<(SV, Carrier), usize>, -) -> Box { - box_html! 
{ - table(class="table is-bordered") { - tr { - th { - : "Total#" - } - td { - : total.to_string() - } - } - tr { - th { - : "w/ observations" - } - td { - : format!("{} ({}%)", total_with_obs, total_with_obs * 100 / total) - } - } - tr { - td { - b { - : "Complete" - } - p { - : "Epochs with at least Phase + PR" - } - p { - : "in dual frequency, with" - } - p { - : "both SNR and elev above masks" - } - } - td { - @ for mut chunk in &complete.iter().chunks(8) { - p { - @ while let Some(((sv, carrier), count)) = chunk.next() { - p { - : format!("{:X} {}/L1 | {} ({}%)", sv, carrier, count, count * 100 / total) - } - } - } - } - } - } - } - } -} - -/* - * SNR analysis report - */ -fn report_snr_statistics( - snr_stats: &HashMap, -) -> Box { - box_html! { - tr { - td { - : "" - } - @ for (observable, _) in snr_stats { - @ if observable.is_phase_observable() || observable.is_pseudorange_observable() { - td { - : observable.to_string() - } - } - } - } - tr { - th { - : "Best" - } - @ for (observable, (_, max)) in snr_stats { - @ if observable.is_phase_observable() || observable.is_pseudorange_observable() { - td { - b { - : format!("{:e}", SNR::from_str(&format!("{}", max.1)).unwrap()) - } - p { - : format!("@{}", max.0) - } - } - } - } - } - tr { - th { - : "Worst" - } - @ for (observable, (min, _)) in snr_stats { - @ if observable.is_phase_observable() || observable.is_pseudorange_observable() { - td { - b { - : format!("{:e}", SNR::from_str(&format!("{}", min.1)).unwrap()) - } - p { - : format!("@{}", min.0) - } - } - } - } - } - } -} - -/* - * Reports statistical analysis results for SSx observations - */ -fn report_ssi_statistics(ssi_stats: &HashMap) -> Box { - box_html! 
{ - table(class="table is-bordered") { - thead { - tr { - td { - : "" - } - @ for (signal, _) in ssi_stats { - th { - : signal.to_string() - } - } - } - } - tbody { - tr { - th { - : "Mean" - } - @ for (_, (mean, _)) in ssi_stats { - td { - : format!("{:.3} dB", mean) - } - } - } - tr { - th { - : "Deviation" // (σ)" - } - @ for (_, (_, std)) in ssi_stats { - td { - : format!("{:.3} dB", std) - } - } - } - } - } - } -} - -#[derive(Debug, Clone)] -/// OBS RINEX specific QC analysis. -/// Full OBS RINEX analysis requires both the "obs" and "processing" features. -pub struct QcObsAnalysis { - /// Identified Observables - observables: Vec, - /// Identified Signals - signals: Vec, - /// Codes that were idenfitied - codes: Vec, - /// true if doppler observations were identified - has_doppler: bool, - /// CS anomalies - cs_anomalies: Vec, - /// Epochs where power failures took place, and their duration - power_failures: Vec, - /// Other abnormal events, by chronological epochs - other_anomalies: Vec<(Epoch, EpochFlag)>, - /// Total number of epochs identified - total_epochs: usize, - /// Epochs with at least 1 observation - total_with_obs: usize, - /// Complete epoch counter with respect to given signal (other than L1) per SV - complete_epochs: HashMap<(SV, Carrier), usize>, - /// Min. & Max. 
SNR (signal @ epoch) - snr_stats: HashMap, - /// SSI statistical analysis (mean, stddev) - ssi_stats: HashMap, - /// RX clock drift - clock_drift: Vec<(Epoch, f64)>, -} - -impl QcObsAnalysis { - pub fn new(rnx: &Rinex, opts: &QcOpts) -> Self { - let doppler_obs = rnx.observable().filter(|obs| obs.is_doppler_observable()); - - let mut observables: Vec = rnx.observable().map(|obs| obs.to_string()).collect(); - - let mut signals: Vec<_> = rnx.carrier().unique().collect(); - let mut codes: Vec<_> = rnx.code().map(|c| c.to_string()).collect(); - - let cs_anomalies: Vec<_> = rnx - .epoch_anomalies() - .filter_map(|(e, flag)| { - if flag == EpochFlag::CycleSlip { - Some(e) - } else { - None - } - }) - .collect(); - - let power_failures: Vec<_> = rnx - .epoch_anomalies() - .filter_map(|(e, flag)| { - if flag == EpochFlag::PowerFailure { - Some(e) - } else { - None - } - }) - .collect(); - - let other_anomalies: Vec<_> = rnx - .epoch_anomalies() - .filter_map(|(e, flag)| { - if flag != EpochFlag::PowerFailure && flag != EpochFlag::CycleSlip { - Some((e, flag)) - } else { - None - } - }) - .collect(); - - let mut total_epochs = rnx.epoch().count(); - let mut complete_epochs: HashMap<(SV, Carrier), usize> = HashMap::new(); - for (_, complete) in rnx.complete_epoch(Some(SNR::from(opts.min_snr_db))) { - for (sv, carrier) in complete { - if let Some(counter) = complete_epochs.get_mut(&(sv, carrier)) { - *counter += 1; - } else { - complete_epochs.insert((sv, carrier), 1); - } - } - } - - let mut epoch_with_obs: Vec = Vec::new(); - if let Some(r) = rnx.record.as_obs() { - total_epochs = r.len(); - for ((epoch, _flag), (_clk, svs)) in r { - for (_sv, observables) in svs { - if !observables.is_empty() && !epoch_with_obs.contains(epoch) { - epoch_with_obs.push(*epoch); - } - } - } - } - // append ssi: drop vehicle differentiation - let mut ssi: HashMap> = HashMap::new(); - for (_, _, obs, value) in rnx.ssi() { - if let Some(values) = ssi.get_mut(obs) { - values.push(value); - } else 
{ - ssi.insert(obs.clone(), vec![value]); - } - } - /* - * SSI statistical analysis: {mean, stddev,} - * per signal: we do not differentiate vehicles - */ - let ssi_stats: HashMap = ssi - .iter() - .map(|(obs, values)| (obs.clone(), (values.mean(), values.std_dev()))) - .collect(); - // append snr: drop vehicle differentiation - let mut snr: HashMap> = HashMap::new(); - for ((e, _), _, obs, snr_value) in rnx.snr() { - let snr_f64: f64 = (snr_value as u8).into(); - if let Some(values) = snr.get_mut(obs) { - values.push((e, snr_f64)); - } else { - snr.insert(obs.clone(), vec![(e, snr_f64)]); - } - } - /* - * SNR analysis: {min, max} - * per signal: we do not differentiate vehicles - */ - let mut snr_stats: HashMap = HashMap::new(); - for (obs, data) in snr { - let values: Vec = data.iter().map(|(_e, value)| *value).collect(); - let min = values.clone().min(); - let epoch_min = data.iter().find(|(_e, value)| *value == min).unwrap().0; - let max = values.clone().max(); - let epoch_max = data.iter().find(|(_e, value)| *value == max).unwrap().0; - snr_stats.insert(obs, ((epoch_min, min), (epoch_max, max))); - } - /* - * sort, prior reporting - */ - codes.sort(); - observables.sort(); - signals.sort(); - //complete_epochs.sort(); - - Self { - codes, - signals, - observables, - has_doppler: doppler_obs.count() > 0, - cs_anomalies, - power_failures, - other_anomalies, - total_epochs, - total_with_obs: epoch_with_obs.len(), - complete_epochs, - snr_stats, - ssi_stats, - clock_drift: { - let rx_clock: Vec<_> = rnx - .recvr_clock() - .map(|((e, _flag), value)| (e, value)) - .collect(); - let der = Derivative::new(1); - let rx_clock_drift: Vec<(Epoch, f64)> = der.eval(rx_clock); - //TODO - //let mov = Averager::mov(opts.clock_drift_window); - //mov.eval(rx_clock_drift) - rx_clock_drift - }, - } - } -} - -impl HtmlReport for QcObsAnalysis { - fn to_html(&self) -> String { - unreachable!("never used by itself") - } - fn to_inline_html(&self) -> Box { - box_html! 
{ - tr { - th { - : "Signals" - } - td { - : report_signals(&self.signals) - } - } - tr { - th { - : "Codes" - } - td { - @ for mut chunks in &self.codes.iter().chunks(12) { - p { - @ while let Some(code) = chunks.next() { - p { - : format!("{}, ", code) - } - } - } - } - } - } - tr { - th { - : "Observables" - } - td { - @ for mut chunks in &self.observables.iter().chunks(12) { - p { - @ while let Some(observable) = chunks.next() { - p { - : format!("{}, ", observable) - } - } - } - } - } - } - tr { - th { - : "Has Doppler" - } - @ if self.has_doppler { - td { - : "True" - } - } else { - td { - : "False" - } - } - } - tr { - table(class="table is-bordered") { - thead { - th { - : "Anomalies" - } - } - tbody { - : report_anomalies(&self.cs_anomalies, &self.power_failures, &self.other_anomalies) - } - } - } - tr { - table(class="table is-bordered") { - thead { - th { - : "Epochs" - } - } - tbody { - : report_epoch_completion(self.total_epochs, self.total_with_obs, &self.complete_epochs) - } - } - } - tr { - table(class="table is-bordered") { - thead { - th { - : "SNR" - } - } - tbody { - : report_snr_statistics(&self.snr_stats) - } - } - } - tr { - table(class="table is-bordered") { - thead { - th { - : "SSI" - } - } - tbody { - : report_ssi_statistics(&self.ssi_stats) - } - } - } - tr { - table(class="table is-bordered") { - thead { - th { - : "(RX) Clock Drift" - } - } - tbody { - : report_clock_drift(&self.clock_drift) - } - } - } - } - } -} diff --git a/rinex-qc/src/analysis/sampling.rs b/rinex-qc/src/analysis/sampling.rs deleted file mode 100644 index d24bd0a6b..000000000 --- a/rinex-qc/src/analysis/sampling.rs +++ /dev/null @@ -1,140 +0,0 @@ -use hifitime::Unit; -use horrorshow::box_html; -use rinex::prelude::{Duration, Epoch, EpochFlag, Rinex}; - -use crate::QcOpts; - -#[derive(Debug, Clone)] -pub struct QcSamplingAnalysis { - /// First [`Epoch`] identified in time - pub first_epoch: Option, - /// Last [`Epoch`] identified in time - pub last_epoch: Option, - 
/// Time span of this RINEX context - pub duration: Option, - /// File [`Header`] sample rate - pub sample_rate: Option, - /// Dominant sample rate - pub dominant_sample_rate: Option, - /// Unusual data gaps - pub gaps: Vec<(Epoch, Duration)>, - /// Epoch anomalies such as - /// possible receiver loss of lock, bad conditions.. - pub anomalies: Vec<(Epoch, EpochFlag)>, -} - -impl QcSamplingAnalysis { - pub fn new(rnx: &Rinex, opts: &QcOpts) -> Self { - Self { - first_epoch: rnx.first_epoch(), - last_epoch: rnx.last_epoch(), - duration: rnx.duration(), - sample_rate: rnx.sample_rate(), - dominant_sample_rate: rnx.dominant_sample_rate(), - gaps: rnx.data_gaps(opts.gap_tolerance).collect(), - anomalies: rnx.epoch_anomalies().collect(), - } - } -} - -use horrorshow::RenderBox; -use rinex_qc_traits::HtmlReport; - -impl HtmlReport for QcSamplingAnalysis { - fn to_html(&self) -> String { - todo!() - } - fn to_inline_html(&self) -> Box { - box_html! { - tr { - th { - : "Start" - } - th { - : "End" - } - th { - : "Span" - } - } - tr { - @ if let Some(epoch) = self.first_epoch { - td { - : epoch.to_string() - } - } else { - td { - : "Unknown" - } - } - @ if let Some(epoch) = self.last_epoch { - td { - : epoch.to_string() - } - } else { - td { - : "Unknown" - } - } - @ if let Some(duration) = self.duration { - td { - : duration.to_string() - } - } else { - td { - : "Unknown" - } - } - } - tr { - th { - : "Sample rate (Header)" - } - @ if let Some(rate) = self.sample_rate { - td { - : format!("{} ({:.3} Hz)", rate, 1.0 / rate.to_unit(Unit::Second)) - } - } else { - th { - : "Unspecified" - } - } - } - tr { - th { - : "Dominant Sample rate" - } - @ if let Some(rate) = self.dominant_sample_rate { - td { - : format!("{} ({:.3} Hz)", rate, 1.0 / rate.to_unit(Unit::Second)) - } - } else { - th { - : "Undetermined" - } - } - } - tr { - th { - : "Gap analysis" - } - - @ if self.gaps.is_empty() { - th { - : "No gaps detected" - } - } else { - tr { - td { - @ for (epoch, dt) in 
&self.gaps { - p { - : format!("Start : {}, Duration: {}", epoch, dt) - } - } - } - } - } - } - } - } -} diff --git a/rinex-qc/src/analysis/sv.rs b/rinex-qc/src/analysis/sv.rs deleted file mode 100644 index 84fffad49..000000000 --- a/rinex-qc/src/analysis/sv.rs +++ /dev/null @@ -1,45 +0,0 @@ -use super::QcOpts; -use rinex::prelude::{Rinex, SV}; - -use horrorshow::{box_html, RenderBox}; -use rinex_qc_traits::HtmlReport; - -#[derive(Debug, Clone)] -pub struct QcSvAnalysis { - pub sv: Vec, -} - -use itertools::Itertools; - -impl QcSvAnalysis { - pub fn new(primary: &Rinex, _opts: &QcOpts) -> Self { - let sv: Vec<_> = primary.sv().collect(); - Self { sv } - } -} - -impl HtmlReport for QcSvAnalysis { - fn to_html(&self) -> String { - panic!("sv analysis cannot be rendered on its own") - } - fn to_inline_html(&self) -> Box { - box_html! { - tr { - th { - : "PRN#" - } - td { - p { - @ for mut chunks in &self.sv.iter().chunks(12) { - p { - @ while let Some(sv) = chunks.next() { - : format!("{:x}, ", sv) - } - } - } - } - } - } - } - } -} diff --git a/rinex-qc/src/cfg.rs b/rinex-qc/src/cfg.rs new file mode 100644 index 000000000..5f5d7908c --- /dev/null +++ b/rinex-qc/src/cfg.rs @@ -0,0 +1,96 @@ +use maud::{html, Markup, Render}; +use rinex::prelude::*; +use thiserror::Error; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + +/// Configuration Error +#[derive(Debug, Clone, Error)] +pub enum Error { + #[error("invalid report type")] + InvalidReportType, +} + +use std::fmt::Display; +use std::str::FromStr; + +/// [QcReportType] +#[derive(Default, Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub enum QcReportType { + /// In [Summary] mode, only the summary section + /// of the report is to be generated. It is the lightest + /// form we can generate. 
+ Summary, + /// In [Full] mode, we generate the [CombinedReport] as well, + /// which results from the consideration of all input [ProductType]s + /// at the same time. + #[default] + Full, +} + +impl FromStr for QcReportType { + type Err = Error; + fn from_str(s: &str) -> Result { + match s.trim().to_lowercase().as_str() { + "sum" | "summ" | "summary" => Ok(Self::Summary), + "full" => Ok(Self::Full), + _ => Err(Error::InvalidReportType), + } + } +} + +impl Display for QcReportType { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::Full => f.write_str("Full"), + Self::Summary => f.write_str("Summary"), + } + } +} + +#[derive(Debug, Clone, Default)] +#[cfg_attr(feature = "serde", derive(Deserialize))] +pub struct QcConfig { + #[cfg_attr(feature = "serde", serde(default))] + pub report: QcReportType, + #[cfg_attr(feature = "serde", serde(default))] + pub manual_reference: Option, + #[cfg_attr(feature = "serde", serde(default))] + /// When both SP3 and BRDC NAV are present, + /// SP3 is prefered for skyplot project: set true here to + /// also compute for BRDC NAV. + pub force_brdc_skyplot: bool, +} + +impl QcConfig { + pub fn set_report_type(&mut self, report_type: QcReportType) { + self.report = report_type; + } + pub fn set_reference_position(&mut self, pos: GroundPosition) { + self.manual_reference = Some(pos.clone()); + } +} + +impl Render for QcConfig { + fn render(&self) -> Markup { + html! { + tr { + td { + "Report" + } + td { + (self.report.to_string()) + } + } + @if let Some(position) = self.manual_reference { + tr { + td { + (position.render()) + } + } + } + } + } +} diff --git a/rinex/src/context.rs b/rinex-qc/src/context.rs similarity index 66% rename from rinex/src/context.rs rename to rinex-qc/src/context.rs index ed0f90cd6..5e01f86cf 100644 --- a/rinex/src/context.rs +++ b/rinex-qc/src/context.rs @@ -1,27 +1,31 @@ -//! RINEX post processing context +//! GNSS processing context definition. 
use thiserror::Error; use std::collections::HashMap; use std::ffi::OsStr; use std::path::{Path, PathBuf}; -use crate::{ +use rinex::{ + carrier::Carrier, merge::{Error as RinexMergeError, Merge as RinexMerge}, - prelude::{GroundPosition, Rinex}, + prelude::{Epoch, GroundPosition, Observable, Rinex, TimeScale, SV}, types::Type as RinexType, Error as RinexError, }; -use sp3::{prelude::SP3, Merge as SP3Merge, MergeError as SP3MergeError}; +#[cfg(feature = "sp3")] +use sp3::prelude::SP3; -#[cfg(feature = "qc")] -use horrorshow::{box_html, helper::doctype, html, RenderBox}; - -#[cfg(feature = "qc")] -use rinex_qc_traits::HtmlReport; +use qc_traits::{ + processing::{Filter, Preprocessing}, + Merge, MergeError, +}; +/// Context Error #[derive(Debug, Error)] pub enum Error { + #[error("failed to extend gnss context")] + ContextExtensionError(#[from] MergeError), #[error("non supported file format")] NonSupportedFileFormat, #[error("failed to determine filename")] @@ -30,11 +34,9 @@ pub enum Error { RinexError(#[from] RinexError), #[error("failed to extend rinex context")] RinexMergeError(#[from] RinexMergeError), - #[error("failed to extend sp3 context")] - SP3MergeError(#[from] SP3MergeError), } -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ProductType { /// GNSS carrier signal observation in the form /// of Observation RINEX data. @@ -46,27 +48,30 @@ pub enum ProductType { /// Broadcast Navigation message as contained in /// Navigation RINEX files. BroadcastNavigation, - /// High precision clock data wrapped in Clock RINEX files. - HighPrecisionOrbit, /// High precision orbital attitudes wrapped in Clock RINEX files. HighPrecisionClock, /// Antenna calibration information wrapped in ANTEX special RINEX files. ANTEX, /// Precise Ionosphere state wrapped in IONEX special RINEX files. 
IONEX, + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] + /// High precision clock data wrapped in SP3 files. + HighPrecisionOrbit, } impl std::fmt::Display for ProductType { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { - Self::Observation => write!(f, "Observation"), - Self::MeteoObservation => write!(f, "Meteo"), - Self::BroadcastNavigation => write!(f, "Broadcast Navigation"), - Self::HighPrecisionOrbit => write!(f, "High Precision Orbit (SP3)"), - Self::HighPrecisionClock => write!(f, "High Precision Clock"), Self::ANTEX => write!(f, "ANTEX"), Self::IONEX => write!(f, "IONEX"), Self::DORIS => write!(f, "DORIS RINEX"), + Self::Observation => write!(f, "Observation"), + Self::MeteoObservation => write!(f, "Meteo"), + Self::HighPrecisionClock => write!(f, "High Precision Clock"), + Self::BroadcastNavigation => write!(f, "Broadcast Navigation (BRDC)"), + #[cfg(feature = "sp3")] + Self::HighPrecisionOrbit => write!(f, "High Precision Orbit (SP3)"), } } } @@ -86,10 +91,11 @@ impl From for ProductType { } enum BlobData { - /// SP3 content - Sp3(SP3), /// RINEX content Rinex(Rinex), + #[cfg(feature = "sp3")] + /// SP3 content + Sp3(SP3), } impl BlobData { @@ -108,6 +114,8 @@ impl BlobData { } } /// Returns reference to inner SP3 data. + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] pub fn as_sp3(&self) -> Option<&SP3> { match self { Self::Sp3(s) => Some(s), @@ -115,6 +123,8 @@ impl BlobData { } } /// Returns mutable reference to inner SP3 data. + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] pub fn as_mut_sp3(&mut self) -> Option<&mut SP3> { match self { Self::Sp3(s) => Some(s), @@ -123,19 +133,43 @@ impl BlobData { } } -/// RnxContext is a structure dedicated to RINEX post processing workflows, -/// like precise timing, positioning or atmosphere analysis. +/// [QcContext] is a general structure capable to store most common +/// GNSS data. 
It is dedicated to post processing workflows, +/// precise timing or atmosphere analysis. #[derive(Default)] -pub struct RnxContext { +pub struct QcContext { /// Files merged into self files: HashMap>, /// Context blob created by merging each members of each category blob: HashMap, } -impl RnxContext { - /// Returns path to File considered as Primary in this Context. - /// Observation then Navigation files are prefered as Primary files. +impl QcContext { + /// Returns main [TimeScale] for Self + pub fn timescale(&self) -> Option { + #[cfg(feature = "sp3")] + if let Some(sp3) = self.sp3() { + return Some(sp3.time_scale); + } + + if let Some(obs) = self.observation() { + let first = obs.first_epoch()?; + Some(first.time_scale) + } else if let Some(dor) = self.doris() { + let first = dor.first_epoch()?; + Some(first.time_scale) + } else if let Some(clk) = self.clock() { + let first = clk.first_epoch()?; + Some(first.time_scale) + } else if self.meteo().is_some() { + Some(TimeScale::UTC) + } else if self.ionex().is_some() { + Some(TimeScale::UTC) + } else { + None + } + } + /// Returns path to File considered as Primary product in this Context. /// When a unique file had been loaded, it is obviously considered Primary. 
pub fn primary_path(&self) -> Option<&PathBuf> { /* @@ -147,10 +181,11 @@ impl RnxContext { ProductType::DORIS, ProductType::BroadcastNavigation, ProductType::MeteoObservation, - ProductType::HighPrecisionClock, - ProductType::HighPrecisionOrbit, ProductType::IONEX, ProductType::ANTEX, + ProductType::HighPrecisionClock, + #[cfg(feature = "sp3")] + ProductType::HighPrecisionOrbit, ] { if let Some(paths) = self.files(product) { /* @@ -239,6 +274,8 @@ impl RnxContext { self.data_mut(product)?.as_mut_rinex() } /// Returns reference to inner SP3 data + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] pub fn sp3(&self) -> Option<&SP3> { self.data(ProductType::HighPrecisionOrbit)?.as_sp3() } @@ -293,6 +330,8 @@ impl RnxContext { .as_mut_rinex() } /// Returns mutable reference to inner [ProductType::HighPrecisionOrbit] data + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] pub fn sp3_mut(&mut self) -> Option<&mut SP3> { self.data_mut(ProductType::HighPrecisionOrbit)?.as_mut_sp3() } @@ -312,6 +351,8 @@ impl RnxContext { pub fn has_brdc_navigation(&self) -> bool { self.brdc_navigation().is_some() } + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] /// Returns true if [ProductType::HighPrecisionOrbit] are present in Self pub fn has_sp3(&self) -> bool { self.sp3().is_some() @@ -324,6 +365,8 @@ impl RnxContext { pub fn has_meteo(&self) -> bool { self.meteo().is_some() } + #[cfg(feature = "sp3")] + #[cfg_attr(docrs, doc(cfg(feature = "sp3")))] /// Returns true if High Precision Orbits also contains temporal information. pub fn sp3_has_clock(&self) -> bool { if let Some(sp3) = self.sp3() { @@ -363,6 +406,7 @@ impl RnxContext { /// Load a single SP3 file into Self. /// File revision must be supported and must be correctly formatted /// for this operation to be effective. 
+ #[cfg(feature = "sp3")] pub fn load_sp3(&mut self, path: &Path, sp3: SP3) -> Result<(), Error> { let prod_type = ProductType::HighPrecisionOrbit; // extend context blob @@ -388,9 +432,115 @@ impl RnxContext { } Ok(()) } + /// True if Self is compatible with navigation + pub fn nav_compatible(&self) -> bool { + self.observation().is_some() && self.brdc_navigation().is_some() + } + /// True if Self is compatible with CPP positioning, + /// see + pub fn cpp_compatible(&self) -> bool { + if let Some(obs) = self.observation() { + let mut prev_t = Option::::None; + let mut prev_obs = HashMap::>::new(); + for ((t, _), sv, pr, _) in obs.pseudo_range() { + if let Some(prev_t) = prev_t { + if prev_t != t { + prev_obs.clear(); + } + } + if let Some(prev_obs) = prev_obs.get(&sv) { + if let Ok(first) = Carrier::from_observable(sv.constellation, pr) { + for ob in prev_obs { + if let Ok(second) = Carrier::from_observable(sv.constellation, ob) { + if second != first { + return true; + } + } + } + } + } else { + prev_obs.insert(sv, vec![pr.clone()]); + } + prev_t = Some(t); + } + } + false + } + /// True if self is compatible with PPP positioning + #[cfg(not(feature = "sp3"))] + pub fn ppp_compatible(&self) -> bool { + false + } + /// True if Self is compatible with CPP positioning, + /// see + #[cfg(feature = "sp3")] + pub fn ppp_compatible(&self) -> bool { + let has_dual_phase = if let Some(obs) = self.observation() { + let mut compatible = false; + let mut prev_t = Option::::None; + let mut prev_obs = HashMap::>::new(); + for ((t, _), sv, pr, _) in obs.carrier_phase() { + if compatible { + break; + } + if let Some(prev_t) = prev_t { + if prev_t != t { + prev_obs.clear(); + } + } + if let Some(prev_obs) = prev_obs.get(&sv) { + if let Ok(first) = Carrier::from_observable(sv.constellation, pr) { + for ob in prev_obs { + if let Ok(second) = Carrier::from_observable(sv.constellation, ob) { + if second != first { + compatible |= true; + } + } + } + } + } else { + prev_obs.insert(sv, 
vec![pr.clone()]); + } + prev_t = Some(t); + } + compatible + } else { + false + }; + self.clock().is_some() && self.sp3_has_clock() && self.cpp_compatible() && has_dual_phase + } + /// SP3 is require to 100% PPP compatibility + #[cfg(not(feature = "sp3"))] + pub fn ppp_ultra_compatible(&self) -> bool { + false + } + #[cfg(feature = "sp3")] + pub fn ppp_ultra_compatible(&self) -> bool { + let same_timescale = if let Some(first_clk) = self.clock().and_then(|rnx| rnx.first_epoch()) + { + if let Some(first_obs) = self.observation().and_then(|rnx| rnx.first_epoch()) { + first_clk.time_scale == first_obs.time_scale + } else { + false + } + } else { + false + }; + self.ppp_compatible() && same_timescale + } + /// Returns true if provided Input products allow Ionosphere bias + /// model optimization + pub fn iono_bias_model_optimization(&self) -> bool { + self.ionex().is_some() // TODO: BRDC V3 or V4 + } + /// Returns true if provided Input products allow Troposphere bias + /// model optimization + pub fn tropo_bias_model_optimization(&self) -> bool { + self.has_meteo() + } /// Returns possible Reference position defined in this context. /// Usually the Receiver location in the laboratory. - pub fn ground_position(&self) -> Option { + pub fn reference_position(&self) -> Option { if let Some(data) = self.observation() { if let Some(pos) = data.header.ground_position { return Some(pos); @@ -403,76 +553,35 @@ impl RnxContext { } None } -} - -#[cfg(feature = "qc")] -impl HtmlReport for RnxContext { - fn to_html(&self) -> String { - format!( - "{}", - html! 
{ - : doctype::HTML; - html { - head { - meta(charset="UTF-8"); - meta(name="viewport", content="width=device-width, initial-scale=1"); - link(rel="stylesheet", href="https:////cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css"); - script(defer="true", src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"); - title: self.name(); - } - body { - : self.to_inline_html() - } - } - } - ) - } - fn to_inline_html(&self) -> Box { - box_html! { - tr { - th { - : "File" - } - th { - : "Name" - } - } - @ for product in [ - ProductType::Observation, - ProductType::BroadcastNavigation, - ProductType::MeteoObservation, - ProductType::HighPrecisionOrbit, - ProductType::HighPrecisionClock, - ProductType::IONEX, - ProductType::ANTEX, - ] { - tr { - td { - : product.to_string() - } - td { - @ if let Some(paths) = self.files(product) { - @ if paths.is_empty() { - : "None" - } else { - @ for path in paths { - br { - : path.file_name() - .unwrap() - .to_string_lossy() - .to_string() - } - } - } - } - } - } - } + /// Apply preprocessing filter algorithm to mutable [Self]. + /// Filter will apply to all data contained in the context. + pub fn filter_mut(&mut self, filter: &Filter) { + if let Some(data) = self.observation_mut() { + data.filter_mut(filter); + } + if let Some(data) = self.brdc_navigation_mut() { + data.filter_mut(filter); + } + if let Some(data) = self.doris_mut() { + data.filter_mut(filter); + } + if let Some(data) = self.meteo_mut() { + data.filter_mut(filter); + } + if let Some(data) = self.clock_mut() { + data.filter_mut(filter); + } + if let Some(data) = self.ionex_mut() { + data.filter_mut(filter); + } + #[cfg(feature = "sp3")] + if let Some(data) = self.sp3_mut() { + data.filter_mut(filter); } } } -impl std::fmt::Debug for RnxContext { +impl std::fmt::Debug for QcContext { /// Debug formatting, prints all loaded files per Product category. 
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Primary: \"{}\"", self.name())?; @@ -480,10 +589,11 @@ impl std::fmt::Debug for RnxContext { ProductType::Observation, ProductType::BroadcastNavigation, ProductType::MeteoObservation, - ProductType::HighPrecisionOrbit, ProductType::HighPrecisionClock, ProductType::IONEX, ProductType::ANTEX, + #[cfg(feature = "sp3")] + ProductType::HighPrecisionOrbit, ] { if let Some(files) = self.files(product) { write!(f, "\n{}: ", product)?; diff --git a/rinex-qc/src/lib.rs b/rinex-qc/src/lib.rs index 0318428c9..b0d3ef8b4 100644 --- a/rinex-qc/src/lib.rs +++ b/rinex-qc/src/lib.rs @@ -1,238 +1,29 @@ -//! RINEX Quality analysis library -//use strum_macros::EnumString; -use horrorshow::helper::doctype; -use horrorshow::html; // RenderBox}; -use rinex_qc_traits::HtmlReport; - -mod opts; -pub use opts::{QcClassification, QcOpts}; - -mod analysis; -use analysis::QcAnalysis; - -use rinex::prelude::{ProductType, RnxContext}; - -/* - * Methods used when reporting lenghty vectors or data subsets in a table. - * Makes tables cleaner and nicer by wrapping string content, into several paragraphs. - * -pub(crate) fn table_lengthy_td( - list: &Vec, - max_items: usize, -) -> Box { - let mut content = String::with_capacity(64 * max_items); - let mut paragraphs: Vec = Vec::new(); - - for i in 0..list.len() { - content.push_str(&format!("{}, ", list[i])); - if i.rem_euclid(max_items) == 0 { - paragraphs.push(content.clone()); - content.clear(); - } else if i == list.len() - 1 { - paragraphs.push(content.clone()); - } - } - box_html! 
{ - @ for paragraph in paragraphs { - p { - : paragraph.to_string() - } - } - } -} -*/ - -use rinex::preprocessing::{MaskFilter, MaskOperand, Preprocessing, TargetItem}; - -pub struct QcReport {} - -impl QcReport { - fn build_analysis(ctx: &RnxContext, opts: &QcOpts) -> Vec { - /* - * QC analysis not feasible when Observations not provided - */ - if !ctx.has_observation() { - return Vec::new(); - } - - let observation = ctx.observation().unwrap(); - - // build analysis to perform - let mut analysis: Vec = Vec::new(); - /* - * QC Classification: - * the end user has the ability to sort the generated report per physics, - * signals, or any other usual data subsets. - * To support that, we use the preprocessing toolkit, if available, - * first convert the classification method to a compatible object, - * so we can apply a mask filter - */ - let mut filter_targets: Vec = Vec::new(); - - match opts.classification { - QcClassification::GNSS => { - for gnss in observation.constellation() { - filter_targets.push(TargetItem::from(gnss)); - } - }, - QcClassification::SV => { - for sv in observation.sv() { - filter_targets.push(TargetItem::from(sv)); - } - }, - QcClassification::Physics => { - let mut observables = observation.observable().cloned().collect::>(); - observables.sort(); // improves report rendition - for obsv in observables { - filter_targets.push(TargetItem::from(obsv)); - } - }, - } - // apply mask filters and generate an analysis on resulting data set - for target in filter_targets { - let mask = MaskFilter { - item: target, - operand: MaskOperand::Equals, - }; - - let subset = observation.filter(mask.clone().into()); - - // Perform analysis on all grouped subsets. 
- // Improve this: - // QcAnalysis::new() should construct from Context directly - // and we should have grouped smaller contexts here - if let Some(brdc) = ctx.brdc_navigation() { - let brdc = brdc.filter(mask.clone().into()); - - // perform analysis on these subsets - analysis.push(QcAnalysis::new(&subset, &Some(brdc), opts)); - } else { - // perform analysis on these subsets - analysis.push(QcAnalysis::new(&subset, &None, opts)); - } - } - analysis - } - /// Generates a Quality Check Report from provided Context and parametrization, - /// in html format. - pub fn html(context: &RnxContext, opts: QcOpts) -> String { - let analysis = Self::build_analysis(context, &opts); - format!( - "{}", - html! { - : doctype::HTML; - html { - head { - meta(charset="UTF-8"); - meta(name="viewport", content="width=device-width, initial-scale=1"); - link(rel="stylesheet", href="https:////cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css"); - script(defer="true", src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"); - title: context.name(); - } - body { - div(id="version") { - h2(class="title") { - : "RINEX Quality Check summary" - } - table(class="table is-bordered; style=\"margin-bottom: 20px\"") { - tbody { - tr { - th { - : "Version" - } - td { - : format!("rinex-qc: v{}", env!("CARGO_PKG_VERSION")) - } - } - } - } - }//div=header - div(id="context") { - table(class="table is-bordered; style=\"margin-bottom: 20px\"") { - thead { - th { - : "Context" - } - } - tbody { - : context.to_inline_html() - } - } - }//div=context - div(id="parameters") { - table(class="table is-bordered; style=\"margin-bottom: 20px\"") { - thead { - th { - : "Parameters" - } - } - tbody { - : opts.to_inline_html() - } - } - } //div=parameters - div(id="header") { - table(class="table is-bordered; style=\"margin-bottom: 20px\"") { - thead { - th { - : "File Header" - } - } - @ if let Some(data) = context.rinex(ProductType::Observation) { - tbody { - : data.header.to_inline_html() - } - } else { - 
tbody { - : "Undefined" - } - } - } - } - /* - * Report all analysis that were performed - */ - div(id="analysis") { - /* - * Report all analysis - * and emphasize how they were sorted (self.opts.classfication) - */ - @ for i in 0..analysis.len() { - table(class="table is-bordered; style=\"margin-bottom: 20px\"") { - thead { - @ if opts.classification == QcClassification::GNSS { - th { - : format!("{:X} analysis", context - .observation() - .unwrap() // infaillible: QC needs observation RINEX - .constellation().nth(i).unwrap()) - } - } else if opts.classification == QcClassification::SV { - th { - : format!("{:X} analysis", context - .observation() - .unwrap() // infaillible: QC needs observation RINEX - .sv().nth(i).unwrap()) - } - - } else if opts.classification == QcClassification::Physics { - th { - : format!("{} analysis", context - .observation() - .unwrap() // infaillible: QC needs observation RINEX - .observable().nth(i).unwrap()) - } - } - } - tbody { - : analysis[i].to_inline_html() - } - } - } - }//div=analysis - } - } - } - ) - } +#![doc(html_logo_url = "https://raw.githubusercontent.com/georust/meta/master/logo/logo.png")] +#![doc = include_str!("../README.md")] +#![cfg_attr(docrs, feature(doc_cfg))] + +extern crate gnss_rs as gnss; +extern crate rinex_qc_traits as qc_traits; + +mod cfg; + +pub mod plot; + +mod context; +mod report; + +pub mod prelude { + pub use crate::{ + cfg::{QcConfig, QcReportType}, + context::{ProductType, QcContext}, + report::{QcExtraPage, QcReport}, + }; + // Pub re-export + pub use crate::plot::{Marker, MarkerSymbol, Mode, Plot}; + pub use maud::{html, Markup, Render}; + pub use qc_traits::processing::{Filter, Preprocessing}; + pub use rinex::prelude::{Error as RinexError, Rinex}; + #[cfg(feature = "sp3")] + pub use sp3::prelude::{Error as SP3Error, SP3}; + pub use std::path::Path; } diff --git a/rinex-qc/src/opts.rs b/rinex-qc/src/opts.rs deleted file mode 100644 index 33f2ea44a..000000000 --- a/rinex-qc/src/opts.rs +++ 
/dev/null @@ -1,254 +0,0 @@ -use rinex::prelude::*; -use rinex::{geodetic, wgs84}; -use rinex_qc_traits::HtmlReport; - -use horrorshow::{box_html, RenderBox}; - -// #[cfg(feature = "serde")] -// use std::str::FromStr; - -#[cfg(feature = "serde")] -use serde::{ - //de::Error, - //Serializer, - Deserialize, - //Deserializer, - Serialize, -}; - -#[derive(Default, Debug, Clone, PartialEq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub enum CsStrategy { - /// Study CS events and report them - #[default] - Study, - /// Study CS events and repair them - StudyAndRepair, -} - -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "serde", derive(Deserialize))] -pub struct ProcessingOpts { - /// Cs analysis/reporting strategy - pub cs: CsStrategy, - /// Ionospheric variation tolerance - pub iono_rate_tolerance: f64, - pub iono_rate_tolerance_dt: Duration, - /// Clock Drift Moving average window slot - pub clock_drift_window: Duration, - /// Increment of the elelavtion mask - pub elev_mask_increment: f64, -} - -impl Default for ProcessingOpts { - fn default() -> Self { - Self { - cs: CsStrategy::default(), - iono_rate_tolerance: 400.0E-2_f64, - iono_rate_tolerance_dt: Duration::from_seconds(60.0_f64), - clock_drift_window: Duration::from_seconds(600.0_f64), - elev_mask_increment: 10.0_f64, - } - } -} - -/// Qc Report classification method -#[derive(Default, Debug, Clone, PartialEq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -/// Classify the QC report by desired data set -pub enum QcClassification { - /// Report per GNSS system - #[default] - GNSS, - /// Report per SV - SV, - /// Report per Physics (Observable, Orbit..) 
- Physics, -} - -impl std::fmt::Display for QcClassification { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - QcClassification::GNSS => f.write_str("GNSS Constellations"), - QcClassification::SV => f.write_str("Satellite Vehicles"), - QcClassification::Physics => f.write_str("Physics"), - } - } -} - -#[derive(Debug, Clone)] -#[cfg_attr(feature = "serde", derive(Deserialize))] -pub struct QcOpts { - #[cfg_attr(feature = "serde", serde(default))] - #[cfg_attr(docs, doc(cfg(feature = "processing")))] - pub classification: QcClassification, - /// Minimum SNR level to consider in our analysis. - /// For example, this is used when determining whether - /// an epoch is "complete" or not. - #[cfg_attr(feature = "serde", serde(default))] - pub min_snr_db: f64, - /// Elevation mask - pub elev_mask: Option, - /// Min. duration tolerated, so it is not reported as a data gap. - /// If None: dominant sample rate is prefered. - pub gap_tolerance: Option, - /// Manually defined Ground position (ECEF) - pub ground_position: Option, - /// Window duration to be used, during RX clock drift analysis - #[cfg_attr(feature = "serde", serde(default = "default_drift_window"))] - pub clock_drift_window: Duration, -} - -impl QcOpts { - pub fn with_classification(&self, classification: QcClassification) -> Self { - let mut s = self.clone(); - s.classification = classification; - s - } - - pub fn with_min_snr(&self, snr_db: f64) -> Self { - let mut s = self.clone(); - s.min_snr_db = snr_db; - s - } - - pub fn with_ground_position_ecef(&self, pos: (f64, f64, f64)) -> Self { - let mut s = self.clone(); - s.ground_position = Some(wgs84!(pos.0, pos.1, pos.2)); - s - } - - pub fn with_ground_position_geo(&self, pos: (f64, f64, f64)) -> Self { - let mut s = self.clone(); - s.ground_position = Some(geodetic!(pos.0, pos.1, pos.2)); - s - } -} - -fn default_drift_window() -> Duration { - Duration::from_seconds(3600.0) -} - -impl Default for QcOpts { - fn default() -> 
Self { - Self { - gap_tolerance: None, - ground_position: None, - min_snr_db: 20.0, // dB - elev_mask: None, - classification: QcClassification::default(), - clock_drift_window: default_drift_window(), - } - } -} - -impl HtmlReport for QcOpts { - fn to_html(&self) -> String { - panic!("qcopts cannot be rendered on its own") - } - fn to_inline_html(&self) -> Box { - box_html! { - tr { - th { - : "Classification" - } - th { - : format!("{}", self.classification) - } - } - tr { - th { - : "Min. SNR" - } - td { - : format!("{} dB", self.min_snr_db) - } - } - tr { - th { - : "Elevation mask" - } - @ if let Some(mask) = self.elev_mask { - td { - : format!("{} °", mask) - } - } else { - td { - : "None" - } - } - } - tr { - th { - : "Data gap" - } - @ if let Some(tol) = self.gap_tolerance { - td { - : format!("{} tolerance", tol) - } - } else { - td { - : "No tolerance" - } - } - } - tr { - th { - : "Clock Drift Window" - } - td { - : self.clock_drift_window.to_string() - } - } - } - } -} - -#[cfg(feature = "serde")] -#[cfg(test)] -mod test { - use super::*; - #[test] - fn qc_opts_serdes() { - let content = r#" - { - "classification": "GNSS" - }"#; - let _opts: QcOpts = serde_json::from_str(content).unwrap(); - - let content = r#" - { - "classification": "SV" - }"#; - let _opts: QcOpts = serde_json::from_str(content).unwrap(); - - /*let content = r#" - { - "statistics": { - "window": "10 seconds" - } - }"#; - - let opts: QcOpts = serde_json::from_str(content).unwrap(); - assert_eq!(opts.reporting, ReportingStrategy::PerSv); - assert_eq!(opts.statistics, Some(StatisticsOpts { - window: Slot::Duration(Duration::from_seconds(10.0)), - })); - assert!(opts.processing.is_none()); - - let content = r#" - { - "statistics": { - "window": "10 %" - } - }"#; - - let opts: QcOpts = serde_json::from_str(content).unwrap(); - assert_eq!(opts.reporting, ReportingStrategy::PerSignal); - assert_eq!(opts.statistics, Some(StatisticsOpts { - window: Slot::Percentage(10.0_f64), - })); - 
assert!(opts.processing.is_none()); - */ - } -} diff --git a/rinex-qc/src/plot.rs b/rinex-qc/src/plot.rs new file mode 100644 index 000000000..f2072da08 --- /dev/null +++ b/rinex-qc/src/plot.rs @@ -0,0 +1,352 @@ +use hifitime::Epoch; +use maud::{html, Markup, PreEscaped, Render}; +use plotly::{ + common::HoverInfo, + layout::{ + update_menu::UpdateMenu, Axis, Center, DragMode, Mapbox, Margin, RangeSelector, + RangeSlider, SelectorButton, SelectorStep, + }, + DensityMapbox, Layout, Plot as Plotly, Scatter, Scatter3D, ScatterGeo, ScatterMapbox, + ScatterPolar, Trace, +}; + +use serde::Serialize; + +pub use plotly::{ + color::NamedColor, + common::{Marker, MarkerSymbol, Mode, Visible}, + layout::{ + update_menu::{Button, ButtonBuilder}, + MapboxStyle, + }, +}; + +pub struct CompassArrow { + pub scatter: Box>, +} + +impl CompassArrow { + /// Creates new [CompassArrow] to be projected in Polar. + /// tip_base_fraction: fraction of r base as unitary fraction. + /// tip_angle_deg: angle with base in degrees + pub fn new( + mode: Mode, + rho: f64, + theta: f64, + hover_text: String, + visible: bool, + tip_base_fraction: f64, + tip_angle_deg: f64, + ) -> Self { + let (tip_left_rho, tip_left_theta) = + (rho * (1.0 - tip_base_fraction), theta + tip_angle_deg); + let (tip_right_rho, tip_right_theta) = + (rho * (1.0 - tip_base_fraction), theta - tip_angle_deg); + Self { + scatter: { + ScatterPolar::new( + vec![0.0, theta, tip_left_theta, theta, tip_right_theta], + vec![0.0, rho, tip_left_rho, rho, tip_right_rho], + ) + .mode(mode) + .web_gl_mode(true) + .hover_text_array(vec![hover_text]) + .hover_info(HoverInfo::All) + .visible({ + if visible { + Visible::True + } else { + Visible::LegendOnly + } + }) + .connect_gaps(false) + }, + } + } +} + +pub struct Plot { + /// [Plotly] + plotly: Plotly, + /// html (div) id + plot_id: String, +} + +impl Render for Plot { + fn render(&self) -> Markup { + html! 
{ + div id=(&self.plot_id) { + (PreEscaped (self.plotly.to_inline_html(None))) + } + } + } +} + +impl Plot { + /// Adds one [Trace] to self + pub fn add_trace(&mut self, t: Box) { + self.plotly.add_trace(t); + } + /// Define custom controls for [Self] + pub fn add_custom_controls(&mut self, buttons: Vec