8000 Plot health flags by jtec · Pull Request #177 · jtec/prx · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content

Plot health flags #177

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 14 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 109 additions & 64 deletions src/prx/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import numpy as np
import git
import prx.util
from prx.util import add_range_column, timedelta_2_seconds, timestamp_2_timedelta

from prx import atmospheric_corrections as atmo, util
from prx.constants import carrier_frequencies_hz
Expand All @@ -30,9 +31,7 @@ def write_prx_file(
):
output_writers = {"jsonseq": write_json_text_sequence_file, "csv": write_csv_file}
if output_format not in output_writers.keys():
assert False, (
f"Output format {output_format} not supported, we can do {list(output_writers.keys())}"
)
assert False, f"Output format {output_format} not supported, we can do {list(output_writers.keys())}"
return output_writers[output_format](
prx_header, prx_records, file_name_without_extension
)
Expand Down Expand Up @@ -216,28 +215,88 @@ def check_assumptions(
):
obs_header = georinex.rinexheader(rinex_3_obs_file)
if "RCV CLOCK OFFS APPL" in obs_header.keys():
assert obs_header["RCV CLOCK OFFS APPL"].strip() == "0", (
"Handling of 'RCV CLOCK OFFS APPL' != 0 not implemented yet."
)
assert obs_header["TIME OF FIRST OBS"].split()[-1].strip() == "GPS", (
"Handling of observation files using time scales other than GPST not implemented yet."
)
assert (
obs_header["RCV CLOCK OFFS APPL"].strip() == "0"
), "Handling of 'RCV CLOCK OFFS APPL' != 0 not implemented yet."
assert (
obs_header["TIME OF FIRST OBS"].split()[-1].strip() == "GPS"
), "Handling of observation files using time scales other than GPST not implemented yet."


def parse_rinex_nav_or_obs_file(rinex_file_path: Path):
    """Parse a RINEX 3 file, dispatching to the OBS or NAV parser.

    Args:
        rinex_file_path: Path to a RINEX 3 observation or navigation file.

    Returns:
        The parsed content as produced by ``parse_rinex_obs_file`` or
        ``parse_rinex_nav_file``.

    Raises:
        AssertionError: If the file is neither a RINEX 3 OBS nor NAV file.
    """
    if is_rinex_3_obs_file(rinex_file_path):
        return parse_rinex_obs_file(rinex_file_path)
    if is_rinex_3_nav_file(rinex_file_path):
        return parse_rinex_nav_file(rinex_file_path)
    # Raise explicitly instead of `assert False` so the guard survives `python -O`;
    # the exception type seen by callers is unchanged.
    raise AssertionError(
        f"File {rinex_file_path} appears to be neither RINEX 3 OBS nor NAV file."
    )


def warm_up_parser_cache(rinex_files):
    """Parse each RINEX file once, for its caching side effect only.

    Results are discarded; subsequent parses of the same files are expected to
    hit the parser cache.
    """
    for rinex_file in rinex_files:
        parse_rinex_nav_or_obs_file(rinex_file)


def compute_ephemeris_discontinuities(
    rinex_3_ephemerides_files: list[Path],
    sat_states: pd.DataFrame,
    approximate_receiver_ecef_position_m: np.ndarray,
):
    """Quantify jumps in satellite states caused by broadcast ephemeris changes.

    For each (sv, signal) pair, flag the first satellite state computed with a
    new ephemeris (detected via a change of ``ephemeris_hash``), recompute that
    same state with the previous ephemeris, and return the new-minus-previous
    difference of all numeric columns.

    Args:
        rinex_3_ephemerides_files: RINEX 3 navigation files used to recompute
            states with the previous ephemeris.
        sat_states: Satellite states with at least ``sv``, ``signal``,
            ``query_time_isagpst`` and ``ephemeris_hash`` columns.
        approximate_receiver_ecef_position_m: Receiver ECEF position in meters,
            used by ``add_range_column`` on both state sets.

    Returns:
        pd.DataFrame: One row per state right after a discontinuity. Numeric
        columns hold new-minus-previous differences; ``previous_ephemeris_hash``
        identifies the ephemeris in use before the change.
    """
    # Extract observations right after a change in ephemeris
    sat_states = sat_states.sort_values("query_time_isagpst")
    sat_states["after_discontinuity"] = False
    for _, group_df in sat_states.groupby(["sv", "signal"]):
        # A row is "after a discontinuity" when its ephemeris hash differs from
        # the previous row's hash within the same (sv, signal) group.
        after_discontinuity = group_df.ephemeris_hash != group_df.ephemeris_hash.shift(
            1
        )
        # The very first of a signal's sat states can't be after a discontinuity
        after_discontinuity.iloc[0] = False
        sat_states.loc[group_df.index, "after_discontinuity"] = (
            after_discontinuity.values
        )
    # Satellite states with ephemeris after the discontinuity
    df_new_ephemeris = (
        sat_states[sat_states.after_discontinuity]
        .copy()
        .drop(columns=["after_discontinuity"])
        .reset_index(drop=True)
    )
    # Now compute satellite states after discontinuity with ephemeris before the discontinuity
    query = sat_states.loc[
        sat_states.after_discontinuity, ["sv", "signal", "query_time_isagpst"]
    ].drop_duplicates()
    # Select the previous ephemeris by querying slightly before the change;
    # NOTE(review): assumes consecutive ephemerides are more than 10 s apart —
    # confirm against the ephemeris cadence of the supported constellations.
    query["ephemeris_selection_time_isagpst"] = query.query_time_isagpst - pd.Timedelta(
        "10s"
    )
    # But still compute satellite states for after the discontinuity
    query = query.sort_values("ephemeris_selection_time_isagpst")
    df_previous_ephemeris = rinex_evaluate.compute(
        rinex_3_ephemerides_files, query
    ).reset_index(drop=True)
    # Sort both frames identically so rows line up for element-wise differencing.
    shared_columns = ["query_time_isagpst", "sv", "signal"]
    df_new_ephemeris = df_new_ephemeris.sort_values(by=shared_columns).reset_index(
        drop=True
    )
    df_previous_ephemeris = df_previous_ephemeris.sort_values(
        by=shared_columns
    ).reset_index(drop=True)
    df_new_ephemeris = add_range_column(
        df_new_ephemeris, approximate_receiver_ecef_position_m
    )
    df_previous_ephemeris = add_range_column(
        df_previous_ephemeris, approximate_receiver_ecef_position_m
    )
    # Row-by-row alignment is required for the subtraction below to be meaningful.
    assert df_new_ephemeris[shared_columns].equals(
        df_previous_ephemeris[shared_columns]
    )
    numeric_columns = df_new_ephemeris.select_dtypes(include=np.number).columns.tolist()
    # `dsc` aliases `df_new_ephemeris` (itself a copy made above), so the
    # in-place subtraction does not touch the caller's data.
    dsc = df_new_ephemeris
    dsc[numeric_columns] -= df_previous_ephemeris[numeric_columns]
    dsc["previous_ephemeris_hash"] = df_previous_ephemeris.ephemeris_hash
    return dsc


@prx.util.timeit
def build_records(
rinex_3_obs_file,
Expand Down Expand Up @@ -325,31 +384,30 @@ def build_records(
},
)

sat_states_per_day = []
for file in rinex_3_ephemerides_files:
# get year and doy from NAV filename
year = int(file.name[12:16])
doy = int(file.name[16:19])
day_query = query.loc[
(
query.query_time_isagpst
>= pd.Timestamp(year=year, month=1, day=1) + pd.Timedelta(days=doy - 1)
)
& (
query.query_time_isagpst
< pd.Timestamp(year=year, month=1, day=1) + pd.Timedelta(days=doy)
)
sat_states = rinex_evaluate.compute_parallel(
rinex_3_ephemerides_files,
query,
).reset_index(drop=True)

discontinuities = compute_ephemeris_discontinuities(
rinex_3_ephemerides_files, sat_states, approximate_receiver_ecef_position_m
)
discontinuities = discontinuities[
[
"sv",
"signal",
"query_time_isagpst",
"ephemeris_hash",
"previous_ephemeris_hash",
"range_m",
"sat_clock_offset_m",
]
if day_query.empty:
continue
log.info(f"Computing satellite states for {year}-{doy:03d}")
sat_states_per_day.append(
rinex_evaluate.compute_parallel(
file,
day_query,
)
)
sat_states = pd.concat(sat_states_per_day)
]
discontinuities.query_time_isagpst = discontinuities.query_time_isagpst.apply(
lambda ts: timedelta_2_seconds(timestamp_2_timedelta(ts, "GPST"))
)
discontinuities = discontinuities.to_dict("records")

sat_states = sat_states.rename(
columns={
"sv": "satellite",
Expand Down Expand Up @@ -406,36 +464,22 @@ def build_records(
sat_states.elevation_rad.to_numpy(),
)
sat_states["tropo_delay_m"] = tropo_delay_m

# Merge in all sat states that are not signal-specific, i.e. can be copied into
# rows with Doppler and carrier phase observations
# TODO understand why dropping duplicates with
# subset=['satellite', 'time_of_emission_isagpst']
# leads to fewer rows here, looks like there are multiple position/velocity/clock values for
# the same satellite and the same time of emission
sat_specific = sat_states[
sat_states.columns.drop(
[
"observation_type",
"sat_code_bias_m",
"time_of_reception_in_receiver_time",
]
)
].drop_duplicates(subset=["satellite", "time_of_emission_isagpst"])
# Group delays are signal-specific, so we merge them in separately
code_specific = sat_states[
["satellite", "observation_type", "time_of_emission_isagpst", "sat_code_bias_m"]
].drop_duplicates(
subset=["satellite", "observation_type", "time_of_emission_isagpst"]
)
flat_obs = flat_obs.merge(
sat_specific, on=["satellite", "time_of_emission_isagpst"], how="left"
# Merge sat states into observation dataframe. Due to Galileo's FNAV/INAV ephemerides
# being signal-specific, we merge on the code identifier here and not only the satellite
sat_states["code_id"] = sat_states["observation_type"].str[1:3]
flat_obs["code_id"] = flat_obs["observation_type"].str[1:3]
sat_states = sat_states.drop(
columns=["observation_type", "time_of_reception_in_receiver_time"]
)
flat_obs = flat_obs.merge(
code_specific,
on=["satellite", "observation_type", "time_of_emission_isagpst"],
sat_states,
on=["satellite", "code_id", "time_of_emission_isagpst"],
how="left",
)
# Fix code biases being merged into lines with signals that are not code signals
flat_obs.loc[
~(flat_obs.observation_type.str.startswith("C")), "sat_code_bias_m"
] = np.nan
# GLONASS satellites with both FDMA and CDMA signals have a frequency slot for FDMA signals,
# for CDMA signals we use the common carrier frequency of those signals.
glo_cdma = flat_obs[
Expand Down Expand Up @@ -521,7 +565,7 @@ def assign_carrier_frequencies(flat_obs):
"iono_delay_m",
] = np.nan

return flat_obs
return flat_obs, discontinuities


@prx.util.timeit
Expand All @@ -543,11 +587,12 @@ def process(observation_file_path: Path, output_format="csv"):
)
metadata["processing_start_time"] = t0
prx.util.repair_with_gfzrnx(rinex_3_obs_file)
records = build_records(
records, discontinuities = build_records(
rinex_3_obs_file,
aux_files["broadcast_ephemerides"],
metadata["approximate_receiver_ecef_position_m"],
)
metadata["discontinuities"] = discontinuities
return write_prx_file(
metadata,
records,
Expand Down
73 changes: 73 additions & 0 deletions src/prx/rinex_nav/ephemerides_stats.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import numpy as np
import pandas as pd
from plotly.subplots import make_subplots
import plotly.graph_objs as go
from prx.rinex_nav.evaluate import parse_rinex_nav_file
from src.prx.rinex_nav.nav_file_discovery import discover_or_download_ephemerides


def main(t1: pd.Timestamp, t2: pd.Timestamp) -> None:
    """Plot GPS broadcast-ephemeris health flags over the interval [t1, t2].

    Discovers or downloads the navigation files covering the interval,
    concatenates their ephemerides, and shows an interactive plotly figure with
    one trace per satellite: green markers for healthy ephemerides and red
    markers (offset slightly above the PRN line) for unhealthy ones.

    Args:
        t1: Start of the time interval.
        t2: End of the time interval.
    """
    nav_files = discover_or_download_ephemerides(t1, t2)
    ephemeris_blocks = []
    for path in nav_files:
        block = parse_rinex_nav_file(path)
        # Not using iono model parameters here, removing them from dataframe
        # attributes to enable concatenation
        block.attrs.pop("ionospheric_corr_GPS", None)
        ephemeris_blocks.append(block)
    ephemerides = pd.concat(ephemeris_blocks)
    # BeiDou carries its health flag in SatH1; copy it into the common column.
    # NOTE(review): only GPS rows survive the filter below, so this currently
    # has no effect — presumably kept for future multi-constellation support.
    ephemerides.loc[ephemerides.sv.str.startswith("C"), "health"] = ephemerides.loc[
        ephemerides.sv.str.startswith("C"), "SatH1"
    ]
    ephemerides = (
        ephemerides[ephemerides.sv.str.startswith("G")]
        .sort_values(by=["sv"])
        .reset_index(drop=True)
    )
    fig = make_subplots(rows=1, cols=1)

    flag2string = {
        0: "healthy",
        1: "unhealthy",
    }
    health2color = {
        "healthy": "green",
        "unhealthy": "red",
    }
    ephemerides.health = ephemerides.health.astype(int).map(flag2string)
    for (sv, health), group_df in ephemerides.groupby(["sv", "health"]):
        prn = float(sv[1:])
        # Offset unhealthy points slightly above the PRN line so both health
        # states stay visible when they overlap in time.
        offset = 0 if health == "healthy" else 0.1
        fig.add_trace(
            go.Scatter(
                x=group_df["time"],
                # One y value (the PRN, plus offset) per ephemeris timestamp.
                y=np.full(len(group_df), prn, dtype=float) + offset,
                mode="lines+markers",
                marker=dict(
                    color=health2color[health],
                    size=5,
                ),
                customdata=group_df[["TransTime", "ephemeris_hash"]],
                hovertemplate="<br>".join(
                    [
                        "timestamp: %{x}",
                        "PRN: %{y:.0f}",
                        "time of transmission [tow]: %{customdata[0]}",
                        "ephemeris_hash [-]: %{customdata[1]}",
                    ]
                ),
                name=f"{sv}",
                legendgroup=sv,
                showlegend=True,
            )
        )
    fig.update_xaxes(title="ephemeris time stamp")
    fig.update_yaxes(title="PRN")
    fig.show()


if __name__ == "__main__":
    # Example run: plot health flags for one full day of ephemerides.
    interval_start = pd.Timestamp("2025-04-10T00:00:00")
    interval_end = pd.Timestamp("2025-04-11T00:00:00")
    main(interval_start, interval_end)
Loading
Loading
0