NPI-3669 break out trim_to_first_n_epochs() as a utility function, update test_file_creation_util.py
treefern committed Dec 24, 2024
1 parent 8dd48a2 commit 48c3701
Showing 3 changed files with 27 additions and 11 deletions.
23 changes: 22 additions & 1 deletion gnssanalysis/gn_io/sp3.py
@@ -10,6 +10,7 @@
import pandas as _pd
from scipy import interpolate as _interpolate

from .. import filenames
from .. import gn_aux as _gn_aux
from .. import gn_const as _gn_const
from .. import gn_datetime as _gn_datetime
@@ -967,10 +968,30 @@ def trim_df(
# Slice to the subset that we actually care about
trimmed_df = sp3_df.loc[first_keep_time:last_keep_time]
trimmed_df.index = trimmed_df.index.remove_unused_levels()
# trimmed_df.attrs["HEADER"].HEAD.ORB_TYPE = "FIT"
return trimmed_df


def trim_to_first_n_epochs(
sp3_df: _pd.DataFrame,
epoch_count: int,
sp3_filename: Optional[str] = None,
sp3_sample_rate: Optional[timedelta] = None,
) -> _pd.DataFrame:
"""
Utility function to trim an SP3 dataframe to the first n epochs, given either the filename, or sample rate
"""
sample_rate = sp3_sample_rate
if not sample_rate:
if not sp3_filename:
raise ValueError("Either sp3_sample_rate or sp3_filename must be provided")
sample_rate = filenames.convert_nominal_span(
filenames.determine_properties_from_filename(sp3_filename)["sampling_rate"]
)

time_offset_from_start: timedelta = sample_rate * (epoch_count - 1)
return trim_df(sp3_df, keep_first_delta_amount=time_offset_from_start)


def sp3_hlm_trans(
a: _pd.DataFrame,
b: _pd.DataFrame,
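
For reference, a minimal usage sketch of the new helper (not part of this commit). The input path and filename below are placeholders; sp3_filename must be a name that filenames.determine_properties_from_filename() can parse, otherwise pass sp3_sample_rate directly.

from datetime import timedelta
from gnssanalysis.gn_io.sp3 import read_sp3, trim_to_first_n_epochs

sp3_df = read_sp3("/path/to/input.sp3")  # placeholder path

# Option 1: give the sample rate explicitly (5 minutes assumed here).
trimmed_df = trim_to_first_n_epochs(sp3_df, epoch_count=2, sp3_sample_rate=timedelta(minutes=5))

# Option 2: let the helper derive the sample rate from the SP3 filename (placeholder name).
trimmed_df = trim_to_first_n_epochs(sp3_df, epoch_count=2, sp3_filename="placeholder_name.sp3")
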
11 changes: 3 additions & 8 deletions gnssanalysis/test_file_creation_util.py
@@ -1,7 +1,7 @@
from datetime import timedelta
from typing import Optional
from gnssanalysis.filenames import convert_nominal_span, determine_properties_from_filename
from gnssanalysis.gn_io.sp3 import filter_by_svs, read_sp3, trim_df, write_sp3, remove_offline_sats
from gnssanalysis.gn_io.sp3 import filter_by_svs, read_sp3, trim_to_first_n_epochs, write_sp3, remove_offline_sats
import logging

logger = logging.getLogger(__name__)
@@ -32,18 +32,13 @@
sample_rate: timedelta = convert_nominal_span(determine_properties_from_filename(filename)["sampling_rate"])
print(f"sample_rate is: {sample_rate}")


# time_start_offset: timedelta = timedelta(0)
time_offset_from_start: timedelta = sample_rate * (trim_to_num_epochs - 1)


# Load
print("Loading SP3 into DataFrame...")
sp3_df = read_sp3(src_path)

# Trim to first x epochs
print(f"Trimming to first {trim_to_num_epochs} epochs (timedelta from start: {time_offset_from_start})")
sp3_df = trim_df(sp3_df, keep_first_delta_amount=time_offset_from_start)
print(f"Trimming to first {trim_to_num_epochs} epochs")
sp3_df = trim_to_first_n_epochs(sp3_df=sp3_df, epoch_count=trim_to_num_epochs, sp3_filename=filename)

# Filter to chosen SVs or number of SVs...
print(
4 changes: 2 additions & 2 deletions tests/test_sp3.py
@@ -273,14 +273,14 @@ def test_trim_df(self, mock_file):
sample_rate, timedelta(minutes=5), "Sample rate should've been parsed as 5 minutes, from filename"
)

sp3_df_trimmed = sp3.trim_to_epoch_count(sp3_df, epoch_count=2, sp3_sample_rate=sample_rate)
sp3_df_trimmed = sp3.trim_to_first_n_epochs(sp3_df, epoch_count=2, sp3_sample_rate=sample_rate)
self.assertEqual(
sp3_df_trimmed.index.get_level_values(0).unique().array.tolist(),
[784792800, 784793100],
"Should be first two epochs after trimming with trim_to_epoch_count() using sample_rate",
)

sp3_df_trimmed = sp3.trim_to_epoch_count(sp3_df, epoch_count=2, sp3_filename=filename)
sp3_df_trimmed = sp3.trim_to_first_n_epochs(sp3_df, epoch_count=2, sp3_filename=filename)
self.assertEqual(
sp3_df_trimmed.index.get_level_values(0).unique().array.tolist(),
[784792800, 784793100],
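
As a quick sanity check on the expected values in these assertions (not part of the commit), the two epoch timestamps differ by exactly one 5-minute sampling interval:

from datetime import timedelta

# 784793100 - 784792800 == 300 seconds, i.e. one 5-minute sampling interval,
# so trimming with epoch_count=2 should keep exactly these first two epochs.
assert 784793100 - 784792800 == timedelta(minutes=5).total_seconds()
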
