Skip to content

Commit

Permalink
Major refactor of statistics functions
Browse files Browse the repository at this point in the history
  • Loading branch information
robbibt authored Dec 20, 2024
1 parent ab09f43 commit 281599e
Show file tree
Hide file tree
Showing 8 changed files with 671 additions and 822 deletions.
225 changes: 131 additions & 94 deletions docs/notebooks/Tide_statistics.ipynb

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions eo_tides/eo.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ def _pixel_tides_resample(
resample_method="bilinear",
dask_chunks=None,
dask_compute=True,
name="tide_height",
):
"""Resamples low resolution tides modelled by `pixel_tides` into the
geobox (e.g. spatial resolution and extent) of the original higher
Expand All @@ -125,6 +126,8 @@ def _pixel_tides_resample(
Whether to compute results of the resampling step using Dask.
If False, this will return `tides_highres` as a lazy loaded
Dask-enabled array.
name : str, optional
The name used for the output array. Defaults to "tide_height".
Returns
-------
Expand All @@ -145,7 +148,11 @@ def _pixel_tides_resample(
how=gbox,
chunks=dask_chunks,
resampling=resample_method,
).rename("tide_height")
)

# Set output name
if name is not None:
tides_highres = tides_highres.rename(name)

# Optionally process and load into memory with Dask
if dask_compute:
Expand Down Expand Up @@ -373,7 +380,7 @@ def pixel_tides(
`data` has a geographic CRS (e.g. degree units).
resample_method : str, optional
If resampling is requested (see `resample` above), use this
resampling method when converting from low resolution to high
resampling method when resampling from low resolution to high
resolution pixels. Defaults to "bilinear"; valid options include
"nearest", "cubic", "min", "max", "average" etc.
dask_chunks : tuple of float, optional
Expand All @@ -385,7 +392,7 @@ def pixel_tides(
`(2048, 2048)`.
dask_compute : bool, optional
Whether to compute results of the resampling step using Dask.
If False, `tides_highres` will be returned as a Dask array.
If False, `tides_highres` will be returned as a Dask-enabled array.
**model_tides_kwargs :
Optional parameters passed to the `eo_tides.model.model_tides`
function. Important parameters include `cutoff` (used to
Expand Down
8 changes: 5 additions & 3 deletions eo_tides/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import pyTMD
from tqdm import tqdm

from .utils import DatetimeLike, _set_directory, _standardise_models, _standardise_time, idw, list_models
from .utils import DatetimeLike, _set_directory, _standardise_models, _standardise_time, idw


def _parallel_splits(
Expand Down Expand Up @@ -486,7 +486,7 @@ def model_tides(
- "spline": scipy bivariate spline interpolation
- "bilinear": quick bilinear interpolation
extrapolate : bool, optional
Whether to extrapolate tides for x and y coordinates outside of
Whether to extrapolate tides into x and y coordinates outside of
the valid tide modelling domain using nearest-neighbor.
cutoff : float, optional
Extrapolation cutoff in kilometers. The default is None, which
Expand Down Expand Up @@ -544,7 +544,7 @@ def model_tides(
time = _standardise_time(time)

# Validate input arguments
assert time is not None, "Times for modelling tides muyst be provided via `time`."
assert time is not None, "Times for modelling tides must be provided via `time`."
assert method in ("bilinear", "spline", "linear", "nearest")
assert output_units in (
"m",
Expand All @@ -555,6 +555,8 @@ def model_tides(
"long",
"wide",
), "Output format must be either 'long' or 'wide'."
assert np.issubdtype(x.dtype, np.number), "`x` must contain only valid numeric values, and must not be None."
assert np.issubdtype(y.dtype, np.number), "`y` must contain only valid numeric values, and must not be None."
assert len(x) == len(y), "x and y must be the same length."
if mode == "one-to-one":
assert len(x) == len(time), (
Expand Down
Loading

0 comments on commit 281599e

Please sign in to comment.