
Commit 4e296e4
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Sep 26, 2023
1 parent 2ca4038 commit 4e296e4
Showing 12 changed files with 6 additions and 8 deletions.
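
All twelve changes are blank-line fixes applied automatically by the pre-commit.com hooks: each of the 8 deletions drops a blank line that opened a function or block body, and each of the 6 additions inserts a second blank line before a top-level comment that precedes a definition. A minimal sketch of the enforced style (hypothetical functions, not code from gpm_api):

    # No blank line is left between a "def ...:" (or "if/for/with ...:") line
    # and the first statement of its body.
    def add_one(x):
        return x + 1


    # Two blank lines separate top-level definitions; a leading comment block
    # stays attached to the definition it introduces.
    def add_two(x):
        return x + 2
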
1 change: 0 additions & 1 deletion gpm_api/bucket/analysis.py
@@ -80,7 +80,6 @@ def get_cut_lon_breaks_labels(bin_spacing):
 def pl_add_geographic_bins(
     df, xbin_column, ybin_column, bin_spacing, x_column="lon", y_column="lat"
 ):
-
     cut_lon_breaks, cut_lon_labels = get_cut_lon_breaks_labels(bin_spacing)
     cut_lat_breaks, cut_lat_labels = get_cut_lat_breaks_labels(bin_spacing)
     df = df.with_columns(

3 changes: 2 additions & 1 deletion gpm_api/bucket/writers.py
@@ -235,6 +235,7 @@ def write_granules_bucket(
 ####--------------------------------------------------------------------------.
 #### Single GPM Granule Routines OLD
 
+
 # TODO: Currently used for final merging
 # --> Adapt code to use write_partitioned_dataset
 def write_parquet_dataset(
@@ -477,6 +478,7 @@ def name_function(i):
 
 # return None
 
+
 ####--------------------------------------------------------------------------.
 #### GPM Datasets Routines
 @print_task_elapsed_time(prefix="Dataset Bucket Operation Terminated.")
@@ -498,7 +500,6 @@ def write_dataset_bucket(
     use_threads=True,
     **writer_kwargs,
 ):
-
     df = convert_ds_to_df(
         ds=ds,
         preprocessing_function=preprocessing_function,

1 change: 0 additions & 1 deletion gpm_api/dataset/crs.py
@@ -317,7 +317,6 @@ def _add_proj_coords_attrs(ds, crs) -> xr.Dataset:
 
     # If available, add attributes
     if x_dim is not None and y_dim is not None:
-
         # Retrieve existing coordinate attributes
         src_x_coord_attrs = dict(ds[x_dim].attrs)
         src_y_coord_attrs = dict(ds[y_dim].attrs)

1 change: 0 additions & 1 deletion gpm_api/old_dataset/reader.py
@@ -145,7 +145,6 @@ def _get_granule_info(filepath, scan_mode, variables, groups):
     """Retrieve coordinates, attributes and valid variables and groups from the HDF file."""
     # Open HDF5 file
     with h5py.File(filepath, "r", locking=False, swmr=SWMR) as hdf:
-
         # Get coordinates
         coords = get_coords(hdf, scan_mode)
 

1 change: 1 addition & 0 deletions gpm_api/scripts/download_gpm_daily_data.py
@@ -14,6 +14,7 @@
 warnings.filterwarnings("ignore")
 sys.tracebacklimit = 0  # avoid full traceback error if occur
 
+
 # -------------------------------------------------------------------------.
 # Click Command Line Interface decorator
 @click.command()

1 change: 1 addition & 0 deletions gpm_api/scripts/download_gpm_files.py
@@ -12,6 +12,7 @@
 warnings.filterwarnings("ignore")
 sys.tracebacklimit = 0  # avoid full traceback error if occur
 
+
 # -------------------------------------------------------------------------.
 # Click Command Line Interface decorator
 @click.command()

1 change: 1 addition & 0 deletions gpm_api/scripts/download_gpm_monthly_data.py
@@ -14,6 +14,7 @@
 warnings.filterwarnings("ignore")
 sys.tracebacklimit = 0  # avoid full traceback error if occur
 
+
 # -------------------------------------------------------------------------.
 # Click Command Line Interface decorator
 @click.command()

1 change: 0 additions & 1 deletion gpm_api/tests/0_tmp/dataset/0_create_test_files.py
@@ -86,7 +86,6 @@ def _copy_attrs(src_h5_obj, dst_h5_obj):
 def _copy_datasets(src_group, dst_group, subset_size=5):
     for name, h5_obj in src_group.items():
         if isinstance(h5_obj, h5py.Dataset):
-
             # Determine the subset shape (2 indices per dimension)
             subset_shape, subset_chunks = _get_subset_shape_chunks(h5_obj, subset_size=subset_size)
 

1 change: 1 addition & 0 deletions gpm_api/utils/utils_HDF5.py
@@ -221,6 +221,7 @@ def h5dump(filepath, group="/", dataset_attrs=True, group_attrs=True):
 # for item in x.keys():
 #     print_hdf5_shape(x[item])
 
+
 # -----------------------------------------------------------------------------.
 def hdf5_objects_names(hdf):
     l_objs = []

1 change: 0 additions & 1 deletion gpm_api/visualization/orbit.py
@@ -202,7 +202,6 @@ def wrapper(*args, **kwargs):
 
         # - Call the function over each slice
        for i, slc in enumerate(list_slices):
-
            if not rgb:
                # Retrieve contiguous data array
                tmp_da = da.isel({"along_track": slc})

1 change: 0 additions & 1 deletion gpm_api/visualization/plot.py
@@ -573,7 +573,6 @@ def plot_map(
     cbar_kwargs={},
     **plot_kwargs,
 ):
-
     from gpm_api.checks import is_grid, is_orbit
     from gpm_api.visualization.grid import plot_grid_map
     from gpm_api.visualization.orbit import plot_orbit_map

1 change: 0 additions & 1 deletion gpm_api/visualization/profile.py
@@ -218,7 +218,6 @@ def select_transect(
     transect_kwargs={},
     keep_only_valid_variables=True,
 ):
-
     # Identify transect isel_dict
     transect_slices = get_transect_slices(
         xr_obj,

