Move more lints to ruff #336

Merged · 2 commits · Feb 27, 2024
24 changes: 18 additions & 6 deletions .pre-commit-config.yaml
@@ -13,12 +13,6 @@ repos:
       - id: file-contents-sorter
         files: requirements-dev.txt
 
-  - repo: https://github.com/psf/black
-    rev: 24.2.0
-    hooks:
-      - id: black
-        language_version: python3
-
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.8.0
     hooks:
@@ -59,6 +53,24 @@ repos:
     rev: v0.2.2
     hooks:
       - id: ruff
+        args: ["--fix", "--show-fixes"]
+      - id: ruff-format
+
+  - repo: https://github.com/nbQA-dev/nbQA
+    rev: 1.7.1
+    hooks:
+      - id: nbqa-check-ast
+      - id: nbqa-black
+      - id: nbqa-ruff
+        args: [
+          --fix,
+          --config=ruff.toml,
+        ]
+
+  - repo: https://github.com/bdice/nb-strip-paths
+    rev: v0.1.0
+    hooks:
+      - id: nb-strip-paths
 
   - repo: https://github.com/tox-dev/pyproject-fmt
     rev: 1.7.0
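Note: the nbqa-ruff hook above reads its rules from a ruff.toml that this diff does not show (MANIFEST.in below merely stops shipping it in sdists). Purely as a hypothetical sketch of what such a file can look like — not the actual configuration in this repository:

# ruff.toml — hypothetical sketch, not the file from this PR
target-version = "py38"
line-length = 79

[lint]
select = ["ALL"]
ignore = [
    "D203",  # one-blank-line-before-class, clashes with D211
    "D213",  # multi-line-summary-second-line, clashes with D212
]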
1 change: 1 addition & 0 deletions MANIFEST.in
@@ -11,6 +11,7 @@ prune notebooks
 prune tests
 
 exclude .coveragerc
+exclude ruff.toml
 exclude .gitignore
 exclude .isort.cfg
 exclude .pre-commit-config.yaml
14 changes: 11 additions & 3 deletions erddapy/core/griddap.py
@@ -32,7 +32,9 @@ def _griddap_get_constraints(
         phrase, *__ = var.split("[")
         var_name = phrase.split(" ")[-1]
         variable_names.append(var_name)
-    table = pd.DataFrame({"dimension name": [], "min": [], "max": [], "length": []})
+    table = pd.DataFrame(
+        {"dimension name": [], "min": [], "max": [], "length": []},
+    )
     for dim in dim_names:
         url = f"{dataset_url}.csvp?{dim}"
         data = pd.read_csv(url).values
@@ -62,15 +64,21 @@ def _griddap_get_constraints(
     return constraints_dict, dim_names, variable_names
 
 
-def _griddap_check_constraints(user_constraints: dict, original_constraints: dict):
+def _griddap_check_constraints(
+    user_constraints: dict,
+    original_constraints: dict,
+):
     """Check that constraints changed by user match those expected by dataset."""
     if user_constraints.keys() != original_constraints.keys():
         raise ValueError(
             "keys in e.constraints have changed. Re-run e.griddap_initialize",
         )
 
 
-def _griddap_check_variables(user_variables: ListLike, original_variables: ListLike):
+def _griddap_check_variables(
+    user_variables: ListLike,
+    original_variables: ListLike,
+):
     """Check user has not requested variables that do not exist in dataset."""
     invalid_variables = []
     for variable in user_variables:
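Both griddap helpers changed shape only, not behavior. A minimal sketch of what _griddap_check_constraints enforces, with hypothetical constraint dicts:

from erddapy.core.griddap import _griddap_check_constraints

original = {"time>=": "2000-01-01", "time<=": "2001-01-01"}  # as built at initialize time
user = dict(original, **{"time>=": "2021-06-01"})            # same keys, new values
_griddap_check_constraints(user, original)                   # passes silently

user["depth>="] = 0  # a key the dataset never offered
# _griddap_check_constraints(user, original) would now raise:
# ValueError: keys in e.constraints have changed. Re-run e.griddap_initialize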
4 changes: 3 additions & 1 deletion erddapy/core/interfaces.py
@@ -33,7 +33,9 @@ def to_pandas(
     try:
         return pd.read_csv(data, **(pandas_kwargs or {}))
     except Exception as e:
-        raise ValueError(f"Could not read url {url} with Pandas.read_csv.") from e
+        raise ValueError(
+            f"Could not read url {url} with Pandas.read_csv.",
+        ) from e
 
 
 def to_ncCF(
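The raise ... from e chaining keeps the original pandas failure attached to the friendlier ValueError. A usage sketch with a hypothetical URL, assuming the to_pandas(url, ...) call style used elsewhere in this PR (errors from the network fetch itself can surface separately):

from erddapy.core.interfaces import to_pandas

try:
    df = to_pandas("https://example.com/not-a-real-erddap.csv")
except ValueError as err:
    print(err)            # Could not read url ... with Pandas.read_csv.
    print(err.__cause__)  # the underlying exception preserved by "from e"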
6 changes: 5 additions & 1 deletion erddapy/core/netcdf.py
@@ -36,7 +36,11 @@ def _tempnc(data: BinaryIO) -> Generator[str, None, None]:
 
     tmp = None
     try:
-        tmp = NamedTemporaryFile(suffix=".nc", prefix="erddapy_", delete=delete)
+        tmp = NamedTemporaryFile(
+            suffix=".nc",
+            prefix="erddapy_",
+            delete=delete,
+        )
         tmp.write(data.read())
         tmp.flush()
         yield tmp.name
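_tempnc is a context manager that spools binary netCDF bytes to a named temporary file and yields its path; only the call layout changed here. A sketch with in-memory bytes standing in for a real ERDDAP response:

from io import BytesIO

from erddapy.core.netcdf import _tempnc

payload = BytesIO(b"\x89HDF...")  # pretend netCDF-4 bytes for illustration
with _tempnc(payload) as fname:
    print(fname)  # e.g. /tmp/erddapy_abc123.nc, cleaned up on exit where the platform allows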
16 changes: 12 additions & 4 deletions erddapy/core/url.py
@@ -30,7 +30,9 @@ def _sort_url(url):
         else:
             variables, constraints = parts.query.split("&", maxsplit=1)
             sorted_variables = ",".join(sorted(variables.split(",")))
-            sorted_query = OrderedDict(sorted(dict(parse.parse_qsl(constraints)).items()))
+            sorted_query = OrderedDict(
+                sorted(dict(parse.parse_qsl(constraints)).items()),
+            )
             sorted_query_str = parse.unquote(parse.urlencode(sorted_query))
             sorted_url = f"{parts.scheme}://{parts.netloc}{parts.path}?{parts.params}{sorted_variables}&{sorted_query_str}{parts.fragment}"
     else:
@@ -134,7 +136,9 @@ def _format_constraints_url(kwargs: dict) -> str:
 def _check_substrings(constraint):
     """Extend the OPeNDAP with extra strings."""
     substrings = ["now", "min", "max"]
-    return any(True for substring in substrings if substring in str(constraint))
+    return any(
+        True for substring in substrings if substring in str(constraint)
+    )
 
 
 def parse_dates(
@@ -326,7 +330,9 @@ def get_info_url(
 
     """
     if not dataset_id:
-        raise ValueError(f"You must specify a valid dataset_id, got {dataset_id}")
+        raise ValueError(
+            f"You must specify a valid dataset_id, got {dataset_id}",
+        )
 
     url = f"{server}/info/{dataset_id}/index.{response}"
     return url
@@ -393,7 +399,9 @@ def get_download_url(
 
     """
     if not dataset_id:
-        raise ValueError(f"Please specify a valid `dataset_id`, got {dataset_id}")
+        raise ValueError(
+            f"Please specify a valid `dataset_id`, got {dataset_id}",
+        )
 
     if not protocol:
         raise ValueError(f"Please specify a valid `protocol`, got {protocol}")
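_check_substrings only reports whether a constraint value embeds one of ERDDAP's special tokens ("now", "min", "max"); the reformat above does not alter its results:

from erddapy.core.url import _check_substrings

_check_substrings("max(time)")  # True, contains "max"
_check_substrings("now-7days")  # True, contains "now"
_check_substrings(42.0)         # False, a plain number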
40 changes: 32 additions & 8 deletions erddapy/erddapy.py
@@ -161,7 +161,9 @@ def griddap_initialize(
                 f"Method only valid using griddap protocol, got {self.protocol}",
             )
         if dataset_id is None:
-            raise ValueError(f"Must set a valid dataset_id, got {self.dataset_id}")
+            raise ValueError(
+                f"Must set a valid dataset_id, got {self.dataset_id}",
+            )
         # Return the opendap URL without any slicing so the user can do it later.
         if self.response == "opendap":
             return
@@ -315,10 +317,14 @@ def get_download_url(
         constraints = constraints if constraints else self.constraints
 
         if not dataset_id:
-            raise ValueError(f"Please specify a valid `dataset_id`, got {dataset_id}")
+            raise ValueError(
+                f"Please specify a valid `dataset_id`, got {dataset_id}",
+            )
 
         if not protocol:
-            raise ValueError(f"Please specify a valid `protocol`, got {protocol}")
+            raise ValueError(
+                f"Please specify a valid `protocol`, got {protocol}",
+            )
 
         if (
             protocol == "griddap"
@@ -362,7 +368,11 @@ def to_pandas(
         response = kw.pop("response", "csvp")
         distinct = kw.pop("distinct", False)
         url = self.get_download_url(response=response, distinct=distinct)
-        return to_pandas(url, requests_kwargs=requests_kwargs, pandas_kwargs=dict(**kw))
+        return to_pandas(
+            url,
+            requests_kwargs=requests_kwargs,
+            pandas_kwargs=dict(**kw),
+        )
 
     def to_ncCF(self, protocol: str = None, **kw):
         """Load the data request into a Climate and Forecast compliant netCDF4-python object."""
@@ -392,7 +402,12 @@ def to_xarray(
             requests_kwargs = {**{"auth": self.auth}, **requests_kwargs}
         else:
             requests_kwargs = {"auth": self.auth}
-        return to_xarray(url, response, requests_kwargs, xarray_kwargs=dict(**kw))
+        return to_xarray(
+            url,
+            response,
+            requests_kwargs,
+            xarray_kwargs=dict(**kw),
+        )
 
     def to_iris(self, **kw):
         """Load the data request into an iris.CubeList.
@@ -409,7 +424,9 @@ def _get_variables_uncached(self, dataset_id: OptionalStr = None) -> dict:
             dataset_id = self.dataset_id
 
         if dataset_id is None:
-            raise ValueError(f"You must specify a valid dataset_id, got {dataset_id}")
+            raise ValueError(
+                f"You must specify a valid dataset_id, got {dataset_id}",
+            )
 
         url = self.get_info_url(dataset_id=dataset_id, response="csv")
 
@@ -419,14 +436,21 @@ def _get_variables_uncached(self, dataset_id: OptionalStr = None) -> dict:
         self._dataset_id = dataset_id
         for variable in set(_df["Variable Name"]):
             attributes = (
-                _df.loc[_df["Variable Name"] == variable, ["Attribute Name", "Value"]]
+                _df.loc[
+                    _df["Variable Name"] == variable,
+                    ["Attribute Name", "Value"],
+                ]
                 .set_index("Attribute Name")
                 .to_dict()["Value"]
             )
             variables.update({variable: attributes})
         return variables
 
-    def get_var_by_attr(self, dataset_id: OptionalStr = None, **kwargs) -> list[str]:
+    def get_var_by_attr(
+        self,
+        dataset_id: OptionalStr = None,
+        **kwargs,
+    ) -> list[str]:
         """
         Return a variable based on its attributes.
 
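The now multi-line get_var_by_attr keeps its netCDF4-style behavior of filtering a dataset's variables by attribute values. A hedged usage sketch — the server URL comes from the notebooks in this PR, but the dataset id is hypothetical:

from erddapy import ERDDAP

e = ERDDAP(server="https://standards.sensors.ioos.us/erddap", protocol="tabledap")
e.dataset_id = "some-dataset-id"  # hypothetical id, replace with a real one
# every variable whose standard_name attribute matches:
names = e.get_var_by_attr(standard_name="sea_water_temperature")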
5 changes: 4 additions & 1 deletion erddapy/multiple_server_search.py
@@ -74,7 +74,10 @@ def search_servers(
             f"Protocol must be tabledap or griddap, got {protocol}",
         )
     if servers_list:
-        urls = {server: _format_search_string(server, query) for server in servers_list}
+        urls = {
+            server: _format_search_string(server, query)
+            for server in servers_list
+        }
     else:
         urls = {
             key: _format_search_string(server.url, query)
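search_servers fans a single query out to many ERDDAP servers and collects the results; the comprehension above only gained line breaks. A sketch of the servers_list branch, assuming the keyword names visible in this file (server URL again taken from the notebooks in this PR):

from erddapy.multiple_server_search import search_servers

df = search_servers(
    query="sea_water_temperature",
    servers_list=["https://standards.sensors.ioos.us/erddap"],
    protocol="tabledap",
)
print(df.head())  # one row per matching dataset across the queried servers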
3 changes: 1 addition & 2 deletions notebooks/00-quick_intro.ipynb
@@ -32,7 +32,6 @@
    "source": [
     "from erddapy import ERDDAP\n",
     "\n",
-    "\n",
     "server = \"https://standards.sensors.ioos.us/erddap\"\n",
     "e = ERDDAP(\n",
     "    server=server,\n",
@@ -63,7 +62,7 @@
     "    \"latitude\",\n",
     "    \"longitude\",\n",
     "    \"sea_water_temperature\",\n",
-    "    \"air_temperature\"\n",
+    "    \"air_temperature\",\n",
     "]\n",
     "\n",
     "e.constraints = {\n",
9 changes: 2 additions & 7 deletions notebooks/01a-griddap.ipynb
@@ -24,7 +24,6 @@
     "import geopandas\n",
     "import pooch\n",
     "\n",
-    "\n",
     "url = \"https://naturalearth.s3.amazonaws.com/4.1.1/50m_physical/ne_50m_geography_marine_polys.zip\"\n",
     "fname = pooch.retrieve(\n",
     "    url,\n",
@@ -87,9 +86,7 @@
     "e.griddap_initialize()\n",
     "\n",
     "print(f\"variables in this dataset:\\n\\n{e.variables}\")\n",
-    "print(\n",
-    "    f\"\\nconstraints of this dataset:\\n\\n{json.dumps(e.constraints, indent=1)}\"\n",
-    ")"
+    "print(f\"\\nconstraints of this dataset:\\n\\n{json.dumps(e.constraints, indent=1)}\")"
    ]
   },
  {
@@ -188,7 +185,6 @@
     "import cartopy.crs as ccrs\n",
     "import matplotlib.pyplot as plt\n",
     "\n",
-    "\n",
     "fig, ax = plt.subplots(subplot_kw={\"projection\": ccrs.PlateCarree()})\n",
     "ds[\"ROSE\"].plot(ax=ax)\n",
     "ax.coastlines();"
@@ -210,7 +206,6 @@
    "source": [
     "import regionmask\n",
     "\n",
-    "\n",
     "region = regionmask.from_geopandas(SA, name=name)\n",
     "region.plot();"
    ]
@@ -340,7 +335,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.1"
+   "version": "3.10.13"
   }
  },
 "nbformat": 4,