Skip to content

Commit

Permalink
Merge pull request #125 from equinor/refactor/replace-black-flake8-with-ruff
Browse files Browse the repository at this point in the history

chore: replace black flake8 with ruff
  • Loading branch information
sean-sinclair authored Dec 20, 2024
2 parents 54004b1 + e8366a2 commit 85cd312
Show file tree
Hide file tree
Showing 14 changed files with 137 additions and 106 deletions.
27 changes: 27 additions & 0 deletions .github/workflows/linting_and_formatting.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# CI workflow: lint and format checks via ruff (replaces the previous
# black/flake8 setup, per this commit's title).
# NOTE(review): the scraped page lost all YAML indentation; structure
# restored here to standard GitHub Actions layout — confirm against the
# repository file.
name: Check formatting and linting

on:
  pull_request:
  push: { branches: [main] }

jobs:
  ruff-check:
    name: Run ruff lint and format checks
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: 'pip'  # cache pip downloads between workflow runs

      - name: Installing dependencies
        run: pip install ruff

      - name: Run ruff lint
        run: ruff check .

      - name: Run ruff format
        # --check reports formatting diffs without rewriting files,
        # so the job fails instead of silently reformatting.
        run: ruff format . --check
16 changes: 16 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# pre-commit configuration running ruff lint and format checks.
# Uses `repo: local` with `language: system`, so ruff must already be
# installed in the developer's environment (it is listed in the project's
# dev extras in pyproject.toml).
# NOTE(review): the scraped page lost all YAML indentation; structure
# restored here to standard pre-commit layout — confirm against the
# repository file.
repos:
  - repo: local
    hooks:
      - id: lint
        name: Ruff Lint
        description: Linting using ruff
        entry: bash -c 'ruff check .'
        language: system
        stages: ["pre-commit", "pre-push"]

      - id: format
        name: Ruff Format
        description: Formatting using ruff
        # --check only reports; it never rewrites files during the hook.
        entry: bash -c 'ruff format . --check'
        language: system
        stages: ["pre-commit", "pre-push"]
32 changes: 23 additions & 9 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"

[tool.setuptools_scm]

[tool.isort]
profile = "black"

[tool.black]
line-length = 79

[project]
name = "fmu-sumo-sim2sumo"
requires-python = ">=3.9"
Expand All @@ -26,7 +18,7 @@ dependencies = [

[project.optional-dependencies]
test = ["pytest"]
dev = ["pytest", "black", "flake8"]
dev = ["pytest", "ruff", "pre-commit"]
nokomodo = ["ert"]

docs = [
Expand All @@ -46,3 +38,25 @@ sim2sumo = "fmu.sumo.sim2sumo.main:main"

[project.entry-points.ert]
fmu_sumo_sim2sumo_jobs = "fmu.sumo.sim2sumo.hook_implementations.jobs"

# Ruff configuration (replaces the removed [tool.black] / [tool.isort] blocks).
[tool.ruff]
# Skip environment, VCS, and CI directories.
exclude = [".env", ".git", ".github", ".venv", "venv"]

# Matches the line length previously configured for black.
line-length = 79

[tool.ruff.lint]
# E501 (line-too-long) is left to the formatter; N802 allows
# non-snake_case function names used in this codebase.
ignore = ["E501", "N802"]

extend-select = [
"C4", # Flake8-comprehensions
"I", # isort
"SIM", # Flake8-simplify
"TC", # Flake8-type-checking
"TID", # Flake8-tidy-imports
"N", # pep8-naming
"PD", # Pandas
"NPY", # NumPy
]

[tool.ruff.lint.per-file-ignores]
# Allow re-export style imports (unused-import) in package __init__ files.
"__init__.py" = ["F401"]
14 changes: 5 additions & 9 deletions src/fmu/sumo/sim2sumo/_special_treatments.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Special treatment of some options used in res2df"""

import contextlib
import importlib
import logging
from inspect import signature
Expand Down Expand Up @@ -73,7 +74,7 @@ def find_functions_and_docstring(submod):
"extract": func,
"options": tuple(
name
for name in signature(func).parameters.keys()
for name in signature(func).parameters
if name not in {"deck", "eclfiles"}
),
"arrow_convertor": find_arrow_convertor(import_path),
Expand Down Expand Up @@ -101,10 +102,8 @@ def _define_submodules():
except AttributeError:
submod_string = "vfp._vfp"
submod = "vfp"
try:
with contextlib.suppress(AttributeError):
submodules[submod] = find_functions_and_docstring(submod_string)
except AttributeError:
pass # No df function in submod_path, skip it

return tuple(submodules.keys()), submodules

Expand All @@ -128,7 +127,7 @@ def tidy(frame):
)
unwanted_posix.unlink()
if "WELLETC" in frame.columns:
frame.drop(["WELLETC"], axis=1, inplace=True)
frame = frame.drop(["WELLETC"], axis=1)

return frame

Expand All @@ -151,10 +150,7 @@ def vfp_to_arrow_dict(datafile, options):
vfp_dict = {}
keyword = options.get("keyword", ["VFPPROD", "VFPINJ"])
vfpnumbers = options.get("vfpnumbers", None)
if isinstance(keyword, str):
keywords = [keyword]
else:
keywords = keyword
keywords = [keyword] if isinstance(keyword, str) else keyword

for keyword in keywords:
vfp_dict[keyword] = res2df.vfp._vfp.pyarrow_tables(
Expand Down
32 changes: 10 additions & 22 deletions src/fmu/sumo/sim2sumo/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,15 @@

import psutil
import yaml
from res2df.common import convert_lyrlist_to_zonemap, parse_lyrfile

from fmu.dataio import ExportData
from fmu.sumo.uploader import SumoConnection
from fmu.sumo.uploader._upload_files import upload_files
from fmu.sumo.sim2sumo._special_treatments import (
SUBMOD_DICT,
SUBMODULES,
)

from res2df.common import convert_lyrlist_to_zonemap, parse_lyrfile
from fmu.sumo.uploader import SumoConnection
from fmu.sumo.uploader._upload_files import upload_files


def yaml_load(file_name):
Expand Down Expand Up @@ -123,34 +122,26 @@ def find_datafiles(seedpoint=None):
datafiles.append(full_path)
else:
datafiles.extend(
[
f
for f in full_path.parent.rglob(
f"{full_path.name}"
)
]
list(full_path.parent.rglob(f"{full_path.name}"))
)
else:
for filetype in valid_filetypes:
if not full_path.is_dir():
# Search for valid files within the directory
datafiles.extend(
[
f
for f in full_path.parent.rglob(
list(
full_path.parent.rglob(
f"{full_path.name}*{filetype}"
)
]
)
)
else:
# Search for valid files within the directory
datafiles.extend(
[f for f in full_path.rglob(f"*{filetype}")]
)
datafiles.extend(list(full_path.rglob(f"*{filetype}")))
else:
# Search the current working directory if no seedpoint is provided
for filetype in valid_filetypes:
datafiles.extend([f for f in cwd.rglob(f"*/*/*{filetype}")])
datafiles.extend(list(cwd.rglob(f"*/*/*{filetype}")))
# Filter out files with duplicate stems, keeping the first occurrence
unique_stems = set()
unique_datafiles = []
Expand Down Expand Up @@ -327,10 +318,7 @@ def find_datefield(text_string):
str| None: date as string or None
"""
datesearch = re.search(".*_([0-9]{8})$", text_string)
if datesearch is not None:
date = datesearch.group(1)
else:
date = None
date = datesearch.group(1) if datesearch is not None else None
return date


Expand Down
1 change: 1 addition & 0 deletions src/fmu/sumo/sim2sumo/forward_models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import subprocess

from ert import (
ForwardModelStepJSON,
ForwardModelStepPlugin,
Expand Down
21 changes: 10 additions & 11 deletions src/fmu/sumo/sim2sumo/grid3d.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,26 @@
#!/usr/bin/env python
"""Upload grid3d data from reservoir simulators to Sumo
Does three things:
1. Extracts data from simulator to roff files
2. Adds the required metadata while exporting to disc
3. Uploads to Sumo
Does three things:
1. Extracts data from simulator to roff files
2. Adds the required metadata while exporting to disc
3. Uploads to Sumo
"""

import logging
from pathlib import Path
from datetime import datetime

from io import BytesIO
from pathlib import Path

import numpy as np
from resdata.grid import Grid
from resdata.resfile import ResdataRestartFile
from xtgeo import GridProperty, grid_from_file
from xtgeo.grid3d import _gridprop_import_eclrun as eclrun
from xtgeo.io._file import FileWrapper
from fmu.sumo.uploader._fileonjob import FileOnJob

from fmu.dataio import ExportData
from fmu.sumo.uploader._fileonjob import FileOnJob

from .common import find_datefield, give_name


Expand Down Expand Up @@ -59,10 +61,7 @@ def generate_grid3d_meta(datafile, obj, prefix, config):
else:
content = {"property": {"is_discrete": False}}

if prefix == "grid":
name = prefix
else:
name = f"{prefix}-{obj.name}"
name = prefix if prefix == "grid" else f"{prefix}-{obj.name}"
tagname = give_name(datafile)
exp_args = {
"config": config,
Expand Down
8 changes: 2 additions & 6 deletions src/fmu/sumo/sim2sumo/hook_implementations/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,7 @@ def _get_jobs_from_directory(directory):

# pylint: disable=no-value-for-parameter
@hook_implementation
@plugin_response(
plugin_name=PLUGIN_NAME
) # pylint: disable=no-value-for-parameter
@plugin_response(plugin_name=PLUGIN_NAME) # pylint: disable=no-value-for-parameter
def installable_jobs():
"""Return installable jobs
Expand All @@ -87,9 +85,7 @@ def installable_jobs():


@hook_implementation
@plugin_response(
plugin_name=PLUGIN_NAME
) # pylint: disable=no-value-for-parameter
@plugin_response(plugin_name=PLUGIN_NAME) # pylint: disable=no-value-for-parameter
def job_documentation(job_name):
sumo_fmu_jobs = set(installable_jobs().data.keys())
if job_name not in sumo_fmu_jobs:
Expand Down
11 changes: 6 additions & 5 deletions src/fmu/sumo/sim2sumo/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@

import argparse
import logging
import sys
from os import environ

from .common import Dispatcher, create_config_dict, yaml_load
from .grid3d import upload_simulation_runs
from .tables import upload_tables
from .common import yaml_load, Dispatcher, create_config_dict


def parse_args():
Expand Down Expand Up @@ -52,9 +53,9 @@ def main():
logger = logging.getLogger(__file__ + ".main")

missing = []
for envVar in REQUIRED_ENV_VARS:
if envVar not in environ:
missing.append(envVar)
for env_var in REQUIRED_ENV_VARS:
if env_var not in environ:
missing.append(env_var)

if missing:
print(
Expand All @@ -63,7 +64,7 @@ def main():
"This can happen if sim2sumo was called outside the ERT context.\n"
"Stopping."
)
exit()
sys.exit()

args = parse_args()

Expand Down
29 changes: 13 additions & 16 deletions src/fmu/sumo/sim2sumo/tables.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,34 @@
"""Upload tabular data from reservoir simulators to sumo
Does three things:
1. Extracts data from simulator to arrow files
2. Adds the required metadata while exporting to disc
3. Uploads to Sumo
Does three things:
1. Extracts data from simulator to arrow files
2. Adds the required metadata while exporting to disc
3. Uploads to Sumo
"""

import logging
import sys
from pathlib import Path
from typing import Union

import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
import pandas as pd
import res2df

from fmu.dataio import ExportData
from fmu.sumo.uploader._fileonjob import FileOnJob

from ._special_treatments import (
SUBMOD_DICT,
tidy,
convert_to_arrow,
tidy,
vfp_to_arrow_dict,
)

from pathlib import Path
from fmu.dataio import ExportData
from .common import (
find_datefield,
give_name,
)


SUBMOD_CONTENT = {
"summary": "timeseries",
"satfunc": "relperm",
Expand Down Expand Up @@ -140,12 +139,10 @@ def get_table(
logger = logging.getLogger(__file__ + ".get_table")
extract_df = SUBMOD_DICT[submod]["extract"]
arrow = kwargs.get("arrow", True)
try:
del kwargs[
"arrow"
] # This argument should not be passed to extract function
except KeyError:
pass # No arrow key to delete
from contextlib import suppress

with suppress(KeyError):
del kwargs["arrow"]
output = None
try:
logger.info(
Expand Down
Loading

0 comments on commit 85cd312

Please sign in to comment.