diff --git a/docs/rms_oneliners.rst b/docs/rms_oneliners.rst new file mode 100644 index 000000000..f969c792a --- /dev/null +++ b/docs/rms_oneliners.rst @@ -0,0 +1,47 @@ +RMS targeted functions +====================== + +For lowering the user threshold, some "one-liner" functions have been made for RMS. The purpose +is both to make it simpler for users to export certain items, and in addition secure a better +consistency. Hence the end user is not burdened to provide details, and only a small script with +a few lines will be needed. + +Currently only volumes are exposed, but this will be extended in the near future. + +Exporting volumetrics from RMS +------------------------------ + +Volumetrics in RMS is always done in so-called volume jobs. The intention with the simplification +is to use the RMS API behind the scenes to retrieve all necessary data needed for ``fmu.dataio``. + +Example: + +.. code-block:: python + + from fmu.dataio.export.rms import export_rms_volumetrics + ... + + # here 'Geogrid' is the grid model name, and 'geogrid_volumes' is the name of the volume job + outfiles = export_rms_volumetrics(project, "Geogrid", "geogrid_volumes") + + print(f"Output volumes to {outfiles}") + +Most ``dataio`` settings are here defaulted, but some keys can be altered optionally, e.g.: + +.. code-block:: python + + outfiles = export_rms_volumetrics( + project, + "Geogrid", + "geogrid_volumes", + global_config="../whatever/global_variables.yml", + tagname="vol", + subfolder="volumes", + ) + + +Details +------- + +.. 
automodule:: fmu.dataio.export.rms.volumetrics + :members: diff --git a/docs/src/conf.py b/docs/src/conf.py index 1ec4a1f04..2ff1446f5 100755 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -69,8 +69,8 @@ def filter(self, record: logging.LogRecord) -> bool: # Sort members by input order in classes autodoc_member_order = "bysource" autodoc_default_flags = ["members", "show_inheritance"] -# Mocking ert, pydantic module -autodoc_mock_imports = ["ert", "pydantic"] +# Mocking ert, rms, pydantic module +autodoc_mock_imports = ["ert", "pydantic", "rmsapi", "_rmsapi", "roxar", "_roxar"] napoleon_include_special_with_doc = False @@ -85,6 +85,7 @@ def filter(self, record: logging.LogRecord) -> bool: current_year = date.today().year copyright = f"Equinor {current_year} (fmu-dataio release {release})" + exclude_patterns = ["_build"] pygments_style = "sphinx" diff --git a/docs/src/index.rst b/docs/src/index.rst index 2d957bca4..05773b785 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -40,5 +40,6 @@ post-processing services, new and improved cloud-only version of Webviz and much overview preparations examples + rms_oneliners apiref/modules datamodel/index diff --git a/src/fmu/dataio/__init__.py b/src/fmu/dataio/__init__.py index f16056158..0b4cb0f52 100644 --- a/src/fmu/dataio/__init__.py +++ b/src/fmu/dataio/__init__.py @@ -1,11 +1,12 @@ """Top-level package for fmu-dataio""" -# noqa -from fmu.dataio.dataio import AggregatedData # noqa # type: ignore -from fmu.dataio.dataio import ExportData # noqa # type: ignore -from fmu.dataio.dataio import InitializeCase # noqa # type: ignore -from fmu.dataio.dataio import read_metadata # noqa -from fmu.dataio.preprocessed import ExportPreprocessedData # noqa # type: ignore +from fmu.dataio.dataio import ( + AggregatedData, + ExportData, + InitializeCase, + read_metadata, +) +from fmu.dataio.preprocessed import ExportPreprocessedData try: from .version import version @@ -13,3 +14,11 @@ __version__ = version except ImportError: 
__version__ = "0.0.0" + +__all__ = [ + "AggregatedData", + "ExportData", + "InitializeCase", + "read_metadata", + "ExportPreprocessedData", +] diff --git a/src/fmu/dataio/export/__init__.py b/src/fmu/dataio/export/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/fmu/dataio/export/rms/__init__.py b/src/fmu/dataio/export/rms/__init__.py new file mode 100644 index 000000000..5fcbf3b16 --- /dev/null +++ b/src/fmu/dataio/export/rms/__init__.py @@ -0,0 +1,3 @@ +from .volumetrics import export_rms_volumetrics + +__all__ = ["export_rms_volumetrics"] diff --git a/src/fmu/dataio/export/rms/_conditional_rms_imports.py b/src/fmu/dataio/export/rms/_conditional_rms_imports.py new file mode 100644 index 000000000..ce66e95a2 --- /dev/null +++ b/src/fmu/dataio/export/rms/_conditional_rms_imports.py @@ -0,0 +1,46 @@ +"""Handle rmsapi or roxar (deprecated version of rmsapi); only present inside RMS""" + +from __future__ import annotations + +import warnings +from typing import TYPE_CHECKING, Any + +from fmu.dataio._logging import null_logger + +_logger = null_logger(__name__) + + +def import_rms_package() -> dict[str, Any] | None: + """ + Attempts to import the 'rmsapi' package first. If 'rmsapi' is not available, + it attempts to import the 'roxar' package while suppressing deprecation warnings. + Returns a dictionary with the imported modules or raises ImportError if neither + is available. + """ + try: + import rmsapi + import rmsapi.jobs as jobs + + return {"rmsapi": rmsapi, "jobs": jobs} + except ImportError: + try: + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", category=DeprecationWarning, module="roxar" + ) + import roxar as rmsapi + import roxar.jobs as jobs + + return {"rmsapi": rmsapi, "jobs": jobs} + except ImportError: + raise ImportError( + "Neither 'roxar' nor 'rmsapi' are available. You have to be inside " + "RMS to use this function." 
+ ) + + +if TYPE_CHECKING: + import rmsapi + import rmsapi.jobs + + _logger.debug("Importing both %s and %s", rmsapi, rmsapi.jobs) diff --git a/src/fmu/dataio/export/rms/volumetrics.py b/src/fmu/dataio/export/rms/volumetrics.py new file mode 100644 index 000000000..18adb1d0f --- /dev/null +++ b/src/fmu/dataio/export/rms/volumetrics.py @@ -0,0 +1,243 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Final +from warnings import warn + +import pandas as pd +from packaging.version import parse as versionparse + +import fmu.dataio as dio +from fmu.config.utilities import yaml_load +from fmu.dataio._logging import null_logger + +from ._conditional_rms_imports import import_rms_package + +_modules = import_rms_package() +if _modules: + rmsapi = _modules["rmsapi"] + jobs = _modules["jobs"] + + +_logger: Final = null_logger(__name__) + +# rename columns to FMU standard +_RENAME_COLUMNS_FROM_RMS: Final = { + "Proj. 
real.": "REAL", + "Zone": "ZONE", + "Segment": "REGION", + "Boundary": "LICENSE", + "Facies": "FACIES", + "BulkOil": "BULK_OIL", + "NetOil": "NET_OIL", + "PoreOil": "PORV_OIL", + "HCPVOil": "HCPV_OIL", + "STOIIP": "STOIIP_OIL", + "AssociatedGas": "ASSOCIATEDGAS_OIL", + "BulkGas": "BULK_GAS", + "NetGas": "NET_GAS", + "PoreGas": "PORV_GAS", + "HCPVGas": "HCPV_GAS", + "GIIP": "GIIP_GAS", + "AssociatedLiquid": "ASSOCIATEDOIL_GAS", + "Bulk": "BULK_TOTAL", + "Net": "NET_TOTAL", + "Pore": "PORV_TOTAL", +} + + +@dataclass +class _ExportVolumetricsRMS: + project: Any + grid_name: str + volume_job_name: str + + # optional and defaulted + global_config: str | Path | dict = "../../fmuconfig/output/global_variables.yml" + forcefolder: str = "" # allowed until deprecated + subfolder: str = "" + name: str = "" + tagname: str = "vol" + classification: str = "restricted" + workflow: str = "rms volumetric run" + + # internal storage instance variables + _global_config: dict = field(default_factory=dict, init=False) + _volume_job: dict = field(default_factory=dict, init=False) + _volume_table_name: str = field(default="", init=False) + _dataframe: pd.DataFrame = field(default_factory=pd.DataFrame, init=False) + _units: str = field(default="metric", init=False) + + def __post_init__(self) -> None: + _logger.debug("Process data, estiblish state prior to export.") + self._check_rmsapi_version() + self._set_global_config() + self._rms_volume_job_settings() + self._read_volume_table_name_from_rms() + self._voltable_as_dataframe() + self._set_units() + self._warn_if_forcefolder() + _logger.debug("Process data... DONE") + + @staticmethod + def _check_rmsapi_version() -> None: + """Check if we are working in a RMS API, and also check RMS versions?""" + _logger.debug("Check API version...") + if versionparse(rmsapi.__version__) < versionparse("1.7"): + raise RuntimeError( + "You need at least API version 1.7 (RMS 13.1) to use this function." + ) + _logger.debug("Check API version... 
DONE") + + def _set_global_config(self) -> None: + """Set the global config data by reading the file.""" + _logger.debug("Set global config...") + + if isinstance(self.global_config, dict): + self._global_config = self.global_config + _logger.debug("Set global config (from input dictionary)... DONE!") + return + + global_config_path = Path(self.global_config) + + if not global_config_path.is_file(): + raise FileNotFoundError( + f"Cannot find file for global config: {self.global_config}" + ) + self._global_config = yaml_load(global_config_path) + _logger.debug("Read config from yaml... DONE") + + def _rms_volume_job_settings(self) -> None: + """Get information out from the RMS job API.""" + _logger.debug("RMS VOLJOB settings...") + self._volume_job = jobs.Job.get_job( + owner=["Grid models", self.grid_name, "Grid"], + type="Volumetrics", + name=self.volume_job_name, + ).get_arguments() + _logger.debug("RMS VOLJOB settings... DONE") + + def _read_volume_table_name_from_rms(self) -> None: + """Read the volume table name from RMS.""" + _logger.debug("Read volume table name from RMS...") + voltable = self._volume_job.get("Report") + if isinstance(voltable, list): + voltable = voltable[0] + self._volume_table_name = voltable.get("ReportTableName") + + if not self._volume_table_name: + raise RuntimeError( + "You need to configure output to Report file: Report table " + "in the volumetric job. Provide a table name and rerun the job." + ) + + _logger.debug("The volume table name is %s", self._volume_table_name) + _logger.debug("Read volume table name from RMS... 
DONE") + + def _voltable_as_dataframe(self) -> None: + """Convert table to pandas dataframe""" + _logger.debug("Read values and convert to pandas dataframe...") + dict_values = ( + self.project.volumetric_tables[self._volume_table_name] + .get_data_table() + .to_dict() + ) + _logger.debug("Dict values are: %s", dict_values) + self._dataframe = pd.DataFrame.from_dict(dict_values) + self._dataframe.rename(columns=_RENAME_COLUMNS_FROM_RMS, inplace=True) + self._dataframe.drop("REAL", axis=1, inplace=True, errors="ignore") + + _logger.debug("Read values and convert to pandas dataframe... DONE") + + def _set_units(self) -> None: + """See if the RMS project is defined in metric or feet.""" + + units = self.project.project_units + _logger.debug("Units are %s", units) + self._units = str(units) + + def _warn_if_forcefolder(self) -> None: + if self.forcefolder: + warn( + "A 'forcefolder' is set. This is strongly discouraged and will be " + "removed in coming versions", + FutureWarning, + ) + + def _export_volume_table(self) -> dict[str, str]: + """Do the actual volume table export using dataio setup.""" + + edata = dio.ExportData( + config=self._global_config, + content="volumes", + unit="m3" if self._units == "metric" else "ft3", + vertical_domain={"depth": "msl"}, + workflow=self.workflow, + forcefolder=self.forcefolder, + classification=self.classification, + tagname=self.tagname, + name=self.name if self.name else f"{self.grid_name}_volumes", + rep_include=False, + ) + + out = edata.export(self._dataframe) + _logger.debug("Volume result to: %s", out) + return {"volume_table": out} + + def export(self) -> dict[str, str]: + """Export the volume table.""" + return self._export_volume_table() + + +def export_rms_volumetrics( + project: Any, + grid_name: str, + volume_job_name: str, + global_config: str | Path | dict = "../../fmuconfig/output/global_variables.yml", + forcefolder: str = "", # unsure if we shall allow this? 
+ subfolder: str = "", + name: str = "", + tagname: str = "", + classification: str = "restricted", + workflow: str = "rms volumetric run", +) -> dict[str, str]: + """Simplified interface when exporting volume tables (and assosiated data) from RMS. + + As the export_volumetrics may have multiple output (storing both tables, maps and + 3D grids), the output from this function is always a dictionary. The table is + mandatory output, while maps and 3D grid data are optional (not yet implemented). + + Args: + project: The 'magic' project variable in RMS. + grid_name: Name of 3D grid model in RMS. + volume_job_name: Name of the volume job. + global_config: Optional. The global config can either point to the + global_variables file, or it can be a dictionary. As default, it assumes + a the current standard in FMU: + ``'../../fmuconfig/output/global_variables.yml'`` + forcefolder: Optional. As default, volume tables will be exported to the agreed + file structure, and the folder name will be 'tables'. This can be + overriden here, but there will be warnings. For optional assosiated + volume maps and grids, the default folder names cannot be changed. + subfolder: Name of subfolder for local storage, below the standard folder. + name: Optional. Name of export item. Is defaulted to name of grid + '_volumes'. + tagname: Optional. Defaulted to 'vol' for this function. Tagnames are part of + file names, and should not be applied as metadata. + classification: Optional. Use 'internal' or 'restricted' (default). + workflow: Optional. Information about the work flow; defaulted to + 'rms volumetrics'. 
+ """ + + return _ExportVolumetricsRMS( + project, + grid_name, + volume_job_name, + global_config=global_config, + forcefolder=forcefolder, + subfolder=subfolder, + name=name, + tagname=tagname, + classification=classification, + workflow=workflow, + ).export() diff --git a/tests/conftest.py b/tests/conftest.py index 938ec0106..b1ebd188a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -47,6 +47,14 @@ def _current_function_name(): return inspect.currentframe().f_back.f_code.co_name +@pytest.fixture(scope="function", autouse=True) +def return_to_original_directory(): + # store original folder, and restore after each function (before and after yield) + original_directory = os.getcwd() + yield + os.chdir(original_directory) + + @pytest.fixture def set_export_data_inside_rms(monkeypatch): monkeypatch.setattr(ExportData, "_inside_rms", True) @@ -217,6 +225,22 @@ def rmssetup(tmp_path_factory, global_config2_path): return rmspath +@pytest.fixture(scope="module") +def rmssetup_with_fmuconfig(tmp_path_factory, global_config2_path): + """Create the folder structure to mimic RMS project and standard global config.""" + + tmppath = tmp_path_factory.mktemp("revision") + rmspath = tmppath / "rms/model" + rmspath.mkdir(parents=True, exist_ok=True) + fmuconfigpath = tmppath / "fmuconfig/output" + fmuconfigpath.mkdir(parents=True, exist_ok=True) + shutil.copy(global_config2_path, fmuconfigpath) + + logger.debug("Ran %s", _current_function_name()) + + return rmspath + + @pytest.fixture(name="rmsglobalconfig", scope="module") def fixture_rmsglobalconfig(rmssetup): """Read global config.""" diff --git a/tests/test_export_rms/conftest.py b/tests/test_export_rms/conftest.py new file mode 100644 index 000000000..a95f223a2 --- /dev/null +++ b/tests/test_export_rms/conftest.py @@ -0,0 +1,227 @@ +"""The conftest.py, providing magical fixtures to tests.""" + +import sys +from unittest.mock import MagicMock + +import pytest + +# retrieved from Drogon in RMS 14.2 +VOLJOB_PARAMS = { 
+ "Input": [ + { + "BulkVolumeProperty": [], + "SelectedZoneNames": ["Valysar", "Therys", "Volon"], + "RegionProperty": ["Grid models", "Geogrid", "Region"], + "SelectedRegionNames": [ + "WestLowland", + "CentralSouth", + "CentralNorth", + "NorthHorst", + "CentralRamp", + "CentralHorst", + "EastLowland", + ], + "FaciesProperty": ["Grid models", "Geogrid", "FACIES"], + "SelectedFaciesNames": [ + "Floodplain", + "Channel", + "Crevasse", + "Coal", + "Calcite", + "Offshore", + "Lowershoreface", + "Uppershoreface", + ], + "LicenseBoundaries": [], + } + ], + "Output": [ + { + "Prefix": "", + "UseOil": True, + "UseGas": True, + "UseTotals": True, + "AreaAverage": False, + "CreateDiscreteFluidProperty": False, + "AcceptNegativeCellVolumes": False, + "MapLayout": [], + "MapOutput": "CLIPBOARD", + "MapIncrementMultiplier": 1.0, + "Calculations": [ + { + "Type": "BULK", + "CreateProperty": True, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "PORE", + "CreateProperty": True, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "HCPV", + "CreateProperty": False, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "STOIIP", + "CreateProperty": False, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "ASSOCIATED_GAS", + "CreateProperty": False, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "GIIP", + "CreateProperty": False, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + { + "Type": "ASSOCIATED_LIQUID", + "CreateProperty": False, + "CreateZoneMap": False, + "CreateTotalMap": False, + }, + ], + } + ], + "Report": [ + { + "ReportLayout": "TABULAR", + "FileType": "EXCEL", + "FileName": "", + "AppendRelisationInfo": False, + "UseRealizationNumber": True, + "ExportUnits": False, + "AddHeaders": False, + "ScientificNotation": False, + "DecimalCount": 2, + "OutputGrouping": ["Zone", "Region index"], + "ReportTableName": "geogrid_volumes", + } + ], + "Variables": [ 
+ { + "Formation Variables": [ + { + "InputType": "ALL_ZONES_AND_REGIONS", + "InputSource": "TABLE", + "TableValues": 1.0, + "DataInput": [], + "Name": "NG", + }, + { + "InputType": "ALL_ZONES_AND_REGIONS", + "InputSource": "TABLE", + "TableValues": 0.0, + "DataInput": [["Grid models", "Geogrid", "PHIT"]], + "Name": "POR", + }, + ], + "Gas Variables": [ + { + "InputType": "ALL_ZONES_AND_REGIONS", + "InputSource": "TABLE", + "TableValues": 0.0, + "DataInput": [["Grid models", "Geogrid", "SW"]], + "Name": "SW", + }, + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "BG_FACTOR", + }, + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "LGR_RATIO", + }, + ], + "Oil Variables": [ + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "GOC", + }, + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "OWC", + }, + { + "InputType": "ALL_ZONES_AND_REGIONS", + "InputSource": "TABLE", + "TableValues": 0.0, + "DataInput": [["Grid models", "Geogrid", "SW"]], + "Name": "SW", + }, + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "BO_FACTOR", + }, + { + "InputType": "EACH_REGION", + "InputSource": "REGION_MODEL", + "TableValues": 0.0, + "DataInput": [], + "Name": "GOR_RATIO", + }, + ], + } + ], +} + + +@pytest.fixture(autouse=True) +def mock_rmsapi_package(monkeypatch): + # Create a mock rmsapi module + mock_rmsapi = MagicMock() + monkeypatch.setitem(sys.modules, "rmsapi", mock_rmsapi) + mock_x_rmsapi = MagicMock() + monkeypatch.setitem(sys.modules, "_rmsapi", mock_x_rmsapi) + mock_rmsapi.__version__ = "1.7" + mock_jobs_rmsapi = MagicMock() + monkeypatch.setitem(sys.modules, "rmsapi.jobs", mock_jobs_rmsapi) + + 
mock_rmsapi.jobs.Job.get_job(...).get_arguments.return_value = VOLJOB_PARAMS + yield mock_rmsapi, mock_x_rmsapi, mock_jobs_rmsapi + + +@pytest.fixture(autouse=True) +def mock_roxar_package(monkeypatch): + # Create a mock roxar module (roxar is renamed to rmsapi from RMS 14.x) + mock_roxar = MagicMock() + monkeypatch.setitem(sys.modules, "roxar", mock_roxar) + mock_x_roxar = MagicMock() + monkeypatch.setitem(sys.modules, "_roxar", mock_x_roxar) + mock_roxar.__version__ = "1.7" + + mock_roxar.jobs.Job.get_job(...).get_arguments.return_value = VOLJOB_PARAMS + yield mock_roxar, mock_x_roxar + + +@pytest.fixture(autouse=True) +def mock_project_variable(): + # A mock_project variable for the RMS 'project' (potentially extend for later use) + mock_project = MagicMock() + + yield mock_project diff --git a/tests/test_export_rms/test_export_rms_volumetrics.py b/tests/test_export_rms/test_export_rms_volumetrics.py new file mode 100644 index 000000000..0d8f9535c --- /dev/null +++ b/tests/test_export_rms/test_export_rms_volumetrics.py @@ -0,0 +1,74 @@ +"""Test the dataio running RMS specific utility function for volumetrics""" + +import os +from pathlib import Path + +import fmu.dataio as dataio +import pandas as pd +import pytest +from fmu.dataio._logging import null_logger + +from tests.utils import inside_rms + +logger = null_logger(__name__) + + +VOLDATA = (Path("tests/data/drogon/tabular/geogrid--vol.csv")).absolute() + + +@pytest.fixture +def voltable_as_dataframe(): + return pd.read_csv(VOLDATA) + + +@inside_rms +def test_rms_volumetrics_export_class( + mock_project_variable, voltable_as_dataframe, rmssetup_with_fmuconfig, monkeypatch +): + """See mocks in local conftest.py""" + + import rmsapi # type: ignore # noqa + import rmsapi.jobs as jobs # type: ignore # noqa + + from fmu.dataio.export.rms.volumetrics import _ExportVolumetricsRMS + + os.chdir(rmssetup_with_fmuconfig) + + assert rmsapi.__version__ == "1.7" + assert "Report" in 
jobs.Job.get_job("whatever").get_arguments.return_value + + instance = _ExportVolumetricsRMS( + mock_project_variable, + "Geogrid", + "geogrid_vol", + ) + + assert instance._volume_table_name == "geogrid_volumes" + + # patch the dataframe which originally shall be retrieved from RMS + monkeypatch.setattr(instance, "_dataframe", voltable_as_dataframe) + + out = instance._export_volume_table() + metadata = dataio.read_metadata(out["volume_table"]) + + assert "volumes" in metadata["data"]["content"] + + +@inside_rms +def test_rms_volumetrics_export_function( + mock_project_variable, rmssetup_with_fmuconfig +): + """Test the public function.""" + + from fmu.dataio.export.rms import export_rms_volumetrics + + os.chdir(rmssetup_with_fmuconfig) + + result = export_rms_volumetrics(mock_project_variable, "Geogrid", "geogrid_volume") + vol_table_file = result["volume_table"] + + assert Path(vol_table_file).is_file() + metadata = dataio.read_metadata(vol_table_file) + logger.debug("Volume_table_file is %s", vol_table_file) + + assert "volumes" in metadata["data"]["content"]