Skip to content

Commit

Permalink
Split per vfp table (#64)
Browse files Browse the repository at this point in the history
* Add function to generate separate vfp tables

* Add functionality for split tables for vfp

* Add test data for vfp

* Fix docstring
* Change fixture scope
  • Loading branch information
daniel-sol authored Jun 26, 2024
1 parent aec4c1d commit 8d0fa88
Show file tree
Hide file tree
Showing 12 changed files with 8,935 additions and 26 deletions.
21 changes: 21 additions & 0 deletions src/fmu/sumo/sim2sumo/_special_treatments.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,27 @@ def tidy(frame):
SUBMODULES, SUBMOD_DICT = _define_submodules()


def vfp_to_arrow_dict(datafile, options):
    """Extract vfp arrow tables from a simulator run.

    Args:
        datafile (str): path to the datafile to extract from
        options (dict): options for extraction; recognised keys are
            "keyword" (str, defaults to "VFPPROD") and
            "vfpnumbers" (str or None, forwarded as vfpnumbers_str)

    Returns:
        tuple: the vfp keyword used, and the extracted pyarrow tables
            (one table per vfp number)
    """
    logger = logging.getLogger(__file__ + ".vfp_to_arrow_dict")
    resdatafiles = res2df.ResdataFiles(datafile)
    # Default to production curves when no keyword is given
    keyword = options.get("keyword", "VFPPROD")
    vfpnumbers = options.get("vfpnumbers")
    # NOTE(review): relies on a private res2df API (res2df.vfp._vfp);
    # may break on a res2df upgrade — confirm no public equivalent exists
    arrow_tables = res2df.vfp._vfp.pyarrow_tables(
        resdatafiles.get_deck(), keyword=keyword, vfpnumbers_str=vfpnumbers
    )
    logger.debug("Extracted %s vfp tables", len(arrow_tables))
    return keyword, arrow_tables


def give_help(submod, only_general=False):
"""Give descriptions of variables available for submodule
Expand Down
1 change: 1 addition & 0 deletions src/fmu/sumo/sim2sumo/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,7 @@ def prepare_list_for_sendoff(datatype, simconfig, datafiles, paths, grid3d):
"""
logger = logging.getLogger(__file__ + ".prepare_list_for_sendoff")
submods = find_datatypes(datatype, simconfig)
logger.debug("Submodules to extract with: %s", submods)
outdict = {}
options = simconfig.get("options", {"arrow": True})

Expand Down
69 changes: 51 additions & 18 deletions src/fmu/sumo/sim2sumo/tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,12 @@
SUBMOD_DICT,
tidy,
convert_to_arrow,
vfp_to_arrow_dict,
)
from .common import (
filter_options,
fix_suffix,
generate_meta,
get_case_uuid,
give_name,
convert_to_bytestring,
convert_2_sumo_file,
find_datafiles_no_seedpoint,
)


Expand Down Expand Up @@ -87,9 +83,12 @@ def generate_table_meta(datafile, obj, tagname, config):
"""
logger = logging.getLogger(__name__ + ".generate_table_meta")

metadata = generate_meta(
config, datafile, tagname, obj, SUBMOD_CONTENT.get(tagname, "property")
)
if "vfp" in tagname.lower():
content = "lift_curves"
else:
content = SUBMOD_CONTENT.get(tagname, "property")

metadata = generate_meta(config, datafile, tagname, obj, content)
logger.debug("Generated meta are %s", metadata)

return metadata
Expand Down Expand Up @@ -229,6 +228,32 @@ def upload_tables(sim2sumoconfig, config, dispatcher):
)


def upload_vfp_tables_from_simulation_run(
    datafile, options, config, dispatcher
):
    """Upload vfp tables from one simulator run to Sumo.

    Args:
        datafile (str): the datafile defining the simulation run
        options (dict): the options for vfp extraction
            (e.g. "keyword", "vfpnumbers")
        config (dict): the fmu config with metadata
        dispatcher (sim2sumo.common.Dispatcher): job dispatcher
    """
    logger = logging.getLogger(
        __name__ + ".upload_vfp_tables_from_simulation_run"
    )
    keyword, tables = vfp_to_arrow_dict(datafile, options)
    for table in tables:
        # TABLE_NUMBER is stored under a bytes key in the arrow schema
        # metadata; decode() already yields a str, no str() wrapper needed
        table_number = table.schema.metadata[b"TABLE_NUMBER"].decode("utf-8")
        logger.debug(table)
        tagname = f"{keyword}_{table_number}"
        logger.debug("Generated tagname: %s", tagname)
        sumo_file = convert_table_2_sumo_file(datafile, table, tagname, config)
        if sumo_file is None:
            # Mirror upload_tables_from_simulation_run: skip empty
            # results instead of handing None to the dispatcher
            logger.warning(
                "Table with tagname %s extracted from %s returned nothing",
                tagname,
                datafile,
            )
            continue
        dispatcher.add(sumo_file)


def upload_tables_from_simulation_run(
datafile, submod_and_options, config, dispatcher
):
Expand All @@ -246,16 +271,24 @@ def upload_tables_from_simulation_run(
if submod == "grid3d":
logger.debug("No tables for grid3d, skipping")
continue
table = get_table(datafile, submod, options)
logger.debug("Sending %s onto file creation", table)
sumo_file = convert_table_2_sumo_file(datafile, table, submod, config)
if sumo_file is None:
logger.warning(
"Table with datatype %s extracted from %s returned nothing",
submod,
datafile,

if submod == "vfp":
upload_vfp_tables_from_simulation_run(
datafile, options, config, dispatcher
)
continue
dispatcher.add(sumo_file)
else:
table = get_table(datafile, submod, options)
logger.debug("Sending %s onto file creation", table)
sumo_file = convert_table_2_sumo_file(
datafile, table, submod, config
)
if sumo_file is None:
logger.warning(
"Table with datatype %s extracted from %s returned nothing",
submod,
datafile,
)
continue
dispatcher.add(sumo_file)

logger.info("%s properties", count)
27 changes: 20 additions & 7 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
from datetime import datetime
from pathlib import Path

import os
import uuid
import pytest
import yaml
from fmu.config.utilities import yaml_load
from fmu.sumo.uploader import CaseOnDisk, SumoConnection
from httpx import HTTPStatusError
from sumo.wrapper import SumoClient

from xtgeo import gridproperty_from_file
Expand Down Expand Up @@ -85,7 +85,7 @@ def _fix_ert_env(monkeypatch):
monkeypatch.setenv("_ERT_RUNPATH", "./")


@pytest.fixture(autouse=True, scope="session", name="case_uuid")
@pytest.fixture(autouse=True, scope="function", name="case_uuid")
def _fix_register(scratch_files, token):

root = scratch_files[0].parents[1]
Expand Down Expand Up @@ -121,18 +121,31 @@ def _fix_xtgeogrid(eightcells_datafile):


@pytest.fixture(name="teardown", autouse=True, scope="session")
def fixture_teardown(case_uuid, sumo, request):
"""Remove case when all tests are run
def fixture_teardown(sumo, request):
"""Remove all test case when all tests are run
Args:
case_uuid (str): uuid of test case
sumo (SumoClient): Client to given sumo environment
"""

def kill():
print(f"Killing object {case_uuid}!")
path = f"/objects('{case_uuid}')"
query = '$query=fmu.case.name:"test-sim2sumo" AND class:case&$size=100'

results = sumo.get("/search", query).json()

print(f'{results["hits"]["total"]["value"]} cases found')

hit_list = results["hits"]["hits"]
for hit in hit_list:
case_name = hit["_source"]["fmu"]["case"]["name"]
case_uuid = hit["_id"]
path = f"/objects('{case_uuid}')"
try:

sumo.delete(path)
sumo.delete(path)
except HTTPStatusError:
print(f"{case_uuid} Allready gone..")
print(f"Killed case with id {case_uuid} (name: {case_name})")

request.addfinalizer(kill)
Loading

0 comments on commit 8d0fa88

Please sign in to comment.