Skip to content

Commit

Permalink
Merge branch 'main' into change-name-and-tagname-for-grids
Browse files Browse the repository at this point in the history
  • Loading branch information
equinor-ruaj committed Oct 17, 2024
2 parents b8c05cf + 5b30fb8 commit 54db721
Show file tree
Hide file tree
Showing 18 changed files with 282 additions and 517 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/run_tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ jobs:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-latest]
max-parallel: 1
permissions:
contents: read
id-token: write
Expand Down Expand Up @@ -57,4 +58,4 @@ jobs:
python -c 'import sys; print(sys.platform)'
python -c 'import os; import sys; print(os.path.dirname(sys.executable))'
pytest --log-cli-level WARNING -s --timeout=300
pytest --log-cli-level=WARNING -s --timeout=300
50 changes: 1 addition & 49 deletions src/fmu/sumo/sim2sumo/_special_treatments.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,7 @@ def convert_to_arrow(frame):
logger.debug("!!!!Using convert to arrow!!!")
standard = {"DATE": pa.timestamp("ms")}
if "DATE" in frame.columns:
frame["DATE"] = pd.to_datetime(
frame["DATE"], infer_datetime_format=True
)
frame["DATE"] = pd.to_datetime(frame["DATE"])
scheme = []
for column_name in frame.columns:
if pd.api.types.is_string_dtype(frame[column_name]):
Expand Down Expand Up @@ -85,7 +83,6 @@ def find_functions_and_docstring(submod):
if name not in {"deck", "eclfiles"}
),
"arrow_convertor": find_arrow_convertor(import_path),
"doc": func.__doc__,
}

return returns
Expand Down Expand Up @@ -192,30 +189,20 @@ def vfp_to_arrow_dict(datafile, options):
Returns:
tuple: vfp keyword, then dictionary with key: table_name, value: table
"""
logger = logging.getLogger(__file__ + ".vfp_to_arrow_dict")
filepath_no_suffix = Path(datafile).with_suffix("")
resdatafiles = res2df.ResdataFiles(filepath_no_suffix)
vfp_dict = {}
keyword = options.get("keyword", ["VFPPROD", "VFPINJ"])
logger.debug("keyword is %s", keyword)
vfpnumbers = options.get("vfpnumbers", None)
if isinstance(keyword, str):
keywords = [keyword]
else:
keywords = keyword

logger.debug("%s keywords to go through", len(keywords))

for keyword in keywords:
vfp_dict[keyword] = res2df.vfp._vfp.pyarrow_tables(
resdatafiles.get_deck(), keyword=keyword, vfpnumbers_str=vfpnumbers
)

logger.debug(
"Keyword %s, extracted %s vfp tables",
keyword,
len(vfp_dict[keyword]),
)
return vfp_dict


Expand Down Expand Up @@ -264,38 +251,3 @@ def add_md_to_rft(rft_table, md_file_path):
logger.debug("Head of merged table to return:\n %s", rft_table.head())

return rft_table


def give_help(submod, only_general=False):
    """Build a help text describing the variables available for a submodule.

    Args:
        submod (str | None): name of the submodule to describe. When None,
            only the general usage information is returned.
        only_general (bool): when True, return only the general information
            and skip the submodule-specific documentation.

    Returns:
        str: description of submodule input
    """
    # NOTE: the config keyword below was previously misspelled "sim2simo";
    # the code reads config["sim2sumo"], so the help text must match.
    general_info = """
    This utility uses the library ecl2csv, but uploads directly to sumo. Required options are:
    A config file in yaml format, where you specify the variables to extract. What is required
    is a keyword in the config called "sim2sumo". Under there you have three optional arguments:
    * datafile: this can be a string, a list, or it can be absent altogether
    * datatypes: this needs to be a list, or non existent
    * options: The options are listed below in the original documentation from ecl2csv. The eclfiles
      option is replaced with what is under datafile
    """
    if submod is None:
        # Without a submodule there is nothing specific to document
        only_general = True
    if only_general:
        text_to_return = general_info
    else:
        try:
            text_to_return = general_info + SUBMOD_DICT[submod]["doc"]
        except KeyError:
            # Unknown submodule: list the valid options instead of failing
            text_to_return = (
                f"subtype {submod} does not exist!!, existing options:\n"
                + "\n".join(SUBMODULES)
            )

    return text_to_return
57 changes: 7 additions & 50 deletions src/fmu/sumo/sim2sumo/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,7 @@
import psutil
import yaml


from fmu.sumo.uploader import SumoConnection
from fmu.sumo.uploader._fileonjob import FileOnJob
from fmu.sumo.uploader._upload_files import upload_files
from fmu.sumo.sim2sumo._special_treatments import (
SUBMOD_DICT,
Expand Down Expand Up @@ -109,11 +107,12 @@ def find_full_path(datafile, paths):
try:
return paths[data_name]
except KeyError:
mess = (
"Datafile %s, with derived name %s, not found in %s,"
" have to skip"
logger.warning(
"Datafile %s, with derived name %s, not found in %s, have to skip",
datafile,
data_name,
paths,
)
logger.warning(mess, datafile, data_name, paths)
return None


Expand All @@ -132,7 +131,7 @@ def find_datafile_paths():
paths[name] = data_path
else:
logger.warning(
"Name %s from file %s allready used", name, data_path
"Name %s from file %s already used", name, data_path
)

return paths
Expand All @@ -156,10 +155,7 @@ def create_config_dict(config, datafile=None, datatype=None):
logger.debug("Input config keys are %s", config.keys())

simconfig = config.get("sim2sumo", {})
if len(simconfig) == 0:
logger.warning("We are starting from scratch")
else:
logger.debug("This is the starting point %s", simconfig)
logger.debug("sim2sumo config %s", simconfig)
grid3d = simconfig.get("grid3d", False)
if isinstance(simconfig, bool):
simconfig = {}
Expand Down Expand Up @@ -398,7 +394,6 @@ def _upload(self):

def finish(self):
    """Cleanup: announce completion and trigger the final upload of queued files."""
    # Final progress marker before the last upload round
    self._logger.info("Final stretch")
    self._upload()


Expand All @@ -419,44 +414,6 @@ def find_datefield(text_string):
return date


def convert_2_sumo_file(obj, converter, metacreator, meta_args):
    """Convert object to sumo file
    Args:
        obj (object): the object
        converter (func): function to convert to bytestring
        metacreator (func): the function that creates the metadata
        meta_args (iterable): arguments for generating metadata
    Returns:
        SumoFile: file containing obj
    """
    log = logging.getLogger(__name__ + ".convert_2_sumo_file")
    log.debug("Obj type: %s", type(obj))
    log.debug("Convert function %s", converter)
    log.debug("Meta function %s", metacreator)
    log.debug("Arguments for creating metadata %s", meta_args)

    # Guard clause: nothing to convert, propagate the None back to the caller.
    if obj is None:
        log.warning("Nothing to do with None object")
        return obj

    payload = converter(obj)
    meta = metacreator(*meta_args)
    log.debug("Metadata created")

    # Sanity-check the producer callables before handing off to the uploader.
    assert isinstance(meta, dict), f"meta should be dict, but is {type(meta)}"
    assert isinstance(
        payload, bytes
    ), f"bytestring should be bytes, but is {type(payload)}"

    result = FileOnJob(payload, meta)
    log.debug("Init of sumo file")
    result.path = meta["file"]["relative_path"]
    result.metadata_path = ""
    result.size = len(result.byte_string)
    log.debug("Returning from func")
    return result


def nodisk_upload(files, parent_id, config_path, env="prod", connection=None):
"""Upload files to sumo
Expand Down
Loading

0 comments on commit 54db721

Please sign in to comment.