diff --git a/setup.py b/setup.py index efc576e0b..e8c7a4371 100644 --- a/setup.py +++ b/setup.py @@ -46,6 +46,7 @@ "InplaceVolumes = webviz_subsurface.plugins:InplaceVolumes", "InplaceVolumesOneByOne = webviz_subsurface.plugins:InplaceVolumesOneByOne", "LinePlotterFMU = webviz_subsurface.plugins:LinePlotterFMU", + "MapViewerSumo = webviz_subsurface.plugins:MapViewerSumo", "MapViewerFMU = webviz_subsurface.plugins:MapViewerFMU", "MorrisPlot = webviz_subsurface.plugins:MorrisPlot", "ParameterAnalysis = webviz_subsurface.plugins:ParameterAnalysis", @@ -98,13 +99,16 @@ "pandas>=1.1.5", "pillow>=6.1", "pyarrow>=5.0.0", + "redis", "pyscal>=0.7.5", "scipy>=1.2", "statsmodels>=0.12.1", # indirect dependency through https://plotly.com/python/linear-fits/ - "webviz-config>=0.3.8", - "webviz-core-components>=0.5.6", + "webviz-config", + "webviz-core-components", "webviz-subsurface-components>=0.4.12", "xtgeo>=2.14", + "fmu-sumo@git+https://github.com/equinor/fmu-sumo@explorer", + "sumo-wrapper-python@git+https://github.com/equinor/sumo-wrapper-python.git@master", ], extras_require={"tests": TESTS_REQUIRE}, setup_requires=["setuptools_scm~=3.2"], diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py b/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py index 7f54193ef..b063bbdc0 100644 --- a/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py +++ b/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py @@ -12,3 +12,5 @@ SurfaceMeta, SurfaceServer, ) +from .ensemble_provider_dealer import EnsembleProviderDealer +from .ensemble_provider_dealer_sumo import EnsembleProviderDealerSumo diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_sumo.py b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_sumo.py new file mode 100644 index 000000000..669467a0e --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_sumo.py @@ -0,0 +1,301 @@ 
import logging
from io import BytesIO
from pathlib import Path
from typing import List, Optional, Set

import flask_caching
import xtgeo

from webviz_subsurface._utils.perf_timer import PerfTimer

from . import webviz_sumo
from ._stat_surf_cache import StatSurfCache
from .ensemble_surface_provider import (
    EnsembleSurfaceProvider,
    ObservedSurfaceAddress,
    SimulatedSurfaceAddress,
    StatisticalSurfaceAddress,
    SurfaceAddress,
    SurfaceStatistic,
)

LOGGER = logging.getLogger(__name__)


class ProviderImplSumo(EnsembleSurfaceProvider):
    """EnsembleSurfaceProvider implementation backed by Sumo.

    Metadata queries (attributes, surface names, dates, realizations) are
    memoized in the supplied flask_caching cache under a provider-unique key
    prefix. Surface blobs are fetched from Sumo on demand and statistical
    surfaces are aggregated server-side by Sumo.

    Observed surfaces are not available through Sumo and always yield ``None``.
    """

    def __init__(
        self,
        cache_dir: Path,
        cache: flask_caching.SimpleCache,
        sumo_id_of_case: str,
        iteration_id: str,
        use_access_token: bool,
        access_token: Optional[str],
    ) -> None:
        """Create a provider bound to one Sumo case + iteration.

        `cache_dir` is currently unused (kept for the disabled StatSurfCache
        experiment below). When `use_access_token` is False, an interactive
        Sumo explorer login is used instead of `access_token`.
        """
        self._provider_id = f"sumo_{sumo_id_of_case}__{iteration_id}"
        self._case_sumo_id = sumo_id_of_case
        self._iteration_id = iteration_id
        self._use_access_token = use_access_token
        self._access_token = access_token

        self._cache_prefix = f"ProviderImplSumo_{self._provider_id}__"
        self._cache = cache

        # NOTE(review): local statistical-surface cache experiment is disabled
        # for now; `cache_dir` is retained in the signature for when/if it is
        # re-enabled.
        # my_cache_dir = cache_dir / Path(
        #     "ProviderImplSumo_StatSurfCache_" + self._provider_id
        # )
        # self._stat_surf_cache = StatSurfCache(my_cache_dir)

        # Lazily created Sumo case object, see _get_my_sumo_case_obj()
        self._cached_sumo_case: Optional[webviz_sumo.Case] = None

    def provider_id(self) -> str:
        """Return the unique id of this provider instance."""
        return self._provider_id

    def attributes(self) -> List[str]:
        """Return sorted list of surface attributes (Sumo tag names) for this
        case/iteration, using the shared cache when possible."""
        timer = PerfTimer()

        cache_key = f"{self._cache_prefix}__attributes"
        cached_arr = self._cache.get(cache_key)
        if cached_arr is not None:
            LOGGER.debug(
                f"ProviderImplSumo.attributes() from cache in: {timer.elapsed_s():.2f}s"
            )
            return cached_arr

        case = self._get_my_sumo_case_obj()
        attrib_names = case.get_object_property_values(
            "tag_name", "surface", iteration_ids=[self._iteration_id]
        )
        attrib_names = sorted(attrib_names)

        self._cache.set(cache_key, attrib_names)

        LOGGER.debug(
            f"ProviderImplSumo.attributes() completed using Sumo in: {timer.elapsed_s():.2f}s"
        )

        return attrib_names

    def surface_names_for_attribute(self, surface_attribute: str) -> List[str]:
        """Return sorted surface (object) names available for the given
        attribute, using the shared cache when possible."""
        timer = PerfTimer()

        cache_key = (
            f"{self._cache_prefix}__surface_names_for_attribute_{surface_attribute}"
        )
        cached_arr = self._cache.get(cache_key)
        if cached_arr is not None:
            LOGGER.debug(
                f"ProviderImplSumo.surface_names_for_attribute({surface_attribute}) from cache in: {timer.elapsed_s():.2f}s"
            )
            return cached_arr

        case = self._get_my_sumo_case_obj()
        surf_names = case.get_object_property_values(
            "object_name",
            "surface",
            iteration_ids=[self._iteration_id],
            tag_names=[surface_attribute],
        )
        surf_names = sorted(surf_names)

        self._cache.set(cache_key, surf_names)

        LOGGER.debug(
            f"ProviderImplSumo.surface_names_for_attribute({surface_attribute}) completed using Sumo in: {timer.elapsed_s():.2f}s"
        )

        return surf_names

    def surface_dates_for_attribute(
        self, surface_attribute: str
    ) -> Optional[List[str]]:
        """Return sorted date strings for the given attribute, or None if the
        attribute has no time information.

        Sumo reports surfaces without time info as the time interval "NULL";
        these are stored as empty strings. A (cached) list containing a single
        empty string is the sentinel meaning "no dates" and maps to None.
        """
        timer = PerfTimer()

        cache_key = (
            f"{self._cache_prefix}__surface_dates_for_attribute_{surface_attribute}"
        )
        cached_arr = self._cache.get(cache_key)
        if cached_arr is not None:
            LOGGER.debug(
                f"ProviderImplSumo.surface_dates_for_attribute({surface_attribute}) from cache in: {timer.elapsed_s():.2f}s"
            )
            if len(cached_arr) == 1 and not bool(cached_arr[0]):
                return None
            return cached_arr

        case = self._get_my_sumo_case_obj()
        time_intervals = case.get_object_property_values(
            "time_interval",
            "surface",
            iteration_ids=[self._iteration_id],
            tag_names=[surface_attribute],
        )

        datestr_arr: List[str] = []
        for interval_str in time_intervals:
            datestr_arr.append(interval_str if interval_str != "NULL" else "")

        datestr_arr = sorted(datestr_arr)

        self._cache.set(cache_key, datestr_arr)

        LOGGER.debug(
            f"ProviderImplSumo.surface_dates_for_attribute({surface_attribute}) completed using Sumo in: {timer.elapsed_s():.2f}s"
        )

        if len(datestr_arr) == 1 and not bool(datestr_arr[0]):
            return None

        return datestr_arr

    def realizations(self) -> List[int]:
        """Return sorted realization ids that have surfaces in this case,
        using the shared cache when possible."""
        timer = PerfTimer()

        cache_key = f"{self._cache_prefix}__realizations"
        cached_arr = self._cache.get(cache_key)
        if cached_arr is not None:
            LOGGER.debug(
                f"ProviderImplSumo.realizations() from cache in: {timer.elapsed_s():.2f}s"
            )
            return cached_arr

        case = self._get_my_sumo_case_obj()
        realization_ids = case.get_object_property_values("realization_id", "surface")
        realization_ids = sorted(realization_ids)

        self._cache.set(cache_key, realization_ids)

        LOGGER.debug(
            f"ProviderImplSumo.realizations() completed using Sumo in: {timer.elapsed_s():.2f}s"
        )

        return realization_ids

    def get_surface(
        self,
        address: SurfaceAddress,
    ) -> Optional[xtgeo.RegularSurface]:
        """Dispatch on address type and return the surface, or None.

        Observed surfaces are not available from Sumo and always return None.

        Raises:
            TypeError: if `address` is not one of the known address types.
        """
        if isinstance(address, StatisticalSurfaceAddress):
            return self._get_statistical_surface(address)
        if isinstance(address, SimulatedSurfaceAddress):
            return self._get_simulated_surface(address)
        if isinstance(address, ObservedSurfaceAddress):
            return None

        raise TypeError("Unknown type of surface address")

    def _get_my_sumo_case_obj(self) -> webviz_sumo.Case:
        """Return the Sumo case object, creating the explorer and fetching
        the case on first use (memoized on the instance)."""

        if self._cached_sumo_case is not None:
            return self._cached_sumo_case

        timer = PerfTimer()

        if self._use_access_token:
            sumo = webviz_sumo.create_explorer(self._access_token)
        else:
            sumo = webviz_sumo.create_interactive_explorer()
        et_create_s = timer.lap_s()

        self._cached_sumo_case = sumo.get_case_by_id(self._case_sumo_id)
        et_get_s = timer.lap_s()

        LOGGER.debug(
            f"_get_my_sumo_case_obj() took: {timer.elapsed_s():.2f}s "
            f"(create={et_create_s:.2f}s, get={et_get_s:.2f}s)"
        )

        return self._cached_sumo_case

    def _get_simulated_surface(
        self, address: SimulatedSurfaceAddress
    ) -> Optional[xtgeo.RegularSurface]:
        """Returns a Xtgeo surface instance of a single realization surface"""

        timer = PerfTimer()

        case = self._get_my_sumo_case_obj()

        time_intervals_list = [address.datestr] if address.datestr is not None else []

        surface_collection = case.get_objects(
            "surface",
            iteration_ids=[self._iteration_id],
            realization_ids=[address.realization],
            tag_names=[address.attribute],
            object_names=[address.name],
            time_intervals=time_intervals_list,
        )

        num_surfaces = len(surface_collection)
        if num_surfaces == 0:
            LOGGER.warning(f"No simulated surface found in Sumo for {address}")
            return None
        if num_surfaces > 1:
            # Bug fix: the two adjacent literals previously concatenated
            # without a separator, yielding "...{address}Returning first surface."
            LOGGER.warning(
                f"Multiple simulated surfaces found in Sumo for: {address}. "
                "Returning first surface."
            )

        surf = surface_collection[0]
        blob_bytes: bytes = surf.blob
        byte_stream = BytesIO(blob_bytes)
        xtgeo_surf = xtgeo.surface_from_file(byte_stream)

        LOGGER.debug(
            f"ProviderImplSumo loaded simulated surface from Sumo in: {timer.elapsed_s():.2f}s"
        )

        return xtgeo_surf

    def _get_statistical_surface(
        self, address: StatisticalSurfaceAddress
    ) -> Optional[xtgeo.RegularSurface]:
        """Compute a statistical surface by letting Sumo aggregate the
        per-realization surfaces matching `address`. Returns None if no
        matching realization surfaces exist."""

        timer = PerfTimer()

        time_intervals_list = [address.datestr] if address.datestr is not None else []

        case = self._get_my_sumo_case_obj()
        et_get_case_s = timer.lap_s()

        surface_collection = case.get_objects(
            "surface",
            iteration_ids=[self._iteration_id],
            realization_ids=address.realizations,
            tag_names=[address.attribute],
            object_names=[address.name],
            time_intervals=time_intervals_list,
        )
        et_get_coll_s = timer.lap_s()

        surf_count = len(surface_collection)
        if surf_count == 0:
            LOGGER.warning(f"No simulated surfaces found in Sumo for {address}")
            return None

        # Map our statistic enum onto Sumo's aggregation operation names
        surfstat_to_sumostatstr_map = {
            SurfaceStatistic.MEAN: "MEAN",
            SurfaceStatistic.STDDEV: "STD",
            SurfaceStatistic.MINIMUM: "MIN",
            SurfaceStatistic.MAXIMUM: "MAX",
            SurfaceStatistic.P10: "P10",
            SurfaceStatistic.P90: "P90",
        }
        sumo_aggr_str = surfstat_to_sumostatstr_map[address.statistic]

        agg_surf_bytes: bytes = surface_collection.aggregate(sumo_aggr_str)
        et_calc_s = timer.lap_s()

        byte_stream = BytesIO(agg_surf_bytes)
        xtgeo_surf = xtgeo.surface_from_file(byte_stream)

        LOGGER.debug(
            f"ProviderImplSumo calculated statistical surface using Sumo in: "
            f"{timer.elapsed_s():.2f}s ("
            f"get_case={et_get_case_s:.2f}s, get_coll={et_get_coll_s:.2f}s, calc={et_calc_s:.2f}s), "
            f"[#surfaces={surf_count}, stat={address.statistic}, "
            f"attr={address.attribute}, name={address.name}, date={address.datestr}]"
        )

        return xtgeo_surf
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Optional

import flask_caching

from webviz_config.webviz_instance_info import WEBVIZ_INSTANCE_INFO


@dataclass(frozen=False)
class _CacheInfo:
    # Module-level singleton state for lazily created caches:
    # root_cache_folder - resolved once from WEBVIZ_INSTANCE_INFO
    # main_cache        - the shared default cache
    # named_cache_dict  - one cache per logical name, created on demand
    root_cache_folder: Optional[Path]
    main_cache: Optional[flask_caching.SimpleCache]
    named_cache_dict: Dict[str, flask_caching.SimpleCache]


_CACHE_INFO: _CacheInfo = _CacheInfo(None, None, {})


def get_root_cache_folder() -> Path:
    """Return the root folder for file based caches, resolving it on first use."""
    # Explicit None check instead of truthiness: a Path("") would be falsy
    # and trigger a spurious re-resolve.
    if _CACHE_INFO.root_cache_folder is None:
        _CACHE_INFO.root_cache_folder = (
            WEBVIZ_INSTANCE_INFO.storage_folder / "root_cache_folder"
        )

    return _CACHE_INFO.root_cache_folder


def get_or_create_cache() -> flask_caching.SimpleCache:
    """Return the shared main cache, creating it on first use."""
    if _CACHE_INFO.main_cache is None:
        # _CACHE_INFO.main_cache = _create_file_system_cache("_FlaskFileSystemCache_main")
        _CACHE_INFO.main_cache = _create_redis_cache("main:")

    return _CACHE_INFO.main_cache


def get_or_create_named_cache(cache_name: str) -> flask_caching.SimpleCache:
    """Return the cache registered under `cache_name`, creating it on first use."""
    cache = _CACHE_INFO.named_cache_dict.get(cache_name)
    if cache is None:
        # cache = _create_file_system_cache(f"_FlaskFileSystemCache__{cache_name}")
        cache = _create_redis_cache(f"_{cache_name}:")
        _CACHE_INFO.named_cache_dict[cache_name] = cache

    return cache


def _create_file_system_cache(cache_sub_dir: str) -> flask_caching.SimpleCache:
    """Create a file system backed cache below the root cache folder."""
    # Threshold is the maximum number of items the cache before it starts deleting some
    # of the items. A threshold value of 0 indicates no threshold.
    # The default timeout in seconds that is used, 0 indicates that the cache never expires

    # Note that NO deletion of cached items will ever be done before the item threshold
    # is reached regardless of the timeout specified.
    # Given that, what would a sensible item threshold be?
    # In cachelib, the default is 500. This seems a bit low, but setting the value too
    # high probably impacts performance since the file cache will regularly iterate over
    # all the files in the directory.

    item_threshold = 10000
    default_timeout_s = 3600

    flask_cache_dir = get_root_cache_folder() / cache_sub_dir

    return flask_caching.backends.FileSystemCache(
        cache_dir=flask_cache_dir,
        threshold=item_threshold,
        default_timeout=default_timeout_s,
    )


def _create_redis_cache(key_prefix: str) -> flask_caching.SimpleCache:
    """Create a Redis backed cache using `key_prefix` to namespace the keys."""

    default_timeout_s = 3600

    # NOTE(review): host/port are hard-coded for the current deployment
    # ("redis-cache" service) — presumably these should come from
    # configuration; confirm before shipping.
    return flask_caching.backends.RedisCache(
        host="redis-cache",
        port=6379,
        default_timeout=default_timeout_s,
        key_prefix=key_prefix,
    )
flask_caching.backends.FileSystemCache(root_cache_dir) + provider: EnsembleSurfaceProvider = ProviderImplSumo( + cache_dir=root_cache_dir, + cache=cache, + sumo_id_of_case="9f184b7b-9382-6b3d-164a-2a704ccb7dfd", + iteration_id="0", + use_access_token=False, + access_token=None, ) all_attributes = provider.attributes() @@ -60,57 +77,71 @@ def main() -> None: all_realizations = provider.realizations() print(f"all_realizations={all_realizations}") - surf = provider.get_surface( - SimulatedSurfaceAddress( - attribute="oilthickness", - name="therys", - datestr="20200701_20180101", - realization=1, + ######################### + if provider.__class__.__name__ == "ProviderImplFile": + surf = provider.get_surface( + SimulatedSurfaceAddress( + attribute="oilthickness", + name="therys", + datestr="20200701_20180101", + realization=1, + ) ) - ) - print(surf) - - surf = provider.get_surface( - ObservedSurfaceAddress( - attribute="amplitude_mean", - name="basevolantis", - datestr="20180701_20180101", + print(surf) + + surf = provider.get_surface( + ObservedSurfaceAddress( + attribute="amplitude_mean", + name="basevolantis", + datestr="20180701_20180101", + ) ) - ) - print(surf) - - # surf = provider.get_surface( - # StatisticalSurfaceAddress( - # attribute="amplitude_mean", - # name="basevolantis", - # datestr="20180701_20180101", - # statistic=SurfaceStatistic.P10, - # realizations=[0, 1], - # ) - # ) - # print(surf) - - # surf = provider.get_surface( - # StatisticalSurfaceAddress( - # attribute="amplitude_mean", - # name="basevolantis", - # datestr="20180701_20180101", - # statistic=SurfaceStatistic.P10, - # realizations=all_realizations, - # ) - # ) - # print(surf) - - surf = provider.get_surface( - StatisticalSurfaceAddress( - attribute="ds_extract_postprocess-refined8", - name="topvolantis", - datestr=None, - statistic=SurfaceStatistic.P10, - realizations=all_realizations, + print(surf) + + # surf = provider.get_surface( + # StatisticalSurfaceAddress( + # 
attribute="amplitude_mean", + # name="basevolantis", + # datestr="20180701_20180101", + # statistic=SurfaceStatistic.P10, + # realizations=[0, 1], + # ) + # ) + # print(surf) + + # surf = provider.get_surface( + # StatisticalSurfaceAddress( + # attribute="amplitude_mean", + # name="basevolantis", + # datestr="20180701_20180101", + # statistic=SurfaceStatistic.P10, + # realizations=all_realizations, + # ) + # ) + # print(surf) + + surf = provider.get_surface( + StatisticalSurfaceAddress( + attribute="ds_extract_postprocess-refined8", + name="topvolantis", + datestr=None, + statistic=SurfaceStatistic.P10, + realizations=all_realizations, + ) ) - ) - print(surf) + print(surf) + + ######################### + if provider.__class__.__name__ == "ProviderImplSumo": + surf = provider.get_surface( + SimulatedSurfaceAddress( + attribute="ds_extract_geogrid", + name="Therys Fm. Top", + datestr=None, + realization=1, + ) + ) + print(surf) # Running: diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_provider_dealer.py b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_provider_dealer.py new file mode 100644 index 000000000..e4ad54d65 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_provider_dealer.py @@ -0,0 +1,49 @@ +import abc +from dataclasses import dataclass +from typing import List, Optional + +from .ensemble_surface_provider import EnsembleSurfaceProvider + + +@dataclass(frozen=True) +class ProviderInfo: + field: str + case: str + iter_id: str + provider_id: str + + +# +# Mapping from field + case + iter_id => ensemble name +# + + +class EnsembleProviderDealer(abc.ABC): + # + # These top three methods don't really belong here + # They are more general and really pertain to general ensemble discovery in SUMO + # + @abc.abstractmethod + def field_names(self) -> List[str]: + pass + + @abc.abstractmethod + def case_names(self, field_name: str) -> List[str]: + pass + + @abc.abstractmethod + def 
    @abc.abstractmethod
    def iteration_ids(self, field_name: str, case_name: str) -> List[str]:
        """Return the iteration ids available for the given field and case."""
        pass

    # For now, but will not be fully qualifying if backed by file based providers
    @abc.abstractmethod
    def get_surface_provider(
        self, field_name: str, case_name: str, iteration_id: str
    ) -> Optional[EnsembleSurfaceProvider]:
        """Return an EnsembleSurfaceProvider for the specified ensemble, or
        None if the ensemble cannot be resolved."""
        pass

    # @abc.abstractmethod
    # def available_providers(self, field_name: Optional[str], case_name: Optional[str]) -> List[ProviderInfo]:

    # @abc.abstractmethod
    # def get_provider(self, provider_id: str) -> Optional[EnsembleSurfaceProvider]:
import logging
from dataclasses import dataclass
from typing import Dict, List, Optional

import flask
import flask_caching
from webviz_config.webviz_instance_info import WEBVIZ_INSTANCE_INFO

from webviz_subsurface._utils.perf_timer import PerfTimer

from . import cache_helpers, webviz_sumo
from .ensemble_provider_dealer import EnsembleProviderDealer
from .ensemble_surface_provider import EnsembleSurfaceProvider
from .ensemble_surface_provider_factory import EnsembleSurfaceProviderFactory

LOGGER = logging.getLogger(__name__)


class WorkSession:
    """Short-lived helper doing the actual Sumo lookups for one request.

    Results are memoized in the shared cache under the "sumo_dealer:" prefix.
    When `use_session_token` is True, the Sumo access token is read from the
    flask session of the current request.
    """

    def __init__(
        self, cache: flask_caching.SimpleCache, use_session_token: bool
    ) -> None:
        self._use_session_token = use_session_token
        self._cache = cache
        # Plain string (was a needless f-string without placeholders)
        self._cache_prefix = "sumo_dealer:"
        self._sumo_explorer: Optional[webviz_sumo.Explorer] = None

    def _get_case_to_sumo_id_dict(self, field_name: str) -> Dict[str, str]:
        """Return mapping from case name to Sumo case id for `field_name`,
        consulting the cache before querying Sumo."""
        timer = PerfTimer()

        cache_key = self._cache_prefix + f"{field_name}:case_to_sumo_id_dict"
        case_to_sumo_id_dict: Dict[str, str] = self._cache.get(cache_key)
        if case_to_sumo_id_dict is not None:
            return case_to_sumo_id_dict

        et_cache_s = timer.lap_s()

        sumo = self.get_sumo_explorer()
        cases = sumo.get_cases(fields=[field_name])
        case_to_sumo_id_dict = {}
        for case in cases:
            case_name = case.case_name
            sumo_id_of_case = case.sumo_id
            case_to_sumo_id_dict[case_name] = sumo_id_of_case

        et_sumo_s = timer.lap_s()

        self._cache.set(cache_key, case_to_sumo_id_dict)

        et_cache_s += timer.lap_s()

        LOGGER.debug(
            f"WorkSession._get_case_to_sumo_id_dict() took: {timer.elapsed_s():.2f}s ("
            f"cache={et_cache_s:.2f}s, sumo={et_sumo_s:.2f}s)"
        )

        return case_to_sumo_id_dict

    def get_field_names(self) -> List[str]:
        """Return the field names known to Sumo, consulting the cache first."""
        cache_key = self._cache_prefix + "field_list"
        fields_list: List[str] = self._cache.get(cache_key)
        if fields_list is not None:
            return fields_list

        sumo = self.get_sumo_explorer()
        fields_dict: dict = sumo.get_fields()
        fields_list = list(fields_dict)

        self._cache.set(cache_key, fields_list)

        return fields_list

    def get_case_names(self, field_name: str) -> List[str]:
        """Return the case names available for `field_name`."""
        case_to_sumo_id_dict = self._get_case_to_sumo_id_dict(field_name)
        return list(case_to_sumo_id_dict)

    def get_iteration_ids(self, field_name: str, case_name: str) -> List[str]:
        """Return iteration ids for the given field/case, consulting the
        cache first. Empty list if the case cannot be resolved."""
        timer = PerfTimer()

        cache_key = self._cache_prefix + f"{field_name}:{case_name}:iteration_ids"
        iteration_id_list: List[str] = self._cache.get(cache_key)
        if iteration_id_list is not None:
            return iteration_id_list

        et_cache_s = timer.lap_s()

        iteration_id_list = []
        sumo_case_id = self.get_sumo_case_id(field_name, case_name)
        if sumo_case_id is not None:
            sumo = self.get_sumo_explorer()
            case = sumo.get_case_by_id(sumo_case_id)
            if case is not None:
                iteration_id_list = list(case.get_iterations())

        et_sumo_s = timer.lap_s()

        self._cache.set(cache_key, iteration_id_list)

        et_cache_s += timer.lap_s()

        LOGGER.debug(
            f"WorkSession.get_iteration_ids() took: {timer.elapsed_s():.2f}s ("
            f"cache={et_cache_s:.2f}s, sumo={et_sumo_s:.2f}s) "
        )

        return iteration_id_list

    def get_sumo_case_id(self, field_name: str, case_name: str) -> Optional[str]:
        """Return the Sumo case id for field/case, or None if unknown."""
        case_to_sumo_id_dict = self._get_case_to_sumo_id_dict(field_name)
        return case_to_sumo_id_dict.get(case_name)

    def get_sumo_explorer(self) -> webviz_sumo.Explorer:
        """Return a Sumo explorer, creating it on first use (memoized)."""
        if self._sumo_explorer is not None:
            return self._sumo_explorer

        if self._use_session_token:
            access_token = self.get_access_token()
            self._sumo_explorer = webviz_sumo.create_explorer(access_token)
        else:
            self._sumo_explorer = webviz_sumo.create_interactive_explorer()

        return self._sumo_explorer

    def get_access_token(self) -> str:
        """Return the Sumo access token stored in the flask session.

        Raises:
            ValueError: if session tokens are disabled or no token is present.
        """
        if not self._use_session_token:
            raise ValueError("Cannot get access token when _use_session_token is False")

        access_token = flask.session.get("access_token")
        if not access_token:
            raise ValueError("Unable to get access token from flask session")

        return access_token


class EnsembleProviderDealerSumo(EnsembleProviderDealer):
    """EnsembleProviderDealer that discovers ensembles and creates surface
    providers backed by Sumo."""

    def __init__(self, use_session_token: bool) -> None:
        self._use_session_token = use_session_token
        self._cache = cache_helpers.get_or_create_cache()

    def field_names(self) -> List[str]:
        """Return the field names known to Sumo."""
        timer = PerfTimer()

        work_session = WorkSession(self._cache, self._use_session_token)
        field_names = work_session.get_field_names()
        LOGGER.debug(
            f"EnsembleProviderDealerSumo.field_names() took: {timer.elapsed_s():.2f}s"
        )

        return field_names

    def case_names(self, field_name: str) -> List[str]:
        """Return case names available for `field_name`."""
        if not isinstance(field_name, str):
            raise ValueError("field_name must be of type str")

        timer = PerfTimer()

        work_session = WorkSession(self._cache, self._use_session_token)
        case_names = work_session.get_case_names(field_name)
        LOGGER.debug(
            f"EnsembleProviderDealerSumo.case_names() took: {timer.elapsed_s():.2f}s"
        )

        return case_names

    def iteration_ids(self, field_name: str, case_name: str) -> List[str]:
        """Return iteration ids available for the given field/case."""
        if not isinstance(field_name, str):
            raise ValueError("field_name must be of type str")
        if not isinstance(case_name, str):
            raise ValueError("case_name must be of type str")

        timer = PerfTimer()

        work_session = WorkSession(self._cache, self._use_session_token)
        iter_ids = work_session.get_iteration_ids(field_name, case_name)
        LOGGER.debug(
            f"EnsembleProviderDealerSumo.iteration_ids() took: {timer.elapsed_s():.2f}s"
        )

        return iter_ids

    def get_surface_provider(
        self, field_name: str, case_name: str, iteration_id: str
    ) -> Optional[EnsembleSurfaceProvider]:
        """Create (via the factory) a surface provider for the specified
        ensemble, or return None if the case cannot be resolved in Sumo."""
        if not isinstance(field_name, str):
            raise ValueError("field_name must be of type str")
        if not isinstance(case_name, str):
            raise ValueError("case_name must be of type str")
        # Bug fix: this previously re-checked case_name while the error
        # message referred to iteration_id, leaving iteration_id unvalidated.
        if not isinstance(iteration_id, (str, int)):
            raise ValueError("iteration_id must be of type str or int")

        timer = PerfTimer()

        work_session = WorkSession(self._cache, self._use_session_token)
        sumo_id = work_session.get_sumo_case_id(field_name, case_name)
        if sumo_id is None:
            return None

        access_token = None
        if self._use_session_token:
            access_token = work_session.get_access_token()

        factory = EnsembleSurfaceProviderFactory.instance()
        provider = factory.create_from_sumo_case_id(
            sumo_id_of_case=sumo_id,
            iteration_id=iteration_id,
            use_access_token=self._use_session_token,
            access_token=access_token,
        )

        LOGGER.debug(
            f"EnsembleProviderDealerSumo.get_surface_provider() took: {timer.elapsed_s():.2f}s"
        )

        return provider
import cache_helpers from ._provider_impl_file import ProviderImplFile +from ._provider_impl_sumo import ProviderImplSumo from ._surface_discovery import ( discover_observed_surface_files, discover_per_realization_surface_files, @@ -24,15 +27,21 @@ class EnsembleSurfaceProviderFactory(WebvizFactory): def __init__( self, root_storage_folder: Path, + root_cache_folder: Path, + cache: flask_caching.SimpleCache, allow_storage_writes: bool, avoid_copying_surfaces: bool, ) -> None: self._storage_dir = Path(root_storage_folder) / __name__ + self._cache_dir = Path(root_cache_folder) / __name__ + self._cache = cache self._allow_storage_writes = allow_storage_writes self._avoid_copying_surfaces = avoid_copying_surfaces LOGGER.info( - f"EnsembleSurfaceProviderFactory init: storage_dir={self._storage_dir}" + f"EnsembleSurfaceProviderFactory init: " + f"storage_dir={self._storage_dir}, cache_dir={self._cache_dir}," + f"cache={type(self._cache)}" ) if self._allow_storage_writes: @@ -45,12 +54,16 @@ def instance() -> "EnsembleSurfaceProviderFactory": factory = WEBVIZ_FACTORY_REGISTRY.get_factory(EnsembleSurfaceProviderFactory) if not factory: app_instance_info = WEBVIZ_FACTORY_REGISTRY.app_instance_info - storage_folder = app_instance_info.storage_folder + root_storage_folder = app_instance_info.storage_folder + root_cache_folder = cache_helpers.get_root_cache_folder() + cache = cache_helpers.get_or_create_cache() allow_writes = app_instance_info.run_mode != WebvizRunMode.PORTABLE dont_copy_surfs = app_instance_info.run_mode == WebvizRunMode.NON_PORTABLE factory = EnsembleSurfaceProviderFactory( - root_storage_folder=storage_folder, + root_storage_folder=root_storage_folder, + root_cache_folder=root_cache_folder, + cache=cache, allow_storage_writes=allow_writes, avoid_copying_surfaces=dont_copy_surfs, ) @@ -119,6 +132,35 @@ def create_from_ensemble_surface_files( return provider + def create_from_sumo_case_id( + self, + sumo_id_of_case: str, + iteration_id: str, + 
use_access_token: bool, + access_token: Optional[str], + ) -> EnsembleSurfaceProvider: + timer = PerfTimer() + + provider = ProviderImplSumo( + cache_dir=self._cache_dir, + cache=self._cache, + sumo_id_of_case=sumo_id_of_case, + iteration_id=iteration_id, + use_access_token=use_access_token, + access_token=access_token, + ) + + if not provider: + raise ValueError( + f"Failed to create sumo surface provider for {sumo_id_of_case=}, {iteration_id=}" + ) + + LOGGER.info( + f"Created sumo surface provider for {sumo_id_of_case=}, {iteration_id=} in {timer.elapsed_s():.2f}s" + ) + + return provider + def _make_hash_string(string_to_hash: str) -> str: # There is no security risk here and chances of collision should be very slim diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py b/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py index 3b3f41495..e126de1f7 100644 --- a/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py +++ b/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py @@ -12,10 +12,10 @@ import flask_caching import xtgeo from dash import Dash -from webviz_config.webviz_instance_info import WEBVIZ_INSTANCE_INFO from webviz_subsurface._utils.perf_timer import PerfTimer +from . 
import cache_helpers from ._surface_to_image import surface_to_png_bytes_optimized from .ensemble_surface_provider import ( ObservedSurfaceAddress, @@ -59,18 +59,9 @@ class SurfaceMeta: class SurfaceServer: def __init__(self, app: Dash) -> None: - cache_dir = ( - WEBVIZ_INSTANCE_INFO.storage_folder / f"SurfaceServer_filecache_{uuid4()}" + self._image_cache = cache_helpers.get_or_create_named_cache( + "SurfaceServer_imagecache" ) - LOGGER.debug(f"Setting up file cache in: {cache_dir}") - self._image_cache = flask_caching.Cache( - config={ - "CACHE_TYPE": "FileSystemCache", - "CACHE_DIR": cache_dir, - "CACHE_DEFAULT_TIMEOUT": 0, - } - ) - self._image_cache.init_app(app.server) self._setup_url_rule(app) @@ -196,15 +187,21 @@ def _create_and_store_image_in_cache( timer = PerfTimer() - LOGGER.debug("Converting surface to PNG image...") + # LOGGER.debug("Converting surface to PNG image...") png_bytes: bytes = surface_to_png_bytes_optimized(surface) - LOGGER.debug(f"Got PNG image, size={(len(png_bytes) / (1024 * 1024)):.2f}MB") + # LOGGER.debug(f"Got PNG image, size={(len(png_bytes) / (1024 * 1024)):.2f}MB") et_to_image_s = timer.lap_s() img_cache_key = "IMG:" + base_cache_key meta_cache_key = "META:" + base_cache_key - self._image_cache.add(img_cache_key, png_bytes) + # The timeout for the cached image + # Note that we use different values for the metadata and the actual image to make + # sure that the metadata times out first + timeout_meta_s = 24 * 3600 + timeout_image_s = 25 * 3600 + + self._image_cache.set(img_cache_key, png_bytes, timeout_image_s) # For debugging rotations # unrot_surf = surface.copy() @@ -223,12 +220,13 @@ def _create_and_store_image_in_cache( deckgl_bounds=deckgl_bounds, deckgl_rot_deg=deckgl_rot, ) - self._image_cache.add(meta_cache_key, meta) + self._image_cache.set(meta_cache_key, meta, timeout_meta_s) et_write_cache_s = timer.lap_s() LOGGER.debug( - f"Created image and wrote to cache in in: {timer.elapsed_s():.2f}s (" + f"Created PNG image and 
wrote to cache in in: {timer.elapsed_s():.2f}s (" f"to_image={et_to_image_s:.2f}s, write_cache={et_write_cache_s:.2f}s), " + f"size={(len(png_bytes) / (1024 * 1024)):.2f}MB, " f"[base_cache_key={base_cache_key}]" ) diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/webviz_sumo.py b/webviz_subsurface/_providers/ensemble_surface_provider/webviz_sumo.py new file mode 100644 index 000000000..beaf78c9e --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/webviz_sumo.py @@ -0,0 +1,10 @@ +from fmu.sumo.explorer import Explorer # type: ignore +from fmu.sumo.explorer import Case # type: ignore + + +def create_explorer(access_token: str) -> Explorer: + return Explorer(env="dev", token=access_token, interactive=False) + + +def create_interactive_explorer() -> Explorer: + return Explorer(env="dev", interactive=True) diff --git a/webviz_subsurface/plugins/__init__.py b/webviz_subsurface/plugins/__init__.py index 27a8548ac..8eb0bf52a 100644 --- a/webviz_subsurface/plugins/__init__.py +++ b/webviz_subsurface/plugins/__init__.py @@ -30,6 +30,7 @@ from ._inplace_volumes_onebyone import InplaceVolumesOneByOne from ._line_plotter_fmu.line_plotter_fmu import LinePlotterFMU from ._map_viewer_fmu import MapViewerFMU +from ._map_viewer_sumo import MapViewerSumo from ._morris_plot import MorrisPlot from ._parameter_analysis import ParameterAnalysis from ._parameter_correlation import ParameterCorrelation diff --git a/webviz_subsurface/plugins/_map_viewer_sumo/__init__.py b/webviz_subsurface/plugins/_map_viewer_sumo/__init__.py new file mode 100644 index 000000000..c0bb229cf --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_sumo/__init__.py @@ -0,0 +1 @@ +from ._plugin import MapViewerSumo diff --git a/webviz_subsurface/plugins/_map_viewer_sumo/_layout_elements.py b/webviz_subsurface/plugins/_map_viewer_sumo/_layout_elements.py new file mode 100644 index 000000000..f30d4c8ec --- /dev/null +++ 
from typing import Dict, List, Optional


class ElementIds:
    """Namespaced component/store IDs used by the MapViewerSumo plugin layout."""

    class SINGLEMAPVIEW:
        ID = "single-map-view"

    class DECKGLVIEW:
        ID = "deckgl-view"
        VIEW = "deckgl-component"

    class STORES:
        CASE_ITER_STORE = "case-iter-store"
        SURFACE_ADDRESS_STORE = "surface-address-store"

    class CASE_SELECTOR:
        ID = "case-selector"
        FIELD = "field"
        CASE = "case"
        ITER = "iter"

    class SURFACE_SELECTOR:
        ID = "surface-selectors"
        ATTRIBUTE = "attribute"
        NAME = "name"
        DATE = "date"
        REALIZATION = "realization"
        AGGREGATION = "aggregation"


# In-memory stand-in for the Sumo case/iteration/surface hierarchy, used while
# the real Sumo backend is under development. Shape:
#   field -> {"cases": [{"name", "iterations": [{"name", "realizations",
#             "attributes": [{"name", "surface_names", "surface_dates"}]}]}]}
# NOTE(review): surface_dates are ints (yyyymmdd), not strings.
data = {
    "Drogon": {
        "cases": [
            {
                "name": "case1",
                "iterations": [
                    {
                        "name": "iter-0",
                        "realizations": [0, 1, 2, 3, 4],
                        "attributes": [
                            {
                                "name": "attr1",
                                "surface_names": ["top", "base"],
                                "surface_dates": [20010101, 20020101],
                            },
                            {
                                "name": "attr2",
                                "surface_names": ["top", "middle", "base"],
                                "surface_dates": [],
                            },
                        ],
                    },
                    {
                        "name": "iter-1",
                        "realizations": [0, 1, 2, 3, 4],
                        "attributes": [
                            {
                                "name": "attr1",
                                "surface_names": ["top", "base"],
                                "surface_dates": [20010101, 20020101],
                            },
                            {
                                "name": "attr2",
                                "surface_names": ["top", "middle", "base"],
                                "surface_dates": [],
                            },
                        ],
                    },
                ],
            },
            {
                "name": "case2",
                "iterations": [
                    {
                        "name": "pred",
                        "realizations": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                        "attributes": [
                            {
                                "name": "attr3",
                                "surface_names": ["above", "within", "below"],
                                "surface_dates": [20030101, 20100101],
                            }
                        ],
                    },
                    {
                        "name": "pred2",
                        "realizations": [0, 1, 2, 3, 4, 5, 6, 7],
                        "attributes": [
                            {
                                "name": "attr1",
                                "surface_names": ["above", "within", "below"],
                                "surface_dates": [20030101, 20100101],
                            }
                        ],
                    },
                    {
                        "name": "iter-0",
                        "realizations": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
                        "attributes": [
                            {
                                "name": "attr1",
                                "surface_names": ["above", "within", "below"],
                                "surface_dates": [20030101, 20100101],
                            }
                        ],
                    },
                ],
            },
        ]
    }
}


def get_case_names(field_name: str = "Drogon") -> List[str]:
    """Return the names of all mock cases registered for *field_name*."""
    return [case["name"] for case in data[field_name]["cases"]]


def _get_case(case_name: str, field_name: str = "Drogon") -> Optional[Dict]:
    """Return the case dict named *case_name*, or None if it is unknown."""
    for case in data[field_name]["cases"]:
        if case["name"] == case_name:
            return case
    return None


def _get_iteration(
    case_name: str, iteration_name: str, field_name: str = "Drogon"
) -> Optional[Dict]:
    """Return the iteration dict, or None if the case/iteration is unknown."""
    case = _get_case(case_name, field_name=field_name)
    if case is None:
        return None
    for iteration in case["iterations"]:
        if iteration["name"] == iteration_name:
            return iteration
    return None


def _get_surface_attribute(
    case_name: str, iteration_name: str, attribute_name: str, field_name: str = "Drogon"
) -> Optional[Dict]:
    """Return the surface-attribute dict, or None if any level is unknown.

    Bug fix: the original returned ``[]`` (a list) when the iteration was
    missing although the declared return type was ``Dict``.
    """
    iteration = _get_iteration(
        case_name=case_name, iteration_name=iteration_name, field_name=field_name
    )
    if iteration is None:
        return None
    for attribute in iteration["attributes"]:
        if attribute["name"] == attribute_name:
            return attribute
    return None


def get_iteration_names(case_name: str, field_name: str = "Drogon") -> List[str]:
    """Return iteration names for *case_name* ([] if the case is unknown)."""
    case = _get_case(case_name, field_name)
    if case is None:
        return []
    return [iteration["name"] for iteration in case["iterations"]]


def get_realizations(
    case_name: str, iteration_name: str, field_name: str = "Drogon"
) -> List[int]:
    """Return realization numbers ([] if the case/iteration is unknown).

    Bug fix: the original indexed a possibly-None iteration and raised
    ``TypeError`` for unknown case/iteration combinations.
    """
    iteration = _get_iteration(
        case_name=case_name, iteration_name=iteration_name, field_name=field_name
    )
    if iteration is None:
        return []
    return iteration["realizations"]


def get_surface_attribute_names(
    case_name: str, iteration_name: str, field_name: str = "Drogon"
) -> List[str]:
    """Return attribute names for the iteration ([] if unknown)."""
    iteration = _get_iteration(
        case_name=case_name, iteration_name=iteration_name, field_name=field_name
    )
    if iteration is None:
        return []
    return [attribute["name"] for attribute in iteration["attributes"]]


def get_surface_names(
    case_name: str, iteration_name: str, attribute_name: str, field_name: str = "Drogon"
) -> List[str]:
    """Return surface names for the attribute ([] if any level is unknown).

    Bug fix: the original raised ``TypeError`` for an unknown attribute.
    """
    attr = _get_surface_attribute(
        case_name=case_name,
        iteration_name=iteration_name,
        attribute_name=attribute_name,
        field_name=field_name,
    )
    if attr is None:
        return []
    return attr["surface_names"]


def get_surface_dates(
    case_name: str, iteration_name: str, attribute_name: str, field_name: str = "Drogon"
) -> List[int]:
    """Return surface dates (ints, yyyymmdd) for the attribute ([] if unknown).

    Bug fixes: return annotation corrected from ``List[str]`` (the mock data
    holds ints) and ``TypeError`` on unknown attribute replaced with [].
    """
    attr = _get_surface_attribute(
        case_name=case_name,
        iteration_name=iteration_name,
        attribute_name=attribute_name,
        field_name=field_name,
    )
    if attr is None:
        return []
    return attr["surface_dates"]
field_name: str): + super().__init__(stretch=True) + + self._use_oauth2 = ( + True if WEBVIZ_INSTANCE_INFO.run_mode == WebvizRunMode.PORTABLE else False + ) + + app.server.wsgi_app = ProxyFix(app.server.wsgi_app, x_proto=1, x_host=1) + + # For now, just touch the factory to initialize it here + EnsembleSurfaceProviderFactory.instance() + + provider_dealer = EnsembleProviderDealerSumo(use_session_token=self._use_oauth2) + surface_server = SurfaceServer.instance(app) + + self.add_store( + ElementIds.STORES.CASE_ITER_STORE, + storage_type=WebvizPluginABC.StorageType.SESSION, + ) + self.add_store( + ElementIds.STORES.SURFACE_ADDRESS_STORE, + storage_type=WebvizPluginABC.StorageType.SESSION, + ) + self.add_view( + SingleMapView( + provider_dealer=provider_dealer, + field_name=field_name, + surface_server=surface_server, + ), + ElementIds.SINGLEMAPVIEW.ID, + ) + + @property + def oauth2(self): + return self._use_oauth2 diff --git a/webviz_subsurface/plugins/_map_viewer_sumo/views/__init__.py b/webviz_subsurface/plugins/_map_viewer_sumo/views/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/webviz_subsurface/plugins/_map_viewer_sumo/views/_single_map_view.py b/webviz_subsurface/plugins/_map_viewer_sumo/views/_single_map_view.py new file mode 100644 index 000000000..f25b5d150 --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_sumo/views/_single_map_view.py @@ -0,0 +1,127 @@ +from typing import List, Tuple, Dict, Optional + +from dash import Input, Output, State, callback, no_update +from webviz_config.webviz_plugin_subclasses import ( + ViewABC, +) + +from .settings._case_selector import CaseSelector +from .settings._surface_selectors import SurfaceSelector +from .view_elements._deckgl_view import DeckGLView + +from .._layout_elements import ElementIds +from .settings._surface_selectors import SurfaceAddress + +from webviz_subsurface._providers.ensemble_surface_provider import ( + EnsembleProviderDealer, + SimulatedSurfaceAddress, + 
class SingleMapView(ViewABC):
    """View showing a single (simulated or statistical) surface in a DeckGL map.

    Wires the case/iteration and surface selectors to the DeckGL view element:
    when the surface-address store changes, the surface is looked up (or
    computed and published) via the SurfaceServer and pushed to the map.
    """

    def __init__(
        self,
        provider_dealer: EnsembleProviderDealer,
        field_name: str,
        surface_server: SurfaceServer,
    ) -> None:
        super().__init__("Single Surface View")
        self._provider_dealer = provider_dealer
        self._field_name = field_name
        self._surface_server = surface_server
        # Bug fix: the original statement ended with a stray trailing comma,
        # turning it into a useless one-element tuple expression.
        self.add_view_element(DeckGLView(), ElementIds.DECKGLVIEW.ID)
        self.add_settings_group(
            CaseSelector(provider_dealer=provider_dealer, field_name=field_name),
            ElementIds.CASE_SELECTOR.ID,
        )
        self.add_settings_group(
            SurfaceSelector(provider_dealer=provider_dealer, field_name=field_name),
            ElementIds.SURFACE_SELECTOR.ID,
        )

    def set_callbacks(self) -> None:
        # The DeckGLMap component id is static; resolve it once.
        map_component_id = (
            self.view_element(ElementIds.DECKGLVIEW.ID)
            .component_unique_id(ElementIds.DECKGLVIEW.VIEW)
            .to_string()
        )

        @callback(
            Output(map_component_id, "bounds"),
            Output(map_component_id, "layers"),
            Input(
                self.get_store_unique_id(ElementIds.STORES.SURFACE_ADDRESS_STORE),
                "data",
            ),
            State(map_component_id, "views"),
        )
        def _update_map_component(
            stored_address: Dict, views: Dict
        ) -> Tuple[List[float], List[Dict]]:
            """Resolve the stored selection to a surface and update the map.

            Bug fixes vs. original: the callback has two outputs, so the empty
            case must return one ``no_update`` per output (not a bare
            ``no_update``); the return annotation said ``Dict`` although a
            (bounds, layers) tuple is returned; the ``surface_address``
            parameter was rebound to a different type mid-function.
            """
            if not stored_address:
                return no_update, no_update

            selection = SurfaceAddress(**stored_address)
            if selection.aggregation == AGGREGATIONS.SINGLE_REAL:
                surface_address = SimulatedSurfaceAddress(
                    attribute=selection.surface_attribute,
                    name=selection.surface_name,
                    datestr=selection.surface_date,
                    realization=int(selection.realizations[0]),
                )
            else:
                surface_address = StatisticalSurfaceAddress(
                    attribute=selection.surface_attribute,
                    name=selection.surface_name,
                    datestr=selection.surface_date,
                    realizations=[int(real) for real in selection.realizations],
                    statistic=selection.aggregation,
                )

            provider = self._provider_dealer.get_surface_provider(
                field_name=self._field_name,
                case_name=selection.case_name,
                iteration_id=selection.iteration_name,
            )
            qualified_address = QualifiedSurfaceAddress(
                provider.provider_id(), surface_address
            )
            surf_meta = self._surface_server.get_surface_metadata(qualified_address)
            if not surf_meta:
                # Not published yet -> compute the surface and publish it first
                surface = provider.get_surface(address=surface_address)
                if not surface:
                    raise ValueError(
                        f"Could not get surface for address: {surface_address}"
                    )
                self._surface_server.publish_surface(qualified_address, surface)
                surf_meta = self._surface_server.get_surface_metadata(
                    qualified_address
                )

            viewport_bounds = [
                surf_meta.x_min,
                surf_meta.y_min,
                surf_meta.x_max,
                surf_meta.y_max,
            ]
            layer_data = {
                "image": self._surface_server.encode_partial_url(qualified_address),
                "bounds": surf_meta.deckgl_bounds,
                "rotDeg": surf_meta.deckgl_rot_deg,
                "valueRange": [surf_meta.val_min, surf_meta.val_max],
                "@@type": "ColormapLayer",
            }
            return viewport_bounds, [layer_data]
class CaseSelector(SettingsGroupABC):
    """Settings group for choosing field, case and iteration.

    The options are fetched through the provider dealer; the final selection
    is published to the CASE_ITER_STORE as ``{"case": [...], "iteration": [...]}``
    (raw list values from the SelectWithLabel components).
    """

    def __init__(
        self, provider_dealer: EnsembleProviderDealer, field_name: str
    ) -> None:
        super().__init__("Case selector")
        self._field_name = field_name
        self._provider_dealer = provider_dealer

    def layout(self) -> List[Component]:
        return [
            wcc.SelectWithLabel(
                label="Field",
                id=self.register_component_unique_id(ElementIds.CASE_SELECTOR.FIELD),
                multi=False,
                options=[{"label": self._field_name, "value": self._field_name}],
                value=[self._field_name],
            ),
            wcc.SelectWithLabel(
                label="Case",
                id=self.register_component_unique_id(ElementIds.CASE_SELECTOR.CASE),
                multi=False,
            ),
            wcc.SelectWithLabel(
                label="Iteration",
                id=self.register_component_unique_id(ElementIds.CASE_SELECTOR.ITER),
                multi=False,
            ),
        ]

    def set_callbacks(self) -> None:
        # Component ids are static for the lifetime of the group; resolve once.
        field_id = self.component_unique_id(ElementIds.CASE_SELECTOR.FIELD).to_string()
        case_id = self.component_unique_id(ElementIds.CASE_SELECTOR.CASE).to_string()
        iter_id = self.component_unique_id(ElementIds.CASE_SELECTOR.ITER).to_string()

        @callback(
            Output(case_id, "options"),
            Output(case_id, "value"),
            Input(field_id, "value"),
        )
        def _update_case(
            field_names: Optional[List[str]],
        ) -> Tuple[List[Dict], Optional[List[str]]]:
            """List cases for the selected field; preselect the last one.

            Bug fixes vs. original: empty options are ``[]`` (``[{}]`` is an
            invalid option entry); an empty-list selection no longer falls
            through to ``field_names[0]`` (IndexError); the return annotation
            matches the list value actually returned.
            """
            if not field_names:
                return [], None
            case_names = self._provider_dealer.case_names(field_name=field_names[0])
            if not case_names:
                return [], None
            options = [
                {"label": case_name, "value": case_name} for case_name in case_names
            ]
            return options, [case_names[-1]]

        @callback(
            Output(iter_id, "options"),
            Output(iter_id, "value"),
            Input(field_id, "value"),
            Input(case_id, "value"),
        )
        def _update_iteration(
            field_names: Optional[List[str]], case_names: Optional[List[str]]
        ) -> Tuple[List[Dict], Optional[List[str]]]:
            """List iterations for the chosen case; preselect the first."""
            if not field_names or not case_names:
                return [], None
            iteration_ids = self._provider_dealer.iteration_ids(
                field_name=field_names[0], case_name=case_names[0]
            )
            if not iteration_ids:
                return [], None
            options = [
                {"label": iteration_id, "value": iteration_id}
                for iteration_id in iteration_ids
            ]
            return options, [iteration_ids[0]]

        @callback(
            Output(self.get_store_unique_id(ElementIds.STORES.CASE_ITER_STORE), "data"),
            Input(case_id, "value"),
            Input(iter_id, "value"),
        )
        def _update_case_iter_store(case: List[str], iteration: List[str]) -> Dict:
            """Publish the case/iteration selection to the shared store.

            The values are lists (SelectWithLabel); consumers index ``[0]``.
            """
            if not case or not iteration:
                return {}
            return {"case": case, "iteration": iteration}
@dataclass(frozen=True)
class SurfaceAddress:
    """Complete user selection identifying one (possibly aggregated) surface."""

    field: str
    case_name: str
    iteration_name: str
    realizations: List[int]
    aggregation: str
    surface_attribute: str
    surface_name: str
    surface_date: Optional[str]  # None for date-less (static) surfaces


class AGGREGATIONS(str, Enum):
    """Aggregation modes offered in the 'Aggregation' dropdown."""

    SINGLE_REAL = "Single realization"
    MEAN = "Mean"
    STDDEV = "StdDev"
    MIN = "Minimum"
    MAX = "Maximum"
    P10 = "P10"
    P90 = "P90"


class SurfaceSelector(SettingsGroupABC):
    """Settings group for selecting attribute/name/date/realizations/aggregation.

    Publishes the combined selection to the SURFACE_ADDRESS_STORE as a plain
    dict (JSON-serializable) that SingleMapView reconstructs into a
    SurfaceAddress.
    """

    def __init__(
        self, provider_dealer: EnsembleProviderDealer, field_name: str
    ) -> None:
        super().__init__("Surface selectors")
        self.field_name = field_name
        self._provider_dealer = provider_dealer

    def _get_provider(self, case_iter: Dict):
        """Resolve the surface provider for the stored case/iteration selection."""
        return self._provider_dealer.get_surface_provider(
            field_name=self.field_name,
            case_name=case_iter["case"][0],
            iteration_id=case_iter["iteration"][0],
        )

    def layout(self) -> List[Component]:
        return [
            wcc.SelectWithLabel(
                label="Surface attribute",
                id=self.register_component_unique_id(
                    ElementIds.SURFACE_SELECTOR.ATTRIBUTE
                ),
                multi=False,
            ),
            wcc.SelectWithLabel(
                label="Surface name",
                id=self.register_component_unique_id(ElementIds.SURFACE_SELECTOR.NAME),
                multi=False,
            ),
            wcc.SelectWithLabel(
                label="Surface date",
                id=self.register_component_unique_id(ElementIds.SURFACE_SELECTOR.DATE),
                multi=False,
            ),
            wcc.Dropdown(
                label="Aggregation",
                id=self.register_component_unique_id(
                    ElementIds.SURFACE_SELECTOR.AGGREGATION
                ),
                options=[{"label": agg, "value": agg} for agg in AGGREGATIONS],
                value=AGGREGATIONS.SINGLE_REAL,
                clearable=False,
            ),
            wcc.SelectWithLabel(
                label="Realization",
                id=self.register_component_unique_id(
                    ElementIds.SURFACE_SELECTOR.REALIZATION
                ),
                multi=False,
            ),
        ]

    def set_callbacks(self) -> None:
        # Component/store ids are static for the group's lifetime; resolve once.
        attribute_id = self.component_unique_id(
            ElementIds.SURFACE_SELECTOR.ATTRIBUTE
        ).to_string()
        name_id = self.component_unique_id(
            ElementIds.SURFACE_SELECTOR.NAME
        ).to_string()
        date_id = self.component_unique_id(
            ElementIds.SURFACE_SELECTOR.DATE
        ).to_string()
        realization_id = self.component_unique_id(
            ElementIds.SURFACE_SELECTOR.REALIZATION
        ).to_string()
        aggregation_id = self.component_unique_id(
            ElementIds.SURFACE_SELECTOR.AGGREGATION
        ).to_string()
        case_iter_store = self.get_store_unique_id(ElementIds.STORES.CASE_ITER_STORE)
        address_store = self.get_store_unique_id(
            ElementIds.STORES.SURFACE_ADDRESS_STORE
        )

        @callback(
            Output(realization_id, "options"),
            Output(realization_id, "value"),
            Output(realization_id, "multi"),
            Input(case_iter_store, "data"),
            Input(aggregation_id, "value"),
        )
        def _update_realization(
            case_iter: Dict, aggregation: str
        ) -> Tuple[List[Dict], List[int], bool]:
            """Populate realizations; multi-select only for aggregations.

            Bug fix: three outputs -> one ``no_update`` per output (the
            original returned a single bare ``no_update``).
            """
            if not case_iter:
                return no_update, no_update, no_update
            realizations = self._get_provider(case_iter).realizations()
            if AGGREGATIONS(aggregation) == AGGREGATIONS.SINGLE_REAL:
                selected_reals = [realizations[0]]
                multi = False
            else:
                selected_reals = realizations
                multi = True
            options = [{"label": real, "value": real} for real in realizations]
            return options, selected_reals, multi

        @callback(
            Output(attribute_id, "options"),
            Output(attribute_id, "value"),
            Input(case_iter_store, "data"),
        )
        def _update_surface_attribute(case_iter: Dict) -> Tuple[List[Dict], List[str]]:
            """Populate attributes for the case/iteration; preselect the first.

            Bug fix: two outputs -> one ``no_update`` per output.
            """
            if not case_iter:
                return no_update, no_update
            attributes = self._get_provider(case_iter).attributes()
            options = [{"label": attr, "value": attr} for attr in attributes]
            return options, [attributes[0]]

        @callback(
            Output(name_id, "options"),
            Output(name_id, "value"),
            Output(date_id, "options"),
            Output(date_id, "value"),
            Input(attribute_id, "value"),
            State(case_iter_store, "data"),
        )
        def _update_surface_names_and_dates(
            attribute_name: List[str], case_iter: Dict
        ) -> Tuple[List[Dict], List[str], List[Dict], Optional[List[str]]]:
            """Populate names and dates available for the selected attribute."""
            if not case_iter or not attribute_name:
                return no_update, no_update, no_update, no_update
            provider = self._get_provider(case_iter)
            attribute = attribute_name[0]
            surface_names = provider.surface_names_for_attribute(
                surface_attribute=attribute,
            )
            surface_dates = provider.surface_dates_for_attribute(
                surface_attribute=attribute,
            )
            name_options = [{"label": name, "value": name} for name in surface_names]
            name_value = [surface_names[0]]
            if surface_dates:
                date_options = [
                    {"label": date, "value": date} for date in surface_dates
                ]
                date_value = [surface_dates[0]]
            else:
                # No dates for this attribute (static surface).
                # Bug fix: empty options are [] — [{}] is an invalid option.
                date_options = []
                date_value = None
            return name_options, name_value, date_options, date_value

        @callback(
            Output(address_store, "data"),
            Input(case_iter_store, "data"),
            Input(attribute_id, "value"),
            Input(name_id, "value"),
            Input(date_id, "value"),
            Input(realization_id, "value"),
            Input(aggregation_id, "value"),
        )
        def _update_surface_address_store(
            case_iter: Dict,
            surface_attribute: List[str],
            surface_name: List[str],
            surface_date: Optional[List[str]],
            realizations: List[int],
            aggregation: str,
        ) -> Dict:
            """Combine all selections into a JSON-serializable address dict.

            Bug fixes vs. original: the callback was (confusingly) also named
            ``_update_surface_names_and_dates``, shadowing the callback above;
            it returned the frozen dataclass itself, which dcc.Store cannot
            JSON-serialize — we return ``asdict(address)`` instead; a leftover
            debug ``print`` and a dead duplicated guard are removed.
            """
            from dataclasses import asdict  # stdlib; local to avoid touching module imports

            if any(
                el is None
                for el in [
                    case_iter,
                    surface_attribute,
                    surface_name,
                    realizations,
                    aggregation,
                ]
            ):
                return {}
            if not case_iter:
                return no_update

            address = SurfaceAddress(
                field=self.field_name,
                case_name=case_iter["case"][0],
                iteration_name=case_iter["iteration"][0],
                realizations=realizations,
                aggregation=aggregation,
                surface_attribute=surface_attribute[0],
                surface_name=surface_name[0],
                surface_date=surface_date[0] if surface_date is not None else None,
            )
            return asdict(address)
class DeckGLView(ViewElementABC):
    """View element wrapping a single DeckGLMap component.

    The map starts with an empty ColormapLayer and a hard-coded initial
    viewport (Drogon-area UTM coordinates); the SingleMapView callbacks
    replace ``bounds`` and ``layers`` once a surface has been selected.
    """

    def __init__(self) -> None:
        super().__init__()

    def inner_layout(self) -> Component:
        # Placeholder layer/bounds shown until a surface selection arrives.
        initial_layers = [
            {
                "@@type": "ColormapLayer",
            },
        ]
        initial_bounds = [456063.6875, 5926551, 467483.6875, 5939431]
        view_config = {
            "layout": [1, 1],
            "showLabel": True,
            "viewports": [
                {
                    "id": "one",
                    "layerIds": ["colormap-layer"],
                    "show3D": False,
                    "name": "test",
                }
            ],
        }
        map_component = wsc.DeckGLMap(
            id=self.register_component_unique_id(ElementIds.DECKGLVIEW.VIEW),
            layers=initial_layers,
            bounds=initial_bounds,
            views=view_config,
        )
        return html.Div(style={"height": "90vh"}, children=map_component)