From c1bf0347b0735b791b1e42308ad4fda584f1efc1 Mon Sep 17 00:00:00 2001 From: sclaw Date: Fri, 8 Nov 2024 16:32:00 -0500 Subject: [PATCH 01/46] fix job html rendering --- MANIFEST.in | 3 ++- ripple1d/api/app.py | 13 +++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 214d5550..c6395c32 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,2 @@ -exclude production/* \ No newline at end of file +include ripple1d\api\templates\* +recursive-include ripple1d\api\static\ * diff --git a/ripple1d/api/app.py b/ripple1d/api/app.py index 7ef741a8..017a2157 100644 --- a/ripple1d/api/app.py +++ b/ripple1d/api/app.py @@ -131,8 +131,7 @@ def test(): @app.route("/jobs", methods=["GET"]) def jobs(): """Retrieve OGC status and result for all jobs.""" - # try: - format_option = request.args.get("f") + format_option = request.args.get("f", default="json") task2metadata = tasks.task_status(only_task_id=None) jobs = [get_job_status(task_id, huey_metadata) for task_id, huey_metadata in task2metadata.items()] response = {"jobs": jobs} @@ -273,7 +272,9 @@ def parse_request_param__bool(param_name: str, default: bool) -> tuple[bool, tup ) -def get_job_status(task_id: str, huey_metadata: dict, return_result: bool = False, include_traceback: bool = False) -> dict: +def get_job_status( + task_id: str, huey_metadata: dict, return_result: bool = False, include_traceback: bool = False +) -> dict: """Convert huey-style task status metadata into a OGC-style job summary dictionary.""" out_dict = { "jobID": task_id, @@ -282,9 +283,9 @@ def get_job_status(task_id: str, huey_metadata: dict, return_result: bool = Fals "processID": huey_metadata["func_name"], } if return_result: - if not include_traceback and huey_metadata['result'] is not None: - del huey_metadata['result']['tb'] - out_dict["result"] = huey_metadata['result'] + if not include_traceback and huey_metadata["result"] is not None: + del huey_metadata["result"]["tb"] + out_dict["result"] = 
huey_metadata["result"] return out_dict From 799a2fe841c0e55f63977eb1fdab4696f3b0fcd1 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:13:50 -0500 Subject: [PATCH 02/46] revert to old version --- docs/Makefile | 18 +++ docs/make.bat | 2 + docs/requirements.txt | 130 ------------------ docs/{ => source}/_static/custom.css | 0 docs/{ => source}/_templates/navbar-logo.html | 0 .../_templates/navbar-version.html | 0 docs/{ => source}/api_reference.rst | 0 docs/{ => source}/change_log.rst | 44 ------ docs/{ => source}/conf.py | 20 ++- .../endpoints/compute_conflation_metrics.rst | 0 .../{ => source}/endpoints/conflate_model.rst | 0 .../{ => source}/endpoints/create_fim_lib.rst | 0 .../create_model_run_normal_depth.rst | 0 .../endpoints/create_ras_terrain.rst | 0 .../endpoints/extract_submodel.rst | 0 docs/{ => source}/endpoints/ras_to_gpkg.rst | 0 .../run_incremental_normal_depth.rst | 0 docs/{ => source}/endpoints/run_known_wse.rst | 0 docs/{ => source}/images/coverage_metrics.png | Bin docs/{ => source}/images/length_metrics.png | Bin docs/{ => source}/images/source_w_nwm.png | Bin docs/{ => source}/images/sub_models.png | Bin docs/{ => source}/images/xs_metrics.png | Bin docs/{ => source}/index.rst | 0 docs/{ => source}/postman.rst | 10 +- docs/source/requirements.txt | 7 + docs/{ => source}/tech_summary.rst | 0 docs/{ => source}/user_guide.rst | 0 28 files changed, 41 insertions(+), 190 deletions(-) create mode 100644 docs/Makefile create mode 100644 docs/make.bat delete mode 100644 docs/requirements.txt rename docs/{ => source}/_static/custom.css (100%) rename docs/{ => source}/_templates/navbar-logo.html (100%) rename docs/{ => source}/_templates/navbar-version.html (100%) rename docs/{ => source}/api_reference.rst (100%) rename docs/{ => source}/change_log.rst (76%) rename docs/{ => source}/conf.py (83%) rename docs/{ => source}/endpoints/compute_conflation_metrics.rst (100%) rename docs/{ => source}/endpoints/conflate_model.rst (100%) rename docs/{ 
=> source}/endpoints/create_fim_lib.rst (100%) rename docs/{ => source}/endpoints/create_model_run_normal_depth.rst (100%) rename docs/{ => source}/endpoints/create_ras_terrain.rst (100%) rename docs/{ => source}/endpoints/extract_submodel.rst (100%) rename docs/{ => source}/endpoints/ras_to_gpkg.rst (100%) rename docs/{ => source}/endpoints/run_incremental_normal_depth.rst (100%) rename docs/{ => source}/endpoints/run_known_wse.rst (100%) rename docs/{ => source}/images/coverage_metrics.png (100%) rename docs/{ => source}/images/length_metrics.png (100%) rename docs/{ => source}/images/source_w_nwm.png (100%) rename docs/{ => source}/images/sub_models.png (100%) rename docs/{ => source}/images/xs_metrics.png (100%) rename docs/{ => source}/index.rst (100%) rename docs/{ => source}/postman.rst (80%) create mode 100644 docs/source/requirements.txt rename docs/{ => source}/tech_summary.rst (100%) rename docs/{ => source}/user_guide.rst (100%) diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..af77804e --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,18 @@ +# Define variables for Sphinx +SPHINXAPIDOC = sphinx-apidoc +SPHINXBUILD = sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Target to run sphinx-apidoc to generate .rst files +apidoc: + $(SPHINXAPIDOC) -o $(SOURCEDIR)/api ../ripple1d/ + +# Target to run sphinx-build to generate HTML docs +html: apidoc + $(SPHINXBUILD) -b html $(SOURCEDIR) $(BUILDDIR) + +# Clean up the generated files +clean: + rm -rf $(BUILDDIR)/* + rm -rf $(SOURCEDIR)/api diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..c16ce2d8 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,2 @@ +sphinx-apidoc -o docs\source\api\ ripple1d\ +sphinx-build -M html docs\source docs\build\ \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 7b976638..00000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,130 +0,0 @@ -affine==2.4.0 
-alabaster==0.7.16 -asttokens==2.4.1 -attrs==24.1.0 -blinker==1.8.2 -boto3==1.34.153 -botocore==1.34.153 -build==1.2.1 -certifi==2024.7.4 -charset-normalizer==3.3.2 -click==8.1.7 -click-plugins==1.1.1 -cligj==0.7.2 -colorama==0.4.6 -comm==0.2.2 -contextily==1.6.0 -contourpy==1.2.1 -cycler==0.12.1 -decorator==5.1.1 -# docutils==0.21.2 -executing==2.0.1 -fiona==1.9.6 -Flask==3.0.3 -fonttools==4.53.1 -geographiclib==2.0 -geopandas==1.0.1 -geopy==2.4.1 -ghp-import==2.1.0 -h5py==3.11.0 -huey==2.5.1 -idna==3.7 -imagesize==1.4.1 -importlib_metadata==8.2.0 -iniconfig==2.0.0 -itsdangerous==2.2.0 -jaraco.classes==3.4.0 -jaraco.context==5.3.0 -jaraco.functools==4.0.2 -jedi==0.19.1 -Jinja2==3.1.4 -jmespath==1.0.1 -joblib==1.4.2 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -keyring==25.3.0 -kiwisolver==1.4.5 -Markdown==3.6 -markdown-it-py==3.0.0 -MarkupSafe==2.1.5 -# matplotlib==3.9.1 -mdit-py-plugins==0.4.1 -mdurl==0.1.2 -mercantile==1.2.1 -mergedeep==1.3.4 -mkdocs==1.6.0 -mkdocs-get-deps==0.2.0 -mock==5.1.0 -more-itertools==10.4.0 -mypy-boto3-s3==1.34.120 -myst-parser -nest-asyncio==1.6.0 -nh3==0.2.18 -numpy==2.0.1 -numpydoc==1.7.0 -packaging==24.1 -pandas==2.2.2 -parso==0.8.4 -pathspec==0.12.1 -pillow==10.4.0 -pkginfo==1.10.0 -platformdirs==4.2.2 -pluggy==1.5.0 -prompt_toolkit==3.0.47 -psutil==6.0.0 -pure_eval==0.2.3 -pyarrow==16.1.0 -Pygments==2.18.0 -pyogrio==0.9.0 -pyparsing==3.1.2 -pyproj==3.6.1 -pyproject_hooks==1.1.0 -pystac==1.10.1 -pystac-client==0.8.3 -pytest==8.3.2 -python-dateutil==2.9.0.post0 -python-dotenv==1.0.1 -pytz==2024.1 -PyYAML==6.0.1 -pyyaml_env_tag==0.1 -pyzmq==26.0.3 -rasterio==1.3.10 -referencing==0.35.1 -requests==2.32.3 -requests-toolbelt==1.0.0 -rfc3986==2.0.0 -rich==13.7.1 -rpds-py==0.19.1 -ruff==0.5.6 -s3transfer==0.10.2 -setuptools==72.1.0 -shapely==2.0.5 -six==1.16.0 -snowballstemmer==2.2.0 -snuggs==1.4.7 -Sphinx==7.4.7 -sphinx_design==0.6.1 -pydata-sphinx-theme==0.15.4 -sphinx-autodoc-typehints==2.2.3 
-sphinxcontrib-applehelp==2.0.0 -sphinxcontrib-devhelp==2.0.0 -sphinxcontrib-htmlhelp==2.1.0 -sphinxcontrib-jquery==4.1 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==2.0.0 -sphinxcontrib-serializinghtml==2.0.0 -sphinx-rtd-theme==2.0.0 -stack-data==0.6.3 -tabulate==0.9.0 -toml==0.10.2 -tornado==6.4.1 -traitlets==5.14.3 -twine==5.1.1 -tzdata==2024.1 -urllib3==2.2.2 -watchdog==4.0.1 -wcwidth==0.2.13 -Werkzeug==3.0.3 -wheel==0.43.0 -xyzservices==2024.6.0 -zipp==3.19.2 diff --git a/docs/_static/custom.css b/docs/source/_static/custom.css similarity index 100% rename from docs/_static/custom.css rename to docs/source/_static/custom.css diff --git a/docs/_templates/navbar-logo.html b/docs/source/_templates/navbar-logo.html similarity index 100% rename from docs/_templates/navbar-logo.html rename to docs/source/_templates/navbar-logo.html diff --git a/docs/_templates/navbar-version.html b/docs/source/_templates/navbar-version.html similarity index 100% rename from docs/_templates/navbar-version.html rename to docs/source/_templates/navbar-version.html diff --git a/docs/api_reference.rst b/docs/source/api_reference.rst similarity index 100% rename from docs/api_reference.rst rename to docs/source/api_reference.rst diff --git a/docs/change_log.rst b/docs/source/change_log.rst similarity index 76% rename from docs/change_log.rst rename to docs/source/change_log.rst index 465b504f..eee32807 100644 --- a/docs/change_log.rst +++ b/docs/source/change_log.rst @@ -4,50 +4,6 @@ Change Log .. note:: Go to the `Releases `__ page for a list of all releases. -Feature Release 0.7.0 -~~~~~~~~~~~~~~~~~~~~~ -Users Changelog ----------------- - -This release of `ripple1d` attempts to speed up the known water surface elevation (kwse) computes by parallelizing the creation of the depth grids. To do this, all kwse runs are ran without producing depth grids. 
From these initial kwse runs the rating curve database are created and used to inform the boundary condition for the next reach upstream. Once all of the boundary conditions are known, a second kwse run is ran to produce the depth grids in parallel. To make this happen some changes were necessary to the endpoints. - -In addition to adding/modifying the endpoints, this release of `ripple1d` makes significant updates to both ripple1d execution and logging. This includes running each endpoint as a separate subprocess which allows the user to have the ability to dismiss running jobs. This should be handy for when jobs appear to have hung up. Dismissing these hung up jobs will free up cpu for new jobs. - -Features Added ----------------- -**Write depth grids argument** -A new argument boolean "write_depth_grids" was added to the "run_incremental_normal_depth" and "run_known_wse" endpoints. This allows the user to specify whether ripple1d should compute raw RAS depth grids or not. - -**Create rating curves database** -A new endpoint called "create_rating_curves_db" was added. This endpoint is a post processing step that creates a rating curve database from RAS plan hdf results. This endpoint only requires 2 args: "submodel_directory" and "plans. The location of the rating curve database is inferred from the submodel directory. It will be located directly in the submodel directory and will be named as the network reach name; e.g., "2820002/2820002.db". - -The "create_rating_curves_db" endpoint checks for the presence of the depth grid for each profile and records its existence or absence along with the plan suffix in the database table. The columns are "plan_suffix" and "map_exist" and are of type string and boolean, respectively. - -**Create FIM library update** -The "create_fim_lib" endpoint no longer produces the rating curve database. 
- -**Update to the Ripple1d workflow** -The user should take care with the args when calling the new/modified endpoints; specifically the plan names. The recommended order for calling these endpoints is: - -1. run_known_wse : with "write_depth_grids" set to false and "plan_suffix" set to ["ikwse"] -2. create_rating_curves_db: with "plans" set to ["ikwse"] -3. run_known_wse: with "write_depth_grids" set to true and "plan_suffix" set to ["nd","kwse"] -4. create_rating_curves_db: with "plans" set to ["nd","kwse"] -5. create_fim_lib: with "plans" set to ["nd","kwse"] - -**Subprocess encapsulation** -The execution of all endpoints are now encapsulated within subprocesses - -**Logs and database** - -- Huey and flask logs have been combined into a single log file (server-logs.jsonld). -- Huey.db has been renamed to jobs.db -- The process id for each endpoint are now tracked in a p_id field of the task_status table in jobs.db -- A huey_status field has been added to the task_status table. This field tracks the execution status of the endpoint subprocess. -- A new table called task_logs has been added to jobs.db. This table contains stdout, stderr, and results stemming from endpoint subprocesses. 
-- A proof of concept graphical (html-based) job status page has been added - - Bugfix Release 0.6.3 ~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/conf.py b/docs/source/conf.py similarity index 83% rename from docs/conf.py rename to docs/source/conf.py index 58af8823..f349bacf 100644 --- a/docs/conf.py +++ b/docs/source/conf.py @@ -8,6 +8,8 @@ import os import sys +import ripple1d + sys.path.insert(0, os.path.abspath("../")) # -- Project information ----------------------------------------------------- @@ -49,14 +51,18 @@ html_theme = "pydata_sphinx_theme" html_theme_options = { - "navbar_start": ["navbar-logo", "navbar-version"], - "navbar_center": ["navbar-nav"], - "navbar_end": ["navbar-icon-links"], - "navbar_persistent": ["search-button"], +"navbar_start": ["navbar-logo", "navbar-version"], +"navbar_center": ["navbar-nav"], +"navbar_end": ["navbar-icon-links"], +"navbar_persistent": ["search-button"] } html_sidebars = { - "tech_summary": [], - "postman": [], - "change_log": [], + "tech_summary": [], + "postman": [], + "change_log": [], } + + +# Substitutions +version = ripple1d.__version__ \ No newline at end of file diff --git a/docs/endpoints/compute_conflation_metrics.rst b/docs/source/endpoints/compute_conflation_metrics.rst similarity index 100% rename from docs/endpoints/compute_conflation_metrics.rst rename to docs/source/endpoints/compute_conflation_metrics.rst diff --git a/docs/endpoints/conflate_model.rst b/docs/source/endpoints/conflate_model.rst similarity index 100% rename from docs/endpoints/conflate_model.rst rename to docs/source/endpoints/conflate_model.rst diff --git a/docs/endpoints/create_fim_lib.rst b/docs/source/endpoints/create_fim_lib.rst similarity index 100% rename from docs/endpoints/create_fim_lib.rst rename to docs/source/endpoints/create_fim_lib.rst diff --git a/docs/endpoints/create_model_run_normal_depth.rst b/docs/source/endpoints/create_model_run_normal_depth.rst similarity index 100% rename from 
docs/endpoints/create_model_run_normal_depth.rst rename to docs/source/endpoints/create_model_run_normal_depth.rst diff --git a/docs/endpoints/create_ras_terrain.rst b/docs/source/endpoints/create_ras_terrain.rst similarity index 100% rename from docs/endpoints/create_ras_terrain.rst rename to docs/source/endpoints/create_ras_terrain.rst diff --git a/docs/endpoints/extract_submodel.rst b/docs/source/endpoints/extract_submodel.rst similarity index 100% rename from docs/endpoints/extract_submodel.rst rename to docs/source/endpoints/extract_submodel.rst diff --git a/docs/endpoints/ras_to_gpkg.rst b/docs/source/endpoints/ras_to_gpkg.rst similarity index 100% rename from docs/endpoints/ras_to_gpkg.rst rename to docs/source/endpoints/ras_to_gpkg.rst diff --git a/docs/endpoints/run_incremental_normal_depth.rst b/docs/source/endpoints/run_incremental_normal_depth.rst similarity index 100% rename from docs/endpoints/run_incremental_normal_depth.rst rename to docs/source/endpoints/run_incremental_normal_depth.rst diff --git a/docs/endpoints/run_known_wse.rst b/docs/source/endpoints/run_known_wse.rst similarity index 100% rename from docs/endpoints/run_known_wse.rst rename to docs/source/endpoints/run_known_wse.rst diff --git a/docs/images/coverage_metrics.png b/docs/source/images/coverage_metrics.png similarity index 100% rename from docs/images/coverage_metrics.png rename to docs/source/images/coverage_metrics.png diff --git a/docs/images/length_metrics.png b/docs/source/images/length_metrics.png similarity index 100% rename from docs/images/length_metrics.png rename to docs/source/images/length_metrics.png diff --git a/docs/images/source_w_nwm.png b/docs/source/images/source_w_nwm.png similarity index 100% rename from docs/images/source_w_nwm.png rename to docs/source/images/source_w_nwm.png diff --git a/docs/images/sub_models.png b/docs/source/images/sub_models.png similarity index 100% rename from docs/images/sub_models.png rename to docs/source/images/sub_models.png 
diff --git a/docs/images/xs_metrics.png b/docs/source/images/xs_metrics.png similarity index 100% rename from docs/images/xs_metrics.png rename to docs/source/images/xs_metrics.png diff --git a/docs/index.rst b/docs/source/index.rst similarity index 100% rename from docs/index.rst rename to docs/source/index.rst diff --git a/docs/postman.rst b/docs/source/postman.rst similarity index 80% rename from docs/postman.rst rename to docs/source/postman.rst index ec2592b1..61be7fef 100644 --- a/docs/postman.rst +++ b/docs/source/postman.rst @@ -3,15 +3,7 @@ Postman collection For reference and documentation of the API, please open the postman collection for the version of ripple1d -`v0.7.0: `_ This beta version contains: - new endpoints: - - `create_rating_curves_db`: creates rating curve using results from `run_known_wse` and `run_incremental_normal_depth` results - - `jobs`: added endpoints to view job `results`, `metadata`, and `logs` - - new args: - - `write_depth_grids` (bool) added to `run_known_wse` and `run_incremental_normal_depth` endpoints - -`v0.6.0-v0.6.3: `_ This beta version contains new args for the create_fim_lib endpoint: +`v0.6.0-v0.6.1: `_ This beta version contains new args for the create_fim_lib endpoint: - `library_directory`: Specifies the output directory for the FIM grids and database. - `cleanup`: Boolean indicating if the ras HEC-RAS output grids should be deleted or not. diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt new file mode 100644 index 00000000..b4d46ac5 --- /dev/null +++ b/docs/source/requirements.txt @@ -0,0 +1,7 @@ +-e . 
+Sphinx +sphinx-autodoc-typehints +sphinx-rtd-theme +myst-parser +sphinx_design +pydata-sphinx-theme \ No newline at end of file diff --git a/docs/tech_summary.rst b/docs/source/tech_summary.rst similarity index 100% rename from docs/tech_summary.rst rename to docs/source/tech_summary.rst diff --git a/docs/user_guide.rst b/docs/source/user_guide.rst similarity index 100% rename from docs/user_guide.rst rename to docs/source/user_guide.rst From af5bdd7fde59310199cd11dccaf9a33db9f56cb7 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:16:31 -0500 Subject: [PATCH 03/46] remove ripple requirements --- docs/source/conf.py | 18 +++++++++--------- docs/source/requirements.txt | 3 +-- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f349bacf..9b0ddcde 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -8,7 +8,7 @@ import os import sys -import ripple1d +# import ripple1d sys.path.insert(0, os.path.abspath("../")) @@ -51,18 +51,18 @@ html_theme = "pydata_sphinx_theme" html_theme_options = { -"navbar_start": ["navbar-logo", "navbar-version"], -"navbar_center": ["navbar-nav"], -"navbar_end": ["navbar-icon-links"], -"navbar_persistent": ["search-button"] + "navbar_start": ["navbar-logo", "navbar-version"], + "navbar_center": ["navbar-nav"], + "navbar_end": ["navbar-icon-links"], + "navbar_persistent": ["search-button"], } html_sidebars = { - "tech_summary": [], - "postman": [], - "change_log": [], + "tech_summary": [], + "postman": [], + "change_log": [], } # Substitutions -version = ripple1d.__version__ \ No newline at end of file +version = 0 diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index b4d46ac5..28cf530a 100644 --- a/docs/source/requirements.txt +++ b/docs/source/requirements.txt @@ -1,7 +1,6 @@ --e . 
Sphinx sphinx-autodoc-typehints sphinx-rtd-theme myst-parser sphinx_design -pydata-sphinx-theme \ No newline at end of file +pydata-sphinx-theme From dab78fd2eeb3b5d8f13cd5fa40266ca30328ce8a Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:19:01 -0500 Subject: [PATCH 04/46] fix rtd yaml paths --- readthedocs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/readthedocs.yaml b/readthedocs.yaml index b41a7b16..171c642c 100644 --- a/readthedocs.yaml +++ b/readthedocs.yaml @@ -16,7 +16,7 @@ build: # Build documentation in the "docs/" directory with Sphinx sphinx: - configuration: docs/conf.py + configuration: docs/source/conf.py # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs # builder: "dirhtml" # Fail on all warnings to avoid broken references @@ -32,4 +32,4 @@ sphinx: # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: install: - - requirements: docs/requirements.txt \ No newline at end of file + - requirements: docs/source/requirements.txt From 81db0f04415cbd8f4c0f31623bd66304e1a4182d Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:26:28 -0500 Subject: [PATCH 05/46] update doc dependencies --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index beb3eb2a..35bd98e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ dependencies = [ [project.optional-dependencies] dev = ["pre-commit", "ruff", "pytest", "pytest-cov"] -docs = ["sphinx", "numpydoc", "sphinx_rtd_theme", "sphinx_design", "pydata-sphinx-theme"] +docs = ["sphinx", "numpydoc", "sphinx_rtd_theme", "sphinx_design", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "myst-parser"] [project.urls] repository = "https://github.com/dewberry/ripple1d" @@ -80,4 +80,4 @@ include = ["ripple1d*"] [tool.sphinx] project = "ripple1d" -author = "Seth Lawler" \ No newline at end of file +author = 
"Seth Lawler" From ef5c3618cc50d3bb228f58317d96e2f61c274df2 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:26:37 -0500 Subject: [PATCH 06/46] fix fake version --- docs/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 9b0ddcde..39458e6a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -65,4 +65,4 @@ # Substitutions -version = 0 +version = "0.0.0" From c6fc2bace734628e7835c2f17769e88e23f428fa Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:34:07 -0500 Subject: [PATCH 07/46] add sphinx-apidoc command to rtd yaml --- readthedocs.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/readthedocs.yaml b/readthedocs.yaml index 171c642c..e58392fb 100644 --- a/readthedocs.yaml +++ b/readthedocs.yaml @@ -33,3 +33,6 @@ sphinx: python: install: - requirements: docs/source/requirements.txt + +commands: + - sphinx-apidoc -o docs/source/api ripple1d From b939241a3f89d0c598d38bbf2f3d9289bf091979 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:38:05 -0500 Subject: [PATCH 08/46] add sphinx-apidoc command to rtd yaml --- readthedocs.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/readthedocs.yaml b/readthedocs.yaml index e58392fb..a4c989d2 100644 --- a/readthedocs.yaml +++ b/readthedocs.yaml @@ -9,6 +9,9 @@ build: os: ubuntu-22.04 tools: python: "3.10" + jobs: + pre_build: + - sphinx-apidoc -o docs/source/api ripple1d # You can also specify other tool versions: # nodejs: "20" # rust: "1.70" @@ -33,6 +36,3 @@ sphinx: python: install: - requirements: docs/source/requirements.txt - -commands: - - sphinx-apidoc -o docs/source/api ripple1d From 7a066719cac4e49892e18c8d0a0564fa379be665 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 16:54:05 -0500 Subject: [PATCH 09/46] attempt to fix system path for sphinx-build --- docs/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/docs/source/conf.py b/docs/source/conf.py index 39458e6a..f3f609fa 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -10,7 +10,7 @@ # import ripple1d -sys.path.insert(0, os.path.abspath("../")) +sys.path.insert(0, os.path.abspath("../..")) # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information From 892403db957891fac7d7f3650c6c84cd19e435d7 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 17:00:34 -0500 Subject: [PATCH 10/46] update rtd requirements --- docs/source/requirements.txt | Bin 95 -> 3746 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index 28cf530aa7a0132362e6f8c6507bcda5255073fc..659a45d89f5e63a306c47530a654d02fca309914 100644 GIT binary patch literal 3746 zcmbW4%Whjo5JYDkAfG}*BqJ+XWRZ;*Mhpb-BFM@hDT<`TmwBm|KR!vFu4&Gt6l)O> zlm*RncUARc+W-7LDKmZMrPF6sre&?)2W4NL%c5-b`?ef)@~b`<&8^yPWJP1h&#jT;i7st>g-UxkHQ)+KE-w_%k|0ou)Ob`&UJn- z`;}1K>sD8G`gbocUdoh71YXYJa(9OHXW5Uc{Z6rV@`iVKjLW5d zCRU%$bmLI7!h8U=PsEAh;$A$cZg-GM-ZB~}TiY{~A9tag)H~JeGznu_{Wz!U& z#c49c9|*2$WGp!&vuo+J2)52E_xv?V?k>u0AVi@neXRog)`4acc(N)$@xDgdMa9g^ zQ!rhtj#9CfJ^WBtWb6>NmUrM}O=q%%Q`QbhoY7%Kx|a1!_EZ4bPb{P0V!YHRktQb$ zznqYMkQLSGig4VlmYa$XgfO;uSdvR{pwvRxcKUgcpS^O1!e}>*XswccZ?9Y1avd>~ ztvNMcbRB2eOOCTt)I0&_Fc?~;r4*R=tf)y_trRPn+sYQJ$Q+pHcd9ATG-fNQ+6C5a zusn$8eN_Q$joU~0*s4}0`Wt>6iMoWdVnat;hZ_n>w5!P?PW6)Lh4`^7+ep|=SY?2?i=d2)a_1pAN9rKx;)53n_a|p zM}g#^+Mx;`g>WDXdbOV?d|LBQ^)~bAvwpv+s+&Qw#ph8s=&)n2lDI|P+Sf)gTde=; zXW}*uxlfmQj+$qS2s@Oa8|kA~4dmFBv{8M*YoJW!6hz(eDWBtrM|UR1wD26@6`|Uk z%dEGi4-t4LV3U*2s>7q*+lt91u;9a0m?+zsCqqVI5*ii0nSm44G*!C@{8<6i(aRpq zTK5~0Rh)H21nb$f+J}l{@Uf~xMK=aymn<`3nCav)UqHB$YfsX>6v^pEBu~%AVl(UE?W@Zz^M zU_By6YS21NqoIl6#e#1;We`l{^vx1u=b;=trEW2QPABpMQGFhGi z2b%P^#7+q44|d75KGtv(uaf5rjQM6HrpQ7czYzFqGe0*>;?3h_aF(-UKMP~_7$>=d 
zCEIp75FUDaCym)+=t#O{UhVr?X?leO}obLr9F`!{-^CyJU5E@GIE=AZy~>}m42v|LpI$7Z(DVW z`MsUxEz7S$`w}UYn)UU4jpzpS&Mn{lo9wgFnlI-1R8{2nq)qa+DY&NW=Zn0vk-vV4 zt%&=8I$(O`yWyz0C3zNkm++qJ4Iq2Sb^d4YxQoq_Uc!gxRBjzl%EdR;8+;$c*#G-H IkJVWG3;jkcDgXcg literal 95 zcmWG8$jHpA;3@`Fx{0MF`6>Cyx+RqbsX+0PVwglxNs4YsMrv* Date: Tue, 12 Nov 2024 17:02:02 -0500 Subject: [PATCH 11/46] update rtd requirements --- docs/source/requirements.txt | Bin 3746 -> 3948 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index 659a45d89f5e63a306c47530a654d02fca309914..9e6c0fb85f9374380aa629a1ed024fab7d417d52 100644 GIT binary patch delta 44 zcmZ1^`$lfVB0lb5h608RhD?S$hKk9l+@_OPaH~viVw0MDi(78;HFhl)UIs1(06wY= A(EtDd delta 7 OcmaDOw@7xwB0c~PF9Rw7 From 51961cddd03c5c2ec547f9f1f7b1d3ca4094ea6b Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 17:07:38 -0500 Subject: [PATCH 12/46] remove pywin32 dependencies --- docs/source/requirements.txt | Bin 3948 -> 3920 bytes pyproject.toml | 1 - ripple1d/ras.py | 10 +--------- 3 files changed, 1 insertion(+), 10 deletions(-) diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index 9e6c0fb85f9374380aa629a1ed024fab7d417d52..b5b255b09ed027213d0511a64916df96db746f10 100644 GIT binary patch delta 12 TcmaDOcR_AL4(H|$&L^w@CB+3x delta 36 qcmca0_eO3*4ySklLnT8wLncEWgE4~ Date: Tue, 12 Nov 2024 17:11:18 -0500 Subject: [PATCH 13/46] remove pythoncom dependency --- ripple1d/ras.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/ripple1d/ras.py b/ripple1d/ras.py index 76f4acac..8c042646 100644 --- a/ripple1d/ras.py +++ b/ripple1d/ras.py @@ -15,12 +15,6 @@ import geopandas as gpd import h5py import pandas as pd - -try: - import pythoncom -except SystemError: - warnings.warn("Windows OS is required to run ripple1d. 
Many features will not work on other OS's.") - from pyproj import CRS from ripple1d.consts import ( From 8a7369e227d20cf86d30eb9170206a77781b58de Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 17:16:57 -0500 Subject: [PATCH 14/46] fix f strings that may be throwing rtd issue? --- ripple1d/utils/sqlite_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ripple1d/utils/sqlite_utils.py b/ripple1d/utils/sqlite_utils.py index 42e0c13d..77a307f8 100644 --- a/ripple1d/utils/sqlite_utils.py +++ b/ripple1d/utils/sqlite_utils.py @@ -41,7 +41,7 @@ def insert_data( for row in data.itertuples(): if boundary_condition == "kwse": - if f"f_{int(row.us_flow)}-z_{str(row.ds_wse).replace(".","_")}" in missing_grids: + if f"f_{int(row.us_flow)}-z_{str(row.ds_wse).replace('.','_')}" in missing_grids: map_exist = 0 else: map_exist = 1 From 42c28d70c7306261fd6869772e21997e6497ff90 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 17:26:40 -0500 Subject: [PATCH 15/46] attempt to resolve rtd indexing issue --- docs/source/endpoints/compute_conflation_metrics.rst | 3 ++- docs/source/endpoints/conflate_model.rst | 3 ++- docs/source/endpoints/create_fim_lib.rst | 3 ++- docs/source/endpoints/create_model_run_normal_depth.rst | 3 ++- docs/source/endpoints/create_ras_terrain.rst | 3 ++- docs/source/endpoints/extract_submodel.rst | 3 ++- docs/source/endpoints/ras_to_gpkg.rst | 3 ++- docs/source/endpoints/run_known_wse.rst | 3 ++- 8 files changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/source/endpoints/compute_conflation_metrics.rst b/docs/source/endpoints/compute_conflation_metrics.rst index 5e0d74fc..2c79f436 100644 --- a/docs/source/endpoints/compute_conflation_metrics.rst +++ b/docs/source/endpoints/compute_conflation_metrics.rst @@ -7,4 +7,5 @@ compute_conflation_metrics **Description:** -.. autofunction:: ripple1d.ops.metrics.compute_conflation_metrics \ No newline at end of file +.. 
autofunction:: ripple1d.ops.metrics.compute_conflation_metrics + :no-index: diff --git a/docs/source/endpoints/conflate_model.rst b/docs/source/endpoints/conflate_model.rst index c7efad33..34632658 100644 --- a/docs/source/endpoints/conflate_model.rst +++ b/docs/source/endpoints/conflate_model.rst @@ -7,4 +7,5 @@ conflate_model **Description:** -.. autofunction:: ripple1d.ops.ras_conflate.conflate_model \ No newline at end of file +.. autofunction:: ripple1d.ops.ras_conflate.conflate_model + :no-index: diff --git a/docs/source/endpoints/create_fim_lib.rst b/docs/source/endpoints/create_fim_lib.rst index 46cd7788..4d1eebe7 100644 --- a/docs/source/endpoints/create_fim_lib.rst +++ b/docs/source/endpoints/create_fim_lib.rst @@ -7,4 +7,5 @@ create_fim_lib **Description:** -.. autofunction:: ripple1d.ops.fim_lib.create_fim_lib \ No newline at end of file +.. autofunction:: ripple1d.ops.fim_lib.create_fim_lib + :no-index: diff --git a/docs/source/endpoints/create_model_run_normal_depth.rst b/docs/source/endpoints/create_model_run_normal_depth.rst index a9aecfad..5c231620 100644 --- a/docs/source/endpoints/create_model_run_normal_depth.rst +++ b/docs/source/endpoints/create_model_run_normal_depth.rst @@ -7,4 +7,5 @@ create_model_run_normal_depth **Description:** -.. autofunction:: ripple1d.ops.ras_run.create_model_run_normal_depth \ No newline at end of file +.. autofunction:: ripple1d.ops.ras_run.create_model_run_normal_depth + :no-index: diff --git a/docs/source/endpoints/create_ras_terrain.rst b/docs/source/endpoints/create_ras_terrain.rst index 8338a5e9..841039c5 100644 --- a/docs/source/endpoints/create_ras_terrain.rst +++ b/docs/source/endpoints/create_ras_terrain.rst @@ -7,4 +7,5 @@ create_ras_terrain **Description:** -.. autofunction:: ripple1d.ops.ras_terrain.create_ras_terrain \ No newline at end of file +.. 
autofunction:: ripple1d.ops.ras_terrain.create_ras_terrain + :no-index: diff --git a/docs/source/endpoints/extract_submodel.rst b/docs/source/endpoints/extract_submodel.rst index 0b9be983..d186ad7c 100644 --- a/docs/source/endpoints/extract_submodel.rst +++ b/docs/source/endpoints/extract_submodel.rst @@ -7,4 +7,5 @@ extract_submodel **Description:** -.. autofunction:: ripple1d.ops.subset_gpkg.extract_submodel \ No newline at end of file +.. autofunction:: ripple1d.ops.subset_gpkg.extract_submodel + :no-index: diff --git a/docs/source/endpoints/ras_to_gpkg.rst b/docs/source/endpoints/ras_to_gpkg.rst index f903f7d7..c3f92b91 100644 --- a/docs/source/endpoints/ras_to_gpkg.rst +++ b/docs/source/endpoints/ras_to_gpkg.rst @@ -7,4 +7,5 @@ ras_to_gpkg **Description:** -.. autofunction:: ripple1d.ras_to_gpkg.gpkg_from_ras \ No newline at end of file +.. autofunction:: ripple1d.ras_to_gpkg.gpkg_from_ras + :no-index: diff --git a/docs/source/endpoints/run_known_wse.rst b/docs/source/endpoints/run_known_wse.rst index 53fff038..b9a748c4 100644 --- a/docs/source/endpoints/run_known_wse.rst +++ b/docs/source/endpoints/run_known_wse.rst @@ -7,4 +7,5 @@ run_known_wse **Description:** -.. autofunction:: ripple1d.ops.ras_run.run_known_wse \ No newline at end of file +.. autofunction:: ripple1d.ops.ras_run.run_known_wse + :no-index: From e8bb9d0182df9b153d149fa67ef57303b485ef88 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 12 Nov 2024 17:27:21 -0500 Subject: [PATCH 16/46] attempt to resolve rtd indexing issue --- docs/source/endpoints/run_incremental_normal_depth.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/source/endpoints/run_incremental_normal_depth.rst b/docs/source/endpoints/run_incremental_normal_depth.rst index dfc27e0e..37cbf080 100644 --- a/docs/source/endpoints/run_incremental_normal_depth.rst +++ b/docs/source/endpoints/run_incremental_normal_depth.rst @@ -7,4 +7,5 @@ run_incremental_normal_depth **Description:** -.. 
autofunction:: ripple1d.ops.ras_run.run_incremental_normal_depth \ No newline at end of file +.. autofunction:: ripple1d.ops.ras_run.run_incremental_normal_depth + :no-index: From 310a09f54932baa3a4ef68f1d5803e422e236b59 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:35:14 -0500 Subject: [PATCH 17/46] add auto-changelog creation --- docs/build_release_changelog.py | 68 +++++++++++ docs/source/change_log.rst | 203 -------------------------------- docs/source/conf.py | 9 ++ 3 files changed, 77 insertions(+), 203 deletions(-) create mode 100644 docs/build_release_changelog.py delete mode 100644 docs/source/change_log.rst diff --git a/docs/build_release_changelog.py b/docs/build_release_changelog.py new file mode 100644 index 00000000..6435bf1d --- /dev/null +++ b/docs/build_release_changelog.py @@ -0,0 +1,68 @@ +import re +from io import TextIOWrapper + +import requests + +CHANGE_LOG_PATH = "docs/source/change_log.rst" + + +def get_content() -> dict: + """Get the content of the changelog from the GitHub API.""" + url = "https://api.github.com/repos/Dewberry/ripple1d/releases" + headers = {"Accept": "application/vnd.github.v3+json"} + response = requests.get(url, headers=headers) + response.raise_for_status() + return response.json() + + +def add_heading(file: TextIOWrapper): + """Add the heading for the changelog.""" + file.write(".. 
note::\n") + file.write( + " Go to the `Releases `__ page for a list of all releases.\n\n" + ) + + +def add_release_body(file: TextIOWrapper, body: str): + """Add the body of a release to the changelog.""" + lines = body.split("\n") + for l in lines: + if l.startswith("# "): + file.write(f"{l[2:]}") + file.write(f"{'-' * len(l[2:])}\n") + elif l.startswith("## "): + file.write(f"{l[3:]}") + file.write(f"{'^' * len(l[3:])}\n") + elif l.startswith("### "): + file.write(f"{l[4:]}") + file.write(f"{'"' * len(l[4:])}\n") + else: + l = re.sub(r"\[([^\]]+)\]\(([^)]+)\)", r"`\1 <\2>`_", l) # fix links + file.write(f"{l}\n") + + +def add_release(file: TextIOWrapper, release: dict): + """Add a release to the changelog.""" + file.write(f"{release['name']}\n") + file.write(f"{'=' * len(release['name'])}\n\n") + file.write(f"**Tag:** {release['tag_name']}\n\n") + file.write(f"**Published at:** {release['published_at']}\n\n") + file.write(f"**Author:** {release['author']['login']}\n\n") + file.write(f"**Release Notes:**\n\n") + add_release_body(file, release["body"]) + file.write("\n\n") + + +def build_changelog(): + """Build the changelog for the documentation.""" + releases = get_content() + + # Write release information to an .rst file + with open(CHANGE_LOG_PATH, "w") as file: + add_heading(file) + for release in releases: + add_release(file, release) + + +if __name__ == "__main__": + build_changelog() diff --git a/docs/source/change_log.rst b/docs/source/change_log.rst deleted file mode 100644 index eee32807..00000000 --- a/docs/source/change_log.rst +++ /dev/null @@ -1,203 +0,0 @@ -Change Log -========== - -.. note:: - Go to the `Releases `__ page for a list of all releases. - -Bugfix Release 0.6.3 -~~~~~~~~~~~~~~~~~~~~~ - -Users Changelog ----------------- -This release of `ripple1d` fixes several bugs identified during testing. - -Bug Fixes ----------- -- Technical Documentation has been updated with high level summary of package functionality. 
-- The ID column was removed from geopackage layers. All code dependencies on the ID column have been removed. -- Now only reaches that are connected via the "to_id" are considered eclipsed reaches. -- Precision has been added to the rating curves used to inform the incremental normal depth runs by reducing the amount of rounding. -- CRS is now stored as WKT instead of EPSG in the ripple.json file to more robustly represent all possible CRSs; e.g., ESRI. - -Bugfix Release 0.6.2 -~~~~~~~~~~~~~~~~~~~~~ - -Users Changelog ----------------- -This release of `ripple1d` fixes several bugs associated with conflation. To aid in identifying and fixing these bugs, improvements were made in the logging for the conflation endpoint. In summary, the fixes and changes incorporated in this PR improve the consistency and quality of the conflation process, computations, and metrics in genera with special attention for handling junctions and headwater reaches. - - -Bugfix Release 0.6.1 -~~~~~~~~~~~~~~~~~~~~~ - -Users Changelog ----------------- -This release of `ripple1d` fixes several bugs identified during testing. - -Features Added ----------------- -- A minor change was added to the logging behavior to improve error tracking. - -Bug Fixes ----------- -- A bug causing increasing processing time when calling `creat_ras_terrain` in parallel mode. -- A bug in the `extract_submodel` endpoint which failed when trying to grab the upstream cross section. A check was added for the eclipsed parameter, where if true no geopackage will be created. -- Several bugs associated with the `create_fim_lib endpoint`: - - 1. The library_directory arg was not being implemented correctly in the function. - 2. If a fim.db already exists append fuctionality has been implemented. - 3. If the directory containing the raw RAS depth grids is empty the clean up function will not be invoked. 
-- Resolves issues introduced when a concave hull from a source model where cross section existed in the wrong direction (resulting in a multipart polygon). A check was added to correct direction and reverses the cross section if it was drawn incorrectly. This is limited to the development of the concave hull and does not modify the cross section direction for use in the modeling. - -Feature Release 0.6.0 -~~~~~~~~~~~~~~~~~~~~~ -Users Changelog ----------------- - -This release of `ripple1d` adds 2 args to the create_fim_lib endpoint, adds a concave hull of the cross sections to the geopackage, and fixes a bug associated with the depth grids. - -Features Added ----------------- -**New library directory argument** - -A new required arg, "library_directory", has been added to the create_fim_lib endpoint. This new arg specifies where the output directory for the FIM grids and database. - -**New cleanup argument** - -A new required arg, "cleanup", has been added to the create_fim_lib endpoint. If this arg is True the raw HEC-RAS depth grids are deleted. If False they are not deleted. - -**Concave hull of cross sections** - -A new layer representing the concave hull of the cross sections has been added to the geopackage for the source model and the network based model. It also improves how the concave hull is handled at junctions by explicitly creating a junction concave hull and then merging it in with the xs concave hull. - - -Bug Fixes ----------------- - -- An error was arising when all normal depth runs resulted in water surface elevations that were below the mapping terrain which means no resulting depth grids were being produced. Previously the code assumed at least 1 depth grid would be present. This has been fixed by obtaining the "terrain part" of the raw RAS grid from the RAS terrain instead of the first depth grid in the raw RAS result folder. 
- - -Feature Release 0.5.0 -~~~~~~~~~~~~~~~~~~~~~ -Users Changelog ----------------- - -This release of `ripple1d` incorporates geometry extraction, conflation, and conflation metrics into the API, and fixes several bugs. - - -Features Added ----------------- -**Conflation improvements** - -- The source HEC-RAS river centerline is now clipped to the most upstream and most downstream cross sections prior to starting conflation. This helps prevent identifying network reaches that are far away from the cross sections and improves the accuracy of the conflation. -- Overlapped reaches are now tracked and documented in the conflation json file. -- A bbox column has been added to the network parquet file for faster reading. This was especially needed for the new conflation endpoint since each request needs to load the parquet file. Load times without the bbox column were between 5-20 seconds; this is reduced to 1-2 seconds with the bbox column. -- The conflation function now reads locally instead of from s3. -- The conflation function no longer creates a STAC item. -- RAS metadata is now added to the conflation json. -- The source network's metadata is now added to the conflation json. -- Length and flow units are now documented in the conflation json file. - -**Conflation Metrics** - -Three metrics are computed to asses the qualitiy of the conflation: - -- `Coverage`: The the start and end location of the reach coverage is computed as a ratio of the length of the network reach. -- `Lengths`: The lengths between the most upstream cross section and most downstream cross section along the network reach and source HEC-RAS Model's centerline is computed. The ratio of the two lengths is also provided. -- `XS`: The distance between where the network reach and HEC-RAS Model's centerline intersects the cross sections is computed. A similar comparison is performed using the cross section's thalweg location and the network reaches intersection location with the cross sections. 
The mean, min, max, std, and quartiles are provided as a summary for both comparisons. - - -**Geometry Extraction improvements** -- A new function to verify .prj file is a HEC-RAS file has been added. -- The extracted geopackage now contians a non-spatial metadata table for the souce HEC-RAS model. -- Tests have been added for extracting geopackage from HEC-RAS model. -- Additional attributes are added to the source model gpkg for downstream use. -- Units are extracted from the source RAS model and added to metadata. - -**API** - -- An endpoint was added for extracting geometry and relevant metdata for the soure HEC-RAS models and storing it in in a geopackage. -- An endpoint to compute conflation metric was added. -- An endpoint for conflation (which includes metrics calculations) was added. -- Tests were added for the conflation, conflation metrics, and geopackage endpoints. - - -Bug Fixes ----------- - -- Reaches whose conflation results indicate upstream and downstream cross sections are the same are now considered a failed conflation. -- The function to create a concave hull for the cross sections has been improved when junctions are present. -- Eclipsed reaches are now better identified and are documented in the conflation json with a boolean. -- A check is now performed to ensure cross sections intersect the source HEC-RAS model's river centerline. If cross sections do not intersect the centerline they are dropped. -- A conflation json is no longer written for source HEC-RAS models that fail to conflate. -- Handling has been added to subset gpkg endpoint for river stationings of interpolated. These river stations contain an "*" to indicate interpolated cross section. -- Several issues with the automated API tests were identified and fixed. -- API tests no longer re-run gpkg_from_ras and conflate_model for every reach; just once per source test model. -- When API tests pass the resulting files are now removed automatically. 
Resulting files for tests that fail are not removed so that the tester can better trouble shoot. - - - -Bugfix Release 0.4.1-0.4.2 -~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Users Changelog ----------------- -This release of `ripple1d` fixes several bugs identified during testing. - -Features Added ----------------- -No features added in this version. - -**API** -- `ripple_version` is no longer an option for the body of any endpoints. - -Bug Fixes ----------- -- A bug due to a hard coded terrain path name causing an error on `create_fim_lib` has been resolved. -- A bug associated with the `ripple_version` parameter has been resolved by removing the parameter from the body of requests (see note in API above). -- An issue with including lateral structures (not yet implemented) in the ras geometry files causing hang ups has been resolved. This fix resolved another issue where stationing was mis-applied in the newly created ras geometry files. -- A bug which caused a failure when calling subset_gpkg in cases where the model geometries are simple (no structures / no junctions). - - -Feature Release 0.4.0 -~~~~~~~~~~~~~~~~~~~~~ - - -Users Changelog ----------------- -This release of `ripple1d` incorporates preliminary support for hydraulic structures in HEC-RAS, improves the installation and setup process, and fixes several bugs. - - -Features Added ------------------- - -**Hydraulic Structures** - -- All data associated with 1D structures that HEC-RAS supports is now included in the geometry extraction functions. (Endpoint exposing this will come in a future release). The extraction of data from the source models is now more robust and better handles different versions of RAS which wrote files slightly different. - -- NWM reach models built from HEC-RAS source models that have the following structures will have structure data included: - - - Inline Structures - - Bridges - - Culverts - - Multiple Opening - -.. note:: - Not included are lateral structures. 
Handling of lateral structures (wiers) will require additional assumptions/considerations to account for excess discharge (storage area, 2d area, another reach, etc). - -**Conflation improvements** - -- The conflation algorithm has been improved to accommodate models containing junctions. Where junctions exist, HEC-RAS rivers will be joined and the down stream XS (downstream of the junction) will be captured in the upstream model. -- Conflation now incorporates an additional downstream XS if available, extending beyond the NWM reach length to prevent gaps in FIM coverage. - - -**API** - -- `ripple_version` is no longer a required argument for any endpoint. - - -Bug Fixes ----------- -Numerous small bug fixes were made to enable the support of hydraulic structures. Other notable bugs include: - -- HEC-RAS stations with length > 8 characters are now supported. -- Mangled profile names resulting from negative elevations producing FIM libraries has been fixed. \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index f3f609fa..a55736f0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -6,6 +6,7 @@ """ import os +import subprocess import sys # import ripple1d @@ -66,3 +67,11 @@ # Substitutions version = "0.0.0" + + +def fetch_github_releases(): + subprocess.run([sys.executable, "docs/build_release_changelog.py"], check=True) + + +def setup(app): + fetch_github_releases() From 40937dd3485866570a088f532bc2479c33809297 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:35:22 -0500 Subject: [PATCH 18/46] update postman --- docs/source/postman.rst | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/docs/source/postman.rst b/docs/source/postman.rst index 61be7fef..7c395980 100644 --- a/docs/source/postman.rst +++ b/docs/source/postman.rst @@ -1,14 +1,22 @@ Postman collection ================== -For reference and documentation of the API, please open the postman collection for the version of 
ripple1d +For reference and documentation of the API, please open the postman collection for the version of ripple1d -`v0.6.0-v0.6.1: `_ This beta version contains new args for the create_fim_lib endpoint: +`v0.7.0: `_ This beta version contains: + new endpoints: + - `create_rating_curves_db`: creates rating curve using results from `run_known_wse` and `run_incremental_normal_depth` results + - `jobs`: added endpoints to view job `results`, `metadata`, and `logs` + + new args: + - `write_depth_grids` (bool) added to `run_known_wse` and `run_incremental_normal_depth` endpoints + +`v0.6.0-v0.6.3: `_ This beta version contains new args for the create_fim_lib endpoint: - `library_directory`: Specifies the output directory for the FIM grids and database. - `cleanup`: Boolean indicating if the ras HEC-RAS output grids should be deleted or not. -`v0.5.0: `_ This beta version contains new endpoints: +`v0.5.0: `_ This beta version contains new endpoints: - `geom_to_gpkg`: Extract the data from a model source dirctory to a gepoackage. - `conflate`: Conflate all reaches from the NWM network corresponding to the source model. - `conflation_metrics`: Apply conflation metrics for a conflated source model. 
From 723ede6070938d0c467d3390cd6b7527ab869d53 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:35:51 -0500 Subject: [PATCH 19/46] dynamically update version --- docs/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index a55736f0..f57f17e9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -9,7 +9,7 @@ import subprocess import sys -# import ripple1d +import ripple1d sys.path.insert(0, os.path.abspath("../..")) @@ -66,7 +66,7 @@ # Substitutions -version = "0.0.0" +version = str(ripple1d.__version__) def fetch_github_releases(): From e1b8d22de5e7b7222acd26c16b5b0e4a80888e48 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:38:12 -0500 Subject: [PATCH 20/46] fix import order for local ripple1d --- docs/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f57f17e9..375141d1 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -9,10 +9,10 @@ import subprocess import sys -import ripple1d - sys.path.insert(0, os.path.abspath("../..")) +import ripple1d + # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information From 2a9f1d4ab9bd55bd63eef8d4d814218de1c8c46a Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:42:51 -0500 Subject: [PATCH 21/46] test rtd network connection --- docs/build_release_changelog.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/build_release_changelog.py b/docs/build_release_changelog.py index 6435bf1d..cbba0efc 100644 --- a/docs/build_release_changelog.py +++ b/docs/build_release_changelog.py @@ -5,6 +5,14 @@ CHANGE_LOG_PATH = "docs/source/change_log.rst" +ip = "8.8.8.8" + +try: + response = requests.get("http://" + ip, timeout=5) + print(f"{ip} is reachable") +except requests.ConnectionError: + print(f"Failed to reach 
{ip}") + def get_content() -> dict: """Get the content of the changelog from the GitHub API.""" From 93b7c882b818f845a70e57266abcc86e72191bc2 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:48:44 -0500 Subject: [PATCH 22/46] fix subprocess path --- docs/source/conf.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 375141d1..3a8fcd91 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -6,6 +6,7 @@ """ import os +import pathlib import subprocess import sys @@ -70,7 +71,9 @@ def fetch_github_releases(): - subprocess.run([sys.executable, "docs/build_release_changelog.py"], check=True) + subprocess.run( + [sys.executable, str(pathlib.Path(__file__).parent.parent.resolve() / "build_release_changelog.py")], check=True + ) def setup(app): From 5e8fc73f25b2eecfbf5b7f7d0300aa2463ddae69 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:51:54 -0500 Subject: [PATCH 23/46] fix unclosed f string --- docs/build_release_changelog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/build_release_changelog.py b/docs/build_release_changelog.py index cbba0efc..aa6cb320 100644 --- a/docs/build_release_changelog.py +++ b/docs/build_release_changelog.py @@ -43,7 +43,8 @@ def add_release_body(file: TextIOWrapper, body: str): file.write(f"{'^' * len(l[3:])}\n") elif l.startswith("### "): file.write(f"{l[4:]}") - file.write(f"{'"' * len(l[4:])}\n") + underline = '"' * len(l[4:]) + file.write(f"{underline}\n") else: l = re.sub(r"\[([^\]]+)\]\(([^)]+)\)", r"`\1 <\2>`_", l) # fix links file.write(f"{l}\n") From 66f14817173739d35f93c1d03b0a06fbae668a89 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 09:56:27 -0500 Subject: [PATCH 24/46] fix change log path --- docs/build_release_changelog.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/docs/build_release_changelog.py b/docs/build_release_changelog.py index 
aa6cb320..d87e017a 100644 --- a/docs/build_release_changelog.py +++ b/docs/build_release_changelog.py @@ -1,17 +1,16 @@ import re from io import TextIOWrapper +from pathlib import Path import requests -CHANGE_LOG_PATH = "docs/source/change_log.rst" - -ip = "8.8.8.8" +CHANGE_LOG_PATH = str(Path(__file__).parent.resolve() / "source" / "change_log.rst") try: - response = requests.get("http://" + ip, timeout=5) - print(f"{ip} is reachable") + response = requests.get("https://www.google.com/", timeout=5) + print("Google is reachable") except requests.ConnectionError: - print(f"Failed to reach {ip}") + print("Failed to reach Google") def get_content() -> dict: From 64adcd339ba6c39f7a405e3b1620663b719660bc Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 13 Nov 2024 10:01:05 -0500 Subject: [PATCH 25/46] remove pinging debugging code --- docs/build_release_changelog.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/build_release_changelog.py b/docs/build_release_changelog.py index d87e017a..74a520f4 100644 --- a/docs/build_release_changelog.py +++ b/docs/build_release_changelog.py @@ -6,12 +6,6 @@ CHANGE_LOG_PATH = str(Path(__file__).parent.resolve() / "source" / "change_log.rst") -try: - response = requests.get("https://www.google.com/", timeout=5) - print("Google is reachable") -except requests.ConnectionError: - print("Failed to reach Google") - def get_content() -> dict: """Get the content of the changelog from the GitHub API.""" From d6fee276786deeb2969c21de167ebc34aeb012d9 Mon Sep 17 00:00:00 2001 From: sclaw Date: Fri, 6 Dec 2024 16:40:27 -0500 Subject: [PATCH 26/46] enforce htab minimum value --- ripple1d/data_model.py | 15 +++++++++++++++ ripple1d/ras.py | 22 ++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index 60353ef8..a582e5ce 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -418,6 +418,21 @@ def thalweg(self): _, y = list(zip(*self.station_elevation_points)) 
return min(y) + @property + def has_htab_error(self): + """Check if min htab value is less than section invert.""" + return self.min_htab < self.thalweg + + @property + def htab_string(self): + """Cross section htab string.""" + return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True) + + @property + def min_htab(self): + """Cross section minimum htab.""" + return float(self.htab_string.split(",")[0]) + @property def xs_max_elevation(self): """Cross section maximum elevation.""" diff --git a/ripple1d/ras.py b/ripple1d/ras.py index bab8bd3b..2be97f82 100644 --- a/ripple1d/ras.py +++ b/ripple1d/ras.py @@ -775,6 +775,8 @@ def __init__(self, ras_text_file_path: str, crs: str = None, new_file=False): self.crs = CRS(crs) self.hdf_file = self._ras_text_file_path + ".hdf" + self.fix_htab_errors() + def __repr__(self): """Representation of the RasGeomText class.""" return f"RasGeomText({self._ras_text_file_path})" @@ -868,6 +870,26 @@ def _check_layers(self): if "River" not in layers: raise NoRiverLayerError(f"Could not find a layer called River in {self._gpkg_path}") + def fix_htab_errors(self): + """Update any htab values lower than the section invert to the section invert.""" + working_string = "\n".join(self.contents.copy()) + needs_save = False + for xs in self.cross_sections.values(): + if xs.has_htab_error: + needs_save = True + logging.info(f"Fixing htab error for {xs.river_reach}") + old_ras_str = "\n".join(xs.ras_data) + old_htab = xs.htab_string + new_htab = xs.htab_string.replace( + str(xs.min_htab), str(xs.thalweg + 0.5) + ) # HEC-RAS default handling of this error is to do 0.5 ft above section invert + new_ras_string = old_ras_str.replace(old_htab, new_htab) + working_string = working_string.replace(old_ras_str, new_ras_string) + if needs_save: + with open(self._ras_text_file_path, "w") as f: + f.write(working_string) + self.contents = working_string.splitlines() + @property def title(self): """Title of the HEC-RAS Geometry 
file.""" From f747131cf7acfa3b67a4d81de90c4eca6d868341 Mon Sep 17 00:00:00 2001 From: sclaw Date: Tue, 10 Dec 2024 14:10:25 -0500 Subject: [PATCH 27/46] finish htab updates --- ripple1d/data_model.py | 14 ++++++++++++-- ripple1d/ras.py | 17 ++++++++++------- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index a582e5ce..0910f2c6 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -421,7 +421,7 @@ def thalweg(self): @property def has_htab_error(self): """Check if min htab value is less than section invert.""" - return self.min_htab < self.thalweg + return self.htab_starting_el < self.thalweg @property def htab_string(self): @@ -429,10 +429,20 @@ def htab_string(self): return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True) @property - def min_htab(self): + def htab_starting_el(self): """Cross section minimum htab.""" return float(self.htab_string.split(",")[0]) + @property + def htab_increment(self): + """Cross section minimum htab.""" + return float(self.htab_string.split(",")[1]) + + @property + def htab_points(self): + """Cross section minimum htab.""" + return float(self.htab_string.split(",")[2]) + @property def xs_max_elevation(self): """Cross section maximum elevation.""" diff --git a/ripple1d/ras.py b/ripple1d/ras.py index 2be97f82..ff02447c 100644 --- a/ripple1d/ras.py +++ b/ripple1d/ras.py @@ -878,13 +878,16 @@ def fix_htab_errors(self): if xs.has_htab_error: needs_save = True logging.info(f"Fixing htab error for {xs.river_reach}") - old_ras_str = "\n".join(xs.ras_data) - old_htab = xs.htab_string - new_htab = xs.htab_string.replace( - str(xs.min_htab), str(xs.thalweg + 0.5) - ) # HEC-RAS default handling of this error is to do 0.5 ft above section invert - new_ras_string = old_ras_str.replace(old_htab, new_htab) - working_string = working_string.replace(old_ras_str, new_ras_string) + old_htab_str = xs.htab_string + # HEC-RAS default handling: + # 
either 0 or 0.5 ft above section invert for the start elevation + # increment that will yield 20 pts between start and section max elevations + # We want to preserve engineer-specified increments, so we don't do that + new_htab_str = old_htab_str.replace(str(xs.htab_starting_el), str(xs.thalweg)) + + old_xs_str = "\n".join(xs.ras_data) + new_xs_str = old_xs_str.replace(old_htab_str, new_htab_str) + working_string = working_string.replace(old_xs_str, new_xs_str) if needs_save: with open(self._ras_text_file_path, "w") as f: f.write(working_string) From a4886eea04b1faefe672cdadaba0d36583a3adeb Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 11 Dec 2024 15:55:58 -0500 Subject: [PATCH 28/46] gh-266&gh-225 --- ripple1d/ops/subset_gpkg.py | 93 +++++++++++++++++++------------------ 1 file changed, 48 insertions(+), 45 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index 444c8657..b7461635 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -5,6 +5,7 @@ import os import time import warnings +from functools import lru_cache import fiona import geopandas as gpd @@ -193,41 +194,42 @@ def ripple_structure(self) -> gpd.GeoDataFrame: return self.subset_gdfs["Structure"] @property + @lru_cache def subset_gdfs(self) -> dict: """Subset the cross sections, structues, and river geometry for a given NWM reach.""" - if self._subset_gdf is None: - # subset data - if self.us_river == self.ds_river and self.us_reach == self.ds_reach: - ripple_xs, ripple_structure, ripple_river = self.process_as_one_ras_reach() - else: - ripple_xs, ripple_structure, ripple_river = self.process_as_multiple_ras_reach() - - # check if only 1 cross section for nwm_reach - if len(ripple_xs) <= 1: - logging.warning(f"Only 1 cross section conflated to NWM reach {self.nwm_id}. 
Skipping this reach.") - return None - - # update fields - ripple_xs = self.update_fields(ripple_xs) - ripple_river = self.update_fields(ripple_river) - ripple_structure = self.update_fields(ripple_structure) - - # clip river to cross sections - ripple_river["geometry"] = clip_ras_centerline( - ripple_river.iloc[0].geometry, fix_reversed_xs(ripple_xs, ripple_river), 2 - ) + # subset data + if self.us_river == self.ds_river and self.us_reach == self.ds_reach: + ripple_xs, ripple_structure, ripple_river = self.process_as_one_ras_reach() + else: + ripple_xs, ripple_structure, ripple_river = self.process_as_multiple_ras_reach() + + # check if only 1 cross section for nwm_reach + if len(ripple_xs) <= 1: + logging.warning(f"Only 1 cross section conflated to NWM reach {self.nwm_id}. Skipping this reach.") + return None + + # update fields + ripple_river = self.rename_river_reach(ripple_river) + ripple_structure = self.rename_river_reach(ripple_structure) + ripple_xs = self.rename_river_reach(ripple_xs) + ripple_xs = self.autoincrement_stations(ripple_xs) + + # clip river to cross sections + ripple_river["geometry"] = clip_ras_centerline( + ripple_river.iloc[0].geometry, fix_reversed_xs(ripple_xs, ripple_river), 2 + ) - if ripple_structure is not None and len(ripple_structure) > 0: - self._subset_gdf = { - "XS": ripple_xs, - "River": ripple_river, - "Structure": ripple_structure, - } - else: - self._subset_gdf = { - "XS": ripple_xs, - "River": ripple_river, - } + if ripple_structure is not None and len(ripple_structure) > 0: + self._subset_gdf = { + "XS": ripple_xs, + "River": ripple_river, + "Structure": ripple_structure, + } + else: + self._subset_gdf = { + "XS": ripple_xs, + "River": ripple_river, + } return self._subset_gdf @@ -517,19 +519,20 @@ def combine_reach_features(self, intermediate_river_reaches: list[str]) -> gpd.G crs=self.crs, ) - def update_fields(self, gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: - """Update fields for the new NWM reach.""" - if gdf is not 
None: - gdf = self.rename_river_reach(gdf) - - # clean river stations - if "river_station" in gdf.columns: - if (gdf["river_station"].astype(str).str.len() > 8).any(): - gdf["ras_data"] = gdf["ras_data"].apply(lambda ras_data: self.round_river_stations(ras_data)) - gdf["river_station"] = gdf["river_station"].round().astype(float) - else: - gdf["ras_data"] = gdf["ras_data"].apply(lambda ras_data: self.clean_river_stations(ras_data)) - gdf["river_station"] = gdf["river_station"].astype(float) + def autoincrement_stations(self, gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: + """Update river_stations and ras data to be autoincrementing instead of distances.""" + gdf["river_station"] = range(1, len(gdf) + 1) # autoincrementing field + for row in gdf.iterrows(): # for each XS + # update station name in RAS data + ras_data = row[1]["ras_data"] + lines = ras_data.splitlines() + data = lines[0].split(",") + if "*" in data[1]: + data[1] = str(row[1]["river_station"] + "*").ljust(8) + else: + data[1] = str(row[1]["river_station"]).ljust(8) + lines[0] = ",".join(data) + gdf.loc[row[0], "ras_data"] = "\n".join(lines) + "\n" return gdf def update_ripple1d_parameters(self, rsd: RippleSourceDirectory): From 0af981cc1d3121b980988f5fae090993c1a57e69 Mon Sep 17 00:00:00 2001 From: sclaw Date: Wed, 11 Dec 2024 16:11:00 -0500 Subject: [PATCH 29/46] fix junction walking bug. needs testing --- ripple1d/ops/subset_gpkg.py | 41 ++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index b7461635..b37f382a 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -286,29 +286,24 @@ def crs(self): def walk_junctions(self) -> list[str]: """Check if junctions are present for the given river-reaches.""" - river_reaches = [] - if not self.source_junction.empty: - c = 0 - while True: - c += 1 - if c > 100: - logging.warning( - f"Could not find junction for: {self.nwm_id}. 
The reach may contain cross sections from multiple RAS reaches that are not connected via junctions." - ) - return None - - for _, row in self.source_junction.iterrows(): - - us_rivers = row.us_rivers.split(",") - us_reaches = row.us_reaches.split(",") - - for river, reach in zip(us_rivers, us_reaches): - if row.ds_rivers == self.ds_river and row.ds_reaches == self.ds_reach: - return river_reaches - if river == self.us_river and reach == self.us_reach: - river_reaches.append(f"{row.ds_rivers.ljust(16)},{row.ds_reaches.ljust(16)}") - us_river = row.ds_rivers - us_reach = row.ds_reaches + # make a tree dictionary + tree_dict = {} + for r in self.source_junction.iterrows(): + trib_rivers = r[1]["us_rivers"].split(",") + trib_reaches = r[1]["us_reaches"].split(",") + outlet = f'{r[1]["ds_rivers"]}-{r[1]["ds_reaches"]}' + for riv, rch in zip(trib_rivers, trib_reaches): + tree_dict[f"{riv}-{rch}"] = outlet + + # walk network to id intermediate reaches + intermediate_reaches = [] + cur_reach = f"{self.us_river}-{self.us_reach}" + ds_reach = f"{self.ds_river}-{self.ds_reach}" + while cur_reach != ds_reach: + cur_reach = tree_dict[cur_reach] + intermediate_reaches.append(cur_reach) + intermediate_reaches = intermediate_reaches[:-1] # remove d/s reach + return intermediate_reaches def clean_river_stations(self, ras_data: str) -> str: """Clean up river station data.""" From 464c2aac4e9f920bf1aed4e21f32e9e53f6c1871 Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 13:22:16 -0500 Subject: [PATCH 30/46] stable refactor of subset_gpkg --- ripple1d/ops/subset_gpkg.py | 210 ++++++++++++++++++++++++++++-------- 1 file changed, 168 insertions(+), 42 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index b37f382a..a8505b4c 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -6,11 +6,13 @@ import time import warnings from functools import lru_cache +from re import sub import fiona import geopandas as gpd import 
pandas as pd from shapely import LineString +from shapely.geometry import MultiLineString from shapely.ops import split import ripple1d @@ -178,6 +180,128 @@ def source_junction(self) -> gpd.GeoDataFrame: self._source_junction = gpd.read_file(self.src_gpkg_path, layer="Junction") return self._source_junction + @property + def juntion_tree_dict(self) -> dict: + """Create a dictionary mapping trib->outflow for all junctions.""" + return self.junctions_to_dicts()[0] + + @property + def juntion_dist_dict(self) -> dict: + """Create a dictionary mapping trib->outflow for all junctions.""" + return self.junctions_to_dicts()[1] + + @property + @lru_cache + def subset_xs(self) -> gpd.GeoDataFrame: + """Trim source XS to u/s and d/s limits and add all intermediate reaches.""" + subset_xs = pd.DataFrame(data=None, columns=self.source_xs.columns) # empty copy to put subset into + subset_xs["source_river_station"] = [] + cur_reach = (self.us_river, self.us_reach) + ds_reach = (self.ds_river, self.ds_reach) + _iter = 0 + while True: # Walk network until d/s reach + reach_xs = self.source_xs.query(f'river == "{cur_reach[0]}" & reach == "{cur_reach[1]}"') + reach_xs = self.trim_reach(reach_xs) + reach_xs["source_river_station"] = reach_xs["river_station"] + subset_xs["river_station"] += reach_xs["river_station"].max() + subset_xs = pd.concat([subset_xs, reach_xs]).copy() + + if cur_reach == ds_reach: + break + elif _iter > 100 or cur_reach not in self.juntion_tree_dict: + err_string = "Could not traverse reaches such that u/s river reach led to d/s river reach." 
+ err_string += "\n" + err_string += f"Broke on {cur_reach} at {_iter} iterations" + raise RuntimeError(err_string) + else: + _iter += 1 + subset_xs["river_station"] += self.juntion_dist_dict[cur_reach] # add junction d/s length + cur_reach = self.juntion_tree_dict[cur_reach] # move to next d/s reach + + return gpd.GeoDataFrame(subset_xs) + + @property + @lru_cache + def subset_river(self): + """Trim source centerline to u/s and d/s limits and add all intermediate reaches.""" + coords = [] + subset_rivers = [] + for rr in self.subset_xs["river_reach"].unique(): + tmp_river = self.source_river[self.source_river["river_reach"] == rr] + subset_rivers.append(tmp_river) + coords.extend(tmp_river.iloc[0]["geometry"].coords) + + subset_rivers = gpd.GeoDataFrame(pd.concat(subset_rivers)) + tmp_line = LineString(coords) + tmp_xs = fix_reversed_xs(self.subset_xs, subset_rivers) + tmp_line = clip_ras_centerline(tmp_line, tmp_xs, 2) + return gpd.GeoDataFrame( + {"geometry": [tmp_line], "river": [self.nwm_id], "reach": [self.nwm_id]}, + geometry="geometry", + crs=self.crs, + ) + + @property + @lru_cache + def subset_structures(self): + """Extract structures between u/s and d/s limits.""" + if self.source_structure is None: + return None + + subset_structures = pd.DataFrame( + data=None, columns=self.source_structure.columns + ) # empty copy to put subset into + subset_structures["source_river_station"] = [] + for river_reach in self.subset_xs["river_reach"].unique(): + us_limit = self.subset_xs["source_river_station"].max() # TODO: can a structure be placed d/s of junction? + ds_limit = self.subset_xs["source_river_station"].min() # TODO: can a structure be placed u/s of junction? 
+ tmp_structures = self.source_structure.loc[ + (self.source_structure["river_reach"] == river_reach) + & (self.source_structure["river_station"] >= float(ds_limit)) + & (self.source_structure["river_station"] <= float(us_limit)) + ] + tmp_structures["source_river_station"] = tmp_structures["river_station"] + new_ds_limit = self.subset_xs["river_station"].min() + tmp_structures["river_station"] = (tmp_structures["river_station"] - ds_limit) + new_ds_limit + subset_structures = pd.concat([subset_structures, tmp_structures]) + + if len(subset_structures) == 0: + return None + else: + return gpd.GeoDataFrame(subset_structures) + + def trim_reach(self, reach_xs: gpd.GeoDataFrame) -> gpd.GeoDataFrame: + """Trim a reach-specific XS gdf to the u/s and d/s limits.""" + # Trim + river = reach_xs["river"].iloc[0] + reach = reach_xs["reach"].iloc[0] + if river == self.us_river and reach == self.us_reach: + reach_xs = reach_xs[reach_xs["river_station"] <= float(self.us_rs)] + if river == self.ds_river and reach == self.ds_reach: + reach_xs = reach_xs[reach_xs["river_station"] >= float(self.ds_rs)] + + return reach_xs + + @lru_cache + def junctions_to_dicts(self): + """Make dicts that map trib->outflow and trib->d/s distance for all junctions.""" + juntion_tree_dict = {} + juntion_dist_dict = {} + + if self.source_junction is None: + return (juntion_tree_dict, juntion_dist_dict) + + for r in self.source_junction.iterrows(): + trib_rivers = r[1]["us_rivers"].split(",") + trib_reaches = r[1]["us_reaches"].split(",") + trib_dists = r[1]["junction_lengths"].split(",") + outlet = (r[1]["ds_rivers"], r[1]["ds_reaches"]) + for riv, rch, dist in zip(trib_rivers, trib_reaches, trib_dists): + juntion_tree_dict[(riv, rch)] = outlet + juntion_dist_dict[(riv, rch)] = float(dist) + + return (juntion_tree_dict, juntion_dist_dict) + @property def ripple_xs(self) -> gpd.GeoDataFrame: """Subset cross sections based on NWM reach.""" @@ -197,41 +321,22 @@ def ripple_structure(self) -> 
gpd.GeoDataFrame: @lru_cache def subset_gdfs(self) -> dict: """Subset the cross sections, structues, and river geometry for a given NWM reach.""" - # subset data - if self.us_river == self.ds_river and self.us_reach == self.ds_reach: - ripple_xs, ripple_structure, ripple_river = self.process_as_one_ras_reach() - else: - ripple_xs, ripple_structure, ripple_river = self.process_as_multiple_ras_reach() - - # check if only 1 cross section for nwm_reach - if len(ripple_xs) <= 1: + # subset geometry data + subset_gdfs = {} + subset_gdfs["XS"] = self.subset_xs + if len(subset_gdfs["XS"]) <= 1: # check if only 1 cross section for nwm_reach logging.warning(f"Only 1 cross section conflated to NWM reach {self.nwm_id}. Skipping this reach.") return None + subset_gdfs["River"] = self.subset_river + if self.subset_structures is not None: + subset_gdfs["Structure"] = self.subset_structures - # update fields - ripple_river = self.rename_river_reach(ripple_river) - ripple_structure = self.rename_river_reach(ripple_structure) - ripple_xs = self.rename_river_reach(ripple_xs) - ripple_xs = self.autoincrement_stations(ripple_xs) + # Update fields + for k in subset_gdfs: + subset_gdfs[k] = self.rename_river_reach(subset_gdfs[k]) + subset_gdfs = self.update_river_station(subset_gdfs) - # clip river to cross sections - ripple_river["geometry"] = clip_ras_centerline( - ripple_river.iloc[0].geometry, fix_reversed_xs(ripple_xs, ripple_river), 2 - ) - - if ripple_structure is not None and len(ripple_structure) > 0: - self._subset_gdf = { - "XS": ripple_xs, - "River": ripple_river, - "Structure": ripple_structure, - } - else: - self._subset_gdf = { - "XS": ripple_xs, - "River": ripple_river, - } - - return self._subset_gdf + return subset_gdfs @property def ripple_gpkg_file(self) -> str: @@ -285,7 +390,10 @@ def crs(self): return self.source_xs.crs def walk_junctions(self) -> list[str]: - """Check if junctions are present for the given river-reaches.""" + """Walk the junctions to find reaches 
between u/s and d/s reach.""" + if self.source_junction is None: + return + # make a tree dictionary tree_dict = {} for r in self.source_junction.iterrows(): @@ -329,15 +437,38 @@ def round_river_stations(self, ras_data: str) -> str: lines[0] = ",".join(data) return "\n".join(lines) + "\n" - def update_river_station(self, ras_data: str, river_station: str) -> str: - """Update river station data.""" + def update_river_station(self, subset_gdfs: dict[gpd.GeoDataFrame]) -> dict: + """Convert river stations to autoincrementing names.""" + xs = subset_gdfs["XS"] + xs_names = [*range(1, len(xs) + 1)][::-1] + + if "Structure" in subset_gdfs: + structures = subset_gdfs["Structure"] + str_names = [xs_names[xs["river_station"] < i][0] + 0.5 for i in structures["river_station"]] + subset_gdfs["Structure"]["river_station"] = str_names + subset_gdfs["Structure"]["ras_data"] = subset_gdfs["Structure"][["ras_data", "river_station"]].apply( + self.correct_ras_data, axis=1 + ) + + subset_gdfs["XS"]["river_station"] = xs_names + subset_gdfs["XS"]["ras_data"] = subset_gdfs["XS"][["ras_data", "river_station"]].apply( + self.correct_ras_data, axis=1 + ) + + return subset_gdfs + + def correct_ras_data(self, row): + """Make ras_data names consistent with river_station.""" + ras_data = row["ras_data"] + rs = row["river_station"] + lines = ras_data.splitlines() data = lines[0].split(",") if "*" in data[1]: - data[1] = str(float(data[1].rstrip("*")) + river_station) + "*" + data[1] = str(float(data[1].rstrip("*")) + rs) + "*" data[1] = data[1].ljust(8) else: - data[1] = str(float(data[1]) + river_station).ljust(8) + data[1] = str(float(data[1]) + rs).ljust(8) lines[0] = ",".join(data) return "\n".join(lines) + "\n" @@ -455,13 +586,8 @@ def adjust_river_stations(self, xs_us_reach, structures_us_reach) -> tuple: def process_as_multiple_ras_reach(self) -> tuple: """Process as multiple ras-river-reach.""" - if "Junction" in fiona.listlayers(self.src_gpkg_path): - junction_gdf = 
gpd.read_file(self.src_gpkg_path, layer="Junction") - intermediate_river_reaches = self.walk_junctions() - else: - intermediate_river_reaches = None - # add intermediate river reaches to the upstream reach + intermediate_river_reaches = self.walk_junctions() if intermediate_river_reaches: xs_us_reach = self.add_intermediate_river_reaches(intermediate_river_reaches) else: From 7c929cf78a9287908c7a6803764f898fba1105ae Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 13:38:41 -0500 Subject: [PATCH 31/46] refactor and clean up --- ripple1d/ops/subset_gpkg.py | 483 +++++++++--------------------------- 1 file changed, 121 insertions(+), 362 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index a8505b4c..61b25254 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -3,21 +3,17 @@ import json import logging import os -import time import warnings from functools import lru_cache -from re import sub import fiona import geopandas as gpd import pandas as pd from shapely import LineString -from shapely.geometry import MultiLineString from shapely.ops import split import ripple1d -from ripple1d.consts import METERS_PER_FOOT -from ripple1d.data_model import NwmReachModel, RippleSourceDirectory, RippleSourceModel +from ripple1d.data_model import NwmReachModel, RippleSourceDirectory from ripple1d.utils.ripple_utils import ( clip_ras_centerline, fix_reversed_xs, @@ -36,14 +32,6 @@ def __init__(self, src_gpkg_path: str, conflation_json: str, dst_project_dir: st self.conflation_json = conflation_json self.dst_project_dir = dst_project_dir self.nwm_id = nwm_id - self._subset_gdf = None - self._source_junction = None - self._source_river = None - self._source_xs = None - self._source_structure = None - self._conflation_parameters = None - self._source_hulls = None - self._ripple_xs_concave_hull = None def set_nwm_id(self, nwm_id: str): """Set the network ID.""" @@ -52,55 +40,12 @@ def set_nwm_id(self, nwm_id: str): 
self._ripple_xs_concave_hull = None @property - def ripple_us_xs(self): - """The most upstream cross section for the ripple model.""" - return self.ripple_xs.loc[ - self.ripple_xs["river_station"] == self.ripple_xs["river_station"].max(), "geometry" - ].iloc[0] - - @property - def ripple_ds_xs(self): - """The most downstream cross section for the ripple model.""" - return self.ripple_xs.loc[ - self.ripple_xs["river_station"] == self.ripple_xs["river_station"].min(), "geometry" - ].iloc[0] - - @property - def split_source_hull(self): - """Split the concave hull of the source model using the upstream and downstream cross sections of the submodel.""" - geoms = split(self.source_hulls.geometry.iloc[0], self.ripple_us_xs).geoms - hulls = [] - for geom in geoms: - if geom.intersects(self.ripple_us_xs) and geom.intersects(self.ripple_ds_xs): - candidate_geoms = split(geom, self.ripple_ds_xs).geoms - for candidate_geom in candidate_geoms: - if candidate_geom.intersects(self.ripple_us_xs) and candidate_geom.intersects(self.ripple_ds_xs): - hulls.append(candidate_geom) - if len(hulls) != 1: - raise ValueError( - f"Expected 1 polygon for ripple xs concave hull; got: {len(hulls)} | network id: {self.nwm_id}" - ) - return hulls - - @property - def ripple_xs_concave_hull(self): - """Get the concave hull of the cross sections.""" - if self._ripple_xs_concave_hull is None: - try: - hulls = self.split_source_hull - self._ripple_xs_concave_hull = gpd.GeoDataFrame({"geometry": hulls}, geometry="geometry", crs=self.crs) - except Exception as e: - self._ripple_xs_concave_hull = xs_concave_hull(fix_reversed_xs(self.ripple_xs, self.ripple_river)) - - return self._ripple_xs_concave_hull - - @property + @lru_cache def conflation_parameters(self) -> dict: """Extract conflation parameters from the conflation json.""" - if self._conflation_parameters is None: - with open(self.conflation_json, "r") as f: - self._conflation_parameters = json.load(f) - return self._conflation_parameters + with 
open(self.conflation_json, "r") as f: + conflation_parameters = json.load(f) + return conflation_parameters @property def ripple1d_parameters(self) -> dict: @@ -138,47 +83,84 @@ def ds_rs(self) -> str: return self.ripple1d_parameters["ds_xs"]["xs_id"] @property + @lru_cache def source_xs(self) -> gpd.GeoDataFrame: """Extract cross sections from the source geopackage.""" - if self._source_xs is None: - xs = gpd.read_file(self.src_gpkg_path, layer="XS") - self._source_xs = xs[xs.intersects(self.source_river.union_all())] - return self._source_xs + xs = gpd.read_file(self.src_gpkg_path, layer="XS") + source_xs = xs[xs.intersects(self.source_river.union_all())] + return source_xs @property + @lru_cache def source_hulls(self) -> gpd.GeoDataFrame: """Extract cross sections from the source geopackage.""" - if self._source_hulls is None: - # TODO we can read the concave hull of the source model from the gpkg once we decide that it is required layer. - # self._source_hulls = gpd.read_file(self.src_gpkg_path, layer="XS_concave_hull") - self._source_hulls = xs_concave_hull( - fix_reversed_xs(self.source_xs, self.source_river), self.source_junction - ) - return self._source_hulls + return gpd.read_file(self.src_gpkg_path, layer="XS_concave_hull") @property + @lru_cache def source_river(self) -> gpd.GeoDataFrame: """Extract river geometry from the source geopackage.""" - if self._source_river is None: - self._source_river = gpd.read_file(self.src_gpkg_path, layer="River") - return self._source_river + return gpd.read_file(self.src_gpkg_path, layer="River") @property + @lru_cache def source_structure(self) -> gpd.GeoDataFrame: """Extract structures from the source geopackage.""" if "Structure" in fiona.listlayers(self.src_gpkg_path): - if self._source_structure is None: - structures = gpd.read_file(self.src_gpkg_path, layer="Structure") - self._source_structure = structures[structures.intersects(self.source_river.union_all())] - return self._source_structure + structures = 
gpd.read_file(self.src_gpkg_path, layer="Structure") + source_structure = structures[structures.intersects(self.source_river.union_all())] + return source_structure @property + @lru_cache def source_junction(self) -> gpd.GeoDataFrame: """Extract junctions from the source geopackage.""" if "Junction" in fiona.listlayers(self.src_gpkg_path): - if self._source_junction is None: - self._source_junction = gpd.read_file(self.src_gpkg_path, layer="Junction") - return self._source_junction + source_junction = gpd.read_file(self.src_gpkg_path, layer="Junction") + return source_junction + + @property + def ripple_us_xs(self): + """The most upstream cross section for the ripple model.""" + return self.ripple_xs.loc[ + self.ripple_xs["river_station"] == self.ripple_xs["river_station"].max(), "geometry" + ].iloc[0] + + @property + def ripple_ds_xs(self): + """The most downstream cross section for the ripple model.""" + return self.ripple_xs.loc[ + self.ripple_xs["river_station"] == self.ripple_xs["river_station"].min(), "geometry" + ].iloc[0] + + @property + def split_source_hull(self): + """Split the concave hull of the source model using the upstream and downstream cross sections of the submodel.""" + geoms = split(self.source_hulls.geometry.iloc[0], self.ripple_us_xs).geoms + hulls = [] + for geom in geoms: + if geom.intersects(self.ripple_us_xs) and geom.intersects(self.ripple_ds_xs): + candidate_geoms = split(geom, self.ripple_ds_xs).geoms + for candidate_geom in candidate_geoms: + if candidate_geom.intersects(self.ripple_us_xs) and candidate_geom.intersects(self.ripple_ds_xs): + hulls.append(candidate_geom) + if len(hulls) != 1: + raise ValueError( + f"Expected 1 polygon for ripple xs concave hull; got: {len(hulls)} | network id: {self.nwm_id}" + ) + return hulls + + @property + def ripple_xs_concave_hull(self): + """Get the concave hull of the cross sections.""" + if self._ripple_xs_concave_hull is None: + try: + hulls = self.split_source_hull + 
self._ripple_xs_concave_hull = gpd.GeoDataFrame({"geometry": hulls}, geometry="geometry", crs=self.crs) + except Exception as e: + self._ripple_xs_concave_hull = xs_concave_hull(fix_reversed_xs(self.ripple_xs, self.ripple_river)) + + return self._ripple_xs_concave_hull @property def juntion_tree_dict(self) -> dict: @@ -270,53 +252,6 @@ def subset_structures(self): else: return gpd.GeoDataFrame(subset_structures) - def trim_reach(self, reach_xs: gpd.GeoDataFrame) -> gpd.GeoDataFrame: - """Trim a reach-specific XS gdf to the u/s and d/s limits.""" - # Trim - river = reach_xs["river"].iloc[0] - reach = reach_xs["reach"].iloc[0] - if river == self.us_river and reach == self.us_reach: - reach_xs = reach_xs[reach_xs["river_station"] <= float(self.us_rs)] - if river == self.ds_river and reach == self.ds_reach: - reach_xs = reach_xs[reach_xs["river_station"] >= float(self.ds_rs)] - - return reach_xs - - @lru_cache - def junctions_to_dicts(self): - """Make dicts that map trib->outflow and trib->d/s distance for all junctions.""" - juntion_tree_dict = {} - juntion_dist_dict = {} - - if self.source_junction is None: - return (juntion_tree_dict, juntion_dist_dict) - - for r in self.source_junction.iterrows(): - trib_rivers = r[1]["us_rivers"].split(",") - trib_reaches = r[1]["us_reaches"].split(",") - trib_dists = r[1]["junction_lengths"].split(",") - outlet = (r[1]["ds_rivers"], r[1]["ds_reaches"]) - for riv, rch, dist in zip(trib_rivers, trib_reaches, trib_dists): - juntion_tree_dict[(riv, rch)] = outlet - juntion_dist_dict[(riv, rch)] = float(dist) - - return (juntion_tree_dict, juntion_dist_dict) - - @property - def ripple_xs(self) -> gpd.GeoDataFrame: - """Subset cross sections based on NWM reach.""" - return self.subset_gdfs["XS"] - - @property - def ripple_river(self) -> gpd.GeoDataFrame: - """Subset river geometry based on NWM reach.""" - return self.subset_gdfs["River"] - - @property - def ripple_structure(self) -> gpd.GeoDataFrame: - """Subset structures based on NWM 
reach.""" - return self.subset_gdfs["Structure"] - @property @lru_cache def subset_gdfs(self) -> dict: @@ -338,6 +273,21 @@ def subset_gdfs(self) -> dict: return subset_gdfs + @property + def ripple_xs(self) -> gpd.GeoDataFrame: + """Subset cross sections based on NWM reach.""" + return self.subset_gdfs["XS"] + + @property + def ripple_river(self) -> gpd.GeoDataFrame: + """Subset river geometry based on NWM reach.""" + return self.subset_gdfs["River"] + + @property + def ripple_structure(self) -> gpd.GeoDataFrame: + """Subset structures based on NWM reach.""" + return self.subset_gdfs["Structure"] + @property def ripple_gpkg_file(self) -> str: """Return the path to the new geopackage.""" @@ -348,24 +298,6 @@ def nwm_reach_model(self) -> NwmReachModel: """Return the new NWM reach model object.""" return NwmReachModel(self.dst_project_dir) - def write_ripple_gpkg(self) -> None: - """Write the subsetted geopackage to the destination project directory.""" - os.makedirs(self.dst_project_dir, exist_ok=True) - - for layer, gdf in self.subset_gdfs.items(): - # remove lateral structures - if layer == "Structure": - if (gdf["type"] == 6).any(): - logging.warning( - f"Lateral structures are not currently supported in ripple1d. The lateral structures will be dropped." 
- ) - gdf = gdf.loc[gdf["type"] != 6, :] - - if gdf.shape[0] > 0: - gdf.to_file(self.ripple_gpkg_file, layer=layer) - if layer == "XS": - self.ripple_xs_concave_hull.to_file(self.ripple_gpkg_file, driver="GPKG", layer="XS_concave_hull") - @property def min_flow(self) -> float: """Extract the min flow from the cross sections.""" @@ -389,53 +321,55 @@ def crs(self): """Extract the CRS from the cross sections.""" return self.source_xs.crs - def walk_junctions(self) -> list[str]: - """Walk the junctions to find reaches between u/s and d/s reach.""" + def trim_reach(self, reach_xs: gpd.GeoDataFrame) -> gpd.GeoDataFrame: + """Trim a reach-specific XS gdf to the u/s and d/s limits.""" + # Trim + river = reach_xs["river"].iloc[0] + reach = reach_xs["reach"].iloc[0] + if river == self.us_river and reach == self.us_reach: + reach_xs = reach_xs[reach_xs["river_station"] <= float(self.us_rs)] + if river == self.ds_river and reach == self.ds_reach: + reach_xs = reach_xs[reach_xs["river_station"] >= float(self.ds_rs)] + + return reach_xs + + @lru_cache + def junctions_to_dicts(self): + """Make dicts that map trib->outflow and trib->d/s distance for all junctions.""" + juntion_tree_dict = {} + juntion_dist_dict = {} + if self.source_junction is None: - return + return (juntion_tree_dict, juntion_dist_dict) - # make a tree dictionary - tree_dict = {} for r in self.source_junction.iterrows(): trib_rivers = r[1]["us_rivers"].split(",") trib_reaches = r[1]["us_reaches"].split(",") - outlet = f'{r[1]["ds_rivers"]}-{r[1]["ds_reaches"]}' - for riv, rch in zip(trib_rivers, trib_reaches): - tree_dict[f"{riv}-{rch}"] = outlet - - # walk network to id intermediate reaches - intermediate_reaches = [] - cur_reach = f"{self.us_river}-{self.us_reach}" - ds_reach = f"{self.ds_river}-{self.ds_reach}" - while cur_reach != ds_reach: - cur_reach = tree_dict[cur_reach] - intermediate_reaches.append(cur_reach) - intermediate_reaches = intermediate_reaches[:-1] # remove d/s reach - return 
intermediate_reaches - - def clean_river_stations(self, ras_data: str) -> str: - """Clean up river station data.""" - lines = ras_data.splitlines() - data = lines[0].split(",") - if "*" in data[1]: - data[1] = str(float(data[1].rstrip("*"))) + "*" - data[1] = data[1].ljust(8) - else: - data[1] = str(float(data[1])).ljust(8) - lines[0] = ",".join(data) - return "\n".join(lines) + "\n" + trib_dists = r[1]["junction_lengths"].split(",") + outlet = (r[1]["ds_rivers"], r[1]["ds_reaches"]) + for riv, rch, dist in zip(trib_rivers, trib_reaches, trib_dists): + juntion_tree_dict[(riv, rch)] = outlet + juntion_dist_dict[(riv, rch)] = float(dist) - def round_river_stations(self, ras_data: str) -> str: - """Clean up river station data.""" - lines = ras_data.splitlines() - data = lines[0].split(",") - if "*" in data[1]: - data[1] = str(float(round(float(data[1].rstrip("*"))))) + "*" - data[1] = data[1].ljust(8) - else: - data[1] = str(float(round(float(data[1])))).ljust(8) - lines[0] = ",".join(data) - return "\n".join(lines) + "\n" + return (juntion_tree_dict, juntion_dist_dict) + + def write_ripple_gpkg(self) -> None: + """Write the subsetted geopackage to the destination project directory.""" + os.makedirs(self.dst_project_dir, exist_ok=True) + + for layer, gdf in self.subset_gdfs.items(): + # remove lateral structures + if layer == "Structure": + if (gdf["type"] == 6).any(): + logging.warning( + f"Lateral structures are not currently supported in ripple1d. The lateral structures will be dropped." 
+ ) + gdf = gdf.loc[gdf["type"] != 6, :] + + if gdf.shape[0] > 0: + gdf.to_file(self.ripple_gpkg_file, layer=layer) + if layer == "XS": + self.ripple_xs_concave_hull.to_file(self.ripple_gpkg_file, driver="GPKG", layer="XS_concave_hull") def update_river_station(self, subset_gdfs: dict[gpd.GeoDataFrame]) -> dict: """Convert river stations to autoincrementing names.""" @@ -472,140 +406,6 @@ def correct_ras_data(self, row): lines[0] = ",".join(data) return "\n".join(lines) + "\n" - def junction_length_to_reach_lengths(self): - """Adjust reach lengths using junction.""" - # TODO adjust reach lengths using junction lengths - raise NotImplementedError - # for row in junction_gdf.iterrows(): - # if us_river in row["us_rivers"] and us_reach in row["us_reach"]: - - def process_as_one_ras_reach(self) -> tuple: - """Process as a single ras-river-reach.""" - xs_subset_gdf = self.source_xs.loc[ - (self.source_xs["river"] == self.us_river) - & (self.source_xs["reach"] == self.us_reach) - & (self.source_xs["river_station"] >= float(self.ds_rs)) - & (self.source_xs["river_station"] <= float(self.us_rs)) - ] - if self.source_structure is not None: - structures_subset_gdf = self.source_structure.loc[ - (self.source_structure["river"] == self.us_river) - & (self.source_structure["reach"] == self.us_reach) - & (self.source_structure["river_station"] >= float(self.ds_rs)) - & (self.source_structure["river_station"] <= float(self.us_rs)) - ] - else: - structures_subset_gdf = None - river_subset_gdf = self.source_river.loc[ - (self.source_river["river"] == self.us_river) & (self.source_river["reach"] == self.us_reach) - ] - - return xs_subset_gdf, structures_subset_gdf, river_subset_gdf - - @property - def xs_us_reach(self) -> gpd.GeoDataFrame: - """Extract cross sections for the upstream reach.""" - return self.source_xs.loc[ - (self.source_xs["river"] == self.us_river) - & (self.source_xs["reach"] == self.us_reach) - & (self.source_xs["river_station"] <= float(self.us_rs)) - ] - - 
@property - def xs_ds_reach(self) -> gpd.GeoDataFrame: - """Extract cross sections for the downstream reach.""" - return self.source_xs.loc[ - (self.source_xs["river"] == self.ds_river) - & (self.source_xs["reach"] == self.ds_reach) - & (self.source_xs["river_station"] >= float(self.ds_rs)) - ] - - @property - def structures_us_reach(self) -> gpd.GeoDataFrame: - """Extract structures for the upstream reach.""" - if self.source_structure is not None: - return self.source_structure.loc[ - (self.source_structure["river"] == self.us_river) - & (self.source_structure["reach"] == self.us_reach) - & (self.source_structure["river_station"] <= float(self.us_rs)) - ] - - @property - def structures_ds_reach(self) -> gpd.GeoDataFrame: - """Extract structures for the downstream reach.""" - if self.source_structure is not None: - return self.source_structure.loc[ - (self.source_structure["river"] == self.ds_river) - & (self.source_structure["reach"] == self.ds_reach) - & (self.source_structure["river_station"] >= float(self.ds_rs)) - ] - - def add_intermediate_river_reaches(self, intermediate_river_reaches) -> gpd.GeoDataFrame: - """Add intermediate river reaches to the xs_us_reach.""" - xs_us_reach = self.xs_us_reach.copy() - for intermediate_river_reach in intermediate_river_reaches: - xs_intermediate_river_reach = self.source_xs.loc[self.source_xs["river_reach"] == intermediate_river_reach] - if xs_us_reach["river_station"].min() <= xs_intermediate_river_reach["river_station"].max(): - logging.info( - f"The lowest river station on the upstream reach ({xs_us_reach['river_station'].min()}) is less" - f" than the highest river station on the intermediate reach" - f"({xs_intermediate_river_reach['river_station'].max()}) for nwm_id: {self.nwm_id}. 
The river" - f" stationing of the upstream reach will be updated to ensure river stationings increase from" - "downstream to upstream" - ) - xs_us_reach["river_station"] = ( - xs_us_reach["river_station"] + xs_intermediate_river_reach["river_station"].max() - ) - xs_us_reach["ras_data"] = xs_us_reach["ras_data"].apply( - lambda ras_data: self.update_river_station( - ras_data, xs_intermediate_river_reach["river_station"].max() - ) - ) - xs_us_reach = pd.concat([xs_us_reach, xs_intermediate_river_reach]) - return xs_us_reach - - def adjust_river_stations(self, xs_us_reach, structures_us_reach) -> tuple: - """Adjust river stations of the upstream reach if the min river station of the upstream reach is less than the max river station of the downstream reach.""" - if xs_us_reach["river_station"].min() <= self.xs_ds_reach["river_station"].max(): - logging.info( - f"the lowest river station on the upstream reach ({xs_us_reach['river_station'].min()}) is less" - f" than the highest river station on the downstream reach ({self.xs_ds_reach['river_station'].max()}) for nwm_id: {self.nwm_id}" - ) - xs_us_reach["river_station"] = xs_us_reach["river_station"] + self.xs_ds_reach["river_station"].max() - xs_us_reach["ras_data"] = xs_us_reach["ras_data"].apply( - lambda ras_data: self.update_river_station(ras_data, self.xs_ds_reach["river_station"].max()) - ) - if structures_us_reach is not None: - structures_us_reach["river_station"] = ( - structures_us_reach["river_station"] + self.xs_ds_reach["river_station"].max() - ) - structures_us_reach["ras_data"] = structures_us_reach["ras_data"].apply( - lambda ras_data: self.update_river_station(ras_data, self.xs_ds_reach["river_station"].max()) - ) - return xs_us_reach, structures_us_reach - - def process_as_multiple_ras_reach(self) -> tuple: - """Process as multiple ras-river-reach.""" - # add intermediate river reaches to the upstream reach - intermediate_river_reaches = self.walk_junctions() - if intermediate_river_reaches: - 
xs_us_reach = self.add_intermediate_river_reaches(intermediate_river_reaches) - else: - xs_us_reach = self.xs_us_reach.copy() - - # update river stations - xs_us_reach, structures_us_reach = self.adjust_river_stations(xs_us_reach, self.structures_us_reach) - # combine us and ds gdfs - xs_subset_gdf = pd.concat([xs_us_reach, self.xs_ds_reach]) - river_subset_gdf = self.combine_reach_features(intermediate_river_reaches) - - if self.source_structure is not None: - structures_subset_gdf = pd.concat([structures_us_reach, self.structures_ds_reach]) - else: - structures_subset_gdf = None - - return xs_subset_gdf, structures_subset_gdf, river_subset_gdf - def rename_river_reach(self, gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: """Rename river, reach, and river_reach columns after the nwm reach id.""" pd.options.mode.copy_on_write = True @@ -615,47 +415,6 @@ def rename_river_reach(self, gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: return gdf - def combine_reach_features(self, intermediate_river_reaches: list[str]) -> gpd.GeoDataFrame: - """Combine reach coordinates and update river and reach names to be nwm id.""" - us_reach = self.source_river.loc[ - (self.source_river["river"] == self.us_river) & (self.source_river["reach"] == self.us_reach) - ] - ds_reach = self.source_river.loc[ - (self.source_river["river"] == self.ds_river) & (self.source_river["reach"] == self.ds_reach) - ] - - # handle river reach coords - coords = list(us_reach.iloc[0]["geometry"].coords) - if intermediate_river_reaches: - for intermediate_river_reach in intermediate_river_reaches: - intermediate_river_reach_reach = self.source_river.loc[ - self.source_river["river_reach"] == intermediate_river_reach - ] - coords += list(intermediate_river_reach_reach.iloc[0]["geometry"].coords) - coords += list(ds_reach.iloc[0]["geometry"].coords) - - return gpd.GeoDataFrame( - {"geometry": [LineString(coords)], "river": [self.nwm_id], "reach": [self.nwm_id]}, - geometry="geometry", - crs=self.crs, - ) - - def 
autoincrement_stations(self, gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: - """Update river_stations and ras data to be autoincrementing instead of distances.""" - gdf["river_station"] = range(1, len(gdf) + 1) # autoincrementing field - for row in gdf.iterrows(): # for each XS - # update station name in RAS data - ras_data = row[1]["ras_data"] - lines = ras_data.splitlines() - data = lines[0].split(",") - if "*" in data[1]: - data[1] = str(row[1]["river_station"] + "*").ljust(8) - else: - data[1] = str(row[1]["river_station"]).ljust(8) - lines[0] = ",".join(data) - gdf.loc[row[0], "ras_data"] = "\n".join(lines) + "\n" - return gdf - def update_ripple1d_parameters(self, rsd: RippleSourceDirectory): """Update ripple1d_parameters with results of subsetting.""" ripple1d_parameters = self.ripple1d_parameters From c3e17d75bd4cd0629ac997565577d60dbd3e977f Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 13:42:53 -0500 Subject: [PATCH 32/46] clean up --- ripple1d/ops/subset_gpkg.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index 61b25254..fc98db74 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -204,7 +204,7 @@ def subset_xs(self) -> gpd.GeoDataFrame: @property @lru_cache - def subset_river(self): + def subset_river(self) -> gpd.GeoDataFrame: """Trim source centerline to u/s and d/s limits and add all intermediate reaches.""" coords = [] subset_rivers = [] @@ -225,7 +225,7 @@ def subset_river(self): @property @lru_cache - def subset_structures(self): + def subset_structures(self) -> gpd.GeoDataFrame | None: """Extract structures between u/s and d/s limits.""" if self.source_structure is None: return None @@ -334,7 +334,7 @@ def trim_reach(self, reach_xs: gpd.GeoDataFrame) -> gpd.GeoDataFrame: return reach_xs @lru_cache - def junctions_to_dicts(self): + def junctions_to_dicts(self) -> tuple[dict, dict]: """Make dicts that map trib->outflow and 
trib->d/s distance for all junctions.""" juntion_tree_dict = {} juntion_dist_dict = {} @@ -463,8 +463,6 @@ def extract_submodel(source_model_directory: str, submodel_directory: str, nwm_i FileNotFoundError Raised when no .conflation.json is found in the source model directory """ - # time.sleep(10) - if not os.path.exists(source_model_directory): raise FileNotFoundError( f"cannot find directory for source model {source_model_directory}, please ensure dir exists" @@ -472,7 +470,6 @@ def extract_submodel(source_model_directory: str, submodel_directory: str, nwm_i rsd = RippleSourceDirectory(source_model_directory) logging.info(f"extract_submodel starting for nwm_id {nwm_id}") - # print(f"preparing to extract NWM ID {nwm_id} from {os.path.basename(rsd.ras_project_file)}") if not rsd.file_exists(rsd.ras_gpkg_file): raise FileNotFoundError(f"cannot find file ras-geometry file {rsd.ras_gpkg_file}, please ensure file exists") From be254b73124ae8575427ee61d57e8c67c653c71c Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 13:47:42 -0500 Subject: [PATCH 33/46] integrate conflation metrics with refactor. lru_cache broke the old way. 
--- ripple1d/ops/metrics.py | 3 +-- ripple1d/ops/subset_gpkg.py | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/ripple1d/ops/metrics.py b/ripple1d/ops/metrics.py index 5474aec5..1f8dc845 100644 --- a/ripple1d/ops/metrics.py +++ b/ripple1d/ops/metrics.py @@ -276,14 +276,13 @@ def compute_conflation_metrics(source_model_directory: str, source_network: dict src_gpkg_path = os.path.join(source_model_directory, f"{model_name}.gpkg") conflation_json = os.path.join(source_model_directory, f"{model_name}.conflation.json") conflation_parameters = json.load(open(conflation_json)) - rgs = RippleGeopackageSubsetter(src_gpkg_path, conflation_json, "") for network_id in conflation_parameters["reaches"].keys(): try: if conflation_parameters["reaches"][network_id]["eclipsed"] == True: continue - rgs.set_nwm_id(network_id) + rgs = RippleGeopackageSubsetter(src_gpkg_path, conflation_json, network_id) layers = {} for layer, gdf in rgs.subset_gdfs.items(): layers[layer] = gdf.to_crs(HYDROFABRIC_CRS) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index fc98db74..981c49bc 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -33,12 +33,6 @@ def __init__(self, src_gpkg_path: str, conflation_json: str, dst_project_dir: st self.dst_project_dir = dst_project_dir self.nwm_id = nwm_id - def set_nwm_id(self, nwm_id: str): - """Set the network ID.""" - self.nwm_id = nwm_id - self._subset_gdf = None - self._ripple_xs_concave_hull = None - @property @lru_cache def conflation_parameters(self) -> dict: From 37ccb4749e31336a24da2a1a8f987fd67de7d7b6 Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 15:49:47 -0500 Subject: [PATCH 34/46] fix structure subsetting --- ripple1d/ops/subset_gpkg.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index 981c49bc..e4ec28db 100644 --- a/ripple1d/ops/subset_gpkg.py +++ 
b/ripple1d/ops/subset_gpkg.py @@ -145,16 +145,15 @@ def split_source_hull(self): return hulls @property + @lru_cache def ripple_xs_concave_hull(self): """Get the concave hull of the cross sections.""" - if self._ripple_xs_concave_hull is None: - try: - hulls = self.split_source_hull - self._ripple_xs_concave_hull = gpd.GeoDataFrame({"geometry": hulls}, geometry="geometry", crs=self.crs) - except Exception as e: - self._ripple_xs_concave_hull = xs_concave_hull(fix_reversed_xs(self.ripple_xs, self.ripple_river)) - - return self._ripple_xs_concave_hull + try: + hulls = self.split_source_hull + ripple_xs_concave_hull = gpd.GeoDataFrame({"geometry": hulls}, geometry="geometry", crs=self.crs) + except Exception as e: + ripple_xs_concave_hull = xs_concave_hull(fix_reversed_xs(self.ripple_xs, self.ripple_river)) + return ripple_xs_concave_hull @property def juntion_tree_dict(self) -> dict: @@ -229,8 +228,9 @@ def subset_structures(self) -> gpd.GeoDataFrame | None: ) # empty copy to put subset into subset_structures["source_river_station"] = [] for river_reach in self.subset_xs["river_reach"].unique(): - us_limit = self.subset_xs["source_river_station"].max() # TODO: can a structure be placed d/s of junction? - ds_limit = self.subset_xs["source_river_station"].min() # TODO: can a structure be placed u/s of junction? + tmp_xs = self.subset_xs[self.subset_xs["river_reach"] == river_reach] + us_limit = tmp_xs["source_river_station"].max() # TODO: can a structure be placed d/s of junction? + ds_limit = tmp_xs["source_river_station"].min() # TODO: can a structure be placed u/s of junction? 
tmp_structures = self.source_structure.loc[ (self.source_structure["river_reach"] == river_reach) & (self.source_structure["river_station"] >= float(ds_limit)) @@ -372,7 +372,7 @@ def update_river_station(self, subset_gdfs: dict[gpd.GeoDataFrame]) -> dict: if "Structure" in subset_gdfs: structures = subset_gdfs["Structure"] - str_names = [xs_names[xs["river_station"] < i][0] + 0.5 for i in structures["river_station"]] + str_names = [xs_names[(xs["river_station"] > i).argmin()] + 0.5 for i in structures["river_station"]] subset_gdfs["Structure"]["river_station"] = str_names subset_gdfs["Structure"]["ras_data"] = subset_gdfs["Structure"][["ras_data", "river_station"]].apply( self.correct_ras_data, axis=1 From 7e316ba0140058dd218c25c9cbabec2537814f11 Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 12 Dec 2024 17:34:28 -0500 Subject: [PATCH 35/46] correct stationing --- ripple1d/ops/subset_gpkg.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ripple1d/ops/subset_gpkg.py b/ripple1d/ops/subset_gpkg.py index e4ec28db..5f0fe19b 100644 --- a/ripple1d/ops/subset_gpkg.py +++ b/ripple1d/ops/subset_gpkg.py @@ -237,8 +237,8 @@ def subset_structures(self) -> gpd.GeoDataFrame | None: & (self.source_structure["river_station"] <= float(us_limit)) ] tmp_structures["source_river_station"] = tmp_structures["river_station"] - new_ds_limit = self.subset_xs["river_station"].min() - tmp_structures["river_station"] = (tmp_structures["river_station"] - ds_limit) + new_ds_limit + offset = tmp_xs["source_river_station"].iloc[0] - tmp_xs["river_station"].iloc[0] + tmp_structures["river_station"] = tmp_structures["river_station"] - offset subset_structures = pd.concat([subset_structures, tmp_structures]) if len(subset_structures) == 0: @@ -393,10 +393,10 @@ def correct_ras_data(self, row): lines = ras_data.splitlines() data = lines[0].split(",") if "*" in data[1]: - data[1] = str(float(data[1].rstrip("*")) + rs) + "*" + data[1] = str(float(rs)) + "*" data[1] = 
data[1].ljust(8) else: - data[1] = str(float(data[1]) + rs).ljust(8) + data[1] = str(float(rs)).ljust(8) lines[0] = ",".join(data) return "\n".join(lines) + "\n" From 3864fab6d8da47607c567c18d649dc26a2e8c980 Mon Sep 17 00:00:00 2001 From: sclaw Date: Fri, 13 Dec 2024 10:20:13 -0500 Subject: [PATCH 36/46] fix huey consumer path discovery --- ripple1d/api/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ripple1d/api/manager.py b/ripple1d/api/manager.py index 8a491e80..c3a10ea8 100644 --- a/ripple1d/api/manager.py +++ b/ripple1d/api/manager.py @@ -47,7 +47,7 @@ def start(self): if sys.platform != "win32": raise SystemError("API can only be run from a windows system") - huey_consumer_path = shutil.which("huey_consumer.py") + huey_consumer_path = os.path.join(os.path.dirname(sys.executable), "huey_consumer.py") if not huey_consumer_path: print("Error: huey consumer script was not discoverable.") From 41ffe9b5569cebaf69f529705f66769e31e407a3 Mon Sep 17 00:00:00 2001 From: sclaw Date: Fri, 13 Dec 2024 10:27:50 -0500 Subject: [PATCH 37/46] fix f-string --- ripple1d/utils/sqlite_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ripple1d/utils/sqlite_utils.py b/ripple1d/utils/sqlite_utils.py index 42e0c13d..c674bbfa 100644 --- a/ripple1d/utils/sqlite_utils.py +++ b/ripple1d/utils/sqlite_utils.py @@ -41,7 +41,7 @@ def insert_data( for row in data.itertuples(): if boundary_condition == "kwse": - if f"f_{int(row.us_flow)}-z_{str(row.ds_wse).replace(".","_")}" in missing_grids: + if f'f_{int(row.us_flow)}-z_{str(row.ds_wse).replace(".","_")}' in missing_grids: map_exist = 0 else: map_exist = 1 From 5434a7e5dacd9a1183f1add961b4d0065d1061d5 Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 19 Dec 2024 16:02:52 -0500 Subject: [PATCH 38/46] fix merge conflict --- ripple1d/utils/sqlite_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ripple1d/utils/sqlite_utils.py b/ripple1d/utils/sqlite_utils.py index 
77a307f8..c674bbfa 100644 --- a/ripple1d/utils/sqlite_utils.py +++ b/ripple1d/utils/sqlite_utils.py @@ -41,7 +41,7 @@ def insert_data( for row in data.itertuples(): if boundary_condition == "kwse": - if f"f_{int(row.us_flow)}-z_{str(row.ds_wse).replace('.','_')}" in missing_grids: + if f'f_{int(row.us_flow)}-z_{str(row.ds_wse).replace(".","_")}' in missing_grids: map_exist = 0 else: map_exist = 1 From 3c5c216bacab809319a2b7e756b9bcf55e759c17 Mon Sep 17 00:00:00 2001 From: sclaw Date: Thu, 19 Dec 2024 16:27:39 -0500 Subject: [PATCH 39/46] resolve merge conflicts --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 85017205..a007c2d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,8 @@ dependencies = [ "rasterio==1.3.10", "requests==2.32.3", "shapely==2.0.5", + "xarray==2024.11.0", + "rioxarray==0.18.1" ] [project.optional-dependencies] From 3e69343dc7b98e23d8f2c50488e400bdb4029453 Mon Sep 17 00:00:00 2001 From: sclaw Date: Fri, 20 Dec 2024 08:06:26 -0500 Subject: [PATCH 40/46] handle no htab models --- ripple1d/data_model.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index 0910f2c6..edf8e733 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -421,12 +421,19 @@ def thalweg(self): @property def has_htab_error(self): """Check if min htab value is less than section invert.""" - return self.htab_starting_el < self.thalweg + if self.htab_string is None: + return None + else: + return self.htab_starting_el < self.thalweg @property def htab_string(self): """Cross section htab string.""" - return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True) + try: + htabstr = search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True) + except: + htabstr = None + return htabstr @property def htab_starting_el(self): From 251a6eee424077a35ceb5ce4c8795bb172a19d08 Mon Sep 17 
00:00:00 2001 From: sclaw Date: Fri, 20 Dec 2024 10:17:55 -0500 Subject: [PATCH 41/46] handle no htab models better --- ripple1d/data_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index edf8e733..8a45a467 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -422,7 +422,7 @@ def thalweg(self): def has_htab_error(self): """Check if min htab value is less than section invert.""" if self.htab_string is None: - return None + return False else: return self.htab_starting_el < self.thalweg From d4de03da0882fb3e0c0bd0868307b2a3d7bca0d3 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Dec 2024 09:43:23 -0600 Subject: [PATCH 42/46] detemine units of the crs --- ripple1d/utils/ripple_utils.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/ripple1d/utils/ripple_utils.py b/ripple1d/utils/ripple_utils.py index 983c5365..88c14d6f 100644 --- a/ripple1d/utils/ripple_utils.py +++ b/ripple1d/utils/ripple_utils.py @@ -36,6 +36,20 @@ load_dotenv(find_dotenv()) +def determine_crs_units(crs: CRS): + """Determine the units of the crs.""" + if type(crs) not in [str, int, CRS]: + raise TypeError(f"expected either pyproj.CRS, wkt(st), or epsg code(int); recieved {type(crs)} ") + + unit_name = CRS(crs).axis_info[0].unit_name + if crs.axis_info[0].unit_name not in ["degree", "US survey foot", "foot", "metre"]: + raise ValueError( + f"Expected the crs units to be one of degree, US survey foot, foot, or metre; recieved {unit_name}" + ) + + return unit_name + + def clip_ras_centerline(centerline: LineString, xs: gpd.GeoDataFrame, buffer_distance: float = 0): """Clip RAS centeline to the most upstream and downstream cross sections.""" us_xs, ds_xs = us_ds_xs(xs) From 72ddfb6f6983d6a2e11fcbd5f37a0158a0724c8a Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Dec 2024 09:44:50 -0600 Subject: [PATCH 43/46] helper function for extracting manning's values --- 
ripple1d/utils/ripple_utils.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/ripple1d/utils/ripple_utils.py b/ripple1d/utils/ripple_utils.py index 88c14d6f..af8ef15a 100644 --- a/ripple1d/utils/ripple_utils.py +++ b/ripple1d/utils/ripple_utils.py @@ -11,6 +11,7 @@ import geopandas as gpd import pandas as pd from dotenv import find_dotenv, load_dotenv +from pyproj import CRS from shapely import ( LineString, MultiPoint, @@ -350,6 +351,19 @@ def data_pairs_from_text_block(lines: list[str], width: int) -> list[tuple[float return pairs +def data_triplets_from_text_block(lines: list[str], width: int) -> list[tuple[float]]: + """Split lines at given width to get paired data string. Split the string in half and convert to tuple of floats.""" + pairs = [] + for line in lines: + for i in range(0, len(line), width): + x = line[i : int(i + width / 3)] + y = line[int(i + width / 3) : int(i + (width * 2 / 3))] + z = line[int(i + (width * 2 / 3)) : int(i + (width))] + pairs.append((float(x), float(y), float(z))) + + return pairs + + def handle_spaces(line: str, lines: list[str]): """Handle spaces in the line.""" if line in lines: From 9402d83ababcfc5104d5d2b08362eb7302cf5887 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Dec 2024 09:45:19 -0600 Subject: [PATCH 44/46] pass model units to the cross section --- ripple1d/ras.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/ripple1d/ras.py b/ripple1d/ras.py index bab8bd3b..2796ce41 100644 --- a/ripple1d/ras.py +++ b/ripple1d/ras.py @@ -199,7 +199,7 @@ def get_plans(self): plans = {} for plan_file in self.ras_project.plans: try: - plan = RasPlanText(plan_file, self.crs) + plan = RasPlanText(plan_file, self.crs, units=self.ras_project.units) plans[plan.title] = plan except FileNotFoundError: logging.info(f"Could not find plan file: {plan_file}") @@ -211,7 +211,7 @@ def get_geoms(self): geoms = {} for geom_file in self.ras_project.geoms: try: - geom = 
RasGeomText(geom_file, self.crs) + geom = RasGeomText(geom_file, self.crs, units=self.ras_project.units) geoms[geom.title] = geom except FileNotFoundError: logging.warning(f"Could not find geom file: {geom_file}") @@ -380,7 +380,7 @@ def write_new_plan_text_file( plan_text_file = self.ras_project._ras_root_path + f".p{new_extension_number}" # create plan - rpt = RasPlanText(plan_text_file, self.crs, new_file=True) + rpt = RasPlanText(plan_text_file, self.crs, new_file=True, units=self.ras_project.units) # populate new plan info rpt.new_plan_contents( @@ -579,7 +579,7 @@ def set_current_plan(self, plan_ext): class RasPlanText(RasTextFile): """Represents a HEC-RAS plan file.""" - def __init__(self, ras_text_file_path: str, crs: str = None, new_file: bool = False): + def __init__(self, ras_text_file_path: str, crs: str = None, new_file: bool = False, units: str = "English"): super().__init__(ras_text_file_path, new_file) if self.file_extension not in VALID_PLANS: raise TypeError(f"Plan extenstion must be one of .p01-.p99, not {self.file_extension}") @@ -654,7 +654,7 @@ def plan_steady_extension(self): @check_crs def geom(self): """Represents the HEC-RAS geometry file associated with this plan.""" - return RasGeomText(self.plan_geom_file, self.crs) + return RasGeomText(self.plan_geom_file, self.crs, units=self.units) @property def flow(self): @@ -767,12 +767,13 @@ def remove_multiple_spaces(x): class RasGeomText(RasTextFile): """Represents a HEC-RAS geometry text file.""" - def __init__(self, ras_text_file_path: str, crs: str = None, new_file=False): + def __init__(self, ras_text_file_path: str, crs: str = None, new_file=False, units: str = "English"): super().__init__(ras_text_file_path, new_file) if not new_file and self.file_extension not in VALID_GEOMS: raise TypeError(f"Geometry extenstion must be one of .g01-.g99, not {self.file_extension}") self.crs = CRS(crs) + self.units = units self.hdf_file = self._ras_text_file_path + ".hdf" def __repr__(self): @@ -885,7 
+886,7 @@ def reaches(self) -> dict: river_reaches = search_contents(self.contents, "River Reach", expect_one=False) reaches = {} for river_reach in river_reaches: - reaches[river_reach] = Reach(self.contents, river_reach, self.crs) + reaches[river_reach] = Reach(self.contents, river_reach, self.crs, self.units) return reaches @property From e0d9c6025eb421da9b3844bdca3e87c734307c95 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Dec 2024 09:46:19 -0600 Subject: [PATCH 45/46] methods/properties for model quality metrics --- ripple1d/data_model.py | 439 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 430 insertions(+), 9 deletions(-) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index 60353ef8..6d6a4883 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -6,21 +6,27 @@ import os import sqlite3 from dataclasses import dataclass +from functools import lru_cache from pathlib import Path, PurePosixPath, PureWindowsPath from typing import List import geopandas as gpd +import numpy as np import pandas as pd from pyproj import CRS +from shapely import reverse from shapely.geometry import LineString, Point from ripple1d.utils.ripple_utils import ( data_pairs_from_text_block, + data_triplets_from_text_block, + determine_crs_units, fix_reversed_xs, search_contents, text_block_from_start_end_str, text_block_from_start_str_length, text_block_from_start_str_to_empty_line, + validate_point, xs_concave_hull, ) from ripple1d.utils.s3_utils import init_s3_resources, read_json_from_s3 @@ -364,13 +370,20 @@ class FlowChangeLocation: class XS: """HEC-RAS Cross Section.""" - def __init__(self, ras_data: list, river_reach: str, river: str, reach: str, crs: str): + def __init__( + self, ras_data: list, river_reach: str, river: str, reach: str, crs: str, reach_geom: LineString, units + ): self.ras_data = ras_data self.crs = crs self.river = river self.reach = reach self.river_reach = river_reach self.river_reach_rs = f"{river} {reach} 
{self.river_station}" + self.thalweg_drop = None + self.reach_geom = reach_geom + self.computed_channel_reach_length = None + self.computed_channel_reach_length_ratio = None + self.units = units def split_xs_header(self, position: int): """ @@ -460,11 +473,336 @@ def bank_stations(self): return search_contents(self.ras_data, "Bank Sta", expect_one=True).split(",") @property + def left_bank_station(self): + """The cross sections left bank station.""" + return float(self.bank_stations[0]) + + @property + def right_bank_station(self): + """The cross sections right bank station.""" + return float(self.bank_stations[1]) + + @property + def station_length(self): + """Length of cross section based on station-elevation data.""" + return float(self.station_elevation_points[-1][0]) + + @property + def cutline_length(self): + """Length of the cross section bassed on the geometry (x-y coordinates).""" + return self.geom.length * self.unit_conversion + + @property + def xs_length_ratio(self): + """Ratio of the station length to the cutline length.""" + return self.station_length / self.cutline_length + + @property + @lru_cache + def geom(self): + """Geometry of the cross section according to its coords.""" + return LineString(self.coords) + + @property + def banks_encompass_channel(self): + """A boolean; True if the channel centerlien intersects the cross section between the bank stations.""" + if ( + self.centerline_intersection_station < self.right_bank_station + and self.centerline_intersection_station > self.left_bank_station + ): + return True + else: + return False + + @property + def centerline_intersection_station(self): + """Station along the cross section where the centerline intersects it.""" + return self.geom.project(self.centerline_intersection_point) * self.unit_conversion + + @property + def centerline_intersection_point(self): + """A point located where the cross section and reach centerline intersect.""" + if self.reach_geom.intersects(self.geom): + return 
self.reach_geom.intersection(self.geom) + else: + raise IndexError(f"The cross section does not intersect the reach: {self.river_reach_rs}") + + @property + def left_reach_length_ratio(self): + """The ratio of the left reach length to the channel reach length.""" + if self.reach_lengths_populated: + return self.left_reach_length / self.channel_reach_length + + @property + def right_reach_length_ratio(self): + """The ratio of the right reach length to the channel reach length.""" + if self.reach_lengths_populated: + return self.right_reach_length / self.channel_reach_length + + @property + def reach_lengths(self): + """The reach lengths of the cross section.""" + return [self.right_reach_length, self.left_reach_length, self.channel_reach_length] + + @property + def reach_lengths_populated(self): + """A boolean indicating if all the reach lengths are poputed.""" + if np.isnan(self.reach_lengths).any(): + return False + elif len([i for i in self.reach_lengths if i == 0]) > 0: + return False + else: + return True + + @property + def skew(self): + """The skew applied to the cross section.""" + skew = search_contents(self.ras_data, "Skew Angle", expect_one=False) + if len(skew) == 1: + return skew[0] + elif len(skew) > 1: + raise ValueError( + f"Expected only one skew value for the cross section recieved: {len(skew)}. XS: {self.river_reach_rs}" + ) + + @property + def number_of_mannings_points(self): + """The number of mannings points in the cross section.""" + return int(search_contents(self.ras_data, "#Mann", expect_one=True).split(",")[0]) + + @property + def mannings_code(self): + """ + A code indicating what type of manning's values are used. + + 0, -1 correspond to 3 value manning's; horizontally varying manning's values, respectively. 
+ """ + return search_contents(self.ras_data, "#Mann", expect_one=True).split(",")[1] + + @property + def horizontal_varying_mannings(self): + """A boolean indicating if horizontally varied mannings values are applied.""" + if self.mannings_code == -1: + return True + else: + return False + + @property + def expansion_coefficient(self): + """The expansion coefficient for the cross section.""" + return search_contents(self.ras_data, "Exp/Cntr", expect_one=True).split(",")[0] + + @property + def contraction_coefficient(self): + """The expansion coefficient for the cross section.""" + return search_contents(self.ras_data, "Exp/Cntr", expect_one=True).split(",")[1] + + @property + @lru_cache + def mannings(self): + """The manning's values of the cross section.""" + try: + lines = text_block_from_start_str_length( + "#Mann=" + search_contents(self.ras_data, "#Mann", expect_one=True), + math.ceil(self.number_of_mannings_points / 4), + self.ras_data, + ) + return data_triplets_from_text_block(lines, 24) + except ValueError as e: + print(e) + return None + + @property + @lru_cache + def max_n(self): + """The highest manning's n value used in the cross section.""" + return max(list(zip(*self.mannings))[1]) + + @property + @lru_cache + def min_n(self): + """The lowest manning's n value used in the cross section.""" + return min(list(zip(*self.mannings))[1]) + + @property + def has_levees(self): + """A boolean indicating if the cross section contains levees.""" + levees = search_contents(self.ras_data, "Levee", expect_one=False) + if len(levees) > 0: + return True + else: + return False + + @property + def has_ineffectives(self): + """A boolean indicating if the cross section contains ineffective flow areas.""" + ineff = search_contents(self.ras_data, "#XS Ineff", expect_one=False) + if len(ineff) > 0: + return True + else: + return False + + @property + def has_blocks(self): + """A boolean indicating if the cross section contains blocked obstructions.""" + blocks = 
search_contents(self.ras_data, "#Block Obstruct", expect_one=False) + if len(blocks) > 0: + return True + else: + return False + + @property + def channel_obstruction(self): + """ + A boolean indicating if the channel is being blocked. + + A boolean indicating if ineffective flow area, blocked obstructions, or levees are contained + in the channel (between bank stations). + """ + + @property + @lru_cache + def station_elevation_df(self): + """A pandas DataFrame containing the station-elevation data of the cross section.""" + return pd.DataFrame(self.station_elevation_points, columns=["Station", "Elevation"]) + + @property + def left_max_elevation(self): + """Max Elevation on the left side of the channel.""" + return self.station_elevation_df.loc[ + self.station_elevation_df["Station"] <= self.left_bank_station, "Elevation" + ].max() + + @property + def right_max_elevation(self): + """Max Elevation on the right side of the channel.""" + df = pd.DataFrame(self.station_elevation_points, columns=["Station", "Elevation"]) + return df.loc[df["Station"] >= self.right_bank_station, "Elevation"].max() + + @property + def station_elevation_point_density(self): + """The average spacing of the station-elevation points.""" + return self.cutline_length / self.number_of_station_elevation_points + + @property + def channel_width(self): + """The width of the cross section between bank points.""" + return self.right_bank_station - self.left_bank_station + + @property + def left_bank_elevation(self): + """Elevation of the left bank station.""" + return self.station_elevation_df.loc[ + self.station_elevation_df["Station"] == self.left_bank_station, "Elevation" + ].iloc[0] + + @property + def right_bank_elevation(self): + """Elevation of the right bank station.""" + return self.station_elevation_df.loc[ + self.station_elevation_df["Station"] == self.right_bank_station, "Elevation" + ].iloc[0] + + @property + def channel_depth(self): + """The depth of the channel; i.e., the depth at 
which the first bank station is overtopped.""" + return min([self.left_bank_elevation, self.right_bank_elevation]) - self.thalweg + + @property + def htab_min_elevation(self): + """The starting elevation for the cross section's htab.""" + return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[0] + + @property + def htab_min_increment(self): + """The increment for the cross section's htab.""" + return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[1] + + @property + def htab_points(self): + """The number of points on the cross section's htab.""" + search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[2] + + def set_thalweg_drop(self, ds_thalweg): + """Set the drop in thalweg elevation between this cross section and the downstream cross section.""" + self.thalweg_drop = self.thalweg - ds_thalweg + + def set_computed_reach_length(self, computed_river_station: float): + """Set the channel reach length computed from the reach/xs/ds_xs geometry.""" + # if self.reach_lengths_populated and computed_river_station + self.computed_channel_reach_length = self.computed_river_station - computed_river_station + + def set_computed_reach_length_ratio(self): + """Set the ratio of the computed channel reach length to the model channel reach length.""" + self.computed_channel_reach_length_ratio = self.computed_channel_reach_length / self.channel_reach_length + + @property + @lru_cache + def computed_river_station(self): + """The computed river stationing according to the reach geometry.""" + return reverse(self.reach_geom).project(self.centerline_intersection_point) * self.unit_conversion + + @property + @lru_cache + def correct_cross_section_direction(self): + """A boolean indicating if the cross section is drawn from right to left looking downstream.""" + offset = self.geom.offset_curve(-1) + if self.reach_geom.intersects(offset): # if the offset line 
intersects then use this logic + point = self.reach_geom.intersection(offset) + point = validate_point(point) + + offset_rs = self.reach_geom.project(point) + if self.computed_river_station > offset_rs: + return True + else: + return False + else: # if the original offset line did not intersect then try offsetting the other direction and applying + # the opposite stationing logic; the orginal line may have gone beyound the other line. + offset = self.geom.offset_curve(1) + point = self.reach_geom.intersection(offset) + point = validate_point(point) + + offset_rs = self.reach_geom.project(point) + if self.computed_river_station < offset_rs: + return True + else: + return False + + @property + @lru_cache + def unit_conversion(self): + """Conversion factor for units based on the model units and crs units.""" + if self.crs_units in ["US survey foot", "foot"] and self.units == "English": + return 1 + elif self.crs_units == "metre" and self.units == "Metric": + return 1 + elif self.crs_units == "metre" and self.units == "English": + return 1 / 3.281 + elif self.crs_units in ["US survey foot", "foot"] and self.units == "Metric": + return 3.281 + + @property + @lru_cache + def crs_units(self): + """The units of the crs.""" + return determine_crs_units(self.crs) + + def set_bridge_xs(self, br: int): + """ + Set the bridge cross section attribute. + + A value of 0 is added for non-bridge cross sections and 4, 3, 2, 1 are + set for each of the bridge cross sections from downstream to upstream order. 
+ """ + self.bridge_xs = br + + @property + @lru_cache def gdf(self): """Cross section geodataframe.""" return gpd.GeoDataFrame( { - "geometry": [LineString(self.coords)], + "geometry": [self.geom], "river": [self.river], "reach": [self.reach], "river_reach": [self.river_reach], @@ -475,12 +813,48 @@ def gdf(self): "left_reach_length": [self.left_reach_length], "right_reach_length": [self.right_reach_length], "channel_reach_length": [self.channel_reach_length], + "computed_channel_reach_length": [self.computed_channel_reach_length], + "computed_channel_reach_length_ratio": [self.computed_channel_reach_length_ratio], + "left_reach_length_ratio": [self.left_reach_length_ratio], + "right_reach_length_ratio": [self.right_reach_length_ratio], + "reach_lengths_populated": [self.reach_lengths_populated], "ras_data": ["\n".join(self.ras_data)], "station_elevation_points": [self.station_elevation_points], "bank_stations": [self.bank_stations], + "left_bank_station": [self.left_bank_station], + "right_bank_station": [self.right_bank_station], + "left_bank_elevation": [self.left_bank_elevation], + "right_bank_elevation": [self.right_bank_elevation], "number_of_station_elevation_points": [self.number_of_station_elevation_points], "number_of_coords": [self.number_of_coords], - # "coords": [self.coords], + "station_length": [self.station_length], + "cutline_length": [self.cutline_length], + "xs_length_ratio": [self.xs_length_ratio], + "banks_encompass_channel": [self.banks_encompass_channel], + "skew": [self.skew], + "max_n": [self.max_n], + "min_n": [self.min_n], + # "has_lateral_structure": [self.has_lateral_structures], + "has_ineffective": [self.has_ineffectives], + "has_levees": [self.has_levees], + "has_blocks": [self.has_blocks], + "channel_obstruction": [self.channel_obstruction], + "thalweg_drop": [self.thalweg_drop], + "left_max_elevation": [self.left_max_elevation], + "right_max_elevation": [self.right_max_elevation], + "channel_width": [self.channel_width], + 
"channel_depth": [self.channel_depth], + "station_elevation_point_density": [self.station_elevation_point_density], + "htab_min_elevation": [self.htab_min_elevation], + "htab_min_increment": [self.htab_min_increment], + "htab points": [self.htab_points], + "correct_cross_section_direction": [self.correct_cross_section_direction], + "horizontal_varying_mannings": [self.horizontal_varying_mannings], + "number_of_mannings_points": [self.number_of_mannings_points], + "expansion_coefficient": [self.expansion_coefficient], + "contraction_coefficient": [self.contraction_coefficient], + "centerline_intersection_station": [self.centerline_intersection_station], + "bridge_xs": [self.bridge_xs], }, crs=self.crs, geometry="geometry", @@ -546,6 +920,7 @@ def width(self): return float(self.structure_data(1)) @property + @lru_cache def gdf(self): """Structure geodataframe.""" return gpd.GeoDataFrame( @@ -569,13 +944,14 @@ def gdf(self): class Reach: """HEC-RAS River Reach.""" - def __init__(self, ras_data: list, river_reach: str, crs: str): + def __init__(self, ras_data: list, river_reach: str, crs: str, units: str): reach_lines = text_block_from_start_end_str(f"River Reach={river_reach}", ["River Reach"], ras_data, -1) self.ras_data = reach_lines self.crs = crs self.river_reach = river_reach self.river = river_reach.split(",")[0].rstrip() self.reach = river_reach.split(",")[1].rstrip() + self.units = units us_connection: str = None ds_connection: str = None @@ -626,23 +1002,59 @@ def reach_nodes(self): return search_contents(self.ras_data, "Type RM Length L Ch R ", expect_one=False) @property + @lru_cache def cross_sections(self): """Cross sections.""" - cross_sections = {} + cross_sections, bridge_xs = [], [] for header in self.reach_nodes: type, _, _, _, _ = header.split(",")[:5] + + if int(type) in [2, 3, 4]: + bridge_xs = bridge_xs[::-2] + [4, 3] if int(type) != 1: continue + if len(bridge_xs) == 0: + bridge_xs = [0] + else: + bridge_xs.append(max([0, bridge_xs[-1] - 1])) 
xs_lines = text_block_from_start_end_str( f"Type RM Length L Ch R ={header}", ["Type RM Length L Ch R", "River Reach"], self.ras_data, ) - cross_section = XS(xs_lines, self.river_reach, self.river, self.reach, self.crs) - cross_sections[cross_section.river_reach_rs] = cross_section + cross_sections.append( + XS(xs_lines, self.river_reach, self.river, self.reach, self.crs, self.geom, self.units) + ) + + cross_sections = self.add_bridge_xs(cross_sections, bridge_xs) + cross_sections = self.compute_multi_xs_variables(cross_sections) return cross_sections + def add_bridge_xs(self, cross_sections, bridge_xs): + """Add bridge cross sections attribute to the cross sections.""" + updated_xs = [] + for xs, br_xs in zip(cross_sections, bridge_xs): + xs.set_bridge_xs(br_xs) + updated_xs.append(xs) + return updated_xs + + def compute_multi_xs_variables(self, cross_sections: list) -> dict: + """Compute variables that depend on multiple cross sections. + + Set the thalweg drop, computed channel reach length and computed channel reach length + ratio between a cross section and the cross section downstream. 
+ """ + ds_thalweg = cross_sections[-1].thalweg + updated_xs = [cross_sections[-1]] + for xs in cross_sections[::-1][1:]: + xs.set_thalweg_drop(ds_thalweg) + xs.set_computed_reach_length(updated_xs[-1].computed_river_station) + xs.set_computed_reach_length_ratio() + updated_xs.append(xs) + ds_thalweg = xs.thalweg + return {xs.river_reach_rs: xs for xs in updated_xs[::-1]} + @property def structures(self): """Structures.""" @@ -655,7 +1067,7 @@ def structures(self): ["Type RM Length L Ch R", "River Reach"], self.ras_data, ) - cross_section = XS(xs_lines, self.river_reach, self.river, self.reach, self.crs) + cross_section = XS(xs_lines, self.river_reach, self.river, self.reach, self.crs, self.geom, self.units) continue elif int(type) in [2, 3, 4, 5, 6]: # culvert or bridge or multiple openeing structure_lines = text_block_from_start_end_str( @@ -674,11 +1086,17 @@ def structures(self): return structures @property + def geom(self): + """Geometry of the reach.""" + return LineString(self.coords) + + @property + @lru_cache def gdf(self): """Reach geodataframe.""" return gpd.GeoDataFrame( { - "geometry": [LineString(self.coords)], + "geometry": [self.geom], "river": [self.river], "reach": [self.reach], "river_reach": [self.river_reach], @@ -691,11 +1109,13 @@ def gdf(self): ) @property + @lru_cache def xs_gdf(self): """Cross section geodataframe.""" return pd.concat([xs.gdf for xs in self.cross_sections.values()]) @property + @lru_cache def structures_gdf(self): """Structures geodataframe.""" return pd.concat([structure.gdf for structure in self.structures.values()]) @@ -778,6 +1198,7 @@ def junction_lengths(self): return ",".join(self.split_lines(search_contents(self.ras_data, "Junc L&A", expect_one=False), ",", 0)) @property + @lru_cache def gdf(self): """Junction geodataframe.""" return gpd.GeoDataFrame( From 5e441f587c488ae2c87bb02aa8b303361c686c78 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Dec 2024 13:41:14 -0600 Subject: [PATCH 46/46] identify xs 
with lateral structures and xs that don't intersect the reaches once --- ripple1d/data_model.py | 153 +++++++++++++++++++++++++++++++---------- ripple1d/ras.py | 37 +++++++++- 2 files changed, 153 insertions(+), 37 deletions(-) diff --git a/ripple1d/data_model.py b/ripple1d/data_model.py index 6d6a4883..b2126680 100644 --- a/ripple1d/data_model.py +++ b/ripple1d/data_model.py @@ -384,6 +384,7 @@ def __init__( self.computed_channel_reach_length = None self.computed_channel_reach_length_ratio = None self.units = units + self.has_lateral_structures = False def split_xs_header(self, position: int): """ @@ -506,26 +507,45 @@ def geom(self): @property def banks_encompass_channel(self): """A boolean; True if the channel centerlien intersects the cross section between the bank stations.""" - if ( - self.centerline_intersection_station < self.right_bank_station - and self.centerline_intersection_station > self.left_bank_station - ): - return True - else: - return False + if self.cross_section_intersects_reach: + if ( + self.centerline_intersection_station < self.right_bank_station + and self.centerline_intersection_station > self.left_bank_station + ): + return True + else: + return False @property def centerline_intersection_station(self): """Station along the cross section where the centerline intersects it.""" - return self.geom.project(self.centerline_intersection_point) * self.unit_conversion + if self.cross_section_intersects_reach: + return self.geom.project(self.centerline_intersection_point) * self.unit_conversion + + @property + def intersects_reach_once(self): + """A boolean indicating if the cross section intersects the reach only once.""" + if isinstance(self.centerline_intersection_point, LineString): + return False + elif self.centerline_intersection_point is None: + return False + elif isinstance(self.centerline_intersection_point, Point): + return True + else: + raise TypeError( + f"Unexpected type resulting from intersecting cross section and reach; 
expected Point or LineString; recieved: {type(self.centerline_intersection_point)}. {self.river_reach_rs}" + ) + + @property + def cross_section_intersects_reach(self): + """Determine if the cross section intersects the reach, if not return False, otherwise return True.""" + return self.reach_geom.intersects(self.geom) @property def centerline_intersection_point(self): """A point located where the cross section and reach centerline intersect.""" - if self.reach_geom.intersects(self.geom): + if self.cross_section_intersects_reach: return self.reach_geom.intersection(self.geom) - else: - raise IndexError(f"The cross section does not intersect the reach: {self.river_reach_rs}") @property def left_reach_length_ratio(self): @@ -711,17 +731,23 @@ def channel_depth(self): @property def htab_min_elevation(self): """The starting elevation for the cross section's htab.""" - return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[0] + result = search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=False) + if len(result) == 1: + return result[0].split(",")[0] @property def htab_min_increment(self): """The increment for the cross section's htab.""" - return search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[1] + result = search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=False) + if len(result) == 1: + return result[0].split(",")[1] @property def htab_points(self): """The number of points on the cross section's htab.""" - search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=True).split(",")[2] + result = search_contents(self.ras_data, "XS HTab Starting El and Incr", expect_one=False) + if len(result) == 1: + return result[0].split(",")[2] def set_thalweg_drop(self, ds_thalweg): """Set the drop in thalweg elevation between this cross section and the downstream cross section.""" @@ -746,27 +772,30 @@ def computed_river_station(self): 
@lru_cache def correct_cross_section_direction(self): """A boolean indicating if the cross section is drawn from right to left looking downstream.""" - offset = self.geom.offset_curve(-1) - if self.reach_geom.intersects(offset): # if the offset line intersects then use this logic - point = self.reach_geom.intersection(offset) - point = validate_point(point) - - offset_rs = self.reach_geom.project(point) - if self.computed_river_station > offset_rs: - return True - else: - return False - else: # if the original offset line did not intersect then try offsetting the other direction and applying - # the opposite stationing logic; the orginal line may have gone beyound the other line. - offset = self.geom.offset_curve(1) - point = self.reach_geom.intersection(offset) - point = validate_point(point) - - offset_rs = self.reach_geom.project(point) - if self.computed_river_station < offset_rs: - return True - else: - return False + if self.cross_section_intersects_reach: + offset = self.geom.offset_curve(-1) + if self.reach_geom.intersects(offset): # if the offset line intersects then use this logic + point = self.reach_geom.intersection(offset) + point = validate_point(point) + + offset_rs = self.reach_geom.project(point) + if self.computed_river_station > offset_rs: + return True + else: + return False + else: # if the original offset line did not intersect then try offsetting the other direction and applying + # the opposite stationing logic; the orginal line may have gone beyound the other line. 
+ offset = self.geom.offset_curve(1) + point = self.reach_geom.intersection(offset) + point = validate_point(point) + + offset_rs = self.reach_geom.project(point) + if self.computed_river_station < offset_rs: + return True + else: + return False + else: + return False @property @lru_cache @@ -834,7 +863,7 @@ def gdf(self): "skew": [self.skew], "max_n": [self.max_n], "min_n": [self.min_n], - # "has_lateral_structure": [self.has_lateral_structures], + "has_lateral_structure": [self.has_lateral_structures], "has_ineffective": [self.has_ineffectives], "has_levees": [self.has_levees], "has_blocks": [self.has_blocks], @@ -855,6 +884,8 @@ def gdf(self): "contraction_coefficient": [self.contraction_coefficient], "centerline_intersection_station": [self.centerline_intersection_station], "bridge_xs": [self.bridge_xs], + "cross_section_intersects_reach": [self.cross_section_intersects_reach], + "intersects_reach_once": [self.intersects_reach_once], }, crs=self.crs, geometry="geometry", @@ -883,11 +914,61 @@ def split_structure_header(self, position: int): return header.split(",")[position] + @property + def number_of_station_elevation_points(self): + """The number of station elevation points.""" + return int(search_contents(self.ras_data, "Lateral Weir SE", expect_one=True)) + + @property + def station_elevation_points(self): + """Station elevation points.""" + try: + lines = text_block_from_start_str_length( + f"Lateral Weir SE= {self.number_of_station_elevation_points} ", + math.ceil(self.number_of_station_elevation_points / 5), + self.ras_data, + ) + return data_pairs_from_text_block(lines, 16) + except ValueError as e: + return None + + @property + def weir_length(self): + """The length weir.""" + if self.type == 6: + return float(list(zip(*self.station_elevation_points))[0][-1]) + + @property + def dowstream_river_station(self): + """The dowstream river station based on the up stream river station and the length of the weir.""" + if self.type == 6: + return 
self.river_station + self.weir_length + @property def river_station(self): """Structure river station.""" return float(self.split_structure_header(1)) + @property + def tail_water_river(self): + """The tail water reache's river name.""" + return search_contents(self.ras_data, "Lateral Weir End", expect_one=True).split(",")[0] + + @property + def tail_water_reach(self): + """The tail water reache's reach name.""" + return search_contents(self.ras_data, "Lateral Weir End", expect_one=True).split(",")[1] + + @property + def tail_water_river_us_station(self): + """The tail water reache's river stationing.""" + return float(search_contents(self.ras_data, "Lateral Weir End", expect_one=True).split(",")[2]) + + @property + def tail_water_river_ds_station(self): + """The tail water reache's river stationing.""" + return self.tail_water_river_us_station + self.weir_length + @property def type(self): """Structure type.""" diff --git a/ripple1d/ras.py b/ripple1d/ras.py index 2796ce41..55194950 100644 --- a/ripple1d/ras.py +++ b/ripple1d/ras.py @@ -8,6 +8,7 @@ import subprocess import time import warnings +from functools import lru_cache from pathlib import Path from typing import List @@ -929,13 +930,44 @@ def structures(self) -> dict: return structures + def determine_lateral_structure_xs(self, xs_gdf): + """ + Determine if the cross sections are connected to lateral structure. + + Determine if the cross sections are connected to lateral structures, + if they are update 'has_lateral_structures' to True. 
+ """ + for structure in self.structures.values(): + if int(structure.type) == 6: + try: + xs_gdf.loc[ + (xs_gdf["river"] == structure.river) + & (xs_gdf["reach"] == structure.reach) + & (xs_gdf["river_station"] > structure.dowstream_river_station) + & (xs_gdf["river_station"] < structure.river_station), + "has_lateral_structures", + ] = True + + xs_gdf.loc[ + (xs_gdf["river"] == structure.tail_water_river) + & (xs_gdf["reach"] == structure.tail_water_reach) + & (xs_gdf["river_station"] > structure.tail_water_river_us_station) + & (xs_gdf["river_station"] < structure.tail_water_river_ds_station), + "has_lateral_structures", + ] = True + except IndexError as e: + pass + return xs_gdf + @property + @lru_cache @check_crs def reach_gdf(self): """A GeodataFrame of the reaches contained in the HEC-RAS geometry file.""" return pd.concat([reach.gdf for reach in self.reaches.values()], ignore_index=True) @property + @lru_cache @check_crs def junction_gdf(self): """A GeodataFrame of the junctions contained in the HEC-RAS geometry file.""" @@ -946,12 +978,15 @@ def junction_gdf(self): ) @property + @lru_cache @check_crs def xs_gdf(self): """Geodataframe of all cross sections in the geometry text file.""" - return pd.concat([xs.gdf for xs in self.cross_sections.values()], ignore_index=True) + gdf = pd.concat([xs.gdf for xs in self.cross_sections.values()], ignore_index=True) + return self.determine_lateral_structure_xs(gdf) @property + @lru_cache @check_crs def structures_gdf(self): """Geodataframe of all structures in the geometry text file."""