From 632e5d660994f66f90300310de2b39ef135191d7 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Thu, 30 May 2024 17:37:50 +0200 Subject: [PATCH 001/219] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20upgrading=20fastst?= =?UTF-8?q?ream=20repo=20wide=20(#5898)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- packages/aws-library/requirements/_base.txt | 2 +- .../service-library/requirements/_base.txt | 2 +- .../service-library/requirements/_test.txt | 1 + packages/simcore-sdk/requirements/_base.txt | 2 +- services/agent/requirements/_base.txt | 6 +++++- services/api-server/requirements/_base.txt | 18 ++++++------------ services/autoscaling/requirements/_base.txt | 2 +- services/catalog/requirements/_base.txt | 7 +++++-- .../clusters-keeper/requirements/_base.txt | 2 +- services/dask-sidecar/requirements/_base.txt | 2 +- .../datcore-adapter/requirements/_base.txt | 18 ++++++------------ services/director-v2/requirements/_base.txt | 2 +- .../dynamic-scheduler/requirements/_base.txt | 18 ++++++------------ .../dynamic-sidecar/requirements/_base.txt | 4 ++-- services/invitations/requirements/_base.txt | 6 ++---- services/payments/requirements/_base.txt | 6 ++---- .../requirements/_base.txt | 6 ++---- services/storage/requirements/_base.txt | 18 ++++++------------ services/web/server/requirements/_base.txt | 17 +++++++++-------- services/web/server/requirements/_test.txt | 2 +- services/web/server/requirements/_tools.txt | 2 +- tests/swarm-deploy/requirements/_test.txt | 2 +- 22 files changed, 62 insertions(+), 83 deletions(-) diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 42293ad4b0f..519620748c1 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -65,7 +65,7 @@ exceptiongroup==1.2.1 # via anyio fast-depends==2.4.2 # via faststream -faststream==0.5.4 +faststream==0.5.10 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 6a67b9e171b..719b18489e6 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -43,7 +43,7 @@ exceptiongroup==1.2.1 # via anyio fast-depends==2.4.2 # via faststream -faststream==0.5.4 +faststream==0.5.10 # via -r requirements/_base.in frozenlist==1.4.1 # via diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 94ad236bb6b..d3f328b3b43 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -134,6 +134,7 @@ pathable==0.4.3 pluggy==1.5.0 # via pytest psutil==5.9.8 + # via -r requirements/_test.in py-cpuinfo==9.0.0 # via pytest-benchmark pytest==8.2.0 diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index efd06adf883..50018478759 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -61,7 +61,7 @@ exceptiongroup==1.2.1 # via anyio fast-depends==2.4.2 # via faststream -faststream==0.5.4 +faststream==0.5.10 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 
39b41b8bddf..0bd92dd993c 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -141,8 +141,11 @@ rich==13.4.2 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer setuptools==69.2.0 # via jsonschema +shellingham==1.5.4 + # via typer six==1.16.0 # via # jsonschema @@ -160,7 +163,7 @@ starlette==0.27.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi -typer==0.6.1 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -168,6 +171,7 @@ typing-extensions==4.4.0 # via # aiodocker # pydantic + # typer uvicorn==0.19.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index ce00939855a..6588ceaaa78 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -103,7 +103,7 @@ cffi==1.16.0 # via cryptography click==8.1.7 # via - # typer-slim + # typer # uvicorn cryptography==42.0.5 # via @@ -156,7 +156,7 @@ fastapi-pagination==0.12.17 # via # -c requirements/./constraints.txt # -r requirements/_base.in -faststream==0.4.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -400,11 +400,11 @@ rich==13.7.1 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in - # typer-slim + # typer setuptools==69.2.0 # via jsonschema shellingham==1.5.4 - # via typer-slim + # via typer six==1.16.0 # via # jsonschema @@ -468,7 +468,7 @@ tqdm==4.66.2 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.12.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -477,12 +477,6 @@ typer==0.12.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # faststream -typer-cli==0.12.0 - # via typer -typer-slim==0.12.0 - # via - # typer - # typer-cli types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.10.0 @@ -496,7 +490,7 @@ typing-extensions==4.10.0 # faststream # pint # pydantic - # typer-slim + # typer # uvicorn ujson==5.9.0 # via diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index e15fb386b3f..4ac010d1e2a 100644 --- 
a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -145,7 +145,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index 76640216c02..38aee2dbd37 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -97,7 +97,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.2 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -272,10 +272,13 @@ rich==13.7.1 # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer rpds-py==0.18.0 # via # jsonschema # referencing +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 @@ -316,7 +319,7 @@ tqdm==4.66.2 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.10.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 82335b7b877..ec17fdb3216 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -142,7 +142,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index 36b72e7be33..25bb9353644 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -92,7 +92,7 @@ exceptiongroup==1.2.1 # via anyio fast-depends==2.4.2 # via faststream -faststream==0.5.7 +faststream==0.5.10 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index 5f69f38d55d..3a2d6651c9c 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -71,7 +71,7 @@ certifi==2024.2.2 # httpx click==8.1.7 # via - # typer-slim + # 
typer # uvicorn dnspython==2.6.1 # via email-validator @@ -95,7 +95,7 @@ fastapi==0.99.1 # prometheus-fastapi-instrumentator fastapi-pagination==0.12.21 # via -r requirements/_base.in -faststream==0.4.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -236,7 +236,7 @@ rich==13.7.1 # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in - # typer-slim + # typer rpds-py==0.18.0 # via # jsonschema @@ -244,7 +244,7 @@ rpds-py==0.18.0 s3transfer==0.10.1 # via boto3 shellingham==1.5.4 - # via typer-slim + # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 @@ -272,18 +272,12 @@ tqdm==4.66.2 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # faststream -typer-cli==0.12.0 - # via typer -typer-slim==0.12.0 - # via - # typer - # typer-cli types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.10.0 @@ -295,7 +289,7 @@ typing-extensions==4.10.0 # fastapi-pagination # faststream # pydantic - # typer-slim + # typer # uvicorn urllib3==2.2.1 # via diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 630b0f9ae2c..fbfb162394f 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -173,7 +173,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index f0ded2ee17a..9165ad1f0f6 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -69,7 +69,7 @@ certifi==2024.2.2 # httpx click==8.1.7 # via - # typer-slim + # typer # uvicorn dnspython==2.6.1 # via email-validator @@ -91,7 +91,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.4.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -240,13 +240,13 @@ rich==13.7.1 # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r 
requirements/../../../packages/settings-library/requirements/_base.in - # typer-slim + # typer rpds-py==0.18.0 # via # jsonschema # referencing shellingham==1.5.4 - # via typer-slim + # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 @@ -286,19 +286,13 @@ tqdm==4.66.2 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # faststream -typer-cli==0.12.0 - # via typer -typer-slim==0.12.0 - # via - # typer - # typer-cli types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.10.0 @@ -310,7 +304,7 @@ typing-extensions==4.10.0 # fastapi # faststream # pydantic - # typer-slim + # typer # uvicorn uvicorn==0.29.0 # via diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index 761cfa24fb5..ae231dd3f35 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -135,7 +135,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.2 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -423,7 +423,7 @@ tqdm==4.66.2 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.12.1 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index 2c50eb1246f..2659a2c327b 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -66,8 +66,6 @@ click==8.1.7 # via # typer # uvicorn -colorama==0.4.6 - # via typer cryptography==42.0.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -96,7 +94,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.2 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -258,7 +256,7 @@ tqdm==4.66.2 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.10.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index 1b117b803b2..7a88fa94700 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -77,8 +77,6 @@ click==8.1.7 # via # typer # uvicorn -colorama==0.4.6 - # via typer cryptography==42.0.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -111,7 +109,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.2 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -347,7 +345,7 @@ tqdm==4.66.2 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.10.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 24f44c39845..9165d4a0d7f 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -113,8 +113,6 @@ click==8.1.7 # via # typer # uvicorn -colorama==0.4.6 - # via typer contourpy==1.2.0 # via matplotlib cycler==0.12.1 @@ -147,7 +145,7 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.2 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -459,7 +457,7 @@ tqdm==4.66.2 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.10.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index 7a7857c9878..cc8627570ff 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -120,7 +120,7 @@ certifi==2024.2.2 charset-normalizer==3.3.2 # via requests click==8.1.7 - # via typer-slim + # via typer dnspython==2.6.1 # via email-validator email-validator==2.1.1 @@ -129,7 +129,7 @@ exceptiongroup==1.2.1 # via anyio fast-depends==2.4.2 # via faststream -faststream==0.4.7 +faststream==0.5.10 # via # -c requirements/../../../packages/service-library/requirements/./_base.in # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -360,7 +360,7 @@ rich==13.7.1 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in - # typer-slim + # typer rpds-py==0.18.0 # via # jsonschema @@ -370,7 +370,7 @@ s3transfer==0.10.1 sh==2.0.6 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 - # via typer-slim + # via typer six==1.16.0 # via # isodate @@ -412,7 +412,7 @@ tqdm==4.66.2 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.0 +typer==0.12.3 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -421,12 +421,6 @@ typer==0.12.0 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # faststream -typer-cli==0.12.0 - # via typer -typer-slim==0.12.0 - # via - # typer - # typer-cli types-aiobotocore==2.12.1 # via # -r requirements/../../../packages/aws-library/requirements/_base.in @@ -447,7 +441,7 @@ typing-extensions==4.10.0 # anyio # faststream # pydantic - # typer-slim + # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index 3f3ada05f2c..08c5df91515 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -79,7 +79,7 @@ alembic==1.8.1 anyio==4.3.0 # via # fast-depends - # watchfiles + # faststream arrow==1.2.3 # via # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in @@ -166,7 +166,7 @@ faker==19.6.1 # via -r requirements/_base.in fast-depends==2.4.2 # via faststream -faststream==0.2.12 +faststream==0.5.10 # via # -c requirements/../../../../packages/service-library/requirements/./_base.in # -r requirements/../../../../packages/service-library/requirements/_base.in @@ -415,6 +415,7 @@ rich==13.4.2 # -r requirements/../../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # typer setproctitle==1.2.3 # via gunicorn setuptools==69.1.1 @@ -422,6 +423,8 @@ setuptools==69.1.1 # gunicorn # jsonschema # openapi-spec-validator +shellingham==1.5.4 + # via typer six==1.16.0 # via # isodate @@ -472,7 +475,7 @@ tqdm==4.64.0 # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in twilio==7.12.0 # via -r requirements/_base.in -typer==0.4.1 +typer==0.12.3 # via # -c 
requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -480,12 +483,14 @@ typer==0.4.1 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # faststream -typing-extensions==4.3.0 +typing-extensions==4.12.0 # via # aiodebug # aiodocker # anyio + # faststream # pydantic + # typer ujson==5.5.0 # via # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -520,10 +525,6 @@ urllib3==1.26.11 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt # requests -uvloop==0.19.0 - # via faststream -watchfiles==0.21.0 - # via faststream werkzeug==2.1.2 # via -r requirements/../../../../packages/service-library/requirements/_aiohttp.in yarl==1.5.1 diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 06f0c1e8171..552a44cf189 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -217,7 +217,7 @@ tomli==2.0.1 # coverage # mypy # pytest -typing-extensions==4.3.0 +typing-extensions==4.12.0 # via # -c requirements/_base.txt # mypy diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 988c32dbc5c..8da5f244bf0 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -88,7 +88,7 @@ tomlkit==0.12.4 # via pylint types-cachetools==5.3.0.7 # via -r requirements/_tools.in -typing-extensions==4.3.0 +typing-extensions==4.12.0 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 61b8a0933f9..3cb5ba49426 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -112,7 +112,7 @@ exceptiongroup==1.2.1 # pytest fast-depends==2.4.2 # via faststream -faststream==0.5.4 +faststream==0.5.10 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in From d5a90d7d3a8341eb4320b6c0a8039ceae5e73644 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 30 May 2024 19:15:01 +0200 Subject: [PATCH 002/219] =?UTF-8?q?=F0=9F=8E=A8=20Frontend:=20Redesign=20F?= =?UTF-8?q?ile=20Picker=20in=20App=20Mode=20=20(#5893)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/desktop/PanelView.js | 3 +- .../osparc/desktop/RadioCollapsibleViews.js | 61 ++++++++++ .../source/class/osparc/file/FileDrop.js | 46 ++++--- .../source/class/osparc/file/FilePicker.js | 114 +++++++++++------- .../source/class/osparc/ui/form/FileInput.js | 6 +- .../class/osparc/widget/CollapsibleView.js | 4 + 6 files changed, 167 insertions(+), 67 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/desktop/RadioCollapsibleViews.js diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/PanelView.js b/services/static-webserver/client/source/class/osparc/desktop/PanelView.js index 144585a2b2b..cb492aa97cc 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/PanelView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/PanelView.js @@ -14,7 +14,6 @@ * Ignacio Pascual (ignapas) ************************************************************************ */ -/* eslint-disable no-use-before-define */ /** * Display widget with a title bar and collapsible content. @@ -43,7 +42,7 @@ qx.Class.define("osparc.desktop.PanelView", { _applyContent: function(content, oldContent) { this.base(arguments, content, oldContent); - this._innerContainer.set({ + this.getInnerContainer().set({ appearance: "panelview-content" }); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/RadioCollapsibleViews.js b/services/static-webserver/client/source/class/osparc/desktop/RadioCollapsibleViews.js new file mode 100644 index 00000000000..3c0416e129f --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/desktop/RadioCollapsibleViews.js @@ -0,0 +1,61 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2018 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.desktop.RadioCollapsibleViews", { + extend: qx.core.Object, + + /** + * @param {Array} collapsibleViews array of osparc.widget.CollapsibleView + */ + construct: function(collapsibleViews = []) { + this.base(arguments); + + this.__collapsibleViews = []; + + collapsibleViews.forEach(cv => this.addCollapsibleView(cv)); + }, + + members: { + __collapsibleViews: null, + + /** + * @param {osparc.widget.CollapsibleView | osparc.desktop.PanelView} collapsibleView + */ + addCollapsibleView: function(collapsibleView) { + this.__collapsibleViews.push(collapsibleView); + + collapsibleView.addListener("changeCollapsed", e => { + const collapsed = e.getData(); + if (collapsed === false) { + // close the other views + const idx = this.__collapsibleViews.indexOf(collapsibleView); + this.__collapsibleViews.forEach((cv, idx2) => { + if (idx !== idx2) { + cv.setCollapsed(true); + } + }) + } + }, this); + }, + + openCollapsibleView: function(idx = 0) { + if (idx < this.__collapsibleViews.length) { + this.__collapsibleViews[idx].setCollapsed(false); + } + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/file/FileDrop.js b/services/static-webserver/client/source/class/osparc/file/FileDrop.js index 78da05d21e6..6f7adb2bd82 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileDrop.js +++ b/services/static-webserver/client/source/class/osparc/file/FileDrop.js @@ -51,28 +51,16 @@ qx.Class.define("osparc.file.FileDrop", { } msg += ""; - const dropHere = this.__dropHere = new qx.ui.basic.Label(msg).set({ + const dropHereMessage = this.__dropHereMessage = new qx.ui.basic.Label(msg).set({ font: "text-14", rich: true, alignX: "center", alignY: "middle" }); - this._add(dropHere, { - top: 40, - left: 40 - }); + this._add(dropHereMessage); - const centerDropHere = () => { - // center it - const dropHereBounds = dropHere.getBounds() || dropHere.getSizeHint(); - const fileDropBounds = this.getBounds() ||
this.getSizeHint(); - dropHere.setLayoutProperties({ - top: parseInt((fileDropBounds.height - dropHereBounds.height) / 2), - left: parseInt((fileDropBounds.width - dropHereBounds.width) / 2) - }); - }; - dropHere.addListener("appear", centerDropHere); - this.addListener("resize", centerDropHere); + dropHereMessage.addListener("appear", () => this.__centerDropHereMessage(), this); + this.addListener("resize", () => this.__centerDropHereMessage(), this); const svgLayer = this.__svgLayer = new osparc.workbench.SvgWidget(); this._add(svgLayer, { @@ -123,7 +111,7 @@ qx.Class.define("osparc.file.FileDrop", { members: { __svgLayer: null, - __dropHere: null, + __dropHereMessage: null, __dropMe: null, __isDraggingFile: null, __isDraggingLink: null, @@ -134,7 +122,25 @@ qx.Class.define("osparc.file.FileDrop", { }, __applyShowDropHere: function(value) { - this.__dropHere.setVisibility(value ? "visible" : "excluded"); + this.__dropHereMessage.setVisibility(value ? "visible" : "excluded"); + }, + + __centerDropHereMessage: function() { + const dropHere = this.__dropHereMessage; + // center it + const dropHereBounds = dropHere.getBounds() || dropHere.getSizeHint(); + const fileDropBounds = this.getBounds() || this.getSizeHint(); + dropHere.setLayoutProperties({ + top: parseInt((fileDropBounds.height - dropHereBounds.height) / 2), + left: parseInt((fileDropBounds.width - dropHereBounds.width) / 2) + }); + }, + + setDropHereMessage: function(msg) { + this.__dropHereMessage.set({ + value: msg + }); + this.__centerDropHereMessage(); }, resetDropAction: function() { @@ -212,11 +218,11 @@ qx.Class.define("osparc.file.FileDrop", { __updateWidgets: function(dragging, posX, posY) { if (dragging) { - this.__dropHere.exclude(); + this.__dropHereMessage.exclude(); this.__updateDropMe(posX, posY); } else { if (this.getShowDropHere()) { - this.__dropHere.show(); + this.__dropHereMessage.show(); } this.__hideDropMe(); } diff --git a/services/static-webserver/client/source/class/osparc/file/FilePicker.js b/services/static-webserver/client/source/class/osparc/file/FilePicker.js index 8267ae6e1c7..573d599f6ae 100644 --- a/services/static-webserver/client/source/class/osparc/file/FilePicker.js +++ b/services/static-webserver/client/source/class/osparc/file/FilePicker.js @@ -284,26 +284,17 @@ qx.Class.define("osparc.file.FilePicker", { __buildLayout: function() { this._removeAll(); - const isWorkbenchContext = this.getPageContext() === "workbench"; const hasOutput = osparc.file.FilePicker.hasOutputAssigned(this.getNode().getOutputs()); - if (isWorkbenchContext) { - if (hasOutput) { - // WORKBENCH mode WITH output - this.__buildInfoLayout(); - } else { - // WORKBENCH mode WITHOUT output - this.__addProgressBar(); - this.__buildNoFileWBLayout(); - } + if (hasOutput) { + this.__buildInfoLayout(); } else { - this.setMargin(10); - if (hasOutput) { - // APP mode WITH output - this.__buildInfoLayout(); + this.__addProgressBar(); + const isWorkbenchContext = this.getPageContext() === "workbench"; + if (isWorkbenchContext) { + this.__buildWorkbenchLayout(); } else { - // APP mode WITHOUT output - this.__addProgressBar(); - this.__buildNoFileAppLayout(); + this.setMargin(10); + this.__buildAppModeLayout(); } } }, @@ -312,6 +303,7 @@ qx.Class.define("osparc.file.FilePicker", { const progressLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ alignY: "middle" })); + progressLayout.alwaysEnabled = true; const progressBar = new qx.ui.indicator.ProgressBar(); const nodeStatus = this.getNode().getStatus(); @@ -341,7 
+333,7 @@ qx.Class.define("osparc.file.FilePicker", { const uploading = (validProgress > 0 && validProgress < 100); progressLayout.setVisibility(uploading ? "visible" : "excluded"); this._getChildren().forEach(child => { - if (child !== progressLayout) { + if (!child.alwaysEnabled) { child.setEnabled(!uploading); } }); @@ -390,19 +382,6 @@ qx.Class.define("osparc.file.FilePicker", { return resetFileBtn; }, - __buildNoFileWBLayout: function() { - const uploadFileSection = this.__getUploadFileSection(); - this._add(uploadFileSection); - - const fileDrop = this.__getFileDropSection(); - this._add(fileDrop, { - flex: 1 - }); - - const downloadLinkSection = this.__getDownloadLinkSection(); - this._add(downloadLinkSection); - }, - __getUploadFileSection: function() { const uploadFileSection = new osparc.ui.form.FileInput(); uploadFileSection.addListener("selectionChanged", () => { @@ -453,9 +432,7 @@ qx.Class.define("osparc.file.FilePicker", { }, __getDownloadLinkSection: function() { - const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ - alignY: "middle" - })); + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); layout.add(new qx.ui.basic.Label(this.tr("Provide Link"))); @@ -473,12 +450,25 @@ qx.Class.define("osparc.file.FilePicker", { return layout; }, - __buildNoFileAppLayout: function() { - let msg = this.tr("In order to Select a file you have three options:"); + __buildWorkbenchLayout: function() { + const uploadFileSection = this.__getUploadFileSection(); + this._add(uploadFileSection); + + const fileDrop = this.__getFileDropSection(); + this._add(fileDrop, { + flex: 1 + }); + + const downloadLinkSection = this.__getDownloadLinkSection(); + this._add(downloadLinkSection); + }, + + __buildAppModeLayout: function() { + let msg = this.tr("In order to Select a File you have three options:"); const options = [ - this.tr("- Upload a file"), - this.tr("- Select a file from tree"), - this.tr("- Provide Link") + this.tr("- Upload a New File"), + this.tr("- Provide a File Link"), + this.tr("- Select a File from other ") + osparc.product.Utils.getStudyAlias() ]; for (let i=0; i { + cv.getInnerContainer().set({ + margin: contentMargin + }); + this._add(cv, { + flex: 1 + }); + radioCollapsibleViews.addCollapsibleView(cv); + }); + radioCollapsibleViews.openCollapsibleView(0); }, __getFileBrowserLayout: function() { diff --git a/services/static-webserver/client/source/class/osparc/ui/form/FileInput.js b/services/static-webserver/client/source/class/osparc/ui/form/FileInput.js index a0cec55c13c..d0536a5a060 100644 --- a/services/static-webserver/client/source/class/osparc/ui/form/FileInput.js +++ b/services/static-webserver/client/source/class/osparc/ui/form/FileInput.js @@ -34,7 +34,7 @@ qx.Class.define("osparc.ui.form.FileInput", { }); this.getContentElement().add(this.__input); - this.__selectBtn = new qx.ui.form.Button(this.tr("Select a file...")); + this.__selectBtn = new qx.ui.form.Button(this.tr("Select File...")); this._add(this.__selectBtn); this.__selectedFiles = new qx.ui.basic.Label(); @@ -63,6 +63,10 @@ qx.Class.define("osparc.ui.form.FileInput", { __selectBtn: null, __selectedFiles: null, + getButton: function() { + return this.__selectBtn; + }, + __attachEventHandlers: function() { this.__input.addListener("change", () => { const fileNames = []; diff --git a/services/static-webserver/client/source/class/osparc/widget/CollapsibleView.js b/services/static-webserver/client/source/class/osparc/widget/CollapsibleView.js index 
391b753ec36..619b051e78f 100644 --- a/services/static-webserver/client/source/class/osparc/widget/CollapsibleView.js +++ b/services/static-webserver/client/source/class/osparc/widget/CollapsibleView.js @@ -221,6 +221,10 @@ qx.Class.define("osparc.widget.CollapsibleView", { } }, + getInnerContainer: function() { + return this._innerContainer; + }, + _applyCaretSize: function(size) { this.getChildControl("caret").setSource(this.__getCaretId(this.getCollapsed())); }, From 08c999af52c8df5f7308cd93d0c8c74ea43deee3 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 30 May 2024 21:12:59 +0200 Subject: [PATCH 003/219] =?UTF-8?q?=F0=9F=8E=A8=20Frontend:=20Credits=20in?= =?UTF-8?q?dicator=20only=20visible=20in=20warning=20zone=20by=20default?= =?UTF-8?q?=20(=F0=9F=9A=A8)=20(#5897)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/Preferences.js | 2 +- .../class/osparc/desktop/preferences/pages/GeneralPage.js | 6 +++--- .../client/source/class/osparc/info/StudyLarge.js | 3 +-- .../source/class/osparc/metadata/ClassifiersEditor.js | 4 ++-- .../client/source/class/osparc/metadata/QualityEditor.js | 2 +- .../source/class/osparc/navigation/CreditsMenuButton.js | 7 ++----- .../simcore_service_webserver/users/_preferences_models.py | 2 +- 7 files changed, 11 insertions(+), 15 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/Preferences.js b/services/static-webserver/client/source/class/osparc/Preferences.js index bf105ccb43c..6d5dea18b5d 100644 --- a/services/static-webserver/client/source/class/osparc/Preferences.js +++ b/services/static-webserver/client/source/class/osparc/Preferences.js @@ -95,7 +95,7 @@ qx.Class.define("osparc.Preferences", { walletIndicatorVisibility: { check: ["always", "warning"], nullable: false, - init: "always", + init: "warning", event: "changeWalletIndicatorVisibility", apply: "__patchPreference" }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js index 45bf60f1d84..21b09014e9c 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js @@ -55,9 +55,9 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { const lItem = new qx.ui.form.ListItem(options.label, null, options.id); walletIndicatorVisibilitySB.add(lItem); }); - const value2 = preferencesSettings.getWalletIndicatorVisibility(); + const value = preferencesSettings.getWalletIndicatorVisibility(); walletIndicatorVisibilitySB.getSelectables().forEach(selectable => { - if (selectable.getModel() === value2) { + if (selectable.getModel() === value) { walletIndicatorVisibilitySB.setSelection([selectable]); } }); @@ -68,7 +68,7 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { form.add(walletIndicatorVisibilitySB, this.tr("Show indicator")); const creditsWarningThresholdField = new qx.ui.form.Spinner().set({ - minimum: 100, + minimum: 50, maximum: 10000, singleStep: 10, allowGrowX: false diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js index 93deaaa762f..fb9e4b942e5 100644 --- 
a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js @@ -39,8 +39,7 @@ qx.Class.define("osparc.info.StudyLarge", { }, events: { - "updateStudy": "qx.event.type.Data", - "updateService": "qx.event.type.Data" + "updateStudy": "qx.event.type.Data" }, properties: { diff --git a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js index 43f30bfc0cf..563bfe60a5c 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js @@ -99,8 +99,8 @@ qx.Class.define("osparc.metadata.ClassifiersEditor", { }, __createClassifiersTree: function() { - const studyData = this.__resourceData; - const classifiers = studyData.classifiers && studyData.classifiers ? studyData.classifiers : []; + const resourceData = this.__resourceData; + const classifiers = resourceData.classifiers && resourceData.classifiers ? resourceData.classifiers : []; const classifiersTree = this.__classifiersTree = new osparc.filter.ClassifiersFilter("classifiersEditor", "searchBarFilter", classifiers); osparc.store.Store.getInstance().addListener("changeClassifiers", e => { classifiersTree.recreateTree(); diff --git a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js index 92f072b5c9e..a153f3d5278 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js +++ b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js @@ -451,7 +451,7 @@ qx.Class.define("osparc.metadata.QualityEditor", { "tsr_target": this.__copyQualityData["tsr_target"] }; const patchData = { - "quality" : newQuality + "quality": newQuality }; if (this.__validate(this.__schema, patchData["quality"])) { btn.setFetching(true); diff --git a/services/static-webserver/client/source/class/osparc/navigation/CreditsMenuButton.js b/services/static-webserver/client/source/class/osparc/navigation/CreditsMenuButton.js index 463a2d12bc1..6cf7bf67e5b 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/CreditsMenuButton.js +++ b/services/static-webserver/client/source/class/osparc/navigation/CreditsMenuButton.js @@ -37,12 +37,8 @@ qx.Class.define("osparc.navigation.CreditsMenuButton", { this.getContentElement().setStyle("line-height", 1.2); const preferencesSettings = osparc.Preferences.getInstance(); - this.__computeVisibility(); preferencesSettings.addListener("changeWalletIndicatorVisibility", () => this.__computeVisibility()); - preferencesSettings.addListener("changeCreditsWarningThreshold", () => { - this.__computeVisibility(); - this.__updateCredits(); - }); + preferencesSettings.addListener("changeCreditsWarningThreshold", () => this.__updateCredits()); const store = osparc.store.Store.getInstance(); this.__contextWalletChanged(store.getContextWallet()); @@ -100,6 +96,7 @@ qx.Class.define("osparc.navigation.CreditsMenuButton", { textColor: osparc.desktop.credits.Utils.creditsToColor(creditsLeft, "text") }); } + this.__computeVisibility(); }, __computeVisibility: function() { diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_models.py b/services/web/server/src/simcore_service_webserver/users/_preferences_models.py index 
72dd0148e6e..01b6b87e377 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_models.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_models.py @@ -79,7 +79,7 @@ class CreditsWarningThresholdFrontendUserPreference(FrontendUserPreference): class WalletIndicatorVisibilityFrontendUserPreference(FrontendUserPreference): preference_identifier: PreferenceIdentifier = "walletIndicatorVisibility" - value: str | None = "always" + value: str | None = "warning" class UserInactivityThresholdFrontendUserPreference(FrontendUserPreference): From e28db90957c0589ded593cbfcd42e8835a6b7d5d Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Fri, 31 May 2024 09:36:21 +0200 Subject: [PATCH 004/219] =?UTF-8?q?=E2=9C=A8=20introducing=20elastic=20fil?= =?UTF-8?q?e=20system=20guardian=20(OPS=20=E2=9A=A0=EF=B8=8F)=20(#5887)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/CODEOWNERS | 1 + .github/workflows/ci-testing-deploy.yml | 60 ++++ Makefile | 1 + .../src/pytest_simcore/environment_configs.py | 2 +- .../src/pytest_simcore/simcore_services.py | 1 + .../simcore_service_director_v2/utils/dask.py | 2 +- services/docker-compose-build.yml | 16 ++ services/docker-compose-deploy.yml | 2 + services/docker-compose.devel.yml | 8 + services/docker-compose.local.yml | 8 + services/docker-compose.yml | 9 + services/efs-guardian/Dockerfile | 193 +++++++++++++ services/efs-guardian/Makefile | 5 + services/efs-guardian/README.md | 4 + services/efs-guardian/VERSION | 1 + services/efs-guardian/docker/boot.sh | 58 ++++ services/efs-guardian/docker/entrypoint.sh | 94 ++++++ services/efs-guardian/docker/healthcheck.py | 41 +++ services/efs-guardian/requirements/Makefile | 10 + services/efs-guardian/requirements/_base.in | 18 ++ services/efs-guardian/requirements/_base.txt | 175 ++++++++++++ services/efs-guardian/requirements/_test.in | 32 +++ services/efs-guardian/requirements/_test.txt | 267 ++++++++++++++++++ services/efs-guardian/requirements/_tools.in | 7 + services/efs-guardian/requirements/_tools.txt | 74 +++++ services/efs-guardian/requirements/ci.txt | 21 ++ .../efs-guardian/requirements/constraints.txt | 0 services/efs-guardian/requirements/dev.txt | 22 ++ services/efs-guardian/requirements/prod.txt | 18 ++ services/efs-guardian/setup.cfg | 13 + services/efs-guardian/setup.py | 69 +++++ .../simcore_service_efs_guardian/__init__.py | 3 + .../src/simcore_service_efs_guardian/_meta.py | 65 +++++ .../api/__init__.py | 0 .../api/rest/__init__.py | 0 .../api/rest/health.py | 18 ++ .../api/rest/routes.py | 17 ++ .../api/rpc/__init__.py | 0 .../api/rpc/rpc_routes.py | 22 ++ .../src/simcore_service_efs_guardian/cli.py | 24 ++ .../core/__init__.py | 0 .../core/application.py | 59 ++++ .../core/settings.py | 85 ++++++ .../exceptions/__init__.py | 5 + .../exceptions/_base.py | 8 + .../exceptions/custom_errors.py | 9 + .../exceptions/handlers/__init__.py | 7 + .../src/simcore_service_efs_guardian/main.py | 17 ++ .../services/__init__.py | 0 .../efs-guardian/tests/integration/.gitkeep | 0 services/efs-guardian/tests/unit/conftest.py | 117 ++++++++ .../tests/unit/test_api_health.py | 13 + services/efs-guardian/tests/unit/test_cli.py | 21 ++ .../tests/unit/test_core_settings.py | 12 + services/efs-guardian/tests/unit/test_main.py | 12 + tests/swarm-deploy/test_service_restart.py | 1 + 56 files changed, 1745 insertions(+), 2 deletions(-) create mode 100644 
services/efs-guardian/Dockerfile create mode 100644 services/efs-guardian/Makefile create mode 100644 services/efs-guardian/README.md create mode 100644 services/efs-guardian/VERSION create mode 100755 services/efs-guardian/docker/boot.sh create mode 100755 services/efs-guardian/docker/entrypoint.sh create mode 100755 services/efs-guardian/docker/healthcheck.py create mode 100644 services/efs-guardian/requirements/Makefile create mode 100644 services/efs-guardian/requirements/_base.in create mode 100644 services/efs-guardian/requirements/_base.txt create mode 100644 services/efs-guardian/requirements/_test.in create mode 100644 services/efs-guardian/requirements/_test.txt create mode 100644 services/efs-guardian/requirements/_tools.in create mode 100644 services/efs-guardian/requirements/_tools.txt create mode 100644 services/efs-guardian/requirements/ci.txt create mode 100644 services/efs-guardian/requirements/constraints.txt create mode 100644 services/efs-guardian/requirements/dev.txt create mode 100644 services/efs-guardian/requirements/prod.txt create mode 100644 services/efs-guardian/setup.cfg create mode 100755 services/efs-guardian/setup.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/_meta.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rest/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rest/health.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rest/routes.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/cli.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/core/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/core/application.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/exceptions/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/exceptions/custom_errors.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/exceptions/handlers/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/main.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/services/__init__.py create mode 100644 services/efs-guardian/tests/integration/.gitkeep create mode 100644 services/efs-guardian/tests/unit/conftest.py create mode 100644 services/efs-guardian/tests/unit/test_api_health.py create mode 100644 services/efs-guardian/tests/unit/test_cli.py create mode 100644 services/efs-guardian/tests/unit/test_core_settings.py create mode 100644 services/efs-guardian/tests/unit/test_main.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6f40095b947..e982fee49ac 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -29,6 +29,7 @@ Makefile @pcrespov @sanderegg /services/director*/ @sanderegg @pcrespov @GitHK /services/docker-compose*.yml @sanderegg @mrnicegyu11 @YuryHrytsuk 
/services/dynamic-sidecar/ @GitHK +/services/efs-guardian/ @matusdrobuliak66 /services/invitations/ @pcrespov /services/migration/ @pcrespov /services/osparc-gateway-server/ @sanderegg diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index d87c462c613..87c2ee0c7b2 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -70,6 +70,7 @@ jobs: director: ${{ steps.filter.outputs.director }} director-v2: ${{ steps.filter.outputs.director-v2 }} dynamic-sidecar: ${{ steps.filter.outputs.dynamic-sidecar }} + efs-guardian: ${{ steps.filter.outputs.efs-guardian }} invitations: ${{ steps.filter.outputs.invitations }} migration: ${{ steps.filter.outputs.migration }} osparc-gateway-server: ${{ steps.filter.outputs.osparc-gateway-server }} @@ -199,6 +200,12 @@ jobs: - 'services/docker-compose*' - 'scripts/mypy/*' - 'mypy.ini' + efs-guardian: + - 'packages/**' + - 'services/efs-guardian/**' + - 'services/docker-compose*' + - 'scripts/mypy/*' + - 'mypy.ini' invitations: - 'packages/**' - 'services/invitations/**' @@ -1207,6 +1214,58 @@ jobs: with: flags: unittests #optional + unit-test-efs-guardian: + needs: changes + if: ${{ needs.changes.outputs.efs-guardian == 'true' || github.event_name == 'push' }} + timeout-minutes: 18 # if this timeout gets too small, then split the tests + name: "[unit] efs-guardian" + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: ["3.10"] + os: [ubuntu-22.04] + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: setup docker buildx + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + - name: setup python environment + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: install uv + uses: yezz123/setup-uv@v4 + - uses: actions/cache@v4 + id: cache-uv + with: + path: ~/.cache/uv + key: ${{ runner.os }}-${{ github.job }}-python-${{ matrix.python }}-uv + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - name: install + run: | + make devenv + source .venv/bin/activate && \ + pushd services/efs-guardian && \ + make install-ci + - name: typecheck + run: | + source .venv/bin/activate && \ + pushd services/efs-guardian && \ + make mypy + - name: test + if: always() + run: | + source .venv/bin/activate && \ + pushd services/efs-guardian && \ + make test-ci-unit + - uses: codecov/codecov-action@v4.4.1 + with: + flags: unittests #optional + unit-test-frontend: needs: changes if: ${{ needs.changes.outputs.static-webserver == 'true' || github.event_name == 'push' }} @@ -1638,6 +1697,7 @@ jobs: unit-test-director-v2, unit-test-director, unit-test-dynamic-sidecar, + unit-test-efs-guardian, unit-test-frontend, unit-test-models-library, unit-test-notifications-library, diff --git a/Makefile b/Makefile index 6ec735dac95..6938cfc0afd 100644 --- a/Makefile +++ b/Makefile @@ -41,6 +41,7 @@ SERVICES_NAMES_TO_BUILD := \ director \ director-v2 \ dynamic-sidecar \ + efs-guardian \ invitations \ migration \ osparc-gateway-server \ diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index fa023cef28f..45dbc64ccb0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -11,7 +11,7 @@ from .helpers.utils_envs import delenvs_from_dict, load_dotenv, setenvs_from_dict -@pytest.fixture(scope="session") 
+@pytest.fixture(scope="session") # MD: get this, I will mock it with my app environmnet def env_devel_dict(env_devel_file: Path) -> EnvVarsDict: assert env_devel_file.exists() assert env_devel_file.name == ".env-devel" diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 1243d6914d6..51f278fbde4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -46,6 +46,7 @@ "datcore-adapter": "/v0/live", "director-v2": "/", "dynamic-schdlr": "/", + "efs-guardian": "/", "invitations": "/", "payments": "/", "resource-usage-tracker": "/", diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 5cb5d2b0f53..e78ad6f03d6 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -601,7 +601,7 @@ def check_if_cluster_is_able_to_run_pipeline( json_dumps(task_resources, indent=2), ) - if can_a_worker_run_task: + if can_a_worker_run_task: # OsparcErrorMixin return # check if we have missing resources diff --git a/services/docker-compose-build.yml b/services/docker-compose-build.yml index 779254c3951..e51ca8dbd7b 100644 --- a/services/docker-compose-build.yml +++ b/services/docker-compose-build.yml @@ -264,6 +264,22 @@ services: org.opencontainers.image.source: "${VCS_URL}" org.opencontainers.image.revision: "${VCS_REF}" + efs-guardian: + image: local/efs-guardian:${BUILD_TARGET:?build_target_required} + build: + context: ../ + dockerfile: services/efs-guardian/Dockerfile + cache_from: + - local/efs-guardian:${BUILD_TARGET:?build_target_required} + - ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:master-github-latest + - ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:staging-github-latest + - ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:release-github-latest + target: ${BUILD_TARGET:?build_target_required} + labels: + org.opencontainers.image.created: "${BUILD_DATE}" + org.opencontainers.image.source: "${VCS_URL}" + org.opencontainers.image.revision: "${VCS_REF}" + invitations: image: local/invitations:${BUILD_TARGET:?build_target_required} build: diff --git a/services/docker-compose-deploy.yml b/services/docker-compose-deploy.yml index a3ec02d7bc9..f8e306b0ed2 100644 --- a/services/docker-compose-deploy.yml +++ b/services/docker-compose-deploy.yml @@ -20,6 +20,8 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/director-v2:${DOCKER_IMAGE_TAG:-latest} dynamic-sidecar: image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-sidecar:${DOCKER_IMAGE_TAG:-latest} + efs-guardian: + image: ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:${DOCKER_IMAGE_TAG:-latest} invitations: image: ${DOCKER_REGISTRY:-itisfoundation}/invitations:${DOCKER_IMAGE_TAG:-latest} migration: diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index d25a007c005..840497e81e3 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -102,6 +102,14 @@ services: - ./director-v2:/devel/services/director-v2 - ../packages:/devel/packages + efs-guardian: + environment: + <<: *common-environment + EFS_GUARDIAN_LOGLEVEL: DEBUG + volumes: + - ./efs-guardian:/devel/services/efs-guardian + - ../packages:/devel/packages + static-webserver: volumes: - ./static-webserver/client/source-output:/static-content diff 
--git a/services/docker-compose.local.yml b/services/docker-compose.local.yml
index d9b50768459..b5859000ddb 100644
--- a/services/docker-compose.local.yml
+++ b/services/docker-compose.local.yml
@@ -70,6 +70,14 @@ services:
       - "8000"
       - "3010:3000"

+  efs-guardian:
+    environment:
+      <<: *common_environment
+      EFS_GUARDIAN_REMOTE_DEBUGGING_PORT: 3000
+    ports:
+      - "8013:8000"
+      - "3020:3000"
+
   invitations:
     environment:
       <<: *common_environment
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 3ef810a2a3d..30c0f7f724f 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -343,6 +343,15 @@ services:
       - computational_services_subnet
     secrets: *dask_tls_secrets

+  efs-guardian:
+    image: ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:${DOCKER_IMAGE_TAG:-latest}
+    init: true
+    hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
+    networks:
+      - default
+    environment:
+      LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED}
+
   invitations:
     image: ${DOCKER_REGISTRY:-itisfoundation}/invitations:${DOCKER_IMAGE_TAG:-latest}
     init: true
diff --git a/services/efs-guardian/Dockerfile b/services/efs-guardian/Dockerfile
new file mode 100644
index 00000000000..d1468f443f2
--- /dev/null
+++ b/services/efs-guardian/Dockerfile
@@ -0,0 +1,193 @@
+# syntax=docker/dockerfile:1
+ARG PYTHON_VERSION="3.10.10"
+FROM python:${PYTHON_VERSION}-slim-buster as base
+
+#
+#  USAGE:
+#     cd services/efs-guardian
+#     docker build -f Dockerfile -t efs-guardian:prod --target production ../../
+#     docker run efs-guardian:prod
+#
+#  REQUIRED: context expected at ``osparc-simcore/`` folder because we need access to osparc-simcore/packages
+
+LABEL maintainer=sanderegg
+
+# NOTE: to list the latest version run `make` inside `scripts/apt-packages-versions`
+ENV DOCKER_APT_VERSION="5:24.0.5-1~debian.10~buster"
+
+# for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/]
+RUN rm -f /etc/apt/apt.conf.d/docker-clean && \
+  echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
+RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
+  --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \
+  set -eux; \
+  apt-get update; \
+  apt-get install -y --no-install-recommends \
+  gosu \
+  ca-certificates \
+  curl \
+  gnupg \
+  lsb-release \
+  && mkdir -p /etc/apt/keyrings \
+  && curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg \
+  && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian \
+  $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null \
+  && apt-get update \
+  && apt-get install -y --no-install-recommends \
+  # only the cli is needed and we remove the unnecessary stuff again
+  docker-ce-cli=${DOCKER_APT_VERSION} \
+  && apt-get remove -y\
+  gnupg \
+  curl \
+  lsb-release \
+  && apt-get clean -y\
+  # verify that the binary works
+  && gosu nobody true
+
+# simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu)
+ENV SC_USER_ID=8004 \
+  SC_USER_NAME=scu \
+  SC_BUILD_TARGET=base \
+  SC_BOOT_MODE=default
+
+RUN adduser \
+  --uid ${SC_USER_ID} \
+  --disabled-password \
+  --gecos "" \
+  --shell /bin/sh \
+  --home /home/${SC_USER_NAME} \
+  ${SC_USER_NAME}
+
+
+# Sets utf-8 encoding for Python et al
+ENV LANG=C.UTF-8
+
+# Turns off writing .pyc files; superfluous on an ephemeral container.
+ENV PYTHONDONTWRITEBYTECODE=1 \
+  VIRTUAL_ENV=/home/scu/.venv
+
+# Ensures that the python and pip executables used in the image will be
+# those from our virtualenv.
+ENV PATH="${VIRTUAL_ENV}/bin:$PATH"
+
+EXPOSE 8000
+EXPOSE 3000
+
+# -------------------------- Build stage -------------------
+# Installs build/package management tools and third party dependencies
+#
+# + /build             WORKDIR
+#
+FROM base as build
+
+ENV SC_BUILD_TARGET=build
+
+RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
+  --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \
+  set -eux \
+  && apt-get update \
+  && apt-get install -y --no-install-recommends \
+  build-essential
+
+# NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
+RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
+  pip install uv~=0.1
+
+# NOTE: python virtualenv is used here such that installed
+# packages may be moved to production image easily by copying the venv
+RUN uv venv "${VIRTUAL_ENV}"
+
+RUN --mount=type=cache,mode=0755,target=/root/.cache/uv \
+  uv pip install --upgrade \
+  pip~=24.0 \
+  wheel \
+  setuptools
+
+WORKDIR /build
+
+# install base 3rd party dependencies
+# NOTE: copies to /build to avoid overwriting later which would invalidate this layer
+RUN \
+  --mount=type=bind,source=services/efs-guardian/requirements/_base.txt,target=_base.txt \
+  --mount=type=cache,mode=0755,target=/root/.cache/uv \
+  uv pip install \
+  --requirement _base.txt
+
+
+# --------------------------Prod-depends-only stage -------------------
+# This stage is for production only dependencies that get partially wiped out afterwards (final docker image concerns)
+#
+#  + /build
+#    + services/efs-guardian [scu:scu] WORKDIR
+#
+FROM build as prod-only-deps
+
+ENV SC_BUILD_TARGET=prod-only-deps
+
+WORKDIR /build/services/efs-guardian
+
+RUN \
+  --mount=type=bind,source=packages,target=/build/packages,rw \
+  --mount=type=bind,source=services/efs-guardian,target=/build/services/efs-guardian,rw \
+  --mount=type=cache,mode=0755,target=/root/.cache/uv \
+  uv pip install \
+  --requirement requirements/prod.txt \
+  && uv pip list
+
+
+# --------------------------Production stage -------------------
+# Final cleanup up to reduce image size and startup setup
+# Runs as scu (non-root user)
+#
+#  + /home/scu     $HOME = WORKDIR
+#    + services/efs-guardian [scu:scu]
+#
+FROM base as production
+
+ENV SC_BUILD_TARGET=production \
+  SC_BOOT_MODE=production
+
+ENV PYTHONOPTIMIZE=TRUE
+
+WORKDIR /home/scu
+# ensure home folder is read/writable for user scu
+RUN chown -R scu /home/scu
+
+# Starting from clean base image, copies pre-installed virtualenv from prod-only-deps
+COPY --chown=scu:scu --from=prod-only-deps ${VIRTUAL_ENV} ${VIRTUAL_ENV}
+
+# Copies booting scripts
+COPY --chown=scu:scu services/efs-guardian/docker services/efs-guardian/docker
+RUN chmod +x services/efs-guardian/docker/*.sh
+
+
+HEALTHCHECK --interval=10s \
+  --timeout=5s \
+  --start-period=30s \
+  --start-interval=1s \
+  --retries=5 \
+  CMD ["python3", "services/efs-guardian/docker/healthcheck.py", "http://localhost:8000/"]
+
+ENTRYPOINT [ "/bin/sh", "services/efs-guardian/docker/entrypoint.sh" ]
+CMD ["/bin/sh", "services/efs-guardian/docker/boot.sh"]
+
+
+# --------------------------Development stage -------------------
+# Source code accessible in host but runs in container
+# Runs as scu with same gid/uid as host
+# Placed at the end to speed up builds that target production images
+#
+#  + /devel WORKDIR
+#  + services (mounted
volume)
+#
+FROM build as development
+
+ENV SC_BUILD_TARGET=development \
+  SC_DEVEL_MOUNT=/devel/services/efs-guardian
+
+WORKDIR /devel
+
+RUN chown -R scu:scu "${VIRTUAL_ENV}"
+
+ENTRYPOINT ["/bin/sh", "services/efs-guardian/docker/entrypoint.sh"]
+CMD ["/bin/sh", "services/efs-guardian/docker/boot.sh"]
diff --git a/services/efs-guardian/Makefile b/services/efs-guardian/Makefile
new file mode 100644
index 00000000000..af13c225526
--- /dev/null
+++ b/services/efs-guardian/Makefile
@@ -0,0 +1,5 @@
+#
+# DEVELOPMENT recipes for efs-guardian service
+#
+include ../../scripts/common.Makefile
+include ../../scripts/common-service.Makefile
diff --git a/services/efs-guardian/README.md b/services/efs-guardian/README.md
new file mode 100644
index 00000000000..503bdb93b1b
--- /dev/null
+++ b/services/efs-guardian/README.md
@@ -0,0 +1,4 @@
+# efs-guardian
+
+
+Service to monitor and manage elastic file system
diff --git a/services/efs-guardian/VERSION b/services/efs-guardian/VERSION
new file mode 100644
index 00000000000..3eefcb9dd5b
--- /dev/null
+++ b/services/efs-guardian/VERSION
@@ -0,0 +1 @@
+1.0.0
diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh
new file mode 100755
index 00000000000..d4d046d5a14
--- /dev/null
+++ b/services/efs-guardian/docker/boot.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+
+INFO="INFO: [$(basename "$0")] "
+
+echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..."
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
+
+#
+# DEVELOPMENT MODE
+#
+# - prints environ info
+# - installs requirements in mounted volume
+#
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
+  echo "$INFO" "Environment :"
+  printenv | sed 's/=/: /' | sed 's/^/    /' | sort
+  echo "$INFO" "Python :"
+  python --version | sed 's/^/    /'
+  command -v python | sed 's/^/    /'
+
+  cd services/efs-guardian || exit 1
+  pip install uv
+  uv pip --quiet --no-cache-dir install -r requirements/dev.txt
+  cd - || exit 1
+  echo "$INFO" "PIP :"
+  uv pip list | sed 's/^/    /'
+fi
+
+#
+# RUNNING application
+#
+
+APP_LOG_LEVEL=${EFS_GUARDIAN_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}}
+SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]')
+echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL"
+
+if [ "${SC_BOOT_MODE}" = "debug" ]; then
+  reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;)
+
+  exec sh -c "
+    cd services/efs-guardian/src/simcore_service_efs_guardian && \
+    python -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \
+      --host 0.0.0.0 \
+      --reload \
+      $reload_dir_packages
+      --reload-dir .
\
+      --log-level \"${SERVER_LOG_LEVEL}\"
+    "
+else
+  exec uvicorn simcore_service_efs_guardian.main:the_app \
+    --host 0.0.0.0 \
+    --log-level "${SERVER_LOG_LEVEL}"
+fi
diff --git a/services/efs-guardian/docker/entrypoint.sh b/services/efs-guardian/docker/entrypoint.sh
new file mode 100755
index 00000000000..ac4bcf76085
--- /dev/null
+++ b/services/efs-guardian/docker/entrypoint.sh
@@ -0,0 +1,94 @@
+#!/bin/sh
+#
+# - Executes *inside* of the container upon start as --user [default root]
+# - Notice that the container *starts* as --user [default root] but
+#   *runs* as non-root user [scu]
+#
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+
+INFO="INFO: [$(basename "$0")] "
+WARNING="WARNING: [$(basename "$0")] "
+ERROR="ERROR: [$(basename "$0")] "
+
+# Read self-signed SSL certificates (if applicable)
+#
+# In case efs-guardian must access a docker registry in a secure way using
+# non-standard certificates (e.g. such as self-signed certificates), this call is needed.
+# It needs to be executed as root. It is also required for any access to, for example, a secured rabbitmq.
+update-ca-certificates
+
+echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
+echo "$INFO" "User : $(id scu)"
+echo "$INFO" "python : $(command -v python)"
+echo "$INFO" "pip : $(command -v pip)"
+
+#
+# DEVELOPMENT MODE
+# - expects docker run ... -v $(pwd):$SC_DEVEL_MOUNT
+# - mounts source folders
+# - deduces host's uid/gid and assigns to user within docker
+#
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
+  echo "$INFO" "development mode detected..."
+  stat "${SC_DEVEL_MOUNT}" >/dev/null 2>&1 ||
+    (echo "$ERROR" "You must mount '$SC_DEVEL_MOUNT' to deduce user and group ids" && exit 1)
+
+  echo "$INFO" "setting correct user id/group id..."
+  HOST_USERID=$(stat --format=%u "${SC_DEVEL_MOUNT}")
+  HOST_GROUPID=$(stat --format=%g "${SC_DEVEL_MOUNT}")
+  CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1)
+  if [ "$HOST_USERID" -eq 0 ]; then
+    echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..."
+    adduser "$SC_USER_NAME" root
+  else
+    echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+    # take host's credentials in $SC_USER_NAME
+    if [ -z "$CONT_GROUPNAME" ]; then
+      echo "$WARNING" "Creating new group grp$SC_USER_NAME"
+      CONT_GROUPNAME=grp$SC_USER_NAME
+      addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME"
+    else
+      echo "$INFO" "group already exists"
+    fi
+    echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..."
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + + echo "$WARNING" "Changing ownership [this could take some time]" + echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" + + echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fi +fi + + +# Appends docker group if socket is mounted +DOCKER_MOUNT=/var/run/docker.sock +if stat $DOCKER_MOUNT >/dev/null 2>&1; then + echo "$INFO detected docker socket is mounted, adding user to group..." + GROUPID=$(stat --format=%g $DOCKER_MOUNT) + GROUPNAME=scdocker + + if ! addgroup --gid "$GROUPID" $GROUPNAME >/dev/null 2>&1; then + echo "$WARNING docker group with $GROUPID already exists, getting group name..." + # if group already exists in container, then reuse name + GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) + echo "$WARNING docker group with $GROUPID has name $GROUPNAME" + fi + adduser "$SC_USER_NAME" "$GROUPNAME" +fi + +echo "$INFO Starting $* ..." +echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" +echo " local dir : $(ls -al)" + +exec gosu "$SC_USER_NAME" "$@" diff --git a/services/efs-guardian/docker/healthcheck.py b/services/efs-guardian/docker/healthcheck.py new file mode 100755 index 00000000000..10e58d00e21 --- /dev/null +++ b/services/efs-guardian/docker/healthcheck.py @@ -0,0 +1,41 @@ +#!/bin/python +""" Healthcheck script to run inside docker + +Example of usage in a Dockerfile +``` + COPY --chown=scu:scu docker/healthcheck.py docker/healthcheck.py + HEALTHCHECK --interval=30s \ + --timeout=30s \ + --start-period=1s \ + --retries=3 \ + CMD python3 docker/healthcheck.py http://localhost:8000/ +``` + +Q&A: + 1. why not to use curl instead of a python script? + - SEE https://blog.sixeyed.com/docker-healthchecks-why-not-to-use-curl-or-iwr/ +""" + +import os +import sys +from contextlib import suppress +from urllib.request import urlopen + +# Disabled if boots with debugger (e.g. debug, pdb-debug, debug-ptvsd, etc) +SC_BOOT_MODE = os.environ.get("SC_BOOT_MODE", "") + +# Adds a base-path if defined in environ +SIMCORE_NODE_BASEPATH = os.environ.get("SIMCORE_NODE_BASEPATH", "") + + +def is_service_healthy() -> bool: + if "debug" in SC_BOOT_MODE.lower(): + return True + + with suppress(Exception): + with urlopen(f"{sys.argv[1]}{SIMCORE_NODE_BASEPATH}") as f: + return f.getcode() == 200 + return False + + +sys.exit(os.EX_OK if is_service_healthy() else os.EX_UNAVAILABLE) diff --git a/services/efs-guardian/requirements/Makefile b/services/efs-guardian/requirements/Makefile new file mode 100644 index 00000000000..e1319af9d7f --- /dev/null +++ b/services/efs-guardian/requirements/Makefile @@ -0,0 +1,10 @@ +# +# Targets to pip-compile requirements +# +include ../../../requirements/base.Makefile + +# Add here any extra explicit dependency: e.g. 
_migration.txt: _base.txt + +_base.in: constraints.txt +_test.in: constraints.txt +_tools.in: constraints.txt diff --git a/services/efs-guardian/requirements/_base.in b/services/efs-guardian/requirements/_base.in new file mode 100644 index 00000000000..84e8460fa05 --- /dev/null +++ b/services/efs-guardian/requirements/_base.in @@ -0,0 +1,18 @@ +# +# Specifies third-party dependencies for 'services/efs-guardian/src' +# +# NOTE: ALL version constraints MUST be commented +--constraint ../../../requirements/constraints.txt +--constraint ./constraints.txt + +# intra-repo required dependencies +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in +--requirement ../../../packages/aws-library/requirements/_base.in +# service-library[fastapi] +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_fastapi.in + + +fastapi +packaging diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt new file mode 100644 index 00000000000..8012fdf97a5 --- /dev/null +++ b/services/efs-guardian/requirements/_base.txt @@ -0,0 +1,175 @@ +aio-pika==9.4.1 +aioboto3==13.0.0 +aiobotocore==2.13.0 + # via aioboto3 +aiocache==0.12.2 +aiodebug==2.3.0 +aiodocker==0.21.0 +aiofiles==23.2.1 + # via aioboto3 +aiohttp==3.9.5 + # via + # aiobotocore + # aiodocker +aioitertools==0.11.0 + # via aiobotocore +aiormq==6.8.0 + # via aio-pika +aiosignal==1.3.1 + # via aiohttp +anyio==4.4.0 + # via + # fast-depends + # faststream + # httpx + # starlette +arrow==1.3.0 +async-timeout==4.0.3 + # via + # aiohttp + # redis +attrs==23.2.0 + # via + # aiohttp + # jsonschema + # referencing +boto3==1.34.106 + # via aiobotocore +botocore==1.34.106 + # via + # aiobotocore + # boto3 + # s3transfer +botocore-stubs==1.34.94 + # via types-aiobotocore +certifi==2024.2.2 + # via + # httpcore + # httpx +click==8.1.7 + # via + # typer + # uvicorn +dnspython==2.6.1 + # via email-validator +email-validator==2.1.1 + # via pydantic +exceptiongroup==1.2.1 + # via anyio +fast-depends==2.4.3 + # via faststream +fastapi==0.99.1 + # via prometheus-fastapi-instrumentator +faststream==0.5.9 +frozenlist==1.4.1 + # via + # aiohttp + # aiosignal +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.5 + # via httpx +httpx==0.27.0 +idna==3.7 + # via + # anyio + # email-validator + # httpx + # yarl +jmespath==1.0.1 + # via + # boto3 + # botocore +jsonschema==4.22.0 +jsonschema-specifications==2023.7.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.0.5 + # via + # aiohttp + # yarl +orjson==3.10.3 +packaging==24.0 +pamqp==3.3.0 + # via aiormq +prometheus-client==0.20.0 + # via prometheus-fastapi-instrumentator +prometheus-fastapi-instrumentator==6.1.0 +pydantic==1.10.15 + # via + # fast-depends + # fastapi +pygments==2.18.0 + # via rich +pyinstrument==4.6.2 +python-dateutil==2.9.0.post0 + # via + # arrow + # botocore +pyyaml==6.0.1 +redis==5.0.4 +referencing==0.29.3 + # via + # jsonschema + # jsonschema-specifications +rich==13.7.1 + # via typer +rpds-py==0.18.1 + # via + # jsonschema + # referencing +s3transfer==0.10.1 + # via boto3 +sh==2.0.6 +shellingham==1.5.4 + # via typer +six==1.16.0 + # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # httpx +starlette==0.27.0 + # via fastapi +tenacity==8.3.0 +toolz==0.12.1 +tqdm==4.66.4 +typer==0.12.3 + # via faststream +types-aiobotocore==2.13.0 
+types-aiobotocore-ec2==2.13.0
+    # via types-aiobotocore
+types-aiobotocore-s3==2.13.0
+    # via types-aiobotocore
+types-awscrt==0.20.9
+    # via botocore-stubs
+types-python-dateutil==2.9.0.20240316
+    # via arrow
+typing-extensions==4.11.0
+    # via
+    #   aiodebug
+    #   aiodocker
+    #   anyio
+    #   fastapi
+    #   faststream
+    #   pydantic
+    #   typer
+    #   types-aiobotocore
+    #   types-aiobotocore-ec2
+    #   types-aiobotocore-s3
+    #   uvicorn
+urllib3==2.2.1
+    # via botocore
+uvicorn==0.30.0
+wrapt==1.16.0
+    # via aiobotocore
+yarl==1.9.4
+    # via
+    #   aio-pika
+    #   aiohttp
+    #   aiormq
diff --git a/services/efs-guardian/requirements/_test.in b/services/efs-guardian/requirements/_test.in
new file mode 100644
index 00000000000..3d7f73b1cd8
--- /dev/null
+++ b/services/efs-guardian/requirements/_test.in
@@ -0,0 +1,32 @@
+#
+# Specifies dependencies required to run 'services/efs-guardian/test'
+# both for unit and integration tests!!
+#
+--constraint ../../../requirements/constraints.txt
+--constraint ./constraints.txt
+
+# Adds base AS CONSTRAINT specs, not requirement.
+#  - Resulting _test.txt is a frozen list of EXTRA packages for testing, besides _base.txt
+#
+--constraint _base.txt
+
+
+aiodocker
+asgi-lifespan
+coverage
+debugpy
+deepdiff
+docker
+faker
+fakeredis[lua]
+httpx
+moto[server]
+parse
+psutil
+pytest
+pytest-asyncio
+pytest-cov
+pytest-mock
+pytest-runner
+python-dotenv
+respx
diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt
new file mode 100644
index 00000000000..0a40d9e8f25
--- /dev/null
+++ b/services/efs-guardian/requirements/_test.txt
@@ -0,0 +1,267 @@
+aiodocker==0.21.0
+aiohttp==3.9.5
+    # via aiodocker
+aiosignal==1.3.1
+    # via aiohttp
+antlr4-python3-runtime==4.13.1
+    # via moto
+anyio==4.4.0
+    # via httpx
+asgi-lifespan==2.1.0
+async-timeout==4.0.3
+    # via
+    #   aiohttp
+    #   redis
+attrs==23.2.0
+    # via
+    #   aiohttp
+    #   jschema-to-python
+    #   jsonschema
+    #   referencing
+    #   sarif-om
+aws-sam-translator==1.89.0
+    # via cfn-lint
+aws-xray-sdk==2.13.1
+    # via moto
+blinker==1.8.2
+    # via flask
+boto3==1.34.106
+    # via
+    #   aws-sam-translator
+    #   moto
+botocore==1.34.106
+    # via
+    #   aws-xray-sdk
+    #   boto3
+    #   moto
+    #   s3transfer
+certifi==2024.2.2
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+cffi==1.16.0
+    # via cryptography
+cfn-lint==0.87.3
+    # via moto
+charset-normalizer==3.3.2
+    # via requests
+click==8.1.7
+    # via flask
+coverage==7.5.3
+    # via pytest-cov
+cryptography==42.0.7
+    # via
+    #   joserfc
+    #   moto
+debugpy==1.8.1
+deepdiff==7.0.1
+docker==7.1.0
+    # via moto
+exceptiongroup==1.2.1
+    # via
+    #   anyio
+    #   pytest
+faker==25.3.0
+fakeredis==2.23.2
+flask==3.0.3
+    # via
+    #   flask-cors
+    #   moto
+flask-cors==4.0.1
+    # via moto
+frozenlist==1.4.1
+    # via
+    #   aiohttp
+    #   aiosignal
+graphql-core==3.2.3
+    # via moto
+h11==0.14.0
+    # via httpcore
+httpcore==1.0.5
+    # via httpx
+httpx==0.27.0
+    # via respx
+idna==3.7
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+    #   yarl
+iniconfig==2.0.0
+    # via pytest
+itsdangerous==2.2.0
+    # via flask
+jinja2==3.1.4
+    # via
+    #   flask
+    #   moto
+jmespath==1.0.1
+    # via
+    #   boto3
+    #   botocore
+joserfc==0.10.0
+    # via moto
+jschema-to-python==1.2.3
+    # via cfn-lint
+jsondiff==2.0.0
+    # via moto
+jsonpatch==1.33
+    # via cfn-lint
+jsonpath-ng==1.6.1
+    # via moto
+jsonpickle==3.0.4
+    # via jschema-to-python
+jsonpointer==2.4
+    # via jsonpatch
+jsonschema==4.22.0
+    # via
+    #   aws-sam-translator
+    #   cfn-lint
+    #   openapi-schema-validator
+    #   openapi-spec-validator
+jsonschema-path==0.3.2
+    # via openapi-spec-validator
+jsonschema-specifications==2023.7.1 + # via + # jsonschema + # openapi-schema-validator +junit-xml==1.9 + # via cfn-lint +lazy-object-proxy==1.10.0 + # via openapi-spec-validator +lupa==2.1 + # via fakeredis +markupsafe==2.1.5 + # via + # jinja2 + # werkzeug +moto==5.0.8 +mpmath==1.3.0 + # via sympy +multidict==6.0.5 + # via + # aiohttp + # yarl +networkx==3.3 + # via cfn-lint +openapi-schema-validator==0.6.2 + # via openapi-spec-validator +openapi-spec-validator==0.7.1 + # via moto +ordered-set==4.1.0 + # via deepdiff +packaging==24.0 + # via pytest +parse==1.20.1 +pathable==0.4.3 + # via jsonschema-path +pbr==6.0.0 + # via + # jschema-to-python + # sarif-om +pluggy==1.5.0 + # via pytest +ply==3.11 + # via jsonpath-ng +psutil==5.9.8 +py-partiql-parser==0.5.5 + # via moto +pycparser==2.22 + # via cffi +pydantic==1.10.15 + # via aws-sam-translator +pyparsing==3.1.2 + # via moto +pytest==8.2.1 + # via + # pytest-asyncio + # pytest-cov + # pytest-mock +pytest-asyncio==0.21.2 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-runner==6.0.1 +python-dateutil==2.9.0.post0 + # via + # botocore + # faker + # moto +python-dotenv==1.0.1 +pyyaml==6.0.1 + # via + # cfn-lint + # jsonschema-path + # moto + # responses +redis==5.0.4 + # via fakeredis +referencing==0.29.3 + # via + # jsonschema + # jsonschema-path + # jsonschema-specifications +regex==2024.5.15 + # via cfn-lint +requests==2.32.2 + # via + # docker + # jsonschema-path + # moto + # responses +responses==0.25.0 + # via moto +respx==0.21.1 +rfc3339-validator==0.1.4 + # via openapi-schema-validator +rpds-py==0.18.1 + # via + # jsonschema + # referencing +s3transfer==0.10.1 + # via boto3 +sarif-om==1.0.4 + # via cfn-lint +setuptools==70.0.0 + # via moto +six==1.16.0 + # via + # junit-xml + # python-dateutil + # rfc3339-validator +sniffio==1.3.1 + # via + # anyio + # asgi-lifespan + # httpx +sortedcontainers==2.4.0 + # via fakeredis +sympy==1.12 + # via cfn-lint +tomli==2.0.1 + # via + # coverage + # pytest +typing-extensions==4.11.0 + # via + # aiodocker + # anyio + # aws-sam-translator + # fakeredis + # pydantic +urllib3==2.2.1 + # via + # botocore + # docker + # requests + # responses +werkzeug==3.0.3 + # via + # flask + # moto +wrapt==1.16.0 + # via aws-xray-sdk +xmltodict==0.13.0 + # via moto +yarl==1.9.4 + # via aiohttp diff --git a/services/efs-guardian/requirements/_tools.in b/services/efs-guardian/requirements/_tools.in new file mode 100644 index 00000000000..52a9a39d162 --- /dev/null +++ b/services/efs-guardian/requirements/_tools.in @@ -0,0 +1,7 @@ +--constraint ../../../requirements/constraints.txt +--constraint _base.txt +--constraint _test.txt + +--requirement ../../../requirements/devenv.txt + +watchdog[watchmedo] diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt new file mode 100644 index 00000000000..a141a791764 --- /dev/null +++ b/services/efs-guardian/requirements/_tools.txt @@ -0,0 +1,74 @@ +astroid==3.2.2 + # via pylint +black==24.4.2 +build==1.2.1 + # via pip-tools +bump2version==1.0.1 +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # black + # pip-tools +dill==0.3.8 + # via pylint +distlib==0.3.8 + # via virtualenv +filelock==3.14.0 + # via virtualenv +identify==2.5.36 + # via pre-commit +isort==5.13.2 + # via pylint +mccabe==0.7.0 + # via pylint +mypy-extensions==1.0.0 + # via black +nodeenv==1.8.0 + # via pre-commit +packaging==24.0 + # via + # black + # build +pathspec==0.12.1 + # via black +pip==24.0 + # via pip-tools +pip-tools==7.4.1 +platformdirs==4.2.2 + 
# via
+    #   black
+    #   pylint
+    #   virtualenv
+pre-commit==3.7.1
+pylint==3.2.2
+pyproject-hooks==1.1.0
+    # via
+    #   build
+    #   pip-tools
+pyyaml==6.0.1
+    # via
+    #   pre-commit
+    #   watchdog
+ruff==0.4.5
+setuptools==70.0.0
+    # via
+    #   nodeenv
+    #   pip-tools
+tomli==2.0.1
+    # via
+    #   black
+    #   build
+    #   pip-tools
+    #   pylint
+tomlkit==0.12.5
+    # via pylint
+typing-extensions==4.11.0
+    # via
+    #   astroid
+    #   black
+virtualenv==20.26.2
+    # via pre-commit
+watchdog==4.0.1
+wheel==0.43.0
+    # via pip-tools
diff --git a/services/efs-guardian/requirements/ci.txt b/services/efs-guardian/requirements/ci.txt
new file mode 100644
index 00000000000..85e9fca927f
--- /dev/null
+++ b/services/efs-guardian/requirements/ci.txt
@@ -0,0 +1,21 @@
+# Shortcut to install all packages for the continuous integration (CI) of 'services/efs-guardian'
+#
+#  - As prod.txt but w/ tests
+#
+# Usage:
+#   pip install -r requirements/ci.txt
+#
+
+# installs base + tests requirements
+--requirement _base.txt
+--requirement _test.txt
+
+# installs this repo's packages
+simcore-aws-library @ ../../packages/aws-library
+simcore-models-library @ ../../packages/models-library
+pytest-simcore @ ../../packages/pytest-simcore
+simcore-service-library[fastapi] @ ../../packages/service-library
+simcore-settings-library @ ../../packages/settings-library
+
+# installs current package
+simcore-service-efs-guardian @ .
diff --git a/services/efs-guardian/requirements/constraints.txt b/services/efs-guardian/requirements/constraints.txt
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/efs-guardian/requirements/dev.txt b/services/efs-guardian/requirements/dev.txt
new file mode 100644
index 00000000000..76ea75d980d
--- /dev/null
+++ b/services/efs-guardian/requirements/dev.txt
@@ -0,0 +1,22 @@
+# Shortcut to install all packages needed to develop 'services/efs-guardian'
+#
+#  - As ci.txt but with current and repo packages in develop (edit) mode
+#
+# Usage:
+#   pip install -r requirements/dev.txt
+#
+
+# installs base + tests + tools requirements
+--requirement _base.txt
+--requirement _test.txt
+--requirement _tools.txt
+
+# installs this repo's packages
+--editable ../../packages/aws-library
+--editable ../../packages/models-library
+--editable ../../packages/pytest-simcore
+--editable ../../packages/service-library[fastapi]
+--editable ../../packages/settings-library
+
+# installs current package
+--editable .
diff --git a/services/efs-guardian/requirements/prod.txt b/services/efs-guardian/requirements/prod.txt
new file mode 100644
index 00000000000..0a75d60f13f
--- /dev/null
+++ b/services/efs-guardian/requirements/prod.txt
@@ -0,0 +1,18 @@
+# Shortcut to install 'services/efs-guardian' for production
+#
+#  - As ci.txt but w/o tests
+#
+# Usage:
+#   pip install -r requirements/prod.txt
+#
+
+# installs base requirements
+--requirement _base.txt
+
+# installs this repo's packages
+simcore-aws-library @ ../../packages/aws-library
+simcore-models-library @ ../../packages/models-library
+simcore-service-library[fastapi] @ ../../packages/service-library
+simcore-settings-library @ ../../packages/settings-library
+# installs current package
+simcore-service-efs-guardian @ .
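NOTE: the three shortcut files above only compose the frozen lists (_base.txt, _test.txt, _tools.txt) with this repo's local packages. A minimal sketch of how they are consumed, assuming the paths introduced by this patch (the `install-ci`/`install-dev` Make targets from the common Makefiles are assumed to wrap the same calls):

    # production image: runtime pins + repo packages (see prod-only-deps stage in the Dockerfile above)
    pip install -r services/efs-guardian/requirements/prod.txt
    # CI: same runtime pins plus the frozen test extras
    pip install -r services/efs-guardian/requirements/ci.txt
    # development: everything, with repo packages installed in --editable mode
    pip install -r services/efs-guardian/requirements/dev.txt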
diff --git a/services/efs-guardian/setup.cfg b/services/efs-guardian/setup.cfg new file mode 100644 index 00000000000..34c42997769 --- /dev/null +++ b/services/efs-guardian/setup.cfg @@ -0,0 +1,13 @@ +[bumpversion] +current_version = 1.0.0 +commit = True +message = services/efs-guardian version: {current_version} → {new_version} +tag = False +commit_args = --no-verify + +[bumpversion:file:VERSION] + +[tool:pytest] +asyncio_mode = auto +markers = + testit: "marks test to run during development" diff --git a/services/efs-guardian/setup.py b/services/efs-guardian/setup.py new file mode 100755 index 00000000000..ed3f29fc23b --- /dev/null +++ b/services/efs-guardian/setup.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 + +import re +import sys +from pathlib import Path + +from setuptools import find_packages, setup + + +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } + + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +NAME = "simcore-service-efs-guardian" +VERSION = (CURRENT_DIR / "VERSION").read_text().strip() +AUTHORS = ("Matus Drobuliak (drobuliakmatus66)",) +DESCRIPTION = "Service to monitor and manage elastic file system" +README = (CURRENT_DIR / "README.md").read_text() + +PROD_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.txt") + | { + "simcore-aws-library", + "simcore-models-library", + "simcore-service-library[fastapi]", + "simcore-settings-library", + } +) + +TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) + +SETUP = { + "name": NAME, + "version": VERSION, + "author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { + "": "src", + }, + "package_data": {"": ["data/*.yml"]}, + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { + "console_scripts": [ + "simcore-service-efs-guardian = simcore_service_efs_guardian.cli:main", + "simcore-service = simcore_service_efs_guardian.cli:main", + ], + }, +} + +if __name__ == "__main__": + setup(**SETUP) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/__init__.py new file mode 100644 index 00000000000..f513c971cca --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/__init__.py @@ -0,0 +1,3 @@ +from ._meta import __version__ + +assert __version__ # nosec diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/_meta.py b/services/efs-guardian/src/simcore_service_efs_guardian/_meta.py new file mode 100644 index 00000000000..27ec8aad7a6 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/_meta.py @@ -0,0 +1,65 @@ +""" Application's metadata + +""" + +from importlib.metadata import distribution, version +from importlib.resources import files +from pathlib import Path +from typing import Final + +from models_library.basic_types import VersionTag +from packaging.version import Version +from pydantic import parse_obj_as + +_current_distribution = distribution("simcore-service-efs-guardian") +__version__: str = version("simcore-service-efs-guardian") + + +APP_NAME: Final[str] = 
_current_distribution.metadata["Name"] +API_VERSION: Final[str] = __version__ +VERSION: Final[Version] = Version(__version__) +API_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, f"v{VERSION.major}") +RPC_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, f"v{VERSION.major}") + + +def get_summary() -> str: + return _current_distribution.metadata.get_all("Summary", [""])[-1] + + +SUMMARY: Final[str] = get_summary() +PACKAGE_DATA_FOLDER: Final[Path] = Path(f'{files(APP_NAME.replace("-", "_")) / "data"}') + +# https://patorjk.com/software/taag/#p=display&f=ANSI%20Shadow&t=Elastic%20file%0Asystem%20guardian +APP_STARTED_BANNER_MSG = r""" +███████╗██╗ █████╗ ███████╗████████╗██╗ ██████╗ ███████╗██╗██╗ ███████╗ +██╔════╝██║ ██╔══██╗██╔════╝╚══██╔══╝██║██╔════╝ ██╔════╝██║██║ ██╔════╝ +█████╗ ██║ ███████║███████╗ ██║ ██║██║ █████╗ ██║██║ █████╗ +██╔══╝ ██║ ██╔══██║╚════██║ ██║ ██║██║ ██╔══╝ ██║██║ ██╔══╝ +███████╗███████╗██║ ██║███████║ ██║ ██║╚██████╗ ██║ ██║███████╗███████╗ +╚══════╝╚══════╝╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝╚══════╝ + +███████╗██╗ ██╗███████╗████████╗███████╗███╗ ███╗ ██████╗ ██╗ ██╗ █████╗ ██████╗ ██████╗ ██╗ █████╗ ███╗ ██╗ +██╔════╝╚██╗ ██╔╝██╔════╝╚══██╔══╝██╔════╝████╗ ████║ ██╔════╝ ██║ ██║██╔══██╗██╔══██╗██╔══██╗██║██╔══██╗████╗ ██║ +███████╗ ╚████╔╝ ███████╗ ██║ █████╗ ██╔████╔██║ ██║ ███╗██║ ██║███████║██████╔╝██║ ██║██║███████║██╔██╗ ██║ +╚════██║ ╚██╔╝ ╚════██║ ██║ ██╔══╝ ██║╚██╔╝██║ ██║ ██║██║ ██║██╔══██║██╔══██╗██║ ██║██║██╔══██║██║╚██╗██║ +███████║ ██║ ███████║ ██║ ███████╗██║ ╚═╝ ██║ ╚██████╔╝╚██████╔╝██║ ██║██║ ██║██████╔╝██║██║ ██║██║ ╚████║ +╚══════╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ + 🛡️ Welcome to EFS-Guardian App 🛡️ + Your Elastic File System Manager & Monitor + {} +""".format( + f"v{__version__}" +) + +APP_STARTED_DISABLED_BANNER_MSG = r""" +██████╗ ██╗███████╗ █████╗ ██████╗ ██╗ ███████╗██████╗ +██╔══██╗██║██╔════╝██╔══██╗██╔══██╗██║ ██╔════╝██╔══██╗ +██║ ██║██║███████╗███████║██████╔╝██║ █████╗ ██║ ██║ +██║ ██║██║╚════██║██╔══██║██╔══██╗██║ ██╔══╝ ██║ ██║ +██████╔╝██║███████║██║ ██║██████╔╝███████╗███████╗██████╔╝ +╚═════╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═════╝ ╚══════╝╚══════╝╚═════╝ +""" + +APP_FINISHED_BANNER_MSG = "{:=^100}".format( + f"🎉 App {APP_NAME}=={__version__} shutdown completed 🎉" +) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/health.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/health.py new file mode 100644 index 00000000000..2c6f160a9e8 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/health.py @@ -0,0 +1,18 @@ +""" +All entrypoints used for operations + +for instance: service health-check (w/ different variants), diagnostics, debugging, status, etc +""" + +import datetime + +from fastapi import APIRouter +from fastapi.responses import PlainTextResponse + +router = APIRouter() + + +@router.get("/", include_in_schema=True, response_class=PlainTextResponse) +async def health_check(): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
+ return f"{__name__}.health_check@{datetime.datetime.now(datetime.timezone.utc).isoformat()}" diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/routes.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/routes.py new file mode 100644 index 00000000000..af7eef7aa26 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rest/routes.py @@ -0,0 +1,17 @@ +from fastapi import APIRouter, FastAPI + +from ..._meta import API_VTAG +from . import health + + +def setup_api_routes(app: FastAPI): + """ + Composes resources/sub-resources routers + """ + router = APIRouter() + + # include operations in / + app.include_router(health.router, tags=["operations"]) + + # include the rest under /vX + app.include_router(router, prefix=f"/{API_VTAG}") diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py new file mode 100644 index 00000000000..c79ed1f7ed3 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py @@ -0,0 +1,22 @@ +from collections.abc import Awaitable, Callable + +from fastapi import FastAPI + + +def on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: + async def _start() -> None: + assert app # nosec + + return _start + + +def on_app_shutdown(app: FastAPI) -> Callable[[], Awaitable[None]]: + async def _stop() -> None: + assert app # nosec + + return _stop + + +def setup_rpc_routes(app: FastAPI) -> None: + app.add_event_handler("startup", on_app_startup(app)) + app.add_event_handler("shutdown", on_app_shutdown(app)) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/cli.py b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py new file mode 100644 index 00000000000..77d18015ec0 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py @@ -0,0 +1,24 @@ +import logging + +import typer +from settings_library.utils_cli import create_settings_command + +from ._meta import APP_NAME +from .core.settings import ApplicationSettings + +log = logging.getLogger(__name__) + +# NOTE: 'main' variable is referred in the setup's entrypoint! +main = typer.Typer(name=APP_NAME) + +main.command()(create_settings_command(settings_cls=ApplicationSettings, logger=log)) + + +@main.command() +def run(): + """Runs application""" + typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") + typer.secho( + "$ uvicorn simcore_service_efs_guardian.main:the_app", + fg=typer.colors.BLUE, + ) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py new file mode 100644 index 00000000000..da0d9deb0d2 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -0,0 +1,59 @@ +import logging + +from fastapi import FastAPI + +from .._meta import ( + API_VERSION, + API_VTAG, + APP_FINISHED_BANNER_MSG, + APP_NAME, + APP_STARTED_BANNER_MSG, + APP_STARTED_DISABLED_BANNER_MSG, +) +from ..api.rest.routes import setup_api_routes +from ..api.rpc.rpc_routes import setup_rpc_routes +from .settings import ApplicationSettings + +logger = logging.getLogger(__name__) + + +def create_app(settings: ApplicationSettings) -> FastAPI: + logger.info("app settings: %s", settings.json(indent=1)) + + app = FastAPI( + debug=settings.EFS_GUARDIAN_DEBUG, + title=APP_NAME, + description="Service to monitor and manage elastic file system", + version=API_VERSION, + openapi_url=f"/api/{API_VTAG}/openapi.json", + docs_url="/dev/doc", + redoc_url=None, # default disabled + ) + # STATE + app.state.settings = settings + assert app.state.settings.API_VERSION == API_VERSION # nosec + + # PLUGINS SETUP + setup_api_routes(app) + setup_rpc_routes(app) + + # ERROR HANDLERS + + # EVENTS + async def _on_startup() -> None: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + if any( + s is None + for s in [ + settings.EFS_GUARDIAN_AWS_EFS_SETTINGS, + ] + ): + print(APP_STARTED_DISABLED_BANNER_MSG, flush=True) # noqa: T201 + + async def _on_shutdown() -> None: + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py new file mode 100644 index 00000000000..aedbca71f0c --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -0,0 +1,85 @@ +from functools import cached_property +from typing import Final, cast + +from fastapi import FastAPI +from models_library.basic_types import ( + BootModeEnum, + BuildTargetEnum, + LogLevel, + VersionTag, +) +from pydantic import Field, PositiveInt, validator +from settings_library.base import BaseCustomSettings +from settings_library.utils_logging import MixinLoggingSettings + +from .._meta import API_VERSION, API_VTAG, APP_NAME + +EFS_GUARDIAN_ENV_PREFIX: Final[str] = "EFS_GUARDIAN_" + + +class AwsEfsSettings(BaseCustomSettings): + EFS_DNS_NAME: str = Field( + description="AWS Elastic File System DNS name", + example="fs-xxx.efs.us-east-1.amazonaws.com", + ) + EFS_BASE_DIRECTORY: str = Field(default="project-specific-data") + + +class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): + # CODE STATICS --------------------------------------------------------- + API_VERSION: str = API_VERSION + APP_NAME: str = APP_NAME + API_VTAG: VersionTag = API_VTAG + + # IMAGE BUILDTIME ------------------------------------------------------ + # @Makefile + SC_BUILD_DATE: str | None = None + SC_BUILD_TARGET: 
BuildTargetEnum | None = None + SC_VCS_REF: str | None = None + SC_VCS_URL: str | None = None + + # @Dockerfile + SC_BOOT_MODE: BootModeEnum | None = None + SC_BOOT_TARGET: BuildTargetEnum | None = None + SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( + None, + description="If a single run of the check takes longer than timeout seconds " + "then the check is considered to have failed." + "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", + ) + SC_USER_ID: int | None = None + SC_USER_NAME: str | None = None + + # RUNTIME ----------------------------------------------------------- + EFS_GUARDIAN_DEBUG: bool = Field( + default=False, description="Debug mode", env=["EFS_GUARDIAN_DEBUG", "DEBUG"] + ) + EFS_GUARDIAN_LOGLEVEL: LogLevel = Field( + LogLevel.INFO, env=["EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + ) + EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( + default=False, + env=[ + "EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ], + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", + ) + + EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings | None = Field( + auto_default_from_env=True + ) + + @cached_property + def LOG_LEVEL(self) -> LogLevel: # noqa: N802 + return self.EFS_GUARDIAN_LOGLEVEL + + @validator("EFS_GUARDIAN_LOGLEVEL") + @classmethod + def valid_log_level(cls, value: str) -> str: + # NOTE: mypy is not happy without the cast + return cast(str, cls.validate_log_level(value)) + + +def get_application_settings(app: FastAPI) -> ApplicationSettings: + return cast(ApplicationSettings, app.state.settings) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/__init__.py new file mode 100644 index 00000000000..b6036dda040 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/__init__.py @@ -0,0 +1,5 @@ +from . import handlers + +setup_exception_handlers = handlers.setup + +__all__: tuple[str, ...] 
= ("setup_exception_handlers",) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py new file mode 100644 index 00000000000..61a92118c92 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py @@ -0,0 +1,8 @@ +from typing import Any + +from models_library.errors_classes import OsparcErrorMixin + + +class EfsGuardianBaseError(OsparcErrorMixin, Exception): + def __init__(self, **ctx: Any) -> None: + super().__init__(**ctx) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/custom_errors.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/custom_errors.py new file mode 100644 index 00000000000..ca702657f53 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/custom_errors.py @@ -0,0 +1,9 @@ +from ._base import EfsGuardianBaseError + + +class CustomBaseError(EfsGuardianBaseError): + pass + + +class ApplicationSetupError(CustomBaseError): + pass diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/handlers/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/handlers/__init__.py new file mode 100644 index 00000000000..f9a5aefe592 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/handlers/__init__.py @@ -0,0 +1,7 @@ +# pylint: disable=unused-argument + +from fastapi import FastAPI + + +def setup(app: FastAPI, *, is_debug: bool = False): + ... diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py new file mode 100644 index 00000000000..6ab24933b02 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -0,0 +1,17 @@ +"""Main application to be deployed by uvicorn (or equivalent) server + +""" +import logging + +from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers +from simcore_service_efs_guardian.core.application import create_app +from simcore_service_efs_guardian.core.settings import ApplicationSettings + +the_settings = ApplicationSettings.create_from_envs() +logging.basicConfig(level=the_settings.log_level) +logging.root.setLevel(the_settings.log_level) +config_all_loggers(the_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED) + +# SINGLETON FastAPI app +the_app: FastAPI = create_app(the_settings) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/tests/integration/.gitkeep b/services/efs-guardian/tests/integration/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py new file mode 100644 index 00000000000..9c53ab29a3f --- /dev/null +++ b/services/efs-guardian/tests/unit/conftest.py @@ -0,0 +1,117 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import re +from collections.abc import AsyncIterator +from pathlib import Path + +import httpx +import pytest +import simcore_service_efs_guardian +import yaml +from asgi_lifespan import LifespanManager +from fastapi import FastAPI +from httpx import ASGITransport +from pytest_simcore.helpers.utils_envs import 
EnvVarsDict, setenvs_from_dict +from simcore_service_efs_guardian.core.application import create_app +from simcore_service_efs_guardian.core.settings import ApplicationSettings + +pytest_plugins = [ + "pytest_simcore.cli_runner", + "pytest_simcore.environment_configs", + "pytest_simcore.repository_paths", +] + + +@pytest.fixture(scope="session") +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "efs_guardian" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_efs_guardian")) + return service_folder + + +@pytest.fixture(scope="session") +def installed_package_dir() -> Path: + dirpath = Path(simcore_service_efs_guardian.__file__).resolve().parent + assert dirpath.exists() + return dirpath + + +@pytest.fixture +def docker_compose_service_efs_guardian_env_vars( + services_docker_compose_file: Path, + env_devel_dict: EnvVarsDict, +) -> EnvVarsDict: + """env vars injected at the docker-compose""" + + payments = yaml.safe_load(services_docker_compose_file.read_text())["services"][ + "efs-guardian" + ] + + def _substitute(key, value): + if m := re.match(r"\${([^{}:-]\w+)", value): + expected_env_var = m.group(1) + try: + # NOTE: if this raises, then the RHS env-vars in the docker-compose are + # not defined in the env-devel + if value := env_devel_dict[expected_env_var]: + return key, value + except KeyError: + pytest.fail( + f"{expected_env_var} is not defined in .env-devel but used in docker-compose services[{payments}].environment[{key}]" + ) + return None + + envs: EnvVarsDict = {} + for key, value in payments.get("environment", {}).items(): + if found := _substitute(key, value): + _, new_value = found + envs[key] = new_value + + return envs + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_efs_guardian_env_vars: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **docker_compose_service_efs_guardian_env_vars, + }, + ) + + +@pytest.fixture +def app_settings(app_environment: EnvVarsDict) -> ApplicationSettings: + settings = ApplicationSettings.create_from_envs() + return settings + + +@pytest.fixture +async def app(app_settings: ApplicationSettings) -> AsyncIterator[FastAPI]: + the_test_app = create_app(app_settings) + async with LifespanManager( + the_test_app, + ): + yield the_test_app + + +@pytest.fixture +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + app=app, + base_url="http://efs-guardian.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance( + client._transport, ASGITransport # pylint: disable=protected-access + ) + yield client diff --git a/services/efs-guardian/tests/unit/test_api_health.py b/services/efs-guardian/tests/unit/test_api_health.py new file mode 100644 index 00000000000..791fb2bee26 --- /dev/null +++ b/services/efs-guardian/tests/unit/test_api_health.py @@ -0,0 +1,13 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import httpx +from starlette import status + + +async def test_healthcheck(client: httpx.AsyncClient): + response = await client.get("/") + response.raise_for_status() + assert response.status_code == status.HTTP_200_OK + assert 
"simcore_service_efs_guardian" in response.text diff --git a/services/efs-guardian/tests/unit/test_cli.py b/services/efs-guardian/tests/unit/test_cli.py new file mode 100644 index 00000000000..6819ed50a41 --- /dev/null +++ b/services/efs-guardian/tests/unit/test_cli.py @@ -0,0 +1,21 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + + +from simcore_service_efs_guardian.cli import main +from typer.testing import CliRunner + +runner = CliRunner() + + +def test_settings(app_environment): + result = runner.invoke(main, ["settings"]) + assert result.exit_code == 0 + assert "APP_NAME=simcore-service-efs-guardian" in result.stdout + + +def test_run(): + result = runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout diff --git a/services/efs-guardian/tests/unit/test_core_settings.py b/services/efs-guardian/tests/unit/test_core_settings.py new file mode 100644 index 00000000000..a3496b381b5 --- /dev/null +++ b/services/efs-guardian/tests/unit/test_core_settings.py @@ -0,0 +1,12 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from pytest_simcore.helpers.utils_envs import EnvVarsDict +from simcore_service_efs_guardian.core.settings import ApplicationSettings + + +def test_settings(app_environment: EnvVarsDict): + settings = ApplicationSettings.create_from_envs() + assert settings diff --git a/services/efs-guardian/tests/unit/test_main.py b/services/efs-guardian/tests/unit/test_main.py new file mode 100644 index 00000000000..475673488be --- /dev/null +++ b/services/efs-guardian/tests/unit/test_main.py @@ -0,0 +1,12 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + + +from pytest_simcore.helpers.utils_envs import EnvVarsDict + + +def test_main_app(app_environment: EnvVarsDict): + from simcore_service_efs_guardian.main import the_app, the_settings + + assert the_app.state.settings == the_settings diff --git a/tests/swarm-deploy/test_service_restart.py b/tests/swarm-deploy/test_service_restart.py index 93c081b3d67..d07a20b8a10 100644 --- a/tests/swarm-deploy/test_service_restart.py +++ b/tests/swarm-deploy/test_service_restart.py @@ -20,6 +20,7 @@ ("dask-sidecar", 0), ("datcore-adapter", 0), ("director-v2", 0), + ("efs-guardian", 0), ("migration", 143), ("static-webserver", 15), ("storage", 0), From 1c5cf7b262807f3bcf5b1039da183ce5e3db830c Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Fri, 31 May 2024 11:45:39 +0200 Subject: [PATCH 005/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Removing=20default?= =?UTF-8?q?=20network=20form=20agent=20(#5901)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- services/docker-compose.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 30c0f7f724f..1c94c000275 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -910,8 +910,6 @@ services: cpus: "1.0" memory: 1024M - networks: - - default volumes: - /var/run/docker.sock:/var/run/docker.sock environment: From e50f9726a578152158a23bc9cc0034c8695cf5b0 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 31 May 2024 16:32:41 +0200 Subject: [PATCH 006/219] =?UTF-8?q?=E2=9C=A8=E2=99=BB=EF=B8=8F=20New=20fie?= 
=?UTF-8?q?lds=20for=20service=20metadata=20(#5902)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../api_schemas_catalog/services.py | 6 +- .../src/models_library/services.py | 203 +++++++++--------- .../src/models_library/services_db.py | 4 +- packages/service-integration/VERSION | 2 +- packages/service-integration/setup.cfg | 2 +- .../_compose_spec_model_autogenerated.py | 2 +- .../src/service_integration/cli.py | 4 +- .../service_integration/commands/compose.py | 32 +-- .../service_integration/commands/config.py | 18 +- .../service_integration/commands/metadata.py | 3 +- .../commands/run_creator.py | 16 +- .../src/service_integration/commands/test.py | 3 +- .../src/service_integration/errors.py | 2 +- .../src/service_integration/oci_image_spec.py | 9 +- .../src/service_integration/osparc_config.py | 94 ++------ .../service_integration/osparc_image_specs.py | 11 +- .../osparc_runtime_specs.py | 5 +- .../pytest_plugin/docker_integration.py | 15 +- .../pytest_plugin/folder_structure.py | 4 +- .../pytest_plugin/validation_data.py | 6 +- .../src/service_integration/settings.py | 6 +- .../src/service_integration/versioning.py | 12 +- .../service-integration/tests/conftest.py | 19 +- .../tests/data/metadata.yml | 6 +- .../tests/test__usecase_jupytermath.py | 6 +- .../service-integration/tests/test_cli.py | 2 +- .../tests/test_command_compose.py | 48 ++--- .../tests/test_command_config.py | 5 +- .../tests/test_command_metadata.py | 2 +- .../tests/test_labels_annotations.py | 4 +- .../tests/test_oci_image_spec.py | 6 +- .../tests/test_osparc_config.py | 10 +- .../tests/test_osparc_image_specs.py | 8 +- .../tests/test_osparc_runtime_specs.py | 2 +- .../tests/test_versioning.py | 12 +- .../test_api_routers_solvers_jobs.py | 4 +- services/web/server/VERSION | 2 +- services/web/server/setup.cfg | 2 +- .../api/v0/openapi.yaml | 44 +++- 39 files changed, 317 insertions(+), 324 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index bf3ca20a25c..f491eb6bcaf 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -3,12 +3,12 @@ from pydantic import Extra from ..emails import LowerCaseEmailStr -from ..services import ServiceDockerData, ServiceMetaData +from ..services import BaseServiceMetaData, ServiceDockerData from ..services_access import ServiceAccessRights from ..services_resources import ServiceResourcesDict -class ServiceUpdate(ServiceMetaData, ServiceAccessRights): +class ServiceUpdate(BaseServiceMetaData, ServiceAccessRights): class Config: schema_extra: ClassVar[dict[str, Any]] = { "example": { @@ -61,7 +61,7 @@ class Config: class ServiceGet( - ServiceDockerData, ServiceAccessRights, ServiceMetaData + ServiceDockerData, ServiceAccessRights, BaseServiceMetaData ): # pylint: disable=too-many-ancestors owner: LowerCaseEmailStr | None diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py index 821ecef9a74..144d4b22d04 100644 --- a/packages/models-library/src/models_library/services.py +++ b/packages/models-library/src/models_library/services.py @@ -18,7 +18,7 @@ validator, ) -from .basic_regex import VERSION_RE +from .basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS, VERSION_RE from .boot_options import BootOption, BootOptions from 
.emails import LowerCaseEmailStr from .services_constants import FILENAME_RE, PROPERTY_TYPE_RE @@ -480,25 +480,104 @@ def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self- ServiceOutputsDict: TypeAlias = dict[ServicePortKey, ServiceOutput] +_EXAMPLE = { + "name": "oSparc Python Runner", + "key": "simcore/services/comp/osparc-python-runner", + "type": "computational", + "integration-version": "1.0.0", + "progress_regexp": "^(?:\\[?PROGRESS\\]?:?)?\\s*(?P<value>[0-1]?\\.\\d+|\\d+\\s*(?P<percent_sign>%))", + "version": "1.7.0", + "description": "oSparc Python Runner", + "contact": "smith@company.com", + "authors": [ + { + "name": "John Smith", + "email": "smith@company.com", + "affiliation": "Company", + }, + { + "name": "Richard Brown", + "email": "brown@uni.edu", + "affiliation": "University", + }, + ], + "inputs": { + "input_1": { + "displayOrder": 1, + "label": "Input data", + "description": "Any code, requirements or data file", + "type": "data:*/*", + } + }, + "outputs": { + "output_1": { + "displayOrder": 1, + "label": "Output data", + "description": "All data produced by the script is zipped as output_data.zip", + "type": "data:*/*", + "fileToKeyMap": {"output_data.zip": "output_1"}, + } + }, +} + +_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER = { + **_EXAMPLE, + "description": "oSparc Python Runner with boot options", + "inputs": { + "input_1": { + "label": "Input data", + "description": "Any code, requirements or data file", + "type": "data:*/*", + } + }, + "outputs": { + "output_1": { + "label": "Output data", + "description": "All data produced by the script is zipped as output_data.zip", + "type": "data:*/*", + "fileToKeyMap": {"output_data.zip": "output_1"}, + } + }, + "boot-options": { + "example_service_defined_boot_mode": BootOption.Config.schema_extra["examples"][ + 0 + ], + "example_service_defined_theme_selection": BootOption.Config.schema_extra[ + "examples" + ][1], + }, + "min-visible-inputs": 2, +} + + class ServiceDockerData(ServiceKeyVersion, _BaseServiceCommonDataModel): """ Static metadata for a service injected in the image labels - This is one to one with node-meta-v0.0.1.json + NOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image """ - integration_version: str | None = Field( + version_display: str | None = Field( None, - alias="integration-version", - description="integration version number", - regex=VERSION_RE, - examples=["1.0.0"], + description="A user-friendly or marketing name for the release." + " This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.'" + " This name is not used for version comparison but is useful for communication and documentation purposes.", ) - progress_regexp: str | None = Field( + + release_date: datetime | None = Field( None, - alias="progress_regexp", - description="regexp pattern for detecting computational service's progress", + description="A timestamp when the specific version of the service was released." + " This field helps in tracking the timeline of releases and understanding the sequence of updates."
+ " A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]", ) + + integration_version: str | None = Field( + None, + alias="integration-version", + description="This version is used to maintain backward compatibility when there are changes in the way a service is integrated into the framework", + regex=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS, + ) + service_type: ServiceType = Field( ..., alias="type", @@ -526,6 +605,7 @@ class ServiceDockerData(ServiceKeyVersion, _BaseServiceCommonDataModel): alias="boot-options", description="Service defined boot options. These get injected in the service as env variables.", ) + min_visible_inputs: NonNegativeInt | None = Field( None, alias="min-visible-inputs", @@ -535,108 +615,33 @@ class ServiceDockerData(ServiceKeyVersion, _BaseServiceCommonDataModel): ), ) + progress_regexp: str | None = Field( + None, + alias="progress_regexp", + description="regexp pattern for detecting computational service's progress", + ) + class Config: description = "Description of a simcore node 'class' with input and output" extra = Extra.forbid - frozen = False # it inherits from ServiceKeyVersion. + frozen = False # overrides config from ServiceKeyVersion. + allow_population_by_field_name = True schema_extra: ClassVar[dict[str, Any]] = { "examples": [ - { - "name": "oSparc Python Runner", - "key": "simcore/services/comp/osparc-python-runner", - "type": "computational", - "integration-version": "1.0.0", - "progress_regexp": "^(?:\\[?PROGRESS\\]?:?)?\\s*(?P[0-1]?\\.\\d+|\\d+\\s*(?P%))", - "version": "1.7.0", - "description": "oSparc Python Runner", - "contact": "smith@company.com", - "authors": [ - { - "name": "John Smith", - "email": "smith@company.com", - "affiliation": "Company", - }, - { - "name": "Richard Brown", - "email": "brown@uni.edu", - "affiliation": "University", - }, - ], - "inputs": { - "input_1": { - "displayOrder": 1, - "label": "Input data", - "description": "Any code, requirements or data file", - "type": "data:*/*", - } - }, - "outputs": { - "output_1": { - "displayOrder": 1, - "label": "Output data", - "description": "All data produced by the script is zipped as output_data.zip", - "type": "data:*/*", - "fileToKeyMap": {"output_data.zip": "output_1"}, - } - }, - }, + _EXAMPLE, + _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, # latest { - "name": "oSparc Python Runner", - "key": "simcore/services/comp/osparc-python-runner", - "type": "computational", - "integration-version": "1.0.0", - "progress_regexp": "^(?:\\[?PROGRESS\\]?:?)?\\s*(?P[0-1]?\\.\\d+|\\d+\\s*(?P%))", - "version": "1.7.0", - "description": "oSparc Python Runner with boot options", - "contact": "smith@company.com", - "authors": [ - { - "name": "John Smith", - "email": "smith@company.com", - "affiliation": "Company", - }, - { - "name": "Richard Brown", - "email": "brown@uni.edu", - "affiliation": "University", - }, - ], - "inputs": { - "input_1": { - "label": "Input data", - "description": "Any code, requirements or data file", - "type": "data:*/*", - } - }, - "outputs": { - "output_1": { - "label": "Output data", - "description": "All data produced by the script is zipped as output_data.zip", - "type": "data:*/*", - "fileToKeyMap": {"output_data.zip": "output_1"}, - } - }, - "boot-options": { - "example_service_defined_boot_mode": BootOption.Config.schema_extra[ - "examples" - ][ - 0 - ], - "example_service_defined_theme_selection": BootOption.Config.schema_extra[ - "examples" - ][ - 1 - ], - }, - "min-visible-inputs": 2, + 
**_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, + "version_display": "Matterhorn Release", + "release_date": "2024-05-31T13:45:30", }, ] } -class ServiceMetaData(_BaseServiceCommonDataModel): +class BaseServiceMetaData(_BaseServiceCommonDataModel): # Overrides all fields of _BaseServiceCommonDataModel: # - for a partial update all members must be Optional # FIXME: if API entry needs a schema to allow partial updates (e.g. patch/put), diff --git a/packages/models-library/src/models_library/services_db.py b/packages/models-library/src/models_library/services_db.py index 0e5353353ae..e4ceae79c1f 100644 --- a/packages/models-library/src/models_library/services_db.py +++ b/packages/models-library/src/models_library/services_db.py @@ -9,7 +9,7 @@ from pydantic import Field from pydantic.types import PositiveInt -from .services import ServiceKeyVersion, ServiceMetaData +from .services import BaseServiceMetaData, ServiceKeyVersion from .services_access import ServiceGroupAccessRights # ------------------------------------------------------------------- @@ -18,7 +18,7 @@ # - table services_access_rights -class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaData): +class ServiceMetaDataAtDB(ServiceKeyVersion, BaseServiceMetaData): # for a partial update all members must be Optional classifiers: list[str] | None = Field([]) owner: PositiveInt | None diff --git a/packages/service-integration/VERSION b/packages/service-integration/VERSION index 21e8796a09d..ee90284c27f 100644 --- a/packages/service-integration/VERSION +++ b/packages/service-integration/VERSION @@ -1 +1 @@ -1.0.3 +1.0.4 diff --git a/packages/service-integration/setup.cfg b/packages/service-integration/setup.cfg index af7998eb1a3..a6c19f847ef 100644 --- a/packages/service-integration/setup.cfg +++ b/packages/service-integration/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.0.3 +current_version = 1.0.4 commit = True message = service-integration version: {current_version} → {new_version} tag = False diff --git a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py index 56ad2faf2df..a390a469a41 100644 --- a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py +++ b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py @@ -519,7 +519,7 @@ class Config: ) = None oom_kill_disable: bool | None = None oom_score_adj: conint(ge=-1000, le=1000) | None = None - pid: str | None | None = None + pid: str | None = None pids_limit: float | str | None = None platform: str | None = None ports: list[PortInt | str | Port] | None = None diff --git a/packages/service-integration/src/service_integration/cli.py b/packages/service-integration/src/service_integration/cli.py index bf5d0101119..a257be65c14 100644 --- a/packages/service-integration/src/service_integration/cli.py +++ b/packages/service-integration/src/service_integration/cli.py @@ -14,7 +14,7 @@ def _version_callback(value: bool): if value: rich.print(__version__) - raise typer.Exit() + raise typer.Exit @app.callback() @@ -38,7 +38,7 @@ def main( ), ): """o2s2parc service integration library""" - assert version or not version # nosec + assert isinstance(version, bool | None) # nosec overrides = {} if registry_name: diff --git a/packages/service-integration/src/service_integration/commands/compose.py b/packages/service-integration/src/service_integration/commands/compose.py index 
15ddae6094e..3904828cad5 100644 --- a/packages/service-integration/src/service_integration/commands/compose.py +++ b/packages/service-integration/src/service_integration/commands/compose.py @@ -10,7 +10,12 @@ from ..compose_spec_model import ComposeSpecification from ..oci_image_spec import LS_LABEL_PREFIX, OCI_LABEL_PREFIX -from ..osparc_config import DockerComposeOverwriteCfg, MetaConfig, RuntimeConfig +from ..osparc_config import ( + OSPARC_CONFIG_DIRNAME, + DockerComposeOverwriteConfig, + MetadataConfig, + RuntimeConfig, +) from ..osparc_image_specs import create_image_spec from ..settings import AppSettings @@ -20,10 +25,7 @@ def _run_git(*args) -> str: """:raises CalledProcessError""" return subprocess.run( # nosec - [ - "git", - ] - + list(args), + ["git", *list(args)], capture_output=True, encoding="utf8", check=True, @@ -60,15 +62,15 @@ def create_docker_compose_image_spec( config_basedir = meta_config_path.parent # required - meta_cfg = MetaConfig.from_yaml(meta_config_path) + meta_cfg = MetadataConfig.from_yaml(meta_config_path) # required if docker_compose_overwrite_path: - docker_compose_overwrite_cfg = DockerComposeOverwriteCfg.from_yaml( + docker_compose_overwrite_cfg = DockerComposeOverwriteConfig.from_yaml( docker_compose_overwrite_path ) else: - docker_compose_overwrite_cfg = DockerComposeOverwriteCfg.create_default( + docker_compose_overwrite_cfg = DockerComposeOverwriteConfig.create_default( service_name=meta_cfg.service_name() ) @@ -88,7 +90,8 @@ def create_docker_compose_image_spec( (config_basedir / f"{OCI_LABEL_PREFIX}.yml").read_text() ) if not oci_spec: - raise ValueError("Undefined OCI image spec") + msg = "Undefined OCI image spec" + raise ValueError(msg) oci_labels = to_labels(oci_spec, prefix_key=OCI_LABEL_PREFIX) extra_labels.update(oci_labels) @@ -118,7 +121,7 @@ def create_docker_compose_image_spec( "config", "--get", "remote.origin.url" ) - compose_spec = create_image_spec( + return create_image_spec( settings, meta_cfg, docker_compose_overwrite_cfg, @@ -126,13 +129,11 @@ def create_docker_compose_image_spec( extra_labels=extra_labels, ) - return compose_spec - def main( ctx: typer.Context, config_path: Path = typer.Option( - ".osparc", + OSPARC_CONFIG_DIRNAME, "-m", "--metadata", help="osparc config file or folder. 
" @@ -149,7 +150,8 @@ def main( # TODO: all these MUST be replaced by osparc_config.ConfigFilesStructure if not config_path.exists(): - raise typer.BadParameter("Invalid path to metadata file or folder") + msg = "Invalid path to metadata file or folder" + raise typer.BadParameter(msg) if config_path.is_dir(): # equivalent to 'basedir/**/metadata.yml' @@ -162,7 +164,7 @@ def main( configs_kwargs_map: dict[str, dict[str, Path]] = {} - for meta_config in sorted(list(basedir.rglob(config_pattern))): + for meta_config in sorted(basedir.rglob(config_pattern)): config_name = meta_config.parent.name configs_kwargs_map[config_name] = {} diff --git a/packages/service-integration/src/service_integration/commands/config.py b/packages/service-integration/src/service_integration/commands/config.py index dc321430a12..e1e5b8ef5b1 100644 --- a/packages/service-integration/src/service_integration/commands/config.py +++ b/packages/service-integration/src/service_integration/commands/config.py @@ -9,7 +9,12 @@ from pydantic.main import BaseModel from ..compose_spec_model import ComposeSpecification -from ..osparc_config import DockerComposeOverwriteCfg, MetaConfig, RuntimeConfig +from ..osparc_config import ( + OSPARC_CONFIG_DIRNAME, + DockerComposeOverwriteConfig, + MetadataConfig, + RuntimeConfig, +) def create_osparc_specs( @@ -53,17 +58,18 @@ def _save(service_name: str, filename: Path, model: BaseModel): labels = dict(item.strip().split("=") for item in build_labels) elif isinstance(build_labels, dict): labels = build_labels - elif labels__root__ := getattr(build_labels, "__root__"): + elif labels__root__ := build_labels.__root__: assert isinstance(labels__root__, dict) # nosec labels = labels__root__ else: - raise ValueError(f"Invalid build labels {build_labels}") + msg = f"Invalid build labels {build_labels}" + raise ValueError(msg) - meta_cfg = MetaConfig.from_labels_annotations(labels) + meta_cfg = MetadataConfig.from_labels_annotations(labels) _save(service_name, metadata_path, meta_cfg) docker_compose_overwrite_cfg = ( - DockerComposeOverwriteCfg.create_default( + DockerComposeOverwriteConfig.create_default( service_name=meta_cfg.service_name() ) ) @@ -94,7 +100,7 @@ def main( ): """Creates osparc config from complete docker compose-spec""" # TODO: sync defaults among CLI commands - config_dir = from_spec_file.parent / ".osparc" + config_dir = from_spec_file.parent / OSPARC_CONFIG_DIRNAME project_cfg_path = config_dir / "docker-compose.overwrite.yml" meta_cfg_path = config_dir / "metadata.yml" runtime_cfg_path = config_dir / "runtime.yml" diff --git a/packages/service-integration/src/service_integration/commands/metadata.py b/packages/service-integration/src/service_integration/commands/metadata.py index 3e26c455c57..eb6e153b7f5 100644 --- a/packages/service-integration/src/service_integration/commands/metadata.py +++ b/packages/service-integration/src/service_integration/commands/metadata.py @@ -7,6 +7,7 @@ import yaml from models_library.services import ServiceDockerData +from ..osparc_config import OSPARC_CONFIG_DIRNAME from ..versioning import bump_version_string from ..yaml_utils import ordered_safe_dump, ordered_safe_load @@ -57,7 +58,7 @@ def get_version( TargetVersionChoices.SEMANTIC_VERSION ), metadata_file: Path = typer.Option( - ".osparc/metadata.yml", + f"{OSPARC_CONFIG_DIRNAME}/metadata.yml", help="The metadata yaml file", ), ): diff --git a/packages/service-integration/src/service_integration/commands/run_creator.py 
b/packages/service-integration/src/service_integration/commands/run_creator.py index cfcb6a6b5fb..3b08948eeec 100644 --- a/packages/service-integration/src/service_integration/commands/run_creator.py +++ b/packages/service-integration/src/service_integration/commands/run_creator.py @@ -4,6 +4,8 @@ import typer import yaml +from ..osparc_config import OSPARC_CONFIG_DIRNAME + def get_input_config(metadata_file: Path) -> dict: inputs = {} @@ -16,7 +18,7 @@ def get_input_config(metadata_file: Path) -> dict: def main( metadata_file: Path = typer.Option( - ".osparc/metadata.yml", + f"{OSPARC_CONFIG_DIRNAME}/metadata.yml", "--metadata", help="The metadata yaml of the node", ), @@ -50,19 +52,19 @@ def main( ] input_config = get_input_config(metadata_file) for input_key, input_value in input_config.items(): + input_key_upper = f"{input_key}".upper() + if "data:" in input_value["type"]: filename = input_key if "fileToKeyMap" in input_value and len(input_value["fileToKeyMap"]) > 0: filename, _ = next(iter(input_value["fileToKeyMap"].items())) - input_script.append( - f"{str(input_key).upper()}=$INPUT_FOLDER/{str(filename)}" - ) - input_script.append(f"export {str(input_key).upper()}") + input_script.append(f"{input_key_upper}=$INPUT_FOLDER/{filename}") + input_script.append(f"export {input_key_upper}") else: input_script.append( - f"{str(input_key).upper()}=$(< \"$json_input\" jq '.{input_key}')" + f"{input_key_upper}=$(< \"$json_input\" jq '.{input_key}')" ) - input_script.append(f"export {str(input_key).upper()}") + input_script.append(f"export {input_key_upper}") input_script.extend( [ diff --git a/packages/service-integration/src/service_integration/commands/test.py b/packages/service-integration/src/service_integration/commands/test.py index b08c5a85c03..3bf25551dc2 100644 --- a/packages/service-integration/src/service_integration/commands/test.py +++ b/packages/service-integration/src/service_integration/commands/test.py @@ -14,7 +14,8 @@ def main( """Runs tests against service directory""" if not service_dir.exists(): - raise typer.BadParameter("Invalid path to service directory") + msg = "Invalid path to service directory" + raise typer.BadParameter(msg) rich.print(f"Testing '{service_dir.resolve()}' ...") error_code = pytest_runner.main(service_dir=service_dir, extra_args=[]) diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index 33ebe3eebfc..e9a857edc1c 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -5,5 +5,5 @@ class ServiceIntegrationError(PydanticErrorMixin, RuntimeError): pass -class ConfigNotFound(ServiceIntegrationError): +class ConfigNotFoundError(ServiceIntegrationError): msg_template = "could not find any osparc config under {basedir}" diff --git a/packages/service-integration/src/service_integration/oci_image_spec.py b/packages/service-integration/src/service_integration/oci_image_spec.py index ba1cf3c8b77..e07a5e4cafc 100644 --- a/packages/service-integration/src/service_integration/oci_image_spec.py +++ b/packages/service-integration/src/service_integration/oci_image_spec.py @@ -37,6 +37,10 @@ } +def _underscore_as_dot(field_name: str): + return field_name.replace("_", ".") + + class OciImageSpecAnnotations(BaseModel): # TODO: review and polish constraints @@ -98,7 +102,7 @@ class OciImageSpecAnnotations(BaseModel): ) class Config: - alias_generator = lambda field_name: 
field_name.replace("_", ".") + alias_generator = _underscore_as_dot allow_population_by_field_name = True extra = Extra.forbid @@ -153,5 +157,4 @@ def to_oci_data(self) -> dict[str, Any]: set(self.__fields__.keys()) ) # nosec - oci_data = {_TO_OCI[key]: value for key, value in convertable_data.items()} - return oci_data + return {_TO_OCI[key]: value for key, value in convertable_data.items()} diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 514b9d26194..17c5f1d181f 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -8,13 +8,13 @@ - config should provide enough information about that context to allow - build an image - run a container - on a single command call. + on a single command call. - """ import logging from pathlib import Path -from typing import Any, ClassVar, Literal, NamedTuple +from typing import Any, Literal from models_library.callbacks_mapping import CallbacksMapping from models_library.service_settings_labels import ( @@ -43,13 +43,12 @@ from pydantic.main import BaseModel from .compose_spec_model import ComposeSpecification -from .errors import ConfigNotFound from .settings import AppSettings from .yaml_utils import yaml_safe_load _logger = logging.getLogger(__name__) -CONFIG_FOLDER_NAME = ".osparc" +OSPARC_CONFIG_DIRNAME = ".osparc" SERVICE_KEY_FORMATS = { @@ -58,20 +57,14 @@ } -## MODELS --------------------------------------------------------------------------------- -# -# Project config -> stored in repo's basedir/.osparc -# - - -class DockerComposeOverwriteCfg(ComposeSpecification): - """picks up configurations used to overwrite the docker-compuse output""" +class DockerComposeOverwriteConfig(ComposeSpecification): + """Content of docker-compose.overwrite.yml configuration file""" @classmethod def create_default( cls, service_name: str | None = None - ) -> "DockerComposeOverwriteCfg": - model: "DockerComposeOverwriteCfg" = cls.parse_obj( + ) -> "DockerComposeOverwriteConfig": + model: "DockerComposeOverwriteConfig" = cls.parse_obj( { "services": { service_name: { @@ -85,16 +78,17 @@ def create_default( return model @classmethod - def from_yaml(cls, path: Path) -> "DockerComposeOverwriteCfg": + def from_yaml(cls, path: Path) -> "DockerComposeOverwriteConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "DockerComposeOverwriteCfg" = cls.parse_obj(data) + model: "DockerComposeOverwriteConfig" = cls.parse_obj(data) return model -class MetaConfig(ServiceDockerData): - """Details about general info and I/O configuration of the service +class MetadataConfig(ServiceDockerData): + """Content of metadata.yml configuration file + Details about general info and I/O configuration of the service Necessary for both image- and runtime-spec """ @@ -109,18 +103,18 @@ def check_contact_in_authors(cls, v, values): return v @classmethod - def from_yaml(cls, path: Path) -> "MetaConfig": + def from_yaml(cls, path: Path) -> "MetadataConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "MetaConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.parse_obj(data) return model @classmethod - def from_labels_annotations(cls, labels: dict[str, str]) -> "MetaConfig": + def from_labels_annotations(cls, labels: dict[str, str]) -> "MetadataConfig": data = from_labels( labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - model: 
"MetaConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.parse_obj(data) return model def to_labels_annotations(self) -> dict[str, str]: @@ -194,7 +188,7 @@ class Config: allow_population_by_field_name = True -def _get_alias_generator(field_name: str) -> str: +def _underscore_as_minus(field_name: str) -> str: return field_name.replace("_", "-") @@ -240,7 +234,7 @@ def ensure_compatibility(cls, v): return v class Config: - alias_generator = _get_alias_generator + alias_generator = _underscore_as_minus allow_population_by_field_name = True extra = Extra.forbid @@ -261,55 +255,3 @@ def to_labels_annotations(self) -> dict[str, str]: prefix_key=OSPARC_LABEL_PREFIXES[1], ) return labels - - -## FILES ----------------------------------------------------------- - - -class ConfigFileDescriptor(NamedTuple): - glob_pattern: str - required: bool = True - - -class ConfigFilesStructure: - """ - Defines config file structure and how they - map to the models - """ - - FILES_GLOBS: ClassVar[dict] = { - DockerComposeOverwriteCfg.__name__: ConfigFileDescriptor( - glob_pattern="docker-compose.overwrite.y*ml", required=False - ), - MetaConfig.__name__: ConfigFileDescriptor(glob_pattern="metadata.y*ml"), - RuntimeConfig.__name__: ConfigFileDescriptor(glob_pattern="runtime.y*ml"), - } - - @staticmethod - def config_file_path(scope: Literal["user", "project"]) -> Path: - basedir = Path.cwd() # assumes project is in CWD - if scope == "user": - basedir = Path.home() - return basedir / ".osparc" / "service-integration.json" - - def search(self, start_dir: Path) -> dict[str, Path]: - """Tries to match of any of file layouts - and returns associated config files - """ - found = { - configtype: list(start_dir.rglob(pattern)) - for configtype, (pattern, required) in self.FILES_GLOBS.items() - if required - } - - if not found: - raise ConfigNotFound(basedir=start_dir) - - raise NotImplementedError("TODO") - - # TODO: - # scenarios: - # .osparc/meta, [runtime] - # .osparc/{service-name}/meta, [runtime] - - # metadata is required, runtime is optional? diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index ec7747ec9f5..df97e7c18b1 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -9,17 +9,17 @@ Service, ) -from .osparc_config import DockerComposeOverwriteCfg, MetaConfig, RuntimeConfig +from .osparc_config import DockerComposeOverwriteConfig, MetadataConfig, RuntimeConfig from .settings import AppSettings def create_image_spec( settings: AppSettings, - meta_cfg: MetaConfig, - docker_compose_overwrite_cfg: DockerComposeOverwriteCfg, + meta_cfg: MetadataConfig, + docker_compose_overwrite_cfg: DockerComposeOverwriteConfig, runtime_cfg: RuntimeConfig | None = None, *, - extra_labels: dict[str, str] = None, + extra_labels: dict[str, str] | None = None, **_context ) -> ComposeSpecification: """Creates the image-spec provided the osparc-config and a given context (e.g. 
development) @@ -46,10 +46,9 @@ def create_image_spec( ) build_spec = BuildItem(**overwrite_options) - compose_spec = ComposeSpecification( + return ComposeSpecification( version=settings.COMPOSE_VERSION, services={ service_name: Service(image=meta_cfg.image_name(settings), build=build_spec) }, ) - return compose_spec diff --git a/packages/service-integration/src/service_integration/osparc_runtime_specs.py b/packages/service-integration/src/service_integration/osparc_runtime_specs.py index a9ffc331f5f..56e33db0d79 100644 --- a/packages/service-integration/src/service_integration/osparc_runtime_specs.py +++ b/packages/service-integration/src/service_integration/osparc_runtime_specs.py @@ -5,6 +5,5 @@ # # -raise NotImplementedError( - "SEE prototype in packages/service-integration/tests/test_osparc_runtime_specs.py" -) +msg = "SEE prototype in packages/service-integration/tests/test_osparc_runtime_specs.py" +raise NotImplementedError(msg) diff --git a/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py b/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py index 854ba67782b..6b6f1ec19d7 100644 --- a/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py +++ b/packages/service-integration/src/service_integration/pytest_plugin/docker_integration.py @@ -8,10 +8,10 @@ import shutil import urllib.error import urllib.request +from collections.abc import Iterator from contextlib import suppress from pathlib import Path from pprint import pformat -from typing import Iterator import docker import jsonschema @@ -119,11 +119,10 @@ def host_folders(temporary_path: Path) -> dict: @pytest.fixture def container_variables() -> dict: # of type INPUT_FOLDER=/home/scu/data/input - env = { + return { f"{str(folder).upper()}_FOLDER": (_CONTAINER_FOLDER / folder).as_posix() for folder in _FOLDER_NAMES } - return env @pytest.fixture @@ -224,7 +223,7 @@ def assert_container_runs( list_of_files = [ x.name for x in validation_folders[folder].iterdir() - if not ".gitkeep" in x.name + if ".gitkeep" not in x.name ] for file_name in list_of_files: assert Path( @@ -244,14 +243,12 @@ def assert_container_runs( continue # test if the generated files are the ones expected list_of_files = [ - x.name for x in host_folders[folder].iterdir() if not ".gitkeep" in x.name + x.name for x in host_folders[folder].iterdir() if ".gitkeep" not in x.name ] for file_name in list_of_files: assert Path( validation_folders[folder] / file_name - ).exists(), "{} is not present in {}".format( - file_name, validation_folders[folder] - ) + ).exists(), f"{file_name} is not present in {validation_folders[folder]}" _, _, errors = filecmp.cmpfiles( host_folders[folder], validation_folders[folder], @@ -274,7 +271,7 @@ def assert_container_runs( for key, value in io_simcore_labels["outputs"].items(): assert "type" in value # rationale: files are on their own and other types are in inputs.json - if not "data:" in value["type"]: + if "data:" not in value["type"]: # check that keys are available assert key in output_cfg else: diff --git a/packages/service-integration/src/service_integration/pytest_plugin/folder_structure.py b/packages/service-integration/src/service_integration/pytest_plugin/folder_structure.py index dc1e57fbee4..47969490661 100644 --- a/packages/service-integration/src/service_integration/pytest_plugin/folder_structure.py +++ b/packages/service-integration/src/service_integration/pytest_plugin/folder_structure.py @@ -5,6 +5,8 @@ 
import pytest +from ..osparc_config import OSPARC_CONFIG_DIRNAME + @pytest.fixture(scope="session") def project_slug_dir(request: pytest.FixtureRequest) -> Path: @@ -15,7 +17,7 @@ def project_slug_dir(request: pytest.FixtureRequest) -> Path: assert isinstance(root_dir, Path) assert root_dir.exists() - assert any(root_dir.glob(".osparc")) + assert any(root_dir.glob(OSPARC_CONFIG_DIRNAME)) return root_dir diff --git a/packages/service-integration/src/service_integration/pytest_plugin/validation_data.py b/packages/service-integration/src/service_integration/pytest_plugin/validation_data.py index 4128b8004bf..e5cc87da4a2 100644 --- a/packages/service-integration/src/service_integration/pytest_plugin/validation_data.py +++ b/packages/service-integration/src/service_integration/pytest_plugin/validation_data.py @@ -3,8 +3,8 @@ # pylint: disable=unused-variable import json +from collections.abc import Iterator from pathlib import Path -from typing import Iterator import pytest import yaml @@ -63,7 +63,7 @@ def assert_validation_data_follows_definition( assert "type" in value # rationale: files are on their own and other types are in inputs.json - if not "data:" in value["type"]: + if "data:" not in value["type"]: # check that keys are available assert key in validation_cfg, f"missing {key} in validation config file" else: @@ -99,7 +99,7 @@ def assert_validation_data_follows_definition( "boolean": bool, "string": str, } - if not "data:" in label_cfg[key]["type"]: + if "data:" not in label_cfg[key]["type"]: # check the type is correct expected_type = label2types[label_cfg[key]["type"]] assert isinstance( diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index e93c33dbc0e..70c971c8db9 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -1,12 +1,10 @@ -from typing import Optional - from pydantic import BaseModel, BaseSettings, Field, SecretStr class Registry(BaseModel): url_or_prefix: str - user: Optional[str] = None - password: Optional[SecretStr] = None + user: str | None = None + password: SecretStr | None = None # NOTE: image names w/o a prefix default in dockerhub registry diff --git a/packages/service-integration/src/service_integration/versioning.py b/packages/service-integration/src/service_integration/versioning.py index f990e4633d8..3ed56868e50 100644 --- a/packages/service-integration/src/service_integration/versioning.py +++ b/packages/service-integration/src/service_integration/versioning.py @@ -1,6 +1,7 @@ import re from datetime import datetime -from typing import Pattern +from re import Pattern +from typing import Any, ClassVar from models_library.basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from packaging.version import Version @@ -20,7 +21,8 @@ def bump_version_string(current_version: str, bump: str) -> str: # CAN ONLY bump releases not pre/post/dev releases if version.is_devrelease or version.is_postrelease or version.is_prerelease: - raise NotImplementedError("Can only bump released versions") + msg = "Can only bump released versions" + raise NotImplementedError(msg) major, minor, patch = version.major, version.minor, version.micro if bump == "major": @@ -32,7 +34,6 @@ def bump_version_string(current_version: str, bump: str) -> str: return new_version -# TODO: from https://github.com/ITISFoundation/osparc-simcore/issues/2409 # ### versioning # a single version number does not 
suffice. Instead we should have a set of versions that describes "what is inside the container" # - service version (following semantic versioning): for the published service @@ -40,6 +41,7 @@ def bump_version_string(current_version: str, bump: str) -> str: # - executable name: the public name of the wrapped program (e.g. matlab) # - executable version: the version of the program (e.g. matlab 2020b) # - further libraries version dump (e.g. requirements.txt, etc) +# SEE from https://github.com/ITISFoundation/osparc-simcore/issues/2409 class ExecutableVersionInfo(BaseModel): @@ -51,7 +53,7 @@ class ExecutableVersionInfo(BaseModel): released: datetime class Config: - schema_extra = { + schema_extra: ClassVar[dict[str, Any]] = { "example": { "display_name": "SEMCAD X", "display_version": "Matterhorn Student Edition 1", @@ -71,7 +73,7 @@ class ServiceVersionInfo(BaseModel): released: datetime = Field(..., description="Publication/release date") class Config: - schema_extra = { + schema_extra: ClassVar[dict[str, Any]] = { "example": { "version": "1.0.0", # e.g. first time released as an osparc "integration_version": "2.1.0", diff --git a/packages/service-integration/tests/conftest.py b/packages/service-integration/tests/conftest.py index 2c210da61e6..07e4652b2ea 100644 --- a/packages/service-integration/tests/conftest.py +++ b/packages/service-integration/tests/conftest.py @@ -4,8 +4,8 @@ import shutil import sys +from collections.abc import Callable from pathlib import Path -from typing import Callable import pytest import service_integration @@ -18,7 +18,9 @@ "pytest_simcore.pytest_global_environs", ] -CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +_CURRENT_DIR = ( + Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +) @pytest.fixture(scope="session") @@ -30,24 +32,27 @@ def package_dir() -> Path: @pytest.fixture(scope="session") def tests_data_dir() -> Path: - pdir = CURRENT_DIR / "data" + pdir = _CURRENT_DIR / "data" assert pdir.exists() return pdir @pytest.fixture -def project_file_path(tests_data_dir, tmp_path) -> Path: +def docker_compose_overwrite_path(tests_data_dir, tmp_path) -> Path: + name = "docker-compose.overwrite.yml" dst = shutil.copy( - src=tests_data_dir / "docker-compose.overwrite.yml", - dst=tmp_path / "docker-compose.overwrite.yml", + src=tests_data_dir / name, + dst=tmp_path / name, ) return Path(dst) @pytest.fixture def metadata_file_path(tests_data_dir, tmp_path) -> Path: + name = "metadata.yml" dst = shutil.copy( - src=tests_data_dir / "metadata.yml", dst=tmp_path / "metadata.yml" + src=tests_data_dir / name, + dst=tmp_path / name, ) return Path(dst) diff --git a/packages/service-integration/tests/data/metadata.yml b/packages/service-integration/tests/data/metadata.yml index 46c3ee99a5e..0b9ffc61cd8 100644 --- a/packages/service-integration/tests/data/metadata.yml +++ b/packages/service-integration/tests/data/metadata.yml @@ -1,9 +1,11 @@ -name: oSparc Python Runner +name: Sim4Life Python Runner key: simcore/services/dynamic/osparc-python-runner type: computational integration-version: 1.0.0 version: 1.1.0 -description: oSparc Python Runner +version_display: "Sim4Life Release V7.2" +release_date: "2024-05-31T13:45:30" +description: Python Runner with Sim4Life contact: sylvain@foo.com authors: - name: Mr X diff --git a/packages/service-integration/tests/test__usecase_jupytermath.py b/packages/service-integration/tests/test__usecase_jupytermath.py index b1fb1cceb4f..e49f9b0512a 100644 --- 
a/packages/service-integration/tests/test__usecase_jupytermath.py +++ b/packages/service-integration/tests/test__usecase_jupytermath.py @@ -7,8 +7,9 @@ import os import shutil import subprocess +from collections.abc import Callable, Iterable from pathlib import Path -from typing import Any, Callable, Iterable +from typing import Any import pytest import yaml @@ -118,10 +119,9 @@ def compose_spec_reference(tests_data_dir: Path) -> dict[str, Any]: Digest: sha256:279a297b49f1fddb26289d205d4ba5acca1bb8e7bedadcfce00f821873935c03 Status: Downloaded newer image for itisfoundation/ci-service-integration-library:v1.0.1-dev-25 """ - compose_spec = yaml.safe_load( + return yaml.safe_load( (tests_data_dir / "docker-compose_jupyter-math_ad51f53.yml").read_text() ) - return compose_spec def test_ooil_compose_wo_arguments( diff --git a/packages/service-integration/tests/test_cli.py b/packages/service-integration/tests/test_cli.py index aa2844823c3..4eee418ec14 100644 --- a/packages/service-integration/tests/test_cli.py +++ b/packages/service-integration/tests/test_cli.py @@ -1,4 +1,4 @@ -from typing import Callable +from collections.abc import Callable from service_integration import __version__ diff --git a/packages/service-integration/tests/test_command_compose.py b/packages/service-integration/tests/test_command_compose.py index 562ae7c3708..371d8a9dbdc 100644 --- a/packages/service-integration/tests/test_command_compose.py +++ b/packages/service-integration/tests/test_command_compose.py @@ -3,31 +3,19 @@ # pylint: disable=unused-variable import os +from collections.abc import Callable from pathlib import Path -from typing import Callable -import pytest import yaml - - -@pytest.fixture -def compose_file_path(metadata_file_path: Path) -> Path: - # TODO: should pass with non-existing docker-compose-meta.yml file - compose_file_path: Path = metadata_file_path.parent / "docker-compose-meta.yml" - assert not compose_file_path.exists() - - # minimal - compose_file_path.write_text( - yaml.dump({"services": {"osparc-python-runner": {"build": {"labels": {}}}}}) - ) - return compose_file_path +from service_integration.compose_spec_model import ComposeSpecification +from service_integration.osparc_config import MetadataConfig def test_make_docker_compose_meta( run_program_with_args: Callable, - project_file_path: Path, + docker_compose_overwrite_path: Path, metadata_file_path: Path, - compose_file_path: Path, + tmp_path: Path, ): """ docker-compose-build.yml: $(metadata) simcore-service-integrator compose --metadata $< --to-spec-file $@ """ + target_compose_specs = tmp_path / "docker-compose.yml" + metadata_cfg = MetadataConfig.from_yaml(metadata_file_path) + result = run_program_with_args( "compose", "--metadata", str(metadata_file_path), "--to-spec-file", target_compose_specs, ) assert result.exit_code == os.EX_OK, result.output - assert compose_file_path.exists() + # produces a compose spec + assert target_compose_specs.exists() - compose_cfg = yaml.safe_load(compose_file_path.read_text()) - metadata_cfg = yaml.safe_load(metadata_file_path.read_text()) + # valid compose specs + compose_cfg = ComposeSpecification.parse_obj( + yaml.safe_load(target_compose_specs.read_text()) + ) + assert compose_cfg.services - # TODO: compare labels vs metadata - service_name = metadata_cfg["key"].split("/")[-1] - compose_labels = compose_cfg["services"][service_name]["build"]["labels"] + # compose labels vs metadata file + compose_labels = 
compose_cfg.services[metadata_cfg.service_name()].build.labels assert compose_labels - # schema of expected + assert isinstance(compose_labels.__root__, dict) - # deserialize content and should fit metadata_cfg + assert ( + MetadataConfig.from_labels_annotations(compose_labels.__root__) == metadata_cfg + ) diff --git a/packages/service-integration/tests/test_command_config.py b/packages/service-integration/tests/test_command_config.py index ea2b984aafe..08967ba63e6 100644 --- a/packages/service-integration/tests/test_command_config.py +++ b/packages/service-integration/tests/test_command_config.py @@ -3,11 +3,12 @@ # pylint: disable=unused-variable import os import shutil +from collections.abc import Callable from pathlib import Path -from typing import Callable import pytest import yaml +from service_integration.osparc_config import OSPARC_CONFIG_DIRNAME @pytest.fixture @@ -21,7 +22,7 @@ def tmp_compose_spec(tests_data_dir: Path, tmp_path: Path): def test_create_new_osparc_config( run_program_with_args: Callable, tmp_compose_spec: Path ): - osparc_dir = tmp_compose_spec.parent / ".osparc" + osparc_dir = tmp_compose_spec.parent / OSPARC_CONFIG_DIRNAME assert not osparc_dir.exists() result = run_program_with_args( diff --git a/packages/service-integration/tests/test_command_metadata.py b/packages/service-integration/tests/test_command_metadata.py index 97376a5e633..24073dcbc42 100644 --- a/packages/service-integration/tests/test_command_metadata.py +++ b/packages/service-integration/tests/test_command_metadata.py @@ -3,8 +3,8 @@ # pylint: disable=unused-variable import os +from collections.abc import Callable from pathlib import Path -from typing import Callable import pytest import yaml diff --git a/packages/service-integration/tests/test_labels_annotations.py b/packages/service-integration/tests/test_labels_annotations.py index 708e120e973..f92b9e75c0e 100644 --- a/packages/service-integration/tests/test_labels_annotations.py +++ b/packages/service-integration/tests/test_labels_annotations.py @@ -3,7 +3,6 @@ # pylint: disable=unused-variable from pathlib import Path -from pprint import pprint from typing import Any import pytest @@ -20,14 +19,13 @@ def metadata_config(tests_data_dir: Path): return config -@pytest.mark.parametrize("trim_key_head", (True, False)) +@pytest.mark.parametrize("trim_key_head", [True, False]) def test_to_and_from_labels(metadata_config: dict[str, Any], trim_key_head: bool): metadata_labels = to_labels( metadata_config, prefix_key="swiss.itisfoundation", trim_key_head=trim_key_head ) print(f"\n{trim_key_head=:*^100}") - pprint(metadata_labels) assert all(key.startswith("swiss.itisfoundation.") for key in metadata_labels) diff --git a/packages/service-integration/tests/test_oci_image_spec.py b/packages/service-integration/tests/test_oci_image_spec.py index 4207c005199..ef2bd8b47d9 100644 --- a/packages/service-integration/tests/test_oci_image_spec.py +++ b/packages/service-integration/tests/test_oci_image_spec.py @@ -8,7 +8,7 @@ LabelSchemaAnnotations, OciImageSpecAnnotations, ) -from service_integration.osparc_config import MetaConfig +from service_integration.osparc_config import MetadataConfig def test_label_schema_to_oci_conversion(monkeypatch): @@ -27,10 +27,10 @@ def test_create_annotations_from_metadata(tests_data_dir: Path): # recover from docker labels # - meta_cfg = MetaConfig.from_yaml(tests_data_dir / "metadata.yml") + meta_cfg = MetadataConfig.from_yaml(tests_data_dir / "metadata.yml") # map io_spec to OCI image-spec - oic_image_spec = 
OciImageSpecAnnotations( + OciImageSpecAnnotations( authors=", ".join([f"{a.name} ({a.email})" for a in meta_cfg.authors]) ) diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index cddc93d9e9b..e993bc25392 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -10,7 +10,11 @@ import pytest import yaml from models_library.service_settings_labels import SimcoreServiceSettingLabelEntry -from service_integration.osparc_config import MetaConfig, RuntimeConfig, SettingsItem +from service_integration.osparc_config import ( + MetadataConfig, + RuntimeConfig, + SettingsItem, +) @pytest.fixture @@ -44,7 +48,7 @@ def labels(tests_data_dir: Path, labels_fixture_name: str) -> dict[str, str]: def test_load_from_labels( labels: dict[str, str], labels_fixture_name: str, tmp_path: Path ): - meta_cfg = MetaConfig.from_labels_annotations(labels) + meta_cfg = MetadataConfig.from_labels_annotations(labels) runtime_cfg = RuntimeConfig.from_labels_annotations(labels) assert runtime_cfg.callbacks_mapping is not None @@ -56,7 +60,7 @@ def test_load_from_labels( config_path = ( tmp_path / f"{model.__class__.__name__.lower()}-{labels_fixture_name}.yml" ) - with open(config_path, "wt") as fh: + with open(config_path, "w") as fh: data = json.loads( model.json(exclude_unset=True, by_alias=True, exclude_none=True) ) diff --git a/packages/service-integration/tests/test_osparc_image_specs.py b/packages/service-integration/tests/test_osparc_image_specs.py index b3ee3ada466..f5777ada3f2 100644 --- a/packages/service-integration/tests/test_osparc_image_specs.py +++ b/packages/service-integration/tests/test_osparc_image_specs.py @@ -9,8 +9,8 @@ from pydantic import BaseModel from service_integration.compose_spec_model import BuildItem, Service from service_integration.osparc_config import ( - DockerComposeOverwriteCfg, - MetaConfig, + DockerComposeOverwriteConfig, + MetadataConfig, RuntimeConfig, ) from service_integration.osparc_image_specs import create_image_spec @@ -27,10 +27,10 @@ def test_create_image_spec_impl(tests_data_dir: Path, settings: AppSettings): # image-spec for devel, prod, ... 
# load & parse osparc configs - docker_compose_overwrite_cfg = DockerComposeOverwriteCfg.from_yaml( + docker_compose_overwrite_cfg = DockerComposeOverwriteConfig.from_yaml( tests_data_dir / "docker-compose.overwrite.yml" ) - meta_cfg = MetaConfig.from_yaml(tests_data_dir / "metadata-dynamic.yml") + meta_cfg = MetadataConfig.from_yaml(tests_data_dir / "metadata-dynamic.yml") runtime_cfg = RuntimeConfig.from_yaml(tests_data_dir / "runtime.yml") assert runtime_cfg.callbacks_mapping is not None diff --git a/packages/service-integration/tests/test_osparc_runtime_specs.py b/packages/service-integration/tests/test_osparc_runtime_specs.py index f7cd59f8ec7..74d63e15e5b 100644 --- a/packages/service-integration/tests/test_osparc_runtime_specs.py +++ b/packages/service-integration/tests/test_osparc_runtime_specs.py @@ -85,7 +85,7 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): data["deploy"] = {"placement": {"constraints": item.value}} else: - assert False, item + raise AssertionError(item) print(Service(**data).json(exclude_unset=True, indent=2)) diff --git a/packages/service-integration/tests/test_versioning.py b/packages/service-integration/tests/test_versioning.py index a4172e62461..01c36e49082 100644 --- a/packages/service-integration/tests/test_versioning.py +++ b/packages/service-integration/tests/test_versioning.py @@ -20,13 +20,11 @@ def test_pep404_compare_versions(): assert Version("0.6a9dev") < Version("0.6a9") # same release but one is pre-release - assert ( - Version("2.1-rc2").release == Version("2.1").release - and Version("2.1-rc2").is_prerelease - ) + assert Version("2.1-rc2").release == Version("2.1").release + assert Version("2.1-rc2").is_prerelease -BUMP_PARAMS = [ +_BUMP_PARAMS = [ # "upgrade,current_version,new_version", ("patch", "1.1.1", "1.1.2"), ("minor", "1.1.1", "1.2.0"), @@ -36,7 +34,7 @@ def test_pep404_compare_versions(): @pytest.mark.parametrize( "bump,current_version,new_version", - BUMP_PARAMS, + _BUMP_PARAMS, ) def test_bump_version_string( bump: str, @@ -48,7 +46,7 @@ def test_bump_version_string( @pytest.mark.parametrize( "model_cls", - (ExecutableVersionInfo, ServiceVersionInfo), + [ExecutableVersionInfo, ServiceVersionInfo], ) def test_version_info_model_examples(model_cls, model_cls_examples): for name, example in model_cls_examples.items(): diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index 7f56eac79ad..db57f42dc97 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -313,8 +313,8 @@ async def test_run_solver_job( example = next( e - for e in ServiceDockerData.Config.schema_extra["examples"][::-1] - if "boot" in e["description"] + for e in ServiceDockerData.Config.schema_extra["examples"] + if "boot-options" in e ) mocked_catalog_service_api.get( diff --git a/services/web/server/VERSION b/services/web/server/VERSION index e373c4adece..f57373a053b 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.40.3 +0.40.4 diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index 55164b0810f..d660bde7d6f 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.40.3 +current_version = 0.40.4 commit = True message = services/webserver api version: {current_version} → {new_version} tag = 
False diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 734c8b55102..1d65163f669 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.2 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.40.3 + version: 0.40.4 servers: - url: '' description: webserver @@ -7667,6 +7667,13 @@ components: title: Inputs type: object description: values of input properties + inputsRequired: + title: Inputsrequired + type: array + items: + pattern: ^[-_a-zA-Z0-9]+$ + type: string + description: Defines inputs that are required in order to run the service inputsUnits: title: Inputsunits type: object @@ -7905,6 +7912,12 @@ components: inputs: title: Inputs type: object + inputsRequired: + title: Inputsrequired + type: array + items: + pattern: ^[-_a-zA-Z0-9]+$ + type: string inputNodes: title: Inputnodes type: array @@ -11441,15 +11454,27 @@ components: pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ type: string description: service version number + versionDisplay: + title: Versiondisplay + type: string + description: A user-friendly or marketing name for the release. This can + be used to reference the release in a more readable and recognizable format, + such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' This + name is not used for version comparison but is useful for communication + and documentation purposes. + releaseDate: + title: Releasedate + type: string + description: The date when the specific version of the service was released. + This field helps in tracking the timeline of releases and understanding + the sequence of updates. The date should be formatted in YYYY-MM-DD format + for consistency and easy sorting. + format: date integration-version: title: Integration-Version pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ type: string - description: integration version number - progress_regexp: - title: Progress Regexp - type: string - description: regexp pattern for detecting computational service's progress + description: Defines which version of the integration workflow should be used type: allOf: - $ref: '#/components/schemas/ServiceType' @@ -11489,6 +11514,10 @@ components: type: integer description: The number of 'data type inputs' displayed by default in the UI. When None all 'data type inputs' are displayed. 
+ progress_regexp: + title: Progress Regexp + type: string + description: regexp pattern for detecting computational service's progress owner: title: Owner type: string @@ -11496,7 +11525,8 @@ components: description: 'Static metadata for a service injected in the image labels - This is one to one with node-meta-v0.0.1.json' + NOTE: This model is serialized in .osparc/metadata.yml and in the labels of the + docker image' example: name: File Picker description: description From b77814ac1ded8f8ad89ff72c43fc4434e5612a5e Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Mon, 3 Jun 2024 10:10:22 +0200 Subject: [PATCH 007/219] =?UTF-8?q?=E2=99=BB=EF=B8=8FEnsure=20parent=20pro?= =?UTF-8?q?ject/node=20is=20well=20structured=20in=20the=20DB=20?= =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20(#5874)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/specs/web-server/_projects_crud.py | 16 +- ...85bd35bdaa_add_optional_project_parents.py | 70 ++++ .../models/projects_metadata.py | 60 ++- .../utils_projects_metadata.py | 172 +++++++- .../utils_projects_nodes.py | 60 +-- packages/postgres-database/tests/conftest.py | 28 +- .../tests/test_utils_projects_metadata.py | 321 ++++++++++++++- .../tests/test_utils_projects_nodes.py | 90 +++-- .../pytest_simcore/helpers/utils_projects.py | 12 +- .../helpers/utils_webserver_unit_with_db.py | 14 + .../servicelib/aiohttp/requests_validation.py | 15 + .../src/servicelib/common_headers.py | 2 + .../tests/aiohttp/test_requests_validation.py | 78 +++- .../api/routes/computations.py | 38 +- .../db/repositories/projects_metadata.py | 18 +- .../test_modules_db_repositories_projects.py | 4 +- .../api/v0/openapi.yaml | 48 ++- .../projects/_crud_api_create.py | 24 +- .../projects/_crud_handlers.py | 12 +- .../projects/_crud_handlers_models.py | 48 ++- .../projects/_metadata_api.py | 80 +++- .../projects/_metadata_db.py | 133 ++++++- .../projects/_metadata_handlers.py | 38 +- .../projects/exceptions.py | 18 + .../projects/projects_api.py | 6 +- services/web/server/tests/conftest.py | 47 ++- .../test_studies_dispatcher_studies_access.py | 2 - .../server/tests/unit/with_dbs/02/conftest.py | 2 - .../02/test_projects_crud_handlers.py | 17 +- .../02/test_projects_crud_handlers__clone.py | 3 +- .../02/test_projects_metadata_handlers.py | 366 +++++++++++++++++- .../tests/unit/with_dbs/03/tags/conftest.py | 1 - .../unit/with_dbs/03/wallets/conftest.py | 1 - .../server/tests/unit/with_dbs/conftest.py | 2 +- 34 files changed, 1634 insertions(+), 212 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/0d85bd35bdaa_add_optional_project_parents.py diff --git a/api/specs/web-server/_projects_crud.py b/api/specs/web-server/_projects_crud.py index f0cd8916d01..1b6030cc061 100644 --- a/api/specs/web-server/_projects_crud.py +++ b/api/specs/web-server/_projects_crud.py @@ -11,7 +11,7 @@ from typing import Annotated -from fastapi import APIRouter, Depends, Query, status +from fastapi import APIRouter, Depends, Header, Query, status from models_library.api_schemas_directorv2.dynamic_services import ( GetProjectInactivityResponse, ) @@ -26,6 +26,7 @@ ) from models_library.generics import Envelope from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.rest_pagination import Page from pydantic import Json from simcore_service_webserver._meta import API_VTAG @@ -50,6 +51,19 @@ async def create_project( 
_params: Annotated[ProjectCreateParams, Depends()], _create: ProjectCreateNew | ProjectCopyOverride, + x_simcore_user_agent: Annotated[str | None, Header()] = "undefined", + x_simcore_parent_project_uuid: Annotated[ + ProjectID | None, + Header( + description="Optionally sets a parent project UUID (both project and node must be set)", + ), + ] = None, + x_simcore_parent_node_id: Annotated[ + NodeID | None, + Header( + description="Optionally sets a parent node ID (both project and node must be set)", + ), + ] = None, ): ... diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/0d85bd35bdaa_add_optional_project_parents.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/0d85bd35bdaa_add_optional_project_parents.py new file mode 100644 index 00000000000..10a59cd6f27 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/0d85bd35bdaa_add_optional_project_parents.py @@ -0,0 +1,70 @@ +"""add_optional_project_parents + +Revision ID: 0d85bd35bdaa +Revises: baf0ee1c37dc +Create Date: 2024-05-30 09:37:39.234834+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "0d85bd35bdaa" +down_revision = "baf0ee1c37dc" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects_metadata", + sa.Column("parent_project_uuid", sa.String(), nullable=True), + ) + op.add_column( + "projects_metadata", sa.Column("parent_node_id", sa.String(), nullable=True) + ) + op.add_column( + "projects_metadata", + sa.Column("root_parent_project_uuid", sa.String(), nullable=True), + ) + op.add_column( + "projects_metadata", + sa.Column("root_parent_node_id", sa.String(), nullable=True), + ) + op.create_foreign_key( + "fk_projects_metadata_parent_node_id", + "projects_metadata", + "projects_nodes", + ["parent_project_uuid", "parent_node_id"], + ["project_uuid", "node_id"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + op.create_foreign_key( + "fk_projects_metadata_root_parent_node_id", + "projects_metadata", + "projects_nodes", + ["root_parent_project_uuid", "root_parent_node_id"], + ["project_uuid", "node_id"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_constraint(
+        "fk_projects_metadata_root_parent_node_id",
+        "projects_metadata",
+        type_="foreignkey",
+    )
+    op.drop_constraint(
+        "fk_projects_metadata_parent_node_id", "projects_metadata", type_="foreignkey"
+    )
+    op.drop_column("projects_metadata", "root_parent_node_id")
+    op.drop_column("projects_metadata", "root_parent_project_uuid")
+    op.drop_column("projects_metadata", "parent_node_id")
+    op.drop_column("projects_metadata", "parent_project_uuid")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
index 2a450e5a80c..b2d2d4f640a 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/projects_metadata.py
@@ -13,18 +13,28 @@
 )
 from .base import metadata
 from .projects import projects
+from .projects_nodes import projects_nodes
 
 projects_metadata = sa.Table(
     "projects_metadata",
     #
     # Keeps "third-party" metadata attached to a project
     #
-    # These SHOULD NOT be actual properties of the project (e.g. uuid, name etc)
-    # but rather information attached by third-parties that "decorate" or qualify
-    # a project resource
+    # CUSTOM metadata:
+    #   These SHOULD NOT be actual properties of the project (e.g. uuid, name etc)
+    #   but rather information attached by third parties that "decorate" or qualify
+    #   a project resource
     #
-    # Things like 'stars', 'quality', 'classifiers', 'dev', etc (or any kind of stats)
-    # should be moved here
+    # project genealogy:
+    #   a project might be created via the public API, in which case it is created
+    #   1. directly, as usual
+    #   2. via a parent project/node combination (think of a jupyter/sim4life job creating a bunch of jobs)
+    #   3. via a parent project/node that itself ran as a computation ("3rd generation" project; there is no limit to the number of generations)
+    #
+    # in cases 2. and 3. the parent_project_uuid is the direct parent project and parent_node_id is the direct parent node, since
+    # a specific node is defined by a project AND a node (node IDs alone are not unique)
+    #
+    # in cases 2. and 3. the root_parent_project_uuid is the very first parent project, and root_parent_node_id is the very first parent node
     #
     metadata,
     sa.Column(
@@ -38,7 +48,7 @@
         ),
         nullable=False,
         primary_key=True,
-        doc="The project unique identifier is also used to identify the associated job",
+        doc="The project unique identifier",
     ),
     sa.Column(
         "custom",
@@ -47,10 +57,48 @@
         server_default=sa.text("'{}'::jsonb"),
         doc="Reserved for the user to store custom metadata",
     ),
+    sa.Column(
+        "parent_project_uuid",
+        sa.String,
+        nullable=True,
+        doc="If applicable, the parent project UUID of this project (the node that ran the public API to start this project_uuid lives in a project with UUID parent_project_uuid)",
+    ),
+    sa.Column(
+        "parent_node_id",
+        sa.String,
+        nullable=True,
+        doc="If applicable, the parent node ID of this project (the node that ran the public API to start this project_uuid lives in a node with ID parent_node_id)",
+    ),
+    sa.Column(
+        "root_parent_project_uuid",
+        sa.String,
+        nullable=True,
+        doc="If applicable, the root parent project UUID of this project (the root project UUID in which the root node created the very first child)",
+    ),
+    sa.Column(
+        "root_parent_node_id",
+        sa.String,
+        nullable=True,
+        doc="If applicable, the root parent node ID of this project (the root node ID of the node that created the very first child)",
+    ),
     # TIME STAMPS ----
     column_created_datetime(timezone=True),
     column_modified_datetime(timezone=True),
     sa.PrimaryKeyConstraint("project_uuid"),
+    sa.ForeignKeyConstraint(
+        ("parent_project_uuid", "parent_node_id"),
+        (projects_nodes.c.project_uuid, projects_nodes.c.node_id),
+        onupdate="CASCADE",
+        ondelete="SET NULL",
+        name="fk_projects_metadata_parent_node_id",
+    ),
+    sa.ForeignKeyConstraint(
+        ("root_parent_project_uuid", "root_parent_node_id"),
+        (projects_nodes.c.project_uuid, projects_nodes.c.node_id),
+        onupdate="CASCADE",
+        ondelete="SET NULL",
+        name="fk_projects_metadata_root_parent_node_id",
+    ),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py
index dafca9ffdad..6d72bd43e3c 100644
--- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py
@@ -1,25 +1,43 @@
 import datetime
 import uuid
-from dataclasses import dataclass
 from typing import Any
 
 import sqlalchemy as sa
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import ResultProxy, RowProxy
-from simcore_postgres_database.models.projects_metadata import projects_metadata
+from pydantic import BaseModel
+from pydantic.errors import PydanticErrorMixin
 from sqlalchemy.dialects.postgresql import insert as pg_insert
 
 from .errors import ForeignKeyViolation
 from .models.projects import projects
-from .utils_models import FromRowMixin
+from .models.projects_metadata import projects_metadata
 
 
 #
 # Errors
 #
-class DBProjectNotFoundError(Exception):
-    ...
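# NOTE: the error classes below rely on pydantic v1's PydanticErrorMixin,
# which interpolates the keyword arguments passed to the constructor into
# msg_template. A minimal sketch of that behavior (the uuid value is
# illustrative only):
#
#     err = DBProjectNotFoundError(project_uuid="7debcf3e-...")
#     print(f"{err}")  # -> Project project_uuid='7debcf3e-...' not found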
+class BaseProjectsMetadataError(PydanticErrorMixin, RuntimeError):
+    msg_template: str = "Project metadata unexpected error"
+
+
+class DBProjectNotFoundError(BaseProjectsMetadataError):
+    msg_template: str = "Project project_uuid={project_uuid!r} not found"
+
+
+class DBProjectInvalidAncestorsError(BaseProjectsMetadataError):
+    msg_template: str = (
+        "Invalid ancestors given (parent project and parent node must both be set or both be None)"
+    )
+
+
+class DBProjectInvalidParentProjectError(BaseProjectsMetadataError):
+    msg_template: str = "Project project_uuid={project_uuid!r} has invalid parent project uuid={parent_project_uuid!r}"
+
+
+class DBProjectInvalidParentNodeError(BaseProjectsMetadataError):
+    msg_template: str = "Project project_uuid={project_uuid!r} has invalid parent node id={parent_node_id!r}"
 
 
 #
@@ -27,11 +45,18 @@ class DBProjectNotFoundError(Exception):
 #
 
 
-@dataclass(frozen=True, slots=True, kw_only=True)
-class ProjectMetadata(FromRowMixin):
+class ProjectMetadata(BaseModel):
     custom: dict[str, Any] | None
     created: datetime.datetime | None
     modified: datetime.datetime | None
+    parent_project_uuid: uuid.UUID | None
+    parent_node_id: uuid.UUID | None
+    root_parent_project_uuid: uuid.UUID | None
+    root_parent_node_id: uuid.UUID | None
+
+    class Config:
+        frozen = True
+        orm_mode = True
 
 
 #
@@ -47,6 +72,10 @@ async def get(connection: SAConnection, project_uuid: uuid.UUID) -> ProjectMetad
         projects_metadata.c.custom,
         projects_metadata.c.created,
         projects_metadata.c.modified,
+        projects_metadata.c.parent_project_uuid,
+        projects_metadata.c.parent_node_id,
+        projects_metadata.c.root_parent_project_uuid,
+        projects_metadata.c.root_parent_node_id,
     )
     .select_from(
         sa.join(
@@ -61,12 +90,129 @@ async def get(connection: SAConnection, project_uuid: uuid.UUID) -> ProjectMetad
     result: ResultProxy = await connection.execute(get_stmt)
     row: RowProxy | None = await result.first()
     if row is None:
-        msg = f"Project project_uuid={project_uuid!r} not found"
-        raise DBProjectNotFoundError(msg)
-    return ProjectMetadata.from_row(row)
+        raise DBProjectNotFoundError(project_uuid=project_uuid)
+    return ProjectMetadata.from_orm(row)
+
+
+def _check_valid_ancestors_combination(
+    project_uuid: uuid.UUID,
+    parent_project_uuid: uuid.UUID | None,
+    parent_node_id: uuid.UUID | None,
+) -> None:
+    if project_uuid == parent_project_uuid:
+        raise DBProjectInvalidAncestorsError
+    if parent_project_uuid is not None and parent_node_id is None:
+        raise DBProjectInvalidAncestorsError
+    if parent_project_uuid is None and parent_node_id is not None:
+        raise DBProjectInvalidAncestorsError
+
+
+async def _project_has_any_child(
+    connection: SAConnection, project_uuid: uuid.UUID
+) -> bool:
+    get_stmt = sa.select(projects_metadata.c.project_uuid).where(
+        projects_metadata.c.parent_project_uuid == f"{project_uuid}"
+    )
+    if await connection.scalar(get_stmt) is not None:
+        return True
+    return False
+
+
+async def _compute_root_parent_from_parent(
+    connection: SAConnection,
+    *,
+    project_uuid: uuid.UUID,
+    parent_project_uuid: uuid.UUID | None,
+    parent_node_id: uuid.UUID | None,
+) -> tuple[uuid.UUID | None, uuid.UUID | None]:
+    if parent_project_uuid is None and parent_node_id is None:
+        return None, None
+
+    try:
+        assert parent_project_uuid is not None  # nosec
+        parent_project_metadata = await get(connection, parent_project_uuid)
+        if parent_project_metadata.root_parent_project_uuid is not None:
+            assert parent_project_metadata.root_parent_node_id is not None  # nosec
+            return (
parent_project_metadata.root_parent_project_uuid, + parent_project_metadata.root_parent_node_id, + ) + # that means this is the root already + return parent_project_uuid, parent_node_id + except DBProjectNotFoundError as err: + raise DBProjectInvalidParentProjectError( + project_uuid=project_uuid, parent_project_uuid=parent_project_uuid + ) from err + + +async def set_project_ancestors( + connection: SAConnection, + *, + project_uuid: uuid.UUID, + parent_project_uuid: uuid.UUID | None, + parent_node_id: uuid.UUID | None, +) -> ProjectMetadata: + """ + Raises: + NotImplementedError: if you touch ancestry of a project that has children + DBProjectInvalidAncestorsError: if you pass invalid parents + DBProjectInvalidParentProjectError: the parent_project_uuid is invalid + DBProjectInvalidParentNodeError: the parent_node_ID is invalid + DBProjectNotFoundError: the project_uuid is not found + """ + _check_valid_ancestors_combination( + project_uuid, parent_project_uuid, parent_node_id + ) + if await _project_has_any_child(connection, project_uuid): + msg = "Cannot set ancestors for a project with children" + raise NotImplementedError(msg) + ( + root_parent_project_uuid, + root_parent_node_id, + ) = await _compute_root_parent_from_parent( + connection, + project_uuid=project_uuid, + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, + ) + data = { + "project_uuid": f"{project_uuid}", + "parent_project_uuid": ( + f"{parent_project_uuid}" if parent_project_uuid is not None else None + ), + "parent_node_id": f"{parent_node_id}" if parent_node_id is not None else None, + "root_parent_project_uuid": ( + f"{root_parent_project_uuid}" + if root_parent_project_uuid is not None + else None + ), + "root_parent_node_id": ( + f"{root_parent_node_id}" if root_parent_node_id is not None else None + ), + } + insert_stmt = pg_insert(projects_metadata).values(**data) + upsert_stmt = insert_stmt.on_conflict_do_update( + index_elements=[projects_metadata.c.project_uuid], + set_=data, + ).returning(sa.literal_column("*")) + + try: + result: ResultProxy = await connection.execute(upsert_stmt) + row: RowProxy | None = await result.first() + assert row # nosec + return ProjectMetadata.from_orm(row) + + except ForeignKeyViolation as err: + assert err.pgerror is not None # nosec # noqa: PT017 + if "fk_projects_metadata_parent_node_id" in err.pgerror: + raise DBProjectInvalidParentNodeError( + project_uuid=project_uuid, parent_node_id=parent_node_id + ) from err + + raise DBProjectNotFoundError(project_uuid=project_uuid) from err -async def upsert( +async def set_project_custom_metadata( connection: SAConnection, *, project_uuid: uuid.UUID, @@ -86,7 +232,7 @@ async def upsert( result: ResultProxy = await connection.execute(upsert_stmt) row: RowProxy | None = await result.first() assert row # nosec - return ProjectMetadata.from_row(row) + return ProjectMetadata.from_orm(row) except ForeignKeyViolation as err: - raise DBProjectNotFoundError(project_uuid) from err + raise DBProjectNotFoundError(project_uuid=project_uuid) from err diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 6ce53275999..40a3726bab0 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -6,6 +6,7 @@ import sqlalchemy from aiopg.sa.connection import SAConnection from pydantic 
import BaseModel, Field
+from pydantic.errors import PydanticErrorMixin
 from sqlalchemy.dialects.postgresql import insert as pg_insert
 
 from .errors import ForeignKeyViolation, UniqueViolation
@@ -16,24 +17,24 @@
 #
 # Errors
 #
-class BaseProjectNodesError(Exception):
-    ...
+class BaseProjectNodesError(PydanticErrorMixin, RuntimeError):
+    msg_template: str = "Project nodes unexpected error"
 
 
-class ProjectNodesProjectNotFound(BaseProjectNodesError):
-    ...
+class ProjectNodesProjectNotFoundError(BaseProjectNodesError):
+    msg_template: str = "Project {project_uuid} not found"
 
 
-class ProjectNodesNodeNotFound(BaseProjectNodesError):
-    ...
+class ProjectNodesNodeNotFoundError(BaseProjectNodesError):
+    msg_template: str = "Node {node_id!r} from project {project_uuid!r} not found"
 
 
-class ProjectNodesOperationNotAllowed(BaseProjectNodesError):
-    ...
+class ProjectNodesNonUniqueNodeFoundError(BaseProjectNodesError):
+    msg_template: str = "Multiple projects found containing node {node_id}. TIP: the same node ID appearing in several projects indicates misuse."
 
 
-class ProjectNodesDuplicateNode(BaseProjectNodesError):
-    ...
+class ProjectNodesDuplicateNodeError(BaseProjectNodesError):
+    msg_template: str = "Project node already exists; the same node cannot appear twice in the same project."
 
 
 class ProjectNodeCreate(BaseModel):
@@ -106,14 +107,14 @@ async def add(
         return [ProjectNode.from_orm(r) for r in rows]
     except ForeignKeyViolation as exc:
         # this happens when the project does not exist, as we first check the node exists
-        msg = f"Project {self.project_uuid} not found"
-        raise ProjectNodesProjectNotFound(msg) from exc
+        raise ProjectNodesProjectNotFoundError(
+            project_uuid=self.project_uuid
+        ) from exc
     except UniqueViolation as exc:
         # this happens if the node already exists on creation
-        msg = f"Project node already exists: {exc}"
-        raise ProjectNodesDuplicateNode(msg) from exc
+        raise ProjectNodesDuplicateNodeError from exc
 
-    async def list(self, connection: SAConnection) -> list[ProjectNode]:  # noqa: A003
+    async def list(self, connection: SAConnection) -> list[ProjectNode]:
         """list the nodes in the current project
 
         NOTE: Do not use this in an asyncio.gather call as this will fail!
@@ -151,8 +152,9 @@ async def get(self, connection: SAConnection, *, node_id: uuid.UUID) -> ProjectN
         assert result  # nosec
         row = await result.first()
         if row is None:
-            msg = f"Node with {node_id} not found"
-            raise ProjectNodesNodeNotFound(msg)
+            raise ProjectNodesNodeNotFoundError(
+                project_uuid=self.project_uuid, node_id=node_id
+            )
         assert row  # nosec
         return ProjectNode.from_orm(row)
 
@@ -180,8 +182,9 @@ async def update(
         result = await connection.execute(update_stmt)
         row = await result.first()
         if not row:
-            msg = f"Node with {node_id} not found"
-            raise ProjectNodesNodeNotFound(msg)
+            raise ProjectNodesNodeNotFoundError(
+                project_uuid=self.project_uuid, node_id=node_id
+            )
         assert row  # nosec
         return ProjectNode.from_orm(row)
 
@@ -268,14 +271,21 @@ async def get_project_id_from_node_id(
     connection: SAConnection, *, node_id: uuid.UUID
 ) -> uuid.UUID:
     """
+    WARNING: this function should not be used! It has a flaw: a node ID is not
+    unique, so there can be more than one project linked to it.
+    TODO: return project IDs? and adapt code
+
     Raises:
-        ProjectNodesNodeNotFound:
+        ProjectNodesNodeNotFoundError: if no node_id found
+        ProjectNodesNonUniqueNodeFoundError: there are multiple projects that contain that node
     """
     get_stmt = sqlalchemy.select(projects_nodes.c.project_uuid).where(
         projects_nodes.c.node_id == f"{node_id}"
     )
-    project_id = await connection.scalar(get_stmt)
-    if project_id is None:
-        msg = f"No project found containing {node_id=}"
-        raise ProjectNodesNodeNotFound(msg)
-    return uuid.UUID(project_id)
+    result = await connection.execute(get_stmt)
+    project_ids = await result.fetchall()
+    if not project_ids:
+        raise ProjectNodesNodeNotFoundError(project_uuid=None, node_id=node_id)
+    if len(project_ids) > 1:
+        raise ProjectNodesNonUniqueNodeFoundError(node_id=node_id)
+    return uuid.UUID(project_ids[0][projects_nodes.c.project_uuid])
diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py
index c70ec2651c2..65e36b91406 100644
--- a/packages/postgres-database/tests/conftest.py
+++ b/packages/postgres-database/tests/conftest.py
@@ -3,6 +3,7 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
+import uuid
 from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
 
 import aiopg.sa
@@ -24,6 +25,11 @@
 from simcore_postgres_database.models.clusters import ClusterType, clusters
 from simcore_postgres_database.models.products import products
 from simcore_postgres_database.models.projects import projects
+from simcore_postgres_database.utils_projects_nodes import (
+    ProjectNode,
+    ProjectNodeCreate,
+    ProjectNodesRepo,
+)
 from simcore_postgres_database.webserver_models import (
     GroupType,
     groups,
@@ -261,7 +267,9 @@ async def _creator(**overrides) -> int:
 
 
 @pytest.fixture
-async def create_fake_project(pg_engine: Engine) -> AsyncIterator[Callable]:
+async def create_fake_project(
+    pg_engine: Engine,
+) -> AsyncIterator[Callable[..., Awaitable[RowProxy]]]:
     created_project_uuids = []
 
     async def _creator(conn, user: RowProxy, **overrides) -> RowProxy:
@@ -283,6 +291,24 @@ async def _creator(conn, user: RowProxy, **overrides) -> RowProxy:
     )
 
 
+@pytest.fixture
+async def create_fake_projects_node(
+    connection: aiopg.sa.connection.SAConnection,
+    faker: Faker,
+) -> Callable[[uuid.UUID], Awaitable[ProjectNode]]:
+    async def _creator(project_uuid: uuid.UUID) -> ProjectNode:
+        fake_node = ProjectNodeCreate(
+            node_id=uuid.uuid4(),
+            required_resources=faker.pydict(allowed_types=(str,)),
+        )
+        repo = ProjectNodesRepo(project_uuid=project_uuid)
+        created_nodes = await repo.add(connection, nodes=[fake_node])
+        assert created_nodes
+        return created_nodes[0]
+
+    return _creator
+
+
 @pytest.fixture
 def create_fake_product(
     connection: aiopg.sa.connection.SAConnection,
diff --git a/packages/postgres-database/tests/test_utils_projects_metadata.py b/packages/postgres-database/tests/test_utils_projects_metadata.py
index 1cfce5e2e2a..0aa352f7853 100644
--- a/packages/postgres-database/tests/test_utils_projects_metadata.py
+++ b/packages/postgres-database/tests/test_utils_projects_metadata.py
@@ -3,14 +3,22 @@
 # pylint: disable=unused-variable
 # pylint: disable=too-many-arguments
 
+import uuid
 from collections.abc import Awaitable, Callable
+from uuid import UUID
 
 import pytest
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import RowProxy
 from faker import Faker
 from simcore_postgres_database import utils_projects_metadata
-from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError
+from 
simcore_postgres_database.utils_projects_metadata import ( + DBProjectInvalidAncestorsError, + DBProjectInvalidParentNodeError, + DBProjectInvalidParentProjectError, + DBProjectNotFoundError, +) +from simcore_postgres_database.utils_projects_nodes import ProjectNode @pytest.fixture @@ -27,15 +35,17 @@ async def fake_project( connection: SAConnection, fake_user: RowProxy, create_fake_project: Callable[..., Awaitable[RowProxy]], + create_fake_nodes: Callable[..., Awaitable[RowProxy]], ) -> RowProxy: project: RowProxy = await create_fake_project(connection, fake_user, hidden=True) + await create_fake_nodes(project) return project @pytest.mark.acceptance_test( "For https://github.com/ITISFoundation/osparc-simcore/issues/4313" ) -async def test_projects_metadata_repository( +async def test_set_project_custom_metadata( connection: SAConnection, create_fake_user: Callable[..., Awaitable[RowProxy]], create_fake_project: Callable[..., Awaitable[RowProxy]], @@ -46,13 +56,16 @@ async def test_projects_metadata_repository( # subresource is attached to parent user_metadata = {"float": 3.14, "int": 42, "string": "foo", "bool": True} - + random_project_uuid = faker.uuid4(cast_to=None) + assert isinstance(random_project_uuid, UUID) with pytest.raises(DBProjectNotFoundError): - await utils_projects_metadata.get(connection, project_uuid=faker.uuid4()) + await utils_projects_metadata.get(connection, project_uuid=random_project_uuid) with pytest.raises(DBProjectNotFoundError): - await utils_projects_metadata.upsert( - connection, project_uuid=faker.uuid4(), custom_metadata=user_metadata + await utils_projects_metadata.set_project_custom_metadata( + connection, + project_uuid=random_project_uuid, + custom_metadata=user_metadata, ) project_metadata = await utils_projects_metadata.get( @@ -60,11 +73,21 @@ async def test_projects_metadata_repository( ) assert project_metadata is not None assert project_metadata.custom is None + assert project_metadata.parent_project_uuid is None + assert project_metadata.parent_node_id is None + assert project_metadata.root_parent_project_uuid is None + assert project_metadata.root_parent_node_id is None - got = await utils_projects_metadata.upsert( - connection, project_uuid=project["uuid"], custom_metadata=user_metadata + got = await utils_projects_metadata.set_project_custom_metadata( + connection, + project_uuid=project["uuid"], + custom_metadata=user_metadata, ) assert got.custom + assert got.parent_project_uuid is None + assert got.parent_node_id is None + assert got.root_parent_project_uuid is None + assert got.root_parent_node_id is None assert user_metadata == got.custom project_metadata = await utils_projects_metadata.get( @@ -73,10 +96,288 @@ async def test_projects_metadata_repository( assert project_metadata is not None assert project_metadata == got - got_after_update = await utils_projects_metadata.upsert( - connection, project_uuid=project["uuid"], custom_metadata={} + got_after_update = await utils_projects_metadata.set_project_custom_metadata( + connection, + project_uuid=project["uuid"], + custom_metadata={}, ) assert got_after_update.custom == {} assert got.modified assert got_after_update.modified assert got.modified < got_after_update.modified + + +async def test_set_project_ancestors_with_invalid_parents( + connection: SAConnection, + create_fake_user: Callable[..., Awaitable[RowProxy]], + create_fake_project: Callable[..., Awaitable[RowProxy]], + create_fake_projects_node: Callable[[uuid.UUID], Awaitable[ProjectNode]], + faker: Faker, +): + user: RowProxy 
= await create_fake_user(connection)
+    project: RowProxy = await create_fake_project(connection, user, hidden=True)
+    project_node = await create_fake_projects_node(project["uuid"])
+
+    # this is empty
+    project_metadata = await utils_projects_metadata.get(
+        connection, project_uuid=project["uuid"]
+    )
+    assert project_metadata is not None
+    assert project_metadata.custom is None
+    assert project_metadata.parent_project_uuid is None
+    assert project_metadata.parent_node_id is None
+    assert project_metadata.root_parent_project_uuid is None
+    assert project_metadata.root_parent_node_id is None
+
+    random_project_uuid = faker.uuid4(cast_to=None)
+    assert isinstance(random_project_uuid, UUID)
+    random_node_id = faker.uuid4(cast_to=None)
+    assert isinstance(random_node_id, UUID)
+
+    # invalid project
+    with pytest.raises(DBProjectNotFoundError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=random_project_uuid,
+            parent_project_uuid=None,
+            parent_node_id=None,
+        )
+
+    # test invalid combinations
+    with pytest.raises(DBProjectInvalidAncestorsError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=project["uuid"],
+            parent_project_uuid=random_project_uuid,
+            parent_node_id=None,
+        )
+    with pytest.raises(DBProjectInvalidAncestorsError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=project["uuid"],
+            parent_project_uuid=None,
+            parent_node_id=random_node_id,
+        )
+
+    # valid combination with invalid project/node
+    with pytest.raises(DBProjectInvalidParentProjectError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=project["uuid"],
+            parent_project_uuid=random_project_uuid,
+            parent_node_id=random_node_id,
+        )
+
+    # these would make the project a parent of itself, which is forbidden
+    with pytest.raises(DBProjectInvalidAncestorsError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=project["uuid"],
+            parent_project_uuid=project["uuid"],
+            parent_node_id=random_node_id,
+        )
+
+    with pytest.raises(DBProjectInvalidAncestorsError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=project["uuid"],
+            parent_project_uuid=project["uuid"],
+            parent_node_id=project_node.node_id,
+        )
+
+    # a valid parent project combined with an unknown node, and vice versa
+    another_project = await create_fake_project(connection, user, hidden=False)
+    another_project_node = await create_fake_projects_node(another_project["uuid"])
+    with pytest.raises(DBProjectInvalidParentNodeError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=another_project["uuid"],
+            parent_project_uuid=project["uuid"],
+            parent_node_id=random_node_id,
+        )
+
+    with pytest.raises(DBProjectInvalidParentProjectError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=another_project["uuid"],
+            parent_project_uuid=random_project_uuid,
+            parent_node_id=project_node.node_id,
+        )
+
+    # mix a node from one project and a parent project
+    yet_another_project = await create_fake_project(connection, user, hidden=False)
+    with pytest.raises(DBProjectInvalidParentNodeError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=yet_another_project["uuid"],
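            # NOTE: a compact, hedged summary of the (parent_project_uuid,
            # parent_node_id) combinations enforced by
            # _check_valid_ancestors_combination earlier in this patch:
            #   (None, None)              -> clears the ancestry
            #   (set, set)                -> must reference an existing project/node pair
            #   (set, None) / (None, set) -> DBProjectInvalidAncestorsError
            #   parent == project itself  -> DBProjectInvalidAncestorsError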
parent_project_uuid=another_project["uuid"], + parent_node_id=project_node.node_id, + ) + + +async def test_set_project_ancestors( + connection: SAConnection, + create_fake_user: Callable[..., Awaitable[RowProxy]], + create_fake_project: Callable[..., Awaitable[RowProxy]], + create_fake_projects_node: Callable[[uuid.UUID], Awaitable[ProjectNode]], +): + user: RowProxy = await create_fake_user(connection) + + # create grand-parent + grand_parent_project = await create_fake_project(connection, user, hidden=False) + grand_parent_node = await create_fake_projects_node(grand_parent_project["uuid"]) + + # create parent + parent_project = await create_fake_project(connection, user, hidden=False) + parent_node = await create_fake_projects_node(parent_project["uuid"]) + + # create child + child_project: RowProxy = await create_fake_project(connection, user, hidden=True) + + # set ancestry, first the parents + updated_parent_metadata = await utils_projects_metadata.set_project_ancestors( + connection, + project_uuid=parent_project["uuid"], + parent_project_uuid=grand_parent_project["uuid"], + parent_node_id=grand_parent_node.node_id, + ) + assert updated_parent_metadata.parent_project_uuid == uuid.UUID( + grand_parent_project["uuid"] + ) + assert updated_parent_metadata.parent_node_id == grand_parent_node.node_id + assert updated_parent_metadata.root_parent_project_uuid == uuid.UUID( + grand_parent_project["uuid"] + ) + assert updated_parent_metadata.root_parent_node_id == grand_parent_node.node_id + + # then the child + updated_child_metadata = await utils_projects_metadata.set_project_ancestors( + connection, + project_uuid=child_project["uuid"], + parent_project_uuid=parent_project["uuid"], + parent_node_id=parent_node.node_id, + ) + assert updated_child_metadata.parent_project_uuid == uuid.UUID( + parent_project["uuid"] + ) + assert updated_child_metadata.parent_node_id == parent_node.node_id + assert updated_child_metadata.root_parent_project_uuid == uuid.UUID( + grand_parent_project["uuid"] + ) + assert updated_child_metadata.root_parent_node_id == grand_parent_node.node_id + + # check properly updated + returned_project_metadata = await utils_projects_metadata.get( + connection, project_uuid=child_project["uuid"] + ) + assert returned_project_metadata == updated_child_metadata + + # remove the child + updated_child_metadata = await utils_projects_metadata.set_project_ancestors( + connection, + project_uuid=child_project["uuid"], + parent_project_uuid=None, + parent_node_id=None, + ) + assert updated_child_metadata.parent_project_uuid is None + assert updated_child_metadata.parent_node_id is None + assert updated_child_metadata.root_parent_project_uuid is None + assert updated_child_metadata.root_parent_node_id is None + + +async def _create_child_project( + connection: SAConnection, + user: RowProxy, + create_fake_project: Callable[..., Awaitable[RowProxy]], + create_fake_projects_node: Callable[[uuid.UUID], Awaitable[ProjectNode]], + parent_project: RowProxy | None, + parent_node: ProjectNode | None, +) -> tuple[RowProxy, ProjectNode]: + project = await create_fake_project(connection, user, hidden=False) + node = await create_fake_projects_node(project["uuid"]) + if parent_project and parent_node: + await utils_projects_metadata.set_project_ancestors( + connection, + project_uuid=project["uuid"], + parent_project_uuid=parent_project["uuid"], + parent_node_id=parent_node.node_id, + ) + return project, node + + +@pytest.fixture +async def create_projects_genealogy( + connection: SAConnection, 
+    create_fake_project: Callable[..., Awaitable[RowProxy]],
+    create_fake_projects_node: Callable[[uuid.UUID], Awaitable[ProjectNode]],
+) -> Callable[[RowProxy], Awaitable[list[tuple[RowProxy, ProjectNode]]]]:
+    async def _(user: RowProxy) -> list[tuple[RowProxy, ProjectNode]]:
+        ancestors: list[tuple[RowProxy, ProjectNode]] = []
+
+        ancestor_project = await create_fake_project(connection, user, hidden=False)
+        ancestor_node = await create_fake_projects_node(ancestor_project["uuid"])
+        ancestors.append((ancestor_project, ancestor_node))
+
+        for _ in range(13):
+            child_project, child_node = await _create_child_project(
+                connection,
+                user,
+                create_fake_project,
+                create_fake_projects_node,
+                ancestor_project,
+                ancestor_node,
+            )
+            ancestor_project = child_project
+            ancestor_node = child_node
+            ancestors.append((child_project, child_node))
+
+        return ancestors
+
+    return _
+
+
+async def test_not_implemented_use_cases(
+    connection: SAConnection,
+    create_fake_user: Callable[..., Awaitable[RowProxy]],
+    create_fake_project: Callable[..., Awaitable[RowProxy]],
+    create_fake_projects_node: Callable[[uuid.UUID], Awaitable[ProjectNode]],
+    create_projects_genealogy: Callable[
+        [RowProxy], Awaitable[list[tuple[RowProxy, ProjectNode]]]
+    ],
+):
+    """Tests use-cases that are currently not implemented and are expected to fail with an exception.
+    Basically, any project with children cannot have its genealogy changed anymore: yes, children are sacred.
+    If you still want to change them, you need to go via the children first.
+    """
+    user = await create_fake_user(connection)
+    # add a missing parent to an already existing chain of parent-children
+    ancestors = await create_projects_genealogy(user)
+    missing_parent_project = await create_fake_project(connection, user)
+    missing_parent_node = await create_fake_projects_node(
+        missing_parent_project["uuid"]
+    )
+
+    with pytest.raises(NotImplementedError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=ancestors[0][0]["uuid"],
+            parent_project_uuid=missing_parent_project["uuid"],
+            parent_node_id=missing_parent_node.node_id,
+        )
+
+    # modifying a parent-child relationship in the middle of the genealogy is also not implemented
+    with pytest.raises(NotImplementedError):
+        await utils_projects_metadata.set_project_ancestors(
+            connection,
+            project_uuid=ancestors[3][0]["uuid"],
+            parent_project_uuid=missing_parent_project["uuid"],
+            parent_node_id=missing_parent_node.node_id,
+        )
diff --git a/packages/postgres-database/tests/test_utils_projects_nodes.py b/packages/postgres-database/tests/test_utils_projects_nodes.py
index 85a14630d69..a20083608dd 100644
--- a/packages/postgres-database/tests/test_utils_projects_nodes.py
+++ b/packages/postgres-database/tests/test_utils_projects_nodes.py
@@ -19,9 +19,10 @@
 from simcore_postgres_database.models.projects_nodes import projects_nodes
 from simcore_postgres_database.utils_projects_nodes import (
     ProjectNodeCreate,
-    ProjectNodesDuplicateNode,
-    ProjectNodesNodeNotFound,
-    ProjectNodesProjectNotFound,
+    ProjectNodesDuplicateNodeError,
+    ProjectNodesNodeNotFoundError,
+    ProjectNodesNonUniqueNodeFoundError,
+    ProjectNodesProjectNotFoundError,
     ProjectNodesRepo,
 )
 
@@ -57,6 +58,7 @@ async def registered_project(
 @pytest.fixture
 def projects_nodes_repo_of_invalid_project(faker: Faker) -> ProjectNodesRepo:
     invalid_project_uuid = faker.uuid4(cast_to=None)
+    assert isinstance(invalid_project_uuid, uuid.UUID)
     repo = ProjectNodesRepo(project_uuid=invalid_project_uuid)
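    # NOTE: a minimal sketch of the ProjectNodesRepo round-trip these tests
    # exercise, assuming an aiopg `connection`; the node id is illustrative:
    #
    #     repo = ProjectNodesRepo(project_uuid=project_uuid)
    #     created = await repo.add(
    #         connection,
    #         nodes=[ProjectNodeCreate(node_id=uuid.uuid4(), required_resources={})],
    #     )
    #     node = await repo.get(connection, node_id=created[0].node_id)
    #     await repo.delete(connection, node_id=node.node_id)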
assert repo return repo @@ -72,11 +74,10 @@ def projects_nodes_repo(registered_project: dict[str, Any]) -> ProjectNodesRepo: @pytest.fixture def create_fake_projects_node( faker: Faker, - create_fake_node_id: Callable[[], uuid.UUID], ) -> Callable[..., ProjectNodeCreate]: def _creator() -> ProjectNodeCreate: node = ProjectNodeCreate( - node_id=create_fake_node_id(), + node_id=uuid.uuid4(), required_resources=faker.pydict(allowed_types=(str,)), ) assert node @@ -85,20 +86,12 @@ def _creator() -> ProjectNodeCreate: return _creator -@pytest.fixture -def create_fake_node_id(faker: Faker) -> Callable[[], uuid.UUID]: - def _creator() -> uuid.UUID: - return faker.uuid4(cast_to=None) - - return _creator - - async def test_create_projects_nodes_raises_if_project_not_found( connection: SAConnection, projects_nodes_repo_of_invalid_project: ProjectNodesRepo, create_fake_projects_node: Callable[..., ProjectNodeCreate], ): - with pytest.raises(ProjectNodesProjectNotFound): + with pytest.raises(ProjectNodesProjectNotFoundError): await projects_nodes_repo_of_invalid_project.add( connection, nodes=[create_fake_projects_node()], @@ -110,6 +103,9 @@ async def test_create_projects_nodes( projects_nodes_repo: ProjectNodesRepo, create_fake_projects_node: Callable[..., ProjectNodeCreate], ): + + assert await projects_nodes_repo.add(connection, nodes=[]) == [] + new_nodes = await projects_nodes_repo.add( connection, nodes=[create_fake_projects_node()], @@ -128,7 +124,7 @@ async def test_create_twice_same_projects_nodes_raises( new_nodes = await projects_nodes_repo.add(connection, nodes=[node_create]) assert new_nodes assert len(new_nodes) == 1 - with pytest.raises(ProjectNodesDuplicateNode): + with pytest.raises(ProjectNodesDuplicateNodeError): await projects_nodes_repo.add( connection, nodes=[node_create], @@ -166,21 +162,19 @@ async def test_list_project_nodes( async def test_get_project_node_of_invalid_project_raises( connection: SAConnection, projects_nodes_repo_of_invalid_project: ProjectNodesRepo, - create_fake_node_id: Callable[[], uuid.UUID], ): - with pytest.raises(ProjectNodesNodeNotFound): + with pytest.raises(ProjectNodesNodeNotFoundError): await projects_nodes_repo_of_invalid_project.get( - connection, node_id=create_fake_node_id() + connection, node_id=uuid.uuid4() ) async def test_get_project_node_of_empty_project_raises( connection: SAConnection, projects_nodes_repo: ProjectNodesRepo, - create_fake_node_id: Callable[[], uuid.UUID], ): - with pytest.raises(ProjectNodesNodeNotFound): - await projects_nodes_repo.get(connection, node_id=create_fake_node_id()) + with pytest.raises(ProjectNodesNodeNotFoundError): + await projects_nodes_repo.get(connection, node_id=uuid.uuid4()) async def test_get_project_node( @@ -205,7 +199,6 @@ async def test_update_project_node_of_invalid_node_raises( connection: SAConnection, projects_nodes_repo: ProjectNodesRepo, create_fake_projects_node: Callable[..., ProjectNodeCreate], - create_fake_node_id: Callable[[], uuid.UUID], faker: Faker, ): new_nodes = await projects_nodes_repo.add( @@ -215,10 +208,10 @@ async def test_update_project_node_of_invalid_node_raises( assert len(new_nodes) == 1 assert new_nodes[0] assert new_nodes[0].created == new_nodes[0].modified - with pytest.raises(ProjectNodesNodeNotFound): + with pytest.raises(ProjectNodesNodeNotFoundError): await projects_nodes_repo.update( connection, - node_id=create_fake_node_id(), + node_id=uuid.uuid4(), required_resources={faker.pystr(): faker.pyint()}, ) @@ -227,7 +220,6 @@ async def test_update_project_node( 
connection: SAConnection, projects_nodes_repo: ProjectNodesRepo, create_fake_projects_node: Callable[..., ProjectNodeCreate], - create_fake_node_id: Callable[[], uuid.UUID], faker: Faker, ): new_nodes = await projects_nodes_repo.add( @@ -252,10 +244,9 @@ async def test_update_project_node( async def test_delete_invalid_node_does_nothing( connection: SAConnection, projects_nodes_repo_of_invalid_project: ProjectNodesRepo, - create_fake_node_id: Callable[[], uuid.UUID], ): await projects_nodes_repo_of_invalid_project.delete( - connection, node_id=create_fake_node_id() + connection, node_id=uuid.uuid4() ) @@ -277,7 +268,7 @@ async def test_delete_node( assert received_node == new_nodes[0] await projects_nodes_repo.delete(connection, node_id=new_nodes[0].node_id) - with pytest.raises(ProjectNodesNodeNotFound): + with pytest.raises(ProjectNodesNodeNotFoundError): await projects_nodes_repo.get(connection, node_id=new_nodes[0].node_id) @@ -302,7 +293,7 @@ async def test_delete_project_delete_all_nodes( await _delete_project(connection, projects_nodes_repo.project_uuid) # the project cannot be found anymore (the link in projects_to_projects_nodes is auto-removed) - with pytest.raises(ProjectNodesNodeNotFound): + with pytest.raises(ProjectNodesNodeNotFoundError): await projects_nodes_repo.get(connection, node_id=new_nodes[0].node_id) # the underlying projects_nodes should also be gone, thanks to migration @@ -378,9 +369,9 @@ async def _workflow() -> dict[uuid.UUID, list[uuid.UUID]]: for project_id_to_node_ids_map in list_of_project_id_node_ids_map: project_id = next(iter(project_id_to_node_ids_map)) - random_node_id = random.choice( + random_node_id = random.choice( # noqa: S311 project_id_to_node_ids_map[project_id] - ) # noqa: S311 + ) received_project_id = await ProjectNodesRepo.get_project_id_from_node_id( connection, node_id=random_node_id ) @@ -393,7 +384,40 @@ async def test_get_project_id_from_node_id_raises_for_invalid_node_id( projects_nodes_repo: ProjectNodesRepo, faker: Faker, ): - with pytest.raises(ProjectNodesNodeNotFound): + random_uuid = faker.uuid4(cast_to=None) + assert isinstance(random_uuid, uuid.UUID) + with pytest.raises(ProjectNodesNodeNotFoundError): + await ProjectNodesRepo.get_project_id_from_node_id( + connection, node_id=random_uuid + ) + + +async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same_node_id_exist( + pg_engine: Engine, + connection: SAConnection, + projects_nodes_repo: ProjectNodesRepo, + registered_user: RowProxy, + create_fake_project: Callable[..., Awaitable[RowProxy]], + create_fake_projects_node: Callable[..., ProjectNodeCreate], +): + project1 = await create_fake_project(connection, registered_user) + project1_repo = ProjectNodesRepo(project_uuid=project1.uuid) + + project2 = await create_fake_project(connection, registered_user) + project2_repo = ProjectNodesRepo(project_uuid=project2.uuid) + + shared_node = create_fake_projects_node() + + project1_nodes = await project1_repo.add(connection, nodes=[shared_node]) + assert len(project1_nodes) == 1 + project2_nodes = await project2_repo.add(connection, nodes=[shared_node]) + assert len(project2_nodes) == 1 + assert project1_nodes[0].dict( + include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) + ) == project2_nodes[0].dict( + include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) + ) + with pytest.raises(ProjectNodesNonUniqueNodeFoundError): await ProjectNodesRepo.get_project_id_from_node_id( - connection, node_id=faker.uuid4(cast_to=None) 
+ connection, node_id=project1_nodes[0].node_id ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py index 30b4abb179b..1df38757bbb 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py @@ -1,6 +1,7 @@ """ helpers to manage the projects's database and produce fixtures/mockup data for testing """ + # pylint: disable=no-value-for-parameter import json @@ -111,7 +112,6 @@ def __init__( self, params_override: dict | None = None, app: web.Application | None = None, - clear_all: bool = True, user_id: int | None = None, *, product_name: str, @@ -126,7 +126,6 @@ def __init__( self.product_name = product_name self.app = app self.prj = {} - self.clear_all = clear_all self.force_uuid = force_uuid self.tests_data_dir = tests_data_dir self.as_template = as_template @@ -134,12 +133,6 @@ def __init__( assert tests_data_dir.exists() assert tests_data_dir.is_dir() - if not self.clear_all: - # TODO: add delete_project. Deleting a single project implies having to delete as well all dependencies created - raise ValueError( - "UNDER DEVELOPMENT: Currently can only delete all projects " - ) - async def __aenter__(self) -> ProjectDict: assert self.app # nosec @@ -156,8 +149,7 @@ async def __aenter__(self) -> ProjectDict: async def __aexit__(self, *args): assert self.app # nosec - if self.clear_all: - await delete_all_projects(self.app) + await delete_all_projects(self.app) async def assert_get_same_project( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py index 153fbce6158..747d1a1193f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py @@ -101,6 +101,20 @@ def standard_role_response() -> tuple[str, list[tuple[UserRole, ExpectedResponse ) +def standard_user_role_response() -> ( + tuple[str, list[tuple[UserRole, ExpectedResponse]]] +): + all_roles = standard_role_response() + return ( + all_roles[0], + [ + (user_role, response) + for user_role, response in all_roles[1] + if user_role in [UserRole.USER] + ], + ) + + class MockedStorageSubsystem(NamedTuple): copy_data_folders_from_project: mock.MagicMock delete_project: mock.MagicMock diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 51d83c5448b..4353c8fad0d 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -173,6 +173,21 @@ def parse_request_query_parameters_as( return model +def parse_request_headers_as( + parameters_schema_cls: type[ModelClass], + request: web.Request, + *, + use_enveloped_error_v1: bool = True, +) -> ModelClass: + with handle_validation_as_http_error( + error_msg_template="Invalid parameter/s '{failed}' in request headers", + resource_name=request.rel_url.path, + use_error_v1=use_enveloped_error_v1, + ): + data = dict(request.headers) + return parameters_schema_cls.parse_obj(data) + + async def parse_request_body_as( model_schema_cls: type[ModelOrListOrDictType], request: web.Request, diff --git a/packages/service-library/src/servicelib/common_headers.py 
b/packages/service-library/src/servicelib/common_headers.py index f883a99b9b4..543ef593fe5 100644 --- a/packages/service-library/src/servicelib/common_headers.py +++ b/packages/service-library/src/servicelib/common_headers.py @@ -4,4 +4,6 @@ X_DYNAMIC_SIDECAR_REQUEST_SCHEME: Final[str] = "X-Dynamic-Sidecar-Request-Scheme" X_FORWARDED_PROTO: Final[str] = "X-Forwarded-Proto" X_SIMCORE_USER_AGENT: Final[str] = "X-Simcore-User-Agent" +X_SIMCORE_PARENT_PROJECT_UUID: Final[str] = "X-Simcore-Parent-Project-Uuid" +X_SIMCORE_PARENT_NODE_ID: Final[str] = "X-Simcore-Parent-Node-Id" UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE: Final[str] = "undefined" diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 095c34f04e9..08e2f07bfbe 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -15,6 +15,7 @@ from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, + parse_request_headers_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) @@ -62,6 +63,23 @@ def create_fake(cls, faker: Faker): return cls(is_ok=faker.pybool(), label=faker.word()) +class MyRequestHeadersParams(BaseModel): + user_agent: str = Field(alias="X-Simcore-User-Agent") + optional_header: str | None = Field(default=None, alias="X-Simcore-Optional-Header") + + class Config: + allow_population_by_field_name = False + + @classmethod + def create_fake(cls, faker: Faker): + return cls( + **{ + "X-Simcore-User-Agent": faker.pystr(), + "X-Simcore-Optional-Header": faker.word(), + } + ) + + class Sub(BaseModel): a: float = 33 @@ -101,6 +119,9 @@ async def _handler(request: web.Request) -> web.Response: query_params = parse_request_query_parameters_as( MyRequestQueryParams, request, use_enveloped_error_v1=False ) + headers_params = parse_request_headers_as( + MyRequestHeadersParams, request, use_enveloped_error_v1=False + ) body = await parse_request_body_as( MyBody, request, use_enveloped_error_v1=False ) @@ -112,6 +133,7 @@ async def _handler(request: web.Request) -> web.Response: "queries": query_params.dict(), "body": body.dict(), "context": context.dict(), + "headers": headers_params.dict(), }, dumps=json_dumps, ) @@ -123,8 +145,7 @@ async def _middleware(request: web.Request, handler): # request context request[RQT_USERID_KEY] = 42 request["RQT_IGNORE_CONTEXT"] = "not interesting" - resp = await handler(request) - return resp + return await handler(request) app = web.Application( middlewares=[ @@ -143,9 +164,8 @@ async def _middleware(request: web.Request, handler): @pytest.fixture -def path_params(faker: Faker): - path_params = MyRequestPathParams.create_fake(faker) - return path_params +def path_params(faker: Faker) -> MyRequestPathParams: + return MyRequestPathParams.create_fake(faker) @pytest.fixture @@ -158,17 +178,24 @@ def body(faker: Faker) -> MyBody: return MyBody.create_fake(faker) +@pytest.fixture +def headers_params(faker: Faker) -> MyRequestHeadersParams: + return MyRequestHeadersParams.create_fake(faker) + + async def test_parse_request_as( client: TestClient, path_params: MyRequestPathParams, query_params: MyRequestQueryParams, body: MyBody, + headers_params: MyRequestHeadersParams, ): assert client.app r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), json=body.dict(), + 
headers=headers_params.dict(by_alias=True), ) assert r.status == status.HTTP_200_OK, f"{await r.text()}" @@ -181,18 +208,21 @@ async def test_parse_request_as( "secret": client.app[APP_SECRET_KEY], "user_id": 42, } + assert got["headers"] == jsonable_encoder(headers_params.dict()) async def test_parse_request_with_invalid_path_params( client: TestClient, query_params: MyRequestQueryParams, body: MyBody, + headers_params: MyRequestHeadersParams, ): r = await client.get( "/projects/invalid-uuid", params=query_params.as_params(), json=body.dict(), + headers=headers_params.dict(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -216,12 +246,14 @@ async def test_parse_request_with_invalid_query_params( client: TestClient, path_params: MyRequestPathParams, body: MyBody, + headers_params: MyRequestHeadersParams, ): r = await client.get( f"/projects/{path_params.project_uuid}", params={}, json=body.dict(), + headers=headers_params.dict(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -245,12 +277,14 @@ async def test_parse_request_with_invalid_body( client: TestClient, path_params: MyRequestPathParams, query_params: MyRequestQueryParams, + headers_params: MyRequestHeadersParams, ): r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), json={"invalid": "body"}, + headers=headers_params.dict(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -281,13 +315,47 @@ async def test_parse_request_with_invalid_json_body( client: TestClient, path_params: MyRequestPathParams, query_params: MyRequestQueryParams, + headers_params: MyRequestHeadersParams, ): r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), data=b"[ 1 2, 3 'broken-json' ]", + headers=headers_params.dict(by_alias=True), ) body = await r.text() assert r.status == status.HTTP_400_BAD_REQUEST, body + + +async def test_parse_request_with_invalid_headers_params( + client: TestClient, + path_params: MyRequestPathParams, + query_params: MyRequestQueryParams, + body: MyBody, + headers_params: MyRequestHeadersParams, +): + + r = await client.get( + f"/projects/{path_params.project_uuid}", + params=query_params.as_params(), + json=body.dict(), + headers=headers_params.dict(), # we pass the wrong names + ) + assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" + + response_body = await r.json() + assert response_body["error"].pop("resource") + assert response_body == { + "error": { + "msg": "Invalid parameter/s 'X-Simcore-User-Agent' in request headers", + "details": [ + { + "loc": "X-Simcore-User-Agent", + "msg": "field required", + "type": "value_error.missing", + } + ], + } + } diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 83a9b9a3b4d..905c048b19e 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -11,6 +11,7 @@ - the task ID is the same as the associated node uuid """ + # pylint: disable=too-many-arguments # pylint: disable=too-many-statements @@ -37,7 +38,10 @@ from pydantic import AnyHttpUrl, parse_obj_as from servicelib.async_utils import run_sequentially_in_context from servicelib.rabbitmq import RabbitMQRPCClient -from 
simcore_postgres_database.utils_projects_nodes import ProjectNodesNodeNotFound +from simcore_postgres_database.utils_projects_nodes import ( + ProjectNodesNodeNotFoundError, + ProjectNodesNonUniqueNodeFoundError, +) from starlette import status from starlette.requests import Request from tenacity import retry @@ -156,7 +160,7 @@ async def _get_project_metadata( projects_metadata_repo: ProjectsMetadataRepository, computation: ComputationCreate, ) -> ProjectMetadataDict: - current_project_metadata = await projects_metadata_repo.get_metadata( + current_project_metadata = await projects_metadata_repo.get_custom_metadata( computation.project_id ) @@ -177,8 +181,12 @@ async def _get_project_metadata( parent_project_id=parent_project_id, parent_project_name=parent_project.name, ) - except (ProjectNotFoundError, ProjectNodesNodeNotFound) as exc: - _logger.exception("Could not find project/node: %s", exc) + except ( + ProjectNotFoundError, + ProjectNodesNodeNotFoundError, + ProjectNodesNonUniqueNodeFoundError, + ): + _logger.exception("Could not find project/node: %s", f"{parent_node_id=}") return {} @@ -360,12 +368,14 @@ async def create_computation( # noqa: PLR0913 AnyHttpUrl, f"{request.url}/{computation.project_id}?user_id={computation.user_id}", ), - stop_url=parse_obj_as( - AnyHttpUrl, - f"{request.url}/{computation.project_id}:stop?user_id={computation.user_id}", - ) - if computation.start_pipeline - else None, + stop_url=( + parse_obj_as( + AnyHttpUrl, + f"{request.url}/{computation.project_id}:stop?user_id={computation.user_id}", + ) + if computation.start_pipeline + else None + ), iteration=last_run.iteration if last_run else None, cluster_id=last_run.cluster_id if last_run else None, result=None, @@ -462,9 +472,11 @@ async def get_computation( state=pipeline_state, pipeline_details=pipeline_details, url=parse_obj_as(AnyHttpUrl, f"{request.url}"), - stop_url=parse_obj_as(AnyHttpUrl, f"{self_url}:stop?user_id={user_id}") - if pipeline_state.is_running() - else None, + stop_url=( + parse_obj_as(AnyHttpUrl, f"{self_url}:stop?user_id={user_id}") + if pipeline_state.is_running() + else None + ), iteration=last_run.iteration if last_run else None, cluster_id=last_run.cluster_id if last_run else None, result=None, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py index 6d364e9536d..0d62739f88e 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py @@ -1,6 +1,7 @@ from typing import Any from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from simcore_postgres_database.utils_projects_metadata import ProjectMetadata from simcore_postgres_database.utils_projects_metadata import ( get as projects_metadata_get, @@ -10,14 +11,25 @@ class ProjectsMetadataRepository(BaseRepository): - async def get_metadata(self, project_id: ProjectID) -> dict[str, Any] | None: + async def get_custom_metadata(self, project_id: ProjectID) -> dict[str, Any] | None: """ Raises: DBProjectNotFoundError """ async with self.db_engine.acquire() as conn: - project_custom_metadata: ProjectMetadata = await projects_metadata_get( + project_metadata: ProjectMetadata = await projects_metadata_get( conn, project_id ) - custom_metadata: dict[str, Any] | None = 
project_custom_metadata.custom + custom_metadata: dict[str, Any] | None = project_metadata.custom return custom_metadata + + async def get_parent_project_and_node( + self, project_id: ProjectID + ) -> tuple[ProjectID, NodeID] | None: + async with self.db_engine.acquire() as conn: + project_metadata: ProjectMetadata = await projects_metadata_get( + conn, project_id + ) + if project_metadata.parent_project_uuid and project_metadata.parent_node_id: + return project_metadata.parent_project_uuid, project_metadata.parent_node_id + return None diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py index a7ed20ce26f..d23932422a8 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py @@ -12,7 +12,7 @@ from models_library.projects_nodes_io import NodeID from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from simcore_postgres_database.utils_projects_nodes import ProjectNodesNodeNotFound +from simcore_postgres_database.utils_projects_nodes import ProjectNodesNodeNotFoundError from simcore_service_director_v2.modules.db.repositories.projects import ( ProjectsRepository, ) @@ -121,5 +121,5 @@ async def test_get_project_id_from_node( ) not_existing_node_id = faker.uuid4(cast_to=None) - with pytest.raises(ProjectNodesNodeNotFound): + with pytest.raises(ProjectNodesNodeNotFoundError): await project_repository.get_project_id_from_node(not_existing_node_id) diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 1d65163f669..d930d3e42b8 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2223,6 +2223,35 @@ paths: default: false name: hidden in: query + - required: false + schema: + title: X-Simcore-User-Agent + type: string + default: undefined + name: x-simcore-user-agent + in: header + - description: Optionally sets a parent project UUID (both project and node + must be set) + required: false + schema: + title: X-Simcore-Parent-Project-Uuid + type: string + description: Optionally sets a parent project UUID (both project and node + must be set) + format: uuid + name: x-simcore-parent-project-uuid + in: header + - description: Optionally sets a parent node ID (both project and node must + be set) + required: false + schema: + title: X-Simcore-Parent-Node-Id + type: string + description: Optionally sets a parent node ID (both project and node must + be set) + format: uuid + name: x-simcore-parent-node-id + in: header requestBody: content: application/json: @@ -11465,16 +11494,17 @@ components: releaseDate: title: Releasedate type: string - description: The date when the specific version of the service was released. - This field helps in tracking the timeline of releases and understanding - the sequence of updates. The date should be formatted in YYYY-MM-DD format - for consistency and easy sorting. - format: date + description: "A timestamp when the specific version of the service was released.\ + \ This field helps in tracking the timeline of releases and understanding\ + \ the sequence of updates. 
A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z\ + \ or [\xB1]HH[:]MM]" + format: date-time integration-version: title: Integration-Version - pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$ type: string - description: Defines which version of the integration workflow should use + description: This version is used to maintain backward compatibility when + there are changes in the way a service is integrated into the framework type: allOf: - $ref: '#/components/schemas/ServiceType' @@ -11525,8 +11555,8 @@ components: description: 'Static metadata for a service injected in the image labels - NOTE: This model serialized in .osparc/metadata.yml and in the labels of the - docker image' + NOTE: This model is serialized in .osparc/metadata.yml and in the labels of + the docker image' example: name: File Picker description: description diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index ff91f07e901..597b3fc5e91 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -32,9 +32,15 @@ ) from ..users.api import get_user_fullname from . import projects_api +from ._metadata_api import set_project_ancestors from ._permalink_api import update_or_pop_permalink_in_project from .db import ProjectDBAPI -from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from .exceptions import ( + ParentNodeNotFoundError, + ParentProjectNotFoundError, + ProjectInvalidRightsError, + ProjectNotFoundError, +) from .models import ProjectDict from .utils import NodesMap, clone_project_document, default_copy_project_name @@ -210,8 +216,9 @@ async def _compose_project_data( return new_project, project_nodes -async def create_project( +async def create_project( # pylint: disable=too-many-arguments # noqa: C901, PLR0913 task_progress: TaskProgress, + *, request: web.Request, new_project_was_hidden_before_data_was_copied: bool, from_study: ProjectID | None, @@ -221,6 +228,8 @@ async def create_project( product_name: str, predefined_project: ProjectDict | None, simcore_user_agent: str, + parent_project_uuid: ProjectID | None, + parent_node_id: NodeID | None, ) -> None: """Implements TaskProtocol for 'create_projects' handler @@ -286,6 +295,14 @@ async def create_project( hidden=copy_data, project_nodes=project_nodes, ) + # add parent linking if needed + await set_project_ancestors( + request.app, + user_id=user_id, + project_uuid=new_project["uuid"], + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, + ) task_progress.update() # 4. deep copy source project's files @@ -340,6 +357,9 @@ async def create_project( except ProjectInvalidRightsError as exc: raise web.HTTPUnauthorized from exc + except (ParentProjectNotFoundError, ParentNodeNotFoundError) as exc: + raise web.HTTPNotFound(reason=f"{exc}") from exc + except asyncio.CancelledError: log.warning( "cancelled create_project for '%s'. 
Cleaning up", diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index 8a80a6fbf59..a7611b83c65 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -3,6 +3,7 @@ Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete """ + import functools import json import logging @@ -28,6 +29,7 @@ from servicelib.aiohttp.long_running_tasks.server import start_long_running_task from servicelib.aiohttp.requests_validation import ( parse_request_body_as, + parse_request_headers_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) @@ -51,6 +53,7 @@ from ._common_models import ProjectPathParams, RequestContext from ._crud_handlers_models import ( ProjectActiveParams, + ProjectCreateHeaders, ProjectCreateParams, ProjectListWithJsonStrParams, ) @@ -113,6 +116,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: async def create_project(request: web.Request): req_ctx = RequestContext.parse_obj(request) query_params = parse_request_query_parameters_as(ProjectCreateParams, request) + header_params = parse_request_headers_as(ProjectCreateHeaders, request) if query_params.as_template: # create template from await check_user_permission(request, "project.template.create") @@ -152,10 +156,10 @@ async def create_project(request: web.Request): copy_data=query_params.copy_data, user_id=req_ctx.user_id, product_name=req_ctx.product_name, - simcore_user_agent=request.headers.get( - X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE - ), + simcore_user_agent=header_params.simcore_user_agent, predefined_project=predefined_project, + parent_project_uuid=header_params.parent_project_uuid, + parent_node_id=header_params.parent_node_id, ) @@ -609,4 +613,6 @@ async def clone_project(request: web.Request): X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE ), predefined_project=None, + parent_project_uuid=None, + parent_node_id=None, ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py index 4044e10b26a..77f3643f942 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py @@ -4,14 +4,60 @@ """ +from typing import Any + from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageQueryParameters -from pydantic import BaseModel, Extra, Field, Json, validator +from pydantic import BaseModel, Extra, Field, Json, root_validator, validator +from servicelib.common_headers import ( + UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, + X_SIMCORE_USER_AGENT, +) from .models import ProjectTypeAPI +class ProjectCreateHeaders(BaseModel): + + simcore_user_agent: str = Field( # type: ignore[pydantic-alias] + default=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + description="Optional simcore user agent", + alias=X_SIMCORE_USER_AGENT, + ) + + parent_project_uuid: ProjectID | None = Field( # type: ignore[pydantic-alias] + default=None, + description="Optional parent 
project UUID", + alias=X_SIMCORE_PARENT_PROJECT_UUID, + ) + parent_node_id: NodeID | None = Field( # type: ignore[pydantic-alias] + default=None, + description="Optional parent node ID", + alias=X_SIMCORE_PARENT_NODE_ID, + ) + + @root_validator + @classmethod + def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: + if ( + values.get("parent_project_uuid") is None + and values.get("parent_node_id") is not None + ) or ( + values.get("parent_project_uuid") is not None + and values.get("parent_node_id") is None + ): + msg = "Both parent_project_uuid and parent_node_id must be set or both null or both unset" + raise ValueError(msg) + return values + + class Config: + allow_population_by_field_name = False + + class ProjectCreateParams(BaseModel): from_study: ProjectID | None = Field( None, diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py index 433a863540c..d07dc017243 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py @@ -1,30 +1,102 @@ +import logging +from typing import Final + from aiohttp import web from models_library.api_schemas_webserver.projects_metadata import MetadataDict from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.users import UserID +from pydantic import parse_obj_as from ..db.plugin import get_database_engine from . import _metadata_db from ._access_rights_api import validate_project_ownership +_logger = logging.getLogger(__name__) + -async def get_project_metadata( +async def get_project_custom_metadata( app: web.Application, user_id: UserID, project_uuid: ProjectID ) -> MetadataDict: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - return await _metadata_db.get_project_metadata( + return await _metadata_db.get_project_custom_metadata( engine=get_database_engine(app), project_uuid=project_uuid ) async def set_project_custom_metadata( - app: web.Application, user_id: UserID, project_uuid: ProjectID, value: MetadataDict + app: web.Application, + user_id: UserID, + project_uuid: ProjectID, + value: MetadataDict, ) -> MetadataDict: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - return await _metadata_db.set_project_metadata( + return await _metadata_db.set_project_custom_metadata( engine=get_database_engine(app), project_uuid=project_uuid, custom_metadata=value, ) + + +_NIL_NODE_UUID: Final[NodeID] = NodeID(int=0) + + +async def _project_has_ancestors( + app: web.Application, *, user_id: UserID, project_uuid: ProjectID +) -> bool: + await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) + + return await _metadata_db.project_has_ancestors( + engine=get_database_engine(app), project_uuid=project_uuid + ) + + +async def set_project_ancestors_from_custom_metadata( + app: web.Application, + user_id: UserID, + project_uuid: ProjectID, + custom_metadata: MetadataDict, +) -> None: + """NOTE: this should not be used anywhere else than from the metadata handler!""" + await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) + if await _project_has_ancestors(app, user_id=user_id, project_uuid=project_uuid): + # we do not override any existing ancestors via this method + return + + if parent_node_idstr := custom_metadata.get("node_id"): + # NOTE: 
backward compatibility with old S4L clients
+        parent_node_id = parse_obj_as(NodeID, parent_node_idstr)
+
+        if parent_node_id == _NIL_NODE_UUID:
+            return
+
+        # let's try to get the parent project UUID
+        parent_project_uuid = await _metadata_db.get_project_id_from_node_id(
+            get_database_engine(app), node_id=parent_node_id
+        )
+
+        await _metadata_db.set_project_ancestors(
+            get_database_engine(app),
+            project_uuid=project_uuid,
+            parent_project_uuid=parent_project_uuid,
+            parent_node_id=parent_node_id,
+        )
+
+
+async def set_project_ancestors(
+    app: web.Application,
+    user_id: UserID,
+    project_uuid: ProjectID,
+    parent_project_uuid: ProjectID | None,
+    parent_node_id: NodeID | None,
+) -> None:
+    await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid)
+
+    await _metadata_db.set_project_ancestors(
+        get_database_engine(app),
+        project_uuid=project_uuid,
+        parent_project_uuid=parent_project_uuid,
+        parent_node_id=parent_node_id,
+    )
diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py
index 1ce0b1cd24c..23bd16cadb6 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py
@@ -1,37 +1,85 @@
-from collections.abc import AsyncIterator
-from contextlib import asynccontextmanager
+import functools
+from collections.abc import Awaitable, Callable
+from typing import Any, TypeVar
 
-from aiopg.sa.connection import SAConnection
 from aiopg.sa.engine import Engine
 from models_library.api_schemas_webserver.projects_metadata import MetadataDict
 from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
 from pydantic import parse_obj_as
 from simcore_postgres_database import utils_projects_metadata
-from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError
+from simcore_postgres_database.utils_projects_metadata import (
+    DBProjectInvalidAncestorsError,
+    DBProjectInvalidParentNodeError,
+    DBProjectInvalidParentProjectError,
+    DBProjectNotFoundError,
+)
+from simcore_postgres_database.utils_projects_nodes import (
+    ProjectNodesNodeNotFoundError,
+    ProjectNodesNonUniqueNodeFoundError,
+    ProjectNodesRepo,
+)
 
-from .exceptions import ProjectNotFoundError
+from .exceptions import (
+    NodeNotFoundError,
+    ParentNodeNotFoundError,
+    ParentProjectNotFoundError,
+    ProjectInvalidUsageError,
+    ProjectNotFoundError,
+)
+
+F = TypeVar("F", bound=Callable[..., Awaitable[Any]])
 
 
-@asynccontextmanager
-async def _acquire_and_handle(
-    engine: Engine, project_uuid: ProjectID
-) -> AsyncIterator[SAConnection]:
-    try:
-        async with engine.acquire() as connection:
-            yield connection
 
+def _handle_projects_metadata_exceptions(fct: F) -> F:
+    """Transforms low-level database errors -> this subpackage's project errors"""
+
+    @functools.wraps(fct)
+    async def wrapper(*args, **kwargs) -> Any:
+        try:
+            return await fct(*args, **kwargs)
+
+        except DBProjectNotFoundError as err:
+            raise ProjectNotFoundError(project_uuid=err.project_uuid) from err
+        except ProjectNodesNodeNotFoundError as err:
+            raise NodeNotFoundError(
+                project_uuid=err.project_uuid, node_uuid=err.node_id
+            ) from err
+        except ProjectNodesNonUniqueNodeFoundError as err:
+            raise ProjectInvalidUsageError from err
+        except DBProjectInvalidParentNodeError as err:
+            raise ParentNodeNotFoundError(
+                project_uuid=err.project_uuid, node_uuid=err.parent_node_id
+            ) from err
+
+        except 
DBProjectInvalidParentProjectError as err: + raise ParentProjectNotFoundError( + project_uuid=err.parent_project_uuid + ) from err + except DBProjectInvalidAncestorsError as err: + raise ProjectInvalidUsageError from err - except DBProjectNotFoundError as err: - raise ProjectNotFoundError(project_uuid=project_uuid) from err + return wrapper # type: ignore + + +@_handle_projects_metadata_exceptions +async def get_project_id_from_node_id(engine: Engine, *, node_id: NodeID) -> ProjectID: + async with engine.acquire() as connection: + return await ProjectNodesRepo.get_project_id_from_node_id( + connection, node_id=node_id + ) -async def get_project_metadata(engine: Engine, project_uuid: ProjectID) -> MetadataDict: +@_handle_projects_metadata_exceptions +async def get_project_custom_metadata( + engine: Engine, project_uuid: ProjectID +) -> MetadataDict: """ Raises: ProjectNotFoundError ValidationError: illegal metadata format in the database """ - async with _acquire_and_handle(engine, project_uuid) as connection: + async with engine.acquire() as connection: metadata = await utils_projects_metadata.get( connection, project_uuid=project_uuid ) @@ -39,7 +87,8 @@ async def get_project_metadata(engine: Engine, project_uuid: ProjectID) -> Metad return parse_obj_as(MetadataDict, metadata.custom or {}) -async def set_project_metadata( +@_handle_projects_metadata_exceptions +async def set_project_custom_metadata( engine: Engine, project_uuid: ProjectID, custom_metadata: MetadataDict, @@ -47,10 +96,52 @@ async def set_project_metadata( """ Raises: ProjectNotFoundError - ValidationError: illegal metadata format in the database """ - async with _acquire_and_handle(engine, project_uuid) as connection: - metadata = await utils_projects_metadata.upsert( - connection, project_uuid=project_uuid, custom_metadata=custom_metadata + async with engine.acquire() as connection: + metadata = await utils_projects_metadata.set_project_custom_metadata( + connection, + project_uuid=project_uuid, + custom_metadata=custom_metadata, ) + return parse_obj_as(MetadataDict, metadata.custom) + + +@_handle_projects_metadata_exceptions +async def project_has_ancestors(engine: Engine, *, project_uuid: ProjectID) -> bool: + async with engine.acquire() as connection: + metadata = await utils_projects_metadata.get( + connection, project_uuid=project_uuid + ) + return bool(metadata.parent_project_uuid is not None) + + +@_handle_projects_metadata_exceptions +async def set_project_ancestors( + engine: Engine, + *, + project_uuid: ProjectID, + parent_project_uuid: ProjectID | None, + parent_node_id: NodeID | None, +) -> None: + """ + Raises: + ProjectNotFoundError + NodeNotFoundError + ParentNodeNotFoundError + ProjectInvalidUsageError + ValidationError: illegal metadata format in the database + """ + + async with engine.acquire() as connection: + if parent_project_uuid and (parent_project_uuid == project_uuid): + # this is not allowed! 
+ msg = "Project cannot be parent of itself" + raise ProjectInvalidUsageError(msg) + + await utils_projects_metadata.set_project_ancestors( + connection, + project_uuid=project_uuid, + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py index 8d5f6f8d5c4..614d0ba03b9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py @@ -3,15 +3,15 @@ Design rationale: - Resource metadata/labels: https://cloud.google.com/apis/design/design_patterns#resource_labels - - named `metadata` instead of labels - - limit number of entries and depth? dict[str, st] ?? + - named `metadata` instead of labels + - limit number of entries and depth? dict[str, st] ?? - Singleton https://cloud.google.com/apis/design/design_patterns#singleton_resources - - the singleton is implicitly created or deleted when its parent is created or deleted - - Get and Update methods only + - the singleton is implicitly created or deleted when its parent is created or deleted + - Get and Update methods only """ - import functools +import logging from aiohttp import web from models_library.api_schemas_webserver.projects_metadata import ( @@ -23,6 +23,7 @@ parse_request_path_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler +from servicelib.logging_utils import log_catch from .._meta import api_version_prefix from ..login.decorators import login_required @@ -30,10 +31,18 @@ from ..utils_aiohttp import envelope_json_response from . import _metadata_api from ._common_models import ProjectPathParams, RequestContext -from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from .exceptions import ( + NodeNotFoundError, + ParentNodeNotFoundError, + ProjectInvalidRightsError, + ProjectInvalidUsageError, + ProjectNotFoundError, +) routes = web.RouteTableDef() +_logger = logging.getLogger(__name__) + def _handle_project_exceptions(handler: Handler): """Transforms project errors -> http errors""" @@ -43,10 +52,16 @@ async def wrapper(request: web.Request) -> web.StreamResponse: try: return await handler(request) - except ProjectNotFoundError as exc: + except ( + ProjectNotFoundError, + NodeNotFoundError, + ParentNodeNotFoundError, + ) as exc: raise web.HTTPNotFound(reason=f"{exc}") from exc except ProjectInvalidRightsError as exc: raise web.HTTPUnauthorized(reason=f"{exc}") from exc + except ProjectInvalidUsageError as exc: + raise web.HTTPUnprocessableEntity(reason=f"{exc}") from exc return wrapper @@ -67,7 +82,7 @@ async def get_project_metadata(request: web.Request) -> web.Response: req_ctx = RequestContext.parse_obj(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - custom_metadata = await _metadata_api.get_project_metadata( + custom_metadata = await _metadata_api.get_project_custom_metadata( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id ) @@ -94,6 +109,13 @@ async def update_project_metadata(request: web.Request) -> web.Response: project_uuid=path_params.project_id, value=update.custom, ) + with log_catch(_logger, reraise=False): + await _metadata_api.set_project_ancestors_from_custom_metadata( + request.app, + user_id=req_ctx.user_id, + project_uuid=path_params.project_id, + custom_metadata=custom_metadata, + ) return 
envelope_json_response( ProjectMetadataGet(project_uuid=path_params.project_id, custom=custom_metadata) diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 27db44fda07..e62c4ef78e2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -1,4 +1,5 @@ """Defines the different exceptions that may arise in the projects subpackage""" + from typing import Any import redis.exceptions @@ -82,6 +83,23 @@ def __init__(self, *, project_uuid: str, node_uuid: str, **ctx): self.project_uuid = project_uuid +class ParentNodeNotFoundError(BaseProjectError): + msg_template = "Parent node '{node_uuid}' not found" + + def __init__(self, *, project_uuid: str | None, node_uuid: str, **ctx): + super().__init__(**ctx) + self.node_uuid = node_uuid + self.project_uuid = project_uuid + + +class ParentProjectNotFoundError(BaseProjectError): + msg_template = "Parent project '{project_uuid}' not found" + + def __init__(self, *, project_uuid: str | None, **ctx): + super().__init__(**ctx) + self.project_uuid = project_uuid + + ProjectLockError = redis.exceptions.LockError diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 2e9dfcc3dfd..5088775c7fb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -80,7 +80,7 @@ from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_projects_nodes import ( ProjectNodeCreate, - ProjectNodesNodeNotFound, + ProjectNodesNodeNotFoundError, ) from simcore_postgres_database.webserver_models import ProjectType @@ -1355,7 +1355,7 @@ async def get_project_node_resources( ) return node_resources - except ProjectNodesNodeNotFound as exc: + except ProjectNodesNodeNotFoundError as exc: raise NodeNotFoundError( project_uuid=f"{project_id}", node_uuid=f"{node_id}" ) from exc @@ -1397,7 +1397,7 @@ async def update_project_node_resources( check_update_allowed=True, ) return parse_obj_as(ServiceResourcesDict, project_node.required_resources) - except ProjectNodesNodeNotFound as exc: + except ProjectNodesNodeNotFoundError as exc: raise NodeNotFoundError( project_uuid=f"{project_id}", node_uuid=f"{node_id}" ) from exc diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index e9a6a0bf6f3..5415e67e6c6 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -17,6 +17,8 @@ import simcore_service_webserver from aiohttp.test_utils import TestClient from faker import Faker +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.projects_state import ProjectState from models_library.utils.json_serialization import json_dumps from pytest_simcore.helpers.utils_assert import assert_status @@ -26,6 +28,10 @@ from pytest_simcore.simcore_webserver_projects_rest_api import NEW_PROJECT from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.server import TaskStatus +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) from simcore_service_webserver.application_settings_utils import convert_to_environ_vars 
from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects._crud_api_create import ( @@ -169,16 +175,19 @@ def _patch(app_config: dict) -> EnvVarsDict: @pytest.fixture -def request_create_project() -> Callable[..., Awaitable[ProjectDict]]: +async def request_create_project() -> ( # noqa: C901, PLR0915 + AsyncIterator[Callable[..., Awaitable[ProjectDict]]] +): """this fixture allows to create projects through the webserver interface - NOTE: a next iteration should take care of cleaning up created projects - Returns: Callable[..., Awaitable[ProjectDict]]: _description_ """ # pylint: disable=too-many-statements + created_project_uuids = [] + used_clients = [] + async def _setup( client: TestClient, *, @@ -186,6 +195,8 @@ async def _setup( from_study: dict | None = None, as_template: bool | None = None, copy_data: bool | None = None, + parent_project_uuid: ProjectID | None, + parent_node_id: NodeID | None, ): # Pre-defined fields imposed by required properties in schema project_data: ProjectDict = {} @@ -239,8 +250,16 @@ async def _setup( url = url.update_query(as_template=f"{as_template}") if copy_data is not None: url = url.update_query(copy_data=f"{copy_data}") - - return url, project_data, expected_data + headers = {} + if parent_project_uuid is not None: + headers |= { + X_SIMCORE_PARENT_PROJECT_UUID: f"{parent_project_uuid}", + } + if parent_node_id is not None: + headers |= { + X_SIMCORE_PARENT_NODE_ID: f"{parent_node_id}", + } + return url, project_data, expected_data, headers async def _creator( client: TestClient, @@ -253,16 +272,20 @@ async def _creator( from_study: dict | None = None, as_template: bool | None = None, copy_data: bool | None = None, + parent_project_uuid: ProjectID | None = None, + parent_node_id: NodeID | None = None, ) -> ProjectDict: - url, project_data, expected_data = await _setup( + url, project_data, expected_data, headers = await _setup( client, project=project, from_study=from_study, as_template=as_template, copy_data=copy_data, + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, ) - resp = await client.post(f"{url}", json=project_data) + resp = await client.post(f"{url}", json=project_data, headers=headers) print(f"<-- created project response: {resp=}") data, error = await assert_status(resp, expected_accepted_response) if error: @@ -358,6 +381,14 @@ async def _creator( if key not in modified_fields: assert expected_data[key] == new_project[key] + created_project_uuids.append(new_project["uuid"]) + used_clients.append(client) + return new_project - return _creator + yield _creator + + # cleanup projects + for client, project_uuid in zip(used_clients, created_project_uuids, strict=True): + url = client.app.router["delete_project"].url_for(project_id=project_uuid) + await client.delete(url.path) diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py index e8d1d4202cb..1abf8926a57 100644 --- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py @@ -93,7 +93,6 @@ async def published_project( user_id=None, as_template=True, # <--IS a template product_name=osparc_product_name, - clear_all=True, tests_data_dir=tests_data_dir, ) as template_project: yield template_project @@ 
-119,7 +118,6 @@ async def unpublished_project( user_id=None, as_template=True, product_name=osparc_product_name, - clear_all=True, tests_data_dir=tests_data_dir, ) as template_project: yield template_project diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 845230be113..7a311221770 100644 --- a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -170,7 +170,6 @@ async def template_project( client.app, user_id=None, product_name=osparc_product_name, - clear_all=True, tests_data_dir=tests_data_dir, as_template=True, ) as template_project: @@ -205,7 +204,6 @@ async def _creator(**prj_kwargs) -> ProjectDict: client.app, user_id=None, product_name=osparc_product_name, - clear_all=True, tests_data_dir=tests_data_dir, as_template=True, ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py index 53480e939cf..38f638692c5 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py @@ -30,6 +30,7 @@ ExpectedResponse, MockedStorageSubsystem, standard_role_response, + standard_user_role_response, ) from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER @@ -412,7 +413,7 @@ async def test_new_project( client: TestClient, logged_user: UserInfoDict, primary_group, - expected, + expected: ExpectedResponse, storage_subsystem_mock, project_db_cleaner, request_create_project: Callable[..., Awaitable[ProjectDict]], @@ -422,13 +423,13 @@ async def test_new_project( ) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template( client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], template_project, - expected, + expected: ExpectedResponse, storage_subsystem_mock, project_db_cleaner, request_create_project: Callable[..., Awaitable[ProjectDict]], @@ -448,13 +449,13 @@ async def test_new_project_from_template( parse_obj_as(uuidlib.UUID, node_name) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_other_study( client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], user_project: ProjectDict, - expected, + expected: ExpectedResponse, storage_subsystem_mock, catalog_subsystem_mock: Callable[[list[ProjectDict]], None], project_db_cleaner, @@ -477,14 +478,14 @@ async def test_new_project_from_other_study( parse_obj_as(uuidlib.UUID, node_name) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template_with_body( client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], standard_groups: list[dict[str, str]], template_project, - expected, + expected: ExpectedResponse, storage_subsystem_mock, project_db_cleaner, request_create_project: Callable[..., Awaitable[ProjectDict]], @@ -531,7 +532,7 @@ async def test_new_project_from_template_with_body( parse_obj_as(uuidlib.UUID, node_name) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_new_template_from_project( client: TestClient, logged_user: dict[str, Any], diff 
--git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py index 02cea8ba046..6215ae57226 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py @@ -20,6 +20,7 @@ from servicelib.aiohttp.long_running_tasks.client import long_running_task_request from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict +from yarl import URL @pytest.fixture @@ -32,7 +33,7 @@ def fake_project( return project -async def _request_clone_project(client, url) -> ProjectGet: +async def _request_clone_project(client: TestClient, url: URL) -> ProjectGet: """Raise HTTPError subclasses if request fails""" # polls until long-running task is done data = None diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py index d388bd0c5b6..66e9f97a534 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py @@ -4,7 +4,11 @@ # pylint: disable=too-many-arguments import json +import random +from collections.abc import Awaitable, Callable +import aiopg +import aiopg.sa import pytest from aiohttp.test_utils import TestClient from faker import Faker @@ -12,12 +16,20 @@ ProjectMetadataGet, ProjectMetadataUpdate, ) +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from pydantic import parse_obj_as from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import UserInfoDict -from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem +from pytest_simcore.helpers.utils_webserver_unit_with_db import ( + ExpectedResponse, + MockedStorageSubsystem, + standard_user_role_response, +) from servicelib.aiohttp import status -from simcore_postgres_database.models.users import UserRole +from simcore_postgres_database.utils_projects_metadata import ( + get as get_db_project_metadata, +) from simcore_service_webserver.projects import _crud_api_delete from simcore_service_webserver.projects.models import ProjectDict @@ -25,12 +37,7 @@ @pytest.mark.acceptance_test( "For https://github.com/ITISFoundation/osparc-simcore/issues/4313" ) -@pytest.mark.parametrize( - "user_role", - [ - UserRole.USER, - ], -) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_custom_metadata_handlers( # for deletion mocked_director_v2_api: None, @@ -40,6 +47,7 @@ async def test_custom_metadata_handlers( faker: Faker, logged_user: UserInfoDict, user_project: ProjectDict, + expected: ExpectedResponse, ): # # metadata is a singleton subresource of a project @@ -54,9 +62,7 @@ async def test_custom_metadata_handlers( ) response = await client.get(f"{url}") - _, error = await assert_status( - response, expected_status_code=status.HTTP_404_NOT_FOUND - ) + _, error = await assert_status(response, expected_status_code=expected.not_found) error_message = error["errors"][0]["message"] assert invalid_project_id in error_message assert "project" in error_message.lower() @@ -66,7 +72,7 @@ async def test_custom_metadata_handlers( project_id=user_project["uuid"] ) response = await client.get(f"{url}") - data, _ = await 
assert_status(response, expected_status_code=status.HTTP_200_OK)
+    data, _ = await assert_status(response, expected_status_code=expected.ok)
     assert data["custom"] == {}
 
     # replace metadata
@@ -80,14 +86,14 @@
         f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict()
     )
 
-    data, _ = await assert_status(response, expected_status_code=status.HTTP_200_OK)
+    data, _ = await assert_status(response, expected_status_code=expected.ok)
     assert parse_obj_as(ProjectMetadataGet, data).custom == custom_metadata
 
     # delete project
     url = client.app.router["delete_project"].url_for(project_id=user_project["uuid"])
     response = await client.delete(f"{url}")
-    await assert_status(response, expected_status_code=status.HTTP_204_NO_CONTENT)
+    await assert_status(response, expected_status_code=expected.no_content)
 
     async def _wait_until_deleted():
         tasks = _crud_api_delete.get_scheduled_tasks(
@@ -102,4 +108,334 @@ async def _wait_until_deleted():
         project_id=user_project["uuid"]
     )
     response = await client.get(f"{url}")
-    await assert_status(response, expected_status_code=status.HTTP_404_NOT_FOUND)
+    await assert_status(response, expected_status_code=expected.not_found)
+
+
+@pytest.mark.parametrize(*standard_user_role_response())
+async def test_new_project_with_parent_project_node(
+    # for deletion
+    mocked_director_v2_api: None,
+    storage_subsystem_mock: MockedStorageSubsystem,
+    #
+    client: TestClient,
+    logged_user: UserInfoDict,
+    primary_group: dict[str, str],
+    user_project: ProjectDict,
+    expected: ExpectedResponse,
+    catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
+    aiopg_engine: aiopg.sa.Engine,
+):
+    """this is the new way of setting parents, using request headers"""
+    catalog_subsystem_mock([user_project])
+    parent_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.created,
+        logged_user,
+        primary_group,
+        from_study=user_project,
+    )
+    assert parent_project
+
+    parent_project_uuid = parse_obj_as(ProjectID, parent_project["uuid"])
+    parent_node_id = parse_obj_as(
+        NodeID, random.choice(list(parent_project["workbench"]))  # noqa: S311
+    )
+    child_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.created,
+        logged_user,
+        primary_group,
+        parent_project_uuid=parent_project_uuid,
+        parent_node_id=parent_node_id,
+    )
+    assert child_project
+    async with aiopg_engine.acquire() as connection:
+        project_db_metadata = await get_db_project_metadata(
+            connection, child_project["uuid"]
+        )
+        assert project_db_metadata.parent_project_uuid == parent_project_uuid
+        assert project_db_metadata.parent_node_id == parent_node_id
+
+    # now we set the metadata with another node_id, which shall not override the already set genealogy
+    another_node_id = random.choice(  # noqa: S311
+        [n for n in parent_project["workbench"] if NodeID(n) != parent_node_id]
+    )
+    assert NodeID(another_node_id) != parent_node_id
+    custom_metadata = {
+        "number": 3.14,
+        "string": "str",
+        "boolean": False,
+        "node_id": f"{another_node_id}",
+    }
+    assert client.app
+    url = client.app.router["update_project_metadata"].url_for(
+        project_id=child_project["uuid"]
+    )
+    response = await client.patch(
+        f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict()
+    )
+    data, _ = await assert_status(response, expected_status_code=status.HTTP_200_OK)
+    assert parse_obj_as(ProjectMetadataGet, data).custom == custom_metadata
+    # check child project has parent unchanged
+    
async with aiopg_engine.acquire() as connection:
+        project_db_metadata = await get_db_project_metadata(
+            connection, child_project["uuid"]
+        )
+        assert project_db_metadata.parent_project_uuid == parent_project_uuid
+        assert project_db_metadata.parent_node_id == parent_node_id
+
+
+@pytest.mark.parametrize(*standard_user_role_response())
+async def test_new_project_with_invalid_parent_project_node(
+    # for deletion
+    mocked_director_v2_api: None,
+    storage_subsystem_mock: MockedStorageSubsystem,
+    #
+    client: TestClient,
+    logged_user: UserInfoDict,
+    primary_group: dict[str, str],
+    user_project: ProjectDict,
+    expected: ExpectedResponse,
+    catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
+    aiopg_engine: aiopg.sa.Engine,
+    faker: Faker,
+):
+    """this is the new way of setting parents, using request headers"""
+    catalog_subsystem_mock([user_project])
+    parent_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.created,
+        logged_user,
+        primary_group,
+        from_study=user_project,
+    )
+    assert parent_project
+
+    parent_project_uuid = parse_obj_as(ProjectID, parent_project["uuid"])
+    parent_node_id = parse_obj_as(
+        NodeID, random.choice(list(parent_project["workbench"]))  # noqa: S311
+    )
+
+    # creating with a random project UUID should fail
+    random_project_uuid = parse_obj_as(ProjectID, faker.uuid4())
+    child_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.not_found,
+        logged_user,
+        primary_group,
+        parent_project_uuid=random_project_uuid,
+        parent_node_id=parent_node_id,
+    )
+    assert not child_project
+
+    # creating with a random node ID should fail too
+    random_node_id = parse_obj_as(NodeID, faker.uuid4())
+    child_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.not_found,
+        logged_user,
+        primary_group,
+        parent_project_uuid=parent_project_uuid,
+        parent_node_id=random_node_id,
+    )
+    assert not child_project
+
+    # creating with only a parent project ID should fail too
+    child_project = await request_create_project(
+        client,
+        expected.unprocessable,
+        expected.unprocessable,
+        logged_user,
+        primary_group,
+        parent_project_uuid=parent_project_uuid,
+    )
+    assert not child_project
+
+    # creating with only a parent node ID should fail too
+    random_node_id = parse_obj_as(NodeID, faker.uuid4())
+    child_project = await request_create_project(
+        client,
+        expected.unprocessable,
+        expected.unprocessable,
+        logged_user,
+        primary_group,
+        parent_node_id=parent_node_id,
+    )
+    assert not child_project
+
+
+@pytest.mark.parametrize(*standard_user_role_response())
+async def test_set_project_parent_backward_compatibility(
+    # for deletion
+    mocked_director_v2_api: None,
+    storage_subsystem_mock: MockedStorageSubsystem,
+    #
+    client: TestClient,
+    logged_user: UserInfoDict,
+    primary_group: dict[str, str],
+    user_project: ProjectDict,
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
+    expected: ExpectedResponse,
+    aiopg_engine: aiopg.sa.Engine,
+):
+    """backward compatibility with sim4life.io works like this:
+    - create a project
+    - pass project metadata with a node_id inside
+    - osparc will try to find the project ID and set it as the parent
+    """
+    assert client.app
+
+    # create a blank project (no nodes necessary)
+    child_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.created,
+        logged_user,
+        primary_group,
+        project={"name": "child"},
+    )
+
+    # create a parent project with nodes
+    
parent_project = user_project
+
+    # create some custom metadata carrying one of the parent's node IDs
+    random_parent_node_id = NodeID(
+        random.choice(list(parent_project["workbench"]))  # noqa: S311
+    )
+    custom_metadata = {
+        "number": 3.14,
+        "string": "str",
+        "boolean": False,
+        "node_id": f"{random_parent_node_id}",
+    }
+
+    url = client.app.router["update_project_metadata"].url_for(
+        project_id=child_project["uuid"]
+    )
+    response = await client.patch(
+        f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict()
+    )
+    data, _ = await assert_status(response, expected_status_code=status.HTTP_200_OK)
+    assert parse_obj_as(ProjectMetadataGet, data).custom == custom_metadata
+    # check child project has parent set correctly
+    async with aiopg_engine.acquire() as connection:
+        project_db_metadata = await get_db_project_metadata(
+            connection, child_project["uuid"]
+        )
+        assert project_db_metadata.parent_project_uuid == ProjectID(
+            parent_project["uuid"]
+        )
+        assert f"{project_db_metadata.parent_node_id}" in parent_project["workbench"]
+
+
+@pytest.mark.parametrize(*standard_user_role_response())
+async def test_update_project_metadata_backward_compatibility_with_same_project_does_not_raises_and_does_not_work(
+    # for deletion
+    mocked_director_v2_api: None,
+    storage_subsystem_mock: MockedStorageSubsystem,
+    #
+    client: TestClient,
+    faker: Faker,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+    expected: ExpectedResponse,
+    aiopg_engine: aiopg.sa.Engine,
+):
+    assert client.app
+
+    child_project = user_project
+    # setting metadata with a fake node_id does not raise but sets no parent
+    custom_metadata = {
+        "number": 3.14,
+        "string": "str",
+        "boolean": False,
+        "node_id": faker.uuid4(),
+    }
+    url = client.app.router["update_project_metadata"].url_for(
+        project_id=child_project["uuid"]
+    )
+    response = await client.patch(
+        f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict()
+    )
+    await assert_status(response, expected_status_code=expected.ok)
+
+    # using one of its own nodes as parent is not allowed
+    custom_metadata = {
+        "number": 3.14,
+        "string": "str",
+        "boolean": False,
+        "node_id": random.choice(list(child_project["workbench"])),  # noqa: S311
+    }
+    url = client.app.router["update_project_metadata"].url_for(
+        project_id=child_project["uuid"]
+    )
+    response = await client.patch(
+        f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict()
+    )
+    await assert_status(response, expected_status_code=expected.ok)
+
+    # check project has no parent
+    async with aiopg_engine.acquire() as connection:
+        project_db_metadata = await get_db_project_metadata(
+            connection, child_project["uuid"]
+        )
+        assert project_db_metadata.parent_project_uuid is None
+        assert project_db_metadata.parent_node_id is None
+
+
+@pytest.mark.parametrize(*standard_user_role_response())
+async def test_update_project_metadata_s4lacad_backward_compatibility_passing_nil_parent_node_id(
+    # for deletion
+    mocked_director_v2_api: None,
+    storage_subsystem_mock: MockedStorageSubsystem,
+    #
+    client: TestClient,
+    logged_user: UserInfoDict,
+    primary_group: dict[str, str],
+    user_project: ProjectDict,
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
+    expected: ExpectedResponse,
+    aiopg_engine: aiopg.sa.Engine,
+):
+    assert client.app
+
+    child_project = await request_create_project(
+        client,
+        expected.accepted,
+        expected.created,
+        logged_user,
+        primary_group,
+        from_study=user_project,
+    )
+
+    # set metadata with node_id set to UUID(0), which should not raise
+    # Notice that 
the parent project ID is not passed! + custom_metadata = { + "number": 3.14, + "string": "str", + "boolean": False, + "node_id": "00000000-0000-0000-0000-000000000000", + } + url = client.app.router["update_project_metadata"].url_for( + project_id=child_project["uuid"] + ) + response = await client.patch( + f"{url}", json=ProjectMetadataUpdate(custom=custom_metadata).dict() + ) + data, _ = await assert_status(response, expected_status_code=status.HTTP_200_OK) + assert parse_obj_as(ProjectMetadataGet, data).custom == custom_metadata + + # check project has no parent + async with aiopg_engine.acquire() as connection: + project_db_metadata = await get_db_project_metadata( + connection, child_project["uuid"] + ) + assert project_db_metadata.parent_project_uuid is None + assert project_db_metadata.parent_node_id is None diff --git a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py index 00fb580988f..4bef4fa16c9 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py @@ -132,7 +132,6 @@ async def template_project( project_data, client.app, user_id=None, - clear_all=True, tests_data_dir=tests_data_dir, product_name=osparc_product_name, ) as template_project: diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py b/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py index 8488c9fa42b..25c73d9f005 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py @@ -101,7 +101,6 @@ async def template_project( project_data, client.app, user_id=None, - clear_all=True, tests_data_dir=tests_data_dir, product_name=osparc_product_name, ) as template_project: diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 02ea92d4459..b42d20f62e1 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -562,7 +562,7 @@ async def redis_client(redis_service: RedisSettings) -> AsyncIterator[aioredis.R yield client await client.flushall() - await client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) # type: ignore[attr-defined] @pytest.fixture From b169cc8a95943fa7c15c85aa64dcb709d9e2cb70 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:02:32 +0200 Subject: [PATCH 008/219] =?UTF-8?q?=E2=9C=A8=20api-server:=20New=20study?= =?UTF-8?q?=20jobs=20metadata=20entrypoints=20(#5690)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../helpers/httpx_calls_capture_parameters.py | 6 + .../src/pytest_simcore/httpx_calls_capture.py | 6 +- .../api/routes/solvers_jobs.py | 13 +- .../api/routes/solvers_jobs_getters.py | 13 +- .../api/routes/studies_jobs.py | 58 +- .../{api/routes/_jobs.py => services/jobs.py} | 57 +- .../services/webserver.py | 24 +- services/api-server/tests/mocks/cleanup.py | 19 +- ...est_get_and_update_study_job_metadata.json | 615 ++++++++++++++++++ .../test_api_routers_studies_jobs_metadata.py | 195 ++++++ services/api-server/tests/unit/conftest.py | 46 +- 11 files changed, 997 insertions(+), 55 deletions(-) rename services/api-server/src/simcore_service_api_server/{api/routes/_jobs.py => services/jobs.py} (62%) create mode 100644 
services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json create mode 100644 services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py index d2c5056a048..89783d0591c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py @@ -133,3 +133,9 @@ def respx_lookup(self) -> str: class PathDescription(BaseModel): path: str path_parameters: list[CapturedParameter] + + def to_path_regex(self) -> str: + path_regex: str = f"{self.path}" + for param in self.path_parameters: + path_regex = path_regex.replace("{" + param.name + "}", param.respx_lookup) + return path_regex diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index cda489e9070..f2c316fa289 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -242,11 +242,7 @@ def _get_correct_mock_router_for_capture( assert isinstance(url_path, PathDescription) # path - path_regex: str = str(url_path.path) - for param in url_path.path_parameters: - path_regex = path_regex.replace( - "{" + param.name + "}", param.respx_lookup - ) + path_regex = url_path.to_path_regex() # response side_effect = _CaptureSideEffect( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 0e1a1f44d59..9fe37ada46b 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -23,6 +23,7 @@ from ...models.schemas.solvers import Solver, SolverKeyId from ...services.catalog import CatalogApi from ...services.director_v2 import DirectorV2Api +from ...services.jobs import replace_custom_metadata, start_project, stop_project from ...services.solver_job_models_converters import ( create_job_from_project, create_jobstatus_from_task, @@ -33,7 +34,6 @@ from ..dependencies.services import get_api_client from ..dependencies.webserver import AuthSession, get_webserver_session from ._common import API_SERVER_DEV_FEATURES_ENABLED -from ._jobs import start_project, stop_project _logger = logging.getLogger(__name__) @@ -248,13 +248,12 @@ async def replace_job_custom_metadata( job_name = _compose_job_resource_name(solver_key, version, job_id) _logger.debug("Custom metadata for '%s'", job_name) - project_metadata = await webserver_api.update_project_metadata( - project_id=job_id, metadata=update.metadata - ) - return JobMetadata( + return await replace_custom_metadata( + job_name=job_name, job_id=job_id, - metadata=project_metadata.custom, - url=url_for( + update=update, + webserver_api=webserver_api, + self_url=url_for( "replace_job_custom_metadata", solver_key=solver_key, version=version, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py index 54a1673476d..952b126a173 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py +++ 
b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py @@ -32,6 +32,10 @@ from ...models.schemas.solvers import SolverKeyId from ...services.catalog import CatalogApi from ...services.director_v2 import DirectorV2Api, DownloadLink, NodeName +from ...services.jobs import ( + get_custom_metadata, + raise_if_job_not_associated_with_solver, +) from ...services.log_streaming import LogDistributor, LogStreamer from ...services.solver_job_models_converters import create_job_from_project from ...services.solver_job_outputs import ResultsTypes, get_solver_output_results @@ -43,7 +47,6 @@ from ..dependencies.services import get_api_client from ..dependencies.webserver import AuthSession, get_webserver_session from ._common import API_SERVER_DEV_FEATURES_ENABLED -from ._jobs import raise_if_job_not_associated_with_solver from .solvers_jobs import ( JOBS_STATUS_CODES, METADATA_STATUS_CODES, @@ -345,11 +348,11 @@ async def get_job_custom_metadata( job_name = _compose_job_resource_name(solver_key, version, job_id) _logger.debug("Custom metadata for '%s'", job_name) - project_metadata = await webserver_api.get_project_metadata(project_id=job_id) - return JobMetadata( + return await get_custom_metadata( + job_name=job_name, job_id=job_id, - metadata=project_metadata.custom, - url=url_for( + webserver_api=webserver_api, + self_url=url_for( "get_job_custom_metadata", solver_key=solver_key, version=version, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index fd08e82475c..68be168b217 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -28,6 +28,12 @@ ) from ...models.schemas.studies import Study, StudyID from ...services.director_v2 import DirectorV2Api +from ...services.jobs import ( + get_custom_metadata, + replace_custom_metadata, + start_project, + stop_project, +) from ...services.solver_job_models_converters import create_jobstatus_from_task from ...services.storage import StorageApi from ...services.study_job_models_converters import ( @@ -38,18 +44,11 @@ from ...services.webserver import AuthSession from ..dependencies.application import get_reverse_url_mapper from ._common import API_SERVER_DEV_FEATURES_ENABLED -from ._jobs import start_project, stop_project _logger = logging.getLogger(__name__) router = APIRouter() -# -# - Study maps to project -# - study-job maps to run?? -# - - def _compose_job_resource_name(study_key, job_id) -> str: """Creates a unique resource name for solver's jobs""" return Job.compose_resource_name( @@ -274,25 +273,56 @@ async def get_study_job_output_logfile(study_id: StudyID, job_id: JobID): "/{study_id}/jobs/{job_id}/metadata", response_model=JobMetadata, include_in_schema=API_SERVER_DEV_FEATURES_ENABLED, - status_code=status.HTTP_501_NOT_IMPLEMENTED, ) async def get_study_job_custom_metadata( study_id: StudyID, job_id: JobID, + webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): """Gets custom metadata from a job""" - msg = f"Gets metadata attached to study_id={study_id!r} job_id={job_id!r}. 
SEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" - raise NotImplementedError(msg) + job_name = _compose_job_resource_name(study_id, job_id) + msg = f"Gets metadata attached to study_id={study_id!r} job_id={job_id!r}.\njob_name={job_name!r}.\nSEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" + _logger.debug(msg) + + return await get_custom_metadata( + job_name=job_name, + job_id=job_id, + webserver_api=webserver_api, + self_url=url_for( + "get_study_job_custom_metadata", + study_id=study_id, + job_id=job_id, + ), + ) @router.put( "/{study_id}/jobs/{job_id}/metadata", + response_model=JobMetadata, include_in_schema=API_SERVER_DEV_FEATURES_ENABLED, - status_code=status.HTTP_501_NOT_IMPLEMENTED, ) async def replace_study_job_custom_metadata( - study_id: StudyID, job_id: JobID, replace: JobMetadataUpdate + study_id: StudyID, + job_id: JobID, + replace: JobMetadataUpdate, + webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): """Changes job's custom metadata""" - msg = f"Attaches metadata={replace.metadata!r} to study_id={study_id!r} job_id={job_id!r}. SEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" - raise NotImplementedError(msg) + job_name = _compose_job_resource_name(study_id, job_id) + + msg = f"Attaches metadata={replace.metadata!r} to study_id={study_id!r} job_id={job_id!r}.\njob_name={job_name!r}.\nSEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" + _logger.debug(msg) + + return await replace_custom_metadata( + job_name=job_name, + job_id=job_id, + update=replace, + webserver_api=webserver_api, + self_url=url_for( + "replace_study_job_custom_metadata", + study_id=study_id, + job_id=job_id, + ), + ) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/_jobs.py b/services/api-server/src/simcore_service_api_server/services/jobs.py similarity index 62% rename from services/api-server/src/simcore_service_api_server/api/routes/_jobs.py rename to services/api-server/src/simcore_service_api_server/services/jobs.py index 39bb00f23f1..e1e1b63a88f 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/_jobs.py +++ b/services/api-server/src/simcore_service_api_server/services/jobs.py @@ -5,16 +5,22 @@ from fastapi import Depends, HTTPException, Request, status from models_library.api_schemas_webserver.projects import ProjectGet from models_library.clusters import ClusterID -from pydantic import PositiveInt +from pydantic import HttpUrl, PositiveInt from servicelib.logging_utils import log_context -from ...models.schemas.jobs import JobID, JobPricingSpecification, JobStatus -from ...services.director_v2 import DirectorV2Api -from ...services.solver_job_models_converters import create_jobstatus_from_task -from ...services.webserver import AuthSession -from ..dependencies.authentication import get_current_user_id -from ..dependencies.services import get_api_client -from ..dependencies.webserver import get_webserver_session +from ..api.dependencies.authentication import get_current_user_id +from ..api.dependencies.services import get_api_client +from ..api.dependencies.webserver import get_webserver_session +from ..models.schemas.jobs import ( + JobID, + JobMetadata, + JobMetadataUpdate, + JobPricingSpecification, + JobStatus, +) +from .director_v2 import DirectorV2Api +from .solver_job_models_converters import create_jobstatus_from_task +from .webserver import AuthSession _logger = logging.getLogger(__name__) 
@@ -64,3 +70,38 @@ async def stop_project( task = await director2_api.get_computation(job_id, user_id) job_status: JobStatus = create_jobstatus_from_task(task) return job_status + + +async def get_custom_metadata( + *, + job_name: str, + job_id: JobID, + webserver_api: AuthSession, + self_url: HttpUrl, +): + assert job_name # nosec + project_metadata = await webserver_api.get_project_metadata(project_id=job_id) + return JobMetadata( + job_id=job_id, + metadata=project_metadata.custom, + url=self_url, + ) + + +async def replace_custom_metadata( + *, + job_name: str, + job_id: JobID, + update: JobMetadataUpdate, + webserver_api: AuthSession, + self_url: HttpUrl, +): + assert job_name # nosec + project_metadata = await webserver_api.update_project_metadata( + project_id=job_id, metadata=update.metadata + ) + return JobMetadata( + job_id=job_id, + metadata=project_metadata.custom, + url=self_url, + ) diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index fe82850bc1b..89a0b78fc14 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -8,6 +8,7 @@ from typing import Any from uuid import UUID +import httpx from cryptography import fernet from fastapi import FastAPI, status from models_library.api_schemas_api_server.pricing_plans import ServicePricingPlanGet @@ -90,6 +91,14 @@ } +def _get_lrt_urls(lrt_response: httpx.Response): + # WARNING: this function is patched in patch_lrt_response_urls fixture + data = Envelope[TaskGet].parse_raw(lrt_response.text).data + assert data is not None # nosec + + return data.status_href, data.result_href + + class WebserverApi(BaseServiceClientApi): """Access to web-server API @@ -192,9 +201,8 @@ async def _page_projects( return Page[ProjectGet].parse_raw(resp.text) - async def _wait_for_long_running_task_results(self, data: TaskGet): - status_url = data.status_href - result_url = data.result_href + async def _wait_for_long_running_task_results(self, lrt_response: httpx.Response): + status_url, result_url = _get_lrt_urls(lrt_response) # GET task status now until done async for attempt in AsyncRetrying( @@ -255,10 +263,7 @@ async def create_project( cookies=self.session_cookies, ) response.raise_for_status() - data = Envelope[TaskGet].parse_raw(response.text).data - assert data is not None # nosec - - result = await self._wait_for_long_running_task_results(data) + result = await self._wait_for_long_running_task_results(response) return ProjectGet.parse_obj(result) @_exception_mapper(_JOB_STATUS_MAP) @@ -268,10 +273,7 @@ async def clone_project(self, *, project_id: UUID, hidden: bool) -> ProjectGet: "/projects", cookies=self.session_cookies, params=query ) response.raise_for_status() - data = Envelope[TaskGet].parse_raw(response.text).data - assert data is not None # nosec - - result = await self._wait_for_long_running_task_results(data) + result = await self._wait_for_long_running_task_results(response) return ProjectGet.parse_obj(result) @_exception_mapper(_JOB_STATUS_MAP) diff --git a/services/api-server/tests/mocks/cleanup.py b/services/api-server/tests/mocks/cleanup.py index 21716df9f3b..fb563a3ebe8 100644 --- a/services/api-server/tests/mocks/cleanup.py +++ b/services/api-server/tests/mocks/cleanup.py @@ -1,3 +1,4 @@ +import argparse import json import re from pathlib import Path @@ -29,7 +30,23 @@ def anonymize_values(json_key, 
json_data): def main(): - for path in Path.cwd().glob("*.json"): + parser = argparse.ArgumentParser(description="Anonymizes mocks/*.json files") + + parser.add_argument( + "file", nargs="?", type=str, help="The file that will be sanitized" + ) + args = parser.parse_args() + + if args.file: + target = Path(args.file) + assert target.exists() + iter_paths = [ + target, + ] + else: + iter_paths = Path.cwd().glob("*.json") + + for path in iter_paths: print("Anonymizing", path, "...") json_data = anonymize_values(None, json.loads(path.read_text())) path.write_text(json.dumps(json_data, indent=1)) diff --git a/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json new file mode 100644 index 00000000000..1f6eb91a6d8 --- /dev/null +++ b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json @@ -0,0 +1,615 @@ +[ + { + "name": "clone_project", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=784f63f4-1d9f-11ef-892d-0242ac140012&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "task_name": "POST /v0/projects?from_study=784f63f4-1d9f-11ef-892d-0242ac140012&hidden=true", + "status_href": "http://webserver:8080/v0/tasks/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253D784f63f4-1d9f-11ef-892d-0242ac140012%2526hidden%253Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "result_href": "http://webserver:8080/v0/tasks/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253D784f63f4-1d9f-11ef-892d-0242ac140012%2526hidden%253Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72/result", + "abort_href": "http://webserver:8080/v0/tasks/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253D784f63f4-1d9f-11ef-892d-0242ac140012%2526hidden%253Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72" + } + }, + "status_code": 202 + }, + { + "name": "get_clone_project_task_status", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "message": "creating new study...", + "percent": 0.0 + }, + "done": false, + "started": "2024-05-30T10:29:54.137359" + } + } + }, + { + "name": "get_clone_project_task_status_1", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "message": "Checking study access rights...", + "percent": 0.0 + }, + "done": false, + "started": "2024-05-30T10:29:54.137359" + } + } + }, + { + "name": "get_clone_project_task_status_2", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": 
"/v0/tasks/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "message": "finished", + "percent": 1.0 + }, + "done": false, + "started": "2024-05-30T10:29:54.137359" + } + } + }, + { + "name": "get_clone_project_task_status_3", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "message": "finished", + "percent": 1.0 + }, + "done": false, + "started": "2024-05-30T10:29:54.137359" + } + } + }, + { + "name": "get_clone_project_task_status_4", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3D784f63f4-1d9f-11ef-892d-0242ac140012%26hidden%3Dtrue.3b945ded-136e-405a-8ae3-e2b2f3ea9e72", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2024-05-30T10:29:54.137359" + } + } + }, + { + "name": "get_clone_project_task_result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks" + } + ] + }, + "response_body": { + "data": { + "uuid": "8dd46a50-1e6f-11ef-90e3-0242ac14000c", + "name": "New Study (Copy)", + "description": "", + "thumbnail": "", + "creationDate": "2024-05-30T10:29:54.150Z", + "lastChangeDate": "2024-05-30T10:29:54.150Z", + "workbench": { + "45043872-d6d3-530b-bf40-67bfde79191c": { + "key": "simcore/services/dynamic/jupyter-math", + "version": "3.0.2", + "label": "JupyterLab Math (Python+Octave)", + "thumbnail": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [] + } + }, + "prjOwner": "krobinson@example.com", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "locked": { + "value": false, + "status": "CLOSED" + }, + "state": { + "value": "UNKNOWN" + } + }, + "ui": { + "workbench": { + "45043872-d6d3-530b-bf40-67bfde79191c": { + "position": { + "x": 195, + "y": 180 + } + } + }, + "slideshow": {}, + "currentNodeId": "784f63f4-1d9f-11ef-892d-0242ac140012", + "mode": "workbench" + }, + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + 
"level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + } + }, + "dev": {} + } + }, + "status_code": 201 + }, + { + "name": "patch_project", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "name": "studies/784f63f4-1d9f-11ef-892d-0242ac140012/jobs/8dd46a50-1e6f-11ef-90e3-0242ac14000c" + }, + "status_code": 204 + }, + { + "name": "get_project_inputs", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/inputs", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": {} + } + }, + { + "name": "get_project_metadata", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/metadata", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": { + "projectUuid": "8dd46a50-1e6f-11ef-90e3-0242ac14000c", + "custom": {} + } + } + }, + { + "name": "patch_project_metadata", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/metadata", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "custom": { + "number": 3.14, + "integer": 42, + "string": "foo", + "boolean": true + } + }, + "response_body": { + "data": { + "projectUuid": "8dd46a50-1e6f-11ef-90e3-0242ac14000c", + "custom": { + "number": 3.14, + "string": "foo", + "boolean": true, + "integer": 42 + } + } + } + }, + { + "name": "get_project_metadata_1", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": 
"/v0/projects/{project_id}/metadata", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": { + "projectUuid": "8dd46a50-1e6f-11ef-90e3-0242ac14000c", + "custom": { + "number": 3.14, + "string": "foo", + "boolean": true, + "integer": 42 + } + } + } + }, + { + "name": "delete_project", + "description": "", + "method": "DELETE", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "status_code": 204 + }, + { + "name": "get_project_metadata_2", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/metadata", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": null, + "error": { + "logs": [ + { + "message": "Project with uuid '8dd46a50-1e6f-11ef-90e3-0242ac14000c' not found.", + "level": "ERROR", + "logger": "user" + } + ], + "errors": [ + { + "code": "HTTPNotFound", + "message": "Project with uuid '8dd46a50-1e6f-11ef-90e3-0242ac14000c' not found.", + "resource": null, + "field": null + } + ], + "status": 404, + "message": "Project with uuid '8dd46a50-1e6f-11ef-90e3-0242ac14000c' not found." + } + }, + "status_code": 404 + } +] diff --git a/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py new file mode 100644 index 00000000000..526f3320154 --- /dev/null +++ b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py @@ -0,0 +1,195 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import json +import re +from pathlib import Path +from typing import TypedDict + +import httpx +import pytest +from fastapi.encoders import jsonable_encoder +from pydantic import parse_file_as +from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel +from pytest_simcore.helpers.httpx_calls_capture_parameters import PathDescription +from respx import MockRouter +from simcore_service_api_server.models.schemas.jobs import ( + Job, + JobMetadata, + JobMetadataUpdate, +) +from simcore_service_api_server.models.schemas.studies import StudyID +from starlette import status + + +class MockedBackendApiDict(TypedDict): + webserver: MockRouter | None + + +@pytest.fixture +def mocked_backend( + project_tests_dir: Path, + mocked_webserver_service_api_base: MockRouter, +) -> MockedBackendApiDict | None: + # load + captures = { + c.name: c + for c in parse_file_as( + list[HttpApiCallCaptureModel], + project_tests_dir / "mocks" / "test_get_and_update_study_job_metadata.json", + ) + } + + # group captures based on manually adjusted capture names (see assert below) + names = list(captures) + groups = {} + used = set() + for n, name in enumerate(names): + group = ( + [other for other in names[n:] if re.match(rf"{name}_\d+$", other)] + if name not in used + else [] + ) + if name not in used: + 
groups[name] = group + used.update(group) + + print("Captures groups:", json.dumps(groups, indent=1)) + assert groups == { + "clone_project": [], + "get_clone_project_task_status": [ + "get_clone_project_task_status_1", + "get_clone_project_task_status_2", + "get_clone_project_task_status_3", + "get_clone_project_task_status_4", + ], + "get_clone_project_task_result": [], + "patch_project": [], + "get_project_inputs": [], + "get_project_metadata": ["get_project_metadata_1", "get_project_metadata_2"], + "patch_project_metadata": [], + "delete_project": [], + } + + # setup mocks as single or iterable responses + for name, group in groups.items(): + c = captures[name] + assert isinstance(c.path, PathDescription) + if group: + # mock this entrypoint using https://lundberg.github.io/respx/guide/#iterable + cc = [c] + [captures[_] for _ in group] + mocked_webserver_service_api_base.request( + method=c.method.upper(), + url=None, + path__regex=f"^{c.path.to_path_regex()}$", + name=name, + ).mock( + side_effect=[_.as_response() for _ in cc], + ) + else: + mocked_webserver_service_api_base.request( + method=c.method.upper(), + url=None, + path__regex=f"^{c.path.to_path_regex()}$", + name=name, + ).mock(return_value=c.as_response()) + + return MockedBackendApiDict( + webserver=mocked_webserver_service_api_base, + ) + + +@pytest.fixture +def study_id() -> StudyID: + # NOTE: this id is used in mocks/test_get_and_update_study_job_metadata.json + return StudyID("784f63f4-1d9f-11ef-892d-0242ac140012") + + +async def test_get_and_update_study_job_metadata( + auth: httpx.BasicAuth, + client: httpx.AsyncClient, + study_id: StudyID, + mocked_backend: MockedBackendApiDict, +): + """ + To generate mock capture you can run + + pytest \ + --ff \ + --log-cli-level=INFO \ + --pdb \ + --setup-show \ + -sx \ + -vv \ + --spy-httpx-calls-enabled=true \ + --spy-httpx-calls-capture-path=test-httpx-spy-capture.ignore.keep.json \ + --faker-user-id=1 \ + --faker-user-email=foo@email.com \ + --faker-user-api-key=test \ + --faker-user-api-secret=test \ + --faker-project-id=784f63f4-1d9f-11ef-892d-0242ac140012 \ + -k test_get_and_update_study_job_metadata + """ + + # Creates a job (w/o running it) + resp = await client.post( + f"/v0/studies/{study_id}/jobs", + auth=auth, + json={"values": {}}, + ) + assert resp.status_code == status.HTTP_200_OK + job = Job(**resp.json()) + + # Get metadata + resp = await client.get( + f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + auth=auth, + ) + assert resp.status_code == status.HTTP_200_OK + job_meta = JobMetadata(**resp.json()) + + assert job_meta.metadata == {} + + # Update metadata + my_metadata = { + "number": 3.14, + "integer": 42, + "string": "foo", + "boolean": True, + } + resp = await client.put( + f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + auth=auth, + json=jsonable_encoder(JobMetadataUpdate(metadata=my_metadata)), + ) + assert resp.status_code == status.HTTP_200_OK + + job_meta = JobMetadata(**resp.json()) + assert job_meta.metadata == my_metadata + + # Get metadata after update + resp = await client.get( + f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + auth=auth, + ) + assert resp.status_code == status.HTTP_200_OK + job_meta = JobMetadata(**resp.json()) + + assert job_meta.metadata == my_metadata + + # Delete job + resp = await client.delete( + f"/v0/studies/{study_id}/jobs/{job.id}", + auth=auth, + ) + assert resp.status_code == status.HTTP_204_NO_CONTENT + + # Get metadata -> job not found! 
+    resp = await client.get(
+        f"/v0/studies/{study_id}/jobs/{job.id}/metadata",
+        auth=auth,
+    )
+    assert resp.status_code == status.HTTP_404_NOT_FOUND
diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py
index c5b4528ee55..25db6cb0ff0 100644
--- a/services/api-server/tests/unit/conftest.py
+++ b/services/api-server/tests/unit/conftest.py
@@ -10,6 +10,7 @@
 from pathlib import Path
 from typing import Any
 from unittest import mock
+from unittest.mock import MagicMock
 
 import aiohttp.test_utils
 import httpx
@@ -19,6 +20,7 @@
 from asgi_lifespan import LifespanManager
 from faker import Faker
 from fastapi import FastAPI, status
+from fastapi.encoders import jsonable_encoder
 from httpx import ASGITransport
 from models_library.api_schemas_long_running_tasks.tasks import (
     TaskGet,
@@ -32,7 +34,6 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import BaseFileLink, SimcoreS3FileID
 from models_library.users import UserID
-from models_library.utils.fastapi_encoders import jsonable_encoder
 from moto.server import ThreadedMotoServer
 from packaging.version import Version
 from pydantic import EmailStr, HttpUrl, parse_obj_as
@@ -90,12 +91,18 @@ def mock_missing_plugins(app_environment: EnvVarsDict, mocker: MockerFixture):
 def app(
     mock_missing_plugins: EnvVarsDict,
     create_httpx_async_client_spy_if_enabled: Callable,
+    patch_lrt_response_urls: Callable,
+    spy_httpx_calls_enabled: bool,
 ) -> FastAPI:
     """Inits app on a light environment"""
 
-    create_httpx_async_client_spy_if_enabled(
-        "simcore_service_api_server.utils.client_base.AsyncClient"
-    )
+    if spy_httpx_calls_enabled:
+        create_httpx_async_client_spy_if_enabled(
+            "simcore_service_api_server.utils.client_base.AsyncClient"
+        )
+
+        patch_lrt_response_urls()
+
     return init_app()
 
 
@@ -430,6 +437,37 @@ def mocked_solver_job_outputs(mocker) -> None:
     )
 
 
+@pytest.fixture
+def patch_lrt_response_urls(mocker: MockerFixture):
+    """
+    Callable that patches webserver._get_lrt_urls helper
+    when running in spy mode
+    """
+
+    def _() -> MagicMock:
+        def _get_lrt_urls(lrt_response: httpx.Response):
+            # NOTE: this function is needed to mock webserver._get_lrt_urls:
+            # it rewrites the task hrefs so they target the request's own host
+            data = Envelope[TaskGet].parse_raw(lrt_response.text).data
+            assert data is not None  # nosec
+
+            def _patch(href):
+                return lrt_response.request.url.copy_with(
+                    raw_path=httpx.URL(href).raw_path
+                )
+
+            data.status_href = _patch(data.status_href)
+            data.result_href = _patch(data.result_href)
+
+            return data.status_href, data.result_href
+
+        return mocker.patch(
+            "simcore_service_api_server.services.webserver._get_lrt_urls",
+            side_effect=_get_lrt_urls,
+        )
+
+    return _
+
+
 @pytest.fixture
 def patch_webserver_long_running_project_tasks(
     app: FastAPI, faker: Faker, services_mocks_enabled: bool
From 88e23a6b017cb62c6445cb982b5c22f6cc8c394a Mon Sep 17 00:00:00 2001
From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Mon, 3 Jun 2024 11:43:33 +0200
Subject: [PATCH 009/219] =?UTF-8?q?=F0=9F=8E=A8=20introducing=20parent=20i?=
 =?UTF-8?q?ds=20to=20rut=20(=F0=9F=97=83=EF=B8=8F)=20(#5891)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../service_runs.py                           |   3 +-
 .../api_schemas_webserver/resource_usage.py   |   3 +-
 .../src/models_library/rabbitmq_messages.py   |   7 +
 .../481d5b472721_add_parent_fields_to_rut.py  | 120 ++++++++++++++++++
 .../models/resource_tracker_service_runs.py   |  35 ++++++++-
 .../utils/rabbitmq.py                         |   5 +
 .../modules/resource_tracking/_core.py        |   5 +
.../models/resource_tracker_service_runs.py | 10 ++ .../db/repositories/resource_tracker.py | 10 ++ .../resource_tracker_process_messages.py | 5 + .../services/resource_tracker_service_runs.py | 3 +- .../tests/unit/with_dbs/conftest.py | 10 ++ .../api/v0/openapi.yaml | 13 +- .../resource_usage/_service_runs_handlers.py | 2 + .../test_usage_services__list.py | 12 +- 15 files changed, 224 insertions(+), 19 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/481d5b472721_add_parent_fields_to_rut.py diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py index 3c3c5905d58..8abb2da421e 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py @@ -22,10 +22,11 @@ class ServiceRunGet(BaseModel): project_name: str node_id: NodeID node_name: str + root_parent_project_id: ProjectID + root_parent_project_name: str service_key: ServiceKey service_version: ServiceVersion service_type: str - service_resources: dict started_at: datetime stopped_at: datetime | None service_run_status: ServiceRunStatus diff --git a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py index 4e514b8776e..fa150f9ffc6 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py @@ -35,10 +35,11 @@ class ServiceRunGet( project_name: str node_id: NodeID node_name: str + root_parent_project_id: ProjectID + root_parent_project_name: str service_key: ServiceKey service_version: ServiceVersion service_type: str - service_resources: dict started_at: datetime stopped_at: datetime | None service_run_status: ServiceRunStatus diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index 42374baeb4c..902bac4d279 100644 --- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -217,6 +217,13 @@ class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage): node_id: NodeID node_name: str + parent_project_id: ProjectID + root_parent_project_id: ProjectID + root_parent_project_name: str + + parent_node_id: NodeID + root_parent_node_id: NodeID + service_key: ServiceKey service_version: ServiceVersion service_type: ServiceType diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/481d5b472721_add_parent_fields_to_rut.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/481d5b472721_add_parent_fields_to_rut.py new file mode 100644 index 00000000000..8458cf73442 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/481d5b472721_add_parent_fields_to_rut.py @@ -0,0 +1,120 @@ +"""add_parent_fields_to_rut + +Revision ID: 481d5b472721 +Revises: 0d85bd35bdaa +Create Date: 2024-06-03 08:58:35.086686+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "481d5b472721"
+down_revision = "0d85bd35bdaa"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "resource_tracker_service_runs",
+        sa.Column("parent_project_id", sa.String(), nullable=True),
+    )
+    op.add_column(
+        "resource_tracker_service_runs",
+        sa.Column("root_parent_project_id", sa.String(), nullable=True),
+    )
+    op.add_column(
+        "resource_tracker_service_runs",
+        sa.Column("root_parent_project_name", sa.String(), nullable=True),
+    )
+    op.add_column(
+        "resource_tracker_service_runs",
+        sa.Column("parent_node_id", sa.String(), nullable=True),
+    )
+    op.add_column(
+        "resource_tracker_service_runs",
+        sa.Column("root_parent_node_id", sa.String(), nullable=True),
+    )
+
+    # Populate new columns with values from the existing columns
+    op.execute(
+        sa.DDL(
+            """
+
+    UPDATE resource_tracker_service_runs
+    SET parent_project_id = project_id,
+        root_parent_project_id = project_id,
+        root_parent_project_name = project_name,
+        parent_node_id = node_id,
+        root_parent_node_id = node_id
+    """
+        )
+    )
+
+    # Make newly created columns non-nullable
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "parent_project_id",
+        nullable=False,
+    )
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "root_parent_project_id",
+        nullable=False,
+    )
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "root_parent_project_name",
+        nullable=False,
+    )
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "parent_node_id",
+        nullable=False,
+    )
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "root_parent_node_id",
+        nullable=False,
+    )
+
+    # Make already existing columns non-nullable
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "project_name",
+        existing_type=sa.VARCHAR(),
+        nullable=False,
+    )
+    op.alter_column(
+        "resource_tracker_service_runs",
+        "node_name",
+        existing_type=sa.VARCHAR(),
+        nullable=False,
+    )
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust!
### + op.alter_column( + "resource_tracker_service_runs", + "node_name", + existing_type=sa.VARCHAR(), + nullable=True, + ) + op.alter_column( + "resource_tracker_service_runs", + "project_name", + existing_type=sa.VARCHAR(), + nullable=True, + ) + op.drop_column("resource_tracker_service_runs", "root_parent_node_id") + op.drop_column("resource_tracker_service_runs", "parent_node_id") + op.drop_column("resource_tracker_service_runs", "root_parent_project_name") + op.drop_column("resource_tracker_service_runs", "root_parent_project_id") + op.drop_column("resource_tracker_service_runs", "parent_project_id") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_service_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_service_runs.py index 012d0e6b27c..33eddcb9fc7 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_service_runs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_service_runs.py @@ -104,7 +104,7 @@ class ResourceTrackerServiceRunStatus(str, enum.Enum): sa.Column( "project_name", sa.String, - nullable=True, + nullable=False, doc="we want to store the project name for tracking/billing purposes and be sure it stays there even when the project is deleted (that's also reason why we do not introduce foreign key)", ), # Node fields @@ -117,9 +117,40 @@ class ResourceTrackerServiceRunStatus(str, enum.Enum): sa.Column( "node_name", sa.String, - nullable=True, + nullable=False, doc="we want to store the node/service name/label for tracking/billing purposes and be sure it stays there even when the node is deleted.", ), + # Project/Node parent fields + sa.Column( + "parent_project_id", # UUID + sa.String, + nullable=False, + doc="If a user starts computational jobs via a dynamic service, a new project is created in the backend. This newly created project is considered a child project, and the project from which it was created is the parent project. We want to store the parent project ID for tracking and billing purposes, and ensure it remains even when the node is deleted. This is also the reason why we do not introduce a foreign key.", + ), + sa.Column( + "root_parent_project_id", # UUID + sa.String, + nullable=False, + doc="Similar to the parent project concept, we are flexible enough to allow multiple nested computational jobs, which create multiple nested projects. 
For this reason, we keep the root parent project ID, so we know from which project the user started their computation.",
+    ),
+    sa.Column(
+        "root_parent_project_name",
+        sa.String,
+        nullable=False,
+        doc="We want to store the root parent project name for tracking/billing purposes.",
+    ),
+    sa.Column(
+        "parent_node_id",  # UUID
+        sa.String,
+        nullable=False,
+        doc="Since each project can have multiple nodes, similar to the parent project concept, we also store the parent node.",
+    ),
+    sa.Column(
+        "root_parent_node_id",  # UUID
+        sa.String,
+        nullable=False,
+        doc="Since each project can have multiple nodes, similar to the root parent project concept, we also store the root parent node.",
+    ),
     # Service fields
     sa.Column(
         "service_key",
diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py
index 87b99c46071..e239d57b673 100644
--- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py
+++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py
@@ -106,6 +106,11 @@ async def publish_service_resource_tracking_started(  # pylint: disable=too-many
         project_name=project_name,
         node_id=node_id,
         node_name=node_name,
+        parent_project_id=project_id,  # <-- SAN please modify
+        root_parent_project_id=project_id,  # <-- SAN please modify
+        root_parent_project_name=project_name,  # <-- SAN please modify
+        parent_node_id=node_id,  # <-- SAN please modify
+        root_parent_node_id=node_id,  # <-- SAN please modify
         service_key=service_key,
         service_version=service_version,
         service_type=service_type,
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py
index 524f21c83e0..031b42ff324 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py
@@ -110,6 +110,11 @@ async def send_service_started(
         project_name=metrics_params.project_name,
         node_id=settings.DY_SIDECAR_NODE_ID,
         node_name=metrics_params.node_name,
+        parent_project_id=settings.DY_SIDECAR_PROJECT_ID,
+        root_parent_project_id=settings.DY_SIDECAR_PROJECT_ID,
+        root_parent_project_name=metrics_params.project_name,
+        parent_node_id=settings.DY_SIDECAR_NODE_ID,
+        root_parent_node_id=settings.DY_SIDECAR_NODE_ID,
         service_key=metrics_params.service_key,
         service_version=metrics_params.service_version,
         service_type=ServiceType.DYNAMIC,
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py
index eadab0831d4..f68e9ff2c5a 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py
@@ -35,6 +35,11 @@ class ServiceRunCreate(BaseModel):
     project_name: str
     node_id: NodeID
     node_name: str
+    parent_project_id: ProjectID
+    root_parent_project_id: ProjectID
+    root_parent_project_name: str
+    parent_node_id: NodeID
+    root_parent_node_id: NodeID
     service_key: ServiceKey
     service_version: ServiceVersion
     service_type: ResourceTrackerServiceType
@@ -72,6 +77,11 @@ class
ServiceRunDB(BaseModel): project_name: str node_id: NodeID node_name: str + parent_project_id: ProjectID + root_parent_project_id: ProjectID + root_parent_project_name: str + parent_node_id: NodeID + root_parent_node_id: NodeID service_key: ServiceKey service_version: ServiceVersion service_type: ResourceTrackerServiceType diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py index 560138176d7..f24f4930344 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py @@ -104,6 +104,11 @@ async def create_service_run(self, data: ServiceRunCreate) -> ServiceRunId: project_name=data.project_name, node_id=f"{data.node_id}", node_name=data.node_name, + parent_project_id=f"{data.parent_project_id}", + root_parent_project_id=f"{data.root_parent_project_id}", + root_parent_project_name=data.root_parent_project_name, + parent_node_id=f"{data.parent_node_id}", + root_parent_node_id=f"{data.root_parent_node_id}", service_key=data.service_key, service_version=data.service_version, service_type=data.service_type, @@ -231,6 +236,11 @@ async def list_service_runs_by_product_and_user_and_wallet( resource_tracker_service_runs.c.project_name, resource_tracker_service_runs.c.node_id, resource_tracker_service_runs.c.node_name, + resource_tracker_service_runs.c.parent_project_id, + resource_tracker_service_runs.c.root_parent_project_id, + resource_tracker_service_runs.c.root_parent_project_name, + resource_tracker_service_runs.c.parent_node_id, + resource_tracker_service_runs.c.root_parent_node_id, resource_tracker_service_runs.c.service_key, resource_tracker_service_runs.c.service_version, resource_tracker_service_runs.c.service_type, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py index 591bbb98b47..9a1bc1b6170 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py @@ -109,6 +109,11 @@ async def _process_start_event( project_name=msg.project_name, node_id=msg.node_id, node_name=msg.node_name, + parent_project_id=msg.parent_project_id, + root_parent_project_id=msg.root_parent_project_id, + root_parent_project_name=msg.root_parent_project_name, + parent_node_id=msg.parent_node_id, + root_parent_node_id=msg.root_parent_node_id, service_key=msg.service_key, service_version=msg.service_version, service_type=service_type, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py index 0618838c69b..536d7c9423f 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py @@ -118,12 
+118,13 @@ async def list_service_runs( user_email=service.user_email, project_id=service.project_id, project_name=service.project_name, + root_parent_project_id=service.root_parent_project_id, + root_parent_project_name=service.root_parent_project_name, node_id=service.node_id, node_name=service.node_name, service_key=service.service_key, service_version=service.service_version, service_type=service.service_type, - service_resources=service.service_resources, started_at=service.started_at, stopped_at=service.stopped_at, service_run_status=service.service_run_status, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py index fda8e0eea1a..3d476e01d6d 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py @@ -98,6 +98,11 @@ def _creator(**overrides) -> dict[str, Any]: "project_name": faker.word(), "node_id": faker.uuid4(), "node_name": faker.word(), + "parent_project_id": faker.uuid4(), + "root_parent_project_id": faker.uuid4(), + "root_parent_project_name": faker.pystr(), + "parent_node_id": faker.uuid4(), + "root_parent_node_id": faker.uuid4(), "service_key": "simcore/services/dynamic/jupyter-smash", "service_version": "3.0.7", "service_type": "DYNAMIC_SERVICE", @@ -238,6 +243,11 @@ def _creator(**kwargs: dict[str, Any]) -> RabbitResourceTrackingStartedMessage: "project_name": faker.pystr(), "node_id": faker.uuid4(), "node_name": faker.pystr(), + "parent_project_id": faker.uuid4(), + "root_parent_project_id": faker.uuid4(), + "root_parent_project_name": faker.pystr(), + "parent_node_id": faker.uuid4(), + "root_parent_node_id": faker.uuid4(), "service_key": "simcore/services/comp/itis/sleeper", "service_version": "2.1.6", "service_type": "computational", diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index d930d3e42b8..621a50ff25a 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -10015,10 +10015,11 @@ components: - project_name - node_id - node_name + - root_parent_project_id + - root_parent_project_name - service_key - service_version - service_type - - service_resources - started_at - service_run_status type: object @@ -10053,6 +10054,13 @@ components: node_name: title: Node Name type: string + root_parent_project_id: + title: Root Parent Project Id + type: string + format: uuid + root_parent_project_name: + title: Root Parent Project Name + type: string service_key: title: Service Key pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ @@ -10064,9 +10072,6 @@ components: service_type: title: Service Type type: string - service_resources: - title: Service Resources - type: object started_at: title: Started At type: string diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py index a605fe1f853..a5efdd515f2 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py @@ -89,6 +89,8 @@ def validate_order_by_field(cls, v): "project_name", "node_id", "node_name", + 
"root_parent_project_id", + "root_parent_project_name", "service_key", "service_version", "service_type", diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py index 0dea4c45928..e5fe848543b 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py @@ -37,19 +37,11 @@ "project_name": "osparc", "node_id": "3d2133f4-aba4-4364-9f7a-9377dea1221f", "node_name": "sleeper", + "root_parent_project_id": "5c2110be-441b-11ee-a0e8-02420a000040", + "root_parent_project_name": "osparc", "service_key": "simcore/services/comp/itis/sleeper", "service_version": "2.0.2", "service_type": "DYNAMIC_SERVICE", - "service_resources": { - "container": { - "image": "simcore/services/comp/itis/sleeper:2.0.2", - "resources": { - "CPU": {"limit": 0.1, "reservation": 0.1}, - "RAM": {"limit": 2147483648, "reservation": 2147483648}, - }, - "boot_modes": ["CPU"], - } - }, "started_at": "2023-08-26T14:18:17.600493+00:00", "stopped_at": "2023-08-26T14:18:19.358355+00:00", "service_run_status": "SUCCESS", From dd7da65b181e374ba76812120d99e8c06948e4f1 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 3 Jun 2024 12:55:19 +0200 Subject: [PATCH 010/219] =?UTF-8?q?=E2=9C=A8=20Frontend:=20Expose=20``inpu?= =?UTF-8?q?ts=20required``=20property=20(#5899)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/StudyBrowser.js | 4 +- .../source/class/osparc/data/model/Node.js | 57 +++++-------- .../class/osparc/data/model/Workbench.js | 11 +-- .../class/osparc/desktop/StudyEditor.js | 4 +- .../source/class/osparc/form/PortInfoHint.js | 2 +- .../class/osparc/form/renderer/PropForm.js | 79 +++++++++++++------ .../osparc/form/renderer/PropFormBase.js | 34 +++++++- .../source/class/osparc/widget/NodeOptions.js | 2 + 8 files changed, 119 insertions(+), 74 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 1f4260bbf29..b1a87f56194 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -285,8 +285,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _reloadCards: function() { - this.__addNewStudyButtons(); - const fetching = this._loadingResourcesBtn ? this._loadingResourcesBtn.getFetching() : false; const visibility = this._loadingResourcesBtn ? 
this._loadingResourcesBtn.getVisibility() : "excluded"; @@ -294,6 +292,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const cards = this._resourcesContainer.reloadCards("studiesList"); this.__configureCards(cards); + this.__addNewStudyButtons(); + const loadMoreBtn = this.__createLoadMoreButton(); loadMoreBtn.set({ fetching, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index afca119c2c9..488c1b54f6f 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -53,7 +53,6 @@ qx.Class.define("osparc.data.model.Node", { this.setOutputs({}); this.__inputNodes = []; - this.__exposedNodes = []; if (study) { this.setStudy(study); @@ -136,6 +135,12 @@ qx.Class.define("osparc.data.model.Node", { event: "changeInputs" }, + inputsRequired: { + check: "Array", + init: [], + event: "changeInputsRequired" + }, + outputs: { check: "Object", nullable: false, @@ -228,7 +233,8 @@ qx.Class.define("osparc.data.model.Node", { "fileUploaded": "qx.event.type.Event", "showInLogger": "qx.event.type.Data", "outputListChanged": "qx.event.type.Event", - "changeInputNodes": "qx.event.type.Event" + "changeInputNodes": "qx.event.type.Event", + "changeInputsRequired": "qx.event.type.Event" }, statics: { @@ -331,7 +337,6 @@ qx.Class.define("osparc.data.model.Node", { members: { __metaData: null, __inputNodes: null, - __exposedNodes: null, __settingsForm: null, __posX: null, __posY: null, @@ -512,7 +517,8 @@ qx.Class.define("osparc.data.model.Node", { } this.setOutputData(nodeData.outputs); this.addInputNodes(nodeData.inputNodes); - this.addOutputNodes(nodeData.outputNodes); + // backwards compatible + this.setInputsRequired(nodeData.inputsRequired || []); }, populateStates: function(nodeData) { @@ -875,43 +881,17 @@ qx.Class.define("osparc.data.model.Node", { }, // !---- Input Nodes ----- - // ----- Output Nodes ----- - getOutputNodes: function() { - return this.__exposedNodes; - }, - - addOutputNodes: function(outputNodes) { - if (outputNodes) { - outputNodes.forEach(outputNode => { - this.addOutputNode(outputNode); - }); - } - }, - - addOutputNode: function(outputNodeId) { - if (!this.__exposedNodes.includes(outputNodeId)) { - this.__exposedNodes.push(outputNodeId); - this.fireEvent("outputListChanged"); - return true; - } - return false; - }, - - removeOutputNode: function(outputNodeId) { - const index = this.__exposedNodes.indexOf(outputNodeId); + toggleInputRequired: function(portId) { + const inputsRequired = this.getInputsRequired(); + const index = inputsRequired.indexOf(portId); if (index > -1) { - // remove node connection - this.__exposedNodes.splice(index, 1); - this.fireEvent("outputListChanged"); + inputsRequired.splice(index, 1); + } else { + inputsRequired.push(portId); } - return false; - }, - - isOutputNode: function(outputNodeId) { - const index = this.__exposedNodes.indexOf(outputNodeId); - return (index > -1); + this.setInputsRequired(inputsRequired); + this.fireEvent("changeInputsRequired"); }, - // !---- Output Nodes ----- canNodeStart: function() { return this.isDynamic() && ["idle", "failed"].includes(this.getStatus().getInteractive()); @@ -1532,6 +1512,7 @@ qx.Class.define("osparc.data.model.Node", { inputsUnits: this.__getInputUnits(), inputAccess: this.getInputAccess(), inputNodes: this.getInputNodes(), + inputsRequired: this.getInputsRequired(), thumbnail: 
this.getThumbnail(),
         bootOptions: this.getBootOptions()
       };
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js
index 072913795d4..01da1153297 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js
@@ -676,12 +676,11 @@ qx.Class.define("osparc.data.model.Workbench", {
         if (node === null) {
           continue;
         }
-        this.__addInputOutputNodesAndEdges(node, nodeData.inputNodes, true);
-        this.__addInputOutputNodesAndEdges(node, nodeData.outputNodes, false);
+        this.__addInputOutputNodesAndEdges(node, nodeData.inputNodes);
       }
     },
 
-    __addInputOutputNodesAndEdges: function(node, inputOutputNodeIds, isInput) {
+    __addInputOutputNodesAndEdges: function(node, inputOutputNodeIds) {
       if (inputOutputNodeIds) {
         inputOutputNodeIds.forEach(inputOutputNodeId => {
           const node1 = this.getNode(inputOutputNodeId);
@@ -690,11 +689,7 @@ qx.Class.define("osparc.data.model.Workbench", {
           }
           const edge = new osparc.data.model.Edge(null, node1, node);
           this.addEdge(edge);
-          if (isInput) {
-            node.addInputNode(inputOutputNodeId);
-          } else {
-            node.addOutputNode(inputOutputNodeId);
-          }
+          node.addInputNode(inputOutputNodeId);
         });
       }
     },
diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js
index 9ad894e029a..4b0848288d2 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js
@@ -169,7 +169,7 @@ qx.Class.define("osparc.desktop.StudyEditor", {
 
       study.openStudy()
         .then(() => {
-          this.__lastSavedStudy = study.serialize();
+          this.__lastSavedStudy = osparc.utils.Utils.deepCloneObject(study.serialize());
 
           this.__workbenchView.setStudy(study);
           this.__slideshowView.setStudy(study);
@@ -761,7 +761,7 @@ qx.Class.define("osparc.desktop.StudyEditor", {
       const newObj = this.getStudy().serialize();
       return this.getStudy().updateStudy(newObj, run)
         .then(() => {
-          this.__lastSavedStudy = osparc.wrapper.JsonDiffPatch.getInstance().clone(newObj);
+          this.__lastSavedStudy = osparc.utils.Utils.deepCloneObject(newObj);
         })
         .catch(error => {
           if ("status" in error && error.status === 409) {
diff --git a/services/static-webserver/client/source/class/osparc/form/PortInfoHint.js b/services/static-webserver/client/source/class/osparc/form/PortInfoHint.js
index 9c1bf89f9b6..70ddcf32bf0 100644
--- a/services/static-webserver/client/source/class/osparc/form/PortInfoHint.js
+++ b/services/static-webserver/client/source/class/osparc/form/PortInfoHint.js
@@ -38,7 +38,7 @@ qx.Class.define("osparc.form.PortInfoHint", {
       const color = qx.theme.manager.Color.getInstance().resolve("failed-red");
       text += `<br><br><font color="${color}">${errorMsg}</font>`;
     }
-    this._hint.setText(text);
+    this.setHintText(text);
    this.set({
      source: errorMsg ? this.self().ERROR_ICON : osparc.ui.hint.InfoHint.INFO_ICON,
      textColor: errorMsg ? "failed-red" : "text"
diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js
index 733be9bd936..07fa01cb1f2 100644
--- a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js
+++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js
@@ -286,6 +286,12 @@ qx.Class.define("osparc.form.renderer.PropForm", {
           });
         });
       }
+
+      if (optionsMenu.getChildren().length) {
+        optionsMenu.addSeparator();
+      }
+      const inputRequiredButton = this.__getInputRequiredButton(field.key);
+      optionsMenu.add(inputRequiredButton);
     },
 
     __connectToInputNode: function(targetPortId, inputNodeId, outputKey) {
@@ -342,6 +348,26 @@ qx.Class.define("osparc.form.renderer.PropForm", {
       return null;
     },
 
+    __populateInputNodePortsMenu: function(inputNodeId, targetPortId, menu, menuBtn) {
+      menuBtn.exclude();
+      menu.removeAll();
+
+      const inputNode = this.getStudy().getWorkbench().getNode(inputNodeId);
+      if (inputNode) {
+        for (const outputKey in inputNode.getOutputs()) {
+          osparc.utils.Ports.arePortsCompatible(inputNode, outputKey, this.getNode(), targetPortId)
+            .then(compatible => {
+              if (compatible) {
+                const paramButton = new qx.ui.menu.Button(inputNode.getOutput(outputKey).label);
+                paramButton.addListener("execute", () => this.__connectToInputNode(targetPortId, inputNodeId, outputKey), this);
+                menu.add(paramButton);
+                menuBtn.show();
+              }
+            });
+        }
+      }
+    },
+
     __getSelectFileButton: function(portId) {
       const selectFileButton = new qx.ui.menu.Button(this.tr("Select File"));
       selectFileButton.addListener("execute", () => this.fireDataEvent("filePickerRequested", {
@@ -366,26 +392,6 @@ qx.Class.define("osparc.form.renderer.PropForm", {
       return existingParamBtn;
     },
 
-    __populateInputNodePortsMenu: function(inputNodeId, targetPortId, menu, menuBtn) {
-      menuBtn.exclude();
-      menu.removeAll();
-
-      const inputNode = this.getStudy().getWorkbench().getNode(inputNodeId);
-      if (inputNode) {
-        for (const outputKey in inputNode.getOutputs()) {
-          osparc.utils.Ports.arePortsCompatible(inputNode, outputKey, this.getNode(), targetPortId)
-            .then(compatible => {
-              if (compatible) {
-                const paramButton = new qx.ui.menu.Button(inputNode.getOutput(outputKey).label);
-                paramButton.addListener("execute", () => this.__connectToInputNode(targetPortId, inputNodeId, outputKey), this);
-                menu.add(paramButton);
-                menuBtn.show();
-              }
-            });
-        }
-      }
-    },
-
     __populateExistingParamsMenu: function(targetPortId, menu, menuBtn) {
       menuBtn.exclude();
       menu.removeAll();
@@ -410,6 +416,26 @@ qx.Class.define("osparc.form.renderer.PropForm", {
       });
     },
 
+    __getInputRequiredButton: function(portId) {
+      const node = this.getNode();
+      const inputRequiredBtn = new qx.ui.menu.Button(this.tr("Required Input"));
+      const evalButton = () => {
+        if (node.getInputsRequired().includes(portId)) {
+          inputRequiredBtn.set({
+            icon: "@FontAwesome5Regular/check-square/12"
+          });
+        } else {
+          inputRequiredBtn.set({
+            icon: "@FontAwesome5Regular/square/12"
+          });
+        }
+      }
+      node.addListener("changeInputsRequired", () => evalButton(), this);
+      inputRequiredBtn.addListener("execute", () => node.toggleInputRequired(portId), this);
+      evalButton();
+      return inputRequiredBtn;
+    },
+
     // overridden
     addItems: function(items, names, title, itemOptions, headerOptions) {
this.base(arguments, items, names, title, itemOptions, headerOptions);
@@ -419,6 +445,7 @@ qx.Class.define("osparc.form.renderer.PropForm", {
 
       for (let i = 0; i < items.length; i++) {
         const item = items[i];
+        const portId = item.key;
 
         const fieldOpts = this.__createLinkUnlinkStack(item);
         if (fieldOpts) {
@@ -428,10 +455,10 @@ qx.Class.define("osparc.form.renderer.PropForm", {
           });
         }
 
-        this.__createDropMechanism(item, item.key);
+        this.__createDropMechanism(item, portId);
 
         // Notify focus and focus out
-        const msgDataFn = (nodeId, portId) => this.__arePortsCompatible(nodeId, portId, this.getNode().getNodeId(), item.key);
+        const msgDataFn = (nodeId, pId) => this.__arePortsCompatible(nodeId, pId, this.getNode().getNodeId(), item.key);
 
         item.addListener("focus", () => {
           if (this.getNode()) {
@@ -447,6 +474,14 @@ qx.Class.define("osparc.form.renderer.PropForm", {
         row++;
       }
 
+      const evalRequired = () => {
+        for (const portId in this.__ctrlLinkMap) {
+          this.evalFieldRequired(portId);
+        }
+      }
+      this.getNode().addListener("changeInputsRequired", () => evalRequired());
+      evalRequired();
+
       // add port button
       const addPortButton = this.__addInputPortButton = new qx.ui.form.Button().set({
         label: this.tr("Input"),
diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js
index f6110e38204..bda6c498aed 100644
--- a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js
+++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js
@@ -125,7 +125,7 @@ qx.Class.define("osparc.form.renderer.PropFormBase", {
 
         const label = this._createLabel(names[i], item);
         label.set({
-          rich: false, // override, required for showing the vut off ellipses
+          rich: false, // override, required for showing the cut off ellipses
          toolTipText: names[i]
        });
        label.setBuddy(item);
@@ -191,6 +191,38 @@ qx.Class.define("osparc.form.renderer.PropFormBase", {
       return filteredData;
     },
 
+    evalFieldRequired: function(portId) {
+      const label = this._getLabelFieldChild(portId).child;
+      const inputsRequired = this.getNode().getInputsRequired();
+
+      // add star (*) to the label
+      const requiredSuffix = " *";
+      let newLabel = label.getValue();
+      newLabel = newLabel.replace(requiredSuffix, "");
+      if (inputsRequired.includes(portId)) {
+        newLabel += requiredSuffix;
+      }
+      label.setValue(newLabel);
+
+      // add "required" text to the label's tooltip
+      const toolTipSuffix = "<br>" + this.tr("Required input: without it, the service will not start/run.");
+      let newToolTip = label.getToolTipText();
+      newToolTip = newToolTip.replace(toolTipSuffix, "");
+      if (inputsRequired.includes(portId)) {
+        newToolTip += toolTipSuffix;
+      }
+      label.setToolTipText(newToolTip);
+
+      // add "required" text to the description
+      const infoButton = this._getInfoFieldChild(portId).child;
+      let newHintText = infoButton.getHintText();
+      newHintText = newHintText.replace(toolTipSuffix, "");
+      if (inputsRequired.includes(portId)) {
+        newHintText += toolTipSuffix;
+      }
+      infoButton.setHintText(newHintText);
+    },
+
     getChangedXUnits: function() {
       const xUnits = {};
       const ctrls = this._form.getControls();
diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
index 2aafc7f2dd6..180de5bb2cb 100644
--- a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
+++ b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
@@ -113,6 +113,8 @@ qx.Class.define("osparc.widget.NodeOptions", {
       const startStopButton = new osparc.node.StartStopButton();
       startStopButton.setNode(node);
       this._add(startStopButton);
+
+      startStopButton.getChildControl("stop-button").bind("visibility", instructions, "visibility");
     }
 
     sections.forEach(section => this._add(section));
From 00b094474968496f4dc65eb5ffa69f3131b45cce Mon Sep 17 00:00:00 2001
From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com>
Date: Mon, 3 Jun 2024 15:06:49 +0200
Subject: [PATCH 011/219] =?UTF-8?q?=F0=9F=8E=A8=20propagate=20job=20parent?=
 =?UTF-8?q?=20ids=20through=20api=20server=20(#5903)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 services/api-server/openapi.json              | 60 +++++++++++++++++++
 .../api/routes/solvers_jobs.py                | 11 +++-
 .../api/routes/studies.py                     | 11 +++-
 .../api/routes/studies_jobs.py                | 13 +++-
 .../services/webserver.py                     | 35 ++++++++++-
 .../test_api_routers_solvers_jobs_delete.py   | 35 ++++++++++-
 .../api_studies/test_api_routes_studies.py    | 44 +++++++++++++-
 .../test_api_routes_studies_jobs.py           | 29 +++++++++
 8 files changed, 225 insertions(+), 13 deletions(-)

diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json
index 828876780ce..7989d4a9c48 100644
--- a/services/api-server/openapi.json
+++ b/services/api-server/openapi.json
@@ -2038,6 +2038,26 @@
         },
         "name": "hidden",
         "in": "query"
+      },
+      {
+        "required": false,
+        "schema": {
+          "type": "string",
+          "format": "uuid",
+          "title": "X-Simcore-Parent-Project-Uuid"
+        },
+        "name": "x-simcore-parent-project-uuid",
+        "in": "header"
+      },
+      {
+        "required": false,
+        "schema": {
+          "type": "string",
+          "format": "uuid",
+          "title": "X-Simcore-Parent-Node-Id"
+        },
+        "name": "x-simcore-parent-node-id",
+        "in": "header"
       }
     ],
     "requestBody": {
@@ -3254,6 +3274,26 @@
         },
         "name": "study_id",
         "in": "path"
+      },
+      {
+        "required": false,
+        "schema": {
+          "type": "string",
+          "format": "uuid",
+          "title": "X-Simcore-Parent-Project-Uuid"
+        },
+        "name": "x-simcore-parent-project-uuid",
+        "in": "header"
+      },
+      {
+        "required": false,
+        "schema": {
+          "type": "string",
+          "format": "uuid",
+          "title": "X-Simcore-Parent-Node-Id"
+        },
+        "name": "x-simcore-parent-node-id",
+        "in": "header"
       }
     ],
     "responses": {
@@ -3382,6 +3422,26 @@
         },
         "name": "hidden",
         "in": "query"
+      },
+      {
+        "required": false,
+        "schema": {
+          "type": "string",
+          "format": "uuid",
+          "title": "X-Simcore-Parent-Project-Uuid"
+        },
+
"name": "x-simcore-parent-project-uuid", + "in": "header" + }, + { + "required": false, + "schema": { + "type": "string", + "format": "uuid", + "title": "X-Simcore-Parent-Node-Id" + }, + "name": "x-simcore-parent-node-id", + "in": "header" } ], "requestBody": { diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 9fe37ada46b..82a461d91e7 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -4,9 +4,11 @@ from collections.abc import Callable from typing import Annotated, Any -from fastapi import APIRouter, Depends, Query, Request, status +from fastapi import APIRouter, Depends, Header, Query, Request, status from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet from models_library.clusters import ClusterID +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from pydantic.types import PositiveInt from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES @@ -87,6 +89,8 @@ async def create_job( url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], product_name: Annotated[str, Depends(get_product_name)], hidden: Annotated[bool, Query()] = True, + x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, + x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None, ): """Creates a job in a specific release with given inputs. @@ -107,7 +111,10 @@ async def create_job( project_in: ProjectCreateNew = create_new_project_for_job(solver, pre_job, inputs) new_project: ProjectGet = await webserver_api.create_project( - project_in, is_hidden=hidden + project_in, + is_hidden=hidden, + parent_project_uuid=x_simcore_parent_project_uuid, + parent_node_id=x_simcore_parent_node_id, ) assert new_project # nosec assert new_project.uuid == pre_job.id # nosec diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies.py b/services/api-server/src/simcore_service_api_server/api/routes/studies.py index 29327098aed..d4c37bb0512 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies.py @@ -1,9 +1,11 @@ import logging from typing import Annotated, Final -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends, Header, status from fastapi_pagination.api import create_page from models_library.api_schemas_webserver.projects import ProjectGet +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from ...models.pagination import OnePage, Page, PaginationParams from ...models.schemas.errors import ErrorGet @@ -85,9 +87,14 @@ async def get_study( async def clone_study( study_id: StudyID, webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, + x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None, ): project: ProjectGet = await webserver_api.clone_project( - project_id=study_id, hidden=False + project_id=study_id, + hidden=False, + parent_project_uuid=x_simcore_parent_project_uuid, + parent_node_id=x_simcore_parent_node_id, ) return _create_study_from_project(project) diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index 68be168b217..e320214867d 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -2,13 +2,15 @@ from collections.abc import Callable from typing import Annotated -from fastapi import APIRouter, Depends, Query, Request, status +from fastapi import APIRouter, Depends, Header, Query, Request, status from fastapi.responses import RedirectResponse from models_library.api_schemas_webserver.projects import ProjectName, ProjectPatch from models_library.api_schemas_webserver.projects_nodes import NodeOutputs from models_library.clusters import ClusterID from models_library.function_services_catalog.services import file_picker +from models_library.projects import ProjectID from models_library.projects_nodes import InputID, InputTypes +from models_library.projects_nodes_io import NodeID from pydantic import PositiveInt from servicelib.logging_utils import log_context @@ -81,11 +83,18 @@ async def create_study_job( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], hidden: Annotated[bool, Query()] = True, + x_simcore_parent_project_uuid: ProjectID | None = Header(default=None), + x_simcore_parent_node_id: NodeID | None = Header(default=None), ) -> Job: """ hidden -- if True (default) hides project from UI """ - project = await webserver_api.clone_project(project_id=study_id, hidden=hidden) + project = await webserver_api.clone_project( + project_id=study_id, + hidden=hidden, + parent_project_uuid=x_simcore_parent_project_uuid, + parent_node_id=x_simcore_parent_node_id, + ) job = create_job_from_study( study_key=study_id, project=project, job_inputs=job_inputs ) diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index 89a0b78fc14..ee29d81f582 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -46,6 +46,10 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import PositiveInt from servicelib.aiohttp.long_running_tasks.server import TaskStatus +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) from tenacity import TryAgain from tenacity._asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -253,12 +257,22 @@ async def update_me(self, profile_update: ProfileUpdate) -> Profile: @_exception_mapper({}) async def create_project( - self, project: ProjectCreateNew, *, is_hidden: bool + self, + project: ProjectCreateNew, + *, + is_hidden: bool, + parent_project_uuid: ProjectID | None, + parent_node_id: NodeID | None, ) -> ProjectGet: # POST /projects --> 202 Accepted + _headers = { + X_SIMCORE_PARENT_PROJECT_UUID: parent_project_uuid, + X_SIMCORE_PARENT_NODE_ID: parent_node_id, + } response = await self.client.post( "/projects", params={"hidden": is_hidden}, + headers={k: f"{v}" for k, v in _headers.items() if v is not None}, json=jsonable_encoder(project, by_alias=True, exclude={"state"}), cookies=self.session_cookies, ) @@ -267,10 +281,25 @@ async def create_project( return ProjectGet.parse_obj(result) 
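    # NOTE: clone_project below mirrors create_project above: the optional parent
    # pointers are forwarded as X-Simcore-Parent-* headers and None values are
    # dropped, so only explicitly-set headers reach the webserver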
@_exception_mapper(_JOB_STATUS_MAP) - async def clone_project(self, *, project_id: UUID, hidden: bool) -> ProjectGet: + async def clone_project( + self, + *, + project_id: UUID, + hidden: bool, + parent_project_uuid: ProjectID | None, + parent_node_id: NodeID | None, + ) -> ProjectGet: query = {"from_study": project_id, "hidden": hidden} + _headers = { + X_SIMCORE_PARENT_PROJECT_UUID: parent_project_uuid, + X_SIMCORE_PARENT_NODE_ID: parent_node_id, + } + response = await self.client.post( - "/projects", cookies=self.session_cookies, params=query + "/projects", + cookies=self.session_cookies, + params=query, + headers={k: f"{v}" for k, v in _headers.items() if v is not None}, ) response.raise_for_status() result = await self._wait_for_long_running_task_results(response) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py index 9a98a4ac4fa..5e61c9d1b82 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py @@ -4,6 +4,7 @@ from pathlib import Path from typing import TypedDict +from uuid import UUID import httpx import jinja2 @@ -13,9 +14,15 @@ from pydantic import parse_file_as from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) from simcore_service_api_server.models.schemas.jobs import Job, JobInputs from starlette import status +_faker = Faker() + class MockedBackendApiDict(TypedDict): catalog: MockRouter | None @@ -158,6 +165,10 @@ async def test_create_and_delete_solver_job( # Run a job and delete when finished +@pytest.mark.parametrize( + "parent_node_id, parent_project_id", + [(_faker.uuid4(), _faker.uuid4()), (None, None)], +) @pytest.mark.parametrize("hidden", [True, False]) async def test_create_job( auth: httpx.BasicAuth, @@ -166,29 +177,47 @@ async def test_create_job( solver_version: str, mocked_backend_services_apis_for_create_and_delete_solver_job: MockedBackendApiDict, hidden: bool, + parent_project_id: UUID | None, + parent_node_id: UUID | None, ): mock_webserver_router = ( mocked_backend_services_apis_for_create_and_delete_solver_job["webserver"] ) + assert mock_webserver_router is not None callback = mock_webserver_router["create_projects"].side_effect + assert callback is not None def create_project_side_effect(request: httpx.Request): + # check `hidden` bool query = dict(elm.split("=") for elm in request.url.query.decode().split("&")) _hidden = query.get("hidden") assert _hidden == ("true" if hidden else "false") + + # check parent project and node id + if parent_project_id is not None: + assert f"{parent_project_id}" == dict(request.headers).get( + X_SIMCORE_PARENT_PROJECT_UUID.lower() + ) + if parent_node_id is not None: + assert f"{parent_node_id}" == dict(request.headers).get( + X_SIMCORE_PARENT_NODE_ID.lower() + ) return callback(request) - mock_webserver_router = ( - mocked_backend_services_apis_for_create_and_delete_solver_job["webserver"] - ) mock_webserver_router["create_projects"].side_effect = create_project_side_effect # create Job + header_dict = {} + if parent_project_id is not None: + header_dict[X_SIMCORE_PARENT_PROJECT_UUID] = f"{parent_project_id}" + if parent_node_id is not None: + header_dict[X_SIMCORE_PARENT_NODE_ID] = 
f"{parent_node_id}" resp = await client.post( f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth, params={"hidden": f"{hidden}"}, + headers=header_dict, json=JobInputs( values={ "x": 3.14, diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py index 3090184a962..858787953b7 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py @@ -6,6 +6,7 @@ from collections.abc import Callable from pathlib import Path from typing import Any, TypedDict +from uuid import UUID import httpx import pytest @@ -14,9 +15,15 @@ from pydantic import parse_file_as, parse_obj_as from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) from simcore_service_api_server.models.schemas.errors import ErrorGet from simcore_service_api_server.models.schemas.studies import Study, StudyID, StudyPort +_faker = Faker() + class MockedBackendApiDict(TypedDict): catalog: MockRouter | None @@ -137,17 +144,52 @@ async def test_list_study_ports( @pytest.mark.acceptance_test( "Implements https://github.com/ITISFoundation/osparc-simcore/issues/4651" ) +@pytest.mark.parametrize( + "parent_node_id, parent_project_id", + [(_faker.uuid4(), _faker.uuid4()), (None, None)], +) async def test_clone_study( client: httpx.AsyncClient, auth: httpx.BasicAuth, study_id: StudyID, mocked_webserver_service_api_base: MockRouter, patch_webserver_long_running_project_tasks: Callable[[MockRouter], MockRouter], + parent_project_id: UUID | None, + parent_node_id: UUID | None, ): # Mocks /projects patch_webserver_long_running_project_tasks(mocked_webserver_service_api_base) - resp = await client.post(f"/v0/studies/{study_id}:clone", auth=auth) + callback = mocked_webserver_service_api_base["create_projects"].side_effect + assert callback is not None + + def clone_project_side_effect(request: httpx.Request): + if parent_project_id is not None: + _parent_project_id = dict(request.headers).get( + X_SIMCORE_PARENT_PROJECT_UUID.lower() + ) + assert _parent_project_id == f"{parent_project_id}" + if parent_node_id is not None: + _parent_node_id = dict(request.headers).get( + X_SIMCORE_PARENT_NODE_ID.lower() + ) + assert _parent_node_id == f"{parent_node_id}" + return callback(request) + + mocked_webserver_service_api_base[ + "create_projects" + ].side_effect = clone_project_side_effect + + _headers = {} + if parent_project_id is not None: + _headers[X_SIMCORE_PARENT_PROJECT_UUID] = f"{parent_project_id}" + if parent_node_id is not None: + _headers[X_SIMCORE_PARENT_NODE_ID] = f"{parent_node_id}" + resp = await client.post( + f"/v0/studies/{study_id}:clone", headers=_headers, auth=auth + ) + + assert mocked_webserver_service_api_base["create_projects"].called assert resp.status_code == status.HTTP_201_CREATED diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py index 736ead2acea..443fa943548 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py @@ -20,10 +20,16 @@ HttpApiCallCaptureModel, ) from respx import MockRouter +from servicelib.common_headers import ( + 
X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import Job, JobOutputs from simcore_service_api_server.models.schemas.studies import Study, StudyID +_faker = Faker() + @pytest.mark.xfail(reason="Still not implemented") @pytest.mark.acceptance_test( @@ -188,6 +194,10 @@ def _check_response(response: httpx.Response, status_code: int): _check_response(response, status.HTTP_204_NO_CONTENT) +@pytest.mark.parametrize( + "parent_node_id, parent_project_id", + [(_faker.uuid4(), _faker.uuid4()), (None, None)], +) @pytest.mark.parametrize("hidden", [True, False]) async def test_create_study_job( client: httpx.AsyncClient, @@ -198,6 +208,8 @@ async def test_create_study_job( project_tests_dir: Path, fake_study_id: UUID, hidden: bool, + parent_project_id: UUID | None, + parent_node_id: UUID | None, ): _capture_file: Final[Path] = project_tests_dir / "mocks" / "create_study_job.json" @@ -216,12 +228,23 @@ def _default_side_effect( assert project_id is not None assert project_id in name if capture.method == "POST": + # test hidden boolean _default_side_effect.post_called = True query_dict = dict( elm.split("=") for elm in request.url.query.decode().split("&") ) _hidden = query_dict.get("hidden") assert _hidden == ("true" if hidden else "false") + + # test parent project and node ids + if parent_project_id is not None: + assert f"{parent_project_id}" == dict(request.headers).get( + X_SIMCORE_PARENT_PROJECT_UUID.lower() + ) + if parent_node_id is not None: + assert f"{parent_node_id}" == dict(request.headers).get( + X_SIMCORE_PARENT_NODE_ID.lower() + ) return capture.response_body _default_side_effect.patch_called = False @@ -236,9 +259,15 @@ def _default_side_effect( side_effects_callbacks=[_default_side_effect] * 5, ) + header_dict = {} + if parent_project_id is not None: + header_dict[X_SIMCORE_PARENT_PROJECT_UUID] = f"{parent_project_id}" + if parent_node_id is not None: + header_dict[X_SIMCORE_PARENT_NODE_ID] = f"{parent_node_id}" response = await client.post( f"{API_VTAG}/studies/{fake_study_id}/jobs", auth=auth, + headers=header_dict, params={"hidden": f"{hidden}"}, json={"values": {}}, ) From 05380668f250e7612253c193f2f89ff5116c1207 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Mon, 3 Jun 2024 16:10:14 +0200 Subject: [PATCH 012/219] =?UTF-8?q?=E2=9C=A8=20dynamic-services=20will=20f?= =?UTF-8?q?ail=20if=20they=20have=20any=20required=20input=20that=20is=20n?= =?UTF-8?q?ot=20set=20(#5845)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../projects/_nodes_handlers.py | 3 + .../projects/exceptions.py | 46 +++++++ .../projects/projects_api.py | 73 ++++++++++- .../tests/unit/with_dbs/03/test_project_db.py | 117 ++++++++++++++++++ 4 files changed, 237 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index 67f3104a829..6a7109799e0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -74,6 +74,7 @@ DefaultPricingUnitNotFoundError, NodeNotFoundError, ProjectInvalidRightsError, + ProjectNodeRequiredInputsNotSetError, ProjectNodeResourcesInsufficientRightsError, ProjectNodeResourcesInvalidError, 
ProjectNotFoundError, @@ -105,6 +106,8 @@ async def wrapper(request: web.Request) -> web.StreamResponse: raise web.HTTPConflict(reason=f"{exc}") from exc except ClustersKeeperNotAvailableError as exc: raise web.HTTPServiceUnavailable(reason=f"{exc}") from exc + except ProjectNodeRequiredInputsNotSetError as exc: + raise web.HTTPConflict(reason=f"{exc}") from exc return wrapper diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index e62c4ef78e2..ecd60a58c39 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -4,6 +4,7 @@ import redis.exceptions from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.users import UserID from ..errors import WebServerBaseError @@ -136,6 +137,51 @@ class ProjectNodeResourcesInsufficientRightsError(BaseProjectError): ... +class ProjectNodeRequiredInputsNotSetError(BaseProjectError): + ... + + +class ProjectNodeConnectionsMissingError(ProjectNodeRequiredInputsNotSetError): + msg_template = "Missing '{joined_unset_required_inputs}' connection(s) to '{node_with_required_inputs}'" + + def __init__( + self, + *, + unset_required_inputs: list[str], + node_with_required_inputs: NodeID, + **ctx, + ): + super().__init__( + joined_unset_required_inputs=", ".join(unset_required_inputs), + unset_required_inputs=unset_required_inputs, + node_with_required_inputs=node_with_required_inputs, + **ctx, + ) + self.unset_required_inputs = unset_required_inputs + self.node_with_required_inputs = node_with_required_inputs + + +class ProjectNodeOutputPortMissingValueError(ProjectNodeRequiredInputsNotSetError): + msg_template = "Missing: {joined_start_message}" + + def __init__( + self, + *, + unset_outputs_in_upstream: list[tuple[str, str]], + **ctx, + ): + start_messages = [ + f"'{input_key}' of '{service_name}'" + for input_key, service_name in unset_outputs_in_upstream + ] + super().__init__( + joined_start_message=", ".join(start_messages), + unset_outputs_in_upstream=unset_outputs_in_upstream, + **ctx, + ) + self.unset_outputs_in_upstream = unset_outputs_in_upstream + + class DefaultPricingUnitNotFoundError(BaseProjectError): msg_template = "Default pricing unit not found for node '{node_uuid}' in project '{project_uuid}'" diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 5088775c7fb..28655495bfb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -34,8 +34,8 @@ from models_library.errors import ErrorDict from models_library.products import ProductName from models_library.projects import Project, ProjectID, ProjectIDStr -from models_library.projects_nodes import Node -from models_library.projects_nodes_io import NodeID, NodeIDStr +from models_library.projects_nodes import Node, OutputsDict +from models_library.projects_nodes_io import NodeID, NodeIDStr, PortLink from models_library.projects_state import ( Owner, ProjectLocked, @@ -124,6 +124,9 @@ NodeNotFoundError, ProjectInvalidRightsError, ProjectLockError, + ProjectNodeConnectionsMissingError, + ProjectNodeOutputPortMissingValueError, + ProjectNodeRequiredInputsNotSetError, 
ProjectNodeResourcesInvalidError, ProjectOwnerNotFoundInTheProjectAccessRightsError, ProjectStartsTooManyDynamicNodesError, @@ -447,6 +450,56 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: raise ClustersKeeperNotAvailableError from exc +async def _check_project_node_has_all_required_inputs( + db: ProjectDBAPI, user_id: UserID, project_uuid: ProjectID, node_id: NodeID +) -> None: + + project_dict, _ = await db.get_project(user_id, f"{project_uuid}") + + nodes_map: dict[NodeID, Node] = { + NodeID(k): Node(**v) for k, v in project_dict["workbench"].items() + } + node = nodes_map[node_id] + + unset_required_inputs: list[str] = [] + unset_outputs_in_upstream: list[tuple[str, str]] = [] + + def _check_required_input(required_input_key: str) -> None: + input_entry: PortLink | None = None + if node.inputs: + input_entry = node.inputs.get(required_input_key, None) + if input_entry is None: + # NOT linked to any node connect service or set value manually(whichever applies) + unset_required_inputs.append(required_input_key) + return + + source_node_id: NodeID = input_entry.node_uuid + source_output_key = input_entry.output + + source_node = nodes_map[source_node_id] + + output_entry: OutputsDict | None = None + if source_node.outputs: + output_entry = source_node.outputs.get(source_output_key, None) + if output_entry is None: + unset_outputs_in_upstream.append((source_output_key, source_node.label)) + + for required_input in node.inputs_required: + _check_required_input(required_input) + + node_with_required_inputs = node.label + if unset_required_inputs: + raise ProjectNodeConnectionsMissingError( + unset_required_inputs=unset_required_inputs, + node_with_required_inputs=node_with_required_inputs, + ) + + if unset_outputs_in_upstream: + raise ProjectNodeOutputPortMissingValueError( + unset_outputs_in_upstream=unset_outputs_in_upstream + ) + + async def _start_dynamic_service( request: web.Request, *, @@ -456,6 +509,7 @@ async def _start_dynamic_service( user_id: UserID, project_uuid: ProjectID, node_uuid: NodeID, + graceful_start: bool = False, ) -> None: if not _is_node_dynamic(service_key): return @@ -464,6 +518,20 @@ async def _start_dynamic_service( db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) + try: + await _check_project_node_has_all_required_inputs( + db, user_id, project_uuid, node_uuid + ) + except ProjectNodeRequiredInputsNotSetError as e: + if graceful_start: + log.info( + "Did not start '%s' because of missing required inputs: %s", + node_uuid, + e, + ) + return + raise + save_state = False user_role: UserRole = await get_user_role(request.app, user_id) if user_role > UserRole.GUEST: @@ -1464,6 +1532,7 @@ async def run_project_dynamic_services( user_id=user_id, project_uuid=project["uuid"], node_uuid=NodeID(service_uuid), + graceful_start=True, ) for service_uuid, is_deprecated in zip( services_to_start_uuids, deprecated_services, strict=True diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index 8411f84ca23..ebf46bee580 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -33,9 +33,13 @@ from simcore_service_webserver.projects.db import ProjectAccessRights, ProjectDBAPI from simcore_service_webserver.projects.exceptions import ( NodeNotFoundError, + ProjectNodeRequiredInputsNotSetError, ProjectNotFoundError, ) from simcore_service_webserver.projects.models import ProjectDict 
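+# imported here so the (private) required-inputs check can be exercised directly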
+from simcore_service_webserver.projects.projects_api import ( + _check_project_node_has_all_required_inputs, +) from simcore_service_webserver.users.exceptions import UserNotFoundError from simcore_service_webserver.utils import to_datetime from sqlalchemy.engine.result import Row @@ -829,3 +833,116 @@ async def test_has_permission( await db_api.has_permission(second_user["id"], project_id, permission) is access_rights[permission] ), f"Found unexpected {permission=} for {access_rights=} of {user_role=} and {project_id=}" + + +def _fake_output_data() -> dict: + return { + "store": 0, + "path": "9f8207e6-144a-11ef-831f-0242ac140027/98b68cbe-9e22-4eb5-a91b-2708ad5317b7/outputs/output_2/output_2.zip", + "eTag": "ec3bc734d85359b660aab400147cd1ea", + } + + +def _fake_connect_to(output_number: int) -> dict: + return { + "nodeUuid": "98b68cbe-9e22-4eb5-a91b-2708ad5317b7", + "output": f"output_{output_number}", + } + + +@pytest.fixture +async def inserted_project( + logged_user: dict[str, Any], + insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]], + fake_project: dict[str, Any], + downstream_inputs: dict, + downstream_required_inputs: list[str], + upstream_outputs: dict, +) -> dict: + fake_project["workbench"] = { + "98b68cbe-9e22-4eb5-a91b-2708ad5317b7": { + "key": "simcore/services/dynamic/jupyter-math", + "version": "2.0.10", + "label": "upstream", + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "thumbnail": "", + "outputs": upstream_outputs, + "runHash": "c6ae58f36a2e0f65f443441ecda023a451cb1b8051d01412d79aa03653e1a6b3", + }, + "324d6ef2-a82c-414d-9001-dc84da1cbea3": { + "key": "simcore/services/dynamic/jupyter-math", + "version": "2.0.10", + "label": "downstream", + "inputs": downstream_inputs, + "inputsUnits": {}, + "inputNodes": ["98b68cbe-9e22-4eb5-a91b-2708ad5317b7"], + "thumbnail": "", + "inputsRequired": downstream_required_inputs, + }, + } + + return await insert_project_in_db(fake_project, user_id=logged_user["id"]) + + +@pytest.mark.parametrize( + "downstream_inputs,downstream_required_inputs,upstream_outputs,expected_error", + [ + pytest.param( + {"input_1": _fake_connect_to(1)}, + ["input_1", "input_2"], + {}, + "Missing 'input_2' connection(s) to 'downstream'", + id="missing_connection_on_input_2", + ), + pytest.param( + {"input_1": _fake_connect_to(1), "input_2": _fake_connect_to(2)}, + ["input_1", "input_2"], + {"output_2": _fake_output_data()}, + "Missing: 'output_1' of 'upstream'", + id="output_1_has_not_file", + ), + ], +) +@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +async def test_check_project_node_has_all_required_inputs_raises( + logged_user: dict[str, Any], + db_api: ProjectDBAPI, + inserted_project: dict, + expected_error: str, +): + + with pytest.raises(ProjectNodeRequiredInputsNotSetError) as exc: + await _check_project_node_has_all_required_inputs( + db_api, + user_id=logged_user["id"], + project_uuid=UUID(inserted_project["uuid"]), + node_id=UUID("324d6ef2-a82c-414d-9001-dc84da1cbea3"), + ) + assert f"{exc.value}" == expected_error + + +@pytest.mark.parametrize( + "downstream_inputs,downstream_required_inputs,upstream_outputs", + [ + pytest.param( + {"input_1": _fake_connect_to(1), "input_2": _fake_connect_to(2)}, + ["input_1", "input_2"], + {"output_1": _fake_output_data(), "output_2": _fake_output_data()}, + id="with_required_inputs_present", + ), + ], +) +@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +async def test_check_project_node_has_all_required_inputs_ok( + logged_user: dict[str, Any], + db_api: ProjectDBAPI, 
+ inserted_project: dict, +): + await _check_project_node_has_all_required_inputs( + db_api, + user_id=logged_user["id"], + project_uuid=UUID(inserted_project["uuid"]), + node_id=UUID("324d6ef2-a82c-414d-9001-dc84da1cbea3"), + ) From 36ae83407457c382580d196619fbcc9af2d1f8a1 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 4 Jun 2024 09:58:30 +0200 Subject: [PATCH 013/219] =?UTF-8?q?=F0=9F=8E=A8=20Frontend:=20Usage=20tabl?= =?UTF-8?q?e:=20show=20``root=5Fparent=5Fproject=5Fname``=20instead=20of?= =?UTF-8?q?=20``project=5Fname``=20(#5908)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../osparc/desktop/credits/UsageTable.js | 71 ++++++++++++--- .../osparc/desktop/credits/UsageTableModel.js | 89 +++++++++---------- 2 files changed, 102 insertions(+), 58 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js index 64692a73452..9e81f2c8541 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js @@ -30,13 +30,13 @@ qx.Class.define("osparc.desktop.credits.UsageTable", { const columnModel = this.getTableColumnModel(); - columnModel.setDataCellRenderer(6, new qx.ui.table.cellrenderer.Number()); + columnModel.setDataCellRenderer(this.self().COLS.COST.column, new qx.ui.table.cellrenderer.Number()); if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { - columnModel.setColumnVisible(6, false); - columnModel.setColumnVisible(7, false); + columnModel.setColumnVisible(this.self().COLS.COST.column, false); + columnModel.setColumnVisible(this.self().COLS.USER.column, false); } - columnModel.setColumnVisible(2, false) + columnModel.setColumnVisible(this.self().COLS.SERVICE.column, false); // Array [0, 1, ..., N] where N is column_count - 1 (default column order) this.__columnOrder = [...Array(columnModel.getOverallColumnCount()).keys()] @@ -62,12 +62,59 @@ qx.Class.define("osparc.desktop.credits.UsageTable", { } }, this) - columnModel.setColumnWidth(0, 130) - columnModel.setColumnWidth(1, 130) - columnModel.setColumnWidth(3, 130) - columnModel.setColumnWidth(4, 70) - columnModel.setColumnWidth(5, 70) - columnModel.setColumnWidth(6, 56) - columnModel.setColumnWidth(7, 130) + Object.values(this.self().COLS).forEach(col => columnModel.setColumnWidth(col.column, col.width)); + }, + + statics: { + COLS: { + PROJECT: { + id: "project", + column: 0, + label: osparc.product.Utils.getStudyAlias({firstUpperCase: true}), + width: 140 + }, + NODE: { + id: "node", + column: 1, + label: qx.locale.Manager.tr("Node"), + width: 140 + }, + SERVICE: { + id: "service", + column: 2, + label: qx.locale.Manager.tr("Service"), + width: 140 + }, + START: { + id: "start", + column: 3, + label: qx.locale.Manager.tr("Start"), + width: 130 + }, + DURATION: { + id: "duration", + column: 4, + label: qx.locale.Manager.tr("Duration"), + width: 70 + }, + STATUS: { + id: "status", + column: 5, + label: qx.locale.Manager.tr("Status"), + width: 70 + }, + COST: { + id: "cost", + column: 6, + label: qx.locale.Manager.tr("Credits"), + width: 56 + }, + USER: { + id: "user", + column: 7, + label: qx.locale.Manager.tr("User"), + width: 140 + } + } } -}) +}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js 
b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js index c8025ffb7de..5859d5f7dce 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js @@ -3,49 +3,28 @@ * Copyright: 2024 IT'IS Foundation - https://itis.swiss * License: MIT - https://opensource.org/licenses/MIT * Authors: Ignacio Pascual (ignapas) + * Odei Maiz (odeimaiz) */ -const SERVER_MAX_LIMIT = 49 -const COLUMN_ID_TO_DB_COLUMN_MAP = { - 0: "project_name", - 1: "node_name", - 2: "service_key", - 3: "started_at", - 5: "service_run_status", - 6: "credit_cost", - 7: "user_email" -} + qx.Class.define("osparc.desktop.credits.UsageTableModel", { extend: qx.ui.table.model.Remote, construct(walletId, filters) { - this.base(arguments) - this.setColumns([ - osparc.product.Utils.getStudyAlias({firstUpperCase: true}), - qx.locale.Manager.tr("Node"), - qx.locale.Manager.tr("Service"), - qx.locale.Manager.tr("Start"), - qx.locale.Manager.tr("Duration"), - qx.locale.Manager.tr("Status"), - qx.locale.Manager.tr("Credits"), - qx.locale.Manager.tr("User") - ], [ - "project", - "node", - "service", - "start", - "duration", - "status", - "cost", - "user" - ]) + this.base(arguments); + + const usageCols = osparc.desktop.credits.UsageTable.COLS; + const colLabels = Object.values(usageCols).map(col => col.label); + const colIDs = Object.values(usageCols).map(col => col.id); + + this.setColumns(colLabels, colIDs); this.setWalletId(walletId) if (filters) { this.setFilters(filters) } - this.setSortColumnIndexWithoutSortingData(3) + this.setSortColumnIndexWithoutSortingData(usageCols.START.column); this.setSortAscendingWithoutSortingData(false) - this.setColumnSortable(4, false) + this.setColumnSortable(usageCols.DURATION.column, false); }, properties: { @@ -71,15 +50,30 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { } }, + statics: { + SERVER_MAX_LIMIT: 49, + COLUMN_ID_TO_DB_COLUMN_MAP: { + 0: "root_parent_project_name", + 1: "node_name", + 2: "service_key", + 3: "started_at", + // 4: (not used) SORTING BY DURATION + 5: "service_run_status", + 6: "credit_cost", + 7: "user_email" + } + }, + members: { - // overrriden + // overridden sortByColumn(columnIndex, ascending) { this.setOrderBy({ - field: COLUMN_ID_TO_DB_COLUMN_MAP[columnIndex], + field: this.self().COLUMN_ID_TO_DB_COLUMN_MAP[columnIndex], direction: ascending ? "asc" : "desc" }) this.base(arguments, columnIndex, ascending) }, + // overridden _loadRowCount() { const endpoint = this.getWalletId() == null ? "get" : "getWithWallet" @@ -105,13 +99,14 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { this._onRowCountLoaded(null) }) }, + // overridden _loadRowData(firstRow, qxLastRow) { this.setIsFetching(true) // Please Qloocloox don't ask for more rows than there are const lastRow = Math.min(qxLastRow, this._rowCount - 1) // Returns a request promise with given offset and limit - const getFetchPromise = (offset, limit=SERVER_MAX_LIMIT) => { + const getFetchPromise = (offset, limit=this.self().SERVER_MAX_LIMIT) => { const endpoint = this.getWalletId() == null ? 
"get" : "getWithWallet" return osparc.data.Resources.fetch("resourceUsage", endpoint, { url: { @@ -128,6 +123,7 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { }) .then(rawData => { const data = [] + const usageCols = osparc.desktop.credits.UsageTable.COLS; rawData.forEach(rawRow => { let service = "" if (rawRow["service_key"]) { @@ -143,14 +139,15 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { } } data.push({ - project: rawRow["project_name"] || rawRow["project_id"], - node: rawRow["node_name"] || rawRow["node_id"], - service, - start, - duration, - status: qx.lang.String.firstUp(rawRow["service_run_status"].toLowerCase()), - cost: rawRow["credit_cost"] ? rawRow["credit_cost"].toFixed(2) : "", - user: rawRow["user_email"] + // root_parent_project is the same as project if it has no parent + [usageCols.PROJECT.id]: rawRow["root_parent_project_name"] || rawRow["root_parent_project_id"] || rawRow["project_name"] || rawRow["project_id"], + [usageCols.NODE.id]: rawRow["node_name"] || rawRow["node_id"], + [usageCols.SERVICE.id]: service, + [usageCols.START.id]: start, + [usageCols.DURATION.id]: duration, + [usageCols.STATUS.id]: qx.lang.String.firstUp(rawRow["service_run_status"].toLowerCase()), + [usageCols.COST.id]: rawRow["credit_cost"] ? rawRow["credit_cost"].toFixed(2) : "", + [usageCols.USER.id]: rawRow["user_email"] }) }) return data @@ -158,11 +155,11 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { } // Divides the model row request into several server requests to comply with the number of rows server limit const reqLimit = lastRow - firstRow + 1 // Number of requested rows - const nRequests = Math.ceil(reqLimit / SERVER_MAX_LIMIT) + const nRequests = Math.ceil(reqLimit / this.self().SERVER_MAX_LIMIT) if (nRequests > 1) { let requests = [] - for (let i=firstRow; i <= lastRow; i += SERVER_MAX_LIMIT) { - requests.push(getFetchPromise(i, i > lastRow - SERVER_MAX_LIMIT + 1 ? reqLimit % SERVER_MAX_LIMIT : SERVER_MAX_LIMIT)) + for (let i=firstRow; i <= lastRow; i += this.self().SERVER_MAX_LIMIT) { + requests.push(getFetchPromise(i, i > lastRow - this.self().SERVER_MAX_LIMIT + 1 ? 
reqLimit % this.self().SERVER_MAX_LIMIT : this.self().SERVER_MAX_LIMIT)) } Promise.all(requests) .then(responses => { From 3f7d0670a84afdecb67b1c7ffcaa354eed761c51 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Tue, 4 Jun 2024 12:19:23 +0200 Subject: [PATCH 014/219] =?UTF-8?q?=F0=9F=94=A8Clusters=20maintenance=20sc?= =?UTF-8?q?ript:=20refactoring=20and=20add=20SSH=20tunneling=20(#5886)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../computational-clusters/Makefile | 20 +- .../autoscaled_monitor/__init__.py | 0 .../autoscaled_monitor/cli.py | 156 ++ .../autoscaled_monitor/constants.py | 26 + .../autoscaled_monitor/core.py | 620 ++++++++ .../autoscaled_monitor/dask.py | 146 ++ .../autoscaled_monitor/db.py | 109 ++ .../autoscaled_monitor/ec2.py | 171 +++ .../autoscaled_monitor/models.py | 113 ++ .../autoscaled_monitor/ssh.py | 257 ++++ .../autoscaled_monitor/utils.py | 64 + .../computational-clusters/osparc_clusters.py | 1345 ----------------- .../computational-clusters/pyproject.toml | 37 + .../computational-clusters/requirements.txt | 16 - 14 files changed, 1714 insertions(+), 1366 deletions(-) create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/__init__.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py create mode 100755 scripts/maintenance/computational-clusters/autoscaled_monitor/core.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/db.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/ec2.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/models.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/ssh.py create mode 100644 scripts/maintenance/computational-clusters/autoscaled_monitor/utils.py delete mode 100755 scripts/maintenance/computational-clusters/osparc_clusters.py create mode 100644 scripts/maintenance/computational-clusters/pyproject.toml delete mode 100644 scripts/maintenance/computational-clusters/requirements.txt diff --git a/scripts/maintenance/computational-clusters/Makefile b/scripts/maintenance/computational-clusters/Makefile index b4820fd68b8..c9aa61316d9 100644 --- a/scripts/maintenance/computational-clusters/Makefile +++ b/scripts/maintenance/computational-clusters/Makefile @@ -2,14 +2,24 @@ SHELL := /bin/bash -install: +.venv: # creating python virtual environment @uv venv .venv - # activating python virtual environment - @source .venv/bin/activate # installing python dependencies @uv pip install --upgrade pip setuptools wheel - @uv pip install -r requirements.txt + + +install: .venv + # activating python virtual environment + @source .venv/bin/activate + # installing package + @uv pip install . # now you can call the maintenance scripts # source .venv/bin/activate - # e.g. ./osparc_clusters.py PATH/TO/REPO.CONFIG --ssh-key-path=PATH/TO/SSHKEY + # autoscaled-monitor --deploy-config PATH/TO/REPO.CONFIG summary + +install-dev: .venv + # activating python virtual environment + @source .venv/bin/activate + # installing package + @uv pip install -e . 
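
For orientation, a minimal session with the refactored tool (the deploy-config
path and the ID values are illustrative, not part of this patch; the command
names come from the Typer app in cli.py below):

    make install
    source .venv/bin/activate
    autoscaled-monitor --deploy-config ~/osparc-ops-deployment-configuration/deploys/my-deploy summary --user-id 42 --wallet-id 7

cancel-jobs and trigger-cluster-termination are registered the same way and take
the same --user-id/--wallet-id options.
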
diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/__init__.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py new file mode 100644 index 00000000000..1cad40078dd --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py @@ -0,0 +1,156 @@ +import asyncio +from pathlib import Path +from typing import Annotated + +import parse +import rich +import typer +from dotenv import dotenv_values + +from . import core as api +from .constants import ( + DEFAULT_COMPUTATIONAL_EC2_FORMAT, + DEFAULT_DYNAMIC_EC2_FORMAT, + DEPLOY_SSH_KEY_PARSER, +) +from .ec2 import autoscaling_ec2_client, cluster_keeper_ec2_client +from .models import AppState + +state: AppState = AppState( + dynamic_parser=parse.compile(DEFAULT_DYNAMIC_EC2_FORMAT), + computational_parser=parse.compile(DEFAULT_COMPUTATIONAL_EC2_FORMAT), +) + +app = typer.Typer() + + +def _parse_environment(deploy_config: Path) -> dict[str, str | None]: + repo_config = deploy_config / "repo.config" + assert repo_config.exists() + environment = dotenv_values(repo_config) + if environment["AUTOSCALING_EC2_ACCESS_KEY_ID"] == "": + rich.print( + "Terraform variables detected, looking for repo.config.frozen as alternative." + " TIP: you are responsible for them being up to date!!" + ) + repo_config = deploy_config / "repo.config.frozen" + assert repo_config.exists() + environment = dotenv_values(repo_config) + + if environment["AUTOSCALING_EC2_ACCESS_KEY_ID"] == "": + error_msg = ( + "Terraform is necessary in order to check into that deployment!\n" + f"install terraform (check README.md in {state.deploy_config} for instructions)" + "then run make repo.config.frozen, then re-run this code" + ) + rich.print(error_msg) + raise typer.Abort(error_msg) + assert environment + return environment + + +@app.callback() +def main( + deploy_config: Annotated[ + Path, typer.Option(help="path to the deploy configuration") + ] +): + """Manages external clusters""" + + state.deploy_config = deploy_config.expanduser() + assert ( + deploy_config.is_dir() + ), "deploy-config argument is not pointing to a directory!" + state.environment = _parse_environment(deploy_config) + + # connect to ec2s + state.ec2_resource_autoscaling = autoscaling_ec2_client(state) + state.ec2_resource_clusters_keeper = cluster_keeper_ec2_client(state) + + assert state.environment["EC2_INSTANCES_KEY_NAME"] + state.dynamic_parser = parse.compile( + f"{state.environment['EC2_INSTANCES_NAME_PREFIX']}-{{key_name}}" + ) + if state.environment["CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX"]: + state.computational_parser = parse.compile( + f"{state.environment['CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX']}-{DEFAULT_COMPUTATIONAL_EC2_FORMAT}" + ) + + # locate ssh key path + for file_path in deploy_config.glob("**/*.pem"): + if "license" in file_path.name: + continue + # very bad HACK + if ( + any(_ in f"{file_path}" for _ in ("sim4life.io", "osparc-master")) + and "openssh" not in f"{file_path}" + ): + continue + + if DEPLOY_SSH_KEY_PARSER.parse(f"{file_path.name}") is not None: + rich.print( + f"will be using following ssh_key_path: {file_path}. " + "TIP: if wrong adapt the code or manually remove some of them." 
+ ) + state.ssh_key_path = file_path + break + + +@app.command() +def summary( + user_id: Annotated[int, typer.Option(help="filters by the user ID")] = 0, + wallet_id: Annotated[int, typer.Option(help="filters by the wallet ID")] = 0, +) -> None: + """Show a summary of the current situation of autoscaled EC2 instances. + + Gives a list of all the instances used for dynamic services, and optionally shows what runs in them. + Gives alist of all the instances used for computational services (e.g. primary + worker(s) instances) + + Arguments: + repo_config -- path that shall point to a repo.config type of file (see osparc-ops-deployment-configuration repository) + + """ + + asyncio.run(api.summary(state, user_id or None, wallet_id or None)) + + +@app.command() +def cancel_jobs( + user_id: Annotated[int, typer.Option(help="the user ID")], + wallet_id: Annotated[int, typer.Option(help="the wallet ID")], + *, + force: Annotated[ + bool, + typer.Option( + help="will also force the job to abort in the database (use only if job is in WAITING FOR CLUSTER/WAITING FOR RESOURCE)" + ), + ] = False, +) -> None: + """Cancel jobs from the cluster, this will rely on osparc platform to work properly + The director-v2 should receive the cancellation and abort the concerned pipelines in the next 15 seconds. + NOTE: This should be called prior to clearing jobs on the cluster. + + Keyword Arguments: + user_id -- the user ID + wallet_id -- the wallet ID + """ + asyncio.run(api.cancel_jobs(state, user_id, wallet_id, force=force)) + + +@app.command() +def trigger_cluster_termination( + user_id: Annotated[int, typer.Option(help="the user ID")], + wallet_id: Annotated[int, typer.Option(help="the wallet ID")], +) -> None: + """this will set the Heartbeat tag on the primary machine to 1 hour, thus ensuring the + clusters-keeper will properly terminate that cluster. + + Keyword Arguments: + user_id -- the user ID + wallet_id -- the wallet ID + """ + asyncio.run(api.trigger_cluster_termination(state, user_id, wallet_id)) + + +if __name__ == "__main__": + app() diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py new file mode 100644 index 00000000000..82c5978f1d5 --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py @@ -0,0 +1,26 @@ +import re +from typing import Final + +import parse +from pydantic import ByteSize + +DEFAULT_COMPUTATIONAL_EC2_FORMAT: Final[ + str +] = r"osparc-computational-cluster-{role}-{swarm_stack_name}-user_id:{user_id:d}-wallet_id:{wallet_id:d}" +DEFAULT_DYNAMIC_EC2_FORMAT: Final[str] = r"osparc-dynamic-autoscaled-worker-{key_name}" +DEPLOY_SSH_KEY_PARSER: Final[parse.Parser] = parse.compile(r"osparc-{random_name}.pem") + +MINUTE: Final[int] = 60 +HOUR: Final[int] = 60 * MINUTE + + +SSH_USER_NAME: Final[str] = "ubuntu" +UNDEFINED_BYTESIZE: Final[ByteSize] = ByteSize(-1) +TASK_CANCEL_EVENT_NAME_TEMPLATE: Final[str] = "cancel_event_{}" + +# NOTE: service_name and service_version are not available on dynamic-sidecar/dynamic-proxies! 
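+# (structure: the node ID is embedded in the dy-proxy/dy-sidecar service name,
+# followed by five tab-separated fields: a string, an integer, a UUID, an
+# optional string and a free-text tail)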
+DYN_SERVICES_NAMING_CONVENTION: Final[re.Pattern] = re.compile( + r"^dy-(proxy|sidecar)(-|_)(?P.{8}-.{4}-.{4}-.{4}-.{12}).*\t(?P[^\t]+)\t(?P\d+)\t(?P.{8}-.{4}-.{4}-.{4}-.{12})\t(?P[^\t]*)\t(?P.*)$" +) + +DANGER = "[red]{}[/red]" diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py new file mode 100755 index 00000000000..1ebd11821b5 --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py @@ -0,0 +1,620 @@ +#! /usr/bin/env python3 + +import asyncio +import datetime +import json +from dataclasses import replace +from pathlib import Path + +import arrow +import parse +import rich +import typer +from mypy_boto3_ec2.service_resource import Instance, ServiceResourceInstancesCollection +from mypy_boto3_ec2.type_defs import TagTypeDef +from pydantic import ByteSize, TypeAdapter, ValidationError +from rich.progress import track +from rich.style import Style +from rich.table import Column, Table + +from . import dask, db, ec2, ssh, utils +from .constants import SSH_USER_NAME, UNDEFINED_BYTESIZE +from .models import ( + AppState, + ComputationalCluster, + ComputationalInstance, + ComputationalTask, + DaskTask, + DynamicInstance, + DynamicService, + InstanceRole, + TaskId, + TaskState, +) + + +@utils.to_async +def _parse_computational( + state: AppState, instance: Instance +) -> ComputationalInstance | None: + name = utils.get_instance_name(instance) + if result := state.computational_parser.search(name): + assert isinstance(result, parse.Result) + last_heartbeat = utils.get_last_heartbeat(instance) + return ComputationalInstance( + role=InstanceRole(result["role"]), + user_id=result["user_id"], + wallet_id=result["wallet_id"], + name=name, + last_heartbeat=last_heartbeat, + ec2_instance=instance, + disk_space=UNDEFINED_BYTESIZE, + dask_ip="unknown", + ) + + return None + + +def _create_graylog_permalinks( + environment: dict[str, str | None], instance: Instance +) -> str: + # https://monitoring.sim4life.io/graylog/search/6552235211aee4262e7f9f21?q=source%3A%22ip-10-0-1-67%22&rangetype=relative&from=28800 + source_name = instance.private_ip_address.replace(".", "-") + time_span = int( + ( + arrow.utcnow().datetime - instance.launch_time + datetime.timedelta(hours=1) + ).total_seconds() + ) + return f"https://monitoring.{environment['MACHINE_FQDN']}/graylog/search?q=source%3A%22ip-{source_name}%22&rangetype=relative&from={time_span}" + + +def _parse_dynamic(state: AppState, instance: Instance) -> DynamicInstance | None: + name = utils.get_instance_name(instance) + if result := state.dynamic_parser.search(name): + assert isinstance(result, parse.Result) + + return DynamicInstance( + name=name, + ec2_instance=instance, + running_services=[], + disk_space=UNDEFINED_BYTESIZE, + ) + return None + + +def _print_dynamic_instances( + instances: list[DynamicInstance], + environment: dict[str, str | None], + aws_region: str, +) -> None: + time_now = arrow.utcnow() + table = Table( + Column("Instance"), + Column( + "Running services", + footer="[red]Intervention detection might show false positive if in transient state, be careful and always double-check!![/red]", + ), + title=f"dynamic autoscaled instances: {aws_region}", + show_footer=True, + padding=(0, 0), + title_style=Style(color="red", encircle=True), + ) + for instance in track( + instances, description="Preparing dynamic autoscaled instances details..." 
+ ): + service_table = "[i]n/a[/i]" + if instance.running_services: + service_table = Table( + "UserID", + "ProjectID", + "NodeID", + "ServiceName", + "ServiceVersion", + "Created Since", + "Need intervention", + expand=True, + padding=(0, 0), + ) + for service in instance.running_services: + service_table.add_row( + f"{service.user_id}", + service.project_id, + service.node_id, + service.service_name, + service.service_version, + utils.timedelta_formatting( + time_now - service.created_at, color_code=True + ), + f"{'[red]' if service.needs_manual_intervention else ''}{service.needs_manual_intervention}{'[/red]' if service.needs_manual_intervention else ''}", + ) + + table.add_row( + "\n".join( + [ + f"{utils.color_encode_with_state(instance.name, instance.ec2_instance)}", + f"ID: {instance.ec2_instance.instance_id}", + f"AMI: {instance.ec2_instance.image_id}", + f"AMI name: {instance.ec2_instance.image.name}", + f"Type: {instance.ec2_instance.instance_type}", + f"Up: {utils.timedelta_formatting(time_now - instance.ec2_instance.launch_time, color_code=True)}", + f"ExtIP: {instance.ec2_instance.public_ip_address}", + f"IntIP: {instance.ec2_instance.private_ip_address}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(instance.disk_space.human_readable(), instance.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + ] + ), + service_table, + ) + table.add_row( + "Graylog: ", + f"{_create_graylog_permalinks(environment, instance.ec2_instance)}", + end_section=True, + ) + rich.print(table, flush=True) + + +def _print_computational_clusters( + clusters: list[ComputationalCluster], + environment: dict[str, str | None], + aws_region: str, +) -> None: + time_now = arrow.utcnow() + table = Table( + Column("Instance", justify="left", overflow="ellipsis", ratio=1), + Column("Computational details", overflow="fold", ratio=2), + title=f"computational clusters: {aws_region}", + padding=(0, 0), + title_style=Style(color="red", encircle=True), + expand=True, + ) + + for cluster in track( + clusters, "Collecting information about computational clusters..." 
+ ): + cluster_worker_metrics = dask.get_worker_metrics(cluster.scheduler_info) + # first print primary machine info + table.add_row( + "\n".join( + [ + f"[bold]{utils.color_encode_with_state('Primary', cluster.primary.ec2_instance)}", + f"Name: {cluster.primary.name}", + f"ID: {cluster.primary.ec2_instance.id}", + f"AMI: {cluster.primary.ec2_instance.image_id}", + f"AMI name: {cluster.primary.ec2_instance.image.name}", + f"Type: {cluster.primary.ec2_instance.instance_type}", + f"Up: {utils.timedelta_formatting(time_now - cluster.primary.ec2_instance.launch_time, color_code=True)}", + f"ExtIP: {cluster.primary.ec2_instance.public_ip_address}", + f"IntIP: {cluster.primary.ec2_instance.private_ip_address}", + f"DaskSchedulerIP: {cluster.primary.dask_ip}", + f"UserID: {cluster.primary.user_id}", + f"WalletID: {cluster.primary.wallet_id}", + f"Heartbeat: {utils.timedelta_formatting(time_now - cluster.primary.last_heartbeat) if cluster.primary.last_heartbeat else 'n/a'}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(cluster.primary.disk_space.human_readable(), cluster.primary.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + ] + ), + "\n".join( + [ + f"Dask Scheduler UI: http://{cluster.primary.ec2_instance.public_ip_address}:8787", + f"Dask Scheduler TLS: tls://{cluster.primary.ec2_instance.public_ip_address}:8786", + f"Graylog UI: {_create_graylog_permalinks(environment, cluster.primary.ec2_instance)}", + f"Prometheus UI: http://{cluster.primary.ec2_instance.public_ip_address}:9090", + f"tasks: {json.dumps(cluster.task_states_to_tasks, indent=2)}", + ] + ), + ) + + # now add the workers + for index, worker in enumerate(cluster.workers): + worker_dask_metrics = next( + ( + worker_metrics + for worker_name, worker_metrics in cluster_worker_metrics.items() + if worker.dask_ip in worker_name + ), + "no metrics???", + ) + worker_processing_jobs = [ + job_id + for worker_name, job_id in cluster.processing_jobs.items() + if worker.dask_ip in worker_name + ] + table.add_row() + table.add_row( + "\n".join( + [ + f"[italic]{utils.color_encode_with_state(f'Worker {index+1}', worker.ec2_instance)}[/italic]", + f"Name: {worker.name}", + f"ID: {worker.ec2_instance.id}", + f"AMI: {worker.ec2_instance.image_id}", + f"AMI name: {worker.ec2_instance.image.name}", + f"Type: {worker.ec2_instance.instance_type}", + f"Up: {utils.timedelta_formatting(time_now - worker.ec2_instance.launch_time, color_code=True)}", + f"ExtIP: {worker.ec2_instance.public_ip_address}", + f"IntIP: {worker.ec2_instance.private_ip_address}", + f"DaskWorkerIP: {worker.dask_ip}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(worker.disk_space.human_readable(), worker.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + "", + ] + ), + "\n".join( + [ + f"Graylog: {_create_graylog_permalinks(environment, worker.ec2_instance)}", + f"Dask metrics: {json.dumps(worker_dask_metrics, indent=2)}", + f"Running tasks: {worker_processing_jobs}", + ] + ), + ) + table.add_row(end_section=True) + rich.print(table) + + +async def _fetch_instance_details( + state: AppState, instance: DynamicInstance, ssh_key_path: Path +) -> tuple[list[DynamicService] | BaseException, ByteSize | BaseException]: + # Run both SSH operations concurrently for this instance + running_services, disk_space = await asyncio.gather( + ssh.list_running_dyn_services( + state, + instance.ec2_instance, + SSH_USER_NAME, + ssh_key_path, + ), + ssh.get_available_disk_space( + state, instance.ec2_instance, SSH_USER_NAME, ssh_key_path + ), + 
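+        # with return_exceptions=True a failing SSH probe is returned as the
+        # exception object instead of cancelling its sibling call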
return_exceptions=True, + ) + return running_services, disk_space + + +async def _analyze_dynamic_instances_running_services_concurrently( + state: AppState, + dynamic_instances: list[DynamicInstance], + ssh_key_path: Path, + user_id: int | None, +) -> list[DynamicInstance]: + details = await asyncio.gather( + *( + _fetch_instance_details(state, instance, ssh_key_path) + for instance in dynamic_instances + ), + return_exceptions=True, + ) + + # Filter and update instances based on results and given criteria + return [ + replace( + instance, + running_services=instance_details[0], + disk_space=instance_details[1], + ) + for instance, instance_details in zip(dynamic_instances, details, strict=True) + if isinstance(instance_details, tuple) + and isinstance(instance_details[0], list) + and isinstance(instance_details[1], ByteSize) + and (user_id is None or any(s.user_id == user_id for s in instance_details[0])) + ] + + +async def _analyze_computational_instances( + state: AppState, + computational_instances: list[ComputationalInstance], + ssh_key_path: Path | None, +) -> list[ComputationalCluster]: + + all_disk_spaces = [UNDEFINED_BYTESIZE] * len(computational_instances) + if ssh_key_path is not None: + all_disk_spaces = await asyncio.gather( + *( + ssh.get_available_disk_space( + state, instance.ec2_instance, SSH_USER_NAME, ssh_key_path + ) + for instance in computational_instances + ), + return_exceptions=True, + ) + + all_dask_ips = await asyncio.gather( + *( + ssh.get_dask_ip( + state, instance.ec2_instance, SSH_USER_NAME, ssh_key_path + ) + for instance in computational_instances + ), + return_exceptions=True, + ) + + computational_clusters = [] + for instance, disk_space, dask_ip in track( + zip(computational_instances, all_disk_spaces, all_dask_ips, strict=True), + description="Collecting computational clusters data...", + ): + if isinstance(disk_space, ByteSize): + instance.disk_space = disk_space + if isinstance(dask_ip, str): + instance.dask_ip = dask_ip + if instance.role is InstanceRole.manager: + ( + scheduler_info, + datasets_on_cluster, + processing_jobs, + all_tasks, + ) = await dask.get_scheduler_details( + state, + instance.ec2_instance, + ) + + assert isinstance(datasets_on_cluster, tuple) + assert isinstance(processing_jobs, dict) + + computational_clusters.append( + ComputationalCluster( + primary=instance, + workers=[], + scheduler_info=scheduler_info, + datasets=datasets_on_cluster, + processing_jobs=processing_jobs, + task_states_to_tasks=all_tasks, + ) + ) + + for instance in computational_instances: + if instance.role is InstanceRole.worker: + # assign the worker to correct cluster + for cluster in computational_clusters: + if ( + cluster.primary.user_id == instance.user_id + and cluster.primary.wallet_id == instance.wallet_id + ): + cluster.workers.append(instance) + + return computational_clusters + + +async def _parse_computational_clusters( + state: AppState, + instances: ServiceResourceInstancesCollection, + ssh_key_path: Path | None, + user_id: int | None, + wallet_id: int | None, +) -> list[ComputationalCluster]: + computational_instances = [ + comp_instance + for instance in track( + instances, description="Parsing computational instances..." 
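+            # NOTE: the walrus assignment below parses each instance exactly once;
+            # the trailing if-clauses drop instances not matching the optional
+            # user/wallet filters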
+        )
+        if (comp_instance := await _parse_computational(state, instance))
+        and (user_id is None or comp_instance.user_id == user_id)
+        and (wallet_id is None or comp_instance.wallet_id == wallet_id)
+    ]
+    return await _analyze_computational_instances(
+        state, computational_instances, ssh_key_path
+    )
+
+
+async def _parse_dynamic_instances(
+    state: AppState,
+    instances: ServiceResourceInstancesCollection,
+    ssh_key_path: Path | None,
+    user_id: int | None,
+    wallet_id: int | None,  # noqa: ARG001
+) -> list[DynamicInstance]:
+    dynamic_instances = [
+        dyn_instance
+        for instance in track(instances, description="Parsing dynamic instances...")
+        if (dyn_instance := _parse_dynamic(state, instance))
+    ]
+
+    if dynamic_instances and ssh_key_path:
+        dynamic_instances = (
+            await _analyze_dynamic_instances_running_services_concurrently(
+                state, dynamic_instances, ssh_key_path, user_id
+            )
+        )
+    return dynamic_instances
+
+
+async def summary(state: AppState, user_id: int | None, wallet_id: int | None) -> None:
+    # get all the running instances
+    assert state.ec2_resource_autoscaling
+    dynamic_instances = await ec2.list_dynamic_instances_from_ec2(
+        state, user_id, wallet_id
+    )
+    dynamic_autoscaled_instances = await _parse_dynamic_instances(
+        state, dynamic_instances, state.ssh_key_path, user_id, wallet_id
+    )
+    _print_dynamic_instances(
+        dynamic_autoscaled_instances,
+        state.environment,
+        state.ec2_resource_autoscaling.meta.client.meta.region_name,
+    )
+
+    assert state.ec2_resource_clusters_keeper
+    computational_instances = await ec2.list_computational_instances_from_ec2(
+        state, user_id, wallet_id
+    )
+    computational_clusters = await _parse_computational_clusters(
+        state, computational_instances, state.ssh_key_path, user_id, wallet_id
+    )
+    _print_computational_clusters(
+        computational_clusters,
+        state.environment,
+        state.ec2_resource_clusters_keeper.meta.client.meta.region_name,
+    )
+
+
+def _print_computational_tasks(
+    user_id: int,
+    wallet_id: int,
+    tasks: list[tuple[ComputationalTask | None, DaskTask | None]],
+) -> None:
+    table = Table(
+        "index",
+        "ProjectID",
+        "NodeID",
+        "ServiceName",
+        "ServiceVersion",
+        "State in DB",
+        "State in Dask cluster",
+        title=f"{len(tasks)} Tasks running for {user_id=}/{wallet_id=}",
+        padding=(0, 0),
+        title_style=Style(color="red", encircle=True),
+    )
+
+    for index, (db_task, dask_task) in enumerate(tasks):
+        table.add_row(
+            f"{index}",
+            (
+                f"{db_task.project_id}"
+                if db_task
+                else "[red][bold]intervention needed[/bold][/red]"
+            ),
+            f"{db_task.node_id}" if db_task else "",
+            f"{db_task.service_name}" if db_task else "",
+            f"{db_task.service_version}" if db_task else "",
+            f"{db_task.state}" if db_task else "",
+            (
+                dask_task.state
+                if dask_task
+                else "[orange3]task not yet in cluster[/orange3]"
+            ),
+        )
+
+    rich.print(table)
+
+
+async def _list_computational_clusters(
+    state: AppState, user_id: int, wallet_id: int
+) -> list[ComputationalCluster]:
+    assert state.ec2_resource_clusters_keeper
+    computational_instances = await ec2.list_computational_instances_from_ec2(
+        state, user_id, wallet_id
+    )
+    return await _parse_computational_clusters(
+        state, computational_instances, state.ssh_key_path, user_id, wallet_id
+    )
+
+
+async def cancel_jobs(  # noqa: C901, PLR0912
+    state: AppState, user_id: int, wallet_id: int, *, force: bool
+) -> None:
+    # get the theory
+    computational_tasks = await db.list_computational_tasks_from_db(state, user_id)
+
+    # get the reality
+    computational_clusters = await _list_computational_clusters(
+        state, user_id, wallet_id
+    )
+    job_id_to_dask_state: dict[TaskId, TaskState] = {}
+    if computational_clusters:
+        assert (
+            len(computational_clusters) == 1
+        ), "too many clusters found! TIP: fix this code or something weird is playing out"
+
+        the_cluster = computational_clusters[0]
+        rich.print(f"{the_cluster.task_states_to_tasks=}")
+
+        for job_state, job_ids in the_cluster.task_states_to_tasks.items():
+            for job_id in job_ids:
+                job_id_to_dask_state[job_id] = job_state
+
+    task_to_dask_job: list[tuple[ComputationalTask | None, DaskTask | None]] = []
+    for task in computational_tasks:
+        dask_task = None
+        if task.job_id:
+            dask_task = DaskTask(
+                job_id=task.job_id,
+                state=job_id_to_dask_state.pop(task.job_id, None) or "unknown",
+            )
+        task_to_dask_job.append((task, dask_task))
+    # keep the jobs still in the cluster
+    for job_id, dask_state in job_id_to_dask_state.items():
+        task_to_dask_job.append((None, DaskTask(job_id=job_id, state=dask_state)))
+
+    if not task_to_dask_job:
+        rich.print("[red]nothing found![/red]")
+        raise typer.Exit
+
+    _print_computational_tasks(user_id, wallet_id, task_to_dask_job)
+    if not computational_clusters:
+        # without a running cluster there is no scheduler to talk to
+        rich.print("[yellow]no running cluster found: nothing to cancel in dask[/yellow]")
+        raise typer.Exit
+    rich.print(the_cluster.datasets)
+    try:
+        if response := typer.prompt(
+            "[yellow]Which task to cancel? Enter the index shown above, 'all' for all of them.[/yellow]",
+            default="none",
+        ):
+            if response == "none":
+                rich.print("[yellow]not cancelling anything[/yellow]")
+            elif response == "all":
+                for comp_task, dask_task in task_to_dask_job:
+                    if dask_task is not None and dask_task.state != "unknown":
+                        await dask.trigger_job_cancellation_in_scheduler(
+                            state,
+                            the_cluster,
+                            dask_task.job_id,
+                        )
+                        if comp_task is None:
+                            # we need to clear it from the cluster
+                            await dask.remove_job_from_scheduler(
+                                state,
+                                the_cluster,
+                                dask_task.job_id,
+                            )
+                    if comp_task is not None and force:
+                        await db.abort_job_in_db(
+                            state, comp_task.project_id, comp_task.node_id
+                        )
+
+                rich.print("cancelled all tasks")
+            else:
+                selected_index = TypeAdapter(int).validate_python(response)
+                comp_task, dask_task = task_to_dask_job[selected_index]
+                if dask_task is not None and dask_task.state != "unknown":
+                    await dask.trigger_job_cancellation_in_scheduler(
+                        state, the_cluster, dask_task.job_id
+                    )
+                    if comp_task is None:
+                        # we need to clear it from the cluster
+                        await dask.remove_job_from_scheduler(
+                            state, the_cluster, dask_task.job_id
+                        )
+
+                if comp_task is not None and force:
+                    await db.abort_job_in_db(
+                        state, comp_task.project_id, comp_task.node_id
+                    )
+
+    except ValidationError:
+        rich.print("[yellow]wrong index, not cancelling anything[/yellow]")
+
+
+async def trigger_cluster_termination(
+    state: AppState, user_id: int, wallet_id: int
+) -> None:
+    assert state.ec2_resource_clusters_keeper
+    computational_instances = await ec2.list_computational_instances_from_ec2(
+        state, user_id, wallet_id
+    )
+    computational_clusters = await _parse_computational_clusters(
+        state, computational_instances, state.ssh_key_path, user_id, wallet_id
+    )
+    assert computational_clusters
+    assert (
+        len(computational_clusters) == 1
+    ), "too many clusters found!
TIP: fix this code" + + _print_computational_clusters( + computational_clusters, + state.environment, + state.ec2_resource_clusters_keeper.meta.client.meta.region_name, + ) + if typer.confirm("Are you sure you want to trigger termination of that cluster?"): + the_cluster = computational_clusters[0] + new_heartbeat_tag: TagTypeDef = { + "Key": "last_heartbeat", + "Value": f"{arrow.utcnow().datetime - datetime.timedelta(hours=1)}", + } + the_cluster.primary.ec2_instance.create_tags(Tags=[new_heartbeat_tag]) + rich.print( + f"heartbeat tag on cluster of {user_id=}/{wallet_id=} changed, clusters-keeper will terminate that cluster soon." + ) + else: + rich.print("not deleting anything") diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py new file mode 100644 index 00000000000..e18c2beb831 --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py @@ -0,0 +1,146 @@ +import contextlib +from collections.abc import AsyncGenerator, Awaitable, Coroutine +from typing import Any, Final + +import distributed +import rich +from mypy_boto3_ec2.service_resource import Instance +from pydantic import AnyUrl + +from .constants import SSH_USER_NAME, TASK_CANCEL_EVENT_NAME_TEMPLATE +from .ec2 import get_bastion_instance_from_remote_instance +from .models import AppState, ComputationalCluster, TaskId, TaskState +from .ssh import ssh_tunnel + +_SCHEDULER_PORT: Final[int] = 8786 + + +def _wrap_dask_async_call(called_fct) -> Awaitable[Any]: + assert isinstance(called_fct, Coroutine) + return called_fct + + +@contextlib.asynccontextmanager +async def dask_client( + state: AppState, instance: Instance +) -> AsyncGenerator[distributed.Client, None]: + security = distributed.Security() + assert state.deploy_config + dask_certificates = state.deploy_config / "assets" / "dask-certificates" + if dask_certificates.exists(): + security = distributed.Security( + tls_ca_file=f"{dask_certificates / 'dask-cert.pem'}", + tls_client_cert=f"{dask_certificates / 'dask-cert.pem'}", + tls_client_key=f"{dask_certificates / 'dask-key.pem'}", + require_encryption=True, + ) + + try: + + async with contextlib.AsyncExitStack() as stack: + if instance.public_ip_address is not None: + url = AnyUrl(f"tls://{instance.public_ip_address}:{_SCHEDULER_PORT}") + else: + bastion_instance = await get_bastion_instance_from_remote_instance( + state, instance + ) + assert state.ssh_key_path # nosec + assert state.environment # nosec + tunnel = stack.enter_context( + ssh_tunnel( + ssh_host=bastion_instance.public_dns_name, + username=SSH_USER_NAME, + private_key_path=state.ssh_key_path, + remote_bind_host=instance.private_ip_address, + remote_bind_port=_SCHEDULER_PORT, + ) + ) + assert tunnel # nosec + host, port = tunnel.local_bind_address + url = AnyUrl(f"tls://{host}:{port}") + client = await stack.enter_async_context( + distributed.Client( + f"{url}", security=security, timeout="5", asynchronous=True + ) + ) + yield client + + finally: + pass + + +async def remove_job_from_scheduler( + state: AppState, + cluster: ComputationalCluster, + task_id: TaskId, +) -> None: + async with dask_client(state, cluster.primary.ec2_instance) as client: + await _wrap_dask_async_call(client.unpublish_dataset(task_id)) + rich.print(f"unpublished {task_id} from scheduler") + + +async def trigger_job_cancellation_in_scheduler( + state: AppState, + cluster: ComputationalCluster, + task_id: TaskId, +) -> None: + async with dask_client(state, 
cluster.primary.ec2_instance) as client:
+        task_future = distributed.Future(task_id)
+        cancel_event = distributed.Event(
+            name=TASK_CANCEL_EVENT_NAME_TEMPLATE.format(task_future.key),
+            client=client,
+        )
+        await _wrap_dask_async_call(cancel_event.set())
+        await _wrap_dask_async_call(task_future.cancel())
+        rich.print(f"cancelled {task_id} in scheduler/workers")
+
+
+async def _list_all_tasks(
+    client: distributed.Client,
+) -> dict[TaskState, list[TaskId]]:
+    def _list_tasks(
+        dask_scheduler: distributed.Scheduler,
+    ) -> dict[TaskState, list[TaskId]]:
+        # NOTE: this function is serialized and executed on the dask scheduler itself
+
+        task_state_to_tasks = {}
+        for task in dask_scheduler.tasks.values():
+            if task.state in task_state_to_tasks:
+                task_state_to_tasks[task.state].append(task.key)
+            else:
+                task_state_to_tasks[task.state] = [task.key]
+
+        return dict(task_state_to_tasks)
+
+    list_of_tasks: dict[TaskState, list[TaskId]] = {}
+    try:
+        list_of_tasks = await client.run_on_scheduler(_list_tasks)  # type: ignore
+    except TypeError:
+        rich.print(f"ERROR while recovering unrunnable tasks using {client=}")
+    return list_of_tasks
+
+
+async def get_scheduler_details(state: AppState, instance: Instance):
+    scheduler_info = {}
+    datasets_on_cluster = ()
+    processing_jobs = {}
+    all_tasks = {}
+    with contextlib.suppress(TimeoutError, OSError):
+        async with dask_client(state, instance) as client:
+            scheduler_info = client.scheduler_info()
+            datasets_on_cluster = await _wrap_dask_async_call(client.list_datasets())
+            processing_jobs = await _wrap_dask_async_call(client.processing())
+            all_tasks = await _list_all_tasks(client)
+
+    return scheduler_info, datasets_on_cluster, processing_jobs, all_tasks
+
+
+def get_worker_metrics(scheduler_info: dict[str, Any]) -> dict[str, Any]:
+    worker_metrics = {}
+    for worker_name, worker_data in scheduler_info.get("workers", {}).items():
+        worker_metrics[worker_name] = {
+            "resources": worker_data["resources"],
+            "tasks": worker_data["metrics"].get("task_counts", {}),
+        }
+    return worker_metrics
diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py
new file mode 100644
index 00000000000..615fe6b82a6
--- /dev/null
+++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py
@@ -0,0 +1,109 @@
+import contextlib
+import uuid
+from collections.abc import AsyncGenerator
+from typing import Any
+
+import rich
+import sqlalchemy as sa
+from pydantic import PostgresDsn, TypeAdapter
+from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+
+from .models import AppState, ComputationalTask, PostgresDB
+
+
+@contextlib.asynccontextmanager
+async def db_engine(state: AppState) -> AsyncGenerator[AsyncEngine, Any]:
+    engine = None
+    try:
+        for env in [
+            "POSTGRES_USER",
+            "POSTGRES_PASSWORD",
+            "POSTGRES_ENDPOINT",
+            "POSTGRES_DB",
+        ]:
+            assert state.environment[env]
+        postgres_db = PostgresDB(
+            dsn=TypeAdapter(PostgresDsn).validate_python(
+                f"postgresql+asyncpg://{state.environment['POSTGRES_USER']}:{state.environment['POSTGRES_PASSWORD']}@{state.environment['POSTGRES_ENDPOINT']}/{state.environment['POSTGRES_DB']}"
+            )
+        )
+
+        engine = create_async_engine(
+            f"{postgres_db.dsn}",
+            connect_args={
+                "server_settings": {
+                    "application_name": "osparc-clusters-monitoring-script"
+                }
+            },
+        )
+        yield engine
+    finally:
+        if engine:
+            await engine.dispose()
+
+
+async def abort_job_in_db(
+    state: AppState, project_id: uuid.UUID, node_id: uuid.UUID
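+    # NOTE: force-path helper of cancel_jobs: marks the task ABORTED in the DB
+    # even when the dask scheduler no longer knows about it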
+) -> None:
+    async with contextlib.AsyncExitStack() as stack:
+        engine = await stack.enter_async_context(db_engine(state))
+        db_connection = await stack.enter_async_context(engine.begin())
+
+        await db_connection.execute(
+            sa.text(
+                "UPDATE comp_tasks SET state = 'ABORTED' "
+                "WHERE project_id = :project_id AND node_id = :node_id"
+            ),
+            {"project_id": f"{project_id}", "node_id": f"{node_id}"},
+        )
+        rich.print(f"comp_tasks for {project_id=}/{node_id=} set to ABORTED")
+
+
+async def list_computational_tasks_from_db(
+    state: AppState, user_id: int
+) -> list[ComputationalTask]:
+    async with contextlib.AsyncExitStack() as stack:
+        engine = await stack.enter_async_context(db_engine(state))
+        db_connection = await stack.enter_async_context(engine.begin())
+
+        # Get the list of running project UUIDs with a subquery
+        subquery = (
+            sa.select(sa.column("project_uuid"))
+            .select_from(sa.table("comp_runs"))
+            .where(
+                sa.and_(
+                    sa.column("user_id") == user_id,
+                    sa.cast(sa.column("result"), sa.VARCHAR) != "SUCCESS",
+                    sa.cast(sa.column("result"), sa.VARCHAR) != "FAILED",
+                    sa.cast(sa.column("result"), sa.VARCHAR) != "ABORTED",
+                )
+            )
+        )
+
+        # Now select comp_tasks rows where project_id is one of the project_uuids
+        query = (
+            sa.select("*")
+            .select_from(sa.table("comp_tasks"))
+            .where(
+                sa.column("project_id").in_(subquery)
+                & (sa.cast(sa.column("state"), sa.VARCHAR) != "SUCCESS")
+                & (sa.cast(sa.column("state"), sa.VARCHAR) != "FAILED")
+                & (sa.cast(sa.column("state"), sa.VARCHAR) != "ABORTED")
+            )
+        )
+
+        result = await db_connection.execute(query)
+        comp_tasks_list = result.fetchall()
+        return [
+            TypeAdapter(ComputationalTask).validate_python(
+                {
+                    "project_id": row.project_id,
+                    "node_id": row.node_id,
+                    "job_id": row.job_id,
+                    "service_name": row.image["name"].split("/")[-1],
+                    "service_version": row.image["tag"],
+                    "state": row.state,
+                }
+            )
+            for row in comp_tasks_list
+        ]
+    msg = "unable to access database!"
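+    # NOTE: defensive fallback; normally unreachable since the function returns
+    # inside the AsyncExitStack block above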
+ raise RuntimeError(msg) diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/ec2.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/ec2.py new file mode 100644 index 00000000000..2f1e21423c9 --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/ec2.py @@ -0,0 +1,171 @@ +import json +from typing import Final + +import boto3 +from aiocache import cached +from mypy_boto3_ec2 import EC2ServiceResource +from mypy_boto3_ec2.service_resource import Instance, ServiceResourceInstancesCollection +from mypy_boto3_ec2.type_defs import FilterTypeDef + +from .models import AppState +from .utils import get_instance_name, to_async + + +@to_async +def _list_running_ec2_instances( + ec2_resource: EC2ServiceResource, + key_name: str, + custom_tags: dict[str, str], + user_id: int | None, + wallet_id: int | None, +) -> ServiceResourceInstancesCollection: + # get all the running instances + + ec2_filters: list[FilterTypeDef] = [ + {"Name": "instance-state-name", "Values": ["running", "pending"]}, + {"Name": "key-name", "Values": [key_name]}, + ] + if custom_tags: + ec2_filters.extend( + [ + {"Name": f"tag:{key}", "Values": [f"{value}"]} + for key, value in custom_tags.items() + ] + ) + + if user_id: + ec2_filters.append({"Name": "tag:user_id", "Values": [f"{user_id}"]}) + if wallet_id: + ec2_filters.append({"Name": "tag:wallet_id", "Values": [f"{wallet_id}"]}) + + return ec2_resource.instances.filter(Filters=ec2_filters) + + +async def list_computational_instances_from_ec2( + state: AppState, + user_id: int | None, + wallet_id: int | None, +) -> ServiceResourceInstancesCollection: + assert state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"] + assert state.environment["WORKERS_EC2_INSTANCES_KEY_NAME"] + assert ( + state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"] + == state.environment["WORKERS_EC2_INSTANCES_KEY_NAME"] + ), "key name is different on primary and workers. TIP: adjust this code now" + custom_tags = {} + if state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"]: + assert ( + state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"] + == state.environment["WORKERS_EC2_INSTANCES_CUSTOM_TAGS"] + ), "custom tags are different on primary and workers. 
TIP: adjust this code now" + custom_tags = json.loads(state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"]) + assert state.ec2_resource_clusters_keeper + return await _list_running_ec2_instances( + state.ec2_resource_clusters_keeper, + state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"], + custom_tags, + user_id, + wallet_id, + ) + + +async def list_dynamic_instances_from_ec2( + state: AppState, + user_id: int | None, + wallet_id: int | None, +) -> ServiceResourceInstancesCollection: + assert state.environment["EC2_INSTANCES_KEY_NAME"] + custom_tags = {} + if state.environment["EC2_INSTANCES_CUSTOM_TAGS"]: + custom_tags = json.loads(state.environment["EC2_INSTANCES_CUSTOM_TAGS"]) + assert state.ec2_resource_autoscaling + return await _list_running_ec2_instances( + state.ec2_resource_autoscaling, + state.environment["EC2_INSTANCES_KEY_NAME"], + custom_tags, + user_id, + wallet_id, + ) + + +_DEFAULT_BASTION_NAME: Final[str] = "bastion-host" + + +@cached() +async def get_computational_bastion_instance(state: AppState) -> Instance: + assert state.ec2_resource_clusters_keeper # nosec + assert state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"] # nosec + instances = await _list_running_ec2_instances( + state.ec2_resource_clusters_keeper, + state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"], + {}, + None, + None, + ) + + possible_bastions = list( + filter(lambda i: _DEFAULT_BASTION_NAME in get_instance_name(i), instances) + ) + assert len(possible_bastions) == 1 + return possible_bastions[0] + + +@cached() +async def get_dynamic_bastion_instance(state: AppState) -> Instance: + assert state.ec2_resource_autoscaling # nosec + assert state.environment["EC2_INSTANCES_KEY_NAME"] # nosec + instances = await _list_running_ec2_instances( + state.ec2_resource_autoscaling, + state.environment["EC2_INSTANCES_KEY_NAME"], + {}, + None, + None, + ) + + possible_bastions = list( + filter(lambda i: _DEFAULT_BASTION_NAME in get_instance_name(i), instances) + ) + assert len(possible_bastions) == 1 + return possible_bastions[0] + + +def cluster_keeper_region(state: AppState) -> str: + assert state.environment["CLUSTERS_KEEPER_EC2_REGION_NAME"] # nosec + return state.environment["CLUSTERS_KEEPER_EC2_REGION_NAME"] + + +def autoscaling_region(state: AppState) -> str: + assert state.environment["AUTOSCALING_EC2_REGION_NAME"] # nosec + return state.environment["AUTOSCALING_EC2_REGION_NAME"] + + +async def get_bastion_instance_from_remote_instance( + state: AppState, remote_instance: Instance +) -> Instance: + availability_zone = remote_instance.placement["AvailabilityZone"] + if cluster_keeper_region(state) in availability_zone: + return await get_computational_bastion_instance(state) + if autoscaling_region(state) in availability_zone: + return await get_dynamic_bastion_instance(state) + msg = "no corresponding bastion instance!" 
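+    # NOTE: reached when the instance's availability zone matches neither the
+    # clusters-keeper region nor the autoscaling region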
+ raise RuntimeError(msg) + + +def cluster_keeper_ec2_client(state: AppState) -> EC2ServiceResource: + return boto3.resource( + "ec2", + region_name=cluster_keeper_region(state), + aws_access_key_id=state.environment["CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID"], + aws_secret_access_key=state.environment[ + "CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY" + ], + ) + + +def autoscaling_ec2_client(state: AppState) -> EC2ServiceResource: + return boto3.resource( + "ec2", + region_name=autoscaling_region(state), + aws_access_key_id=state.environment["AUTOSCALING_EC2_ACCESS_KEY_ID"], + aws_secret_access_key=state.environment["AUTOSCALING_EC2_SECRET_ACCESS_KEY"], + ) diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py new file mode 100644 index 00000000000..a4b423ebddc --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py @@ -0,0 +1,113 @@ +import datetime +import uuid +from collections import namedtuple +from dataclasses import dataclass, field +from enum import Enum +from pathlib import Path +from typing import Any, TypeAlias + +import parse +from mypy_boto3_ec2 import EC2ServiceResource +from mypy_boto3_ec2.service_resource import Instance +from pydantic import BaseModel, ByteSize, PostgresDsn + + +@dataclass(kw_only=True) +class AppState: + environment: dict[str, str | None] = field(default_factory=dict) + ec2_resource_autoscaling: EC2ServiceResource | None = None + ec2_resource_clusters_keeper: EC2ServiceResource | None = None + dynamic_parser: parse.Parser + computational_parser: parse.Parser + deploy_config: Path | None = None + ssh_key_path: Path | None = None + + computational_bastion: Instance | None = None + dynamic_bastion: Instance | None = None + + +@dataclass(slots=True, kw_only=True) +class AutoscaledInstance: + name: str + ec2_instance: Instance + disk_space: ByteSize + + +class InstanceRole(str, Enum): + manager = "manager" + worker = "worker" + + +@dataclass(slots=True, kw_only=True) +class ComputationalInstance(AutoscaledInstance): + role: InstanceRole + user_id: int + wallet_id: int + last_heartbeat: datetime.datetime | None + dask_ip: str + + +@dataclass +class DynamicService: + node_id: str + user_id: int + project_id: str + service_name: str + service_version: str + created_at: datetime.datetime + needs_manual_intervention: bool + containers: list[str] + + +@dataclass(slots=True, kw_only=True) +class DynamicInstance(AutoscaledInstance): + running_services: list[DynamicService] + + +TaskId: TypeAlias = str +TaskState: TypeAlias = str + + +@dataclass(slots=True, kw_only=True, frozen=True) +class ComputationalTask: + project_id: uuid.UUID + node_id: uuid.UUID + job_id: TaskId | None + service_name: str + service_version: str + state: str + + +@dataclass(slots=True, kw_only=True, frozen=True) +class DaskTask: + job_id: TaskId + state: TaskState + + +@dataclass(frozen=True, slots=True, kw_only=True) +class ComputationalCluster: + primary: ComputationalInstance + workers: list[ComputationalInstance] + + scheduler_info: dict[str, Any] + datasets: tuple[str, ...] 
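+    # as used by the monitor: processing_jobs maps a dask worker address to the
+    # job id it is processing; task_states_to_tasks maps a task state to the
+    # list of task ids currently in that state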
+ processing_jobs: dict[str, str] + task_states_to_tasks: dict[str, list[TaskState]] + + +DockerContainer = namedtuple( # noqa: PYI024 + "docker_container", + [ + "node_id", + "user_id", + "project_id", + "created_at", + "name", + "service_name", + "service_version", + ], +) + + +class PostgresDB(BaseModel): + dsn: PostgresDsn diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/ssh.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/ssh.py new file mode 100644 index 00000000000..a19f4be0992 --- /dev/null +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/ssh.py @@ -0,0 +1,257 @@ +import contextlib +import datetime +import json +import logging +import re +from collections import defaultdict +from collections.abc import AsyncGenerator, Generator +from pathlib import Path +from typing import Any, Final + +import arrow +import paramiko +import rich +import typer +from mypy_boto3_ec2.service_resource import Instance +from paramiko import Ed25519Key +from pydantic import ByteSize +from sshtunnel import SSHTunnelForwarder + +from .constants import DYN_SERVICES_NAMING_CONVENTION +from .ec2 import get_bastion_instance_from_remote_instance +from .models import AppState, DockerContainer, DynamicService + +_DEFAULT_SSH_PORT: Final[int] = 22 +_LOCAL_BIND_ADDRESS: Final[str] = "127.0.0.1" + +_logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def ssh_tunnel( + *, + ssh_host: str, + username: str, + private_key_path: Path, + remote_bind_host: str, + remote_bind_port: int, +) -> Generator[SSHTunnelForwarder | None, Any, None]: + try: + with SSHTunnelForwarder( + (ssh_host, _DEFAULT_SSH_PORT), + ssh_username=username, + ssh_pkey=Ed25519Key(filename=private_key_path), + remote_bind_address=(remote_bind_host, remote_bind_port), + local_bind_address=(_LOCAL_BIND_ADDRESS, 0), + set_keepalive=10, + ) as tunnel: + yield tunnel + except Exception: + _logger.exception("Unexpected issue with ssh tunnel") + raise + finally: + pass + + +@contextlib.contextmanager +def _ssh_client( + hostname: str, port: int, *, username: str, private_key_path: Path +) -> Generator[paramiko.SSHClient, Any, None]: + try: + with paramiko.SSHClient() as client: + client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + client.connect( + hostname, + port, + username=username, + key_filename=f"{private_key_path}", + timeout=5, + ) + yield client + except Exception: + _logger.exception("Unexpected issue with ssh client") + raise + finally: + pass + + +@contextlib.asynccontextmanager +async def ssh_instance( + instance: Instance, *, state: AppState, username: str, private_key_path: Path +) -> AsyncGenerator[paramiko.SSHClient, Any]: + """ssh in instance with/without tunnel as needed""" + assert state.ssh_key_path # nosec + try: + async with contextlib.AsyncExitStack() as stack: + if instance.public_ip_address: + hostname = instance.public_ip_address + port = _DEFAULT_SSH_PORT + else: + assert state.environment + bastion_instance = await get_bastion_instance_from_remote_instance( + state, instance + ) + tunnel = stack.enter_context( + ssh_tunnel( + ssh_host=bastion_instance.public_dns_name, + username=username, + private_key_path=state.ssh_key_path, + remote_bind_host=instance.private_ip_address, + remote_bind_port=_DEFAULT_SSH_PORT, + ) + ) + assert tunnel # nosec + hostname, port = tunnel.local_bind_address + ssh_client = stack.enter_context( + _ssh_client( + hostname, + port, + username=username, + private_key_path=private_key_path, + ) + ) + yield ssh_client + 
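+    # NOTE: unwinding the AsyncExitStack closes the ssh client first and then
+    # the bastion tunnel (when one was opened), in reverse order of entry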
+ finally: + pass + + +async def get_available_disk_space( + state: AppState, instance: Instance, username: str, private_key_path: Path +) -> ByteSize: + assert state.ssh_key_path + + try: + async with ssh_instance( + instance, state=state, username=username, private_key_path=private_key_path + ) as ssh_client: + # Command to get disk space for /docker partition + disk_space_command = "df --block-size=1 /mnt/docker | awk 'NR==2{print $4}'" + + # Run the command on the remote machine + _, stdout, stderr = ssh_client.exec_command(disk_space_command) + exit_status = stdout.channel.recv_exit_status() + error = stderr.read().decode() + + if exit_status != 0: + rich.print(error) + raise typer.Abort(error) + + # Available disk space will be captured here + available_space = stdout.read().decode("utf-8").strip() + return ByteSize(available_space) + except ( + paramiko.AuthenticationException, + paramiko.SSHException, + TimeoutError, + ): + return ByteSize(0) + + +async def get_dask_ip( + state: AppState, instance: Instance, username: str, private_key_path: Path +) -> str: + + try: + async with ssh_instance( + instance, state=state, username=username, private_key_path=private_key_path + ) as ssh_client: + dask_ip_command = "docker inspect -f '{{.NetworkSettings.Networks.dask_stack_cluster.IPAddress}}' $(docker ps --filter 'name=dask-sidecar|dask-scheduler' --format '{{.ID}}')" + + # Run the command on the remote machine + _, stdout, stderr = ssh_client.exec_command(dask_ip_command) + exit_status = stdout.channel.recv_exit_status() + if exit_status != 0: + error_message = stderr.read().decode().strip() + _logger.error( + "Command failed with exit status %s: %s", exit_status, error_message + ) + return "Not Found / Drained / Not Ready" + + # Available disk space will be captured here + return stdout.read().decode("utf-8").strip() + except ( + paramiko.AuthenticationException, + paramiko.SSHException, + TimeoutError, + ): + return "Not Ready" + + +async def list_running_dyn_services( + state: AppState, instance: Instance, username: str, private_key_path: Path +) -> list[DynamicService]: + try: + async with ssh_instance( + instance, state=state, username=username, private_key_path=private_key_path + ) as ssh_client: + # Run the Docker command to list containers + _stdin, stdout, stderr = ssh_client.exec_command( + 'docker ps --format=\'{{.Names}}\t{{.CreatedAt}}\t{{.Label "io.simcore.runtime.user-id"}}\t{{.Label "io.simcore.runtime.project-id"}}\t{{.Label "io.simcore.name"}}\t{{.Label "io.simcore.version"}}\' --filter=name=dy-', + ) + exit_status = stdout.channel.recv_exit_status() + error = stderr.read().decode() + if exit_status != 0: + rich.print(error) + raise typer.Abort(error) + + output = stdout.read().decode("utf-8") + # Extract containers that follow the naming convention + running_service: dict[str, list[DockerContainer]] = defaultdict(list) + for container in output.splitlines(): + if match := re.match(DYN_SERVICES_NAMING_CONVENTION, container): + named_container = DockerContainer( + match["node_id"], + int(match["user_id"]), + match["project_id"], + arrow.get( + match["created_at"], + "YYYY-MM-DD HH:mm:ss", + tzinfo=datetime.timezone.utc, + ).datetime, + container, + ( + json.loads(match["service_name"])["name"] + if match["service_name"] + else "" + ), + ( + json.loads(match["service_version"])["version"] + if match["service_version"] + else "" + ), + ) + running_service[match["node_id"]].append(named_container) + + def _needs_manual_intervention( + running_containers: 
list[DockerContainer],
+            ) -> bool:
+                valid_prefixes = ["dy-sidecar_", "dy-proxy_", "dy-sidecar-"]
+                for prefix in valid_prefixes:
+                    found = any(
+                        container.name.startswith(prefix)
+                        for container in running_containers
+                    )
+                    if not found:
+                        return True
+                return False
+
+            return [
+                DynamicService(
+                    node_id=node_id,
+                    user_id=containers[0].user_id,
+                    project_id=containers[0].project_id,
+                    created_at=containers[0].created_at,
+                    needs_manual_intervention=_needs_manual_intervention(containers),
+                    containers=[c.name for c in containers],
+                    service_name=containers[0].service_name,
+                    service_version=containers[0].service_version,
+                )
+                for node_id, containers in running_service.items()
+            ]
+    except (
+        paramiko.AuthenticationException,
+        paramiko.SSHException,
+        TimeoutError,
+    ):
+        return []
diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/utils.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/utils.py
new file mode 100644
index 00000000000..29c2225db80
--- /dev/null
+++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/utils.py
@@ -0,0 +1,64 @@
+import asyncio
+import datetime
+import functools
+from typing import Awaitable, Callable, ParamSpec, TypeVar
+
+import arrow
+from mypy_boto3_ec2.service_resource import Instance
+
+from .constants import DANGER, HOUR
+
+
+def timedelta_formatting(
+    time_diff: datetime.timedelta, *, color_code: bool = False
+) -> str:
+    formatted_time_diff = f"{time_diff.days} day(s), " if time_diff.days > 0 else ""
+    formatted_time_diff += f"{time_diff.seconds // 3600:02}:{(time_diff.seconds // 60) % 60:02}:{time_diff.seconds % 60:02}"
+    if time_diff.days and color_code:
+        formatted_time_diff = f"[red]{formatted_time_diff}[/red]"
+    elif (time_diff.seconds > 5 * HOUR) and color_code:
+        formatted_time_diff = f"[orange3]{formatted_time_diff}[/orange3]"
+    return formatted_time_diff
+
+
+def get_instance_name(instance: Instance) -> str:
+    for tag in instance.tags:
+        assert "Key" in tag  # nosec
+        if tag["Key"] == "Name":
+            return tag.get("Value", "unknown")
+    return "unknown"
+
+
+def get_last_heartbeat(instance: Instance) -> datetime.datetime | None:
+    for tag in instance.tags:
+        assert "Key" in tag  # nosec
+        if tag["Key"] == "last_heartbeat":
+            assert "Value" in tag  # nosec
+            return arrow.get(tag["Value"]).datetime
+    return None
+
+
+def color_encode_with_state(string: str, ec2_instance: Instance) -> str:
+    return (
+        f"[green]{string}[/green]"
+        if ec2_instance.state["Name"] == "running"
+        else f"[yellow]{string}[/yellow]"
+    )
+
+
+def color_encode_with_threshold(string: str, value, threshold) -> str:
+    return string if value > threshold else DANGER.format(string)
+
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
+def to_async(func: Callable[P, R]) -> Callable[P, Awaitable[R]]:
+    @functools.wraps(func)
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]:
+        loop = asyncio.get_running_loop()
+        partial_func = functools.partial(func, *args, **kwargs)
+        return loop.run_in_executor(None, partial_func)
+
+    return wrapper
diff --git a/scripts/maintenance/computational-clusters/osparc_clusters.py b/scripts/maintenance/computational-clusters/osparc_clusters.py
deleted file mode 100755
index 2ca58bc6ec6..00000000000
--- a/scripts/maintenance/computational-clusters/osparc_clusters.py
+++ /dev/null
@@ -1,1345 +0,0 @@
-#!
/usr/bin/env python3 - -import asyncio -import contextlib -import datetime -import json -import re -import uuid -from collections import defaultdict, namedtuple -from collections.abc import AsyncGenerator -from dataclasses import dataclass, field, replace -from enum import Enum -from pathlib import Path -from typing import Annotated, Any, Final, TypeAlias - -import arrow -import boto3 -import distributed -import paramiko -import parse -import sqlalchemy as sa -import typer -from dotenv import dotenv_values -from mypy_boto3_ec2 import EC2ServiceResource -from mypy_boto3_ec2.service_resource import Instance, ServiceResourceInstancesCollection -from mypy_boto3_ec2.type_defs import FilterTypeDef, TagTypeDef -from pydantic import ( - BaseModel, - ByteSize, - PostgresDsn, - TypeAdapter, - ValidationError, - field_validator, -) -from rich import print # pylint: disable=redefined-builtin -from rich.progress import track -from rich.style import Style -from rich.table import Column, Table -from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine - -app = typer.Typer() - - -@dataclass(slots=True, kw_only=True) -class AutoscaledInstance: - name: str - ec2_instance: Instance - disk_space: ByteSize - - -class InstanceRole(str, Enum): - manager = "manager" - worker = "worker" - - -@dataclass(slots=True, kw_only=True) -class ComputationalInstance(AutoscaledInstance): - role: InstanceRole - user_id: int - wallet_id: int - last_heartbeat: datetime.datetime | None - dask_ip: str - - -@dataclass -class DynamicService: - node_id: str - user_id: int - project_id: str - service_name: str - service_version: str - created_at: datetime.datetime - needs_manual_intervention: bool - containers: list[str] - - -@dataclass(slots=True, kw_only=True) -class DynamicInstance(AutoscaledInstance): - running_services: list[DynamicService] - - -TaskId: TypeAlias = str -TaskState: TypeAlias = str - - -@dataclass(slots=True, kw_only=True, frozen=True) -class ComputationalTask: - project_id: uuid.UUID - node_id: uuid.UUID - job_id: TaskId | None - service_name: str - service_version: str - state: str - - -@dataclass(slots=True, kw_only=True, frozen=True) -class DaskTask: - job_id: TaskId - state: TaskState - - -@dataclass(frozen=True, slots=True, kw_only=True) -class ComputationalCluster: - primary: ComputationalInstance - workers: list[ComputationalInstance] - - scheduler_info: dict[str, Any] - datasets: tuple[str, ...] - processing_jobs: dict[str, str] - task_states_to_tasks: dict[str, list[TaskState]] - - -MINUTE: Final[int] = 60 -HOUR: Final[int] = 60 * MINUTE - -DEFAULT_COMPUTATIONAL_EC2_FORMAT: Final[ - str -] = r"osparc-computational-cluster-{role}-{swarm_stack_name}-user_id:{user_id:d}-wallet_id:{wallet_id:d}" -DEFAULT_DYNAMIC_EC2_FORMAT: Final[str] = r"osparc-dynamic-autoscaled-worker-{key_name}" - -DEPLOY_SSH_KEY_PARSER: Final[parse.Parser] = parse.compile(r"osparc-{random_name}.pem") -SSH_USER_NAME: Final[str] = "ubuntu" -UNDEFINED_BYTESIZE: Final[ByteSize] = ByteSize(-1) -TASK_CANCEL_EVENT_NAME_TEMPLATE: Final[str] = "cancel_event_{}" - -# NOTE: service_name and service_version are not available on dynamic-sidecar/dynamic-proxies! 
-DYN_SERVICES_NAMING_CONVENTION: Final[re.Pattern] = re.compile( - r"^dy-(proxy|sidecar)(-|_)(?P.{8}-.{4}-.{4}-.{4}-.{12}).*\t(?P[^\t]+)\t(?P\d+)\t(?P.{8}-.{4}-.{4}-.{4}-.{12})\t(?P[^\t]*)\t(?P.*)$" -) - -DockerContainer = namedtuple( # noqa: PYI024 - "docker_container", - [ - "node_id", - "user_id", - "project_id", - "created_at", - "name", - "service_name", - "service_version", - ], -) - - -@dataclass(kw_only=True) -class AppState: - environment: dict[str, str | None] = field(default_factory=dict) - ec2_resource_autoscaling: EC2ServiceResource | None = None - ec2_resource_clusters_keeper: EC2ServiceResource | None = None - dynamic_parser: parse.Parser - computational_parser: parse.Parser - deploy_config: Path | None = None - ssh_key_path: Path | None = None - - -state: AppState = AppState( - dynamic_parser=parse.compile(DEFAULT_DYNAMIC_EC2_FORMAT), - computational_parser=parse.compile(DEFAULT_COMPUTATIONAL_EC2_FORMAT), -) - - -class PostgresDB(BaseModel): - dsn: PostgresDsn - - @classmethod - @field_validator("db") - def check_db_name(cls, v): - assert v.path and len(v.path) > 1, "database must be provided" # noqa: PT018 - return v - - -@contextlib.asynccontextmanager -async def db_engine() -> AsyncGenerator[AsyncEngine, Any]: - engine = None - try: - for env in [ - "POSTGRES_USER", - "POSTGRES_PASSWORD", - "POSTGRES_ENDPOINT", - "POSTGRES_DB", - ]: - assert state.environment[env] - postgres_db = PostgresDB( - dsn=f"postgresql+asyncpg://{state.environment['POSTGRES_USER']}:{state.environment['POSTGRES_PASSWORD']}@{state.environment['POSTGRES_ENDPOINT']}/{state.environment['POSTGRES_DB']}" - ) - - engine = create_async_engine( - f"{postgres_db.dsn}", - connect_args={ - "server_settings": { - "application_name": "osparc-clusters-monitoring-script" - } - }, - ) - yield engine - finally: - if engine: - await engine.dispose() - - -def _get_instance_name(instance) -> str: - for tag in instance.tags: - if tag["Key"] == "Name": - return tag.get("Value", "unknown") - return "unknown" - - -def _get_last_heartbeat(instance) -> datetime.datetime | None: - for tag in instance.tags: - if tag["Key"] == "last_heartbeat": - return arrow.get(tag["Value"]).datetime - return None - - -def _timedelta_formatting( - time_diff: datetime.timedelta, *, color_code: bool = False -) -> str: - formatted_time_diff = f"{time_diff.days} day(s), " if time_diff.days > 0 else "" - formatted_time_diff += f"{time_diff.seconds // 3600:02}:{(time_diff.seconds // 60) % 60:02}:{time_diff.seconds % 60:02}" - if time_diff.days and color_code: - formatted_time_diff = f"[red]{formatted_time_diff}[/red]" - elif (time_diff.seconds > 5 * HOUR) and color_code: - formatted_time_diff = f"[orange]{formatted_time_diff}[/orange]" - return formatted_time_diff - - -def _parse_computational(instance: Instance) -> ComputationalInstance | None: - name = _get_instance_name(instance) - if result := state.computational_parser.search(name): - assert isinstance(result, parse.Result) - last_heartbeat = _get_last_heartbeat(instance) - return ComputationalInstance( - role=InstanceRole(result["role"]), - user_id=result["user_id"], - wallet_id=result["wallet_id"], - name=name, - last_heartbeat=last_heartbeat, - ec2_instance=instance, - disk_space=UNDEFINED_BYTESIZE, - dask_ip="unknown", - ) - - return None - - -def _create_graylog_permalinks( - environment: dict[str, str | None], instance: Instance -) -> str: - # https://monitoring.sim4life.io/graylog/search/6552235211aee4262e7f9f21?q=source%3A%22ip-10-0-1-67%22&rangetype=relative&from=28800 - source_name 
= instance.private_ip_address.replace(".", "-") - time_span = int( - ( - arrow.utcnow().datetime - instance.launch_time + datetime.timedelta(hours=1) - ).total_seconds() - ) - return f"https://monitoring.{environment['MACHINE_FQDN']}/graylog/search?q=source%3A%22ip-{source_name}%22&rangetype=relative&from={time_span}" - - -def _parse_dynamic(instance: Instance) -> DynamicInstance | None: - name = _get_instance_name(instance) - if result := state.dynamic_parser.search(name): - assert isinstance(result, parse.Result) - - return DynamicInstance( - name=name, - ec2_instance=instance, - running_services=[], - disk_space=UNDEFINED_BYTESIZE, - ) - return None - - -def _ssh_and_get_dask_ip( - instance: Instance, username: str, private_key_path: Path -) -> str: - # Establish SSH connection with key-based authentication - with paramiko.SSHClient() as client: - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - try: - client.connect( - instance.public_ip_address, - username=username, - key_filename=f"{private_key_path}", - timeout=5, - ) - # Command to get disk space for /docker partition - dask_ip_command = "docker inspect -f '{{.NetworkSettings.Networks.dask_stack_default.IPAddress}}' $(docker ps --filter 'name=dask-sidecar|dask-scheduler' --format '{{.ID}}')" - - # Run the command on the remote machine - _, stdout, _ = client.exec_command(dask_ip_command) - exit_status = stdout.channel.recv_exit_status() - - if exit_status != 0: - return "Not Found / Drained / Not Ready" - - # Available disk space will be captured here - return stdout.read().decode("utf-8").strip() - except ( - paramiko.AuthenticationException, - paramiko.SSHException, - TimeoutError, - ): - return "Not Ready" - - -def _ssh_and_get_available_disk_space( - instance: Instance, username: str, private_key_path: Path -) -> ByteSize: - # Establish SSH connection with key-based authentication - with paramiko.SSHClient() as client: - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - try: - client.connect( - instance.public_ip_address, - username=username, - key_filename=f"{private_key_path}", - timeout=5, - ) - # Command to get disk space for /docker partition - disk_space_command = "df --block-size=1 /mnt/docker | awk 'NR==2{print $4}'" - - # Run the command on the remote machine - _, stdout, stderr = client.exec_command(disk_space_command) - exit_status = stdout.channel.recv_exit_status() - error = stderr.read().decode() - - if exit_status != 0: - print(error) - raise typer.Abort(error) - - # Available disk space will be captured here - available_space = stdout.read().decode("utf-8").strip() - return ByteSize(available_space) - except ( - paramiko.AuthenticationException, - paramiko.SSHException, - TimeoutError, - ): - return ByteSize(0) - - -def _ssh_and_list_running_dyn_services( - instance: Instance, username: str, private_key_path: Path -) -> list[DynamicService]: - # Establish SSH connection with key-based authentication - with paramiko.SSHClient() as client: - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - try: - client.connect( - instance.public_ip_address, - username=username, - key_filename=f"{private_key_path}", - timeout=5, - ) - # Run the Docker command to list containers - _stdin, stdout, stderr = client.exec_command( - 'docker ps --format=\'{{.Names}}\t{{.CreatedAt}}\t{{.Label "io.simcore.runtime.user-id"}}\t{{.Label "io.simcore.runtime.project-id"}}\t{{.Label "io.simcore.name"}}\t{{.Label "io.simcore.version"}}\' --filter=name=dy-', - ) - exit_status = 
stdout.channel.recv_exit_status() - error = stderr.read().decode() - if exit_status != 0: - print(error) - raise typer.Abort(error) - - output = stdout.read().decode("utf-8") - # Extract containers that follow the naming convention - running_service: dict[str, list[DockerContainer]] = defaultdict(list) - for container in output.splitlines(): - if match := re.match(DYN_SERVICES_NAMING_CONVENTION, container): - named_container = DockerContainer( - match["node_id"], - int(match["user_id"]), - match["project_id"], - arrow.get( - match["created_at"], - "YYYY-MM-DD HH:mm:ss", - tzinfo=datetime.timezone.utc, - ).datetime, - container, - ( - json.loads(match["service_name"])["name"] - if match["service_name"] - else "" - ), - ( - json.loads(match["service_version"])["version"] - if match["service_version"] - else "" - ), - ) - running_service[match["node_id"]].append(named_container) - - def _needs_manual_intervention( - running_containers: list[DockerContainer], - ) -> bool: - valid_prefixes = ["dy-sidecar_", "dy-proxy_", "dy-sidecar-"] - for prefix in valid_prefixes: - found = any( - container.name.startswith(prefix) - for container in running_containers - ) - if not found: - return True - return False - - return [ - DynamicService( - node_id=node_id, - user_id=containers[0].user_id, - project_id=containers[0].project_id, - created_at=containers[0].created_at, - needs_manual_intervention=_needs_manual_intervention(containers), - containers=[c.name for c in containers], - service_name=containers[0].service_name, - service_version=containers[0].service_version, - ) - for node_id, containers in running_service.items() - ] - except ( - paramiko.AuthenticationException, - paramiko.SSHException, - TimeoutError, - ): - return [] - - -def _print_dynamic_instances( - instances: list[DynamicInstance], - environment: dict[str, str | None], - aws_region: str, -) -> None: - time_now = arrow.utcnow() - table = Table( - Column("Instance"), - Column( - "Running services", - footer="[red]Intervention detection might show false positive if in transient state, be careful and always double-check!![/red]", - ), - title=f"dynamic autoscaled instances: {aws_region}", - show_footer=True, - padding=(0, 0), - title_style=Style(color="red", encircle=True), - ) - for instance in track( - instances, description="Preparing dynamic autoscaled instances details..." 
- ): - service_table = "[i]n/a[/i]" - if instance.running_services: - service_table = Table( - "UserID", - "ProjectID", - "NodeID", - "ServiceName", - "ServiceVersion", - "Created Since", - "Need intervention", - expand=True, - padding=(0, 0), - ) - for service in instance.running_services: - service_table.add_row( - f"{service.user_id}", - service.project_id, - service.node_id, - service.service_name, - service.service_version, - _timedelta_formatting( - time_now - service.created_at, color_code=True - ), - f"{'[red]' if service.needs_manual_intervention else ''}{service.needs_manual_intervention}{'[/red]' if service.needs_manual_intervention else ''}", - ) - - table.add_row( - "\n".join( - [ - f"{_color_encode_with_state(instance.name, instance.ec2_instance)}", - f"ID: {instance.ec2_instance.instance_id}", - f"AMI: {instance.ec2_instance.image_id}", - f"AMI name: {instance.ec2_instance.image.name}", - f"Type: {instance.ec2_instance.instance_type}", - f"Up: {_timedelta_formatting(time_now - instance.ec2_instance.launch_time, color_code=True)}", - f"ExtIP: {instance.ec2_instance.public_ip_address}", - f"IntIP: {instance.ec2_instance.private_ip_address}", - f"/mnt/docker(free): {_color_encode_with_threshold(instance.disk_space.human_readable(), instance.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", - ] - ), - service_table, - ) - table.add_row( - "Graylog: ", - f"{_create_graylog_permalinks(environment, instance.ec2_instance)}", - end_section=True, - ) - print(table, flush=True) - - -def _get_worker_metrics(scheduler_info: dict[str, Any]) -> dict[str, Any]: - worker_metrics = {} - for worker_name, worker_data in scheduler_info.get("workers", {}).items(): - worker_metrics[worker_name] = { - "resources": worker_data["resources"], - "tasks": worker_data["metrics"].get("task_counts", {}), - } - return worker_metrics - - -def _color_encode_with_state(string: str, ec2_instance: Instance) -> str: - return ( - f"[green]{string}[/green]" - if ec2_instance.state["Name"] == "running" - else f"[yellow]{string}[/yellow]" - ) - - -DANGER = "[red]{}[/red]" - - -def _color_encode_with_threshold(string: str, value, threshold) -> str: - return string if value > threshold else DANGER.format(string) - - -def _print_computational_clusters( - clusters: list[ComputationalCluster], - environment: dict[str, str | None], - aws_region: str, -) -> None: - time_now = arrow.utcnow() - table = Table( - Column("Instance", justify="left", overflow="ellipsis", ratio=1), - Column("Computational details", overflow="fold", ratio=2), - title=f"computational clusters: {aws_region}", - padding=(0, 0), - title_style=Style(color="red", encircle=True), - expand=True, - ) - - for cluster in track( - clusters, "Collecting information about computational clusters..." 
- ): - cluster_worker_metrics = _get_worker_metrics(cluster.scheduler_info) - # first print primary machine info - table.add_row( - "\n".join( - [ - f"[bold]{_color_encode_with_state('Primary', cluster.primary.ec2_instance)}", - f"Name: {cluster.primary.name}", - f"ID: {cluster.primary.ec2_instance.id}", - f"AMI: {cluster.primary.ec2_instance.image_id}", - f"AMI name: {cluster.primary.ec2_instance.image.name}", - f"Type: {cluster.primary.ec2_instance.instance_type}", - f"Up: {_timedelta_formatting(time_now - cluster.primary.ec2_instance.launch_time, color_code=True)}", - f"ExtIP: {cluster.primary.ec2_instance.public_ip_address}", - f"IntIP: {cluster.primary.ec2_instance.private_ip_address}", - f"DaskSchedulerIP: {cluster.primary.dask_ip}", - f"UserID: {cluster.primary.user_id}", - f"WalletID: {cluster.primary.wallet_id}", - f"Heartbeat: {_timedelta_formatting(time_now - cluster.primary.last_heartbeat) if cluster.primary.last_heartbeat else 'n/a'}", - f"/mnt/docker(free): {_color_encode_with_threshold(cluster.primary.disk_space.human_readable(), cluster.primary.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", - ] - ), - "\n".join( - [ - f"Dask Scheduler UI: http://{cluster.primary.ec2_instance.public_ip_address}:8787", - f"Dask Scheduler TCP: tcp://{cluster.primary.ec2_instance.public_ip_address}:8786", - f"Graylog UI: {_create_graylog_permalinks(environment, cluster.primary.ec2_instance)}", - f"Prometheus UI: http://{cluster.primary.ec2_instance.public_ip_address}:9090", - f"tasks: {json.dumps(cluster.task_states_to_tasks, indent=2)}", - ] - ), - ) - - # now add the workers - for index, worker in enumerate(cluster.workers): - worker_dask_metrics = next( - ( - worker_metrics - for worker_name, worker_metrics in cluster_worker_metrics.items() - if worker.dask_ip in worker_name - ), - "no metrics???", - ) - worker_processing_jobs = [ - job_id - for worker_name, job_id in cluster.processing_jobs.items() - if worker.dask_ip in worker_name - ] - table.add_row() - table.add_row( - "\n".join( - [ - f"[italic]{_color_encode_with_state(f'Worker {index+1}', worker.ec2_instance)}[/italic]", - f"Name: {worker.name}", - f"ID: {worker.ec2_instance.id}", - f"AMI: {worker.ec2_instance.image_id}", - f"AMI name: {worker.ec2_instance.image.name}", - f"Type: {worker.ec2_instance.instance_type}", - f"Up: {_timedelta_formatting(time_now - worker.ec2_instance.launch_time, color_code=True)}", - f"ExtIP: {worker.ec2_instance.public_ip_address}", - f"IntIP: {worker.ec2_instance.private_ip_address}", - f"DaskWorkerIP: {worker.dask_ip}", - f"/mnt/docker(free): {_color_encode_with_threshold(worker.disk_space.human_readable(), worker.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", - "", - ] - ), - "\n".join( - [ - f"Graylog: {_create_graylog_permalinks(environment, worker.ec2_instance)}", - f"Dask metrics: {json.dumps(worker_dask_metrics, indent=2)}", - f"Running tasks: {worker_processing_jobs}", - ] - ), - ) - table.add_row(end_section=True) - print(table) - - -async def _fetch_instance_details( - instance: DynamicInstance, ssh_key_path: Path -) -> tuple[list[DynamicService] | BaseException, ByteSize | BaseException]: - # Run both SSH operations concurrently for this instance - running_services, disk_space = await asyncio.gather( - asyncio.get_event_loop().run_in_executor( - None, - _ssh_and_list_running_dyn_services, - instance.ec2_instance, - SSH_USER_NAME, - ssh_key_path, - ), - asyncio.get_event_loop().run_in_executor( - None, - _ssh_and_get_available_disk_space, - 
instance.ec2_instance, - SSH_USER_NAME, - ssh_key_path, - ), - return_exceptions=True, - ) - return running_services, disk_space - - -async def _analyze_dynamic_instances_running_services_concurrently( - dynamic_instances: list[DynamicInstance], - ssh_key_path: Path, - user_id: int | None, -) -> list[DynamicInstance]: - details = await asyncio.gather( - *( - _fetch_instance_details(instance, ssh_key_path) - for instance in dynamic_instances - ), - return_exceptions=True, - ) - - # Filter and update instances based on results and given criteria - return [ - replace( - instance, - running_services=instance_details[0], - disk_space=instance_details[1], - ) - for instance, instance_details in zip(dynamic_instances, details, strict=True) - if isinstance(instance_details, tuple) - and isinstance(instance_details[0], list) - and isinstance(instance_details[1], ByteSize) - and (user_id is None or any(s.user_id == user_id for s in instance_details[0])) - ] - - -async def _dask_list_tasks( - dask_client: distributed.Client, -) -> dict[TaskState, list[TaskId]]: - def _list_tasks( - dask_scheduler: distributed.Scheduler, - ) -> dict[TaskId, TaskState]: - # NOTE: this is ok and needed: this runs on the dask scheduler, so don't remove this import - - task_state_to_tasks = {} - for task in dask_scheduler.tasks.values(): - if task.state in task_state_to_tasks: - task_state_to_tasks[task.state].append(task.key) - else: - task_state_to_tasks[task.state] = [task.key] - - return dict(task_state_to_tasks) - - try: - list_of_tasks: dict[ - TaskState, list[TaskId] - ] = await dask_client.run_on_scheduler( - _list_tasks - ) # type: ignore - except TypeError: - print(f"ERROR while recoverring unrunnable tasks using {dask_client=}") - return list_of_tasks - - -@contextlib.asynccontextmanager -async def _dask_client(ip_address: str) -> AsyncGenerator[distributed.Client, None]: - security = distributed.Security() - assert state.deploy_config - dask_certificates = state.deploy_config / "assets" / "dask-certificates" - if dask_certificates.exists(): - security = distributed.Security( - tls_ca_file=f"{dask_certificates / 'dask-cert.pem'}", - tls_client_cert=f"{dask_certificates / 'dask-cert.pem'}", - tls_client_key=f"{dask_certificates / 'dask-key.pem'}", - require_encryption=True, - ) - try: - - async with distributed.Client( - f"tls://{ip_address}:8786", - security=security, - timeout="5", - asynchronous=True, - ) as client: - yield client - finally: - ... 
- - -async def _analyze_computational_instances( - computational_instances: list[ComputationalInstance], - ssh_key_path: Path | None, -) -> list[ComputationalCluster]: - - all_disk_spaces = [UNDEFINED_BYTESIZE] * len(computational_instances) - if ssh_key_path is not None: - all_disk_spaces = await asyncio.gather( - *( - asyncio.get_event_loop().run_in_executor( - None, - _ssh_and_get_available_disk_space, - instance.ec2_instance, - SSH_USER_NAME, - ssh_key_path, - ) - for instance in computational_instances - ), - return_exceptions=True, - ) - - all_dask_ips = await asyncio.gather( - *( - asyncio.get_event_loop().run_in_executor( - None, - _ssh_and_get_dask_ip, - instance.ec2_instance, - SSH_USER_NAME, - ssh_key_path, - ) - for instance in computational_instances - ), - return_exceptions=True, - ) - - computational_clusters = [] - for instance, disk_space, dask_ip in track( - zip(computational_instances, all_disk_spaces, all_dask_ips, strict=True), - description="Collecting computational clusters data...", - ): - if isinstance(disk_space, ByteSize): - instance.disk_space = disk_space - if isinstance(dask_ip, str): - instance.dask_ip = dask_ip - if instance.role is InstanceRole.manager: - scheduler_info = {} - datasets_on_cluster = () - processing_jobs = {} - all_tasks = {} - with contextlib.suppress(TimeoutError, OSError): - async with _dask_client( - instance.ec2_instance.public_ip_address - ) as client: - scheduler_info = client.scheduler_info() - datasets_on_cluster = await client.list_datasets() - processing_jobs = await client.processing() - all_tasks = await _dask_list_tasks(client) - - assert isinstance(datasets_on_cluster, tuple) - assert isinstance(processing_jobs, dict) - - computational_clusters.append( - ComputationalCluster( - primary=instance, - workers=[], - scheduler_info=scheduler_info, - datasets=datasets_on_cluster, - processing_jobs=processing_jobs, - task_states_to_tasks=all_tasks, - ) - ) - - for instance in computational_instances: - if instance.role is InstanceRole.worker: - # assign the worker to correct cluster - for cluster in computational_clusters: - if ( - cluster.primary.user_id == instance.user_id - and cluster.primary.wallet_id == instance.wallet_id - ): - cluster.workers.append(instance) - - return computational_clusters - - -async def _parse_computational_clusters( - instances: ServiceResourceInstancesCollection, - ssh_key_path: Path | None, - user_id: int | None, - wallet_id: int | None, -) -> list[ComputationalCluster]: - computational_instances = [ - comp_instance - for instance in track( - instances, description="Parsing computational instances..." 
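For context, `_dask_list_tasks` above relies on `distributed.Client.run_on_scheduler`, which ships a function to the scheduler process and injects the scheduler into any parameter literally named `dask_scheduler`. A minimal sketch of that mechanism (the counting function and names are illustrative, not part of this script):

```python
import distributed


def _count_task_states(dask_scheduler: distributed.Scheduler) -> dict[str, int]:
    # executes inside the scheduler process; the parameter name
    # `dask_scheduler` tells distributed to pass the Scheduler instance in
    counts: dict[str, int] = {}
    for task in dask_scheduler.tasks.values():
        counts[task.state] = counts.get(task.state, 0) + 1
    return counts


async def summarize_tasks(client: distributed.Client) -> dict[str, int]:
    # with an asynchronous client, run_on_scheduler is awaitable
    return await client.run_on_scheduler(_count_task_states)
```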
- ) - if ( - comp_instance := await asyncio.get_event_loop().run_in_executor( - None, _parse_computational, instance - ) - ) - and (user_id is None or comp_instance.user_id == user_id) - and (wallet_id is None or comp_instance.wallet_id == wallet_id) - ] - return await _analyze_computational_instances(computational_instances, ssh_key_path) - - -async def _parse_dynamic_instances( - instances: ServiceResourceInstancesCollection, - ssh_key_path: Path | None, - user_id: int | None, - wallet_id: int | None, -) -> list[DynamicInstance]: - dynamic_instances = [ - dyn_instance - for instance in track(instances, description="Parsing dynamic instances...") - if (dyn_instance := _parse_dynamic(instance)) - ] - if dynamic_instances and ssh_key_path: - dynamic_instances = ( - await _analyze_dynamic_instances_running_services_concurrently( - dynamic_instances, ssh_key_path, user_id - ) - ) - return dynamic_instances - - -def _list_running_ec2_instances( - ec2_resource: EC2ServiceResource, - key_name: str, - custom_tags: dict[str, str], - user_id: int | None, - wallet_id: int | None, -) -> ServiceResourceInstancesCollection: - # get all the running instances - - ec2_filters: list[FilterTypeDef] = [ - {"Name": "instance-state-name", "Values": ["running", "pending"]}, - {"Name": "key-name", "Values": [key_name]}, - ] - if custom_tags: - ec2_filters.extend( - [ - {"Name": f"tag:{key}", "Values": [f"{value}"]} - for key, value in custom_tags.items() - ] - ) - - if user_id: - ec2_filters.append({"Name": "tag:user_id", "Values": [f"{user_id}"]}) - if wallet_id: - ec2_filters.append({"Name": "tag:wallet_id", "Values": [f"{wallet_id}"]}) - - return ec2_resource.instances.filter(Filters=ec2_filters) - - -async def _list_dynamic_instances_from_ec2( - user_id: int | None, - wallet_id: int | None, -) -> ServiceResourceInstancesCollection: - assert state.environment["EC2_INSTANCES_KEY_NAME"] - custom_tags = {} - if state.environment["EC2_INSTANCES_CUSTOM_TAGS"]: - custom_tags = json.loads(state.environment["EC2_INSTANCES_CUSTOM_TAGS"]) - assert state.ec2_resource_autoscaling - return await asyncio.get_event_loop().run_in_executor( - None, - _list_running_ec2_instances, - state.ec2_resource_autoscaling, - state.environment["EC2_INSTANCES_KEY_NAME"], - custom_tags, - user_id, - wallet_id, - ) - - -async def _list_computational_instances_from_ec2( - user_id: int | None, - wallet_id: int | None, -) -> ServiceResourceInstancesCollection: - assert state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"] - assert state.environment["WORKERS_EC2_INSTANCES_KEY_NAME"] - assert ( - state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"] - == state.environment["WORKERS_EC2_INSTANCES_KEY_NAME"] - ), "key name is different on primary and workers. TIP: adjust this code now" - custom_tags = {} - if state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"]: - assert ( - state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"] - == state.environment["WORKERS_EC2_INSTANCES_CUSTOM_TAGS"] - ), "custom tags are different on primary and workers. 
TIP: adjust this code now" - custom_tags = json.loads(state.environment["PRIMARY_EC2_INSTANCES_CUSTOM_TAGS"]) - assert state.ec2_resource_clusters_keeper - return await asyncio.get_event_loop().run_in_executor( - None, - _list_running_ec2_instances, - state.ec2_resource_clusters_keeper, - state.environment["PRIMARY_EC2_INSTANCES_KEY_NAME"], - custom_tags, - user_id, - wallet_id, - ) - - -def _parse_environment(deploy_config: Path) -> dict[str, str | None]: - repo_config = deploy_config / "repo.config" - assert repo_config.exists() - environment = dotenv_values(repo_config) - if environment["AUTOSCALING_EC2_ACCESS_KEY_ID"] == "": - print( - "Terraform variables detected, looking for repo.config.frozen as alternative." - " TIP: you are responsible for them being up to date!!" - ) - repo_config = deploy_config / "repo.config.frozen" - assert repo_config.exists() - environment = dotenv_values(repo_config) - - if environment["AUTOSCALING_EC2_ACCESS_KEY_ID"] == "": - error_msg = ( - "Terraform is necessary in order to check into that deployment!\n" - f"install terraform (check README.md in {state.deploy_config} for instructions)" - "then run make repo.config.frozen, then re-run this code" - ) - print(error_msg) - raise typer.Abort(error_msg) - assert environment - return environment - - -@app.callback() -def main( - deploy_config: Annotated[ - Path, typer.Option(help="path to the deploy configuration") - ] -): - """Manages external clusters""" - - state.deploy_config = deploy_config.expanduser() - assert ( - deploy_config.is_dir() - ), "deploy-config argument is not pointing to a directory!" - - state.environment = _parse_environment(deploy_config) - - # connect to ec2s - state.ec2_resource_autoscaling = boto3.resource( - "ec2", - region_name=state.environment["AUTOSCALING_EC2_REGION_NAME"], - aws_access_key_id=state.environment["AUTOSCALING_EC2_ACCESS_KEY_ID"], - aws_secret_access_key=state.environment["AUTOSCALING_EC2_SECRET_ACCESS_KEY"], - ) - - state.ec2_resource_clusters_keeper = boto3.resource( - "ec2", - region_name=state.environment["CLUSTERS_KEEPER_EC2_REGION_NAME"], - aws_access_key_id=state.environment["CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID"], - aws_secret_access_key=state.environment[ - "CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY" - ], - ) - - assert state.environment["EC2_INSTANCES_KEY_NAME"] - state.dynamic_parser = parse.compile( - f"{state.environment['EC2_INSTANCES_NAME_PREFIX']}-{{key_name}}" - ) - if state.environment["CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX"]: - state.computational_parser = parse.compile( - f"{state.environment['CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX']}-{DEFAULT_COMPUTATIONAL_EC2_FORMAT}" - ) - - # locate ssh key path - for file_path in deploy_config.glob("**/*.pem"): - # very bad HACK - if "sim4life.io" in f"{file_path}" and "openssh" not in f"{file_path}": - continue - - if DEPLOY_SSH_KEY_PARSER.parse(f"{file_path.name}") is not None: - print( - f"will be using following ssh_key_path: {file_path}. " - "TIP: if wrong adapt the code or manually remove some of them." 
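`_list_running_ec2_instances` above pushes the filtering to the EC2 API instead of listing everything and filtering client-side; note that tag filters use the `tag:<key>` name form. A standalone sketch of the same query shape (region, key-pair name and tag values are placeholders):

```python
import boto3

ec2 = boto3.resource("ec2", region_name="us-east-1")  # placeholder region/credentials
instances = ec2.instances.filter(
    Filters=[
        {"Name": "instance-state-name", "Values": ["running", "pending"]},
        {"Name": "key-name", "Values": ["my-deploy-key"]},  # hypothetical key pair
        {"Name": "tag:user_id", "Values": ["42"]},  # tag filters use the tag: prefix
    ]
)
for instance in instances:
    print(instance.id, instance.instance_type, instance.launch_time)
```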
- ) - state.ssh_key_path = file_path - break - - -async def _summary(user_id: int | None, wallet_id: int | None) -> None: - # get all the running instances - assert state.ec2_resource_autoscaling - dynamic_instances = await _list_dynamic_instances_from_ec2(user_id, wallet_id) - dynamic_autoscaled_instances = await _parse_dynamic_instances( - dynamic_instances, state.ssh_key_path, user_id, wallet_id - ) - _print_dynamic_instances( - dynamic_autoscaled_instances, - state.environment, - state.ec2_resource_autoscaling.meta.client.meta.region_name, - ) - - assert state.ec2_resource_clusters_keeper - computational_instances = await _list_computational_instances_from_ec2( - user_id, wallet_id - ) - computational_clusters = await _parse_computational_clusters( - computational_instances, state.ssh_key_path, user_id, wallet_id - ) - _print_computational_clusters( - computational_clusters, - state.environment, - state.ec2_resource_clusters_keeper.meta.client.meta.region_name, - ) - - -@app.command() -def summary( - user_id: Annotated[int, typer.Option(help="filters by the user ID")] = 0, - wallet_id: Annotated[int, typer.Option(help="filters by the wallet ID")] = 0, -) -> None: - """Show a summary of the current situation of autoscaled EC2 instances. - - Gives a list of all the instances used for dynamic services, and optionally shows what runs in them. - Gives alist of all the instances used for computational services (e.g. primary + worker(s) instances) - - Arguments: - repo_config -- path that shall point to a repo.config type of file (see osparc-ops-deployment-configuration repository) - - """ - - asyncio.run(_summary(user_id or None, wallet_id or None)) - - -async def _abort_job_in_db(project_id: uuid.UUID, node_id: uuid.UUID) -> None: - async with contextlib.AsyncExitStack() as stack: - engine = await stack.enter_async_context(db_engine()) - db_connection = await stack.enter_async_context(engine.begin()) - - await db_connection.execute( - sa.text( - f"UPDATE comp_tasks SET state = 'ABORTED' WHERE project_id='{project_id}' AND node_id='{node_id}'" - ) - ) - print(f"set comp_tasks for {project_id=}/{node_id=} set to ABORTED") - - -async def _list_computational_tasks_from_db(user_id: int) -> list[ComputationalTask]: - async with contextlib.AsyncExitStack() as stack: - engine = await stack.enter_async_context(db_engine()) - db_connection = await stack.enter_async_context(engine.begin()) - - # Get the list of running project UUIDs with a subquery - subquery = ( - sa.select(sa.column("project_uuid")) - .select_from(sa.table("comp_runs")) - .where( - sa.and_( - sa.column("user_id") == user_id, - sa.cast(sa.column("result"), sa.VARCHAR) != "SUCCESS", - sa.cast(sa.column("result"), sa.VARCHAR) != "FAILED", - sa.cast(sa.column("result"), sa.VARCHAR) != "ABORTED", - ) - ) - ) - - # Now select comp_tasks rows where project_id is one of the project_uuids - query = ( - sa.select("*") - .select_from(sa.table("comp_tasks")) - .where( - sa.column("project_id").in_(subquery) - & (sa.cast(sa.column("state"), sa.VARCHAR) != "SUCCESS") - & (sa.cast(sa.column("state"), sa.VARCHAR) != "FAILED") - & (sa.cast(sa.column("state"), sa.VARCHAR) != "ABORTED") - ) - ) - - result = await db_connection.execute(query) - comp_tasks_list = result.fetchall() - return [ - TypeAdapter(ComputationalTask).validate_python( - { - "project_id": row.project_id, - "node_id": row.node_id, - "job_id": row.job_id, - "service_name": row.image["name"].split("/")[-1], - "service_version": row.image["tag"], - "state": row.state, - } - ) - for row 
in comp_tasks_list - ] - msg = "unable to access database!" - raise RuntimeError(msg) - - -def _print_computational_tasks( - user_id: int, - wallet_id: int, - tasks: list[tuple[ComputationalTask | None, DaskTask | None]], -) -> None: - table = Table( - "index", - "ProjectID", - "NodeID", - "ServiceName", - "ServiceVersion", - "State in DB", - "State in Dask cluster", - title=f"{len(tasks)} Tasks running for {user_id=}/{wallet_id=}", - padding=(0, 0), - title_style=Style(color="red", encircle=True), - ) - - for index, (db_task, dask_task) in enumerate(tasks): - table.add_row( - f"{index}", - ( - f"{db_task.project_id}" - if db_task - else "[red][bold]intervention needed[/bold][/red]" - ), - f"{db_task.node_id}" if db_task else "", - f"{db_task.service_name}" if db_task else "", - f"{db_task.service_version}" if db_task else "", - f"{db_task.state}" if db_task else "", - ( - dask_task.state - if dask_task - else "[orange]task not yet in cluster[/orange]" - ), - ) - - print(table) - - -async def _list_computational_clusters( - user_id: int, wallet_id: int -) -> list[ComputationalCluster]: - assert state.ec2_resource_clusters_keeper - computational_instances = await _list_computational_instances_from_ec2( - user_id, wallet_id - ) - return await _parse_computational_clusters( - computational_instances, state.ssh_key_path, user_id, wallet_id - ) - - -async def _trigger_job_cancellation_in_scheduler( - cluster: ComputationalCluster, task_id: TaskId -) -> None: - async with _dask_client( - cluster.primary.ec2_instance.public_ip_address - ) as dask_client: - task_future = distributed.Future(task_id) - cancel_event = distributed.Event( - name=TASK_CANCEL_EVENT_NAME_TEMPLATE.format(task_future.key), - client=dask_client, - ) - await cancel_event.set() - await task_future.cancel() - print(f"cancelled {task_id} in scheduler/workers") - - -async def _remove_job_from_scheduler( - cluster: ComputationalCluster, task_id: TaskId -) -> None: - async with _dask_client( - cluster.primary.ec2_instance.public_ip_address - ) as dask_client: - await dask_client.unpublish_dataset(task_id) - print(f"unpublished {task_id} from scheduler") - - -async def _cancel_jobs( # noqa: C901, PLR0912 - user_id: int, wallet_id: int, *, force: bool -) -> None: - # get the theory - computational_tasks = await _list_computational_tasks_from_db(user_id) - - # get the reality - computational_clusters = await _list_computational_clusters(user_id, wallet_id) - job_id_to_dask_state: dict[TaskId, TaskState] = {} - if computational_clusters: - assert ( - len(computational_clusters) == 1 - ), "too many clusters found! 
TIP: fix this code or something weird is playing out" - - the_cluster = computational_clusters[0] - print(f"{the_cluster.task_states_to_tasks=}") - - for job_state, job_ids in the_cluster.task_states_to_tasks.items(): - for job_id in job_ids: - job_id_to_dask_state[job_id] = job_state - - task_to_dask_job: list[tuple[ComputationalTask | None, DaskTask | None]] = [] - for task in computational_tasks: - dask_task = None - if task.job_id: - dask_task = DaskTask( - job_id=task.job_id, - state=job_id_to_dask_state.pop(task.job_id, None) or "unknown", - ) - task_to_dask_job.append((task, dask_task)) - # keep the jobs still in the cluster - for job_id, dask_state in job_id_to_dask_state.items(): - task_to_dask_job.append((None, DaskTask(job_id=job_id, state=dask_state))) - - if not task_to_dask_job: - print("[red]nothing found![/red]") - raise typer.Exit - - _print_computational_tasks(user_id, wallet_id, task_to_dask_job) - print(the_cluster.datasets) - try: - if response := typer.prompt( - "[yellow]Which dataset to cancel? all for all of them.[/yellow]", - default="none", - ): - if response == "none": - print("[yellow]not cancelling anything[/yellow]") - elif response == "all": - for comp_task, dask_task in task_to_dask_job: - if dask_task is not None and dask_task.state != "unknown": - await _trigger_job_cancellation_in_scheduler( - the_cluster, dask_task.job_id - ) - if comp_task is None: - # we need to clear it of the cluster - await _remove_job_from_scheduler( - the_cluster, dask_task.job_id - ) - if comp_task is not None and force: - await _abort_job_in_db(comp_task.project_id, comp_task.node_id) - - print("cancelled all tasks") - else: - selected_index = TypeAdapter(int).validate_python(response) - comp_task, dask_task = task_to_dask_job[selected_index] - if dask_task is not None and dask_task.state != "unknown": - await _trigger_job_cancellation_in_scheduler( - the_cluster, dask_task.job_id - ) - if comp_task is None: - # we need to clear it of the cluster - await _remove_job_from_scheduler(the_cluster, dask_task.job_id) - - if comp_task is not None and force: - await _abort_job_in_db(comp_task.project_id, comp_task.node_id) - - except ValidationError: - print("[yellow]wrong index, not cancelling anything[/yellow]") - - -@app.command() -def cancel_jobs( - user_id: Annotated[int, typer.Option(help="the user ID")], - wallet_id: Annotated[int, typer.Option(help="the wallet ID")], - *, - force: Annotated[ - bool, - typer.Option( - help="will also force the job to abort in the database (use only if job is in WAITING FOR CLUSTER/WAITING FOR RESOURCE)" - ), - ] = False, -) -> None: - """Cancel jobs from the cluster, this will rely on osparc platform to work properly - The director-v2 should receive the cancellation and abort the concerned pipelines in the next 15 seconds. - NOTE: This should be called prior to clearing jobs on the cluster. - - Keyword Arguments: - user_id -- the user ID - wallet_id -- the wallet ID - """ - asyncio.run(_cancel_jobs(user_id, wallet_id, force=force)) - - -async def _trigger_cluster_termination(user_id: int, wallet_id: int) -> None: - assert state.ec2_resource_clusters_keeper - computational_instances = await _list_computational_instances_from_ec2( - user_id, wallet_id - ) - computational_clusters = await _parse_computational_clusters( - computational_instances, state.ssh_key_path, user_id, wallet_id - ) - assert computational_clusters - assert ( - len(computational_clusters) == 1 - ), "too many clusters found! 
TIP: fix this code"
-
-    _print_computational_clusters(
-        computational_clusters,
-        state.environment,
-        state.ec2_resource_clusters_keeper.meta.client.meta.region_name,
-    )
-    if typer.confirm("Are you sure you want to trigger termination of that cluster?"):
-        the_cluster = computational_clusters[0]
-        new_heartbeat_tag: TagTypeDef = {
-            "Key": "last_heartbeat",
-            "Value": f"{arrow.utcnow().datetime - datetime.timedelta(hours=1)}",
-        }
-        the_cluster.primary.ec2_instance.create_tags(Tags=[new_heartbeat_tag])
-        print(
-            f"heartbeat tag on cluster of {user_id=}/{wallet_id=} changed, clusters-keeper will terminate that cluster soon."
-        )
-    else:
-        print("not deleting anything")
-
-
-@app.command()
-def trigger_cluster_termination(
-    user_id: Annotated[int, typer.Option(help="the user ID")],
-    wallet_id: Annotated[int, typer.Option(help="the wallet ID")],
-) -> None:
-    """this will set the Heartbeat tag on the primary machine to 1 hour, thus ensuring the
-    clusters-keeper will properly terminate that cluster.
-
-    Keyword Arguments:
-        user_id -- the user ID
-        wallet_id -- the wallet ID
-    """
-    asyncio.run(_trigger_cluster_termination(user_id, wallet_id))
-
-
-if __name__ == "__main__":
-    app()
diff --git a/scripts/maintenance/computational-clusters/pyproject.toml b/scripts/maintenance/computational-clusters/pyproject.toml
new file mode 100644
index 00000000000..e98f7f89049
--- /dev/null
+++ b/scripts/maintenance/computational-clusters/pyproject.toml
@@ -0,0 +1,37 @@
+[build-system]
+requires = ["setuptools>=61.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+dependencies = [
+    "arrow",
+    "aiocache",
+    "asyncpg",
+    "black",
+    "boto3",
+    # NOTE: these must be in sync with osparc
+    "cloudpickle",
+    "dask[distributed]",
+    "mypy_boto3_ec2",
+    "types-boto3",
+    "parse",
+    "paramiko",
+    "pydantic[email]",
+    "pylint",
+    "python-dotenv",
+    "typer",
+    "rich",
+    "sqlalchemy[asyncio]",
+    "sshtunnel",
+]
+name = "autoscaled-monitor"
+version = "1.0.0"
+authors = [
+    { name = "Sylvain Anderegg", email = "35365065+sanderegg@users.noreply.github.com" },
+]
+description = "Helper script for monitoring clusters"
+readme = "README.md"
+requires-python = ">=3.10"
+
+[project.scripts]
+autoscaled-monitor = "autoscaled_monitor.cli:app"
diff --git a/scripts/maintenance/computational-clusters/requirements.txt b/scripts/maintenance/computational-clusters/requirements.txt
deleted file mode 100644
index 7cd2f8d5bb2..00000000000
--- a/scripts/maintenance/computational-clusters/requirements.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-arrow
-asyncpg
-black
-boto3
-# NOTE: these must be in sync with osparc
-cloudpickle==2.2.1
-dask[distributed]==2023.3.2
-mypy_boto3_ec2
-types-boto3
-parse
-paramiko
-pydantic[email]
-pylint
-python-dotenv
-typer[all]
-sqlalchemy[asyncio]

From 5d4290a2fb817afafed585d8336ea80b830a4dc0 Mon Sep 17 00:00:00 2001
From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com>
Date: Tue, 4 Jun 2024 14:40:10 +0200
Subject: [PATCH 015/219] =?UTF-8?q?=F0=9F=94=A8=20New=20pytest-simcore=20`?=
 =?UTF-8?q?environment=5Fconfig`=20fixtures=20and=20`utils=5Fpostgres`=20h?=
 =?UTF-8?q?elpers=20(#5909)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../src/pytest_simcore/environment_configs.py | 105 +++++++++++++-----
 .../pytest_simcore/helpers/utils_postgres.py  |  47 +++++++-
 .../src/pytest_simcore/postgres_service.py    |  11 +-
 .../servicelib/fastapi/requests_decorators.py |  10 +-
 .../servicelib/fastapi/timing_middleware.py   |   7 +-
packages/settings-library/tests/test_email.py | 15 +++ .../api/module_setup.py | 2 +- services/payments/tests/conftest.py | 42 +------ .../tests/unit/test_db_payments_users_repo.py | 71 +++++------- .../unit/test_services_notifier_email.py | 4 +- 10 files changed, 178 insertions(+), 136 deletions(-) rename services/datcore-adapter/src/simcore_service_datcore_adapter/api/middleware_timing.py => packages/service-library/src/servicelib/fastapi/timing_middleware.py (76%) diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index 45dbc64ccb0..7317f18530b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -3,39 +3,14 @@ # pylint: disable=unused-variable +import re from pathlib import Path +from typing import Any import pytest from .helpers.typing_env import EnvVarsDict -from .helpers.utils_envs import delenvs_from_dict, load_dotenv, setenvs_from_dict - - -@pytest.fixture(scope="session") # MD: get this, I will mock it with my app environmnet -def env_devel_dict(env_devel_file: Path) -> EnvVarsDict: - assert env_devel_file.exists() - assert env_devel_file.name == ".env-devel" - return load_dotenv(env_devel_file, verbose=True, interpolate=True) - - -@pytest.fixture -def mock_env_devel_environment( - env_devel_dict: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - return setenvs_from_dict(monkeypatch, env_devel_dict) - - -@pytest.fixture -def all_env_devel_undefined( - monkeypatch: pytest.MonkeyPatch, env_devel_dict: EnvVarsDict -): - """Ensures that all env vars in .env-devel are undefined in the test environment - - NOTE: this is useful to have a clean starting point and avoid - the environment to influence your test. 
I found this situation
-    when some script was accidentaly injecting the entire .env-devel in the environment
-    """
-    delenvs_from_dict(monkeypatch, env_devel_dict, raising=False)
+from .helpers.utils_envs import load_dotenv, setenvs_from_dict
 
 
 def pytest_addoption(parser: pytest.Parser):
@@ -68,3 +43,77 @@ def external_envfile_dict(request: pytest.FixtureRequest) -> EnvVarsDict:
     envs = load_dotenv(envfile)
     return envs
+
+
+@pytest.fixture(scope="session")
+def env_devel_dict(env_devel_file: Path) -> EnvVarsDict:
+    assert env_devel_file.exists()
+    assert env_devel_file.name == ".env-devel"
+    return load_dotenv(env_devel_file, verbose=True, interpolate=True)
+
+
+@pytest.fixture
+def mock_env_devel_environment(
+    env_devel_dict: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+) -> EnvVarsDict:
+    return setenvs_from_dict(monkeypatch, {**env_devel_dict})
+
+
+#
+# ENVIRONMENT IN A SERVICE
+#
+
+
+@pytest.fixture(scope="session")
+def service_name(project_slug_dir: Path) -> str:
+    """
+    project_slug_dir MUST be defined on root's conftest.py
+    """
+    return project_slug_dir.name
+
+
+@pytest.fixture(scope="session")
+def services_docker_compose_dict(services_docker_compose_file: Path) -> EnvVarsDict:
+    # NOTE: By keeping import here, this library is ONLY required when the fixture is used
+    import yaml
+
+    content = yaml.safe_load(services_docker_compose_file.read_text())
+    assert "services" in content
+    return content
+
+
+@pytest.fixture
+def docker_compose_service_environment_dict(
+    services_docker_compose_dict: dict[str, Any],
+    env_devel_dict: EnvVarsDict,
+    service_name: str,
+    env_devel_file: Path,
+) -> EnvVarsDict:
+    """Returns env vars dict from the docker-compose `environment` section
+
+    - env_devel_dict in environment_configs plugin
+    - service_name needs to be defined
+    """
+    service = services_docker_compose_dict["services"][service_name]
+
+    def _substitute(key, value):
+        if m := re.match(r"\${([^{}:-]\w+)", value):
+            expected_env_var = m.group(1)
+            try:
+                # NOTE: if this raises, then the RHS env-vars in the docker-compose are
+                # not defined in the env-devel
+                if value := env_devel_dict[expected_env_var]:
+                    return key, value
+            except KeyError:
+                pytest.fail(
+                    f"{expected_env_var} is not defined in {env_devel_file} but used in docker-compose services[{service_name}].environment[{key}]"
+                )
+        return None
+
+    envs: EnvVarsDict = {}
+    for key, value in service.get("environment", {}).items():
+        if found := _substitute(key, value):
+            _, new_value = found
+            envs[key] = new_value
+
+    return envs
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py
index a54b02cfd1c..6587f9052fa 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py
@@ -1,14 +1,12 @@
-import logging
-from collections.abc import Iterator
-from contextlib import contextmanager
-from typing import TypedDict
+from collections.abc import AsyncIterator, Iterator
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, TypedDict
 
 import simcore_postgres_database.cli
 import sqlalchemy as sa
 from psycopg2 import OperationalError
 from simcore_postgres_database.models.base import metadata
-
-log = logging.getLogger(__name__)
+from sqlalchemy.ext.asyncio import AsyncEngine
 
 
 class PostgresTestConfig(TypedDict):
@@ -67,3 +65,40 @@ def is_postgres_responsive(url) -> bool:
     except OperationalError:
         return False
     return True
+
+
+async def _insert_and_get_row(
+    conn, table: sa.Table, values: dict[str, Any], pk_col: sa.Column, pk_value: Any
+):
+    result = await conn.execute(table.insert().values(**values).returning(pk_col))
+    row = result.first()
+
+    # NOTE: DO NOT USE row[pk_col] since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)
+    assert getattr(row, pk_col.name) == pk_value
+
+    result = await conn.execute(sa.select(table).where(pk_col == pk_value))
+    return result.first()
+
+
+@asynccontextmanager
+async def insert_and_get_row_lifespan(
+    sqlalchemy_async_engine: AsyncEngine,
+    *,
+    table: sa.Table,
+    values: dict[str, Any],
+    pk_col: sa.Column,
+    pk_value: Any,
+) -> AsyncIterator[dict[str, Any]]:
+    # insert & get
+    async with sqlalchemy_async_engine.begin() as conn:
+        row = await _insert_and_get_row(
+            conn, table=table, values=values, pk_col=pk_col, pk_value=pk_value
+        )
+
+    # NOTE: DO NOT USE dict(row) since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)
+    # pylint: disable=protected-access
+    yield row._asdict()
+
+    # delete row
+    async with sqlalchemy_async_engine.begin() as conn:
+        await conn.execute(table.delete().where(pk_col == pk_value))
diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
index fb144aed2be..a64e21d206c 100644
--- a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
+++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
@@ -12,6 +12,7 @@ import tenacity
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from pytest_simcore.helpers.utils_envs import setenvs_from_dict
+from sqlalchemy.ext.asyncio import AsyncEngine
 from tenacity.stop import stop_after_delay
 from tenacity.wait import wait_fixed
 
@@ -200,7 +201,7 @@ def postgres_db(
     yield postgres_engine
 
 
-@pytest.fixture()
+@pytest.fixture
 async def aiopg_engine(
     postgres_db: sa.engine.Engine,
 ) -> AsyncIterator:
@@ -216,10 +217,10 @@ async def aiopg_engine(
     await engine.wait_closed()
 
 
-@pytest.fixture()
+@pytest.fixture
 async def sqlalchemy_async_engine(
     postgres_db: sa.engine.Engine,
-) -> AsyncIterator:
+) -> AsyncIterator[AsyncEngine]:
     # NOTE: prevent having to import this if latest sqlalchemy not installed
     from sqlalchemy.ext.asyncio import create_async_engine
 
@@ -232,7 +233,7 @@ async def sqlalchemy_async_engine(
     await engine.dispose()
 
 
-@pytest.fixture()
+@pytest.fixture
 def postgres_env_vars_dict(postgres_dsn: PostgresTestConfig) -> EnvVarsDict:
     return {
         "POSTGRES_USER": postgres_dsn["user"],
@@ -244,7 +245,7 @@ def postgres_env_vars_dict(postgres_dsn: PostgresTestConfig) -> EnvVarsDict:
     }
 
 
-@pytest.fixture()
+@pytest.fixture
 def postgres_host_config(
     postgres_dsn: PostgresTestConfig,
     postgres_env_vars_dict: EnvVarsDict,
diff --git a/packages/service-library/src/servicelib/fastapi/requests_decorators.py b/packages/service-library/src/servicelib/fastapi/requests_decorators.py
index 4beee25bda2..ae5f1ea047c 100644
--- a/packages/service-library/src/servicelib/fastapi/requests_decorators.py
+++ b/packages/service-library/src/servicelib/fastapi/requests_decorators.py
@@ -22,13 +22,11 @@ def _validate_signature(handler: _HandlerWithRequestArg):
     try:
         p = next(iter(inspect.signature(handler).parameters.values()))
         if p.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD or p.annotation != Request:
-            raise TypeError(
-                f"Invalid handler {handler.__name__} signature: first parameter must be a Request, got {p.annotation}"
-            )
+            msg = f"Invalid handler {handler.__name__} signature: first parameter must be a Request, got {p.annotation}"
+            raise TypeError(msg)
     except StopIteration as e:
-        raise TypeError(
-            f"Invalid handler {handler.__name__} signature: first parameter must be a Request, got none"
-        ) from e
+        msg = f"Invalid handler {handler.__name__} signature: first parameter must be a Request, got none"
+        raise TypeError(msg) from e
 
 
 #
diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/middleware_timing.py b/packages/service-library/src/servicelib/fastapi/timing_middleware.py
similarity index 76%
rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/middleware_timing.py
rename to packages/service-library/src/servicelib/fastapi/timing_middleware.py
index 131f5fd3285..7fe5e283814 100644
--- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/middleware_timing.py
+++ b/packages/service-library/src/servicelib/fastapi/timing_middleware.py
@@ -1,14 +1,15 @@
-import time
 import logging
+import time
+
 from fastapi import Request
 
-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
 
 
 async def add_process_time_header(request: Request, call_next):
     start_time = time.time()
     response = await call_next(request)
     process_time = time.time() - start_time
-    logger.debug("time to process %.2fs", process_time)
+    _logger.debug("time to process %.2fs", process_time)
     response.headers["X-Process-Time"] = str(process_time)
     return response
diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py
index b24b5d3c256..449218857c0 100644
--- a/packages/settings-library/tests/test_email.py
+++ b/packages/settings-library/tests/test_email.py
@@ -7,9 +7,24 @@
 import pytest
 from pydantic import ValidationError
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from pytest_simcore.helpers.utils_envs import delenvs_from_dict
 from settings_library.email import EmailProtocol, SMTPSettings
 
 
+@pytest.fixture
+def all_env_devel_undefined(
+    monkeypatch: pytest.MonkeyPatch, env_devel_dict: EnvVarsDict
+) -> None:
+    """Ensures that all env vars in .env-devel are undefined in the test environment
+
+    NOTE: this is useful to have a clean starting point and avoid
+    the environment to influence your test. I found this situation
+    when some script was accidentally injecting the entire .env-devel in the environment
+    """
+    delenvs_from_dict(monkeypatch, env_devel_dict, raising=False)
+
+
 @pytest.mark.parametrize(
     "cfg",
     [
diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py
index c7cbc3e992e..d1c1d8e8410 100644
--- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py
+++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py
@@ -4,12 +4,12 @@
 from botocore.exceptions import ClientError
 from fastapi import APIRouter, FastAPI
 from fastapi.exceptions import HTTPException, RequestValidationError
+from servicelib.fastapi.timing_middleware import add_process_time_header
 
 from .._meta import API_VTAG
 from .errors.http_error import http_error_handler
 from .errors.pennsieve_error import botocore_exceptions_handler
 from .errors.validation_error import http422_error_handler
-from .middleware_timing import add_process_time_header
 from .routes import datasets, files, health, user
diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py
index f1f9102923e..26092690de9 100644
--- a/services/payments/tests/conftest.py
+++ b/services/payments/tests/conftest.py
@@ -4,12 +4,10 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
-import re
 from pathlib import Path
 
 import pytest
 import simcore_service_payments
-import yaml
 from faker import Faker
 from models_library.users import GroupID
 from pydantic import parse_obj_as
@@ -63,7 +61,7 @@ def external_envfile_dict(external_envfile_dict: EnvVarsDict) -> EnvVarsDict:
     return external_envfile_dict
 
 
-@pytest.fixture
+@pytest.fixture(scope="session")
 def env_devel_dict(
     env_devel_dict: EnvVarsDict, external_envfile_dict: EnvVarsDict
 ) -> EnvVarsDict:
@@ -72,51 +70,17 @@ def env_devel_dict(
     return env_devel_dict
 
 
-@pytest.fixture
-def docker_compose_service_payments_env_vars(
-    services_docker_compose_file: Path,
-    env_devel_dict: EnvVarsDict,
-) -> EnvVarsDict:
-    """env vars injected at the docker-compose"""
-
-    payments = yaml.safe_load(services_docker_compose_file.read_text())["services"][
-        "payments"
-    ]
-
-    def _substitute(key, value):
-        if m := re.match(r"\${([^{}:-]\w+)", value):
-            expected_env_var = m.group(1)
-            try:
-                # NOTE: if this raises, then the RHS env-vars in the docker-compose are
-                # not defined in the env-devel
-                if value := env_devel_dict[expected_env_var]:
-                    return key, value
-            except KeyError:
-                pytest.fail(
-                    f"{expected_env_var} is not defined in .env-devel but used in docker-compose services[{payments}].environment[{key}]"
-                )
-        return None
-
-    envs: EnvVarsDict = {}
-    for key, value in payments.get("environment", {}).items():
-        if found := _substitute(key, value):
-            _, new_value = found
-            envs[key] = new_value
-
-    return envs
-
-
 @pytest.fixture
 def app_environment(
     monkeypatch: pytest.MonkeyPatch,
-    docker_compose_service_payments_env_vars: EnvVarsDict,
+    docker_compose_service_environment_dict: EnvVarsDict,
    secret_key: str,
     faker: Faker,
 ) -> EnvVarsDict:
     return setenvs_from_dict(
         monkeypatch,
         {
-            **docker_compose_service_payments_env_vars,
+            **docker_compose_service_environment_dict,
             "PAYMENTS_ACCESS_TOKEN_SECRET_KEY": secret_key,
             "PAYMENTS_USERNAME": faker.user_name(),
             "PAYMENTS_PASSWORD": faker.password(),
diff --git a/services/payments/tests/unit/test_db_payments_users_repo.py
b/services/payments/tests/unit/test_db_payments_users_repo.py index cfd34cd707e..4db76e25992 100644 --- a/services/payments/tests/unit/test_db_payments_users_repo.py +++ b/services/payments/tests/unit/test_db_payments_users_repo.py @@ -9,11 +9,11 @@ from typing import Any import pytest -import sqlalchemy as sa from fastapi import FastAPI from models_library.users import GroupID, UserID from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.utils_postgres import insert_and_get_row_lifespan from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import users @@ -49,21 +49,6 @@ def app_environment( ) -async def _insert_and_get_row( - conn, table: sa.Table, values: dict[str, Any], pk_col: sa.Column, pk_value: Any -): - result = await conn.execute(table.insert().values(**values).returning(pk_col)) - row = result.first() - assert row[pk_col] == pk_value - - result = await conn.execute(sa.select(table).where(pk_col == pk_value)) - return result.first() - - -async def _delete_row(conn, table, pk_col: sa.Column, pk_value: Any): - await conn.execute(table.delete().where(pk_col == pk_value)) - - @pytest.fixture async def user( app: FastAPI, @@ -74,16 +59,14 @@ async def user( injects a user in db """ assert user_id == user["id"] - pk_args = users.c.id, user["id"] - - # NOTE: creation of primary group and setting `groupid`` is automatically triggered after creation of user by postgres - async with get_engine(app).begin() as conn: - row = await _insert_and_get_row(conn, users, user, *pk_args) - - yield dict(row) - - async with get_engine(app).begin() as conn: - await _delete_row(conn, users, *pk_args) + async with insert_and_get_row_lifespan( + get_engine(app), + table=users, + values=user, + pk_col=users.c.id, + pk_value=user["id"], + ) as row: + yield row @pytest.fixture @@ -101,15 +84,14 @@ async def product( """ # NOTE: this fixture ignores products' group-id but it is fine for this test context assert product["group_id"] is None - pk_args = products.c.name, product["name"] - - async with get_engine(app).begin() as conn: - row = await _insert_and_get_row(conn, products, product, *pk_args) - - yield dict(row) - - async with get_engine(app).begin() as conn: - await _delete_row(conn, products, *pk_args) + async with insert_and_get_row_lifespan( + get_engine(app), + table=products, + values=product, + pk_col=products.c.name, + pk_value=product["name"], + ) as row: + yield row @pytest.fixture @@ -119,17 +101,14 @@ async def successful_transaction( """ injects transaction in db """ - pk_args = payments_transactions.c.payment_id, successful_transaction["payment_id"] - - async with get_engine(app).begin() as conn: - row = await _insert_and_get_row( - conn, payments_transactions, successful_transaction, *pk_args - ) - - yield dict(row) - - async with get_engine(app).begin() as conn: - await _delete_row(conn, payments_transactions, *pk_args) + async with insert_and_get_row_lifespan( + get_engine(app), + table=payments_transactions, + values=successful_transaction, + pk_col=payments_transactions.c.payment_id, + pk_value=successful_transaction["payment_id"], + ) as row: + yield row async def test_payments_user_repo( diff --git a/services/payments/tests/unit/test_services_notifier_email.py b/services/payments/tests/unit/test_services_notifier_email.py index 
f27e23698d6..f7d30007df1 100644 --- a/services/payments/tests/unit/test_services_notifier_email.py +++ b/services/payments/tests/unit/test_services_notifier_email.py @@ -37,12 +37,12 @@ def app_environment( monkeypatch: pytest.MonkeyPatch, external_envfile_dict: EnvVarsDict, - docker_compose_service_payments_env_vars: EnvVarsDict, + docker_compose_service_environment_dict: EnvVarsDict, ) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, { - **docker_compose_service_payments_env_vars, + **docker_compose_service_environment_dict, **external_envfile_dict, }, ) From b305ed2d62610b1484afe863f7017fe35f708829 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 5 Jun 2024 10:59:54 +0200 Subject: [PATCH 016/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Major=20cleanup=20?= =?UTF-8?q?of=20catalog=20service=20(#5904)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env-devel | 1 + scripts/common-service.Makefile | 20 + services/api-server/tests/unit/conftest.py | 2 +- services/catalog/.env-devel | 28 - services/catalog/Makefile | 66 +- services/catalog/README.md | 41 - services/catalog/docker-compose-extra.yml | 79 - services/catalog/openapi.json | 1447 +++++++++-------- services/catalog/requirements/_test.in | 2 +- services/catalog/requirements/_test.txt | 3 + .../api/dependencies/database.py | 7 +- .../api/dependencies/director.py | 10 +- .../api/dependencies/services.py | 35 +- .../api/dependencies/user_groups.py | 19 +- .../src/simcore_service_catalog/api/rest.py | 61 + .../src/simcore_service_catalog/api/root.py | 38 - .../api/routes/health.py | 2 +- .../api/routes/services.py | 45 +- .../api/routes/services_access_rights.py | 14 +- .../api/routes/services_ports.py | 10 +- .../api/routes/services_resources.py | 28 +- .../api/routes/services_specifications.py | 26 +- .../src/simcore_service_catalog/cli.py | 56 +- .../core/application.py | 99 +- .../core/background_tasks.py | 38 +- .../simcore_service_catalog/core/errors.py | 0 .../simcore_service_catalog/core/events.py | 34 +- .../simcore_service_catalog/core/settings.py | 7 +- .../src/simcore_service_catalog/db/events.py | 2 +- .../db/repositories/groups.py | 11 +- .../db/repositories/projects.py | 6 +- .../db/repositories/services.py | 92 +- .../{api/errors => exceptions}/__init__.py | 0 .../{db/errors.py => exceptions/db_errors.py} | 0 .../exceptions/handlers/__init__.py | 24 + .../handlers/_http_error.py} | 8 +- .../handlers/_validation_error.py} | 4 +- .../src/simcore_service_catalog/main.py | 16 +- .../services/access_rights.py | 60 +- .../services/director.py | 36 +- .../simcore_service_catalog/utils/pools.py | 40 - .../utils/requests_decorators.py | 47 - .../utils/versioning.py | 8 +- .../catalog/tests/integration/test_none.py | 5 - services/catalog/tests/unit/conftest.py | 104 +- .../tests/unit/test_services_director.py | 61 +- .../unit/test_services_function_services.py | 11 +- .../unit/test_utils_service_resources.py | 18 +- .../catalog/tests/unit/with_dbs/conftest.py | 228 +-- .../test_api_routes_services__list.py | 96 +- .../test_api_routes_services_access_rights.py | 38 +- .../test_api_routes_services_ports.py | 23 +- .../test_api_routes_services_resources.py | 19 +- ...test_api_routes_services_specifications.py | 32 +- .../unit/with_dbs/test_db_repositories.py | 15 +- .../with_dbs/test_services_access_rights.py | 13 +- services/docker-compose.yml | 2 +- 57 files changed, 1589 insertions(+), 1648 deletions(-) delete mode 100644 
services/catalog/.env-devel delete mode 100644 services/catalog/docker-compose-extra.yml create mode 100644 services/catalog/src/simcore_service_catalog/api/rest.py delete mode 100644 services/catalog/src/simcore_service_catalog/api/root.py delete mode 100644 services/catalog/src/simcore_service_catalog/core/errors.py rename services/catalog/src/simcore_service_catalog/{api/errors => exceptions}/__init__.py (100%) rename services/catalog/src/simcore_service_catalog/{db/errors.py => exceptions/db_errors.py} (100%) create mode 100644 services/catalog/src/simcore_service_catalog/exceptions/handlers/__init__.py rename services/catalog/src/simcore_service_catalog/{api/errors/http_error.py => exceptions/handlers/_http_error.py} (80%) rename services/catalog/src/simcore_service_catalog/{api/errors/validation_error.py => exceptions/handlers/_validation_error.py} (90%) delete mode 100644 services/catalog/src/simcore_service_catalog/utils/pools.py delete mode 100644 services/catalog/src/simcore_service_catalog/utils/requests_decorators.py delete mode 100644 services/catalog/tests/integration/test_none.py diff --git a/.env-devel b/.env-devel index 6d9f9cf3f5a..648f3037638 100644 --- a/.env-devel +++ b/.env-devel @@ -30,6 +30,7 @@ AUTOSCALING_LOGLEVEL=WARNING AUTOSCALING_NODES_MONITORING=null AUTOSCALING_POLL_INTERVAL=10 +CATALOG_BACKGROUND_TASK_REST_TIME=60 CATALOG_DEV_FEATURES_ENABLED=0 CATALOG_HOST=catalog CATALOG_LOGLEVEL=WARNING diff --git a/scripts/common-service.Makefile b/scripts/common-service.Makefile index 6dc787b8b79..ee326807789 100644 --- a/scripts/common-service.Makefile +++ b/scripts/common-service.Makefile @@ -180,3 +180,23 @@ _run-test-ci: _check_venv_active .PHONY: _assert_target_defined _assert_target_defined: $(if $(target),,$(error unset argument 'target' is required)) + + + + +# +# OPENAPI SPECIFICATIONS ROUTINES +# + + +# specification of the used openapi-generator-cli (see also https://github.com/ITISFoundation/openapi-generator) +OPENAPI_GENERATOR_NAME := itisfoundation/openapi-generator-cli-openapi-generator-v4.2.3 +OPENAPI_GENERATOR_TAG := v0 +OPENAPI_GENERATOR_IMAGE := $(OPENAPI_GENERATOR_NAME):$(OPENAPI_GENERATOR_TAG) + +define validate_openapi_specs + # Validating OAS '$(1)' ... 
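The `validate_openapi_specs` macro wraps the dockerized openapi-generator CLI, and the catalog `openapi.json` recipe further below generates its input by dumping the FastAPI app's schema. For reference, a minimal sketch of what that `python3 -c` one-liner evaluates (the demo app here is illustrative, not the catalog service):

```python
import json

from fastapi import FastAPI

the_app = FastAPI(title="demo-service", version="0.5.0")  # stands in for the real app


@the_app.get("/v0/meta")
def get_meta() -> dict:
    return {"name": "demo-service", "version": "0.5.0"}


# equivalent of json.dumps(the_app.openapi(), indent=2) in the Makefile one-liner
print(json.dumps(the_app.openapi(), indent=2))
```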
+ docker run --rm \ + --volume "$(CURDIR):/local" \ + $(OPENAPI_GENERATOR_IMAGE) validate --input-spec /local/$(strip $(1)) +endef diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 25db6cb0ff0..7d7017fe1cd 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -408,7 +408,7 @@ def mocked_catalog_service_api_base( text="simcore_service_catalog.api.routes.health@2023-07-03T12:59:12.024551+00:00", ) respx_mock.get("/v0/meta").respond( - status.HTTP_200_OK, json=schemas["Meta"]["example"] + status.HTTP_200_OK, json=schemas["BaseMeta"]["example"] ) # SEE https://github.com/pcrespov/sandbox-python/blob/f650aad57aced304aac9d0ad56c00723d2274ad0/respx-lib/test_disable_mock.py diff --git a/services/catalog/.env-devel b/services/catalog/.env-devel deleted file mode 100644 index 4811e3a06fc..00000000000 --- a/services/catalog/.env-devel +++ /dev/null @@ -1,28 +0,0 @@ -# -# Environment variables used to configure this service -# - -CATALOG_DEV_FEATURES_ENABLED=1 - -LOG_LEVEL=DEBUG - -DIRECTOR_ENABLED=1 -DIRECTOR_HOST=localhost -DIRECTOR_PORT=28080 - -POSTGRES_USER=test -POSTGRES_PASSWORD=test -POSTGRES_DB=test -POSTGRES_HOST=localhost - -REGISTRY_AUTH=False -REGISTRY_PW=adminadmin -REGISTRY_SSL=False -REGISTRY_URL=172.17.0.1:5000 -REGISTRY_USER=admin -DIRECTOR_REGISTRY_CACHING=True -DIRECTOR_REGISTRY_CACHING_TTL=10 - -CATALOG_BACKGROUND_TASK_REST_TIME=60 - -SC_BOOT_MODE=debug diff --git a/services/catalog/Makefile b/services/catalog/Makefile index 13cfcbf3213..31b3a327698 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -5,69 +5,19 @@ include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile -.PHONY: requirements reqs -requirements reqs: ## (or reqs) compiles pip requirements (.in -> .txt) - @$(MAKE_C) requirements reqs - - - - -# DEVELOPMENT ######## - -.env: - cp .env-devel $@ - - -.PHONY: run-devel up-extra down down-extra - -up-extra: .env down-extra ## creates and starts adjacent services and migrates postgres database - # starting all adjacent services - docker compose -f docker-compose-extra.yml up --detach - sleep 1 - # discovering postgres services - @export $(shell grep -v '^#' .env | xargs) && sc-pg discover - @sc-pg info - # upgrading postgres database to HEAD version - @sc-pg upgrade - -down down-extra: docker-compose-extra.yml ## stops pg fixture - # cleanup discover cache - -@sc-pg clean - # stopping extra services - -@docker compose -f docker-compose-extra.yml down - # killing any process using port 8000 - -@fuser --kill --verbose --namespace tcp 8000 - - -run-devel: .env up-extra ## starts app with extra stack - # start app (within $<) in devel mode - uvicorn $(APP_PACKAGE_NAME).__main__:the_app \ - --reload --reload-dir $(SRC_DIR) \ - --port=8000 --host=0.0.0.0 - -run-prod: .env up-extra - # start app (within $<) in prod mode - $(APP_CLI_NAME) - - - -# BUILD ##################### - - -# specification of the used openapi-generator-cli (see also https://github.com/ITISFoundation/openapi-generator) -OPENAPI_GENERATOR_NAME := itisfoundation/openapi-generator-cli-openapi-generator-v4.2.3 -OPENAPI_GENERATOR_TAG := v0 -OPENAPI_GENERATOR_IMAGE := $(OPENAPI_GENERATOR_NAME):$(OPENAPI_GENERATOR_TAG) +.env-ignore: + $(APP_CLI_NAME) echo-dotenv > $@ .PHONY: openapi-specs openapi.json openapi-specs: openapi.json -openapi.json: .env - # generating openapi specs file +openapi.json: .env-ignore + # generating openapi specs file (need to have 
the environment set for this) + @set -o allexport; \ + source $<; \ + set +o allexport; \ python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ # validates OAS file: $@ - docker run --rm \ - --volume "$(CURDIR):/local" \ - $(OPENAPI_GENERATOR_IMAGE) validate --input-spec /local/$@ + $(call validate_openapi_specs,$@) diff --git a/services/catalog/README.md b/services/catalog/README.md index 3eb7770af7a..1495a3d9f44 100644 --- a/services/catalog/README.md +++ b/services/catalog/README.md @@ -1,44 +1,3 @@ # catalog -[![image-size]](https://microbadger.com/images/itisfoundation/catalog. "More on itisfoundation/catalog:staging-latest image") -[![image-badge]](https://microbadger.com/images/itisfoundation/catalog "More on Components Catalog Service image in registry") -[![image-version]](https://microbadger.com/images/itisfoundation/catalog "More on Components Catalog Service image in registry") -[![image-commit]](https://microbadger.com/images/itisfoundation/catalog "More on Components Catalog Service image in registry") - Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc) - -## Development - -Typical development workflow: - -```cmd -make devenv -source .venv/bin/activate - -cd services/api-service -make install-dev -``` - -Then -```cmd -make run-devel -``` -will start the service in development-mode together with a postgres db initialized with test data. The API can be query using -- http://127.0.0.1:8000/api/docs: swagger-UI API doc - - -Finally -```cmd -make tests -make build-devel -make build -``` - - - - -[image-size]:https://img.shields.io/microbadger/image-size/itisfoundation/catalog./staging-latest.svg?label=catalog.&style=flat -[image-badge]:https://images.microbadger.com/badges/image/itisfoundation/catalog.svg -[image-version]:https://images.microbadger.com/badges/version/itisfoundation/catalog.svg -[image-commit]:https://images.microbadger.com/badges/commit/itisfoundation/catalog.svg - diff --git a/services/catalog/docker-compose-extra.yml b/services/catalog/docker-compose-extra.yml deleted file mode 100644 index a3a5912b910..00000000000 --- a/services/catalog/docker-compose-extra.yml +++ /dev/null @@ -1,79 +0,0 @@ -# -# Includes all adjancent services for testing/development purposes -# -version: "3.8" -services: - postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce - init: true - environment: - - POSTGRES_USER=${POSTGRES_USER:-test} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-test} - - POSTGRES_DB=${POSTGRES_PASSWORD:-test} - - POSTGRES_HOST=${POSTGRES_HOST:-localhost} - - POSTGRES_PORT=${POSTGRES_PORT:-5432} - ports: - - "5432:5432" - # https://www.postgresql.org/docs/10/runtime-config-logging.html#GUC-LOG-STATEMENT - command: - [ - "postgres", - "-c", - "log_connections=true", - "-c", - "log_disconnections=true", - "-c", - "log_duration=true", - "-c", - "log_line_prefix=[%p] [%a] [%c] [%x] " - ] - adminer: - image: adminer - init: true - ports: - - 18080:8080 - depends_on: - - postgres - director: - image: local/director:production - init: true - environment: - - REGISTRY_URL=${REGISTRY_URL} - - REGISTRY_AUTH=${REGISTRY_AUTH} - - REGISTRY_USER=${REGISTRY_USER} - - REGISTRY_PW=${REGISTRY_PW} - - REGISTRY_SSL=${REGISTRY_SSL} - - DIRECTOR_REGISTRY_CACHING=${DIRECTOR_REGISTRY_CACHING} - - DIRECTOR_REGISTRY_CACHING_TTL=${DIRECTOR_REGISTRY_CACHING_TTL} - - 
DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} - - DIRECTOR_SELF_SIGNED_SSL_SECRET_ID=${DIRECTOR_SELF_SIGNED_SSL_SECRET_ID} - - DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME=${DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME} - - DIRECTOR_SELF_SIGNED_SSL_FILENAME=${DIRECTOR_SELF_SIGNED_SSL_FILENAME} - - POSTGRES_ENDPOINT=${POSTGRES_ENDPOINT} - - POSTGRES_USER=${POSTGRES_USER} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - - POSTGRES_DB=${POSTGRES_DB} - - POSTGRES_HOST=${POSTGRES_HOST} - - POSTGRES_PORT=${POSTGRES_PORT} - - S3_ENDPOINT=${S3_ENDPOINT} - - S3_ACCESS_KEY=${S3_ACCESS_KEY} - - S3_SECRET_KEY=${S3_SECRET_KEY} - - S3_BUCKET_NAME=${S3_BUCKET_NAME} - - STORAGE_ENDPOINT=${STORAGE_ENDPOINT} - - EXTRA_HOSTS_SUFFIX=undefined - - SIMCORE_SERVICES_NETWORK_NAME=interactive_services_subnet - - MONITORING_ENABLED=${MONITORING_ENABLED:-True} - - TRACING_ENABLED=${TRACING_ENABLED:-True} - - TRACING_ZIPKIN_ENDPOINT=${TRACING_ZIPKIN_ENDPOINT:-http://jaeger:9411} - - TRAEFIK_SIMCORE_ZONE=${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} - - LOGLEVEL=${LOG_LEVEL:-WARNING} - - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore} - volumes: - - "/var/run/docker.sock:/var/run/docker.sock" - ports: - - "28080:8080" - registry: - image: registry:2 - init: true - ports: - - "5000:5000" diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json index 8e9a2d8034b..6d66c5918c7 100644 --- a/services/catalog/openapi.json +++ b/services/catalog/openapi.json @@ -1,11 +1,46 @@ { - "openapi": "3.0.2", + "openapi": "3.1.0", "info": { "title": "simcore-service-catalog", - "description": " Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)", - "version": "0.4.0" + "description": "Manages and maintains a catalog of all published components (e.g. 
macro-algorithms, scripts, etc)", + "version": "0.5.0" }, "paths": { + "/": { + "get": { + "summary": "Check Service Health", + "operationId": "check_service_health__get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + } + } + } + }, + "/v0/": { + "get": { + "tags": [ + "diagnostics" + ], + "summary": "Check Service Health", + "operationId": "check_service_health_v0__get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + } + } + } + }, "/v0/meta": { "get": { "tags": [ @@ -19,7 +54,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Meta" + "$ref": "#/components/schemas/BaseMeta" } } } @@ -38,9 +73,9 @@ { "required": true, "schema": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" @@ -48,9 +83,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -59,9 +94,9 @@ "description": "if passed, and that user has custom resources, they will be merged with default resources and returned.", "required": false, "schema": { - "title": "User Id", - "exclusiveMinimum": true, "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", "description": "if passed, and that user has custom resources, they will be merged with default resources and returned.", "minimum": 0 }, @@ -75,8 +110,11 @@ "content": { "application/json": { "schema": { - "title": "Response Get Service Resources V0 Services Service Key Service Version Resources Get", - "type": "object" + "additionalProperties": { + "$ref": "#/components/schemas/ImageResources" + }, + "type": "object", + "title": "Response Get Service Resources V0 Services Service Key Service Version Resources Get" } } } @@ -105,9 +143,9 @@ { "required": true, "schema": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" @@ -115,9 +153,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -125,9 +163,9 @@ { "required": true, "schema": { - "title": "User Id", - "exclusiveMinimum": true, "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", "minimum": 0 }, "name": "user_id", @@ -137,8 +175,8 @@ "description": "if True only the version specs will be retrieved, if False the latest version will be used instead", "required": false, "schema": { - "title": "Strict", "type": "boolean", + "title": "Strict", "description": "if True only the version specs will be retrieved, if False the latest version will be used instead", "default": false }, @@ -182,9 +220,9 @@ { "required": true, "schema": { - 
"title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" @@ -192,9 +230,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -202,8 +240,8 @@ { "required": true, "schema": { - "title": "User Id", - "type": "integer" + "type": "integer", + "title": "User Id" }, "name": "user_id", "in": "query" @@ -211,8 +249,8 @@ { "required": false, "schema": { - "title": "X-Simcore-Products-Name", - "type": "string" + "type": "string", + "title": "X-Simcore-Products-Name" }, "name": "x-simcore-products-name", "in": "header" @@ -224,11 +262,11 @@ "content": { "application/json": { "schema": { - "title": "Response List Service Ports V0 Services Service Key Service Version Ports Get", - "type": "array", "items": { "$ref": "#/components/schemas/ServicePortGet" - } + }, + "type": "array", + "title": "Response List Service Ports V0 Services Service Key Service Version Ports Get" } } } @@ -258,9 +296,9 @@ { "required": true, "schema": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" @@ -268,9 +306,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -278,8 +316,8 @@ { "required": true, "schema": { - "title": "User Id", - "type": "integer" + "type": "integer", + "title": "User Id" }, "name": "user_id", "in": "query" @@ -287,8 +325,8 @@ { "required": true, "schema": { - "title": "X-Simcore-Products-Name", - "type": "string" + "type": "string", + "title": "X-Simcore-Products-Name" }, "name": "x-simcore-products-name", "in": "header" @@ -329,9 +367,9 @@ { "required": true, "schema": { - "title": "User Id", - "exclusiveMinimum": true, "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", "minimum": 0 }, "name": "user_id", @@ -340,8 +378,8 @@ { "required": false, "schema": { - "title": "Details", "type": "boolean", + "title": "Details", "default": true }, "name": "details", @@ -350,8 +388,8 @@ { "required": true, "schema": { - "title": "X-Simcore-Products-Name", - "type": "string" + "type": "string", + "title": "X-Simcore-Products-Name" }, "name": "x-simcore-products-name", "in": "header" @@ -363,11 +401,11 @@ "content": { "application/json": { "schema": { - "title": "Response List Services V0 Services Get", - "type": "array", "items": { "$ref": "#/components/schemas/ServiceGet" - } + }, + "type": "array", + "title": "Response List Services V0 Services Get" } } } @@ -396,9 +434,9 @@ { "required": true, "schema": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" 
@@ -406,9 +444,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -416,8 +454,8 @@ { "required": true, "schema": { - "title": "User Id", - "type": "integer" + "type": "integer", + "title": "User Id" }, "name": "user_id", "in": "query" @@ -425,8 +463,8 @@ { "required": false, "schema": { - "title": "X-Simcore-Products-Name", - "type": "string" + "type": "string", + "title": "X-Simcore-Products-Name" }, "name": "x-simcore-products-name", "in": "header" @@ -465,9 +503,9 @@ { "required": true, "schema": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "name": "service_key", "in": "path" @@ -475,9 +513,9 @@ { "required": true, "schema": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "name": "service_version", "in": "path" @@ -485,8 +523,8 @@ { "required": true, "schema": { - "title": "User Id", - "type": "integer" + "type": "integer", + "title": "User Id" }, "name": "user_id", "in": "query" @@ -494,8 +532,8 @@ { "required": false, "schema": { - "title": "X-Simcore-Products-Name", - "type": "string" + "type": "string", + "title": "X-Simcore-Products-Name" }, "name": "x-simcore-products-name", "in": "header" @@ -539,69 +577,103 @@ "components": { "schemas": { "Author": { - "title": "Author", - "required": [ - "name", - "email" - ], - "type": "object", "properties": { "name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name of the author", "example": "Jim Knopf" }, "email": { - "title": "Email", "type": "string", - "description": "Email address", - "format": "email" + "format": "email", + "title": "Email", + "description": "Email address" }, "affiliation": { - "title": "Affiliation", "type": "string", + "title": "Affiliation", "description": "Affiliation of the author" } }, - "additionalProperties": false - }, - "Badge": { - "title": "Badge", + "additionalProperties": false, + "type": "object", "required": [ "name", - "image", - "url" + "email" ], - "type": "object", + "title": "Author" + }, + "Badge": { "properties": { "name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name of the subject" }, "image": { - "title": "Image", + "type": "string", "maxLength": 2083, "minLength": 1, - "type": "string", - "description": "Url to the badge", - "format": "uri" + "format": "uri", + "title": "Image", + "description": "Url to the badge" }, "url": { - "title": "Url", + "type": "string", "maxLength": 2083, "minLength": 1, + "format": "uri", + "title": "Url", + "description": "Link to the status" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "name", + "image", + "url" + ], + "title": "Badge" + }, + "BaseMeta": { + "properties": { + "name": { + "type": "string", + "title": "Name" + }, + "version": { "type": "string", - "description": "Link to the status", - "format": "uri" + "pattern": 
"^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Version" + }, + "released": { + "additionalProperties": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$" + }, + "type": "object", + "title": "Released", + "description": "Maps every route's path tag with a released version" } }, - "additionalProperties": false + "type": "object", + "required": [ + "name", + "version" + ], + "title": "BaseMeta", + "example": { + "name": "simcore_service_foo", + "version": "2.4.45", + "released": { + "v1": "1.3.4", + "v2": "2.4.45" + } + } }, "BindOptions": { - "title": "BindOptions", - "type": "object", "properties": { "Propagation": { "allOf": [ @@ -612,253 +684,253 @@ "description": "A propagation mode with the value `[r]private`, `[r]shared`, or `[r]slave`." }, "NonRecursive": { - "title": "Nonrecursive", "type": "boolean", + "title": "Nonrecursive", "description": "Disable recursive bind mount.", "default": false } }, + "type": "object", + "title": "BindOptions", "description": "Optional configuration for the `bind` type." }, "BootChoice": { - "title": "BootChoice", - "required": [ - "label", - "description" - ], - "type": "object", "properties": { "label": { - "title": "Label", - "type": "string" + "type": "string", + "title": "Label" }, "description": { - "title": "Description", - "type": "string" + "type": "string", + "title": "Description" } - } + }, + "type": "object", + "required": [ + "label", + "description" + ], + "title": "BootChoice" }, "BootMode": { - "title": "BootMode", + "type": "string", "enum": [ "CPU", "GPU", "MPI" ], - "type": "string", + "title": "BootMode", "description": "An enumeration." }, "BootOption": { - "title": "BootOption", - "required": [ - "label", - "description", - "default", - "items" - ], - "type": "object", "properties": { "label": { - "title": "Label", - "type": "string" + "type": "string", + "title": "Label" }, "description": { - "title": "Description", - "type": "string" + "type": "string", + "title": "Description" }, "default": { - "title": "Default", - "type": "string" + "type": "string", + "title": "Default" }, "items": { - "title": "Items", - "type": "object", "additionalProperties": { "$ref": "#/components/schemas/BootChoice" - } + }, + "type": "object", + "title": "Items" } - } + }, + "type": "object", + "required": [ + "label", + "description", + "default", + "items" + ], + "title": "BootOption" }, "Condition": { - "title": "Condition", + "type": "string", "enum": [ "none", "on-failure", "any" ], - "type": "string", + "title": "Condition", "description": "Condition for restart." }, "Config1": { - "title": "Config1", - "type": "object", "properties": { "File": { - "title": "File", "allOf": [ { "$ref": "#/components/schemas/File1" } ], + "title": "File", "description": "File represents a specific target that is backed by a file.\n\n
\n\n> **Note**: `Configs.File` and `Configs.Runtime` are mutually exclusive\n" }, "Runtime": { - "title": "Runtime", "type": "object", + "title": "Runtime", "description": "Runtime represents a target that is not mounted into the\ncontainer but is used by the task\n\n
\n\n> **Note**: `Configs.File` and `Configs.Runtime` are mutually\n> exclusive\n" }, "ConfigID": { - "title": "Configid", "type": "string", + "title": "Configid", "description": "ConfigID represents the ID of the specific config that we're\nreferencing.\n" }, "ConfigName": { - "title": "Configname", "type": "string", + "title": "Configname", "description": "ConfigName is the name of the config that this references,\nbut this is just provided for lookup/display purposes. The\nconfig in the reference will be identified by its ID.\n" } - } + }, + "type": "object", + "title": "Config1" }, "ContainerSpec": { - "title": "ContainerSpec", - "type": "object", "properties": { "Image": { - "title": "Image", "type": "string", + "title": "Image", "description": "The image name to use for the container" }, "Labels": { - "title": "Labels", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Labels", "description": "User-defined key/value data." }, "Command": { - "title": "Command", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Command", "description": "The command to be run in the image." }, "Args": { - "title": "Args", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Args", "description": "Arguments to the command." }, "Hostname": { - "title": "Hostname", "type": "string", + "title": "Hostname", "description": "The hostname to use for the container, as a valid\n[RFC 1123](https://tools.ietf.org/html/rfc1123) hostname.\n" }, "Env": { - "title": "Env", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Env", "description": "A list of environment variables in the form `VAR=value`.\n" }, "Dir": { - "title": "Dir", "type": "string", + "title": "Dir", "description": "The working directory for commands to run in." }, "User": { - "title": "User", "type": "string", + "title": "User", "description": "The user inside the container." }, "Groups": { - "title": "Groups", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Groups", "description": "A list of additional groups that the container process will run as.\n" }, "Privileges": { - "title": "Privileges", "allOf": [ { "$ref": "#/components/schemas/Privileges" } ], + "title": "Privileges", "description": "Security options for the container" }, "TTY": { - "title": "Tty", "type": "boolean", + "title": "Tty", "description": "Whether a pseudo-TTY should be allocated." }, "OpenStdin": { - "title": "Openstdin", "type": "boolean", + "title": "Openstdin", "description": "Open `stdin`" }, "ReadOnly": { - "title": "Readonly", "type": "boolean", + "title": "Readonly", "description": "Mount the container's root filesystem as read only." }, "Mounts": { - "title": "Mounts", - "type": "array", "items": { "$ref": "#/components/schemas/Mount" }, + "type": "array", + "title": "Mounts", "description": "Specification for mounts to be added to containers created as part\nof the service.\n" }, "StopSignal": { - "title": "Stopsignal", "type": "string", + "title": "Stopsignal", "description": "Signal to stop the container." 
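From this point on, most hunks in the vendored Docker swarm schemas (Config1, ContainerSpec, and friends) are the same mechanical reordering: "type" before "title", "required" moved after "properties". The field definitions themselves are untouched. For orientation, an illustrative payload that fits the ContainerSpec schema above; every value here is invented:

    container_spec = {
        "Image": "registry:2",        # ContainerSpec.Image: image name for the container
        "Env": ["LOGLEVEL=WARNING"],  # list of VAR=value strings
        "Init": True,                 # run an init process that forwards signals
        "ReadOnly": False,            # keep the root filesystem writable
    }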
}, "StopGracePeriod": { - "title": "Stopgraceperiod", "type": "integer", + "title": "Stopgraceperiod", "description": "Amount of time to wait for the container to terminate before\nforcefully killing it.\n" }, "HealthCheck": { "$ref": "#/components/schemas/HealthConfig" }, "Hosts": { - "title": "Hosts", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Hosts", "description": "A list of hostname/IP mappings to add to the container's `hosts`\nfile. The format of extra hosts is specified in the\n[hosts(5)](http://man7.org/linux/man-pages/man5/hosts.5.html)\nman page:\n\n IP_address canonical_hostname [aliases...]\n" }, "DNSConfig": { - "title": "Dnsconfig", "allOf": [ { "$ref": "#/components/schemas/DNSConfig" } ], + "title": "Dnsconfig", "description": "Specification for DNS related configurations in resolver configuration\nfile (`resolv.conf`).\n" }, "Secrets": { - "title": "Secrets", - "type": "array", "items": { "$ref": "#/components/schemas/Secret" }, + "type": "array", + "title": "Secrets", "description": "Secrets contains references to zero or more secrets that will be\nexposed to the service.\n" }, "Configs": { - "title": "Configs", - "type": "array", "items": { "$ref": "#/components/schemas/Config1" }, + "type": "array", + "title": "Configs", "description": "Configs contains references to zero or more configs that will be\nexposed to the service.\n" }, "Isolation": { @@ -870,24 +942,24 @@ "description": "Isolation technology of the containers running the service.\n(Windows only)\n" }, "Init": { - "title": "Init", "type": "boolean", + "title": "Init", "description": "Run an init inside the container that forwards signals and reaps\nprocesses. This field is omitted if empty, and the default (as\nconfigured on the daemon) is used.\n" }, "Sysctls": { - "title": "Sysctls", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Sysctls", "description": "Set kernel namedspaced parameters (sysctls) in the container.\nThe Sysctls option on services accepts the same sysctls as the\nare supported on containers. Note that while the same sysctls are\nsupported, no guarantees or checks are made about their\nsuitability for a clustered environment, and it's up to the user\nto determine whether a given sysctl will work properly in a\nService.\n" }, "CapabilityAdd": { - "title": "Capabilityadd", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Capabilityadd", "description": "A list of kernel capabilities to add to the default set\nfor the container.\n", "example": [ "CAP_NET_RAW", @@ -897,135 +969,135 @@ ] }, "CapabilityDrop": { - "title": "Capabilitydrop", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Capabilitydrop", "description": "A list of kernel capabilities to drop from the default set\nfor the container.\n", "example": [ "CAP_NET_RAW" ] }, "Ulimits": { - "title": "Ulimits", - "type": "array", "items": { "$ref": "#/components/schemas/Ulimit1" }, + "type": "array", + "title": "Ulimits", "description": "A list of resource limits to set in the container. For example: `{\"Name\": \"nofile\", \"Soft\": 1024, \"Hard\": 2048}`\"\n" } }, + "type": "object", + "title": "ContainerSpec", "description": " Container spec for the service.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`." }, "CredentialSpec": { - "title": "CredentialSpec", - "type": "object", "properties": { "Config": { - "title": "Config", "type": "string", + "title": "Config", "description": "Load credential spec from a Swarm Config with the given ID.\nThe specified config must also be present in the Configs\nfield with the Runtime property set.\n\n
\n\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n", "example": "0bt9dmxjvjiqermk6xrop3ekq" }, "File": { - "title": "File", "type": "string", + "title": "File", "description": "Load credential spec from this file. The file is read by\nthe daemon, and must be present in the `CredentialSpecs`\nsubdirectory in the docker data directory, which defaults\nto `C:\\ProgramData\\Docker\\` on Windows.\n\nFor example, specifying `spec.json` loads\n`C:\\ProgramData\\Docker\\CredentialSpecs\\spec.json`.\n\n
\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n", "example": "spec.json" }, "Registry": { - "title": "Registry", "type": "string", + "title": "Registry", "description": "Load credential spec from this value in the Windows\nregistry. The specified registry value must be located in:\n\n`HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Virtualization\\Containers\\CredentialSpecs`\n\n
\n\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n" } }, + "type": "object", + "title": "CredentialSpec", "description": "CredentialSpec for managed service account (Windows only)" }, "DNSConfig": { - "title": "DNSConfig", - "type": "object", "properties": { "Nameservers": { - "title": "Nameservers", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Nameservers", "description": "The IP addresses of the name servers." }, "Search": { - "title": "Search", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Search", "description": "A search list for host-name lookup." }, "Options": { - "title": "Options", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Options", "description": "A list of internal resolver variables to be modified (e.g.,\n`debug`, `ndots:3`, etc.).\n" } }, + "type": "object", + "title": "DNSConfig", "description": " Specification for DNS related configurations in resolver configuration\nfile (`resolv.conf`)." }, "DiscreteResourceSpec": { - "title": "DiscreteResourceSpec", - "type": "object", "properties": { "Kind": { - "title": "Kind", - "type": "string" + "type": "string", + "title": "Kind" }, "Value": { - "title": "Value", - "type": "integer" + "type": "integer", + "title": "Value" } - } + }, + "type": "object", + "title": "DiscreteResourceSpec" }, "DriverConfig": { - "title": "DriverConfig", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name of the driver to use to create the volume." }, "Options": { - "title": "Options", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Options", "description": "key/value map of driver specific options." } }, + "type": "object", + "title": "DriverConfig", "description": "Map of driver specific options" }, "EndpointPortConfig": { - "title": "EndpointPortConfig", - "type": "object", "properties": { "Name": { - "title": "Name", - "type": "string" + "type": "string", + "title": "Name" }, "Protocol": { "$ref": "#/components/schemas/Type" }, "TargetPort": { - "title": "Targetport", "type": "integer", + "title": "Targetport", "description": "The port inside the container." }, "PublishedPort": { - "title": "Publishedport", "type": "integer", + "title": "Publishedport", "description": "The port on the swarm hosts." }, "PublishMode": { @@ -1038,11 +1110,11 @@ "default": "ingress", "example": "ingress" } - } + }, + "type": "object", + "title": "EndpointPortConfig" }, "EndpointSpec": { - "title": "EndpointSpec", - "type": "object", "properties": { "Mode": { "allOf": [ @@ -1054,92 +1126,92 @@ "default": "vip" }, "Ports": { - "title": "Ports", - "type": "array", "items": { "$ref": "#/components/schemas/EndpointPortConfig" }, + "type": "array", + "title": "Ports", "description": "List of exposed ports that this service is accessible on from the\noutside. Ports can only be provided if `vip` resolution mode is used.\n" } }, + "type": "object", + "title": "EndpointSpec", "description": "Properties that can be configured to access and load balance a service." }, "FailureAction": { - "title": "FailureAction", + "type": "string", "enum": [ "continue", "pause", "rollback" ], - "type": "string", + "title": "FailureAction", "description": " Action to take if an updated task fails to run, or stops running\nduring the update." 
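Because nearly every hunk in this file only permutes keys, a reviewer can separate the mechanical churn from the substantive changes by comparing key-sorted dumps: whatever survives the sort is a real API change. A sketch using only the standard library, with hypothetical file names for the spec before and after regeneration:

    import difflib
    import json

    def canonical(path: str) -> list[str]:
        # Key order carries no meaning in JSON objects, so sorting keys
        # makes reorder-only hunks compare equal.
        with open(path) as f:
            return json.dumps(json.load(f), sort_keys=True, indent=1).splitlines()

    # only genuinely added/removed/changed schema content is printed
    for line in difflib.unified_diff(
        canonical("openapi.old.json"), canonical("openapi.json"), lineterm=""
    ):
        print(line)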
}, "FailureAction1": { - "title": "FailureAction1", + "type": "string", "enum": [ "continue", "pause" ], - "type": "string", + "title": "FailureAction1", "description": " Action to take if an rolled back task fails to run, or stops\nrunning during the rollback." }, "File": { - "title": "File", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name represents the final filename in the filesystem.\n" }, "UID": { - "title": "Uid", "type": "string", + "title": "Uid", "description": "UID represents the file UID." }, "GID": { - "title": "Gid", "type": "string", + "title": "Gid", "description": "GID represents the file GID." }, "Mode": { - "title": "Mode", "type": "integer", + "title": "Mode", "description": "Mode represents the FileMode of the file." } }, + "type": "object", + "title": "File", "description": "File represents a specific target that is backed by a file." }, "File1": { - "title": "File1", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name represents the final filename in the filesystem.\n" }, "UID": { - "title": "Uid", "type": "string", + "title": "Uid", "description": "UID represents the file UID." }, "GID": { - "title": "Gid", "type": "string", + "title": "Gid", "description": "GID represents the file GID." }, "Mode": { - "title": "Mode", "type": "integer", + "title": "Mode", "description": "Mode represents the FileMode of the file." } }, + "type": "object", + "title": "File1", "description": " File represents a specific target that is backed by a file.\n\n
\n\n> **Note**: `Configs.File` and `Configs.Runtime` are mutually exclusive" }, "GenericResource": { - "title": "GenericResource", - "type": "object", "properties": { "NamedResourceSpec": { "$ref": "#/components/schemas/NamedResourceSpec" @@ -1147,14 +1219,16 @@ "DiscreteResourceSpec": { "$ref": "#/components/schemas/DiscreteResourceSpec" } - } + }, + "type": "object", + "title": "GenericResource" }, "GenericResources": { - "title": "GenericResources", - "type": "array", "items": { "$ref": "#/components/schemas/GenericResource" }, + "type": "array", + "title": "GenericResources", "description": "User-defined resources can be either Integer resources (e.g, `SSD=3`) or\nString resources (e.g, `GPU=UUID1`).\n", "example": [ { @@ -1178,85 +1252,85 @@ ] }, "HTTPValidationError": { - "title": "HTTPValidationError", - "type": "object", "properties": { "errors": { - "title": "Validation errors", - "type": "array", "items": { "$ref": "#/components/schemas/ValidationError" - } + }, + "type": "array", + "title": "Validation errors" } - } + }, + "type": "object", + "title": "HTTPValidationError" }, "HealthConfig": { - "title": "HealthConfig", - "type": "object", "properties": { "Test": { - "title": "Test", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Test", "description": "The test to perform. Possible values are:\n\n- `[]` inherit healthcheck from image or parent image\n- `[\"NONE\"]` disable healthcheck\n- `[\"CMD\", args...]` exec arguments directly\n- `[\"CMD-SHELL\", command]` run command with system's default shell\n" }, "Interval": { - "title": "Interval", "type": "integer", + "title": "Interval", "description": "The time to wait between checks in nanoseconds. It should be 0 or at\nleast 1000000 (1 ms). 0 means inherit.\n" }, "Timeout": { - "title": "Timeout", "type": "integer", + "title": "Timeout", "description": "The time to wait before considering the check to have hung. It should\nbe 0 or at least 1000000 (1 ms). 0 means inherit.\n" }, "Retries": { - "title": "Retries", "type": "integer", + "title": "Retries", "description": "The number of consecutive failures needed to consider a container as\nunhealthy. 0 means inherit.\n" }, "StartPeriod": { - "title": "Startperiod", "type": "integer", + "title": "Startperiod", "description": "Start period for the container to initialize before starting\nhealth-retries countdown in nanoseconds. It should be 0 or at least\n1000000 (1 ms). 0 means inherit.\n" } }, + "type": "object", + "title": "HealthConfig", "description": "A test to perform to check that the container is healthy." }, "ImageResources": { - "title": "ImageResources", - "required": [ - "image", - "resources" - ], - "type": "object", "properties": { "image": { - "title": "Image", - "pattern": "^(?:([a-z0-9-]+(?:\\.[a-z0-9-]+)+(?::\\d+)?|[a-z0-9-]+:\\d+)/)?((?:[a-z0-9][a-z0-9_.-]*/)*[a-z0-9-_]+[a-z0-9])(?::([\\w][\\w.-]{0,127}))?(\\@sha256:[a-fA-F0-9]{32,64})?$", "type": "string", + "pattern": "^(?:([a-z0-9-]+(?:\\.[a-z0-9-]+)+(?::\\d+)?|[a-z0-9-]+:\\d+)/)?((?:[a-z0-9][a-z0-9_.-]*/)*[a-z0-9-_]+[a-z0-9])(?::([\\w][\\w.-]{0,127}))?(\\@sha256:[a-fA-F0-9]{32,64})?$", + "title": "Image", "description": "Used by the frontend to provide a context for the users.Services with a docker-compose spec will have multiple entries.Using the `image:version` instead of the docker-compose spec is more helpful for the end user." 
}, "resources": { - "title": "Resources", - "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ResourceValue" - } + }, + "type": "object", + "title": "Resources" }, "boot_modes": { - "type": "array", "items": { "$ref": "#/components/schemas/BootMode" }, + "type": "array", "description": "describe how a service shall be booted, using CPU, MPI, openMP or GPU", "default": [ "CPU" ] } }, + "type": "object", + "required": [ + "image", + "resources" + ], + "title": "ImageResources", "example": { "image": "simcore/service/dynamic/pretty-intense:1.0.0", "resources": { @@ -1284,142 +1358,104 @@ } }, "Isolation": { - "title": "Isolation", + "type": "string", "enum": [ "default", "process", "hyperv" ], - "type": "string", + "title": "Isolation", "description": "Isolation technology of the container. (Windows only)" }, "Limit": { - "title": "Limit", - "type": "object", "properties": { "NanoCPUs": { - "title": "Nanocpus", "type": "integer", + "title": "Nanocpus", "example": 4000000000 }, "MemoryBytes": { - "title": "Memorybytes", "type": "integer", + "title": "Memorybytes", "example": 8272408576 }, "Pids": { - "title": "Pids", "type": "integer", + "title": "Pids", "description": "Limits the maximum number of PIDs in the container. Set `0` for unlimited.\n", "default": 0, "example": 100 } }, + "type": "object", + "title": "Limit", "description": "An object describing a limit on resources which can be requested by a task." }, "LogDriver1": { - "title": "LogDriver1", - "type": "object", "properties": { "Name": { - "title": "Name", - "type": "string" + "type": "string", + "title": "Name" }, "Options": { - "title": "Options", - "type": "object", - "additionalProperties": { - "type": "string" - } - } - }, - "description": " Specifies the log driver to use for tasks created from this spec. If\nnot present, the default one for the swarm will be used, finally\nfalling back to the engine default if not specified." - }, - "Meta": { - "title": "Meta", - "required": [ - "name", - "version" - ], - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "version": { - "title": "Version", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" - }, - "released": { - "title": "Released", - "type": "object", "additionalProperties": { - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", "type": "string" }, - "description": "Maps every route's path tag with a released version" + "type": "object", + "title": "Options" } }, - "example": { - "name": "simcore_service_foo", - "version": "2.4.45", - "released": { - "v1": "1.3.4", - "v2": "2.4.45" - } - } + "type": "object", + "title": "LogDriver1", + "description": " Specifies the log driver to use for tasks created from this spec. If\nnot present, the default one for the swarm will be used, finally\nfalling back to the engine default if not specified." 
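The Meta component removed above lives on as BaseMeta, added earlier in this diff with the same fields plus an example block. A minimal Pydantic v2 reconstruction of what that schema declares; the authoritative model lives in the service's models package, so treat this purely as a reading aid:

    from typing import Annotated

    from pydantic import BaseModel, Field

    # semver pattern exactly as it appears in the BaseMeta schema
    SEMVER = (
        r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}"
        r"(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)"
        r"(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?"
        r"(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$"
    )
    VersionStr = Annotated[str, Field(pattern=SEMVER)]

    class BaseMeta(BaseModel):
        name: str
        version: VersionStr
        # maps every route's path tag to a released version
        released: dict[str, VersionStr] | None = None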
}, "Mode": { - "title": "Mode", - "type": "object", "properties": { "Replicated": { "$ref": "#/components/schemas/Replicated" }, "Global": { - "title": "Global", - "type": "object" + "type": "object", + "title": "Global" }, "ReplicatedJob": { - "title": "Replicatedjob", "allOf": [ { "$ref": "#/components/schemas/ReplicatedJob" } ], + "title": "Replicatedjob", "description": "The mode used for services with a finite number of tasks that run\nto a completed state.\n" }, "GlobalJob": { - "title": "Globaljob", "type": "object", + "title": "Globaljob", "description": "The mode used for services which run a task to the completed state\non each valid node.\n" } }, + "type": "object", + "title": "Mode", "description": "Scheduling mode for the service." }, "Mode1": { - "title": "Mode1", + "type": "string", "enum": [ "vip", "dnsrr" ], - "type": "string", + "title": "Mode1", "description": "The mode of resolution to use for internal load balancing between tasks." }, "Mount": { - "title": "Mount", - "type": "object", "properties": { "Target": { - "title": "Target", "type": "string", + "title": "Target", "description": "Container path." }, "Source": { - "title": "Source", "type": "string", + "title": "Source", "description": "Mount source (e.g. a volume name, a host path)." }, "Type": { @@ -1431,117 +1467,117 @@ "description": "The mount type. Available types:\n\n- `bind` Mounts a file or directory from the host into the container. Must exist prior to creating the container.\n- `volume` Creates a volume with the given name and options (or uses a pre-existing volume with the same name and options). These are **not** removed when the container is removed.\n- `tmpfs` Create a tmpfs with the given options. The mount source cannot be specified for tmpfs.\n- `npipe` Mounts a named pipe from the host into the container. Must exist prior to creating the container.\n" }, "ReadOnly": { - "title": "Readonly", "type": "boolean", + "title": "Readonly", "description": "Whether the mount should be read-only." }, "Consistency": { - "title": "Consistency", "type": "string", + "title": "Consistency", "description": "The consistency requirement for the mount: `default`, `consistent`, `cached`, or `delegated`." }, "BindOptions": { - "title": "Bindoptions", "allOf": [ { "$ref": "#/components/schemas/BindOptions" } ], + "title": "Bindoptions", "description": "Optional configuration for the `bind` type." }, "VolumeOptions": { - "title": "Volumeoptions", "allOf": [ { "$ref": "#/components/schemas/VolumeOptions" } ], + "title": "Volumeoptions", "description": "Optional configuration for the `volume` type." }, "TmpfsOptions": { - "title": "Tmpfsoptions", "allOf": [ { "$ref": "#/components/schemas/TmpfsOptions" } ], + "title": "Tmpfsoptions", "description": "Optional configuration for the `tmpfs` type." } - } + }, + "type": "object", + "title": "Mount" }, "NamedResourceSpec": { - "title": "NamedResourceSpec", - "type": "object", "properties": { "Kind": { - "title": "Kind", - "type": "string" + "type": "string", + "title": "Kind" }, "Value": { - "title": "Value", - "type": "string" + "type": "string", + "title": "Value" } - } + }, + "type": "object", + "title": "NamedResourceSpec" }, "NetworkAttachmentConfig": { - "title": "NetworkAttachmentConfig", - "type": "object", "properties": { "Target": { - "title": "Target", "type": "string", + "title": "Target", "description": "The target network for attachment. 
Must be a network name or ID.\n" }, "Aliases": { - "title": "Aliases", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Aliases", "description": "Discoverable alternate names for the service on this network.\n" }, "DriverOpts": { - "title": "Driveropts", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Driveropts", "description": "Driver attachment options for the network target.\n" } }, + "type": "object", + "title": "NetworkAttachmentConfig", "description": "Specifies how a service should be attached to a particular network." }, "NetworkAttachmentSpec": { - "title": "NetworkAttachmentSpec", - "type": "object", "properties": { "ContainerID": { - "title": "Containerid", "type": "string", + "title": "Containerid", "description": "ID of the container represented by this task" } }, + "type": "object", + "title": "NetworkAttachmentSpec", "description": " Read-only spec type for non-swarm containers attached to swarm overlay\nnetworks.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`." }, "Order": { - "title": "Order", + "type": "string", "enum": [ "stop-first", "start-first" ], - "type": "string", + "title": "Order", "description": " The order of operations when rolling out an updated task. Either\nthe old task is shut down before the new task is started, or the\nnew task is started before the old task is shut down." }, "Placement": { - "title": "Placement", - "type": "object", "properties": { "Constraints": { - "title": "Constraints", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Constraints", "description": "An array of constraint expressions to limit the set of nodes where\na task can be scheduled. Constraint expressions can either use a\n_match_ (`==`) or _exclude_ (`!=`) rule. Multiple constraints find\nnodes that satisfy every expression (AND match). Constraints can\nmatch node or Docker Engine labels as follows:\n\nnode attribute | matches | example\n---------------------|--------------------------------|-----------------------------------------------\n`node.id` | Node ID | `node.id==2ivku8v2gvtg4`\n`node.hostname` | Node hostname | `node.hostname!=node-2`\n`node.role` | Node role (`manager`/`worker`) | `node.role==manager`\n`node.platform.os` | Node operating system | `node.platform.os==windows`\n`node.platform.arch` | Node architecture | `node.platform.arch==x86_64`\n`node.labels` | User-defined node labels | `node.labels.security==high`\n`engine.labels` | Docker Engine's labels | `engine.labels.operatingsystem==ubuntu-14.04`\n\n`engine.labels` apply to Docker Engine labels like operating system,\ndrivers, etc. Swarm administrators add `node.labels` for operational\npurposes by using the [`node update endpoint`](#operation/NodeUpdate).\n", "example": [ "node.hostname!=node3.corp.example.com", @@ -1552,11 +1588,11 @@ ] }, "Preferences": { - "title": "Preferences", - "type": "array", "items": { "$ref": "#/components/schemas/Preference" }, + "type": "array", + "title": "Preferences", "description": "Preferences provide a way to make the scheduler aware of factors\nsuch as topology. They are provided in order from highest to\nlowest precedence.\n", "example": [ { @@ -1572,131 +1608,133 @@ ] }, "MaxReplicas": { - "title": "Maxreplicas", "type": "integer", + "title": "Maxreplicas", "description": "Maximum number of replicas for per node (default value is 0, which\nis unlimited)\n", "default": 0 }, "Platforms": { - "title": "Platforms", - "type": "array", "items": { "$ref": "#/components/schemas/Platform" }, + "type": "array", + "title": "Platforms", "description": "Platforms stores all the platforms that the service's image can\nrun on. 
This field is used in the platform filter for scheduling.\nIf empty, then the platform filter is off, meaning there are no\nscheduling restrictions.\n" } - } + }, + "type": "object", + "title": "Placement" }, "Platform": { - "title": "Platform", - "type": "object", "properties": { "Architecture": { - "title": "Architecture", "type": "string", + "title": "Architecture", "description": "Architecture represents the hardware architecture (for example,\n`x86_64`).\n", "example": "x86_64" }, "OS": { - "title": "Os", "type": "string", + "title": "Os", "description": "OS represents the Operating System (for example, `linux` or `windows`).\n", "example": "linux" } }, + "type": "object", + "title": "Platform", "description": "Platform represents the platform (Arch/OS)." }, "PluginPrivilege": { - "title": "PluginPrivilege", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "example": "network" }, "Description": { - "title": "Description", - "type": "string" + "type": "string", + "title": "Description" }, "Value": { - "title": "Value", - "type": "array", "items": { "type": "string" }, + "type": "array", + "title": "Value", "example": [ "host" ] } }, + "type": "object", + "title": "PluginPrivilege", "description": " Describes a permission the user has to accept upon installing\nthe plugin." }, "PluginSpec": { - "title": "PluginSpec", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "The name or 'alias' to use for the plugin." }, "Remote": { - "title": "Remote", "type": "string", + "title": "Remote", "description": "The plugin image reference to use." }, "Disabled": { - "title": "Disabled", "type": "boolean", + "title": "Disabled", "description": "Disable the plugin once scheduled." }, "PluginPrivilege": { - "title": "Pluginprivilege", - "type": "array", "items": { "$ref": "#/components/schemas/PluginPrivilege" - } + }, + "type": "array", + "title": "Pluginprivilege" } }, + "type": "object", + "title": "PluginSpec", "description": " Plugin spec for the service. *(Experimental release only.)*\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`." }, "Preference": { - "title": "Preference", - "type": "object", "properties": { "Spread": { "$ref": "#/components/schemas/Spread" } - } + }, + "type": "object", + "title": "Preference" }, "Privileges": { - "title": "Privileges", - "type": "object", "properties": { "CredentialSpec": { - "title": "Credentialspec", "allOf": [ { "$ref": "#/components/schemas/CredentialSpec" } ], + "title": "Credentialspec", "description": "CredentialSpec for managed service account (Windows only)" }, "SELinuxContext": { - "title": "Selinuxcontext", "allOf": [ { "$ref": "#/components/schemas/SELinuxContext" } ], + "title": "Selinuxcontext", "description": "SELinux labels of the container" } }, + "type": "object", + "title": "Privileges", "description": "Security options for the container" }, "Propagation": { - "title": "Propagation", + "type": "string", "enum": [ "private", "rprivate", @@ -1705,76 +1743,69 @@ "slave", "rslave" ], - "type": "string", + "title": "Propagation", "description": "A propagation mode with the value `[r]private`, `[r]shared`, or `[r]slave`." }, "PublishMode": { - "title": "PublishMode", + "type": "string", "enum": [ "ingress", "host" ], - "type": "string", + "title": "PublishMode", "description": " The mode in which port is published.\n\n
\n\n- \"ingress\" makes the target port accessible on every node,\n regardless of whether there is a task for the service running on\n that node or not.\n- \"host\" bypasses the routing mesh and publish the port directly on\n the swarm node where that service is running." }, "Replicated": { - "title": "Replicated", - "type": "object", "properties": { "Replicas": { - "title": "Replicas", - "type": "integer" + "type": "integer", + "title": "Replicas" } - } + }, + "type": "object", + "title": "Replicated" }, "ReplicatedJob": { - "title": "ReplicatedJob", - "type": "object", "properties": { "MaxConcurrent": { - "title": "Maxconcurrent", "type": "integer", + "title": "Maxconcurrent", "description": "The maximum number of replicas to run simultaneously.\n", "default": 1 }, "TotalCompletions": { - "title": "Totalcompletions", "type": "integer", + "title": "Totalcompletions", "description": "The total number of replicas desired to reach the Completed\nstate. If unset, will default to the value of `MaxConcurrent`\n" } }, + "type": "object", + "title": "ReplicatedJob", "description": " The mode used for services with a finite number of tasks that run\nto a completed state." }, "ResourceObject": { - "title": "ResourceObject", - "type": "object", "properties": { "NanoCPUs": { - "title": "Nanocpus", "type": "integer", + "title": "Nanocpus", "example": 4000000000 }, "MemoryBytes": { - "title": "Memorybytes", "type": "integer", + "title": "Memorybytes", "example": 8272408576 }, "GenericResources": { "$ref": "#/components/schemas/GenericResources" } }, + "type": "object", + "title": "ResourceObject", "description": " An object describing the resources which can be advertised by a node and\nrequested by a task." }, "ResourceValue": { - "title": "ResourceValue", - "required": [ - "limit", - "reservation" - ], - "type": "object", "properties": { "limit": { - "title": "Limit", "anyOf": [ { "type": "integer" @@ -1785,10 +1816,10 @@ { "type": "string" } - ] + ], + "title": "Limit" }, "reservation": { - "title": "Reservation", "anyOf": [ { "type": "integer" @@ -1799,38 +1830,43 @@ { "type": "string" } - ] + ], + "title": "Reservation" } - } + }, + "type": "object", + "required": [ + "limit", + "reservation" + ], + "title": "ResourceValue" }, "Resources1": { - "title": "Resources1", - "type": "object", "properties": { "Limits": { - "title": "Limits", "allOf": [ { "$ref": "#/components/schemas/Limit" } ], + "title": "Limits", "description": "Define resources limits." }, "Reservations": { - "title": "Reservations", "allOf": [ { "$ref": "#/components/schemas/ResourceObject" } ], + "title": "Reservations", "description": "Define resources reservation." } }, + "type": "object", + "title": "Resources1", "description": " Resource requirements which apply to each individual container created\nas part of the service." }, "RestartPolicy1": { - "title": "RestartPolicy1", - "type": "object", "properties": { "Condition": { "allOf": [ @@ -1841,37 +1877,37 @@ "description": "Condition for restart." }, "Delay": { - "title": "Delay", "type": "integer", + "title": "Delay", "description": "Delay between restart attempts." 
}, "MaxAttempts": { - "title": "Maxattempts", "type": "integer", + "title": "Maxattempts", "description": "Maximum attempts to restart a given container before giving up\n(default value is 0, which is ignored).\n", "default": 0 }, "Window": { - "title": "Window", "type": "integer", + "title": "Window", "description": "Windows is the time window used to evaluate the restart policy\n(default value is 0, which is unbounded).\n", "default": 0 } }, + "type": "object", + "title": "RestartPolicy1", "description": " Specification for the restart policy which applies to containers\ncreated as part of this service." }, "RollbackConfig": { - "title": "RollbackConfig", - "type": "object", "properties": { "Parallelism": { - "title": "Parallelism", "type": "integer", + "title": "Parallelism", "description": "Maximum number of tasks to be rolled back in one iteration (0 means\nunlimited parallelism).\n" }, "Delay": { - "title": "Delay", "type": "integer", + "title": "Delay", "description": "Amount of time between rollback iterations, in nanoseconds.\n" }, "FailureAction": { @@ -1883,13 +1919,13 @@ "description": "Action to take if an rolled back task fails to run, or stops\nrunning during the rollback.\n" }, "Monitor": { - "title": "Monitor", "type": "integer", + "title": "Monitor", "description": "Amount of time to monitor each rolled back task for failures, in\nnanoseconds.\n" }, "MaxFailureRatio": { - "title": "Maxfailureratio", "type": "number", + "title": "Maxfailureratio", "description": "The fraction of tasks that may fail during a rollback before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n", "default": 0 }, @@ -1902,191 +1938,191 @@ "description": "The order of operations when rolling back a task. Either the old\ntask is shut down before the new task is started, or the new task\nis started before the old task is shut down.\n" } }, + "type": "object", + "title": "RollbackConfig", "description": "Specification for the rollback strategy of the service." }, "SELinuxContext": { - "title": "SELinuxContext", - "type": "object", "properties": { "Disable": { - "title": "Disable", "type": "boolean", + "title": "Disable", "description": "Disable SELinux" }, "User": { - "title": "User", "type": "string", + "title": "User", "description": "SELinux user label" }, "Role": { - "title": "Role", "type": "string", + "title": "Role", "description": "SELinux role label" }, "Type": { - "title": "Type", "type": "string", + "title": "Type", "description": "SELinux type label" }, "Level": { - "title": "Level", "type": "string", + "title": "Level", "description": "SELinux level label" } }, + "type": "object", + "title": "SELinuxContext", "description": "SELinux labels of the container" }, "Secret": { - "title": "Secret", - "type": "object", "properties": { "File": { - "title": "File", "allOf": [ { "$ref": "#/components/schemas/File" } ], + "title": "File", "description": "File represents a specific target that is backed by a file.\n" }, "SecretID": { - "title": "Secretid", "type": "string", + "title": "Secretid", "description": "SecretID represents the ID of the specific secret that we're\nreferencing.\n" }, "SecretName": { - "title": "Secretname", "type": "string", + "title": "Secretname", "description": "SecretName is the name of the secret that this references,\nbut this is just provided for lookup/display purposes. 
The\nsecret in the reference will be identified by its ID.\n" } - } + }, + "type": "object", + "title": "Secret" }, "SelectBox": { - "title": "SelectBox", - "required": [ - "structure" - ], - "type": "object", "properties": { "structure": { - "title": "Structure", - "minItems": 1, - "type": "array", "items": { "$ref": "#/components/schemas/Structure" - } + }, + "type": "array", + "minItems": 1, + "title": "Structure" } }, - "additionalProperties": false - }, - "ServiceAccessRightsGet": { - "title": "ServiceAccessRightsGet", + "additionalProperties": false, + "type": "object", "required": [ - "service_key", - "service_version", - "gids_with_access_rights" + "structure" ], - "type": "object", + "title": "SelectBox" + }, + "ServiceAccessRightsGet": { "properties": { "service_key": { - "title": "Service Key", + "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "type": "string" + "title": "Service Key" }, "service_version": { - "title": "Service Version", + "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" + "title": "Service Version" }, "gids_with_access_rights": { - "title": "Gids With Access Rights", - "type": "object", "additionalProperties": { - "type": "object", "additionalProperties": { "type": "boolean" - } - } + }, + "type": "object" + }, + "type": "object", + "title": "Gids With Access Rights" } - } - }, - "ServiceGet": { - "title": "ServiceGet", + }, + "type": "object", "required": [ - "name", - "description", - "key", - "version", - "type", - "authors", - "contact", - "inputs", - "outputs" + "service_key", + "service_version", + "gids_with_access_rights" ], - "type": "object", + "title": "ServiceAccessRightsGet" + }, + "ServiceGet": { "properties": { "name": { - "title": "Name", "type": "string", + "title": "Name", "description": "short, human readable name for the node", "example": "Fast Counter" }, "thumbnail": { - "title": "Thumbnail", + "type": "string", "maxLength": 2083, "minLength": 1, - "type": "string", - "description": "url to the thumbnail", - "format": "uri" + "format": "uri", + "title": "Thumbnail", + "description": "url to the thumbnail" }, "description": { - "title": "Description", "type": "string", + "title": "Description", "description": "human readable description of the purpose of the node" }, "deprecated": { - "title": "Deprecated", "type": "string", - "description": "If filled with a date, then the service is to be deprecated at that date (e.g. cannot start anymore)", - "format": "date-time" + "format": "date-time", + "title": "Deprecated", + "description": "If filled with a date, then the service is to be deprecated at that date (e.g. 
cannot start anymore)" }, "classifiers": { - "title": "Classifiers", - "type": "array", "items": { "type": "string" - } + }, + "type": "array", + "title": "Classifiers" }, "quality": { - "title": "Quality", "type": "object", + "title": "Quality", "default": {} }, "accessRights": { - "title": "Accessrights", - "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ServiceGroupAccessRights" }, + "type": "object", + "title": "Accessrights", "description": "service access rights per group id" }, "key": { - "title": "Key", - "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", "type": "string", + "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", + "title": "Key", "description": "distinctive name for the node based on the docker registry path" }, "version": { - "title": "Version", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Version", "description": "service version number" }, + "version_display": { + "type": "string", + "title": "Version Display", + "description": "A user-friendly or marketing name for the release. This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' This name is not used for version comparison but is useful for communication and documentation purposes." + }, + "release_date": { + "type": "string", + "format": "date-time", + "title": "Release Date", + "description": "A timestamp when the specific version of the service was released. This field helps in tracking the timeline of releases and understanding the sequence of updates. 
A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [\u00b1]HH[:]MM]" + }, "integration-version": { - "title": "Integration-Version", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", "type": "string", - "description": "integration version number" + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "title": "Integration-Version", + "description": "This version is used to maintain backward compatibility when there are changes in the way a service is integrated into the framework" }, "type": { "allOf": [ @@ -2097,54 +2133,81 @@ "description": "service type" }, "badges": { - "title": "Badges", - "type": "array", "items": { "$ref": "#/components/schemas/Badge" - } + }, + "type": "array", + "title": "Badges" }, "authors": { - "title": "Authors", - "minItems": 1, - "type": "array", "items": { "$ref": "#/components/schemas/Author" - } + }, + "type": "array", + "minItems": 1, + "title": "Authors" }, "contact": { - "title": "Contact", "type": "string", - "description": "email to correspond to the authors about the node", - "format": "email" + "format": "email", + "title": "Contact", + "description": "email to correspond to the authors about the node" }, "inputs": { - "title": "Inputs", + "additionalProperties": { + "$ref": "#/components/schemas/ServiceInput" + }, "type": "object", + "title": "Inputs", "description": "definition of the inputs of this node" }, "outputs": { - "title": "Outputs", + "additionalProperties": { + "$ref": "#/components/schemas/ServiceOutput" + }, "type": "object", + "title": "Outputs", "description": "definition of the outputs of this node" }, "boot-options": { - "title": "Boot-Options", + "additionalProperties": { + "$ref": "#/components/schemas/BootOption" + }, "type": "object", + "title": "Boot-Options", "description": "Service defined boot options. These get injected in the service as env variables." }, "min-visible-inputs": { - "title": "Min-Visible-Inputs", - "minimum": 0, "type": "integer", + "minimum": 0, + "title": "Min-Visible-Inputs", "description": "The number of 'data type inputs' displayed by default in the UI. When None all 'data type inputs' are displayed." 
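Among the new ServiceGet fields above, release_date documents its accepted timestamp shape inline; that shape is exactly what Python's datetime.fromisoformat parses (the literal Z suffix needs 3.11+). A quick check with an invented value:

    from datetime import datetime

    # format per the release_date description:
    # YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [+/-]HH[:]MM]
    dt = datetime.fromisoformat("2024-05-30T17:37:50+00:00")
    assert dt.tzinfo is not None
    print(dt.isoformat())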
}, + "progress_regexp": { + "type": "string", + "title": "Progress Regexp", + "description": "regexp pattern for detecting computational service's progress" + }, "owner": { - "title": "Owner", "type": "string", - "format": "email" + "format": "email", + "title": "Owner" } }, - "description": "Static metadata for a service injected in the image labels\n\nThis is one to one with node-meta-v0.0.1.json", + "type": "object", + "required": [ + "name", + "description", + "key", + "version", + "type", + "authors", + "contact", + "inputs", + "outputs" + ], + "title": "ServiceGet", + "description": "Static metadata for a service injected in the image labels\n\nNOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image", "example": { "name": "File Picker", "description": "description", @@ -2183,73 +2246,69 @@ } }, "ServiceGroupAccessRights": { - "title": "ServiceGroupAccessRights", - "type": "object", "properties": { "execute_access": { - "title": "Execute Access", "type": "boolean", + "title": "Execute Access", "description": "defines whether the group can execute the service", "default": false }, "write_access": { - "title": "Write Access", "type": "boolean", + "title": "Write Access", "description": "defines whether the group can modify the service", "default": false } - } + }, + "type": "object", + "title": "ServiceGroupAccessRights" }, "ServiceInput": { - "title": "ServiceInput", - "required": [ - "label", - "description", - "type" - ], - "type": "object", "properties": { "displayOrder": { - "title": "Displayorder", "type": "number", + "title": "Displayorder", "description": "DEPRECATED: new display order is taken from the item position. This will be removed.", "deprecated": true }, "label": { - "title": "Label", "type": "string", + "title": "Label", "description": "short name for the property", "example": "Age" }, "description": { - "title": "Description", "type": "string", + "title": "Description", "description": "description of the property", "example": "Age in seconds since 1970" }, "type": { - "title": "Type", - "pattern": "^(number|integer|boolean|string|ref_contentSchema|data:([^/\\s,]+/[^/\\s,]+|\\[[^/\\s,]+/[^/\\s,]+(,[^/\\s]+/[^/,\\s]+)*\\]))$", "type": "string", + "pattern": "^(number|integer|boolean|string|ref_contentSchema|data:([^/\\s,]+/[^/\\s,]+|\\[[^/\\s,]+/[^/\\s,]+(,[^/\\s]+/[^/,\\s]+)*\\]))$", + "title": "Type", "description": "data type expected on this input glob matching for data type is allowed" }, "contentSchema": { - "title": "Contentschema", "type": "object", + "title": "Contentschema", "description": "jsonschema of this input/output. 
Required when type='ref_contentSchema'" }, "fileToKeyMap": { - "title": "Filetokeymap", + "additionalProperties": { + "type": "string", + "pattern": "^[-_a-zA-Z0-9]+$" + }, "type": "object", + "title": "Filetokeymap", "description": "Place the data associated with the named keys in files" }, "unit": { - "title": "Unit", "type": "string", + "title": "Unit", "description": "Units, when it refers to a physical quantity" }, "defaultValue": { - "title": "Defaultvalue", "anyOf": [ { "type": "boolean" @@ -2263,115 +2322,127 @@ { "type": "string" } - ] + ], + "title": "Defaultvalue" }, "widget": { - "title": "Widget", "allOf": [ { "$ref": "#/components/schemas/Widget" } ], + "title": "Widget", "description": "custom widget to use instead of the default one determined from the data-type" } - }, - "additionalProperties": false, - "description": "Metadata on a service input port" - }, - "ServiceOutput": { - "title": "ServiceOutput", + }, + "additionalProperties": false, + "type": "object", "required": [ "label", "description", "type" ], - "type": "object", + "title": "ServiceInput", + "description": "Metadata on a service input port" + }, + "ServiceOutput": { "properties": { "displayOrder": { - "title": "Displayorder", "type": "number", + "title": "Displayorder", "description": "DEPRECATED: new display order is taken from the item position. This will be removed.", "deprecated": true }, "label": { - "title": "Label", "type": "string", + "title": "Label", "description": "short name for the property", "example": "Age" }, "description": { - "title": "Description", "type": "string", + "title": "Description", "description": "description of the property", "example": "Age in seconds since 1970" }, "type": { - "title": "Type", - "pattern": "^(number|integer|boolean|string|ref_contentSchema|data:([^/\\s,]+/[^/\\s,]+|\\[[^/\\s,]+/[^/\\s,]+(,[^/\\s]+/[^/,\\s]+)*\\]))$", "type": "string", + "pattern": "^(number|integer|boolean|string|ref_contentSchema|data:([^/\\s,]+/[^/\\s,]+|\\[[^/\\s,]+/[^/\\s,]+(,[^/\\s]+/[^/,\\s]+)*\\]))$", + "title": "Type", "description": "data type expected on this input glob matching for data type is allowed" }, "contentSchema": { - "title": "Contentschema", "type": "object", + "title": "Contentschema", "description": "jsonschema of this input/output. 
Required when type='ref_contentSchema'" }, "fileToKeyMap": { - "title": "Filetokeymap", + "additionalProperties": { + "type": "string", + "pattern": "^[-_a-zA-Z0-9]+$" + }, "type": "object", + "title": "Filetokeymap", "description": "Place the data associated with the named keys in files" }, "unit": { - "title": "Unit", "type": "string", + "title": "Unit", "description": "Units, when it refers to a physical quantity" }, "widget": { - "title": "Widget", "allOf": [ { "$ref": "#/components/schemas/Widget" } ], + "title": "Widget", "description": "custom widget to use instead of the default one determined from the data-type", "deprecated": true } }, "additionalProperties": false, + "type": "object", + "required": [ + "label", + "description", + "type" + ], + "title": "ServiceOutput", "description": "Base class for service input/outputs" }, "ServicePortGet": { - "title": "ServicePortGet", - "required": [ - "key", - "kind" - ], - "type": "object", "properties": { "key": { - "title": "Key name", - "pattern": "^[^_\\W0-9]\\w*$", "type": "string", + "pattern": "^[^_\\W0-9]\\w*$", + "title": "Key name", "description": "port identifier name" }, "kind": { - "title": "Kind", + "type": "string", "enum": [ "input", "output" ], - "type": "string" + "title": "Kind" }, "content_media_type": { - "title": "Content Media Type", - "type": "string" + "type": "string", + "title": "Content Media Type" }, "content_schema": { - "title": "Content Schema", "type": "object", + "title": "Content Schema", "description": "jsonschema for the port's value. SEE https://json-schema.org/understanding-json-schema/" } }, + "type": "object", + "required": [ + "key", + "kind" + ], + "title": "ServicePortGet", "example": { "key": "input_1", "kind": "input", @@ -2385,147 +2456,147 @@ } }, "ServiceSpec": { - "title": "ServiceSpec", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name of the service." }, "Labels": { - "title": "Labels", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Labels", "description": "User-defined key/value metadata." }, "TaskTemplate": { "$ref": "#/components/schemas/TaskSpec" }, "Mode": { - "title": "Mode", "allOf": [ { "$ref": "#/components/schemas/Mode" } ], + "title": "Mode", "description": "Scheduling mode for the service." }, "UpdateConfig": { - "title": "Updateconfig", "allOf": [ { "$ref": "#/components/schemas/UpdateConfig" } ], + "title": "Updateconfig", "description": "Specification for the update strategy of the service." }, "RollbackConfig": { - "title": "Rollbackconfig", "allOf": [ { "$ref": "#/components/schemas/RollbackConfig" } ], + "title": "Rollbackconfig", "description": "Specification for the rollback strategy of the service." }, "Networks": { - "title": "Networks", - "type": "array", "items": { "$ref": "#/components/schemas/NetworkAttachmentConfig" }, + "type": "array", + "title": "Networks", "description": "Specifies which networks the service should attach to." }, "EndpointSpec": { "$ref": "#/components/schemas/EndpointSpec" } }, + "type": "object", + "title": "ServiceSpec", "description": "User modifiable configuration for a service." 
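A side note on `ServicePortGet.key` above: the pattern `^[^_\W0-9]\w*$` amounts to "a Python-style identifier that does not start with an underscore or a digit". Illustrative check, plain `re` only:

    import re

    PORT_KEY = re.compile(r"^[^_\W0-9]\w*$")

    assert PORT_KEY.match("input_1")
    assert not PORT_KEY.match("_hidden")    # leading underscore rejected
    assert not PORT_KEY.match("1st_input")  # leading digit rejected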
}, "ServiceSpecificationsGet": { - "title": "ServiceSpecificationsGet", - "type": "object", "properties": { "sidecar": { - "title": "Sidecar", "allOf": [ { "$ref": "#/components/schemas/ServiceSpec" } ], + "title": "Sidecar", "description": "schedule-time specifications for the service sidecar (follows Docker Service creation API, see https://docs.docker.com/engine/api/v1.25/#operation/ServiceCreate)" }, "service": { - "title": "Service", "allOf": [ { "$ref": "#/components/schemas/ServiceSpec" } ], + "title": "Service", "description": "schedule-time specifications specifications for the service (follows Docker Service creation API (specifically only the Resources part), see https://docs.docker.com/engine/api/v1.41/#tag/Service/operation/ServiceCreate" } - } + }, + "type": "object", + "title": "ServiceSpecificationsGet" }, "ServiceType": { - "title": "ServiceType", + "type": "string", "enum": [ "computational", "dynamic", "frontend", "backend" ], - "type": "string", + "title": "ServiceType", "description": "An enumeration." }, "ServiceUpdate": { - "title": "ServiceUpdate", - "type": "object", "properties": { "accessRights": { - "title": "Accessrights", - "type": "object", "additionalProperties": { "$ref": "#/components/schemas/ServiceGroupAccessRights" }, + "type": "object", + "title": "Accessrights", "description": "service access rights per group id" }, "name": { - "title": "Name", - "type": "string" + "type": "string", + "title": "Name" }, "thumbnail": { - "title": "Thumbnail", + "type": "string", "maxLength": 2083, "minLength": 1, - "type": "string", - "format": "uri" + "format": "uri", + "title": "Thumbnail" }, "description": { - "title": "Description", - "type": "string" + "type": "string", + "title": "Description" }, "deprecated": { - "title": "Deprecated", "type": "string", - "description": "If filled with a date, then the service is to be deprecated at that date (e.g. cannot start anymore)", - "format": "date-time" + "format": "date-time", + "title": "Deprecated", + "description": "If filled with a date, then the service is to be deprecated at that date (e.g. cannot start anymore)" }, "classifiers": { - "title": "Classifiers", - "type": "array", "items": { "type": "string" - } + }, + "type": "array", + "title": "Classifiers" }, "quality": { - "title": "Quality", "type": "object", + "title": "Quality", "default": {} } }, + "type": "object", + "title": "ServiceUpdate", "example": { "accessRights": { "1": { @@ -2604,26 +2675,19 @@ } }, "Spread": { - "title": "Spread", - "type": "object", "properties": { "SpreadDescriptor": { - "title": "Spreaddescriptor", "type": "string", + "title": "Spreaddescriptor", "description": "label descriptor, such as `engine.labels.az`.\n" } - } + }, + "type": "object", + "title": "Spread" }, "Structure": { - "title": "Structure", - "required": [ - "key", - "label" - ], - "type": "object", "properties": { "key": { - "title": "Key", "anyOf": [ { "type": "string" @@ -2634,185 +2698,190 @@ { "type": "number" } - ] + ], + "title": "Key" }, "label": { - "title": "Label", - "type": "string" + "type": "string", + "title": "Label" } }, - "additionalProperties": false + "additionalProperties": false, + "type": "object", + "required": [ + "key", + "label" + ], + "title": "Structure" }, "TaskSpec": { - "title": "TaskSpec", - "type": "object", "properties": { "PluginSpec": { - "title": "Pluginspec", "allOf": [ { "$ref": "#/components/schemas/PluginSpec" } ], + "title": "Pluginspec", "description": "Plugin spec for the service. *(Experimental release only.)*\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n" }, "ContainerSpec": { - "title": "Containerspec", "allOf": [ { "$ref": "#/components/schemas/ContainerSpec" } ], + "title": "Containerspec", "description": "Container spec for the service.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n" }, "NetworkAttachmentSpec": { - "title": "Networkattachmentspec", "allOf": [ { "$ref": "#/components/schemas/NetworkAttachmentSpec" } ], + "title": "Networkattachmentspec", "description": "Read-only spec type for non-swarm containers attached to swarm overlay\nnetworks.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n" }, "Resources": { - "title": "Resources", "allOf": [ { "$ref": "#/components/schemas/Resources1" } ], + "title": "Resources", "description": "Resource requirements which apply to each individual container created\nas part of the service.\n" }, "RestartPolicy": { - "title": "Restartpolicy", "allOf": [ { "$ref": "#/components/schemas/RestartPolicy1" } ], + "title": "Restartpolicy", "description": "Specification for the restart policy which applies to containers\ncreated as part of this service.\n" }, "Placement": { "$ref": "#/components/schemas/Placement" }, "ForceUpdate": { - "title": "Forceupdate", "type": "integer", + "title": "Forceupdate", "description": "A counter that triggers an update even if no relevant parameters have\nbeen changed.\n" }, "Runtime": { - "title": "Runtime", "type": "string", + "title": "Runtime", "description": "Runtime is the type of runtime specified for the task executor.\n" }, "Networks": { - "title": "Networks", - "type": "array", "items": { "$ref": "#/components/schemas/NetworkAttachmentConfig" }, + "type": "array", + "title": "Networks", "description": "Specifies which networks the service should attach to." }, "LogDriver": { - "title": "Logdriver", "allOf": [ { "$ref": "#/components/schemas/LogDriver1" } ], + "title": "Logdriver", "description": "Specifies the log driver to use for tasks created from this spec. If\nnot present, the default one for the swarm will be used, finally\nfalling back to the engine default if not specified.\n" } }, + "type": "object", + "title": "TaskSpec", "description": "User modifiable task configuration." }, "TextArea": { - "title": "TextArea", - "required": [ - "minHeight" - ], - "type": "object", "properties": { "minHeight": { - "title": "Minheight", - "exclusiveMinimum": true, "type": "integer", + "exclusiveMinimum": true, + "title": "Minheight", "description": "minimum Height of the textarea", "minimum": 0 } }, - "additionalProperties": false + "additionalProperties": false, + "type": "object", + "required": [ + "minHeight" + ], + "title": "TextArea" }, "TmpfsOptions": { - "title": "TmpfsOptions", - "type": "object", "properties": { "SizeBytes": { - "title": "Sizebytes", "type": "integer", + "title": "Sizebytes", "description": "The size for the tmpfs mount in bytes." }, "Mode": { - "title": "Mode", "type": "integer", + "title": "Mode", "description": "The permission mode for the tmpfs mount in an integer." } }, + "type": "object", + "title": "TmpfsOptions", "description": "Optional configuration for the `tmpfs` type." }, "Type": { - "title": "Type", + "type": "string", "enum": [ "tcp", "udp", "sctp" ], - "type": "string", + "title": "Type", "description": "An enumeration." }, "Type1": { - "title": "Type1", + "type": "string", "enum": [ "bind", "volume", "tmpfs", "npipe" ], - "type": "string", + "title": "Type1", "description": " The mount type:\n\n- `bind` a mount of a file or directory from the host into the container.\n- `volume` a docker volume with the given `Name`.\n- `tmpfs` a `tmpfs`.\n- `npipe` a named pipe from the host into the container." 
}, "Ulimit1": { - "title": "Ulimit1", - "type": "object", "properties": { "Name": { - "title": "Name", "type": "string", + "title": "Name", "description": "Name of ulimit" }, "Soft": { - "title": "Soft", "type": "integer", + "title": "Soft", "description": "Soft limit" }, "Hard": { - "title": "Hard", "type": "integer", + "title": "Hard", "description": "Hard limit" } - } + }, + "type": "object", + "title": "Ulimit1" }, "UpdateConfig": { - "title": "UpdateConfig", - "type": "object", "properties": { "Parallelism": { - "title": "Parallelism", "type": "integer", + "title": "Parallelism", "description": "Maximum number of tasks to be updated in one iteration (0 means\nunlimited parallelism).\n" }, "Delay": { - "title": "Delay", "type": "integer", + "title": "Delay", "description": "Amount of time between updates, in nanoseconds." }, "FailureAction": { @@ -2824,13 +2893,13 @@ "description": "Action to take if an updated task fails to run, or stops running\nduring the update.\n" }, "Monitor": { - "title": "Monitor", "type": "integer", + "title": "Monitor", "description": "Amount of time to monitor each updated task for failures, in\nnanoseconds.\n" }, "MaxFailureRatio": { - "title": "Maxfailureratio", "type": "number", + "title": "Maxfailureratio", "description": "The fraction of tasks that may fail during an update before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n", "default": 0 }, @@ -2843,20 +2912,13 @@ "description": "The order of operations when rolling out an updated task. Either\nthe old task is shut down before the new task is started, or the\nnew task is started before the old task is shut down.\n" } }, + "type": "object", + "title": "UpdateConfig", "description": "Specification for the update strategy of the service." }, "ValidationError": { - "title": "ValidationError", - "required": [ - "loc", - "msg", - "type" - ], - "type": "object", "properties": { "loc": { - "title": "Location", - "type": "array", "items": { "anyOf": [ { @@ -2866,55 +2928,58 @@ "type": "integer" } ] - } + }, + "type": "array", + "title": "Location" }, "msg": { - "title": "Message", - "type": "string" + "type": "string", + "title": "Message" }, "type": { - "title": "Error Type", - "type": "string" + "type": "string", + "title": "Error Type" } - } + }, + "type": "object", + "required": [ + "loc", + "msg", + "type" + ], + "title": "ValidationError" }, "VolumeOptions": { - "title": "VolumeOptions", - "type": "object", "properties": { "NoCopy": { - "title": "Nocopy", "type": "boolean", + "title": "Nocopy", "description": "Populate volume with data from the target.", "default": false }, "Labels": { - "title": "Labels", - "type": "object", "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Labels", "description": "User-defined key/value metadata." }, "DriverConfig": { - "title": "Driverconfig", "allOf": [ { "$ref": "#/components/schemas/DriverConfig" } ], + "title": "Driverconfig", "description": "Map of driver specific options" } }, + "type": "object", + "title": "VolumeOptions", "description": "Optional configuration for the `volume` type." 
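Worth flagging while reading `UpdateConfig` above: `Delay` and `Monitor` are expressed in nanoseconds, per the Docker engine API, an easy unit to get wrong when building these payloads by hand. A hypothetical helper (names are illustrative, not from this codebase):

    _NS_PER_SECOND = 10**9

    def make_update_config(parallelism: int, delay_s: float, monitor_s: float) -> dict:
        """Build an UpdateConfig payload, converting seconds to nanoseconds."""
        return {
            "Parallelism": parallelism,
            "Delay": int(delay_s * _NS_PER_SECOND),
            "Monitor": int(monitor_s * _NS_PER_SECOND),
        }

    assert make_update_config(1, delay_s=2.0, monitor_s=5.0)["Delay"] == 2_000_000_000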
}, "Widget": { - "title": "Widget", - "required": [ - "type", - "details" - ], - "type": "object", "properties": { "type": { "allOf": [ @@ -2925,7 +2990,6 @@ "description": "type of the property" }, "details": { - "title": "Details", "anyOf": [ { "$ref": "#/components/schemas/TextArea" @@ -2933,18 +2997,25 @@ { "$ref": "#/components/schemas/SelectBox" } - ] + ], + "title": "Details" } }, - "additionalProperties": false + "additionalProperties": false, + "type": "object", + "required": [ + "type", + "details" + ], + "title": "Widget" }, "WidgetType": { - "title": "WidgetType", + "type": "string", "enum": [ "TextArea", "SelectBox" ], - "type": "string", + "title": "WidgetType", "description": "An enumeration." } } diff --git a/services/catalog/requirements/_test.in b/services/catalog/requirements/_test.in index c220dc0cfef..e215bca4248 100644 --- a/services/catalog/requirements/_test.in +++ b/services/catalog/requirements/_test.in @@ -12,8 +12,8 @@ --constraint _base.txt - alembic # migration due to pytest_simcore.postgres_service +asgi_lifespan click docker Faker diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 0f7ed9e800b..e04a0cffe97 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -15,6 +15,8 @@ anyio==4.3.0 # via # -c requirements/_base.txt # httpx +asgi-lifespan==2.1.0 + # via -r requirements/_test.in async-timeout==4.0.3 # via # -c requirements/_base.txt @@ -168,6 +170,7 @@ sniffio==1.3.1 # via # -c requirements/_base.txt # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.52 # via diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/database.py b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py index d2f318ac96a..2f323db99c5 100644 --- a/services/catalog/src/simcore_service_catalog/api/dependencies/database.py +++ b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py @@ -1,5 +1,6 @@ import logging -from typing import AsyncGenerator, Callable +from collections.abc import AsyncGenerator, Callable +from typing import Annotated from fastapi import Depends from fastapi.requests import Request @@ -7,7 +8,7 @@ from ...db.repositories._base import BaseRepository -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) def _get_db_engine(request: Request) -> AsyncEngine: @@ -18,7 +19,7 @@ def _get_db_engine(request: Request) -> AsyncEngine: def get_repository(repo_type: type[BaseRepository]) -> Callable: async def _get_repo( - engine: AsyncEngine = Depends(_get_db_engine), + engine: Annotated[AsyncEngine, Depends(_get_db_engine)], ) -> AsyncGenerator[BaseRepository, None]: # NOTE: 2 different ideas were tried here with not so good # 1st one was acquiring a connection per repository which lead to the following issue https://github.com/ITISFoundation/osparc-simcore/pull/1966 diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/director.py b/services/catalog/src/simcore_service_catalog/api/dependencies/director.py index f55b55ac68e..8b6b558428d 100644 --- a/services/catalog/src/simcore_service_catalog/api/dependencies/director.py +++ b/services/catalog/src/simcore_service_catalog/api/dependencies/director.py @@ -1,15 +1,13 @@ +from typing import Annotated + from fastapi import Depends, FastAPI -from fastapi.requests import Request +from servicelib.fastapi.dependencies import get_app from ...services.director import DirectorApi -def _get_app(request: Request) -> FastAPI: - return 
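The `asgi-lifespan` test dependency added above is the usual way to exercise a FastAPI app's startup/shutdown handlers in tests without spinning up a server. A minimal sketch of the intended usage; the `app` fixture and the async test runner are assumptions, not shown in this diff:

    import pytest
    from asgi_lifespan import LifespanManager
    from httpx import ASGITransport, AsyncClient

    @pytest.mark.asyncio  # assumes pytest-asyncio or an equivalent runner
    async def test_health(app):  # `app` fixture assumed to build the FastAPI app
        # LifespanManager runs the app's startup handlers on enter
        # and its shutdown handlers on exit
        async with LifespanManager(app):
            async with AsyncClient(
                transport=ASGITransport(app=app), base_url="http://test"
            ) as client:
                response = await client.get("/")
                assert response.status_code == 200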
request.app - - def get_director_api( - app: FastAPI = Depends(_get_app), + app: Annotated[FastAPI, Depends(get_app)], ) -> DirectorApi: director: DirectorApi = app.state.director_api return director diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/services.py b/services/catalog/src/simcore_service_catalog/api/dependencies/services.py index ad51a5324bf..dfea3b84b53 100644 --- a/services/catalog/src/simcore_service_catalog/api/dependencies/services.py +++ b/services/catalog/src/simcore_service_catalog/api/dependencies/services.py @@ -1,10 +1,9 @@ import logging import urllib.parse from dataclasses import dataclass -from typing import Any, cast +from typing import Annotated, Any, cast -from fastapi import Depends, Header, HTTPException, status -from fastapi.requests import Request +from fastapi import Depends, FastAPI, Header, HTTPException, status from models_library.api_schemas_catalog.services import ServiceGet from models_library.api_schemas_catalog.services_specifications import ( ServiceSpecifications, @@ -12,6 +11,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.services_resources import ResourcesDict from pydantic import ValidationError +from servicelib.fastapi.dependencies import get_app from ...core.settings import ApplicationSettings from ...db.repositories.groups import GroupsRepository @@ -21,14 +21,20 @@ from .database import get_repository from .director import get_director_api +_logger = logging.getLogger(__name__) -def get_default_service_resources(request: Request) -> ResourcesDict: - app_settings: ApplicationSettings = request.app.state.settings + +def get_default_service_resources( + app: Annotated[FastAPI, Depends(get_app)] +) -> ResourcesDict: + app_settings: ApplicationSettings = app.state.settings return app_settings.CATALOG_SERVICES_DEFAULT_RESOURCES -def get_default_service_specifications(request: Request) -> ServiceSpecifications: - app_settings: ApplicationSettings = request.app.state.settings +def get_default_service_specifications( + app: Annotated[FastAPI, Depends(get_app)] +) -> ServiceSpecifications: + app_settings: ApplicationSettings = app.state.settings return app_settings.CATALOG_SERVICES_DEFAULT_SPECIFICATIONS @@ -43,8 +49,12 @@ async def check_service_read_access( user_id: int, service_key: ServiceKey, service_version: ServiceVersion, - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], x_simcore_products_name: str = Header(None), ) -> AccessInfo: # get the user's groups @@ -74,13 +84,10 @@ async def check_service_read_access( ) -logger = logging.getLogger(__name__) - - async def get_service_from_registry( service_key: ServiceKey, service_version: ServiceVersion, - director_client: DirectorApi = Depends(get_director_api), + director_client: Annotated[DirectorApi, Depends(get_director_api)], ) -> ServiceGet: """ Retrieves service metadata from the docker registry via the director @@ -105,7 +112,7 @@ async def get_service_from_registry( return service except ValidationError as exc: - logger.warning( + _logger.warning( "Invalid service metadata in registry. 
Audit registry data for %s %s", f"{service_key=}", f"{service_version=}", diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/user_groups.py b/services/catalog/src/simcore_service_catalog/api/dependencies/user_groups.py index 277ad40f3cb..b1b4d23a3de 100644 --- a/services/catalog/src/simcore_service_catalog/api/dependencies/user_groups.py +++ b/services/catalog/src/simcore_service_catalog/api/dependencies/user_groups.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import Depends, Query from models_library.groups import GroupAtDB from models_library.users import UserID @@ -7,12 +9,15 @@ async def list_user_groups( - user_id: UserID - | None = Query( - default=None, - description="if passed, and that user has custom resources, " - "they will be merged with default resources and returned.", - ), - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + user_id: Annotated[ + UserID | None, + Query( + description="if passed, and that user has custom resources, " + "they will be merged with default resources and returned.", + ), + ] = None, ) -> list[GroupAtDB]: return await groups_repository.list_user_groups(user_id) if user_id else [] diff --git a/services/catalog/src/simcore_service_catalog/api/rest.py b/services/catalog/src/simcore_service_catalog/api/rest.py new file mode 100644 index 00000000000..8b534edcfd3 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/api/rest.py @@ -0,0 +1,61 @@ +from enum import Enum + +from fastapi import APIRouter + +from .routes import ( + health, + meta, + services, + services_access_rights, + services_ports, + services_resources, + services_specifications, +) + +v0_router = APIRouter() + +# health +v0_router.include_router( + health.router, + tags=["diagnostics"], +) + +# meta +v0_router.include_router( + meta.router, + tags=["meta"], + prefix="/meta", +) + +# services +_SERVICE_PREFIX = "/services" +_SERVICE_TAGS: list[str | Enum] = [ + "services", +] +v0_router.include_router( + services_resources.router, + tags=_SERVICE_TAGS, + prefix=_SERVICE_PREFIX, +) +v0_router.include_router( + services_specifications.router, + tags=_SERVICE_TAGS, + prefix=_SERVICE_PREFIX, +) +v0_router.include_router( + services_ports.router, + tags=_SERVICE_TAGS, + prefix=_SERVICE_PREFIX, +) +v0_router.include_router( + services_access_rights.router, + tags=_SERVICE_TAGS, + prefix=_SERVICE_PREFIX, +) + +# NOTE: that this router must come after resources/specifications/ports/access_rights +v0_router.include_router( + services.router, + tags=_SERVICE_TAGS, + prefix=_SERVICE_PREFIX, +) diff --git a/services/catalog/src/simcore_service_catalog/api/root.py b/services/catalog/src/simcore_service_catalog/api/root.py deleted file mode 100644 index 4efd75f9167..00000000000 --- a/services/catalog/src/simcore_service_catalog/api/root.py +++ /dev/null @@ -1,38 +0,0 @@ -from fastapi import APIRouter - -from .routes import ( - health, - meta, - services, - services_access_rights, - services_ports, - services_resources, - services_specifications, -) - -router = APIRouter() -router.include_router(health.router) - -# API -router.include_router(meta.router, tags=["meta"], prefix="/meta") - -SERVICE_PREFIX = "/services" -SERVICE_TAGS = [ - "services", -] - -router.include_router( - services_resources.router, tags=SERVICE_TAGS, prefix=SERVICE_PREFIX -) -router.include_router( - services_specifications.router, 
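The recurring `Annotated` rewrite in these dependency modules is not cosmetic: with `Annotated[..., Depends(...)]` and `Annotated[..., Query(...)]`, defaults become plain Python defaults again, so parameters without defaults (the repositories) can be listed before parameters with them. A standalone toy endpoint showing the shape (hypothetical names, not from the repo):

    from typing import Annotated

    from fastapi import Depends, FastAPI, Query

    app = FastAPI()

    def get_repo() -> dict:
        return {}  # stand-in for a real repository dependency

    @app.get("/groups")
    async def list_groups(
        repo: Annotated[dict, Depends(get_repo)],
        # the default now lives outside Query(), so parameter ordering stays natural
        user_id: Annotated[int | None, Query(description="optional filter")] = None,
    ) -> list[str]:
        return [] if user_id is None else [f"groups-of-{user_id}"]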
tags=SERVICE_TAGS, prefix=SERVICE_PREFIX -) - -router.include_router(services_ports.router, tags=SERVICE_TAGS, prefix=SERVICE_PREFIX) - -router.include_router( - services_access_rights.router, tags=SERVICE_TAGS, prefix=SERVICE_PREFIX -) - -# NOTE: that this router must come after resources/specifications/ports/access_rights -router.include_router(services.router, tags=SERVICE_TAGS, prefix=SERVICE_PREFIX) diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py index 93d03dfb3a8..aa59a59181a 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/health.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py @@ -5,6 +5,6 @@ router = APIRouter() -@router.get("/", include_in_schema=False) +@router.get("/") async def check_service_health(): return f"{__name__}@{datetime.datetime.now(tz=datetime.timezone.utc).isoformat()}" diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services.py b/services/catalog/src/simcore_service_catalog/api/routes/services.py index 095c09a7252..e7122dcb274 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services.py @@ -3,7 +3,7 @@ import asyncio import logging import urllib.parse -from typing import Any, TypeAlias, cast +from typing import Annotated, Any, TypeAlias, cast from aiocache import cached from fastapi import APIRouter, Depends, Header, HTTPException, status @@ -12,13 +12,13 @@ from models_library.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB from pydantic import ValidationError from pydantic.types import PositiveInt +from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette.requests import Request from ...db.repositories.groups import GroupsRepository from ...db.repositories.services import ServicesRepository from ...services.director import DirectorApi from ...services.function_services import is_function_service -from ...utils.requests_decorators import cancellable_request from ..dependencies.database import get_repository from ..dependencies.director import get_director_api from ..dependencies.services import get_service_from_registry @@ -76,7 +76,7 @@ def _build_cache_key(fct, *_, **kwargs): # (when e2e runs or by the webserver when listing projects) therefore # a cache is setup here @router.get("", response_model=list[ServiceGet], **RESPONSE_MODEL_POLICY) -@cancellable_request +@cancel_on_disconnect @cached( ttl=LIST_SERVICES_CACHING_TTL, key_builder=_build_cache_key, @@ -84,11 +84,15 @@ def _build_cache_key(fct, *_, **kwargs): async def list_services( request: Request, # pylint:disable=unused-argument user_id: PositiveInt, - details: bool | None = True, - director_client: DirectorApi = Depends(get_director_api), - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), - x_simcore_products_name: str = Header(...), + director_client: Annotated[DirectorApi, Depends(get_director_api)], + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], + x_simcore_products_name: Annotated[str, Header(...)], + details: bool | None = True, # noqa: FBT002 ): # Access layer user_groups = await groups_repository.list_user_groups(user_id) @@ -116,7 
+120,7 @@ async def list_services( # FIXME: add name, ddescription, type, etc... # NOTE: here validation is not necessary since key,version were already validated # in terms of time, this takes the most - services_overview = [ + return [ ServiceGet.construct( key=key, version=version, @@ -131,7 +135,6 @@ async def list_services( ) for key, version in services_in_db ] - return services_overview # caching this steps brings down the time to generate it at the expense of being sometimes a bit out of date @cached(ttl=DIRECTOR_CACHING_TTL) @@ -187,9 +190,13 @@ async def cached_registry_services() -> dict[str, Any]: ) async def get_service( user_id: int, - service: ServiceGet = Depends(get_service_from_registry), - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), + service: Annotated[ServiceGet, Depends(get_service_from_registry)], + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], x_simcore_products_name: str = Header(None), ): # get the user groups @@ -255,10 +262,14 @@ async def update_service( service_key: ServiceKey, service_version: ServiceVersion, updated_service: ServiceUpdate, - director_client: DirectorApi = Depends(get_director_api), - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), - x_simcore_products_name: str = Header(None), + director_client: Annotated[DirectorApi, Depends(get_director_api)], + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], + x_simcore_products_name: Annotated[str | None, Header()] = None, ): if is_function_service(service_key): # NOTE: this is a temporary decision after discussing with OM diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services_access_rights.py b/services/catalog/src/simcore_service_catalog/api/routes/services_access_rights.py index 644b019ff93..e9058418b6a 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services_access_rights.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services_access_rights.py @@ -1,4 +1,5 @@ import logging +from typing import Annotated from fastapi import APIRouter, Depends, Header from models_library.api_schemas_catalog.service_access_rights import ( @@ -12,12 +13,9 @@ from ..dependencies.services import AccessInfo, check_service_read_access from ._constants import RESPONSE_MODEL_POLICY -# -# Routes ----------------------------------------------------------------------------------------------- -# +_logger = logging.getLogger(__name__) router = APIRouter() -logger = logging.getLogger(__name__) @router.get( @@ -29,9 +27,11 @@ async def get_service_access_rights( service_key: ServiceKey, service_version: ServiceVersion, - _user: AccessInfo = Depends(check_service_read_access), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), - x_simcore_products_name: str = Header(...), + _user: Annotated[AccessInfo, Depends(check_service_read_access)], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], + x_simcore_products_name: Annotated[str, Header(...)], ): service_access_rights: 
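Headers follow the same convention in the hunks above: `Annotated[str, Header(...)]` keeps a header required, while `Annotated[str | None, Header()] = None` makes it optional. A toy endpoint illustrating both forms (names are made up):

    from typing import Annotated

    from fastapi import FastAPI, Header

    app = FastAPI()

    @app.get("/whoami")
    async def whoami(
        x_simcore_products_name: Annotated[str, Header()],   # required header
        x_trace_id: Annotated[str | None, Header()] = None,  # optional header
    ) -> dict:
        return {"product": x_simcore_products_name, "trace": x_trace_id}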
list[ ServiceAccessRightsAtDB diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services_ports.py b/services/catalog/src/simcore_service_catalog/api/routes/services_ports.py index 24e0386b1c0..b58699866a7 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services_ports.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services_ports.py @@ -1,4 +1,5 @@ import logging +from typing import Annotated from fastapi import APIRouter, Depends from models_library.api_schemas_catalog.services import ServiceGet @@ -13,11 +14,6 @@ _logger = logging.getLogger(__name__) - -# -# Routes ----------------------------------------------------------------------------------------------- -# - router = APIRouter() @@ -28,8 +24,8 @@ **RESPONSE_MODEL_POLICY, ) async def list_service_ports( - _user: AccessInfo = Depends(check_service_read_access), - service: ServiceGet = Depends(get_service_from_registry), + _user: Annotated[AccessInfo, Depends(check_service_read_access)], + service: Annotated[ServiceGet, Depends(get_service_from_registry)], ): ports: list[ServicePortGet] = [] diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services_resources.py b/services/catalog/src/simcore_service_catalog/api/routes/services_resources.py index b3848fa7b9b..141f85c98b2 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services_resources.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services_resources.py @@ -1,7 +1,7 @@ import logging import urllib.parse from copy import deepcopy -from typing import Any, Final, cast +from typing import Annotated, Any, Final, cast import yaml from fastapi import APIRouter, Depends, HTTPException, status @@ -36,7 +36,7 @@ from ._constants import RESPONSE_MODEL_POLICY, SIMCORE_SERVICE_SETTINGS_LABELS router = APIRouter() -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) SIMCORE_SERVICE_COMPOSE_SPEC_LABEL: Final[str] = "simcore.service.compose-spec" _DEPRECATED_RESOURCES: Final[list[str]] = ["MPI"] @@ -59,7 +59,7 @@ def _compute_service_available_boot_modes( generic_resources: ResourcesDict = {} for entry in resource_entries: if not isinstance(entry.value, dict): - logger.warning( + _logger.warning( "resource %s for %s got invalid type", f"{entry.dict()!r}", f"{service_key}:{service_version}", @@ -97,7 +97,7 @@ def _resources_from_settings( service_resources = deepcopy(default_service_resources) for entry in resource_entries: if not isinstance(entry.value, dict): - logger.warning( + _logger.warning( "resource %s for %s got invalid type", f"{entry.dict()!r}", f"{service_key}:{service_version}", @@ -137,7 +137,7 @@ async def _get_service_labels( f"/services/{urllib.parse.quote_plus(key)}/{version}/labels" ), ) - logger.debug( + _logger.debug( "received for %s %s", f"/services/{urllib.parse.quote_plus(key)}/{version}/labels", f"{service_labels=}", @@ -150,7 +150,7 @@ async def _get_service_labels( # and will fail validating the key or the version if err.status_code == status.HTTP_400_BAD_REQUEST: return None - raise err + raise def _get_service_settings( @@ -160,7 +160,7 @@ def _get_service_settings( list[SimcoreServiceSettingLabelEntry], labels.get(SIMCORE_SERVICE_SETTINGS_LABELS, "[]"), ) - logger.debug("received %s", f"{service_settings=}") + _logger.debug("received %s", f"{service_settings=}") return service_settings @@ -172,10 +172,14 @@ def _get_service_settings( async def get_service_resources( service_key: ServiceKey, service_version: ServiceVersion, - 
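The `raise err` to bare `raise` change in `_get_service_labels` above is a small but real improvement: a bare `raise` re-raises the active exception as-is, whereas `raise err` appends the re-raise line to the traceback (and trips linters such as ruff's TRY201). Minimal illustration:

    def lookup(key: str) -> str:
        try:
            return {"a": "1"}[key]
        except KeyError:
            if key == "optional":
                return ""
            raise  # bare raise: original traceback, no extra frame from rebinding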
director_client: DirectorApi = Depends(get_director_api), - default_service_resources: ResourcesDict = Depends(get_default_service_resources), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), - user_groups: list[GroupAtDB] = Depends(list_user_groups), + director_client: Annotated[DirectorApi, Depends(get_director_api)], + default_service_resources: Annotated[ + ResourcesDict, Depends(get_default_service_resources) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], + user_groups: Annotated[list[GroupAtDB], Depends(list_user_groups)], ) -> ServiceResourcesDict: image_version = parse_obj_as(DockerGenericTag, f"{service_key}:{service_version}") if is_function_service(service_key): @@ -196,7 +200,7 @@ async def get_service_resources( ComposeSpecLabelDict | None, service_labels.get(SIMCORE_SERVICE_COMPOSE_SPEC_LABEL, "null"), ) - logger.debug("received %s", f"{service_spec=}") + _logger.debug("received %s", f"{service_spec=}") if service_spec is None: # no compose specifications -> single service diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services_specifications.py b/services/catalog/src/simcore_service_catalog/api/routes/services_specifications.py index 7b2461d050c..bd398f25308 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services_specifications.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services_specifications.py @@ -1,4 +1,5 @@ import logging +from typing import Annotated from fastapi import APIRouter, Depends, HTTPException, Query, status from models_library.api_schemas_catalog.services_specifications import ( @@ -28,15 +29,22 @@ async def get_service_specifications( user_id: UserID, service_key: ServiceKey, service_version: ServiceVersion, - strict: bool = Query( - False, - description="if True only the version specs will be retrieved, if False the latest version will be used instead", - ), - groups_repository: GroupsRepository = Depends(get_repository(GroupsRepository)), - services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), - default_service_specifications: ServiceSpecifications = Depends( - get_default_service_specifications - ), + groups_repository: Annotated[ + GroupsRepository, Depends(get_repository(GroupsRepository)) + ], + services_repo: Annotated[ + ServicesRepository, Depends(get_repository(ServicesRepository)) + ], + default_service_specifications: Annotated[ + ServiceSpecifications, Depends(get_default_service_specifications) + ], + *, + strict: Annotated[ + bool, + Query( + description="if True only the version specs will be retrieved, if False the latest version will be used instead", + ), + ] = False, ): _logger.debug("getting specifications for '%s:%s'", service_key, service_version) diff --git a/services/catalog/src/simcore_service_catalog/cli.py b/services/catalog/src/simcore_service_catalog/cli.py index a0ab31715e2..1e04bbdb763 100644 --- a/services/catalog/src/simcore_service_catalog/cli.py +++ b/services/catalog/src/simcore_service_catalog/cli.py @@ -1,10 +1,13 @@ import logging +import os import typer -from settings_library.utils_cli import create_settings_command +from settings_library.http_client_request import ClientRequestSettings +from settings_library.postgres import PostgresSettings +from settings_library.utils_cli import create_settings_command, print_as_envfile from ._meta import PROJECT_NAME -from .core.settings import ApplicationSettings +from .core.settings import 
ApplicationSettings, DirectorSettings _logger = logging.getLogger(__name__) @@ -24,3 +27,52 @@ def run(): "$ uvicorn simcore_service_catalog.main:the_app", fg=typer.colors.BLUE, ) + + +@main.command() +def echo_dotenv(ctx: typer.Context, *, minimal: bool = True) -> None: + """Generates and displays a valid environment variables file (also known as dot-envfile) + + Usage: + $ simcore-service echo-dotenv > .env + $ cat .env + $ set -o allexport; source .env; set +o allexport + """ + assert ctx # nosec + + # NOTE: we normally DO NOT USE `os.environ` to capture env vars but this is a special case + # The idea here is to have a command that can generate a **valid** `.env` file that can be used + # to initialized the app. For that reason we fill required fields of the `ApplicationSettings` with + # "fake" but valid values (e.g. generating a password or adding tags as `replace-with-api-key). + # Nonetheless, if the caller of this CLI has already some **valid** env vars in the environment we want to use them ... + # and that is why we use `os.environ`. + + settings = ApplicationSettings.create_from_envs( + CATALOG_POSTGRES=os.environ.get( + "CATALOG_POSTGRES", + PostgresSettings.create_from_envs( + POSTGRES_HOST=os.environ.get( + "POSTGRES_HOST", "replace-with-postgres-host" + ), + POSTGRES_USER=os.environ.get( + "POSTGRES_USER", "replace-with-postgres-user" + ), + POSTGRES_DB=os.environ.get("POSTGRES_DB", "replace-with-postgres-db"), + POSTGRES_PASSWORD=os.environ.get( + "POSTGRES_PASSWORD", "replace-with-postgres-password" + ), + ), + ), + CATALOG_DIRECTOR=DirectorSettings.create_from_envs( + DIRECTOR_HOST=os.environ.get("DIRECTOR_HOST", "fake-director") + ), + CATALOG_CLIENT_REQUEST=ClientRequestSettings.create_from_envs(), + ) + + print_as_envfile( + settings, + compact=False, + verbose=True, + show_secrets=True, + exclude_unset=minimal, + ) diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index 6a0ec7f882c..68b1a523a90 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -1,51 +1,33 @@ import logging -import time -from typing import Callable -from fastapi import FastAPI, Request -from fastapi.exceptions import RequestValidationError +from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware from models_library.basic_types import BootModeEnum +from servicelib.fastapi import timing_middleware from servicelib.fastapi.openapi import override_fastapi_openapi_method from servicelib.fastapi.profiler_middleware import ProfilerMiddleware from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.logging_utils import config_all_loggers -from starlette import status -from starlette.exceptions import HTTPException from starlette.middleware.base import BaseHTTPMiddleware -from .._meta import ( - API_VERSION, - API_VTAG, - APP_FINISHED_BANNER_MSG, - APP_STARTED_BANNER_MSG, - PROJECT_NAME, - SUMMARY, -) -from ..api.errors.http_error import ( - http_error_handler, - make_http_error_handler_for_exception, -) -from ..api.errors.validation_error import http422_error_handler -from ..api.root import router as api_router +from .._meta import API_VERSION, API_VTAG, PROJECT_NAME, SUMMARY +from ..api import rest from ..api.routes.health import router as health_router +from ..exceptions.handlers import setup_exception_handlers from 
..services.function_services import setup_function_services -from .events import create_start_app_handler, create_stop_app_handler +from .events import create_on_shutdown, create_on_startup from .settings import ApplicationSettings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def init_app(settings: ApplicationSettings | None = None) -> FastAPI: +def create_app(settings: ApplicationSettings | None = None) -> FastAPI: if settings is None: settings = ApplicationSettings.create_from_envs() + assert settings # nosec - logging.basicConfig(level=settings.CATALOG_LOG_LEVEL.value) - logging.root.setLevel(settings.CATALOG_LOG_LEVEL.value) - config_all_loggers(settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED) - logger.debug(settings.json(indent=2)) + _logger.debug(settings.json(indent=2)) app = FastAPI( debug=settings.SC_BOOT_MODE @@ -62,66 +44,37 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: # STATE app.state.settings = settings + # STARTUP-EVENT + app.add_event_handler("startup", create_on_startup(app)) + # PLUGIN SETUP setup_function_services(app) if app.state.settings.CATALOG_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) + # MIDDLEWARES if app.state.settings.CATALOG_PROFILING: app.add_middleware(ProfilerMiddleware) - # EVENTS - async def _on_startup() -> None: - print(APP_STARTED_BANNER_MSG, flush=True) - - async def _on_shutdown() -> None: - print(APP_FINISHED_BANNER_MSG, flush=True) - - app.add_event_handler("startup", _on_startup) - app.add_event_handler("startup", create_start_app_handler(app)) - - app.add_event_handler("shutdown", create_stop_app_handler(app)) - app.add_event_handler("shutdown", _on_shutdown) + if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: + # middleware to time requests (ONLY for development) + app.add_middleware( + BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header + ) - # ERROR HANDLERS - app.add_exception_handler(HTTPException, http_error_handler) - app.add_exception_handler(RequestValidationError, http422_error_handler) - # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy - app.add_exception_handler( - NotImplementedError, - make_http_error_handler_for_exception( - status.HTTP_501_NOT_IMPLEMENTED, NotImplementedError - ), - ) - app.add_exception_handler( - Exception, - make_http_error_handler_for_exception( - status.HTTP_500_INTERNAL_SERVER_ERROR, Exception - ), - ) + app.add_middleware(GZipMiddleware) - # ROUTING + # ROUTES # healthcheck at / and at /v0/ app.include_router(health_router) - # api under /v* - app.include_router(api_router, prefix=f"/{API_VTAG}") + app.include_router(rest.v0_router, prefix=f"/{API_VTAG}") - # MIDDLEWARES - # middleware to time requests (ONLY for development) - if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: - - async def _add_process_time_header(request: Request, call_next: Callable): - start_time = time.time() - response = await call_next(request) - process_time = time.time() - start_time - response.headers["X-Process-Time"] = str(process_time) - return response + # SHUTDOWN-EVENT + app.add_event_handler("shutdown", create_on_shutdown(app)) - app.add_middleware(BaseHTTPMiddleware, dispatch=_add_process_time_header) - - # gzip middleware - app.add_middleware(GZipMiddleware) + # EXCEPTIONS + setup_exception_handlers(app) return app diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index 
757cf0efec7..8461c7f3923 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -13,7 +13,7 @@ import logging from contextlib import suppress from pprint import pformat -from typing import Any, Final, cast +from typing import Any, Final, NewType, TypeAlias, cast from fastapi import FastAPI from models_library.function_services_catalog.api import iter_service_docker_data @@ -29,12 +29,14 @@ from ..db.repositories.services import ServicesRepository from ..services import access_rights -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) # NOTE: by PC I tried to unify with models_library.services but there are other inconsistencies so I leave if for another time! -ServiceKey = str -ServiceVersion = str -ServiceDockerDataMap = dict[tuple[ServiceKey, ServiceVersion], ServiceDockerData] +ServiceKey = NewType("ServiceKey", str) +ServiceVersion = NewType("ServiceVersion", str) +ServiceDockerDataMap: TypeAlias = dict[ + tuple[ServiceKey, ServiceVersion], ServiceDockerData +] async def _list_services_in_registry( @@ -53,12 +55,12 @@ async def _list_services_in_registry( service_data = ServiceDockerData.parse_obj(service) services[(service_data.key, service_data.version)] = service_data - except ValidationError as exc: - logger.warning( - "Skipping %s:%s from the catalog of services:\n%s", + except ValidationError: # noqa: PERF203 + _logger.warning( + "Skipping %s:%s from the catalog of services:", service.get("key"), service.get("version"), - exc, + exc_info=True, ) return services @@ -137,7 +139,7 @@ async def _ensure_registry_and_database_are_synced(app: FastAPI) -> None: # check that the db has all the services at least once missing_services_in_db = set(services_in_registry.keys()) - services_in_db if missing_services_in_db: - logger.debug( + _logger.debug( "Missing services in db: %s", pformat(missing_services_in_db), ) @@ -182,7 +184,7 @@ async def _ensure_published_templates_accessible( for service in missing_services ] if missing_services_access_rights: - logger.info( + _logger.info( "Adding access rights for published templates\n: %s", missing_services_access_rights, ) @@ -195,7 +197,7 @@ async def _sync_services_task(app: FastAPI) -> None: while app.state.registry_syncer_running: try: - logger.debug("Syncing services between registry and database...") + _logger.debug("Syncing services between registry and database...") # check that the list of services is in sync with the registry await _ensure_registry_and_database_are_synced(app) @@ -206,16 +208,16 @@ async def _sync_services_task(app: FastAPI) -> None: await asyncio.sleep(app.state.settings.CATALOG_BACKGROUND_TASK_REST_TIME) - except asyncio.CancelledError: + except asyncio.CancelledError: # noqa: PERF203 # task is stopped - logger.info("registry syncing task cancelled") + _logger.info("registry syncing task cancelled") raise except Exception: # pylint: disable=broad-except if not app.state.registry_syncer_running: - logger.warning("registry syncing task forced to stop") + _logger.warning("registry syncing task forced to stop") break - logger.exception( + _logger.exception( "Unexpected error while syncing registry entries, restarting now..." 
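The sync loop above, together with its start/stop helpers just below, follows the standard asyncio recipe for a cancellable periodic task: keep the `Task` on app state, `cancel()` it, then await it while the task re-raises `CancelledError`. Stripped of FastAPI, the skeleton looks like this (illustrative only):

    import asyncio
    import contextlib

    async def _periodic(interval: float) -> None:
        while True:
            try:
                ...  # one sync pass would go here
                await asyncio.sleep(interval)
            except asyncio.CancelledError:
                raise  # re-raise so the awaiting side observes the cancellation

    async def main() -> None:
        task = asyncio.create_task(_periodic(1.0))
        await asyncio.sleep(0.1)
        task.cancel()
        with contextlib.suppress(asyncio.CancelledError):
            await task

    asyncio.run(main())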
) # wait a bit before retrying, so it does not block everything until the director is up @@ -232,7 +234,7 @@ async def start_registry_sync_task(app: FastAPI) -> None: app.state.registry_syncer_running = True task = asyncio.create_task(_sync_services_task(app)) app.state.registry_sync_task = task - logger.info("registry syncing task started") + _logger.info("registry syncing task started") async def stop_registry_sync_task(app: FastAPI) -> None: @@ -242,4 +244,4 @@ async def stop_registry_sync_task(app: FastAPI) -> None: task.cancel() await task app.state.registry_sync_task = None - logger.info("registry syncing task stopped") + _logger.info("registry syncing task stopped") diff --git a/services/catalog/src/simcore_service_catalog/core/errors.py b/services/catalog/src/simcore_service_catalog/core/errors.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py index 9c7e4770b3d..8b668610f88 100644 --- a/services/catalog/src/simcore_service_catalog/core/events.py +++ b/services/catalog/src/simcore_service_catalog/core/events.py @@ -1,19 +1,24 @@ import logging -from typing import Callable +from collections.abc import Awaitable, Callable +from typing import TypeAlias from fastapi import FastAPI from servicelib.db_async_engine import close_db_connection, connect_to_db +from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG from ..db.events import setup_default_product from ..services.director import close_director, setup_director from .background_tasks import start_registry_sync_task, stop_registry_sync_task -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def create_start_app_handler(app: FastAPI) -> Callable: - async def start_app() -> None: - logger.info("Application started") +EventCallable: TypeAlias = Callable[[], Awaitable[None]] + + +def create_on_startup(app: FastAPI) -> EventCallable: + async def _() -> None: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 # setup connection to pg db if app.state.settings.CATALOG_POSTGRES: @@ -28,20 +33,23 @@ async def start_app() -> None: # SEE https://github.com/ITISFoundation/osparc-simcore/issues/1728 await start_registry_sync_task(app) - return start_app + _logger.info("Application started") + + return _ -def create_stop_app_handler(app: FastAPI) -> Callable: - async def stop_app() -> None: - logger.info("Application stopping") +def create_on_shutdown(app: FastAPI) -> EventCallable: + async def _() -> None: + _logger.info("Application stopping") + if app.state.settings.CATALOG_DIRECTOR: try: await stop_registry_sync_task(app) await close_director(app) await close_db_connection(app) except Exception: # pylint: disable=broad-except - logger.exception( - "Unexpected error while closing application", exc_info=True - ) + _logger.exception("Unexpected error while closing application") + + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 - return stop_app + return _ diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 24902065c94..d888932cf79 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -13,7 +13,7 @@ from settings_library.postgres import PostgresSettings from settings_library.utils_logging import MixinLoggingSettings -logger = logging.getLogger(__name__) +_logger = 
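`create_on_startup`/`create_on_shutdown` above keep the event-handler style, just with tighter typing. For contrast only, the equivalent wiring with FastAPI's newer `lifespan` context manager would look roughly as follows; this is an alternative the PR does not adopt:

    from collections.abc import AsyncIterator
    from contextlib import asynccontextmanager

    from fastapi import FastAPI

    @asynccontextmanager
    async def lifespan(app: FastAPI) -> AsyncIterator[None]:
        # startup: connect to the db, start the registry sync task, ...
        yield
        # shutdown: stop the sync task, close the db connection, ...

    app = FastAPI(lifespan=lifespan)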
logging.getLogger(__name__) class DirectorSettings(BaseCustomSettings): @@ -29,7 +29,10 @@ def base_url(self) -> str: _DEFAULT_RESOURCES: Final[ResourcesDict] = parse_obj_as( ResourcesDict, { - "CPU": {"limit": 0.1, "reservation": 0.1}, + "CPU": { + "limit": 0.1, + "reservation": 0.1, + }, "RAM": { "limit": parse_obj_as(ByteSize, "2Gib"), "reservation": parse_obj_as(ByteSize, "2Gib"), diff --git a/services/catalog/src/simcore_service_catalog/db/events.py b/services/catalog/src/simcore_service_catalog/db/events.py index c917ade7240..42de4c38620 100644 --- a/services/catalog/src/simcore_service_catalog/db/events.py +++ b/services/catalog/src/simcore_service_catalog/db/events.py @@ -4,7 +4,7 @@ from .repositories.products import ProductsRepository -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) async def setup_default_product(app: FastAPI): diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/groups.py b/services/catalog/src/simcore_service_catalog/db/repositories/groups.py index 347f7eb48f7..81491c58a81 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/groups.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/groups.py @@ -1,4 +1,4 @@ -from typing import Optional, cast +from typing import cast import sqlalchemy as sa from models_library.emails import LowerCaseEmailStr @@ -6,7 +6,7 @@ from pydantic import parse_obj_as from pydantic.types import PositiveInt -from ..errors import RepositoryError +from ...exceptions.db_errors import RepositoryError from ..tables import GroupType, groups, user_to_groups, users from ._base import BaseRepository @@ -32,7 +32,8 @@ async def get_everyone_group(self) -> GroupAtDB: ) row = result.first() if not row: - raise RepositoryError(f"{GroupType.EVERYONE} groups was never initialized") + msg = f"{GroupType.EVERYONE} groups was never initialized" + raise RepositoryError(msg) return GroupAtDB.from_orm(row) async def get_user_gid_from_email( @@ -40,7 +41,7 @@ async def get_user_gid_from_email( ) -> PositiveInt | None: async with self.db_engine.connect() as conn: return cast( - Optional[PositiveInt], + PositiveInt | None, await conn.scalar( sa.select(users.c.primary_gid).where(users.c.email == user_email) ), @@ -49,7 +50,7 @@ async def get_user_gid_from_email( async def get_gid_from_affiliation(self, affiliation: str) -> PositiveInt | None: async with self.db_engine.connect() as conn: return cast( - Optional[PositiveInt], + PositiveInt | None, await conn.scalar( sa.select(groups.c.gid).where(groups.c.name == affiliation) ), diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/projects.py b/services/catalog/src/simcore_service_catalog/db/repositories/projects.py index 8d3c35c32f0..5b2b2d1bbfe 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/projects.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/projects.py @@ -7,7 +7,7 @@ from ..tables import ProjectType, projects from ._base import BaseRepository -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) class ProjectsRepository(BaseRepository): @@ -17,7 +17,7 @@ async def list_services_from_published_templates(self) -> list[ServiceKeyVersion async for row in await conn.stream( sa.select(projects).where( (projects.c.type == ProjectType.TEMPLATE) - & (projects.c.published == True) + & (projects.c.published.is_(True)) ) ): project_workbench = row.workbench @@ -32,7 +32,7 @@ async def list_services_from_published_templates(self) -> 
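The `projects.c.published == True` to `projects.c.published.is_(True)` change above swaps an equality comparison (flagged by linters as E712) for an explicit SQL `IS true`. Self-contained rendering check:

    import sqlalchemy as sa

    metadata = sa.MetaData()
    projects = sa.Table(
        "projects",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("published", sa.Boolean, nullable=True),
    )

    # Renders "... WHERE projects.published IS true" instead of "= true"
    stmt = sa.select(projects).where(projects.c.published.is_(True))
    print(stmt)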
list[ServiceKeyVersion continue list_of_published_services.append(ServiceKeyVersion(**service)) except ValidationError: - logger.warning( + _logger.warning( "service %s could not be validated", service, exc_info=True ) continue diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/services.py b/services/catalog/src/simcore_service_catalog/db/repositories/services.py index 70d60fbf991..dfd84a5777f 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/services.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/services.py @@ -26,7 +26,7 @@ from ..tables import services_access_rights, services_meta_data, services_specifications from ._base import BaseRepository -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) def _make_list_services_query( @@ -66,6 +66,28 @@ def _make_list_services_query( return query +def _is_newer( + old: ServiceSpecificationsAtDB | None, + new: ServiceSpecificationsAtDB, +) -> bool: + return old is None or ( + packaging.version.parse(old.service_version) + < packaging.version.parse(new.service_version) + ) + + +def _merge_specs( + everyone_spec: ServiceSpecificationsAtDB | None, + team_specs: dict[GroupID, ServiceSpecificationsAtDB], + user_spec: ServiceSpecificationsAtDB | None, +) -> dict[str, Any]: + merged_spec = {} + for spec in chain([everyone_spec], team_specs.values(), [user_spec]): + if spec is not None: + merged_spec.update(spec.dict(include={"sidecar", "service"})) + return merged_spec + + class ServicesRepository(BaseRepository): """ API that operates on services_access_rights and services_meta_data tables @@ -80,20 +102,20 @@ async def list_services( combine_access_with_and: bool | None = True, product_name: str | None = None, ) -> list[ServiceMetaDataAtDB]: - services_in_db = [] async with self.db_engine.connect() as conn: - async for row in await conn.stream( - _make_list_services_query( - gids, - execute_access, - write_access, - combine_access_with_and, - product_name, + return [ + ServiceMetaDataAtDB.from_orm(row) + async for row in await conn.stream( + _make_list_services_query( + gids, + execute_access, + write_access, + combine_access_with_and, + product_name, + ) ) - ): - services_in_db.append(ServiceMetaDataAtDB.from_orm(row)) - return services_in_db + ] async def list_service_releases( self, @@ -132,10 +154,11 @@ async def list_service_releases( if limit_count and limit_count > 0: query = query.limit(limit_count) - releases = [] async with self.db_engine.connect() as conn: - async for row in await conn.stream(query): - releases.append(ServiceMetaDataAtDB.from_orm(row)) + releases = [ + ServiceMetaDataAtDB.from_orm(row) + async for row in await conn.stream(query) + ] # Now sort naturally from latest first: (This is lame, the sorting should be done in the db) def _by_version(x: ServiceMetaDataAtDB) -> packaging.version.Version: @@ -266,7 +289,6 @@ async def get_service_access_rights( """ - If product_name is not specified, then all are considered in the query """ - services_in_db = [] search_expression = (services_access_rights.c.key == key) & ( services_access_rights.c.version == version ) @@ -276,9 +298,10 @@ async def get_service_access_rights( query = sa.select(services_access_rights).where(search_expression) async with self.db_engine.connect() as conn: - async for row in await conn.stream(query): - services_in_db.append(ServiceAccessRightsAtDB.from_orm(row)) - return services_in_db + return [ + ServiceAccessRightsAtDB.from_orm(row) + async for row in await
conn.stream(query) + ] async def list_services_access_rights( self, @@ -335,7 +358,7 @@ async def upsert_service_access_rights( result = await conn.execute(on_update_stmt) assert result # nosec except ForeignKeyViolation: - logger.warning( + _logger.warning( "The service %s:%s is missing from services_meta_data", rights.key, rights.version, @@ -361,6 +384,7 @@ async def get_service_specifications( key: ServiceKey, version: ServiceVersion, groups: tuple[GroupAtDB, ...], + *, allow_use_latest_service_version: bool = False, ) -> ServiceSpecifications | None: """returns the service specifications for service 'key:version' and for 'groups' @@ -368,7 +392,7 @@ async def get_service_specifications( :param allow_use_latest_service_version: if True, then the latest version of the specs will be returned, defaults to False """ - logger.debug( + _logger.debug( "getting specifications from db for %s", f"{key}:{version} for {groups=}" ) gid_to_group_map = {group.gid: group for group in groups} @@ -392,7 +416,7 @@ async def get_service_specifications( ), ): try: - logger.debug("found following %s", f"{row=}") + _logger.debug("found following %s", f"{row=}") # validate the specs first db_service_spec = ServiceSpecificationsAtDB.from_orm(row) db_spec_version = packaging.version.parse( @@ -421,7 +445,7 @@ async def get_service_specifications( primary_specs = db_service_spec except ValidationError as exc: - logger.warning( + _logger.warning( "skipping service specifications for group '%s' as invalid: %s", f"{row.gid}", f"{exc}", @@ -432,25 +456,3 @@ async def get_service_specifications( ): return ServiceSpecifications.parse_obj(merged_specifications) return None # mypy - - -def _is_newer( - old: ServiceSpecificationsAtDB | None, - new: ServiceSpecificationsAtDB, -): - return old is None or ( - packaging.version.parse(old.service_version) - < packaging.version.parse(new.service_version) - ) - - -def _merge_specs( - everyone_spec: ServiceSpecificationsAtDB | None, - team_specs: dict[GroupID, ServiceSpecificationsAtDB], - user_spec: ServiceSpecificationsAtDB | None, -) -> dict[str, Any]: - merged_spec = {} - for spec in chain([everyone_spec], team_specs.values(), [user_spec]): - if spec is not None: - merged_spec.update(spec.dict(include={"sidecar", "service"})) - return merged_spec diff --git a/services/catalog/src/simcore_service_catalog/api/errors/__init__.py b/services/catalog/src/simcore_service_catalog/exceptions/__init__.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/api/errors/__init__.py rename to services/catalog/src/simcore_service_catalog/exceptions/__init__.py diff --git a/services/catalog/src/simcore_service_catalog/db/errors.py b/services/catalog/src/simcore_service_catalog/exceptions/db_errors.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/db/errors.py rename to services/catalog/src/simcore_service_catalog/exceptions/db_errors.py diff --git a/services/catalog/src/simcore_service_catalog/exceptions/handlers/__init__.py b/services/catalog/src/simcore_service_catalog/exceptions/handlers/__init__.py new file mode 100644 index 00000000000..49620d73f6c --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/exceptions/handlers/__init__.py @@ -0,0 +1,24 @@ +from fastapi import FastAPI, HTTPException, status +from fastapi.exceptions import RequestValidationError + +from ._http_error import http_error_handler, make_http_error_handler_for_exception +from ._validation_error import http422_error_handler + + +def 
setup_exception_handlers(app: FastAPI) -> None: + app.add_exception_handler(HTTPException, http_error_handler) + app.add_exception_handler(RequestValidationError, http422_error_handler) + + # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy + app.add_exception_handler( + NotImplementedError, + make_http_error_handler_for_exception( + status.HTTP_501_NOT_IMPLEMENTED, NotImplementedError + ), + ) + app.add_exception_handler( + Exception, + make_http_error_handler_for_exception( + status.HTTP_500_INTERNAL_SERVER_ERROR, Exception + ), + ) diff --git a/services/catalog/src/simcore_service_catalog/api/errors/http_error.py b/services/catalog/src/simcore_service_catalog/exceptions/handlers/_http_error.py similarity index 80% rename from services/catalog/src/simcore_service_catalog/api/errors/http_error.py rename to services/catalog/src/simcore_service_catalog/exceptions/handlers/_http_error.py index cd6466b7033..f76edb1ed6b 100644 --- a/services/catalog/src/simcore_service_catalog/api/errors/http_error.py +++ b/services/catalog/src/simcore_service_catalog/exceptions/handlers/_http_error.py @@ -1,4 +1,4 @@ -from typing import Callable, Type +from collections.abc import Callable from fastapi import HTTPException from fastapi.encoders import jsonable_encoder @@ -13,8 +13,8 @@ async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: def make_http_error_handler_for_exception( - status_code: int, exception_cls: Type[BaseException] -) -> Callable[[Request, Type[BaseException]], JSONResponse]: + status_code: int, exception_cls: type[BaseException] +) -> Callable[[Request, type[BaseException]], JSONResponse]: """ Produces a handler for BaseException-type exceptions which converts them into an error JSON response with a given status code @@ -22,7 +22,7 @@ def make_http_error_handler_for_exception( SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions """ - async def _http_error_handler(_: Request, exc: Type[BaseException]) -> JSONResponse: + async def _http_error_handler(_: Request, exc: type[BaseException]) -> JSONResponse: assert isinstance(exc, exception_cls) # nosec return JSONResponse( content=jsonable_encoder({"errors": [str(exc)]}), status_code=status_code diff --git a/services/catalog/src/simcore_service_catalog/api/errors/validation_error.py b/services/catalog/src/simcore_service_catalog/exceptions/handlers/_validation_error.py similarity index 90% rename from services/catalog/src/simcore_service_catalog/api/errors/validation_error.py rename to services/catalog/src/simcore_service_catalog/exceptions/handlers/_validation_error.py index fb70f6791ac..23aaa1d0f4e 100644 --- a/services/catalog/src/simcore_service_catalog/api/errors/validation_error.py +++ b/services/catalog/src/simcore_service_catalog/exceptions/handlers/_validation_error.py @@ -1,5 +1,3 @@ -from typing import Union - from fastapi.encoders import jsonable_encoder from fastapi.exceptions import RequestValidationError from fastapi.openapi.constants import REF_PREFIX @@ -12,7 +10,7 @@ async def http422_error_handler( _: Request, - exc: Union[RequestValidationError, ValidationError], + exc: RequestValidationError | ValidationError, ) -> JSONResponse: return JSONResponse( content=jsonable_encoder({"errors": exc.errors()}), diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index f775b602a5a..22f1eefd78f 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ 
b/services/catalog/src/simcore_service_catalog/main.py @@ -1,7 +1,19 @@ """Main application to be deployed in for example uvicorn. """ +import logging + from fastapi import FastAPI -from simcore_service_catalog.core.application import init_app +from servicelib.logging_utils import config_all_loggers +from simcore_service_catalog.core.application import create_app +from simcore_service_catalog.core.settings import ApplicationSettings + +_the_settings = ApplicationSettings.create_from_envs() + +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.CATALOG_LOG_LEVEL.value) # NOSONAR +logging.root.setLevel(_the_settings.CATALOG_LOG_LEVEL.value) +config_all_loggers(_the_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED) + # SINGLETON FastAPI app -the_app: FastAPI = init_app() +the_app: FastAPI = create_app() diff --git a/services/catalog/src/simcore_service_catalog/services/access_rights.py b/services/catalog/src/simcore_service_catalog/services/access_rights.py index 037ab20121f..89d2b3ad751 100644 --- a/services/catalog/src/simcore_service_catalog/services/access_rights.py +++ b/services/catalog/src/simcore_service_catalog/services/access_rights.py @@ -3,8 +3,9 @@ """ import logging import operator +from collections.abc import Callable from datetime import datetime -from typing import Any, Callable, Optional, Union, cast +from typing import Any, cast from urllib.parse import quote_plus from fastapi import FastAPI @@ -19,7 +20,7 @@ from ..db.repositories.services import ServicesRepository from ..utils.versioning import as_version, is_patch_release -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) OLD_SERVICES_DATE: datetime = datetime(2020, 8, 19) @@ -40,7 +41,7 @@ async def _is_old_service(app: FastAPI, service: ServiceDockerData) -> bool: if not data or "build_date" not in data: return True - logger.debug("retrieved service extras are %s", data) + _logger.debug("retrieved service extras are %s", data) service_build_data = datetime.strptime(data["build_date"], "%Y-%m-%dT%H:%M:%SZ") return service_build_data < OLD_SERVICES_DATE @@ -48,7 +49,7 @@ async def _is_old_service(app: FastAPI, service: ServiceDockerData) -> bool: async def evaluate_default_policy( app: FastAPI, service: ServiceDockerData -) -> tuple[Optional[PositiveInt], list[ServiceAccessRightsAtDB]]: +) -> tuple[PositiveInt | None, list[ServiceAccessRightsAtDB]]: """Given a service, it returns the owner's group-id (gid) and a list of access rights following default access-rights policies @@ -65,7 +66,7 @@ async def evaluate_default_policy( if _is_frontend_service(service) or await _is_old_service(app, service): everyone_gid = (await groups_repo.get_everyone_group()).gid - logger.debug("service %s:%s is old or frontend", service.key, service.version) + _logger.debug("service %s:%s is old or frontend", service.key, service.version) # let's make that one available to everyone group_ids.append(everyone_gid) @@ -76,11 +77,10 @@ async def evaluate_default_policy( for user_email in possible_owner_email: possible_gid = await groups_repo.get_user_gid_from_email(user_email) - if possible_gid: - if not owner_gid: - owner_gid = possible_gid + if possible_gid and not owner_gid: + owner_gid = possible_gid if not owner_gid: - logger.warning("service %s:%s has no owner", service.key, service.version) + _logger.warning("service %s:%s has no owner", service.key, service.version) else: group_ids.append(owner_gid) @@ -106,7 +106,7 @@ async def evaluate_auto_upgrade_policy( # 
AUTO-UPGRADE PATCH policy: # # - Any new patch released, inherits the access rights from previous compatible version - # - TODO: add as option in the publication contract, i.e. in ServiceDockerData + # - IDEA: add as option in the publication contract, i.e. in ServiceDockerData? # - Does NOT apply to front-end services # # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2244) @@ -135,15 +135,14 @@ async def evaluate_auto_upgrade_policy( previous_release.key, previous_release.version ) - for access in previous_access_rights: - service_access_rights.append( - access.copy( - exclude={"created", "modified"}, - update={"version": service_metadata.version}, - deep=True, - ) + service_access_rights = [ + access.copy( + exclude={"created", "modified"}, + update={"version": service_metadata.version}, + deep=True, ) - + for access in previous_access_rights + ] return service_access_rights @@ -158,16 +157,16 @@ def reduce_access_rights( # TODO: probably a lot of room to optimize # helper functions to simplify operation of access rights - def get_target(access: ServiceAccessRightsAtDB) -> tuple[Union[str, int], ...]: + def get_target(access: ServiceAccessRightsAtDB) -> tuple[str | int, ...]: """Hashable identifier of the resource the access rights apply to""" - return tuple([access.key, access.version, access.gid, access.product_name]) + return (access.key, access.version, access.gid, access.product_name) def get_flags(access: ServiceAccessRightsAtDB) -> dict[str, bool]: """Extracts only""" flags = access.dict(include={"execute_access", "write_access"}) return cast(dict[str, bool], flags) - access_flags_map: dict[tuple[Union[str, int], ...], dict[str, bool]] = {} + access_flags_map: dict[tuple[str | int, ...], dict[str, bool]] = {} for access in access_rights: target = get_target(access) access_flags = access_flags_map.get(target) @@ -179,16 +178,15 @@ def get_flags(access: ServiceAccessRightsAtDB) -> dict[str, bool]: else: access_flags_map[target] = get_flags(access) - reduced_access_rights = [] - for target in access_flags_map: - reduced_access_rights.append( - ServiceAccessRightsAtDB( - key=f"{target[0]}", - version=f"{target[1]}", - gid=int(target[2]), - product_name=f"{target[3]}", - **access_flags_map[target], - ) + reduced_access_rights: list[ServiceAccessRightsAtDB] = [ + ServiceAccessRightsAtDB( + key=f"{target[0]}", + version=f"{target[1]}", + gid=int(target[2]), + product_name=f"{target[3]}", + **access_flags_map[target], ) + for target in access_flags_map + ] return reduced_access_rights diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py index bc8eeb720ed..354ea619239 100644 --- a/services/catalog/src/simcore_service_catalog/services/director.py +++ b/services/catalog/src/simcore_service_catalog/services/director.py @@ -1,7 +1,8 @@ import asyncio import functools import logging -from typing import Any, Awaitable, Callable +from collections.abc import Awaitable, Callable +from typing import Any import httpx from fastapi import FastAPI, HTTPException @@ -12,29 +13,29 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_random -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) MINUTE = 60 -director_startup_retry_policy = dict( +director_startup_retry_policy = { # Random service startup order in swarm. 
# wait_random prevents saturating other services during startup # - wait=wait_random(2, 5), - stop=stop_after_delay(2 * MINUTE), - before_sleep=before_sleep_log(logger, logging.WARNING), - reraise=True, -) + "wait": wait_random(2, 5), + "stop": stop_after_delay(2 * MINUTE), + "before_sleep": before_sleep_log(_logger, logging.WARNING), + "reraise": True, +} -class UnresponsiveService(RuntimeError): +class UnresponsiveServiceError(RuntimeError): pass async def setup_director(app: FastAPI) -> None: if settings := app.state.settings.CATALOG_DIRECTOR: # init client-api - logger.debug("Setup director at %s ...", f"{settings.base_url=}") + _logger.debug("Setup director at %s ...", f"{settings.base_url=}") client = DirectorApi(base_url=settings.base_url, app=app) # check that the director is accessible @@ -42,13 +43,14 @@ async def setup_director(app: FastAPI) -> None: async for attempt in AsyncRetrying(**director_startup_retry_policy): with attempt: if not await client.is_responsive(): - raise UnresponsiveService("Director-v0 is not responsive") + msg = "Director-v0 is not responsive" + raise UnresponsiveServiceError(msg) - logger.info( + _logger.info( "Connection to director-v0 succeeded [%s]", json_dumps(attempt.retry_state.retry_object.statistics), ) - except UnresponsiveService: + except UnresponsiveServiceError: await client.close() raise @@ -60,7 +62,7 @@ async def close_director(app: FastAPI) -> None: if client := app.state.director_api: await client.close() - logger.debug("Director client closed successfully") + _logger.debug("Director client closed successfully") # DIRECTOR API CLASS --------------------------------------------- @@ -89,7 +91,7 @@ def _unenvelope_or_raise_error( error = body.get("error") if httpx.codes.is_server_error(resp.status_code): - logger.error( + _logger.error( "director error %d [%s]: %s", resp.status_code, resp.reason_phrase, @@ -114,7 +116,7 @@ async def request_wrapper( try: resp = await request_func(zelf, path=normalized_path, *args, **kwargs) except Exception as err: - logger.exception( + _logger.exception( "Failed request %s to %s%s", request_func.__name__, zelf.client.base_url, @@ -162,7 +164,7 @@ async def put(self, path: str, body: dict) -> httpx.Response: async def is_responsive(self) -> bool: try: - logger.debug("checking director-v0 is responsive") + _logger.debug("checking director-v0 is responsive") health_check_path: str = "/" result = await self.client.head(health_check_path, timeout=1.0) result.raise_for_status() diff --git a/services/catalog/src/simcore_service_catalog/utils/pools.py b/services/catalog/src/simcore_service_catalog/utils/pools.py deleted file mode 100644 index 39d6f5e9801..00000000000 --- a/services/catalog/src/simcore_service_catalog/utils/pools.py +++ /dev/null @@ -1,40 +0,0 @@ -from concurrent.futures import ProcessPoolExecutor -from contextlib import contextmanager -from typing import Iterator -# only gets created on use and is guaranteed to be the s -# ame for the entire lifetime of the application -__shared_process_pool_executor = {} - - -def get_shared_process_pool_executor(**kwargs) -> ProcessPoolExecutor: - # sometimes a pool requires a specific configuration - # the key helps to distinguish between them in the same application - key = "".join(sorted("_".join((k, str(v))) for k, v in kwargs.items())) - - if key not in __shared_process_pool_executor: - # pylint: disable=consider-using-with - __shared_process_pool_executor[key] = ProcessPoolExecutor(**kwargs) - - return __shared_process_pool_executor[key] - - -# because
there is no shared fastapi library, this is a -# duplicate of servicelib.pools.non_blocking_process_pool_executor -@contextmanager -def non_blocking_process_pool_executor(**kwargs) -> Iterator[ProcessPoolExecutor]: - """ - Avoids default context manger behavior which calls - shutdown with wait=True an blocks. - """ - executor = get_shared_process_pool_executor(**kwargs) - try: - yield executor - finally: - # due to an issue in cpython https://bugs.python.org/issue34073 - # bypassing shutdown and using a shared pool - # remove call to get_shared_process_pool_executor and replace with - # a new instance when the issue is fixed - # FIXME: uncomment below line when the issue is fixed - # executor.shutdown(wait=False) - pass diff --git a/services/catalog/src/simcore_service_catalog/utils/requests_decorators.py b/services/catalog/src/simcore_service_catalog/utils/requests_decorators.py deleted file mode 100644 index 534ed7f2d12..00000000000 --- a/services/catalog/src/simcore_service_catalog/utils/requests_decorators.py +++ /dev/null @@ -1,47 +0,0 @@ -import asyncio -import logging -from contextlib import suppress -from functools import wraps -from typing import Any, Callable, Coroutine - -from fastapi import Request, Response - -logger = logging.getLogger(__name__) - -_DEFAULT_CHECK_INTERVAL_S: float = 0.5 - - -async def _cancel_task_if_client_disconnected( - request: Request, task: asyncio.Task, interval: float = _DEFAULT_CHECK_INTERVAL_S -) -> None: - with suppress(asyncio.CancelledError): - while True: - if await request.is_disconnected(): - logger.warning("client %s disconnected!", request.client) - task.cancel() - break - await asyncio.sleep(interval) - - -def cancellable_request(handler: Callable[..., Coroutine[Any, Any, Any]]): - """this decorator periodically checks if the client disconnected and then will cancel the request and return a 499 code (a la nginx).""" - - @wraps(handler) - async def decorator(request: Request, *args, **kwargs) -> Response: - handler_task = asyncio.get_event_loop().create_task( - handler(request, *args, **kwargs) - ) - auto_cancel_task = asyncio.get_event_loop().create_task( - _cancel_task_if_client_disconnected(request, handler_task) - ) - try: - return await handler_task - except asyncio.CancelledError: - logger.warning( - "request %s was cancelled by client %s!", request.url, request.client - ) - return Response("Oh No!", status_code=499) - finally: - auto_cancel_task.cancel() - - return decorator diff --git a/services/catalog/src/simcore_service_catalog/utils/versioning.py b/services/catalog/src/simcore_service_catalog/utils/versioning.py index 5aedea6c8b0..cc08bfec969 100644 --- a/services/catalog/src/simcore_service_catalog/utils/versioning.py +++ b/services/catalog/src/simcore_service_catalog/utils/versioning.py @@ -2,19 +2,19 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from typing import Union +from typing import TypeAlias import packaging.version from packaging.version import Version -_VersionT = Union[Version, str] +_VersionOrStr: TypeAlias = Version | str -def as_version(v: _VersionT) -> Version: +def as_version(v: _VersionOrStr) -> Version: return packaging.version.Version(v) if isinstance(v, str) else v -def is_patch_release(version: _VersionT, reference: _VersionT) -> bool: +def is_patch_release(version: _VersionOrStr, reference: _VersionOrStr) -> bool: """Returns True if version is a patch release from reference""" v: Version = as_version(version) r: Version = as_version(reference) diff --git 
a/services/catalog/tests/integration/test_none.py b/services/catalog/tests/integration/test_none.py deleted file mode 100644 index 297cb2d7ad5..00000000000 --- a/services/catalog/tests/integration/test_none.py +++ /dev/null @@ -1,5 +0,0 @@ -# added as minimal integration tests - - -def test_mock(): - assert True diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index 465b86bb53a..7d7ecac82b0 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -1,21 +1,31 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable -import sys + +from collections.abc import AsyncIterator from pathlib import Path import pytest import simcore_service_catalog +from asgi_lifespan import LifespanManager +from fastapi import FastAPI from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import load_dotenv, setenvs_from_envfile +from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from simcore_service_catalog.core.application import create_app +from simcore_service_catalog.core.settings import ApplicationSettings pytest_plugins = [ - "pytest_simcore.postgres_service", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", + "pytest_simcore.faker_products_data", + "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", @@ -23,20 +33,13 @@ ] -_CURRENT_DIR = ( - Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -) - - -## FOLDER LAYOUT --------------------------------------------------------------------- - - @pytest.fixture(scope="session") -def project_slug_dir() -> Path: - folder = _CURRENT_DIR.parent.parent - assert folder.exists() - assert any(folder.glob("src/simcore_service_catalog")) - return folder +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "catalog" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_catalog")) + return service_folder @pytest.fixture(scope="session") @@ -50,28 +53,55 @@ def package_dir() -> Path: @pytest.fixture(scope="session") -def service_env_file(project_slug_dir: Path) -> Path: - env_devel_path = project_slug_dir / ".env-devel" - assert env_devel_path.exists() - return env_devel_path +def env_devel_dict( + env_devel_dict: EnvVarsDict, external_envfile_dict: EnvVarsDict +) -> EnvVarsDict: + if external_envfile_dict: + assert "CATALOG_DEV_FEATURES_ENABLED" in external_envfile_dict + assert "CATALOG_SERVICES_DEFAULT_RESOURCES" in external_envfile_dict + return external_envfile_dict + return env_devel_dict + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_environment_dict: EnvVarsDict, +) -> EnvVarsDict: + """Produces testing environment for the app + by replicating the environment defined in the docker-compose + when initialized with .env-devel + """ + return setenvs_from_dict( + monkeypatch, + {**docker_compose_service_environment_dict}, + ) -# 
TEST ENVIRONS ------ +MAX_TIME_FOR_APP_TO_STARTUP = 10 +MAX_TIME_FOR_APP_TO_SHUTDOWN = 10 -@pytest.fixture(scope="session") -def testing_environ_vars( - testing_environ_vars: EnvVarsDict, service_env_file: Path -) -> EnvVarsDict: - # Extends packages/pytest-simcore/src/pytest_simcore/docker_compose.py::testing_environ_vars - # Environ seen by docker compose (i.e. postgres_db) - app_envs = load_dotenv(service_env_file, verbose=True) - return {**testing_environ_vars, **app_envs} +@pytest.fixture +def app_settings(app_environment: EnvVarsDict) -> ApplicationSettings: + assert app_environment + return ApplicationSettings.create_from_envs() @pytest.fixture -def service_test_environ( - service_env_file: Path, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - # environs seen by app are defined by the service env-file! - return setenvs_from_envfile(monkeypatch, service_env_file, verbose=True) +async def app( + app_settings: ApplicationSettings, is_pdb_enabled: bool +) -> AsyncIterator[FastAPI]: + assert app_environment + the_test_app = create_app(settings=app_settings) + async with LifespanManager( + the_test_app, + startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, + shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, + ): + yield the_test_app + + +@pytest.fixture +def disable_service_caching(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("AIOCACHE_DISABLE", "1") diff --git a/services/catalog/tests/unit/test_services_director.py b/services/catalog/tests/unit/test_services_director.py index d633f785ea6..a6307f2a5fe 100644 --- a/services/catalog/tests/unit/test_services_director.py +++ b/services/catalog/tests/unit/test_services_director.py @@ -1,48 +1,44 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=protected-access -# pylint:disable=not-context-manager +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable - -from typing import Iterator +from collections.abc import Iterator import pytest import respx from fastapi import FastAPI -from fastapi.testclient import TestClient from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx.router import MockRouter from simcore_service_catalog.api.dependencies.director import get_director_api -from simcore_service_catalog.core.application import init_app +from simcore_service_catalog.core.settings import ApplicationSettings from simcore_service_catalog.services.director import DirectorApi @pytest.fixture -def minimal_app( - monkeypatch: pytest.MonkeyPatch, service_test_environ: EnvVarsDict -) -> Iterator[FastAPI]: - # disable a couple of subsystems - monkeypatch.setenv("CATALOG_POSTGRES", "null") - monkeypatch.setenv("SC_BOOT_MODE", "local-development") - - app = init_app() - - yield app - - -@pytest.fixture() -def client(minimal_app: FastAPI) -> Iterator[TestClient]: - # NOTE: this way we ensure the events are run in the application - # since it starts the app on a test server - with TestClient(minimal_app) as client: - yield client +def app_environment( + monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **app_environment, + "CATALOG_POSTGRES": "null", # disable postgres + "SC_BOOT_MODE": 
"local-development", + }, + ) @pytest.fixture -def mocked_director_service_api(minimal_app: FastAPI) -> Iterator[MockRouter]: +def mocked_director_service_api( + app_settings: ApplicationSettings, +) -> Iterator[MockRouter]: + assert app_settings.CATALOG_DIRECTOR with respx.mock( - base_url=minimal_app.state.settings.CATALOG_DIRECTOR.base_url, + base_url=app_settings.CATALOG_DIRECTOR.base_url, assert_all_called=False, assert_all_mocked=True, ) as respx_mock: @@ -56,13 +52,12 @@ def mocked_director_service_api(minimal_app: FastAPI) -> Iterator[MockRouter]: async def test_director_client_setup( mocked_director_service_api: MockRouter, - minimal_app: FastAPI, - client: TestClient, + app: FastAPI, ): # gets director client as used in handlers - director_api = get_director_api(minimal_app) + director_api = get_director_api(app) - assert minimal_app.state.director_api == director_api + assert app.state.director_api == director_api assert isinstance(director_api, DirectorApi) # use it diff --git a/services/catalog/tests/unit/test_services_function_services.py b/services/catalog/tests/unit/test_services_function_services.py index 69c8744d5d6..17ce2c63721 100644 --- a/services/catalog/tests/unit/test_services_function_services.py +++ b/services/catalog/tests/unit/test_services_function_services.py @@ -1,7 +1,10 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=protected-access +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + import pytest from models_library.api_schemas_catalog.services import ServiceDockerData diff --git a/services/catalog/tests/unit/test_utils_service_resources.py b/services/catalog/tests/unit/test_utils_service_resources.py index 2b73bdfc40f..1df8b18b896 100644 --- a/services/catalog/tests/unit/test_utils_service_resources.py +++ b/services/catalog/tests/unit/test_utils_service_resources.py @@ -1,3 +1,11 @@ +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + from typing import Any import pytest @@ -69,12 +77,6 @@ def test_parse_generic_resources( ResourcesDict(), id="empty task resource spec", ), - pytest.param( - ResourcesDict(), - ServiceSpec(TaskTemplate=TaskSpec(Resources=Resources1())), # type: ignore - ResourcesDict(), - id="empty task resource spec", - ), pytest.param( ResourcesDict(), ServiceSpec(TaskTemplate=TaskSpec(Resources=Resources1(Limits=Limit()))), # type: ignore @@ -285,8 +287,8 @@ def test_merge_service_resources_with_user_specs( merged_resources = merge_service_resources_with_user_specs( service_resources, user_specs ) - assert all(key in expected_resources for key in merged_resources.keys()) - assert all(key in merged_resources for key in expected_resources.keys()) + assert all(key in expected_resources for key in merged_resources) + assert all(key in merged_resources for key in expected_resources) for resource_key, resource_value in merged_resources.items(): # NOTE: so that float values are compared correctly assert resource_value.dict() == pytest.approx( diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index aba902bf79a..3428ad188f9 100644 --- 
a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -5,23 +5,31 @@ import itertools import random +from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from copy import deepcopy from datetime import datetime -from typing import Any, AsyncIterator, Awaitable, Callable, Iterator +from typing import Any import pytest import respx import sqlalchemy as sa from faker import Faker from fastapi import FastAPI +from fastapi.testclient import TestClient +from models_library.products import ProductName from models_library.services import ServiceDockerData from models_library.users import UserID +from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_postgres import PostgresTestConfig +from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.utils_postgres import ( + PostgresTestConfig, + insert_and_get_row_lifespan, +) from simcore_postgres_database.models.products import products -from simcore_postgres_database.models.users import UserRole, UserStatus, users -from simcore_service_catalog.core.application import init_app +from simcore_postgres_database.models.users import users +from simcore_service_catalog.core.settings import ApplicationSettings from simcore_service_catalog.db.tables import ( groups, services_access_rights, @@ -30,73 +38,61 @@ from sqlalchemy import tuple_ from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncEngine -from starlette.testclient import TestClient - - -@pytest.fixture() -async def products_names( - sqlalchemy_async_engine: AsyncEngine, -) -> AsyncIterator[list[str]]: - """Inits products db table and returns product names""" - data = [ - # already upon creation: ("osparc", r"([\.-]{0,1}osparc[\.-])"), - ("s4l", r"(^s4l[\.-])|(^sim4life\.)|(^api.s4l[\.-])|(^api.sim4life\.)"), - ("tis", r"(^tis[\.-])|(^ti-solutions\.)"), - ] - - # pylint: disable=no-value-for-parameter - - async with sqlalchemy_async_engine.begin() as conn: - # NOTE: The 'default' dialect with current database version settings does not support in-place multirow inserts - for n, (name, regex) in enumerate(data): - stmt = products.insert().values(name=name, host_regex=regex, priority=n) - await conn.execute(stmt) - - names = [ - "osparc", - ] + [items[0] for items in data] - yield names - async with sqlalchemy_async_engine.begin() as conn: - await conn.execute(products.delete()) +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + postgres_env_vars_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **app_environment, + **postgres_env_vars_dict, + "SC_BOOT_MODE": "local-development", + "POSTGRES_CLIENT_NAME": "pytest_client", + }, + ) @pytest.fixture -def app( - monkeypatch: pytest.MonkeyPatch, - mocker: MockerFixture, - service_test_environ: EnvVarsDict, +async def app_settings( # starts postgres service before app starts postgres_db: sa.engine.Engine, postgres_host_config: PostgresTestConfig, - products_names: list[str], -) -> FastAPI: + app_settings: ApplicationSettings, +) -> ApplicationSettings: + # Database is init BEFORE app + assert postgres_db print("database started:", postgres_host_config) - print("database w/products in table:", products_names) # Ensures both postgres service and app environs are the same! 
- assert service_test_environ["POSTGRES_USER"] == postgres_host_config["user"] - assert service_test_environ["POSTGRES_DB"] == postgres_host_config["database"] - assert service_test_environ["POSTGRES_PASSWORD"] == postgres_host_config["password"] - - monkeypatch.setenv("SC_BOOT_MODE", "local-development") - monkeypatch.setenv("POSTGRES_CLIENT_NAME", "pytest_client") - app = init_app() - - return app + assert app_settings + assert app_settings.CATALOG_POSTGRES + assert app_settings.CATALOG_POSTGRES.POSTGRES_USER == postgres_host_config["user"] + assert app_settings.CATALOG_POSTGRES.POSTGRES_DB == postgres_host_config["database"] + assert ( + app_settings.CATALOG_POSTGRES.POSTGRES_PASSWORD.get_secret_value() + == postgres_host_config["password"] + ) + return app_settings @pytest.fixture def client(app: FastAPI) -> Iterator[TestClient]: + # NOTE: sync client since we use benchmark fixture! with TestClient(app) as cli: # Note: this way we ensure the events are run in the application yield cli @pytest.fixture() -def director_mockup(app: FastAPI) -> Iterator[respx.MockRouter]: +def mocked_director_service_api( + app_settings: ApplicationSettings, +) -> Iterator[respx.MockRouter]: with respx.mock( - base_url=app.state.settings.CATALOG_DIRECTOR.base_url, + base_url=app_settings.CATALOG_DIRECTOR.base_url, assert_all_called=False, assert_all_mocked=True, ) as respx_mock: @@ -126,35 +122,69 @@ def director_mockup(app: FastAPI) -> Iterator[respx.MockRouter]: # -@pytest.fixture() -def user_db(postgres_db: sa.engine.Engine, user_id: UserID) -> Iterator[dict]: - with postgres_db.connect() as con: - # removes all users before continuing - con.execute(users.delete()) - con.execute( - users.insert() - .values( - id=user_id, - name="test user", - email="test@user.com", - password_hash="testhash", - status=UserStatus.ACTIVE, - role=UserRole.USER, - ) - .returning(sa.literal_column("*")) - ) - # this is needed to get the primary_gid correctly - result = con.execute(sa.select(users).where(users.c.id == user_id)) - user = result.first() - assert user - yield dict(user) +@pytest.fixture +async def product( + product: dict[str, Any], + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[dict[str, Any]]: + """ + injects product in db + """ + # NOTE: this fixture ignores products' group-id but it is fine for this test context + assert product["group_id"] is None + async with insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=products, + values=product, + pk_col=products.c.name, + pk_value=product["name"], + ) as row: + yield row - con.execute(users.delete().where(users.c.id == user_id)) + +@pytest.fixture +def target_product(product: dict[str, Any], product_name: ProductName) -> ProductName: + assert product_name == parse_obj_as(ProductName, product["name"]) + return product_name + + +@pytest.fixture +def other_product(product: dict[str, Any]) -> ProductName: + other = parse_obj_as(ProductName, "osparc") + assert other != product["name"] + return other + + +@pytest.fixture +def products_names( + target_product: ProductName, other_product: ProductName ) -> list[str]: + return [other_product, target_product] + + +@pytest.fixture +async def user( + user: dict[str, Any], + user_id: UserID, + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[dict[str, Any]]: + """ + injects a user in db + """ + assert user_id == user["id"] + async with insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=users, + values=user, + pk_col=users.c.id, + pk_value=user["id"], + ) as row: + yield row
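The product and user fixtures above both lean on the insert-and-cleanup pattern behind insert_and_get_row_lifespan. As orientation, here is a minimal sketch of such a helper, assuming SQLAlchemy 1.4+ Core on an async engine; the actual helper ships with pytest_simcore.helpers.utils_postgres and may differ in details:

from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import Any

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncEngine


@asynccontextmanager
async def insert_and_get_row_lifespan(  # illustrative sketch, not the real helper
    engine: AsyncEngine,
    *,
    table: sa.Table,
    values: dict[str, Any],
    pk_col: sa.Column,
    pk_value: Any,
) -> AsyncIterator[dict[str, Any]]:
    # insert the row and read it back, so server-side defaults are included
    async with engine.begin() as conn:
        await conn.execute(table.insert().values(**values))
        result = await conn.execute(sa.select(table).where(pk_col == pk_value))
        row = result.one()

    # the fixture body runs here, with the row exposed as a plain dict
    yield dict(row._mapping)

    # cleanup: drop the injected row once the fixture goes out of scope
    async with engine.begin() as conn:
        await conn.execute(table.delete().where(pk_col == pk_value))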
@pytest.fixture() async def user_groups_ids( - sqlalchemy_async_engine: AsyncEngine, user_db: dict[str, Any] + sqlalchemy_async_engine: AsyncEngine, user: dict[str, Any] ) -> AsyncIterator[list[int]]: """Inits groups table and returns group identifiers""" @@ -173,9 +203,11 @@ async def user_groups_ids( async with sqlalchemy_async_engine.begin() as conn: for row in data: # NOTE: The 'default' dialect with current database version settings does not support in-place multirow inserts - await conn.execute(groups.insert().values(**dict(zip(cols, row)))) + await conn.execute( + groups.insert().values(**dict(zip(cols, row, strict=False))) + ) - gids = [1, user_db["primary_gid"]] + [items[0] for items in data] + gids = [1, user["primary_gid"]] + [items[0] for items in data] yield gids @@ -216,7 +248,7 @@ async def services_db_tables_injector( # pylint: disable=no-value-for-parameter inserted_services: set[tuple[str, str]] = set() - async def inject_in_db(fake_catalog: list[tuple]): + async def _inject_in_db(fake_catalog: list[tuple]): # [(service, ar1, ...), (service2, ar1, ...) ] async with sqlalchemy_async_engine.begin() as conn: @@ -237,7 +269,7 @@ async def inject_in_db(fake_catalog: list[tuple]): stmt_access = services_access_rights.insert().values(access_rights) await conn.execute(stmt_access) - yield inject_in_db + yield _inject_in_db async with sqlalchemy_async_engine.begin() as conn: await conn.execute( @@ -368,29 +400,29 @@ async def service_catalog_faker( everyone_gid, user_gid, team_gid = user_groups_ids def _random_service(**overrides) -> dict[str, Any]: - data = dict( - key=f"simcore/services/{random.choice(['dynamic', 'computational'])}/{faker.name()}", - version=".".join([str(faker.pyint()) for _ in range(3)]), - owner=user_gid, - name=faker.name(), - description=faker.sentence(), - thumbnail=random.choice([faker.image_url(), None]), - classifiers=[], - quality={}, - deprecated=None, - ) + data = { + "key": f"simcore/services/{random.choice(['dynamic', 'computational'])}/{faker.name()}", + "version": ".".join([str(faker.pyint()) for _ in range(3)]), + "owner": user_gid, + "name": faker.name(), + "description": faker.sentence(), + "thumbnail": random.choice([faker.image_url(), None]), + "classifiers": [], + "quality": {}, + "deprecated": None, + } data.update(overrides) return data def _random_access(service, **overrides) -> dict[str, Any]: - data = dict( - key=service["key"], - version=service["version"], - gid=random.choice(user_groups_ids), - execute_access=faker.pybool(), - write_access=faker.pybool(), - product_name=random.choice(products_names), - ) + data = { + "key": service["key"], + "version": service["version"], + "gid": random.choice(user_groups_ids), + "execute_access": faker.pybool(), + "write_access": faker.pybool(), + "product_name": random.choice(products_names), + } data.update(overrides) return data @@ -443,7 +475,7 @@ def _fake_factory( @pytest.fixture -def mock_catalog_background_task(mocker: MockerFixture) -> None: +def mocked_catalog_background_task(mocker: MockerFixture) -> None: """patch the setup of the background task so we can call it manually""" mocker.patch( "simcore_service_catalog.core.events.start_registry_sync_task", diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py index 47a54aa5164..0edcda200a6 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py +++ 
b/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py @@ -9,9 +9,10 @@ from collections.abc import Callable from datetime import datetime, timedelta -import pytest from models_library.api_schemas_catalog.services import ServiceGet +from models_library.products import ProductName from models_library.services import ServiceDockerData +from models_library.users import UserID from pydantic import parse_obj_as from respx.router import MockRouter from starlette import status @@ -26,22 +27,16 @@ ] -@pytest.fixture -def disable_service_caching(monkeypatch): - monkeypatch.setenv("AIOCACHE_DISABLE", 1) - - async def test_list_services_with_details( - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, - user_id: int, - products_names: list[str], + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, + user_id: UserID, + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, benchmark, ): - target_product = products_names[-1] # create some fake services NUM_SERVICES = 1000 fake_services = [ @@ -62,13 +57,14 @@ async def test_list_services_with_details( # now fake the director such that it returns half the services fake_registry_service_data = ServiceDockerData.Config.schema_extra["examples"][0] - director_mockup.get("/services", name="list_services").respond( + mocked_director_service_api.get("/services", name="list_services").respond( 200, json={ "data": [ { **fake_registry_service_data, - **{"key": s[0]["key"], "version": s[0]["version"]}, + "key": s[0]["key"], + "version": s[0]["version"], } for s in fake_services[::2] ] @@ -85,16 +81,16 @@ async def test_list_services_with_details( async def test_list_services_without_details( - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, user_id: int, - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, benchmark, ): - target_product = products_names[-1] + # injects fake data in db NUM_SERVICES = 1000 SERVICE_KEY = "simcore/services/dynamic/jupyterlab" @@ -128,15 +124,15 @@ async def test_list_services_without_details( async def test_list_services_without_details_with_wrong_user_id_returns_403( disable_service_caching, - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, user_id: int, - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, ): - target_product = products_names[-1] + # injects fake data in db NUM_SERVICES = 1 await services_db_tables_injector( @@ -159,19 +155,15 @@ async def test_list_services_without_details_with_wrong_user_id_returns_403( async def test_list_services_without_details_with_another_product_returns_other_services( disable_service_caching: None, - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, user_id: int, - products_names: list[str], + target_product: ProductName, + other_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, ): - target_product = 
products_names[-1] - assert ( - len(products_names) > 1 - ), "please adjust the fixture to have the right number of products" - # injects fake data in db NUM_SERVICES = 15 await services_db_tables_injector( [ @@ -187,9 +179,7 @@ async def test_list_services_without_details_with_another_product_returns_other_ ) url = URL("/v0/services").with_query({"user_id": user_id, "details": "false"}) - response = client.get( - f"{url}", headers={"x-simcore-products-name": products_names[0]} - ) + response = client.get(f"{url}", headers={"x-simcore-products-name": other_product}) assert response.status_code == 200 data = response.json() assert len(data) == 0 @@ -197,18 +187,15 @@ async def test_list_services_without_details_with_another_product_returns_other_ async def test_list_services_without_details_with_wrong_product_returns_0_service( disable_service_caching, - mock_catalog_background_task, - director_mockup: MockRouter, - client: TestClient, + mocked_catalog_background_task, + mocked_director_service_api: MockRouter, user_id: int, - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, ): - target_product = products_names[-1] - assert ( - len(products_names) > 1 - ), "please adjust the fixture to have the right number of products" + # injects fake data in db NUM_SERVICES = 1 await services_db_tables_injector( @@ -235,18 +222,15 @@ async def test_list_services_without_details_with_wrong_product_returns_0_servic async def test_list_services_that_are_deprecated( disable_service_caching, - mock_catalog_background_task, - director_mockup: MockRouter, - client: TestClient, + mocked_catalog_background_task, + mocked_director_service_api: MockRouter, user_id: int, - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, ): - target_product = products_names[-1] - assert ( - len(products_names) > 1 - ), "please adjust the fixture to have the right number of products" + # injects fake data in db deprecation_date = datetime.utcnow() + timedelta(days=1) deprecated_service = service_catalog_faker( @@ -271,16 +255,14 @@ async def test_list_services_that_are_deprecated( # for details, the director must return the same service fake_registry_service_data = ServiceDockerData.Config.schema_extra["examples"][0] - director_mockup.get("/services", name="list_services").respond( + mocked_director_service_api.get("/services", name="list_services").respond( 200, json={ "data": [ { **fake_registry_service_data, - **{ - "key": deprecated_service[0]["key"], - "version": deprecated_service[0]["version"], - }, + "key": deprecated_service[0]["key"], + "version": deprecated_service[0]["version"], } ] }, diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services_access_rights.py index 6869442702c..f2c1edfd413 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_api_routes_services_access_rights.py @@ -7,11 +7,13 @@ # type: ignore import random -from typing import Any, Callable +from collections.abc import Callable +from typing import Any from models_library.api_schemas_catalog.service_access_rights import ( ServiceAccessRightsGet, ) +from models_library.products import ProductName from pydantic import parse_obj_as from respx.router import MockRouter from 
starlette.testclient import TestClient @@ -26,17 +28,16 @@ async def test_get_service_access_rights( - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, - user_db: Callable, - products_names: list[str], + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, + user: dict[str, Any], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, + client: TestClient, ): - target_product = products_names[0] # osparc - user_id = user_db["id"] - user_primary_gid = user_db["primary_gid"] + user_id = user["id"] + user_primary_gid = user["primary_gid"] # create some fake services NUM_SERVICES = 3 @@ -73,18 +74,17 @@ async def test_get_service_access_rights( async def test_get_service_access_rights_with_more_gids( - mock_catalog_background_task: None, - director_mockup: MockRouter, - client: TestClient, - user_db: dict[str, Any], - products_names: list[str], + mocked_catalog_background_task: None, + mocked_director_service_api: MockRouter, + user: dict[str, Any], + other_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, user_groups_ids: list[int], + client: TestClient, ): - target_product = products_names[1] # s4l - user_id = user_db["id"] - user_primary_gid = user_db["primary_gid"] + user_id = user["id"] + user_primary_gid = user["primary_gid"] everyone_gid, user_gid, team_gid = user_groups_ids fake_service = service_catalog_faker( @@ -92,7 +92,7 @@ async def test_get_service_access_rights_with_more_gids( "1.0.1", team_access="x", everyone_access="x", - product=target_product, + product=other_product, ) # injects fake data in db await services_db_tables_injector([fake_service]) @@ -103,7 +103,7 @@ async def test_get_service_access_rights_with_more_gids( ).with_query({"user_id": user_id}) response = client.get( f"{url}", - headers={"x-simcore-products-name": target_product}, + headers={"x-simcore-products-name": other_product}, ) assert response.status_code == 200 data = parse_obj_as(ServiceAccessRightsGet, response.json()) diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services_ports.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services_ports.py index b06623a0332..528ecbbed5d 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services_ports.py +++ b/services/catalog/tests/unit/with_dbs/test_api_routes_services_ports.py @@ -4,7 +4,8 @@ # pylint: disable=too-many-arguments import urllib.parse -from typing import Any, Callable +from collections.abc import Callable +from typing import Any import pytest from pytest_mock.plugin import MockerFixture @@ -21,20 +22,6 @@ ] -@pytest.fixture -def disable_service_caching(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setenv("AIOCACHE_DISABLE", "1") - - -@pytest.fixture -def product_name( - products_names: list[str], -) -> str: - target_product = products_names[-1] - assert target_product - return target_product - - @pytest.fixture def service_key() -> str: return "simcore/services/comp/itis/fake_sleeper" @@ -72,13 +59,13 @@ async def mock_check_service_read_access( @pytest.fixture async def mock_director_service_api( - director_mockup: MockRouter, + mocked_director_service_api: MockRouter, service_key: str, service_version: str, service_metadata: dict[str, Any], ): # SEE services/director/src/simcore_service_director/api/v0/openapi.yaml - director_mockup.get( + mocked_director_service_api.get( f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}", 
name="services_by_key_version_get", ).respond( @@ -93,7 +80,7 @@ async def mock_director_service_api( async def test_list_service_ports( disable_service_caching: None, - mock_catalog_background_task: None, + mocked_catalog_background_task: None, mock_check_service_read_access: None, mock_director_service_api: None, client: TestClient, diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services_resources.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services_resources.py index d0b95a8eb86..9b8f909f874 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services_resources.py +++ b/services/catalog/tests/unit/with_dbs/test_api_routes_services_resources.py @@ -3,10 +3,11 @@ # pylint: disable=unused-variable import urllib.parse +from collections.abc import Callable from copy import deepcopy from dataclasses import dataclass from random import choice, randint -from typing import Any, Callable +from typing import Any import httpx import pytest @@ -37,16 +38,14 @@ @pytest.fixture def mock_director_service_labels( - director_mockup: respx.MockRouter, app: FastAPI + mocked_director_service_api: respx.MockRouter, app: FastAPI ) -> Route: slash = urllib.parse.quote_plus("/") - mock_route = director_mockup.get( + return mocked_director_service_api.get( url__regex=rf"v0/services/simcore{slash}services{slash}(comp|dynamic|frontend)({slash}[\w{slash}-]+)+/[0-9]+.[0-9]+.[0-9]+/labels", name="get_service_labels", ).respond(200, json={"data": {}}) - return mock_route - @pytest.fixture def service_labels(faker: Faker) -> Callable[..., dict[str, Any]]: @@ -183,7 +182,7 @@ class _ServiceResourceParams: ], ) async def test_get_service_resources( - mock_catalog_background_task, + mocked_catalog_background_task, mock_director_service_labels: Route, client: TestClient, params: _ServiceResourceParams, @@ -209,7 +208,7 @@ async def test_get_service_resources( @pytest.fixture def create_mock_director_service_labels( - director_mockup: respx.MockRouter, app: FastAPI + mocked_director_service_api: respx.MockRouter, app: FastAPI ) -> Callable: def factory(services_labels: dict[str, dict[str, Any]]) -> None: for service_name, data in services_labels.items(): @@ -217,7 +216,7 @@ def factory(services_labels: dict[str, dict[str, Any]]) -> None: f"simcore/services/dynamic/{service_name}" ) for k, mock_key in enumerate((encoded_key, service_name)): - director_mockup.get( + mocked_director_service_api.get( url__regex=rf"v0/services/{mock_key}/[\w/.]+/labels", name=f"get_service_labels_for_{service_name}_{k}", ).respond(200, json={"data": data}) @@ -287,7 +286,7 @@ def factory(services_labels: dict[str, dict[str, Any]]) -> None: ], ) async def test_get_service_resources_sim4life_case( - mock_catalog_background_task, + mocked_catalog_background_task, create_mock_director_service_labels: Callable, client: TestClient, mapped_services_labels: dict[str, dict[str, Any]], @@ -307,7 +306,7 @@ async def test_get_service_resources_sim4life_case( async def test_get_service_resources_raises_errors( - mock_catalog_background_task, + mocked_catalog_background_task, mock_director_service_labels: Route, client: TestClient, ) -> None: diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services_specifications.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services_specifications.py index 40d4132e417..d88122c21d7 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services_specifications.py +++ 
b/services/catalog/tests/unit/with_dbs/test_api_routes_services_specifications.py @@ -5,8 +5,9 @@ import asyncio +from collections.abc import AsyncIterator, Awaitable, Callable from random import choice, randint -from typing import Any, AsyncIterator, Awaitable, Callable +from typing import Any import pytest import respx @@ -29,6 +30,7 @@ Resources1 as ServiceTaskResources, ) from models_library.generated_models.docker_rest_api import ServiceSpec +from models_library.products import ProductName from models_library.users import UserID from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.services_specifications import ( @@ -121,8 +123,8 @@ def _creator(service_key, service_version, gid) -> ServiceSpecificationsAtDB: async def test_get_service_specifications_returns_403_if_user_does_not_exist( - mock_catalog_background_task, - director_mockup: respx.MockRouter, + mocked_catalog_background_task, + mocked_director_service_api: respx.MockRouter, client: TestClient, user_id: UserID, ): @@ -136,12 +138,12 @@ async def test_get_service_specifications_returns_403_if_user_does_not_exist( async def test_get_service_specifications_of_unknown_service_returns_default_specs( - mock_catalog_background_task, - director_mockup: respx.MockRouter, + mocked_catalog_background_task, + mocked_director_service_api: respx.MockRouter, app: FastAPI, client: TestClient, user_id: UserID, - user_db: dict[str, Any], + user: dict[str, Any], faker: Faker, ): service_key = ( @@ -160,21 +162,20 @@ async def test_get_service_specifications_of_unknown_service_returns_default_spe async def test_get_service_specifications( - mock_catalog_background_task, - director_mockup: respx.MockRouter, + mocked_catalog_background_task, + mocked_director_service_api: respx.MockRouter, app: FastAPI, client: TestClient, user_id: UserID, - user_db: dict[str, Any], + user: dict[str, Any], user_groups_ids: list[int], - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, services_specifications_injector: Callable, sqlalchemy_async_engine: AsyncEngine, create_service_specifications: Callable[..., ServiceSpecificationsAtDB], ): - target_product = products_names[-1] SERVICE_KEY = "simcore/services/dynamic/jupyterlab" SERVICE_VERSION = "0.0.1" await services_db_tables_injector( @@ -253,20 +254,19 @@ async def test_get_service_specifications( async def test_get_service_specifications_are_passed_to_newer_versions_of_service( - mock_catalog_background_task, - director_mockup: respx.MockRouter, + mocked_catalog_background_task, + mocked_director_service_api: respx.MockRouter, app: FastAPI, client: TestClient, user_id: UserID, - user_db: dict[str, Any], + user: dict[str, Any], user_groups_ids: list[int], - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, services_specifications_injector: Callable, create_service_specifications: Callable[..., ServiceSpecificationsAtDB], ): - target_product = products_names[-1] SERVICE_KEY = "simcore/services/dynamic/jupyterlab" sorted_versions = [ "0.0.1", diff --git a/services/catalog/tests/unit/with_dbs/test_db_repositories.py b/services/catalog/tests/unit/with_dbs/test_db_repositories.py index 677200a266b..245ab7141dc 100644 --- a/services/catalog/tests/unit/with_dbs/test_db_repositories.py +++ b/services/catalog/tests/unit/with_dbs/test_db_repositories.py @@ -2,10 +2,11 @@ # pylint: disable=unused-argument # 
pylint: disable=unused-variable +from collections.abc import Callable from dataclasses import dataclass, field -from typing import Callable import pytest +from models_library.products import ProductName from models_library.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB from packaging import version from simcore_service_catalog.db.repositories.services import ServicesRepository @@ -22,8 +23,7 @@ @pytest.fixture def services_repo(sqlalchemy_async_engine: AsyncEngine): - repo = ServicesRepository(sqlalchemy_async_engine) - return repo + return ServicesRepository(sqlalchemy_async_engine) @dataclass @@ -37,11 +37,10 @@ class FakeCatalogInfo: @pytest.fixture() async def fake_catalog_with_jupyterlab( - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, ) -> FakeCatalogInfo: - target_product = products_names[-1] # injects fake data in db await services_db_tables_injector( @@ -84,13 +83,12 @@ async def fake_catalog_with_jupyterlab( ] ) - info = FakeCatalogInfo( + return FakeCatalogInfo( expected_services_count=5, expected_latest="1.1.3", expected_1_1_x=["1.1.3", "1.1.0"], expected_0_x_x=["0.10.0", "0.0.7", "0.0.1"], ) - return info async def test_create_services( @@ -118,11 +116,10 @@ async def test_create_services( async def test_read_services( services_repo: ServicesRepository, user_groups_ids: list[int], - products_names: list[str], + target_product: ProductName, service_catalog_faker: Callable, services_db_tables_injector: Callable, ): - target_product = products_names[-1] # injects fake data in db await services_db_tables_injector( diff --git a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py index d7e663da066..55bee7c0ff8 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py @@ -2,10 +2,11 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from typing import Callable +from collections.abc import Callable from fastapi import FastAPI from models_library.groups import GroupAtDB +from models_library.products import ProductName from models_library.services import ServiceDockerData, ServiceVersion from models_library.services_db import ServiceAccessRightsAtDB from pydantic import parse_obj_as @@ -83,13 +84,13 @@ def test_reduce_access_rights(): async def test_auto_upgrade_policy( sqlalchemy_async_engine: AsyncEngine, user_groups_ids: list[int], - products_names: list[str], + target_product: ProductName, + other_product: ProductName, services_db_tables_injector: Callable, service_catalog_faker: Callable, mocker, ): everyone_gid, user_gid, team_gid = user_groups_ids - target_product = products_names[0] # Avoids calls to director API mocker.patch( @@ -146,7 +147,7 @@ async def test_auto_upgrade_policy( "1.0.10", team_access="x", everyone_access=None, - product=products_names[-1], + product=other_product, ), ] ) @@ -185,7 +186,7 @@ async def test_auto_upgrade_policy( assert {a.gid for a in inherited_access_rights} == {team_gid, owner_gid} assert {a.product_name for a in inherited_access_rights} == { target_product, - products_names[-1], + other_product, } # ALL @@ -196,5 +197,5 @@ async def test_auto_upgrade_policy( assert {a.gid for a in service_access_rights} == {team_gid, owner_gid} assert {a.product_name for a in service_access_rights} == { target_product, - products_names[-1], + other_product, } 
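Note: the tests above now consume target_product and other_product fixtures instead of indexing into the products_names list directly. A minimal sketch of how such fixtures could be declared in the shared conftest.py (the fixture bodies and product ordering below are assumptions for illustration; the actual conftest.py is not part of this diff):

    import pytest
    from models_library.products import ProductName

    @pytest.fixture
    def target_product(products_names: list[ProductName]) -> ProductName:
        # assumption: the product under test is the first registered one (e.g. "osparc")
        return products_names[0]

    @pytest.fixture
    def other_product(products_names: list[ProductName]) -> ProductName:
        # assumption: any second product, different from target_product
        assert len(products_names) > 1
        return products_names[-1]

Keeping the product choice in one fixture means individual tests no longer hard-code list positions.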
diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 1c94c000275..b55d599fe72 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -131,7 +131,7 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: - CATALOG_BACKGROUND_TASK_REST_TIME: ${CATALOG_BACKGROUND_TASK_REST_TIME:-60} + CATALOG_BACKGROUND_TASK_REST_TIME: ${CATALOG_BACKGROUND_TASK_REST_TIME} CATALOG_DEV_FEATURES_ENABLED: ${CATALOG_DEV_FEATURES_ENABLED} CATALOG_LOGLEVEL: ${CATALOG_LOGLEVEL} CATALOG_PROFILING: ${CATALOG_PROFILING} From 9254c7f2f85f8ca4001091e24d056522118627fc Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 5 Jun 2024 17:25:22 +0200 Subject: [PATCH 017/219] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Dashboard:=20f?= =?UTF-8?q?ilters=20on=20the=20left=20(#5907)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/dashboard/CardBase.js | 4 +- .../class/osparc/dashboard/Dashboard.js | 8 +- .../class/osparc/dashboard/DataBrowser.js | 12 +- .../class/osparc/dashboard/GridButtonItem.js | 5 +- .../class/osparc/dashboard/ListButtonItem.js | 6 +- .../osparc/dashboard/ResourceBrowserBase.js | 25 +++ .../class/osparc/dashboard/ResourceFilter.js | 169 ++++++++++++++++++ .../class/osparc/dashboard/SearchBarFilter.js | 43 +++-- .../class/osparc/dashboard/ServiceBrowser.js | 2 + .../class/osparc/dashboard/StudyBrowser.js | 2 +- .../class/osparc/dashboard/TemplateBrowser.js | 2 + .../source/class/osparc/desktop/MainPage.js | 24 +-- .../class/osparc/form/tag/TagToggleButton.js | 11 +- .../source/class/osparc/theme/Appearance.js | 41 +++++ .../source/class/osparc/theme/Decoration.js | 24 +++ .../osparc/ui/basic/LoadingPageHandler.js | 53 +++++- 16 files changed, 373 insertions(+), 58 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index 32172e45996..a6c532495fa 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -103,7 +103,7 @@ qx.Class.define("osparc.dashboard.CardBase", { if (checks && myGroupId in checks) { const myAccessRights = checks[myGroupId]; const totalAccess = "delete" in myAccessRights ? 
myAccessRights["delete"] : myAccessRights["write_access"]; - if (sharedWith === "my-studies") { + if (sharedWith === "my-resources") { return !totalAccess; } else if (sharedWith === "shared-with-me") { return totalAccess; @@ -892,7 +892,7 @@ qx.Class.define("osparc.dashboard.CardBase", { }, _filterTags: function(tags) { - const checks = this.getTags().map(tag => tag.name); + const checks = this.getTags().map(tag => tag.id); return this.self().filterTags(checks, tags); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js index 4bd888292ed..8519daadc14 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js @@ -42,8 +42,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { osparc.utils.Utils.setIdToWidget(this, "dashboard"); this.set({ - contentPaddingTop: 15, - contentPaddingLeft: 0, + contentPadding: this.self().PADDING, + contentPaddingBottom: 0, barPosition: "top" }); @@ -71,6 +71,10 @@ qx.Class.define("osparc.dashboard.Dashboard", { } }, + statics: { + PADDING: 15 + }, + members: { __studyBrowser: null, __templateBrowser: null, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js index 157f17a7521..3e7947d41de 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js @@ -53,7 +53,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { icon: "@FontAwesome5Solid/sync-alt/14", allowGrowX: false }); - this._add(control); + this._addToMainLayout(control); break; case "tree-folder-layout": control = new qx.ui.splitpane.Pane("horizontal"); @@ -61,7 +61,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { width: 2, backgroundColor: "scrollbar-passive" }); - this._add(control, { + this._addToMainLayout(control, { flex: 1 }); break; @@ -86,6 +86,10 @@ qx.Class.define("osparc.dashboard.DataBrowser", { alignY: "middle" }); break; + case "actions-toolbar": + control = new qx.ui.toolbar.ToolBar(); + this._addToMainLayout(control); + break; } return control || this.base(arguments, id); @@ -117,7 +121,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { const filesTree = this.getChildControl("files-tree"); const folderViewer = this.getChildControl("folder-viewer"); - const actionsToolbar = new qx.ui.toolbar.ToolBar(); + const actionsToolbar = this.getChildControl("actions-toolbar"); const fileActions = new qx.ui.toolbar.Part(); const addFile = new qx.ui.toolbar.Part(); actionsToolbar.add(fileActions); @@ -161,8 +165,6 @@ qx.Class.define("osparc.dashboard.DataBrowser", { this.getChildControl("files-tree").populateTree(fileMetadata["locationId"]); }, this); fileActions.add(selectedFileLayout); - - this._add(actionsToolbar); }, __selectionChanged: function(selectedItem) { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js index 3c3477e5375..fa722cdfbf7 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js @@ -252,8 +252,11 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { 
tagsContainer.removeAll(); tags.forEach(tag => { const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter"); + tagUI.set({ + font: "text-12", + toolTipText: this.tr("Click to filter by this Tag") + }); tagUI.addListener("tap", () => this.fireDataEvent("tagClicked", tag)); - tagUI.setFont("text-12"); tagsContainer.add(tagUI); }); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js index 25945e0f8f2..4f7bdcc4121 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js @@ -240,9 +240,11 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { const tagsContainer = this.getChildControl("tags"); tagsContainer.removeAll(); tags.forEach(tag => { - const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter").set({ + const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter"); + tagUI.set({ alignY: "middle", - font: "text-12" + font: "text-12", + toolTipText: this.tr("Click to filter by this Tag") }); tagUI.addListener("tap", () => this.fireDataEvent("tagClicked", tag)); tagsContainer.add(tagUI); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 389eff12b28..f93d59ff922 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -318,6 +318,31 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this._toolbar.add(viewModeLayout); }, + _addResourceFilter: function() { + const resourceFilter = new osparc.dashboard.ResourceFilter(this._resourceType).set({ + marginTop: osparc.dashboard.SearchBarFilter.HEIGHT + 10, // aligned with toolbar buttons: search bar + spacing + maxWidth: osparc.ui.basic.LoadingPageHandler.SIDE_SPACER_WIDTH, + width: osparc.ui.basic.LoadingPageHandler.SIDE_SPACER_WIDTH + }); + + resourceFilter.addListener("changeSharedWith", e => { + const sharedWith = e.getData(); + this._searchBarFilter.setSharedWithActiveFilter(sharedWith.id, sharedWith.label); + }, this); + + resourceFilter.addListener("changeSelectedTags", e => { + const selectedTagIds = e.getData(); + this._searchBarFilter.setTagsActiveFilter(selectedTagIds); + }, this); + + this._searchBarFilter.addListener("filterChanged", e => { + const filterData = e.getData(); + resourceFilter.filterChanged(filterData); + }); + + this._addToLeftColumn(resourceFilter); + }, + /** * Function that resets the selected item */ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js new file mode 100644 index 00000000000..28f34825cb2 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -0,0 +1,169 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + 
+qx.Class.define("osparc.dashboard.ResourceFilter", { + extend: qx.ui.core.Widget, + + construct: function(resourceType) { + this.base(arguments); + + this.__resourceType = resourceType; + this.__sharedWithButtons = []; + this.__tagButtons = []; + + this._setLayout(new qx.ui.layout.VBox()); + this.__buildLayout(); + }, + + events: { + "changeSharedWith": "qx.event.type.Data", + "changeSelectedTags": "qx.event.type.Data" + }, + + members: { + __resourceType: null, + __sharedWithButtons: null, + __tagButtons: null, + + __buildLayout: function() { + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(40)); + layout.add(this.__createSharedWithFilterLayout()); + if (this.__resourceType !== "service") { + layout.add(this.__createTagsFilterLayout()); + } + + const scrollContainer = new qx.ui.container.Scroll(); + scrollContainer.add(layout); + this._add(scrollContainer, { + flex: 1 + }); + }, + + __createSharedWithFilterLayout: function() { + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + const radioGroup = new qx.ui.form.RadioGroup(); + + const options = osparc.dashboard.SearchBarFilter.getSharedWithOptions(this.__resourceType); + options.forEach(option => { + if (this.__resourceType === "study" && option.id === "shared-with-everyone") { + return; + } + const button = new qx.ui.toolbar.RadioButton(option.label, option.icon); + button.id = option.id; + button.set({ + appearance: "filter-toggle-button" + }); + + layout.add(button); + radioGroup.add(button); + + button.addListener("execute", () => { + this.fireDataEvent("changeSharedWith", { + id: option.id, + label: option.label + }); + }, this); + + this.__sharedWithButtons.push(button); + }); + + radioGroup.setAllowEmptySelection(false); + + return layout; + }, + + __createTagsFilterLayout: function() { + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + + this.__populateTags(layout, []); + osparc.store.Store.getInstance().addListener("changeTags", () => { + this.__populateTags(layout, this.__getSelectedTagIds()); + }, this); + + return layout; + }, + + __getSelectedTagIds: function() { + const selectedTagIds = this.__tagButtons.filter(btn => btn.getValue()).map(btn => btn.id); + return selectedTagIds; + }, + + __populateTags: function(layout, selectedTagIds) { + const maxTags = 5; + this.__tagButtons = []; + layout.removeAll(); + osparc.store.Store.getInstance().getTags().forEach((tag, idx) => { + const button = new qx.ui.form.ToggleButton(tag.name, "@FontAwesome5Solid/tag/20"); + button.id = tag.id; + button.set({ + appearance: "filter-toggle-button", + value: selectedTagIds.includes(tag.id) + }); + button.getChildControl("icon").set({ + textColor: tag.color + }); + + layout.add(button); + + button.addListener("execute", () => { + const selection = this.__getSelectedTagIds(); + this.fireDataEvent("changeSelectedTags", selection); + }, this); + + button.setVisibility(idx >= maxTags ? "excluded" : "visible"); + + this.__tagButtons.push(button); + }); + + + if (this.__tagButtons.length >= maxTags) { + const showAllButton = new qx.ui.form.Button(this.tr("Show all Tags..."), "@FontAwesome5Solid/tags/20"); + showAllButton.set({ + appearance: "filter-toggle-button" + }); + showAllButton.showingAll = false; + showAllButton.addListener("execute", () => { + if (showAllButton.showingAll) { + this.__tagButtons.forEach((btn, idx) => btn.setVisibility(idx >= maxTags ? 
"excluded" : "visible")); + showAllButton.setLabel(this.tr("Show all Tags...")); + showAllButton.showingAll = false; + } else { + this.__tagButtons.forEach(btn => btn.setVisibility("visible")); + showAllButton.setLabel(this.tr("Show less Tags...")); + showAllButton.showingAll = true; + } + }); + layout.add(showAllButton); + } + }, + + filterChanged: function(filterData) { + if ("sharedWith" in filterData) { + const foundBtn = this.__sharedWithButtons.find(btn => btn.id === filterData["sharedWith"]); + if (foundBtn) { + foundBtn.setValue(true); + } + } + if ("tags" in filterData) { + this.__tagButtons.forEach(btn => { + btn.setValue(filterData["tags"].includes(btn.id)); + }); + } + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js index c13ed9956ab..a65118e6a0f 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js @@ -28,7 +28,7 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { this.set({ backgroundColor: "input_background", paddingLeft: 6, - height: 36 + height: this.self().HEIGHT }); this.getContentElement().setStyles({ "border-radius": "5px" @@ -44,23 +44,31 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }, statics: { + HEIGHT: 36, + getSharedWithOptions: function(resourceType) { return [{ id: "show-all", label: qx.locale.Manager.tr("All ") + osparc.product.Utils.resourceTypeToAlias(resourceType, { + firstUpperCase: true, plural: true - }) + }), + icon: "@FontAwesome5Solid/home/20" }, { - id: "my-studies", + id: "my-resources", label: qx.locale.Manager.tr("My ") + osparc.product.Utils.resourceTypeToAlias(resourceType, { + firstUpperCase: true, plural: true - }) + }), + icon: "@FontAwesome5Solid/user/20" }, { id: "shared-with-me", - label: qx.locale.Manager.tr("Shared with Me") + label: qx.locale.Manager.tr("Shared with Me"), + icon: "@FontAwesome5Solid/users/20" }, { id: "shared-with-everyone", - label: qx.locale.Manager.tr("Shared with Everyone") + label: qx.locale.Manager.tr("Shared with Everyone"), + icon: "@FontAwesome5Solid/globe/20" }]; } }, @@ -241,7 +249,18 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }, addTagActiveFilter: function(tag) { - this.__addChip("tag", tag.name, tag.name); + this.__addChip("tag", tag.id, tag.name); + }, + + setTagsActiveFilter: function(tagIds) { + const tags = osparc.store.Store.getInstance().getTags(); + tags.forEach(tag => { + if (tagIds.includes(tag.id)) { + this.__addChip("tag", tag.id, tag.name); + } else { + this.__removeChip("tag", tag.id, tag.name); + } + }); }, setSharedWithActiveFilter: function(optionId, optionLabel) { @@ -268,19 +287,11 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { const chipButton = new qx.ui.form.Button().set({ label: osparc.utils.Utils.capitalize(chipType) + " = '" + chipLabel + "'", icon: "@MaterialIcons/close/12", - iconPosition: "right", - paddingRight: 6, - paddingLeft: 6, - alignY: "middle", toolTipText: chipLabel, - maxHeight: 26, - maxWidth: 210 + appearance: "chip-button" }); chipButton.type = chipType; chipButton.id = chipId; - chipButton.getContentElement().setStyles({ - "border-radius": "6px" - }); chipButton.addListener("execute", () => this.__removeChip(chipType, chipId), this); return chipButton; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js 
b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js index f53d5c18da2..e57be5ab191 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -172,6 +172,8 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { this._addGroupByButton(); this._addViewModeButton(); + this._addResourceFilter(); + return this._resourcesContainer; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index b1a87f56194..76527248632 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -601,9 +601,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); this.__addSortByButton(); - this.__addShowSharedWithButton(); this._addViewModeButton(); + this._addResourceFilter(); this.__addNewStudyButtons(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js index 502398e3100..d81b1a8dada 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js @@ -190,6 +190,8 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { this._addGroupByButton(); this._addViewModeButton(); + this._addResourceFilter(); + this._resourcesContainer.addListener("changeVisibility", () => this.__evaluateUpdateAllButton()); return this._resourcesContainer; diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js index ad1d6f6441c..7ba78f72356 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js @@ -80,10 +80,6 @@ qx.Class.define("osparc.desktop.MainPage", { }); }, - statics: { - MIN_STUDIES_PER_ROW: 4 - }, - members: { __navBar: null, __dashboard: null, @@ -213,26 +209,8 @@ qx.Class.define("osparc.desktop.MainPage", { paddingBottom: 6 }); this.__navBar.addDashboardTabButtons(tabsBar); - const itemWidth = osparc.dashboard.GridButtonBase.ITEM_WIDTH + osparc.dashboard.GridButtonBase.SPACING; - dashboard.setMinWidth(this.self().MIN_STUDIES_PER_ROW * itemWidth + 8); - const fitResourceCards = () => { - const w = document.documentElement.clientWidth; - const nStudies = Math.floor((w - 2*150 - 8) / itemWidth); - const newWidth = nStudies * itemWidth + 8; - if (newWidth > dashboard.getMinWidth()) { - dashboard.setWidth(newWidth); - } else { - dashboard.setWidth(dashboard.getMinWidth()); - } - }; - fitResourceCards(); - window.addEventListener("resize", () => fitResourceCards()); const dashboardLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); - dashboardLayout.add(new qx.ui.core.Widget(), { - flex: 1 - }); - dashboardLayout.add(dashboard); - dashboardLayout.add(new qx.ui.core.Widget(), { + dashboardLayout.add(dashboard, { flex: 1 }); return dashboardLayout; diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js index 86e228dbcb6..3075d738cf3 100644 --- 
a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js +++ b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js @@ -52,14 +52,17 @@ qx.Class.define("osparc.form.tag.TagToggleButton", { control = new qx.ui.basic.Label(this.getLabel()).set({ allowStretchX: true }); - control.setAnonymous(true); - control.setRich(this.getRich()); - this._add(control, { - flex: 1 + control.set({ + textColor: "text", + anonymous: true, + rich: this.getRich() }); if (this.getLabel() == null || this.getShow() === "icon") { control.exclude(); } + this._add(control, { + flex: 1 + }); break; case "check": control = new qx.ui.basic.Image(); diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js index 3e51f88a0a3..e2c4928bec3 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js +++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js @@ -1154,6 +1154,47 @@ qx.Theme.define("osparc.theme.Appearance", { }) }, + "chip-button": { + include: "material-button", + style: () => ({ + iconPosition: "right", + textColor: "text", + alignY: "middle", + paddingRight: 6, + paddingLeft: 6, + maxHeight: 26, + maxWidth: 260, + decorator: "chip-button" + }) + }, + + "filter-toggle-button": { + include: "material-button", + alias: "material-button", + style: states => ({ + font: "text-13", + textColor: "text", + padding: 6, + gap: 8, + decorator: (states.hovered || states.pressed || states.checked) ? "filter-toggle-button-selected" : "filter-toggle-button" + }) + }, + + "filter-toggle-button/label": { + include: "material-button/label", + style: () => ({ + textColor: "text" + }) + }, + + "filter-toggle-button/icon": { + include: "material-button/icon", + style: () => ({ + width: 25, + scale: true + }) + }, + /* --------------------------------------------------------------------------- virtual overrides diff --git a/services/static-webserver/client/source/class/osparc/theme/Decoration.js b/services/static-webserver/client/source/class/osparc/theme/Decoration.js index 02f954d7786..6cda4485919 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Decoration.js +++ b/services/static-webserver/client/source/class/osparc/theme/Decoration.js @@ -305,6 +305,30 @@ qx.Theme.define("osparc.theme.Decoration", { } }, + "chip-button": { + style: { + width: 1, + radius: 6, + color: "text", + backgroundColor: "transparent" + } + }, + + "filter-toggle-button": { + style: { + width: 1, + radius: 8, + color: "transparent" + } + }, + + "filter-toggle-button-selected": { + include: "filter-toggle-button", + style: { + color: "text" + } + }, + "pb-listitem": { style: { radius: 5 diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js b/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js index 61283aeb7c7..e5bf60dab2a 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js @@ -35,13 +35,58 @@ qx.Class.define("osparc.ui.basic.LoadingPageHandler", { this._loadingPage = new osparc.ui.message.Loading(); stack.add(this._loadingPage); + const padding = osparc.dashboard.Dashboard.PADDING; + const leftColumnWidth = this.self().SIDE_SPACER_WIDTH; + const emptyColumnMinWidth = 50; + const spacing = 20; + const mainLayoutsScroll = 8; + + 
this.__mainLayoutWithSides = new qx.ui.container.Composite(new qx.ui.layout.HBox(spacing)); + stack.add(this.__mainLayoutWithSides); + + this.__leftColumn = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + width: leftColumnWidth + }); + this.__mainLayoutWithSides.add(this.__leftColumn); + this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - stack.add(this._mainLayout); + this.__mainLayoutWithSides.add(this._mainLayout); + + const rightColumn = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + this.__mainLayoutWithSides.add(rightColumn, { + flex: 1 + }); + + const itemWidth = osparc.dashboard.GridButtonBase.ITEM_WIDTH + osparc.dashboard.GridButtonBase.SPACING; + this._mainLayout.setMinWidth(this.self().MIN_STUDIES_PER_ROW * itemWidth + mainLayoutsScroll); + const fitResourceCards = () => { + const w = document.documentElement.clientWidth; + const nStudies = Math.floor((w - 2*padding - 2*spacing - leftColumnWidth - emptyColumnMinWidth) / itemWidth); + const newWidth = nStudies * itemWidth + mainLayoutsScroll; + if (newWidth > this._mainLayout.getMinWidth()) { + this._mainLayout.setWidth(newWidth); + } else { + this._mainLayout.setWidth(this._mainLayout.getMinWidth()); + } + + const compactVersion = w < this._mainLayout.getMinWidth() + leftColumnWidth + emptyColumnMinWidth; + this.__leftColumn.setVisibility(compactVersion ? "excluded" : "visible"); + rightColumn.setVisibility(compactVersion ? "excluded" : "visible"); + }; + fitResourceCards(); + window.addEventListener("resize", () => fitResourceCards()); + }, + + statics: { + MIN_STUDIES_PER_ROW: 4, + SIDE_SPACER_WIDTH: 180 }, members: { __stack: null, _loadingPage: null, + __mainLayoutWithSides: null, + __leftColumn: null, _mainLayout: null, _showLoadingPage: function(label) { @@ -52,13 +97,17 @@ qx.Class.define("osparc.ui.basic.LoadingPageHandler", { }, _showMainLayout: function() { - this.__stack.setSelection([this._mainLayout]); + this.__stack.setSelection([this.__mainLayoutWithSides]); }, _hideLoadingPage: function() { this._showMainLayout(); }, + _addToLeftColumn: function(widget, props = {}) { + this.__leftColumn.add(widget, props); + }, + _addToMainLayout: function(widget, props = {}) { this._mainLayout.add(widget, props); } From 1f94a48de77f586a1a82cd0751196609a511f70a Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 5 Jun 2024 21:45:48 +0200 Subject: [PATCH 018/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20App=20Mode:?= =?UTF-8?q?=20Pop=20up=20instructions=20if=20any=20(#5915)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/GridButtonPlaceholder.js | 2 -- .../client/source/class/osparc/node/BaseNodeView.js | 13 ++++++++----- .../source/class/osparc/ui/markdown/Markdown.js | 5 ++++- .../client/source/class/osparc/ui/window/Window.js | 6 ++++-- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js index 9d7b6ae1959..89a055b1217 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js @@ -116,8 +116,6 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { this.getChildControl("progress-bar").set({ visibility: showProgressBar ?
"visible" : "excluded" }); - - // this._getChildren().forEach(item => item.setOpacity(0.8)); }, isLocked: function() { diff --git a/services/static-webserver/client/source/class/osparc/node/BaseNodeView.js b/services/static-webserver/client/source/class/osparc/node/BaseNodeView.js index 0e0935e39ea..8e103067905 100644 --- a/services/static-webserver/client/source/class/osparc/node/BaseNodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/BaseNodeView.js @@ -144,7 +144,7 @@ qx.Class.define("osparc.node.BaseNodeView", { const instructionsBtn = this.__instructionsBtn = new qx.ui.form.Button(this.tr("Instructions"), "@FontAwesome5Solid/book/17").set({ backgroundColor: "background-main-3" }); - instructionsBtn.addListener("appear", () => osparc.utils.Utils.makeButtonBlink(instructionsBtn, 3)); + instructionsBtn.addListener("appear", () => this.__openInstructions(), this); instructionsBtn.addListener("execute", () => this.__openInstructions(), this); header.add(instructionsBtn); @@ -247,22 +247,25 @@ qx.Class.define("osparc.node.BaseNodeView", { const descView = new osparc.ui.markdown.Markdown().set({ value: desc, padding: 3, - noMargin: true + noMargin: true, + font: "text-14" }); const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(descView); const title = this.tr("Instructions") + " - " + this.getNode().getLabel(); const width = 500; const height = 500; - const win = osparc.ui.window.Window.popUpInWindow(scrollContainer, title, width, height).set({ + const win = this.__instructionsWindow = osparc.ui.window.Window.popUpInWindow(scrollContainer, title, width, height).set({ modal: false, clickAwayClose: false }); win.getContentElement().setStyles({ "border-color": qx.theme.manager.Color.getInstance().resolve("strong-main") }); - win.addListener("close", () => this.__instructionsWindow = null, this); - this.__instructionsWindow = win; + win.addListener("close", () => { + this.__instructionsWindow = null; + osparc.utils.Utils.makeButtonBlink(this.__instructionsBtn, 2); + }, this); } }, diff --git a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js index 0fc232372db..aecfd5df5d4 100644 --- a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js +++ b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js @@ -80,6 +80,7 @@ qx.Class.define("osparc.ui.markdown.Markdown", { _applyMarkdown: function(value = "") { this.__loadMarked.then(() => { const renderer = new marked.Renderer(); + const linkRenderer = renderer.link; renderer.link = (href, title, text) => { const linkColor = qx.theme.manager.Color.getInstance().resolve("link"); @@ -88,16 +89,18 @@ qx.Class.define("osparc.ui.markdown.Markdown", { const linkWithRightColor = html.replace(/^ { this.__parseImages(); this.__resizeMe(); }, this, 100); + this.__resizeMe(); }).catch(error => console.error(error)); }, diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Window.js b/services/static-webserver/client/source/class/osparc/ui/window/Window.js index 1da5d533296..eb22887cc84 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Window.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Window.js @@ -108,8 +108,10 @@ qx.Class.define("osparc.ui.window.Window", { this.setOpacity(0); this.base(arguments); setTimeout(() => { - this.center(); - this.setOpacity(1); + if (this) { + this.center(); + 
this.setOpacity(1); + } }, 1); } else { this.base(arguments); From 4b037455ace6e673b2450f2c9aa940c63bafcd02 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 6 Jun 2024 08:20:14 +0200 Subject: [PATCH 019/219] =?UTF-8?q?=F0=9F=8E=A8=E2=99=BB=EF=B8=8FUse=20str?= =?UTF-8?q?uctured=20parent=20project/node=20in=20director-v2=20and=20conn?= =?UTF-8?q?ect=20with=20RUT=20(#5877)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../utils_projects_metadata.py | 5 + .../utils_projects_nodes.py | 1 - .../api/routes/computations.py | 123 ++++++++++-------- .../core/errors.py | 2 +- .../models/comp_runs.py | 4 + .../modules/comp_scheduler/base_scheduler.py | 52 +++++--- .../db/repositories/projects_metadata.py | 33 ++--- .../utils/computations.py | 2 +- .../simcore_service_director_v2/utils/db.py | 3 + .../utils/rabbitmq.py | 15 ++- .../unit/with_dbs/test_utils_rabbitmq.py | 5 + .../projects/_crud_api_create.py | 7 + .../projects/_crud_handlers.py | 1 - 13 files changed, 159 insertions(+), 94 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 6d72bd43e3c..39749b7fdbf 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -65,6 +65,11 @@ class Config: async def get(connection: SAConnection, project_uuid: uuid.UUID) -> ProjectMetadata: + """ + Raises: + DBProjectNotFoundError: project not found + + """ # JOIN LEFT OUTER get_stmt = ( sa.select( diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 40a3726bab0..09cb8a561f4 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -273,7 +273,6 @@ async def get_project_id_from_node_id( """ WARNING: this function should not be used! it has a flaw! a Node ID is not unique and there can be more than one project linked to it. - TODO: return project idS? 
and adapt code Raises: ProjectNodesNodeNotFound: if no node_id found diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 905c048b19e..52988271471 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -18,7 +18,7 @@ import contextlib import logging -from typing import Annotated, Any +from typing import Annotated, Any, Final import networkx as nx from fastapi import APIRouter, Depends, HTTPException @@ -38,10 +38,7 @@ from pydantic import AnyHttpUrl, parse_obj_as from servicelib.async_utils import run_sequentially_in_context from servicelib.rabbitmq import RabbitMQRPCClient -from simcore_postgres_database.utils_projects_nodes import ( - ProjectNodesNodeNotFoundError, - ProjectNodesNonUniqueNodeFoundError, -) +from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError from starlette import status from starlette.requests import Request from tenacity import retry @@ -75,12 +72,7 @@ from ...modules.db.repositories.users import UsersRepository from ...modules.director_v0 import DirectorV0Client from ...modules.resource_usage_tracker_client import ResourceUsageTrackerClient -from ...utils.computations import ( - find_deprecated_tasks, - get_pipeline_state_from_task_states, - is_pipeline_running, - is_pipeline_stopped, -) +from ...utils import computations as utils from ...utils.dags import ( compute_pipeline_details, compute_pipeline_started_timestamp, @@ -99,7 +91,7 @@ from ..dependencies.scheduler import get_scheduler from .computations_tasks import analyze_pipeline -PIPELINE_ABORT_TIMEOUT_S = 10 +_PIPELINE_ABORT_TIMEOUT_S: Final[int] = 10 _logger = logging.getLogger(__name__) @@ -109,10 +101,10 @@ async def _check_pipeline_not_running( comp_tasks_repo: CompTasksRepository, computation: ComputationCreate ) -> None: - pipeline_state = get_pipeline_state_from_task_states( + pipeline_state = utils.get_pipeline_state_from_task_states( await comp_tasks_repo.list_computational_tasks(computation.project_id) ) - if is_pipeline_running(pipeline_state): + if utils.is_pipeline_running(pipeline_state): raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail=f"Project {computation.project_id} already started, current state is {pipeline_state}", @@ -126,7 +118,7 @@ async def _check_pipeline_startable( clusters_repo: ClustersRepository, ) -> None: assert computation.product_name # nosec - if deprecated_tasks := await find_deprecated_tasks( + if deprecated_tasks := await utils.find_deprecated_tasks( computation.user_id, computation.product_name, [ @@ -155,39 +147,64 @@ async def _check_pipeline_startable( ) from exc +_UNKNOWN_NODE: Final[str] = "unknown node" + + async def _get_project_metadata( + project_id: ProjectID, project_repo: ProjectsRepository, projects_metadata_repo: ProjectsMetadataRepository, - computation: ComputationCreate, ) -> ProjectMetadataDict: - current_project_metadata = await projects_metadata_repo.get_custom_metadata( - computation.project_id - ) - - if not current_project_metadata: - return {} - if "node_id" not in current_project_metadata: - return {} - - parent_node_id = NodeID(current_project_metadata["node_id"]) - parent_node_idstr = NodeIDStr(f"{parent_node_id}") try: - parent_project_id = await project_repo.get_project_id_from_node(parent_node_id) - parent_project = await 
project_repo.get_project(parent_project_id) - assert parent_node_idstr in parent_project.workbench + project_ancestors = await projects_metadata_repo.get_project_ancestors( + project_id + ) + if project_ancestors.parent_project_uuid is None: + # no parents here + return {} + + assert project_ancestors.parent_node_id is not None # nosec + assert project_ancestors.root_project_uuid is not None # nosec + assert project_ancestors.root_node_id is not None # nosec + + async def _get_project_node_names( + project_uuid: ProjectID, node_id: NodeID + ) -> tuple[str, str]: + prj = await project_repo.get_project(project_uuid) + node_id_str = NodeIDStr(f"{node_id}") + if node_id_str not in prj.workbench: + _logger.error( + "%s not found in %s. it is an ancestor of %s. Please check!", + f"{node_id=}", + f"{prj.uuid=}", + f"{project_id=}", + ) + return prj.name, _UNKNOWN_NODE + return prj.name, prj.workbench[node_id_str].label + + parent_project_name, parent_node_name = await _get_project_node_names( + project_ancestors.parent_project_uuid, project_ancestors.parent_node_id + ) + root_parent_project_name, root_parent_node_name = await _get_project_node_names( + project_ancestors.root_project_uuid, project_ancestors.root_node_id + ) return ProjectMetadataDict( - parent_node_id=parent_node_id, - parent_node_name=parent_project.workbench[parent_node_idstr].label, - parent_project_id=parent_project_id, - parent_project_name=parent_project.name, + parent_node_id=project_ancestors.parent_node_id, + parent_node_name=parent_node_name, + parent_project_id=project_ancestors.parent_project_uuid, + parent_project_name=parent_project_name, + root_parent_node_id=project_ancestors.root_node_id, + root_parent_node_name=root_parent_node_name, + root_parent_project_id=project_ancestors.root_project_uuid, + root_parent_project_name=root_parent_project_name, ) - except ( - ProjectNotFoundError, - ProjectNodesNodeNotFoundError, - ProjectNodesNonUniqueNodeFoundError, - ): - _logger.exception("Could not find project/node: %s", f"{parent_node_id=}") - return {} + + except DBProjectNotFoundError: + _logger.exception("Could not find project: %s", f"{project_id=}") + except ProjectNotFoundError as exc: + _logger.exception("Could not find parent project: %s", f"{exc.project_id=}") + + return {} async def _try_start_pipeline( @@ -237,7 +254,7 @@ async def _try_start_pipeline( wallet_id=wallet_id, wallet_name=wallet_name, project_metadata=await _get_project_metadata( - project_repo, projects_metadata_repo, computation + computation.project_id, project_repo, projects_metadata_repo ), ) or {}, @@ -349,7 +366,7 @@ async def create_computation( # noqa: PLR0913 for t in comp_tasks if f"{t.node_id}" in set(minimal_computational_dag.nodes()) ] - pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) + pipeline_state = utils.get_pipeline_state_from_task_states(filtered_tasks) # get run details if any last_run: CompRunsAtDB | None = None @@ -446,7 +463,9 @@ async def get_computation( project_id, comp_pipelines_repo, comp_tasks_repo ) - pipeline_state: RunningState = get_pipeline_state_from_task_states(filtered_tasks) + pipeline_state: RunningState = utils.get_pipeline_state_from_task_states( + filtered_tasks + ) _logger.debug( "Computational task status by %s for %s has %s", @@ -531,9 +550,9 @@ async def stop_computation( filtered_tasks = [ t for t in tasks if f"{t.node_id}" in set(pipeline_dag.nodes()) ] - pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) + pipeline_state = 
utils.get_pipeline_state_from_task_states(filtered_tasks) - if is_pipeline_running(pipeline_state): + if utils.is_pipeline_running(pipeline_state): await scheduler.stop_pipeline(computation_stop.user_id, project_id) # get run details if any @@ -592,8 +611,8 @@ async def delete_computation( comp_tasks: list[CompTaskAtDB] = await comp_tasks_repo.list_computational_tasks( project_id ) - pipeline_state = get_pipeline_state_from_task_states(comp_tasks) - if is_pipeline_running(pipeline_state): + pipeline_state = utils.get_pipeline_state_from_task_states(comp_tasks) + if utils.is_pipeline_running(pipeline_state): if not computation_stop.force: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, @@ -614,7 +633,7 @@ def return_last_value(retry_state: Any) -> Any: return retry_state.outcome.result() @retry( - stop=stop_after_delay(PIPELINE_ABORT_TIMEOUT_S), + stop=stop_after_delay(_PIPELINE_ABORT_TIMEOUT_S), wait=wait_random(0, 2), retry_error_callback=return_last_value, retry=retry_if_result(lambda result: result is False), @@ -625,17 +644,17 @@ async def check_pipeline_stopped() -> bool: comp_tasks: list[ CompTaskAtDB ] = await comp_tasks_repo.list_computational_tasks(project_id) - pipeline_state = get_pipeline_state_from_task_states( + pipeline_state = utils.get_pipeline_state_from_task_states( comp_tasks, ) - return is_pipeline_stopped(pipeline_state) + return utils.is_pipeline_stopped(pipeline_state) # wait for the pipeline to be stopped if not await check_pipeline_stopped(): _logger.error( "pipeline %s could not be stopped properly after %ss", project_id, - PIPELINE_ABORT_TIMEOUT_S, + _PIPELINE_ABORT_TIMEOUT_S, ) # delete the pipeline now diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index e6cfc7c8cd8..d53d5fdca1c 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -19,7 +19,6 @@ } """ - from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -90,6 +89,7 @@ class ProjectNotFoundError(DirectorError): def __init__(self, project_id: ProjectID): super().__init__(f"project {project_id} not found") + self.project_id = project_id class PricingPlanUnitNotFoundError(DirectorError): diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index b8177bdb2a2..1d7800b9788 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -18,6 +18,10 @@ class ProjectMetadataDict(TypedDict, total=False): parent_node_name: str parent_project_id: ProjectID parent_project_name: str + root_parent_project_id: ProjectID + root_parent_project_name: str + root_parent_node_id: NodeID + root_parent_node_name: str class RunMetadataDict(TypedDict, total=False): diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py index bff129e75d7..e496be2d89c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py @@ -10,6 +10,7 @@ The 
sidecar will then change the state to STARTED, then to SUCCESS or FAILED. """ + import asyncio import datetime import logging @@ -413,15 +414,21 @@ async def _process_started_tasks( ), wallet_id=run_metadata.get("wallet_id"), wallet_name=run_metadata.get("wallet_name"), - pricing_plan_id=t.pricing_info.get("pricing_plan_id") - if t.pricing_info - else None, - pricing_unit_id=t.pricing_info.get("pricing_unit_id") - if t.pricing_info - else None, - pricing_unit_cost_id=t.pricing_info.get("pricing_unit_cost_id") - if t.pricing_info - else None, + pricing_plan_id=( + t.pricing_info.get("pricing_plan_id") + if t.pricing_info + else None + ), + pricing_unit_id=( + t.pricing_info.get("pricing_unit_id") + if t.pricing_info + else None + ), + pricing_unit_cost_id=( + t.pricing_info.get("pricing_unit_cost_id") + if t.pricing_info + else None + ), product_name=run_metadata.get( "product_name", UNDEFINED_STR_METADATA ), @@ -431,16 +438,27 @@ async def _process_started_tasks( user_id=user_id, user_email=run_metadata.get("user_email", UNDEFINED_STR_METADATA), project_id=t.project_id, - project_name=run_metadata.get("project_metadata", {}).get( - "parent_project_name", - run_metadata.get("project_name", UNDEFINED_STR_METADATA), + project_name=run_metadata.get("project_metadata", {}).get( # type: ignore[arg-type] + "project_name", UNDEFINED_STR_METADATA ), node_id=t.node_id, - node_name=run_metadata.get("project_metadata", {}).get( - "parent_node_name", - run_metadata.get("node_id_names_map", {}).get( - t.node_id, UNDEFINED_STR_METADATA - ), + node_name=run_metadata.get("node_id_names_map", {}).get( + t.node_id, UNDEFINED_STR_METADATA + ), + parent_project_id=run_metadata.get("project_metadata", {}).get( + "parent_project_id" + ), + parent_node_id=run_metadata.get("project_metadata", {}).get( + "parent_node_id" + ), + root_parent_project_id=run_metadata.get("project_metadata", {}).get( + "root_parent_project_id" + ), + root_parent_project_name=run_metadata.get( + "project_metadata", {} + ).get("root_parent_project_name"), + root_parent_node_id=run_metadata.get("project_metadata", {}).get( + "root_parent_node_id" ), service_key=ServiceKey(t.image.name), service_version=ServiceVersion(t.image.tag), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py index 0d62739f88e..dca3b0d32c5 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_metadata.py @@ -1,4 +1,4 @@ -from typing import Any +from dataclasses import dataclass from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -10,26 +10,27 @@ from ._base import BaseRepository +@dataclass(frozen=True, kw_only=True, slots=True) +class ProjectAncestors: + parent_project_uuid: ProjectID | None + parent_node_id: NodeID | None + root_project_uuid: ProjectID | None + root_node_id: NodeID | None + + class ProjectsMetadataRepository(BaseRepository): - async def get_custom_metadata(self, project_id: ProjectID) -> dict[str, Any] | None: + async def get_project_ancestors(self, project_id: ProjectID) -> ProjectAncestors: """ Raises: - DBProjectNotFoundError + DBProjectNotFoundError: project not found """ async with self.db_engine.acquire() as conn: project_metadata: ProjectMetadata = await projects_metadata_get( conn, 
project_id ) - custom_metadata: dict[str, Any] | None = project_metadata.custom - return custom_metadata - - async def get_parent_project_and_node( - self, project_id: ProjectID - ) -> tuple[ProjectID, NodeID] | None: - async with self.db_engine.acquire() as conn: - project_metadata: ProjectMetadata = await projects_metadata_get( - conn, project_id - ) - if project_metadata.parent_project_uuid and project_metadata.parent_node_id: - return project_metadata.parent_project_uuid, project_metadata.parent_node_id - return None + return ProjectAncestors( + parent_project_uuid=project_metadata.parent_project_uuid, + parent_node_id=project_metadata.parent_node_id, + root_project_uuid=project_metadata.root_parent_project_uuid, + root_node_id=project_metadata.root_parent_node_id, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index b16c61ebaf0..bbb10c03600 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -12,7 +12,7 @@ from ..modules.catalog import CatalogClient from ..modules.db.tables import NodeClass -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) _COMPLETED_STATES = (RunningState.ABORTED, RunningState.FAILED, RunningState.SUCCESS) _RUNNING_STATES = (RunningState.STARTED,) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/db.py b/services/director-v2/src/simcore_service_director_v2/utils/db.py index 716d32c4cd2..755a13c038b 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/db.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/db.py @@ -1,4 +1,5 @@ import json +import logging from typing import Any from fastapi import FastAPI @@ -25,6 +26,8 @@ RunningState.UNKNOWN: StateType.FAILED } +_logger = logging.getLogger(__name__) + def to_clusters_db(cluster: BaseCluster, *, only_update: bool) -> dict[str, Any]: db_model: dict[str, Any] = json.loads( diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py index e239d57b673..57d014a3c0f 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py @@ -85,6 +85,11 @@ async def publish_service_resource_tracking_started( # pylint: disable=too-many project_name: str, node_id: NodeID, node_name: str, + parent_project_id: ProjectID | None, + parent_node_id: NodeID | None, + root_parent_project_id: ProjectID | None, + root_parent_project_name: str | None, + root_parent_node_id: NodeID | None, service_key: ServiceKey, service_version: ServiceVersion, service_type: ServiceType, @@ -106,11 +111,11 @@ async def publish_service_resource_tracking_started( # pylint: disable=too-many project_name=project_name, node_id=node_id, node_name=node_name, - parent_project_id=project_id, # <-- SAN please modify - root_parent_project_id=project_id, # <-- SAN please modify - root_parent_project_name=project_name, # <-- SAN please modify - parent_node_id=node_id, # <-- SAN please modify - root_parent_node_id=node_id, # <-- SAN please modify + parent_project_id=parent_project_id or project_id, + root_parent_project_id=root_parent_project_id or project_id, + root_parent_project_name=root_parent_project_name or project_name, + parent_node_id=parent_node_id or 
node_id, + root_parent_node_id=root_parent_node_id or node_id, service_key=service_key, service_version=service_version, service_type=service_type, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py index 35dbbc91b75..2b402d02b6b 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py @@ -194,6 +194,11 @@ async def test_publish_service_resource_tracking_started( project_name=project.name, node_id=random_task.node_id, node_name=project.workbench[NodeIDStr(f"{random_task.node_id}")].label, + parent_project_id=None, + parent_node_id=None, + root_parent_project_id=None, + root_parent_project_name=None, + root_parent_node_id=None, service_key=ServiceKey(random_task.image.name), service_version=ServiceVersion(random_task.image.tag), service_type=ServiceType.COMPUTATIONAL, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 597b3fc5e91..75e66d272d7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -358,6 +358,13 @@ async def create_project( # pylint: disable=too-many-arguments # noqa: C901, P raise web.HTTPUnauthorized from exc except (ParentProjectNotFoundError, ParentNodeNotFoundError) as exc: + if project_uuid := new_project.get("uuid"): + await projects_api.submit_delete_project_task( + app=request.app, + project_uuid=project_uuid, + user_id=user_id, + simcore_user_agent=simcore_user_agent, + ) raise web.HTTPNotFound(reason=f"{exc}") from exc except asyncio.CancelledError: diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index a7611b83c65..56b98aa210b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -127,7 +127,6 @@ async def create_project(request: web.Request): if not request.can_read_body: # request w/o body - assert query_params.from_study # nosec predefined_project = None else: # request w/ body (I found cases in which body = {}) From b20923a96f3f5f2a6b189087d7ac74a84971bedd Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 6 Jun 2024 10:00:14 +0200 Subject: [PATCH 020/219] =?UTF-8?q?=F0=9F=90=9BDask=20sidecar:=20disable?= =?UTF-8?q?=20concurrent=20pulling=20of=20files=20until=20new=20version=20?= =?UTF-8?q?is=20out=20(#5912)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../computational_sidecar/core.py | 4 +++- tests/e2e/Makefile | 2 -- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index 424ee1b0eb9..e5a7ce93207 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -92,7 +92,9 @@ async def _write_input_data( ) else: local_input_data_file[input_key] = input_params - await 
asyncio.gather(*download_tasks) + # NOTE: temporary solution until new version is created + for task in download_tasks: + await task input_data_file.write_text(json.dumps(local_input_data_file)) await self._publish_sidecar_log("All the input data were downloaded.") diff --git a/tests/e2e/Makefile b/tests/e2e/Makefile index 30d457eca3b..de599825054 100644 --- a/tests/e2e/Makefile +++ b/tests/e2e/Makefile @@ -108,8 +108,6 @@ clean-up: ## remove everything test: ## test the platform # tests npm test - # tests whether tutorial run - npm run tutorials http://127.0.0.1:9081 .PHONY: dev-prepare-jupyters From 0839f0b6a31ce783e03accb1e2ec9f17ef46d9a8 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Thu, 6 Jun 2024 12:03:03 +0200 Subject: [PATCH 021/219] =?UTF-8?q?=E2=9C=A8=20Efs=20Guardian=20can=20crea?= =?UTF-8?q?te=20directories=20(OPS=20=E2=9A=A0=EF=B8=8F)=20(#5911)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env-devel | 3 + .../api_schemas_efs_guardian/__init__.py | 9 +++ .../rpc_interfaces/efs_guardian/__init__.py | 0 .../efs_guardian/efs_manager.py | 34 +++++++++++ services/docker-compose.yml | 7 +++ .../api/rpc/_efs_guardian.py | 25 ++++++++ .../api/rpc/{rpc_routes.py => routes.py} | 13 ++++- .../core/application.py | 8 ++- .../core/settings.py | 13 +++-- .../services/efs_manager.py | 39 +++++++++++++ .../services/efs_manager_setup.py | 54 +++++++++++++++++ .../services/modules/__init__.py | 0 .../services/modules/rabbitmq.py | 56 ++++++++++++++++++ services/efs-guardian/tests/unit/conftest.py | 40 ++++++++++++- .../tests/unit/test_api_health.py | 24 +++++++- .../tests/unit/test_efs_manager.py | 58 +++++++++++++++++++ 16 files changed, 374 insertions(+), 9 deletions(-) create mode 100644 packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py create mode 100644 packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/__init__.py create mode 100644 packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py rename services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/{rpc_routes.py => routes.py} (53%) create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/services/modules/__init__.py create mode 100644 services/efs-guardian/src/simcore_service_efs_guardian/services/modules/rabbitmq.py create mode 100644 services/efs-guardian/tests/unit/test_efs_manager.py diff --git a/.env-devel b/.env-devel index 648f3037638..b93c1ad9b9b 100644 --- a/.env-devel +++ b/.env-devel @@ -64,6 +64,9 @@ DIRECTOR_PORT=8080 DIRECTOR_REGISTRY_CACHING_TTL=900 DIRECTOR_REGISTRY_CACHING=True +EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com +EFS_MOUNTED_PATH=/tmp/efs + # DIRECTOR_V2 ---- COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE=S3 diff --git a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py new 
file mode 100644 index 00000000000..50793febaf9 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py @@ -0,0 +1,9 @@ +from typing import Final + +from pydantic import parse_obj_as + +from ..rabbitmq_basic_types import RPCNamespace + +EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( + RPCNamespace, "efs-guardian" +) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/__init__.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py new file mode 100644 index 00000000000..b5e8aa219e9 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py @@ -0,0 +1,34 @@ +import logging +from pathlib import Path +from typing import Final + +from models_library.api_schemas_efs_guardian import EFS_GUARDIAN_RPC_NAMESPACE +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from models_library.rabbitmq_basic_types import RPCMethodName +from pydantic import NonNegativeInt, parse_obj_as + +from ....logging_utils import log_decorator +from ....rabbitmq import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + + +_DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 + + +@log_decorator(_logger, level=logging.DEBUG) +async def create_project_specific_data_dir( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + project_id: ProjectID, + node_id: NodeID, +) -> Path: + output: Path = await rabbitmq_rpc_client.request( + EFS_GUARDIAN_RPC_NAMESPACE, + parse_obj_as(RPCMethodName, "create_project_specific_data_dir"), + project_id=project_id, + node_id=node_id, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + return output diff --git a/services/docker-compose.yml b/services/docker-compose.yml index b55d599fe72..194e76769e8 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -351,6 +351,13 @@ services: - default environment: LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + RABBIT_HOST: ${RABBIT_HOST} + RABBIT_PASSWORD: ${RABBIT_PASSWORD} + RABBIT_PORT: ${RABBIT_PORT} + RABBIT_SECURE: ${RABBIT_SECURE} + RABBIT_USER: ${RABBIT_USER} + EFS_DNS_NAME: ${EFS_DNS_NAME} + EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} invitations: image: ${DOCKER_REGISTRY:-itisfoundation}/invitations:${DOCKER_IMAGE_TAG:-latest} diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py new file mode 100644 index 00000000000..61b5e588b83 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py @@ -0,0 +1,25 @@ +from pathlib import Path + +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from servicelib.rabbitmq import RPCRouter + +from ...services.efs_manager_setup import get_efs_manager + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=()) +async def create_project_specific_data_dir( + app: FastAPI, + *, + project_id: ProjectID, + node_id: NodeID, +) -> Path: + _efs_manager = get_efs_manager(app) + + return await _efs_manager.create_project_specific_data_dir( + project_id=project_id, + 
node_id=node_id, + ) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/routes.py similarity index 53% rename from services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py rename to services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/routes.py index c79ed1f7ed3..9a1f349fa29 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/rpc_routes.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/routes.py @@ -1,11 +1,22 @@ from collections.abc import Awaitable, Callable from fastapi import FastAPI +from models_library.api_schemas_efs_guardian import EFS_GUARDIAN_RPC_NAMESPACE +from servicelib.rabbitmq import RPCRouter + +from ...services.modules.rabbitmq import get_rabbitmq_rpc_server +from . import _efs_guardian + +ROUTERS: list[RPCRouter] = [ + _efs_guardian.router, +] def on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: async def _start() -> None: - assert app # nosec + rpc_server = get_rabbitmq_rpc_server(app) + for router in ROUTERS: + await rpc_server.register_router(router, EFS_GUARDIAN_RPC_NAMESPACE, app) return _start diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index da0d9deb0d2..88c20f25ea3 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -11,7 +11,9 @@ APP_STARTED_DISABLED_BANNER_MSG, ) from ..api.rest.routes import setup_api_routes -from ..api.rpc.rpc_routes import setup_rpc_routes +from ..api.rpc.routes import setup_rpc_routes +from ..services.efs_manager_setup import setup as setup_efs_manager +from ..services.modules.rabbitmq import setup as setup_rabbitmq from .settings import ApplicationSettings logger = logging.getLogger(__name__) @@ -34,10 +36,12 @@ def create_app(settings: ApplicationSettings) -> FastAPI: assert app.state.settings.API_VERSION == API_VERSION # nosec # PLUGINS SETUP + setup_rabbitmq(app) + setup_api_routes(app) setup_rpc_routes(app) - # ERROR HANDLERS + setup_efs_manager(app) # EVENTS async def _on_startup() -> None: diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index aedbca71f0c..e35bcd64d0a 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -1,4 +1,5 @@ from functools import cached_property +from pathlib import Path from typing import Final, cast from fastapi import FastAPI @@ -10,6 +11,7 @@ ) from pydantic import Field, PositiveInt, validator from settings_library.base import BaseCustomSettings +from settings_library.rabbit import RabbitSettings from settings_library.utils_logging import MixinLoggingSettings from .._meta import API_VERSION, API_VTAG, APP_NAME @@ -22,7 +24,11 @@ class AwsEfsSettings(BaseCustomSettings): description="AWS Elastic File System DNS name", example="fs-xxx.efs.us-east-1.amazonaws.com", ) - EFS_BASE_DIRECTORY: str = Field(default="project-specific-data") + EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str = Field(default="project-specific-data") + EFS_MOUNTED_PATH: Path = Field( + default=Path("/data/efs"), + description="This is the path where EFS is mounted to the EC2 machine", + ) class 
ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): @@ -66,9 +72,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings | None = Field( - auto_default_from_env=True - ) + EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field(auto_default_from_env=True) + EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) @cached_property def LOG_LEVEL(self) -> LogLevel: # noqa: N802 diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py new file mode 100644 index 00000000000..4e249188c47 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py @@ -0,0 +1,39 @@ +from dataclasses import dataclass +from pathlib import Path + +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID + + +@dataclass(frozen=True) +class EfsManager: + app: FastAPI + + _efs_mounted_path: Path + _project_specific_data_base_directory: str + + @classmethod + async def create( + cls, + app: FastAPI, + efs_mounted_path: Path, + project_specific_data_base_directory: str, + ): + return cls(app, efs_mounted_path, project_specific_data_base_directory) + + async def initialize_directories(self): + _dir_path = self._efs_mounted_path / self._project_specific_data_base_directory + Path.mkdir(_dir_path, parents=True, exist_ok=True) + + async def create_project_specific_data_dir( + self, project_id: ProjectID, node_id: NodeID + ) -> Path: + _dir_path = ( + self._efs_mounted_path + / self._project_specific_data_base_directory + / f"{project_id}" + / f"{node_id}" + ) + Path.mkdir(_dir_path, parents=True, exist_ok=True) + return _dir_path diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py new file mode 100644 index 00000000000..9f0ded69552 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py @@ -0,0 +1,54 @@ +import logging +from typing import cast + +from fastapi import FastAPI +from simcore_service_efs_guardian.core.settings import AwsEfsSettings +from tenacity import ( + AsyncRetrying, + before_sleep_log, + stop_after_delay, + wait_random_exponential, +) + +from ..exceptions.custom_errors import ApplicationSetupError +from .efs_manager import EfsManager + +_logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + aws_efs_settings: AwsEfsSettings = ( + app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS + ) + + app.state.efs_manager = None + app.state.efs_manager = efs_manager = await EfsManager.create( + app, + aws_efs_settings.EFS_MOUNTED_PATH, + aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY, + ) + + async for attempt in AsyncRetrying( + reraise=True, + stop=stop_after_delay(120), + wait=wait_random_exponential(max=30), + before_sleep=before_sleep_log(_logger, logging.WARNING), + ): + with attempt: + await efs_manager.initialize_directories() + + async def on_shutdown() -> None: + if app.state.efs_manager: + ... 
+ + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_efs_manager(app: FastAPI) -> EfsManager: + if not app.state.efs_manager: + raise ApplicationSetupError( + msg="Efs Manager is not available. Please check the configuration." + ) + return cast(EfsManager, app.state.efs_manager) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/__init__.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/rabbitmq.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/rabbitmq.py new file mode 100644 index 00000000000..82ef1aae84c --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/rabbitmq.py @@ -0,0 +1,56 @@ +import logging +from typing import cast + +from fastapi import FastAPI +from servicelib.rabbitmq import ( + RabbitMQClient, + RabbitMQRPCClient, + wait_till_rabbitmq_responsive, +) +from settings_library.rabbit import RabbitSettings + +from ...exceptions.custom_errors import ApplicationSetupError + +logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.rabbitmq_client = None + settings: RabbitSettings | None = app.state.settings.EFS_GUARDIAN_RABBITMQ + if not settings: + raise ApplicationSetupError( + msg="Rabbit MQ client is de-activated in the settings" + ) + await wait_till_rabbitmq_responsive(settings.dsn) + app.state.rabbitmq_client = RabbitMQClient( + client_name="efs-guardian", settings=settings + ) + app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create( + client_name="efs_guardian_rpc_server", settings=settings + ) + + async def on_shutdown() -> None: + if app.state.rabbitmq_client: + await app.state.rabbitmq_client.close() + if app.state.rabbitmq_rpc_server: + await app.state.rabbitmq_rpc_server.close() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_rabbitmq_client(app: FastAPI) -> RabbitMQClient: + if not app.state.rabbitmq_client: + raise ApplicationSetupError( + msg="RabbitMQ client is not available. Please check the configuration." 
+ ) + return cast(RabbitMQClient, app.state.rabbitmq_client) + + +def get_rabbitmq_rpc_server(app: FastAPI) -> RabbitMQRPCClient: + assert app.state.rabbitmq_rpc_server # nosec + return cast(RabbitMQRPCClient, app.state.rabbitmq_rpc_server) + + +__all__ = ("RabbitMQClient",) diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py index 9c53ab29a3f..1030c433bd9 100644 --- a/services/efs-guardian/tests/unit/conftest.py +++ b/services/efs-guardian/tests/unit/conftest.py @@ -3,8 +3,9 @@ # pylint:disable=redefined-outer-name import re -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Callable from pathlib import Path +from typing import Awaitable import httpx import pytest @@ -14,13 +15,24 @@ from fastapi import FastAPI from httpx import ASGITransport from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from servicelib.rabbitmq import RabbitMQRPCClient +from settings_library.rabbit import RabbitSettings from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings pytest_plugins = [ "pytest_simcore.cli_runner", + "pytest_simcore.docker_compose", + "pytest_simcore.docker_registry", + "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.pydantic_models", + "pytest_simcore.pytest_global_environs", + "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", + "pytest_simcore.tmp_path_extra", + "pytest_simcore.aws_s3_service", + "pytest_simcore.aws_server", ] @@ -83,6 +95,9 @@ def app_environment( monkeypatch, { **docker_compose_service_efs_guardian_env_vars, + "EFS_DNS_NAME": "fs-xxx.efs.us-east-1.amazonaws.com", + "EFS_MOUNTED_PATH": "/tmp/efs", + "EFS_PROJECT_SPECIFIC_DATA_DIRECTORY": "project-specific-data", }, ) @@ -115,3 +130,26 @@ async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: client._transport, ASGITransport # pylint: disable=protected-access ) yield client + + +@pytest.fixture +async def rpc_client( + rabbit_service: RabbitSettings, + app: FastAPI, + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("client") + + +# @pytest.fixture +# def mocked_setup_rabbitmq(mocker: MockerFixture): +# return ( +# mocker.patch( +# "simcore_service_efs_guardian.core.application.setup_rabbitmq", +# autospec=True, +# ), +# mocker.patch( +# "simcore_service_efs_guardian.core.application.setup_rpc_routes", +# autospec=True, +# ), +# ) diff --git a/services/efs-guardian/tests/unit/test_api_health.py b/services/efs-guardian/tests/unit/test_api_health.py index 791fb2bee26..22bc3377a8f 100644 --- a/services/efs-guardian/tests/unit/test_api_health.py +++ b/services/efs-guardian/tests/unit/test_api_health.py @@ -3,10 +3,32 @@ # pylint:disable=redefined-outer-name import httpx +import pytest +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from settings_library.rabbit import RabbitSettings from starlette import status +pytest_simcore_core_services_selection = ["rabbit"] +pytest_simcore_ops_services_selection = [] -async def test_healthcheck(client: httpx.AsyncClient): + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + rabbit_env_vars_dict: EnvVarsDict, # rabbitMQ settings from 'rabbit' service +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + 
{ + **app_environment, + **rabbit_env_vars_dict, + }, + ) + + +async def test_healthcheck(rabbit_service: RabbitSettings, client: httpx.AsyncClient): response = await client.get("/") response.raise_for_status() assert response.status_code == status.HTTP_200_OK diff --git a/services/efs-guardian/tests/unit/test_efs_manager.py b/services/efs-guardian/tests/unit/test_efs_manager.py new file mode 100644 index 00000000000..acc72a9790f --- /dev/null +++ b/services/efs-guardian/tests/unit/test_efs_manager.py @@ -0,0 +1,58 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from pathlib import Path + +import pytest +from faker import Faker +from fastapi import FastAPI +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager +from simcore_service_efs_guardian.core.settings import AwsEfsSettings + +pytest_simcore_core_services_selection = ["rabbit"] +pytest_simcore_ops_services_selection = [] + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + rabbit_env_vars_dict: EnvVarsDict, # rabbitMQ settings from 'rabbit' service +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **app_environment, + **rabbit_env_vars_dict, + }, + ) + + +async def test_rpc_create_project_specific_data_dir( + rpc_client: RabbitMQRPCClient, + faker: Faker, + app: FastAPI, +): + aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS + + _project_id = faker.uuid4() + _node_id = faker.uuid4() + + result = await efs_manager.create_project_specific_data_dir( + rpc_client, project_id=_project_id, node_id=_node_id + ) + assert isinstance(result, Path) + _expected_path = ( + aws_efs_settings.EFS_MOUNTED_PATH + / aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY + / _project_id + / _node_id + ) + assert _expected_path == result + assert _expected_path.exists From 009f1b252585d1931471d61b1dd1b9eaf22a85ad Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 6 Jun 2024 13:35:54 +0200 Subject: [PATCH 022/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend=20bugfix]=20Mo?= =?UTF-8?q?ve=20side=20spacer=20to=20Resource=20Browser=20(#5919)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/DataBrowser.js | 6 +- .../osparc/dashboard/ResourceBrowserBase.js | 63 ++++++++++++++++--- .../osparc/ui/basic/LoadingPageHandler.js | 59 ++--------------- 3 files changed, 64 insertions(+), 64 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js index 3e7947d41de..4d6a585078b 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js @@ -53,7 +53,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { icon: "@FontAwesome5Solid/sync-alt/14", allowGrowX: false }); - this._addToMainLayout(control); + this._addToLayout(control); break; case "tree-folder-layout": control = new qx.ui.splitpane.Pane("horizontal"); @@ -61,7 +61,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { width: 2, 
backgroundColor: "scrollbar-passive" }); - this._addToMainLayout(control, { + this._addToLayout(control, { flex: 1 }); break; @@ -88,7 +88,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { break; case "actions-toolbar": control = new qx.ui.toolbar.ToolBar(); - this._addToMainLayout(control); + this._addToLayout(control); break; } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index f93d59ff922..3c91b5af6a3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -33,6 +33,47 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this._showLoadingPage(this.tr("Starting") + " " + osparc.store.StaticInfo.getInstance().getDisplayName()); + const padding = osparc.dashboard.Dashboard.PADDING; + const leftColumnWidth = this.self().SIDE_SPACER_WIDTH; + const emptyColumnMinWidth = 50; + const spacing = 20; + const mainLayoutsScroll = 8; + + const mainLayoutWithSideSpacers = new qx.ui.container.Composite(new qx.ui.layout.HBox(spacing)) + this._addToMainLayout(mainLayoutWithSideSpacers); + + this.__leftLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + width: leftColumnWidth + }); + mainLayoutWithSideSpacers.add(this.__leftLayout); + + this.__centerLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + mainLayoutWithSideSpacers.add(this.__centerLayout); + + const rightColum = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + mainLayoutWithSideSpacers.add(rightColum, { + flex: 1 + }); + + const itemWidth = osparc.dashboard.GridButtonBase.ITEM_WIDTH + osparc.dashboard.GridButtonBase.SPACING; + this.__centerLayout.setMinWidth(this.self().MIN_GRID_CARDS_PER_ROW * itemWidth + mainLayoutsScroll); + const fitResourceCards = () => { + const w = document.documentElement.clientWidth; + const nStudies = Math.floor((w - 2*padding - 2*spacing - leftColumnWidth - emptyColumnMinWidth) / itemWidth); + const newWidth = nStudies * itemWidth + 8; + if (newWidth > this.__centerLayout.getMinWidth()) { + this.__centerLayout.setWidth(newWidth); + } else { + this.__centerLayout.setWidth(this.__centerLayout.getMinWidth()); + } + + const compactVersion = w < this.__centerLayout.getMinWidth() + leftColumnWidth + emptyColumnMinWidth; + this.__leftLayout.setVisibility(compactVersion ? "excluded" : "visible"); + rightColum.setVisibility(compactVersion ? 
"excluded" : "visible"); + }; + fitResourceCards(); + window.addEventListener("resize", () => fitResourceCards()); + this.addListener("appear", () => this._moreResourcesRequired()); }, @@ -42,6 +83,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { statics: { PAGINATED_STUDIES: 10, + MIN_GRID_CARDS_PER_ROW: 4, + SIDE_SPACER_WIDTH: 180, checkLoggedIn: function() { const isLogged = osparc.auth.Manager.getInstance().isLoggedIn(); @@ -165,6 +208,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, members: { + __leftLayout: null, + __centerLayout: null, _resourceType: null, _resourcesList: null, _topBar: null, @@ -182,7 +227,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { scroll.getChildControl("pane").addListener("scrollY", () => this._moreResourcesRequired(), this); control = this._createLayout(); scroll.add(control); - this._addToMainLayout(scroll, { + this._addToLayout(scroll, { flex: 1 }); break; @@ -191,6 +236,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { return control || this.base(arguments, id); }, + _addToLayout: function(widget, props = {}) { + this.__centerLayout.add(widget, props) + }, + initResources: function() { throw new Error("Abstract method called!"); }, @@ -205,7 +254,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { _createResourcesLayout: function() { const topBar = this.__createTopBar(); - this._addToMainLayout(topBar); + this._addToLayout(topBar); const toolbar = this._toolbar = new qx.ui.toolbar.ToolBar().set({ backgroundColor: "transparent", @@ -213,7 +262,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { paddingRight: 8, alignY: "middle" }); - this._addToMainLayout(toolbar); + this._addToLayout(toolbar); this.__viewModeLayout = new qx.ui.toolbar.Part(); @@ -224,7 +273,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { resourcesContainer.addListener("publishTemplate", e => this.fireDataEvent("publishTemplate", e.getData())); resourcesContainer.addListener("tagClicked", e => this._searchBarFilter.addTagActiveFilter(e.getData())); resourcesContainer.addListener("emptyStudyClicked", e => this._deleteResourceRequested(e.getData())); - this._addToMainLayout(resourcesContainer); + this._addToLayout(resourcesContainer); }, __createTopBar: function() { @@ -321,8 +370,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { _addResourceFilter: function() { const resourceFilter = new osparc.dashboard.ResourceFilter(this._resourceType).set({ marginTop: osparc.dashboard.SearchBarFilter.HEIGHT + 10, // aligned with toolbar buttons: search bar + spacing - maxWidth: osparc.ui.basic.LoadingPageHandler.SIDE_SPACER_WIDTH, - width: osparc.ui.basic.LoadingPageHandler.SIDE_SPACER_WIDTH + maxWidth: this.self().SIDE_SPACER_WIDTH, + width: this.self().SIDE_SPACER_WIDTH }); resourceFilter.addListener("changeSharedWith", e => { @@ -340,7 +389,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { resourceFilter.filterChanged(filterData); }); - this._addToLeftColumn(resourceFilter); + this.__leftLayout.add(resourceFilter); }, /** diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js b/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js index e5bf60dab2a..f8090e1bdfa 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/LoadingPageHandler.js @@ -35,59 +35,14 @@ 
qx.Class.define("osparc.ui.basic.LoadingPageHandler", { this._loadingPage = new osparc.ui.message.Loading(); stack.add(this._loadingPage); - const padding = osparc.dashboard.Dashboard.PADDING; - const leftColumnWidth = this.self().SIDE_SPACER_WIDTH; - const emptyColumnMinWidth = 50; - const spacing = 20; - const mainLayoutsScroll = 8; - - this.__mainLayoutWithSides = new qx.ui.container.Composite(new qx.ui.layout.HBox(spacing)) - stack.add(this.__mainLayoutWithSides); - - this.__leftColum = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ - width: leftColumnWidth - }); - this.__mainLayoutWithSides.add(this.__leftColum); - - this._mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - this.__mainLayoutWithSides.add(this._mainLayout); - - const rightColum = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - this.__mainLayoutWithSides.add(rightColum, { - flex: 1 - }); - - const itemWidth = osparc.dashboard.GridButtonBase.ITEM_WIDTH + osparc.dashboard.GridButtonBase.SPACING; - this._mainLayout.setMinWidth(this.self().MIN_STUDIES_PER_ROW * itemWidth + mainLayoutsScroll); - const fitResourceCards = () => { - const w = document.documentElement.clientWidth; - const nStudies = Math.floor((w - 2*padding - 2*spacing - leftColumnWidth - emptyColumnMinWidth) / itemWidth); - const newWidth = nStudies * itemWidth + 8; - if (newWidth > this._mainLayout.getMinWidth()) { - this._mainLayout.setWidth(newWidth); - } else { - this._mainLayout.setWidth(this._mainLayout.getMinWidth()); - } - - const compactVersion = w < this._mainLayout.getMinWidth() + leftColumnWidth + emptyColumnMinWidth; - this.__leftColum.setVisibility(compactVersion ? "excluded" : "visible"); - rightColum.setVisibility(compactVersion ? "excluded" : "visible"); - }; - fitResourceCards(); - window.addEventListener("resize", () => fitResourceCards()); - }, - - statics: { - MIN_STUDIES_PER_ROW: 4, - SIDE_SPACER_WIDTH: 180 + this.__mainLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox()) + stack.add(this.__mainLayout); }, members: { __stack: null, _loadingPage: null, - __mainLayoutWithSides: null, - __leftColum: null, - _mainLayout: null, + __mainLayout: null, _showLoadingPage: function(label) { if (label) { @@ -97,19 +52,15 @@ qx.Class.define("osparc.ui.basic.LoadingPageHandler", { }, _showMainLayout: function() { - this.__stack.setSelection([this.__mainLayoutWithSides]); + this.__stack.setSelection([this.__mainLayout]); }, _hideLoadingPage: function() { this._showMainLayout(); }, - _addToLeftColumn: function(widget, props = {}) { - this.__leftColum.add(widget, props); - }, - _addToMainLayout: function(widget, props = {}) { - this._mainLayout.add(widget, props); + this.__mainLayout.add(widget, props); } } }); From 74d48705e2c467d0c5c3cabce81ce7654fc3429a Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Thu, 6 Jun 2024 18:45:23 +0200 Subject: [PATCH 023/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=F0=9F=8E=A8=20`ooil?= =?UTF-8?q?=20config=20init`=20to=20create=20first=20`.osparc`=20config=20?= =?UTF-8?q?layout=20(#5913)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../service-integration/requirements/_base.in | 2 + .../requirements/_base.txt | 35 ++++++- .../service-integration/scripts/ooil.bash | 3 +- .../src/service_integration/cli.py | 61 ------------ .../src/service_integration/cli/__init__.py | 72 ++++++++++++++ .../compose.py => cli/_compose_spec.py} | 65 +++++++------ .../{commands/config.py => 
cli/_config.py} | 94 ++++++++++++------- .../metadata.py => cli/_metadata.py} | 43 +++++---- .../run_creator.py => cli/_run_creator.py} | 34 +++---- .../{commands/test.py => cli/_test.py} | 12 ++- .../service_integration/commands/__init__.py | 0 .../src/service_integration/errors.py | 8 ++ .../src/service_integration/osparc_config.py | 9 +- .../tests/test_command_config.py | 1 + .../tests/test_command_metadata.py | 2 +- 15 files changed, 265 insertions(+), 176 deletions(-) delete mode 100644 packages/service-integration/src/service_integration/cli.py create mode 100644 packages/service-integration/src/service_integration/cli/__init__.py rename packages/service-integration/src/service_integration/{commands/compose.py => cli/_compose_spec.py} (81%) rename packages/service-integration/src/service_integration/{commands/config.py => cli/_config.py} (55%) rename packages/service-integration/src/service_integration/{commands/metadata.py => cli/_metadata.py} (63%) rename packages/service-integration/src/service_integration/{commands/run_creator.py => cli/_run_creator.py} (79%) rename packages/service-integration/src/service_integration/{commands/test.py => cli/_test.py} (67%) delete mode 100644 packages/service-integration/src/service_integration/commands/__init__.py diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index a8955a4a9b5..fee8aa856e2 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -6,7 +6,9 @@ --requirement ../../../packages/models-library/requirements/_base.in click +cookiecutter docker # pytest-plugin +jinja2_time jsonschema # pytest-plugin pytest # pytest-plugin pyyaml diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 44411a2736b..391819cffdd 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -1,20 +1,30 @@ arrow==1.3.0 - # via -r requirements/../../../packages/models-library/requirements/_base.in + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # cookiecutter + # jinja2-time attrs==23.2.0 # via # jsonschema # referencing +binaryornot==0.4.4 + # via cookiecutter certifi==2024.2.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests +chardet==5.2.0 + # via binaryornot charset-normalizer==3.3.2 # via requests click==8.1.7 # via # -r requirements/_base.in + # cookiecutter # typer +cookiecutter==2.6.0 + # via -r requirements/_base.in dnspython==2.6.1 # via email-validator docker==7.1.0 @@ -29,6 +39,14 @@ idna==3.7 # requests iniconfig==2.0.0 # via pytest +jinja2==3.1.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # cookiecutter + # jinja2-time +jinja2-time==0.2.0 + # via -r requirements/_base.in jsonschema==4.22.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -37,6 +55,8 @@ jsonschema-specifications==2023.12.1 # via jsonschema markdown-it-py==3.0.0 # via rich +markupsafe==2.1.5 + # via jinja2 mdurl==0.1.2 # via markdown-it-py orjson==3.10.3 @@ -59,19 +79,26 @@ pytest==8.2.0 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-slugify==8.0.4 + # via 
cookiecutter pyyaml==6.0.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in + # cookiecutter referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.32.2 - # via docker + # via + # cookiecutter + # docker rich==13.7.1 - # via typer + # via + # cookiecutter + # typer rpds-py==0.18.0 # via # jsonschema @@ -80,6 +107,8 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil +text-unidecode==1.3 + # via python-slugify tomli==2.0.1 # via pytest typer==0.12.3 diff --git a/packages/service-integration/scripts/ooil.bash b/packages/service-integration/scripts/ooil.bash index 7e5eb116d17..b4527683ef1 100755 --- a/packages/service-integration/scripts/ooil.bash +++ b/packages/service-integration/scripts/ooil.bash @@ -6,7 +6,7 @@ set -o nounset set -o pipefail IFS=$'\n\t' -IMAGE_NAME="${DOCKER_REGISTRY:-itisfoundation}/service-integration:${OOIL_IMAGE_TAG:-master-github-latest}" +IMAGE_NAME="${DOCKER_REGISTRY:-local}/service-integration:${OOIL_IMAGE_TAG:-production}" WORKDIR="$(pwd)" # @@ -20,6 +20,7 @@ WORKDIR="$(pwd)" run() { docker run \ --rm \ + --tty \ --volume="/etc/group:/etc/group:ro" \ --volume="/etc/passwd:/etc/passwd:ro" \ --user="$(id --user "$USER")":"$(id --group "$USER")" \ diff --git a/packages/service-integration/src/service_integration/cli.py b/packages/service-integration/src/service_integration/cli.py deleted file mode 100644 index a257be65c14..00000000000 --- a/packages/service-integration/src/service_integration/cli.py +++ /dev/null @@ -1,61 +0,0 @@ -# Allows entrypoint via python -m as well - - -import rich -import typer - -from ._meta import __version__ -from .commands import compose, config, metadata, run_creator, test -from .settings import AppSettings - -app = typer.Typer() - - -def _version_callback(value: bool): - if value: - rich.print(__version__) - raise typer.Exit - - -@app.callback() -def main( - ctx: typer.Context, - version: bool = typer.Option( - False, - "--version", - callback=_version_callback, - is_eager=True, - ), - registry_name: str = typer.Option( - None, - "--REGISTRY_NAME", - help="image registry name. Full url or prefix used as prefix in an image name", - ), - compose_version: str = typer.Option( - None, - "--COMPOSE_VERSION", - help="version used for docker compose specification", - ), -): - """o2s2parc service integration library""" - assert isinstance(version, bool | None) # nosec - - overrides = {} - if registry_name: - overrides["REGISTRY_NAME"] = registry_name - - if compose_version: - overrides["COMPOSE_VERSION"] = compose_version - - # save states - ctx.settings = AppSettings.parse_obj(overrides) - - -# new -app.command("compose")(compose.main) -app.command("config")(config.main) -app.command("test")(test.main) -# legacy -app.command("bump-version")(metadata.bump_version) -app.command("get-version")(metadata.get_version) -app.command("run-creator")(run_creator.main) diff --git a/packages/service-integration/src/service_integration/cli/__init__.py b/packages/service-integration/src/service_integration/cli/__init__.py new file mode 100644 index 00000000000..26b9697633c --- /dev/null +++ b/packages/service-integration/src/service_integration/cli/__init__.py @@ -0,0 +1,72 @@ +# Allows entrypoint via python -m as well + +from typing import Annotated + +import rich +import typer + +from .._meta import __version__ +from ..settings import AppSettings +from . 
import _compose_spec, _metadata, _run_creator, _test +from ._config import config_app + +app = typer.Typer() + + +def _version_callback(value: bool): # noqa: FBT002 + if value: + rich.print(__version__) + raise typer.Exit + + +@app.callback() +def main( + ctx: typer.Context, + registry_name: Annotated[ + str, + typer.Option( + "--REGISTRY_NAME", + help="image registry name. Full url or prefix used as prefix in an image name", + ), + ] = None, + compose_version: Annotated[ + str, + typer.Option( + "--COMPOSE_VERSION", + help="version used for docker compose specification", + ), + ] = None, + version: Annotated[ # noqa: FBT002 + bool, + typer.Option( + "--version", + callback=_version_callback, + is_eager=True, + ), + ] = False, +): + """o2s2parc service Integration Library (OOIL in short)""" + assert isinstance(version, bool | None) # nosec + + overrides = {} + if registry_name: + overrides["REGISTRY_NAME"] = registry_name + + if compose_version: + overrides["COMPOSE_VERSION"] = compose_version + + # save states + ctx.settings = AppSettings.parse_obj(overrides) + + +# +# REGISTER commands and/or sub-apps +# + +app.command("compose")(_compose_spec.create_compose) +app.add_typer(config_app, name="config", help="Manage osparc config files") +app.command("test")(_test.run_tests) +# legacy +app.command("bump-version")(_metadata.bump_version) +app.command("get-version")(_metadata.get_version) +app.command("run-creator")(_run_creator.run_creator) diff --git a/packages/service-integration/src/service_integration/commands/compose.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py similarity index 81% rename from packages/service-integration/src/service_integration/commands/compose.py rename to packages/service-integration/src/service_integration/cli/_compose_spec.py index 3904828cad5..c2f2477c622 100644 --- a/packages/service-integration/src/service_integration/commands/compose.py +++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py @@ -1,7 +1,8 @@ import subprocess -from datetime import datetime from pathlib import Path +from typing import Annotated +import arrow import rich import typer import yaml @@ -9,6 +10,7 @@ from rich.console import Console from ..compose_spec_model import ComposeSpecification +from ..errors import UndefinedOciImageSpecError from ..oci_image_spec import LS_LABEL_PREFIX, OCI_LABEL_PREFIX from ..osparc_config import ( OSPARC_CONFIG_DIRNAME, @@ -61,10 +63,10 @@ def create_docker_compose_image_spec( config_basedir = meta_config_path.parent - # required + # REQUIRED meta_cfg = MetadataConfig.from_yaml(meta_config_path) - # required + # REQUIRED if docker_compose_overwrite_path: docker_compose_overwrite_cfg = DockerComposeOverwriteConfig.from_yaml( docker_compose_overwrite_path @@ -74,11 +76,10 @@ def create_docker_compose_image_spec( service_name=meta_cfg.service_name() ) - # optional + # OPTIONAL runtime_cfg = None if service_config_path: try: - # TODO: should include default? 
runtime_cfg = RuntimeConfig.from_yaml(service_config_path) except FileNotFoundError: rich.print("No runtime config found (optional), using default.") @@ -90,13 +91,11 @@ def create_docker_compose_image_spec( (config_basedir / f"{OCI_LABEL_PREFIX}.yml").read_text() ) if not oci_spec: - msg = "Undefined OCI image spec" - raise ValueError(msg) + raise UndefinedOciImageSpecError oci_labels = to_labels(oci_spec, prefix_key=OCI_LABEL_PREFIX) extra_labels.update(oci_labels) - except (FileNotFoundError, ValueError): - + except (FileNotFoundError, UndefinedOciImageSpecError): try: # if not OCI, try label-schema ls_spec = yaml.safe_load( @@ -109,9 +108,11 @@ def create_docker_compose_image_spec( "No explicit config for OCI/label-schema found (optional), skipping OCI annotations." ) # add required labels - extra_labels[f"{LS_LABEL_PREFIX}.build-date"] = datetime.utcnow().strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) + + # SEE https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys + # Format the datetime object as a string following RFC-3339 + rfc3339_format = arrow.now().format("YYYY-MM-DDTHH:mm:ssZ") + extra_labels[f"{LS_LABEL_PREFIX}.build-date"] = rfc3339_format extra_labels[f"{LS_LABEL_PREFIX}.schema-version"] = "1.0" extra_labels[f"{LS_LABEL_PREFIX}.vcs-ref"] = _run_git_or_empty_string( @@ -130,25 +131,28 @@ def create_docker_compose_image_spec( ) -def main( +def create_compose( ctx: typer.Context, - config_path: Path = typer.Option( - OSPARC_CONFIG_DIRNAME, - "-m", - "--metadata", - help="osparc config file or folder. " - "If the latter, it will scan for configs using the glob pattern 'config_path/**/metadata.yml' ", - ), - to_spec_file: Path = typer.Option( - Path("docker-compose.yml"), - "-f", - "--to-spec-file", - help="Output docker-compose image spec", - ), + config_path: Annotated[ + Path, + typer.Option( + "-m", + "--metadata", + help="osparc config file or folder. " + "If the latter, it will scan for configs using the glob pattern 'config_path/**/metadata.yml' ", + ), + ] = Path(OSPARC_CONFIG_DIRNAME), + to_spec_file: Annotated[ + Path, + typer.Option( + "-f", + "--to-spec-file", + help="Output docker-compose image spec", + ), + ] = Path("docker-compose.yml"), ): - """create docker image/runtime compose-specs from an osparc config""" + """Creates the docker image/runtime compose-spec file from an .osparc config""" - # TODO: all these MUST be replaced by osparc_config.ConfigFilesStructure if not config_path.exists(): msg = "Invalid path to metadata file or folder" raise typer.BadParameter(msg) @@ -168,10 +172,10 @@ def main( config_name = meta_config.parent.name configs_kwargs_map[config_name] = {} - # load meta [required] + # load meta REQUIRED configs_kwargs_map[config_name]["meta_config_path"] = meta_config - # others [optional] + # others OPTIONAL for file_name, arg_name in ( ("docker-compose.overwrite.yml", "docker_compose_overwrite_path"), ("runtime.yml", "service_config_path"), @@ -194,7 +198,6 @@ def main( settings, **configs_kwargs_map[config_name] ).dict(exclude_unset=True) - # FIXME: shaky! why first decides ?? 
if n == 0: compose_spec_dict = nth_compose_spec else: diff --git a/packages/service-integration/src/service_integration/commands/config.py b/packages/service-integration/src/service_integration/cli/_config.py similarity index 55% rename from packages/service-integration/src/service_integration/commands/config.py rename to packages/service-integration/src/service_integration/cli/_config.py index e1e5b8ef5b1..9932140a48e 100644 --- a/packages/service-integration/src/service_integration/commands/config.py +++ b/packages/service-integration/src/service_integration/cli/_config.py @@ -1,27 +1,41 @@ import json from pathlib import Path -from typing import Final +from typing import Annotated, Final import rich import typer import yaml -from pydantic import ValidationError -from pydantic.main import BaseModel +from pydantic import BaseModel from ..compose_spec_model import ComposeSpecification +from ..errors import InvalidLabelsError from ..osparc_config import ( + OSPARC_CONFIG_COMPOSE_SPEC_NAME, OSPARC_CONFIG_DIRNAME, + OSPARC_CONFIG_METADATA_NAME, + OSPARC_CONFIG_RUNTIME_NAME, DockerComposeOverwriteConfig, MetadataConfig, RuntimeConfig, ) -def create_osparc_specs( +def _get_labels_or_raise(build_labels) -> dict[str, str]: + if isinstance(build_labels, list): + return dict(item.strip().split("=") for item in build_labels) + if isinstance(build_labels, dict): + return build_labels + if labels__root__ := build_labels.__root__: + assert isinstance(labels__root__, dict) # nosec + return labels__root__ + raise InvalidLabelsError(build_labels=build_labels) + + +def _create_config_from_compose_spec( compose_spec_path: Path, - docker_compose_overwrite_path: Path = Path("docker-compose.overwrite.yml"), - metadata_path: Path = Path("metadata.yml"), - service_specs_path: Path = Path("runtime-spec.yml"), + docker_compose_overwrite_path: Path, + metadata_path: Path, + service_specs_path: Path, ): rich.print(f"Creating osparc config files from {compose_spec_path}") @@ -49,22 +63,12 @@ def _save(service_name: str, filename: Path, model: BaseModel): for service_name in compose_spec.services: try: - labels: dict[str, str] = {} if build_labels := compose_spec.services[ service_name ].build.labels: # AttributeError if build is str - if isinstance(build_labels, list): - labels = dict(item.strip().split("=") for item in build_labels) - elif isinstance(build_labels, dict): - labels = build_labels - elif labels__root__ := build_labels.__root__: - assert isinstance(labels__root__, dict) # nosec - labels = labels__root__ - else: - msg = f"Invalid build labels {build_labels}" - raise ValueError(msg) + labels: dict[str, str] = _get_labels_or_raise(build_labels) meta_cfg = MetadataConfig.from_labels_annotations(labels) _save(service_name, metadata_path, meta_cfg) @@ -82,7 +86,11 @@ def _save(service_name: str, filename: Path, model: BaseModel): runtime_cfg = RuntimeConfig.from_labels_annotations(labels) _save(service_name, service_specs_path, runtime_cfg) - except (AttributeError, ValidationError, TypeError, ValueError) as err: + except ( # noqa: PERF203 + AttributeError, + TypeError, + ValueError, + ) as err: rich.print( f"WARNING: failure producing specs for {service_name}: {err}" ) @@ -90,30 +98,46 @@ def _save(service_name: str, filename: Path, model: BaseModel): rich.print("osparc config files created") -def main( - from_spec_file: Path = typer.Option( - Path("docker-compose.yml"), - "-f", - "--from-spec-file", - help="docker-compose used to deduce osparc config", - ), +config_app = typer.Typer() + + 
+@config_app.command(name="create") +def create_config( + from_spec_file: Annotated[ + Path, + typer.Option( + "-f", + "--from-spec-file", + help="docker-compose used to deduce osparc config", + ), + ] = Path("docker-compose.yml"), ): - """Creates osparc config from complete docker compose-spec""" - # TODO: sync defaults among CLI commands + """Creates osparc configuration folder from a complete docker compose-spec""" config_dir = from_spec_file.parent / OSPARC_CONFIG_DIRNAME - project_cfg_path = config_dir / "docker-compose.overwrite.yml" - meta_cfg_path = config_dir / "metadata.yml" - runtime_cfg_path = config_dir / "runtime.yml" + project_cfg_path = config_dir / OSPARC_CONFIG_COMPOSE_SPEC_NAME + meta_cfg_path = config_dir / OSPARC_CONFIG_METADATA_NAME + runtime_cfg_path = config_dir / OSPARC_CONFIG_RUNTIME_NAME meta_cfg_path.parent.mkdir(parents=True, exist_ok=True) runtime_cfg_path.parent.mkdir(parents=True, exist_ok=True) rich.print(f"Creating {config_dir} from {from_spec_file} ...") - create_osparc_specs( + _create_config_from_compose_spec( from_spec_file, project_cfg_path, meta_cfg_path, runtime_cfg_path ) -if __name__ == "__main__": - # pylint: disable=no-value-for-parameter - main() +_COOKIECUTTER_GITHUB_URL = "gh:itisfoundation/cookiecutter-osparc-service" + + +@config_app.command(name="init") +def init_config( + template: Annotated[ + str, typer.Option(help="Github repo or path to the template") + ] = _COOKIECUTTER_GITHUB_URL, + checkout: Annotated[str, typer.Option(help="Branch if different from main")] = None, +): + """runs cookie-cutter""" + from cookiecutter.main import cookiecutter + + cookiecutter(template, checkout=checkout) diff --git a/packages/service-integration/src/service_integration/commands/metadata.py b/packages/service-integration/src/service_integration/cli/_metadata.py similarity index 63% rename from packages/service-integration/src/service_integration/commands/metadata.py rename to packages/service-integration/src/service_integration/cli/_metadata.py index eb6e153b7f5..a06504b3b99 100644 --- a/packages/service-integration/src/service_integration/commands/metadata.py +++ b/packages/service-integration/src/service_integration/cli/_metadata.py @@ -1,13 +1,12 @@ from collections import OrderedDict from enum import Enum from pathlib import Path +from typing import Annotated import rich import typer -import yaml -from models_library.services import ServiceDockerData -from ..osparc_config import OSPARC_CONFIG_DIRNAME +from ..osparc_config import OSPARC_CONFIG_DIRNAME, MetadataConfig from ..versioning import bump_version_string from ..yaml_utils import ordered_safe_dump, ordered_safe_load @@ -24,21 +23,23 @@ class UpgradeTags(str, Enum): def bump_version( - target_version: TargetVersionChoices = typer.Argument( - TargetVersionChoices.SEMANTIC_VERSION - ), - upgrade: UpgradeTags = typer.Option(..., case_sensitive=False), - metadata_file: Path = typer.Option( - "metadata/metadata.yml", - help="The metadata yaml file", - ), + upgrade: Annotated[UpgradeTags, typer.Option(case_sensitive=False)], + metadata_file: Annotated[ + Path, + typer.Option( + help="The metadata yaml file", + ), + ] = Path("metadata/metadata.yml"), + target_version: Annotated[ + TargetVersionChoices, typer.Argument() + ] = TargetVersionChoices.SEMANTIC_VERSION, ): """Bumps target version in metadata (legacy)""" # load raw_data: OrderedDict = ordered_safe_load(metadata_file.read_text()) # parse and validate - metadata = ServiceDockerData(**raw_data) + metadata = MetadataConfig(**raw_data) # get + 
bump + set attrname = target_version.replace("-", "_") @@ -54,18 +55,20 @@ def bump_version( def get_version( - target_version: TargetVersionChoices = typer.Argument( - TargetVersionChoices.SEMANTIC_VERSION - ), - metadata_file: Path = typer.Option( - f"{OSPARC_CONFIG_DIRNAME}/metadata.yml", - help="The metadata yaml file", - ), + target_version: Annotated[ + TargetVersionChoices, typer.Argument() + ] = TargetVersionChoices.SEMANTIC_VERSION, + metadata_file: Annotated[ + Path, + typer.Option( + help="The metadata yaml file", + ), + ] = Path(f"{OSPARC_CONFIG_DIRNAME}/metadata.yml"), ): """Prints to output requested version (legacy)""" # parse and validate - metadata = ServiceDockerData(**yaml.safe_load(metadata_file.read_text())) + metadata = MetadataConfig.from_yaml(metadata_file) attrname = target_version.replace("-", "_") current_version: str = getattr(metadata, attrname) diff --git a/packages/service-integration/src/service_integration/commands/run_creator.py b/packages/service-integration/src/service_integration/cli/_run_creator.py similarity index 79% rename from packages/service-integration/src/service_integration/commands/run_creator.py rename to packages/service-integration/src/service_integration/cli/_run_creator.py index 3b08948eeec..8cadef194a4 100644 --- a/packages/service-integration/src/service_integration/commands/run_creator.py +++ b/packages/service-integration/src/service_integration/cli/_run_creator.py @@ -1,10 +1,11 @@ import stat from pathlib import Path +from typing import Annotated import typer import yaml -from ..osparc_config import OSPARC_CONFIG_DIRNAME +from ..osparc_config import OSPARC_CONFIG_DIRNAME, OSPARC_CONFIG_METADATA_NAME def get_input_config(metadata_file: Path) -> dict: @@ -16,17 +17,21 @@ def get_input_config(metadata_file: Path) -> dict: return inputs -def main( - metadata_file: Path = typer.Option( - f"{OSPARC_CONFIG_DIRNAME}/metadata.yml", - "--metadata", - help="The metadata yaml of the node", - ), - run_script_file_path: Path = typer.Option( - ..., - "--runscript", - help="Path to the run script ", - ), +def run_creator( + run_script_file_path: Annotated[ + Path, + typer.Option( + "--runscript", + help="Path to the run script ", + ), + ], + metadata_file: Annotated[ + Path, + typer.Option( + "--metadata", + help="The metadata yaml of the node", + ), + ] = Path(f"{OSPARC_CONFIG_DIRNAME}/{OSPARC_CONFIG_METADATA_NAME}"), ): """Creates a sh script that uses jq tool to retrieve variables to use in sh from a json file for use in an osparc service (legacy). 
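# A hedged usage sketch for the renamed `run_creator` command, exercised through
# typer's test runner; the standalone `Typer` app and the file names are
# illustrative assumptions, and `.osparc/metadata.yml` is expected to exist:
#
#     from typer.testing import CliRunner
#     import typer
#
#     app = typer.Typer()
#     app.command()(run_creator)  # single-command app: no subcommand name needed
#
#     result = CliRunner().invoke(
#         app, ["--metadata", ".osparc/metadata.yml", "--runscript", "service.sh"]
#     )
#     assert result.exit_code == 0  # service.sh is now owner-executable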
@@ -79,8 +84,3 @@ def main( run_script_file_path.write_text(shell_script) st = run_script_file_path.stat() run_script_file_path.chmod(st.st_mode | stat.S_IEXEC) - - -if __name__ == "__main__": - # pylint: disable=no-value-for-parameter - main() diff --git a/packages/service-integration/src/service_integration/commands/test.py b/packages/service-integration/src/service_integration/cli/_test.py similarity index 67% rename from packages/service-integration/src/service_integration/commands/test.py rename to packages/service-integration/src/service_integration/cli/_test.py index 3bf25551dc2..cb999b32307 100644 --- a/packages/service-integration/src/service_integration/commands/test.py +++ b/packages/service-integration/src/service_integration/cli/_test.py @@ -1,15 +1,19 @@ from pathlib import Path +from typing import Annotated import rich import typer from ..service import pytest_runner +test_app = typer.Typer() -def main( - service_dir: Path = typer.Argument( - ..., help="Root directory of the service under test" - ), + +@test_app.command("run") +def run_tests( + service_dir: Annotated[ + Path, typer.Argument(help="Root directory of the service under test") + ], ): """Runs tests against service directory""" diff --git a/packages/service-integration/src/service_integration/commands/__init__.py b/packages/service-integration/src/service_integration/commands/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index e9a857edc1c..8d216b7d918 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -7,3 +7,11 @@ class ServiceIntegrationError(PydanticErrorMixin, RuntimeError): class ConfigNotFoundError(ServiceIntegrationError): msg_template = "could not find any osparc config under {basedir}" + + +class UndefinedOciImageSpecError(ServiceIntegrationError): + ... 
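# For context, a sketch of the `msg_template` contract these errors rely on:
# keyword arguments passed at raise time fill the template placeholders. The
# mixin below is a simplified stand-in for illustration, not the real
# `PydanticErrorMixin` implementation:
#
#     class _ErrorMixinSketch:
#         msg_template = "an error occurred"
#
#         def __init__(self, **ctx) -> None:
#             super().__init__(self.msg_template.format(**ctx))
#
#     class _InvalidLabelsSketch(_ErrorMixinSketch, ValueError):
#         msg_template = "Invalid build labels {build_labels}"
#
#     err = _InvalidLabelsSketch(build_labels={"io.simcore.name": "x"})
#     assert str(err) == "Invalid build labels {'io.simcore.name': 'x'}"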
+
+
+class InvalidLabelsError(PydanticErrorMixin, ValueError):
+    msg_template = "Invalid build labels {build_labels}"
diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py
index 17c5f1d181f..b3eb998e7dd 100644
--- a/packages/service-integration/src/service_integration/osparc_config.py
+++ b/packages/service-integration/src/service_integration/osparc_config.py
@@ -14,7 +14,7 @@
 import logging
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Final, Literal
 
 from models_library.callbacks_mapping import CallbacksMapping
 from models_library.service_settings_labels import (
@@ -48,7 +48,10 @@
 
 _logger = logging.getLogger(__name__)
 
-OSPARC_CONFIG_DIRNAME = ".osparc"
+OSPARC_CONFIG_DIRNAME: Final[str] = ".osparc"
+OSPARC_CONFIG_COMPOSE_SPEC_NAME: Final[str] = "docker-compose.overwrite.yml"
+OSPARC_CONFIG_METADATA_NAME: Final[str] = "metadata.yml"
+OSPARC_CONFIG_RUNTIME_NAME: Final[str] = "runtime.yml"
 
 
 SERVICE_KEY_FORMATS = {
@@ -94,7 +97,7 @@ class MetadataConfig(ServiceDockerData):
 
     @validator("contact")
     @classmethod
-    def check_contact_in_authors(cls, v, values):
+    def _check_contact_in_authors(cls, v, values):
         """catalog service relies on contact and author to define access rights"""
         authors_emails = {author.email for author in values["authors"]}
         if v not in authors_emails:
diff --git a/packages/service-integration/tests/test_command_config.py b/packages/service-integration/tests/test_command_config.py
index 08967ba63e6..f6243efd59f 100644
--- a/packages/service-integration/tests/test_command_config.py
+++ b/packages/service-integration/tests/test_command_config.py
@@ -27,6 +27,7 @@ def test_create_new_osparc_config(
 
     result = run_program_with_args(
         "config",
+        "create",
         "--from-spec-file",
         str(tmp_compose_spec),
     )
diff --git a/packages/service-integration/tests/test_command_metadata.py b/packages/service-integration/tests/test_command_metadata.py
index 24073dcbc42..7204fc953c6 100644
--- a/packages/service-integration/tests/test_command_metadata.py
+++ b/packages/service-integration/tests/test_command_metadata.py
@@ -8,7 +8,7 @@
 import pytest
 import yaml
 
-from service_integration.commands.metadata import TargetVersionChoices
+from service_integration.cli._metadata import TargetVersionChoices
 
 
 @pytest.fixture

From d25082c7934cdd4574c94e8d29bc9b6aeb1c01d6 Mon Sep 17 00:00:00 2001
From: Andrei Neagu <5694077+GitHK@users.noreply.github.com>
Date: Fri, 7 Jun 2024 13:24:18 +0200
Subject: [PATCH 024/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor=20stop=20?=
 =?UTF-8?q?service=20command=20=E2=9A=A0=EF=B8=8F=F0=9F=9A=A8=20(#5924)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Andrei Neagu
---
 .../dynamic_services.py                       | 25 ++++-
 .../dynamic_scheduler/services.py             | 15 ++-
 .../api/rpc/_services.py                      | 15 +--
 .../services/director_v2/_public_client.py    |  8 +-
 .../services/director_v2/_thin_client.py      | 34 +++----
 .../unit/api_rpc/test_api_rpc__services.py    | 64 +++++++++----
 .../dynamic_scheduler/api.py                  | 25 ++---
 .../garbage_collector/_core_orphans.py        | 13 ++-
 .../projects/_nodes_handlers.py               | 24 ++---
 .../projects/projects_api.py                  | 15 ++-
 .../unit/isolated/test_dynamic_scheduler.py   | 12 +--
 .../isolated/test_garbage_collector_core.py   | 25 +++--
 .../02/test_projects_crud_handlers__delete.py | 17 +++-
 .../02/test_projects_nodes_handler.py         | 18 +++-
 .../02/test_projects_states_handlers.py       | 23 +++--
 .../test_resource_manager.py
| 92 ++++++++++++------- 16 files changed, 276 insertions(+), 149 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py index 6adb136b2c3..48ef3c48445 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py @@ -1,12 +1,16 @@ from typing import Any, ClassVar from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo, PricingInfo from models_library.services_resources import ServiceResourcesDictHelpers +from models_library.users import UserID from models_library.wallets import WalletInfo +from pydantic import BaseModel -class RPCDynamicServiceCreate(DynamicServiceCreate): +class DynamicServiceStart(DynamicServiceCreate): request_dns: str request_scheme: str simcore_user_agent: str @@ -32,3 +36,22 @@ class Config: "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], } } + + +class DynamicServiceStop(BaseModel): + user_id: UserID + project_id: ProjectID + node_id: NodeID + simcore_user_agent: str + save_state: bool + + class Config: + schema_extra: ClassVar[dict[str, Any]] = { + "example": { + "user_id": 234, + "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", + "node_id": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", + "simcore_user_agent": "", + "save_state": True, + } + } diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index 662280faf27..9da2dad425e 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -4,7 +4,8 @@ from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler import DYNAMIC_SCHEDULER_RPC_NAMESPACE from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID @@ -44,12 +45,12 @@ async def get_service_status( async def run_dynamic_service( rabbitmq_rpc_client: RabbitMQRPCClient, *, - rpc_dynamic_service_create: RPCDynamicServiceCreate, + dynamic_service_start: DynamicServiceStart, ) -> DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, parse_obj_as(RPCMethodName, "run_dynamic_service"), - rpc_dynamic_service_create=rpc_dynamic_service_create, + dynamic_service_start=dynamic_service_start, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) assert isinstance(result, DynamicServiceGet | NodeGet) # nosec @@ -60,17 +61,13 @@ async def run_dynamic_service( async def stop_dynamic_service( rabbitmq_rpc_client: RabbitMQRPCClient, *, - node_id: NodeID, - simcore_user_agent: str, - save_state: bool, + dynamic_service_stop: DynamicServiceStop, timeout_s: NonNegativeInt, ) -> None: result = await rabbitmq_rpc_client.request( 
DYNAMIC_SCHEDULER_RPC_NAMESPACE, parse_obj_as(RPCMethodName, "stop_dynamic_service"), - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=dynamic_service_stop, timeout_s=timeout_s, ) assert result is None # nosec diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py index 416775b9d25..7f27bdcb5dc 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py @@ -1,7 +1,8 @@ from fastapi import FastAPI from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID @@ -27,10 +28,10 @@ async def get_service_status( @router.expose() async def run_dynamic_service( - app: FastAPI, *, rpc_dynamic_service_create: RPCDynamicServiceCreate + app: FastAPI, *, dynamic_service_start: DynamicServiceStart ) -> NodeGet | DynamicServiceGet: director_v2_client = DirectorV2Client.get_from_app_state(app) - return await director_v2_client.run_dynamic_service(rpc_dynamic_service_create) + return await director_v2_client.run_dynamic_service(dynamic_service_start) @router.expose( @@ -40,13 +41,13 @@ async def run_dynamic_service( ) ) async def stop_dynamic_service( - app: FastAPI, *, node_id: NodeID, simcore_user_agent: str, save_state: bool + app: FastAPI, *, dynamic_service_stop: DynamicServiceStop ) -> NodeGet | DynamicServiceGet: director_v2_client = DirectorV2Client.get_from_app_state(app) settings: ApplicationSettings = app.state.settings return await director_v2_client.stop_dynamic_service( - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + node_id=dynamic_service_stop.node_id, + simcore_user_agent=dynamic_service_stop.simcore_user_agent, + save_state=dynamic_service_stop.save_state, timeout=settings.DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT, ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py index e9e6ee6fff0..6c514b48f79 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py @@ -4,7 +4,7 @@ from fastapi import FastAPI, status from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID @@ -55,11 +55,9 @@ async def get_status( raise async def run_dynamic_service( - self, rpc_dynamic_service_create: RPCDynamicServiceCreate + self, dynamic_service_start: DynamicServiceStart ) -> NodeGet | DynamicServiceGet: - response = await self.thin_client.post_dynamic_service( - rpc_dynamic_service_create - ) + response = await 
self.thin_client.post_dynamic_service(dynamic_service_start) dict_response: dict[str, Any] = response.json() # legacy services diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py index 8de10a032c8..e0a138ad18c 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py @@ -3,7 +3,7 @@ from fastapi import FastAPI, status from httpx import Response, Timeout from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, ) from models_library.projects_nodes_io import NodeID from models_library.services_resources import ServiceResourcesDictHelpers @@ -48,29 +48,29 @@ async def get_status(self, node_id: NodeID) -> Response: @retry_on_errors() @expect_status(status.HTTP_201_CREATED) async def post_dynamic_service( - self, rpc_dynamic_service_create: RPCDynamicServiceCreate + self, dynamic_service_start: DynamicServiceStart ) -> Response: post_data = { - "product_name": rpc_dynamic_service_create.product_name, - "can_save": rpc_dynamic_service_create.can_save, - "user_id": rpc_dynamic_service_create.user_id, - "project_id": rpc_dynamic_service_create.project_id, - "key": rpc_dynamic_service_create.key, - "version": rpc_dynamic_service_create.version, - "node_uuid": rpc_dynamic_service_create.node_uuid, - "basepath": f"/x/{rpc_dynamic_service_create.node_uuid}", + "product_name": dynamic_service_start.product_name, + "can_save": dynamic_service_start.can_save, + "user_id": dynamic_service_start.user_id, + "project_id": dynamic_service_start.project_id, + "key": dynamic_service_start.key, + "version": dynamic_service_start.version, + "node_uuid": dynamic_service_start.node_uuid, + "basepath": f"/x/{dynamic_service_start.node_uuid}", "service_resources": ServiceResourcesDictHelpers.create_jsonable( - rpc_dynamic_service_create.service_resources + dynamic_service_start.service_resources ), - "wallet_info": rpc_dynamic_service_create.wallet_info, - "pricing_info": rpc_dynamic_service_create.pricing_info, - "hardware_info": rpc_dynamic_service_create.hardware_info, + "wallet_info": dynamic_service_start.wallet_info, + "pricing_info": dynamic_service_start.pricing_info, + "hardware_info": dynamic_service_start.hardware_info, } headers = { - X_DYNAMIC_SIDECAR_REQUEST_DNS: rpc_dynamic_service_create.request_dns, - X_DYNAMIC_SIDECAR_REQUEST_SCHEME: rpc_dynamic_service_create.request_scheme, - X_SIMCORE_USER_AGENT: rpc_dynamic_service_create.simcore_user_agent, + X_DYNAMIC_SIDECAR_REQUEST_DNS: dynamic_service_start.request_dns, + X_DYNAMIC_SIDECAR_REQUEST_SCHEME: dynamic_service_start.request_scheme, + X_SIMCORE_USER_AGENT: dynamic_service_start.simcore_user_agent, } return await self.client.post( diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index 34387e93d7f..7c8dada1e18 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -11,10 +11,13 @@ from fastapi.encoders import jsonable_encoder from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from 
models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.users import UserID from pytest_mock import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError @@ -24,8 +27,10 @@ ServiceWasNotFoundError, ) from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings pytest_simcore_core_services_selection = [ + "redis", "rabbit", ] @@ -125,9 +130,9 @@ def mock_director_v2_service_state( @pytest.fixture def app_environment( - disable_redis_setup: None, app_environment: EnvVarsDict, rabbit_service: RabbitSettings, + redis_service: RedisSettings, ) -> EnvVarsDict: return app_environment @@ -166,10 +171,10 @@ async def test_get_state( @pytest.fixture -def rpc_dynamic_service_create() -> RPCDynamicServiceCreate: +def dynamic_service_start() -> DynamicServiceStart: # one for legacy and one for new style? - return RPCDynamicServiceCreate.parse_obj( - RPCDynamicServiceCreate.Config.schema_extra["example"] + return DynamicServiceStart.parse_obj( + DynamicServiceStart.Config.schema_extra["example"] ) @@ -221,11 +226,11 @@ async def test_run_dynamic_service( mock_director_v0_service_run: None, mock_director_v2_service_run: None, rpc_client: RabbitMQRPCClient, - rpc_dynamic_service_create: RPCDynamicServiceCreate, + dynamic_service_start: DynamicServiceStart, is_legacy: bool, ): result = await services.run_dynamic_service( - rpc_client, rpc_dynamic_service_create=rpc_dynamic_service_create + rpc_client, dynamic_service_start=dynamic_service_start ) if is_legacy: @@ -254,6 +259,16 @@ def node_id_manual_intervention(faker: Faker) -> NodeID: return faker.uuid4(cast_to=None) +@pytest.fixture +def user_id() -> UserID: + return 42 + + +@pytest.fixture +def project_id(faker: Faker) -> ProjectID: + return faker.uuid4(cast_to=None) + + @pytest.fixture def mock_director_v0_service_stop( fake_director_v0_base_url: str, @@ -344,18 +359,27 @@ async def test_stop_dynamic_service( mock_director_v0_service_stop: None, mock_director_v2_service_stop: None, rpc_client: RabbitMQRPCClient, + user_id: UserID, + project_id: ProjectID, node_id: NodeID, node_id_not_found: NodeID, node_id_manual_intervention: NodeID, simcore_user_agent: str, save_state: bool, ): + def _get_rpc_stop(with_node_id: NodeID) -> DynamicServiceStop: + return DynamicServiceStop( + user_id=user_id, + project_id=project_id, + node_id=with_node_id, + simcore_user_agent=simcore_user_agent, + save_state=save_state, + ) + # service was stopped result = await services.stop_dynamic_service( rpc_client, - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=_get_rpc_stop(node_id), timeout_s=5, ) assert result is None @@ -364,9 +388,7 @@ async def test_stop_dynamic_service( with pytest.raises(ServiceWasNotFoundError): await services.stop_dynamic_service( rpc_client, - node_id=node_id_not_found, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=_get_rpc_stop(node_id_not_found), timeout_s=5, ) @@ -374,9 +396,7 @@ async def test_stop_dynamic_service( with pytest.raises(ServiceWaitingForManualInterventionError): await services.stop_dynamic_service( rpc_client, - 
node_id=node_id_manual_intervention, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=_get_rpc_stop(node_id_manual_intervention), timeout_s=5, ) @@ -399,6 +419,8 @@ def mock_raise_generic_error( async def test_stop_dynamic_service_serializes_generic_errors( mock_raise_generic_error: None, rpc_client: RabbitMQRPCClient, + user_id: UserID, + project_id: ProjectID, node_id: NodeID, simcore_user_agent: str, save_state: bool, @@ -408,8 +430,12 @@ async def test_stop_dynamic_service_serializes_generic_errors( ): await services.stop_dynamic_service( rpc_client, - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=project_id, + node_id=node_id, + simcore_user_agent=simcore_user_agent, + save_state=save_state, + ), timeout_s=5, ) diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index 8709b662541..637d308c56e 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -5,7 +5,8 @@ from aiohttp import web from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import ( NodeGet, @@ -42,20 +43,18 @@ async def get_dynamic_service( async def run_dynamic_service( - app: web.Application, *, rpc_dynamic_service_create: RPCDynamicServiceCreate + app: web.Application, *, dynamic_service_start: DynamicServiceStart ) -> DynamicServiceGet | NodeGet: return await services.run_dynamic_service( get_rabbitmq_rpc_client(app), - rpc_dynamic_service_create=rpc_dynamic_service_create, + dynamic_service_start=dynamic_service_start, ) async def stop_dynamic_service( app: web.Application, *, - node_id: NodeID, - simcore_user_agent: str, - save_state: bool, + dynamic_service_stop: DynamicServiceStop, progress: ProgressBarData | None = None, ) -> None: async with AsyncExitStack() as stack: @@ -65,9 +64,7 @@ async def stop_dynamic_service( settings: DynamicSchedulerSettings = get_plugin_settings(app) await services.stop_dynamic_service( get_rabbitmq_rpc_client(app), - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=dynamic_service_stop, timeout_s=settings.DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT, ) @@ -118,9 +115,13 @@ async def stop_dynamic_services_in_project( services_to_stop = [ stop_dynamic_service( app=app, - node_id=service.node_uuid, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=simcore_user_agent, + save_state=save_state, + ), progress=progress_bar.sub_progress( 1, description=f"{service.node_uuid}" ), diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py index 01426c6ea11..491189039f6 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py 
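# To summarize the new calling convention this commit introduces: callers build
# one self-describing `DynamicServiceStop` payload instead of passing loose
# node_id/simcore_user_agent/save_state keyword arguments. A hedged caller-side
# sketch (the IDs reuse the schema example above; the timeout is illustrative):
#
#     from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
#         DynamicServiceStop,
#     )
#     from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler import services
#
#     async def stop_node(rpc_client) -> None:
#         await services.stop_dynamic_service(
#             rpc_client,
#             dynamic_service_stop=DynamicServiceStop(
#                 user_id=234,
#                 project_id="dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe",
#                 node_id="75c7f3f4-18f9-4678-8610-54a2ade78eaa",
#                 simcore_user_agent="",
#                 save_state=True,
#             ),
#             timeout_s=60,
#         )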
@@ -3,6 +3,9 @@ from aiohttp import web from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE @@ -51,9 +54,13 @@ async def _remove_service( ): await dynamic_scheduler_api.stop_dynamic_service( app, - node_id=node_id, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=save_service_state, + dynamic_service_stop=DynamicServiceStop( + user_id=service.user_id, + project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=save_service_state, + ), ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index 6a7109799e0..8fe5e00d69b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -11,6 +11,9 @@ ServiceAccessRightsGet, ) from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.api_schemas_webserver.projects_nodes import ( NodeCreate, NodeCreated, @@ -326,17 +329,12 @@ async def _stop_dynamic_service_task( _task_progress: TaskProgress, *, app: web.Application, - node_id: NodeID, - simcore_user_agent: str, - save_state: bool, + dynamic_service_stop: DynamicServiceStop, ): # NOTE: _handle_project_nodes_exceptions only decorate handlers try: await dynamic_scheduler_api.stop_dynamic_service( - app, - node_id=node_id, - simcore_user_agent=simcore_user_agent, - save_state=save_state, + app, dynamic_service_stop=dynamic_service_stop ) raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON) @@ -376,11 +374,15 @@ async def stop_node(request: web.Request) -> web.Response: task_context=jsonable_encoder(req_ctx), # task arguments from here on --- app=request.app, - node_id=path_params.node_id, - simcore_user_agent=request.headers.get( - X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE + dynamic_service_stop=DynamicServiceStop( + user_id=req_ctx.user_id, + project_id=path_params.project_id, + node_id=path_params.node_id, + simcore_user_agent=request.headers.get( + X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE + ), + save_state=save_state, ), - save_state=save_state, fire_and_forget=True, ) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 28655495bfb..28296fa4018 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -27,7 +27,8 @@ GetProjectInactivityResponse, ) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects import ProjectPatch from models_library.api_schemas_webserver.projects_nodes import NodePatch @@ -679,7 +680,7 @@ async def _start_dynamic_service( ) await 
dynamic_scheduler_api.run_dynamic_service( app=request.app, - rpc_dynamic_service_create=RPCDynamicServiceCreate( + dynamic_service_start=DynamicServiceStart( product_name=product_name, can_save=save_state, project_id=project_uuid, @@ -800,9 +801,13 @@ async def _remove_service_and_its_data_folders( # no need to save the state of the node when deleting it await dynamic_scheduler_api.stop_dynamic_service( app, - node_id=NodeID(node_uuid), - simcore_user_agent=user_agent, - save_state=False, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=project_uuid, + node_id=NodeID(node_uuid), + simcore_user_agent=user_agent, + save_state=False, + ), ) # remove the node's data if any diff --git a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py index 83de45b76e4..0823f52b1b2 100644 --- a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py +++ b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py @@ -7,7 +7,7 @@ from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, ) from models_library.api_schemas_webserver.projects_nodes import ( NodeGet, @@ -46,9 +46,9 @@ def mock_rpc_client( @pytest.fixture -def rpc_dynamic_service_create() -> RPCDynamicServiceCreate: - return RPCDynamicServiceCreate.parse_obj( - RPCDynamicServiceCreate.Config.schema_extra["example"] +def dynamic_service_start() -> DynamicServiceStart: + return DynamicServiceStart.parse_obj( + DynamicServiceStart.Config.schema_extra["example"] ) @@ -108,11 +108,11 @@ async def test_run_dynamic_service( mock_rpc_client: None, mocked_app: AsyncMock, expected_response: NodeGet | NodeGetIdle | DynamicServiceGet, - rpc_dynamic_service_create: RPCDynamicServiceCreate, + dynamic_service_start: DynamicServiceStart, ): assert ( await run_dynamic_service( - mocked_app, rpc_dynamic_service_create=rpc_dynamic_service_create + mocked_app, dynamic_service_start=dynamic_service_start ) == expected_response ) diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index 8f1d27b2c21..78c0d96e46c 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -9,9 +9,13 @@ import pytest from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.projects import ProjectID from models_library.users import UserID from pytest_mock import MockerFixture +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.garbage_collector._core_orphans import ( remove_orphaned_services, @@ -200,11 +204,16 @@ async def test_remove_orphaned_services( else: mock_get_user_role.assert_not_called() mock_has_write_permission.assert_not_called() + mock_stop_dynamic_service.assert_called_once_with( mock_app, - node_id=fake_running_service.node_uuid, - simcore_user_agent=mock.ANY, - save_state=expected_save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=fake_running_service.user_id, + 
project_id=fake_running_service.project_id, + node_id=fake_running_service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), ) @@ -238,9 +247,13 @@ async def test_remove_orphaned_services_inexisting_user_does_not_save_state( mock_has_write_permission.assert_not_called() mock_stop_dynamic_service.assert_called_once_with( mock_app, - node_id=fake_running_service.node_uuid, - simcore_user_agent=mock.ANY, - save_state=False, + dynamic_service_stop=DynamicServiceStop( + user_id=fake_running_service.user_id, + project_id=fake_running_service.project_id, + node_id=fake_running_service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=False, + ), ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py index f5980ff2457..f54537a6d0e 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py @@ -16,6 +16,9 @@ from aiohttp.test_utils import TestClient from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.projects import ProjectID from models_library.projects_state import ProjectStatus from pytest_simcore.helpers.utils_assert import assert_status @@ -72,8 +75,10 @@ async def test_delete_project( await _request_delete_project(client, user_project, expected.no_content) + user_id: int = logged_user["id"] + tasks = _crud_api_delete.get_scheduled_tasks( - project_uuid=user_project["uuid"], user_id=logged_user["id"] + project_uuid=user_project["uuid"], user_id=user_id ) if expected.no_content == status.HTTP_204_NO_CONTENT: @@ -91,9 +96,13 @@ async def test_delete_project( expected_calls = [ call( app=client.app, - node_id=service.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=True, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=True, + ), progress=mock.ANY, ) for service in fakes diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index ca456f236ae..9ceec731870 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -21,6 +21,9 @@ from aioresponses import aioresponses from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.api_schemas_storage import FileMetaDataGet, PresignedLink from models_library.generics import Envelope from models_library.projects_nodes_io import NodeID @@ -389,7 +392,7 @@ def num_services( def inc_running_services(self, *args, **kwargs): # noqa: ARG002 self.running_services_uuids.append( - kwargs["rpc_dynamic_service_create"].node_uuid + kwargs["dynamic_service_start"].node_uuid ) # let's count the started services @@ -512,7 +515,7 @@ async def inc_running_services(self, *args, 
**kwargs): # noqa: ARG002 # reproduces real world conditions and makes test to fail await asyncio.sleep(SERVICE_IS_RUNNING_AFTER_S) self.running_services_uuids.append( - kwargs["rpc_dynamic_service_create"].node_uuid + kwargs["dynamic_service_start"].node_uuid ) # let's count the started services @@ -634,6 +637,7 @@ async def test_creating_deprecated_node_returns_406_not_acceptable( @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_delete_node( client: TestClient, + logged_user: dict, user_project: ProjectDict, expected: ExpectedResponse, mocked_director_v2_api: dict[str, mock.MagicMock], @@ -681,9 +685,13 @@ async def test_delete_node( "dynamic_scheduler.api.stop_dynamic_service" ].assert_called_once_with( mock.ANY, - node_id=NodeID(node_id), - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=False, + dynamic_service_stop=DynamicServiceStop( + user_id=logged_user["id"], + project_id=user_project["uuid"], + node_id=NodeID(node_id), + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=False, + ), ) mocked_director_v2_api[ "dynamic_scheduler.api.stop_dynamic_service" diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 6a556465bdc..d95fb1313e1 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -22,7 +22,8 @@ from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - RPCDynamicServiceCreate, + DynamicServiceStart, + DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects import ProjectID @@ -370,7 +371,7 @@ async def test_open_project( calls.append( call( app=client.app, - rpc_dynamic_service_create=RPCDynamicServiceCreate( + dynamic_service_start=DynamicServiceStart( project_id=user_project["uuid"], service_key=service["key"], service_uuid=service_uuid, @@ -449,7 +450,7 @@ async def test_open_template_project_for_edition( calls.append( call( app=client.app, - rpc_dynamic_service_create=RPCDynamicServiceCreate( + dynamic_service_start=DynamicServiceStart( project_id=template_project["uuid"], service_key=service["key"], service_uuid=service_uuid, @@ -798,6 +799,8 @@ async def test_close_project( "dynamic_scheduler.api.list_dynamic_services" ].return_value = fake_dynamic_services + user_id = logged_user["id"] + assert client.app # open project client_id = client_session_id_factory() @@ -809,7 +812,7 @@ async def test_close_project( client.app, ProjectID(user_project["uuid"]) ) mocked_director_v2_api["director_v2.api.list_dynamic_services"].assert_any_call( - client.app, logged_user["id"], user_project["uuid"] + client.app, user_id, user_project["uuid"] ) mocked_director_v2_api["director_v2.api.list_dynamic_services"].reset_mock() else: @@ -830,7 +833,7 @@ async def test_close_project( calls = [ call( client.app, - user_id=logged_user["id"], + user_id=user_id, project_id=user_project["uuid"], ), ] @@ -841,9 +844,13 @@ async def test_close_project( calls = [ call( app=client.app, - node_id=service.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=True, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + 
project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=True, + ), progress=mock.ANY, ) for service in fake_dynamic_services diff --git a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py index 85ad63e9aa6..27ad5fe04f5 100644 --- a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py @@ -20,6 +20,9 @@ from aiohttp.test_utils import TestClient from aioresponses import aioresponses from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockerFixture from pytest_simcore.helpers.utils_assert import assert_status @@ -486,9 +489,9 @@ async def test_interactive_services_removed_after_logout( mocked_notifications_plugin: dict[str, mock.Mock], ): assert client.app - + user_id = logged_user["id"] service = await create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project["uuid"] + user_id=user_id, project_id=empty_user_project["uuid"] ) # create websocket client_session_id1 = client_session_id_factory() @@ -518,9 +521,13 @@ async def test_interactive_services_removed_after_logout( "dynamic_scheduler.api.stop_dynamic_service" ].assert_awaited_with( app=client.app, - node_id=service.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), progress=mock.ANY, ) @@ -548,9 +555,9 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t mocked_notifications_plugin: dict[str, mock.Mock], ): assert client.app - + user_id = logged_user["id"] service = await create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project["uuid"] + user_id=user_id, project_id=empty_user_project["uuid"] ) # create first websocket client_session_id1 = client_session_id_factory() @@ -584,7 +591,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t "locked": { "value": False, "owner": { - "user_id": logged_user["id"], + "user_id": user_id, "first_name": logged_user.get("first_name", None), "last_name": logged_user.get("last_name", None), }, @@ -635,9 +642,13 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t calls = [ call( app=client.app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - node_id=service.node_uuid, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + node_id=service.node_uuid, + ), progress=mock.ANY, ) ] @@ -682,15 +693,16 @@ async def test_interactive_services_removed_per_project( open_project: Callable, mocked_notifications_plugin: dict[str, mock.Mock], ): + user_id = logged_user["id"] # create server with delay set to DELAY service1 = await 
create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project["uuid"] + user_id=user_id, project_id=empty_user_project["uuid"] ) service2 = await create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project2["uuid"] + user_id=user_id, project_id=empty_user_project2["uuid"] ) service3 = await create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project2["uuid"] + user_id=user_id, project_id=empty_user_project2["uuid"] ) # create websocket1 from tab1 client_session_id1 = client_session_id_factory() @@ -714,9 +726,13 @@ async def test_interactive_services_removed_per_project( calls = [ call( app=client.app, - node_id=service1.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service1.project_id, + node_id=service1.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), progress=mock.ANY, ) ] @@ -739,16 +755,24 @@ async def test_interactive_services_removed_per_project( calls = [ call( app=client.server.app, - node_id=service2.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service2.project_id, + node_id=service2.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), progress=mock.ANY, ), call( app=client.server.app, - node_id=service3.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service3.project_id, + node_id=service3.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), progress=mock.ANY, ), ] @@ -840,8 +864,9 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( open_project: Callable, mocked_notifications_plugin: dict[str, mock.Mock], ): + user_id = logged_user["id"] service = await create_dynamic_service_mock( - user_id=logged_user["id"], project_id=empty_user_project["uuid"] + user_id=user_id, project_id=empty_user_project["uuid"] ) # create websocket client_session_id1 = client_session_id_factory() @@ -863,9 +888,13 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( calls = [ call( app=client.server.app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - node_id=service.node_uuid, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + node_id=service.node_uuid, + ), progress=mock.ANY, ) ] @@ -899,26 +928,27 @@ async def test_regression_removing_unexisting_user( # regression test for https://github.com/ITISFoundation/osparc-simcore/issues/2504 assert client.app # remove project + user_id = logged_user["id"] delete_task = await submit_delete_project_task( app=client.app, project_uuid=empty_user_project["uuid"], - user_id=logged_user["id"], + user_id=user_id, simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, ) await delete_task # remove user - await delete_user_without_projects(app=client.app, user_id=logged_user["id"]) + await delete_user_without_projects(app=client.app, 
user_id=user_id) with pytest.raises(UserNotFoundError): await remove_project_dynamic_services( - user_id=logged_user["id"], + user_id=user_id, project_uuid=empty_user_project["uuid"], app=client.app, simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, ) with pytest.raises(ProjectNotFoundError): await remove_project_dynamic_services( - user_id=logged_user["id"], + user_id=user_id, project_uuid=empty_user_project["uuid"], app=client.app, user_name={"first_name": "my name is", "last_name": "pytest"}, From e10ce2f88e1f7e68421a363caec357690a6ea68b Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Mon, 10 Jun 2024 11:59:22 +0200 Subject: [PATCH 025/219] =?UTF-8?q?=E2=9C=A8=20add=20EFS=20mount=20option?= =?UTF-8?q?=20to=20sidecar=20(#5873)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ubuntu --- .env-devel | 2 + .../efs_guardian/efs_manager.py | 2 + .../src/settings_library/efs.py | 47 +++++++++++++ .../core/dynamic_services_settings/sidecar.py | 5 ++ .../docker_service_specs/sidecar.py | 47 +++++++++++-- .../scheduler/_core/_event_create_sidecars.py | 7 +- .../modules/dynamic_sidecar/volumes.py | 68 ++++++++++++++++++- ...es_dynamic_sidecar_docker_service_specs.py | 9 ++- services/docker-compose.yml | 7 ++ .../api/rpc/_efs_guardian.py | 6 +- .../core/settings.py | 14 +--- .../services/efs_manager.py | 3 +- .../services/efs_manager_setup.py | 2 +- services/efs-guardian/tests/unit/conftest.py | 15 +--- .../tests/unit/test_efs_manager.py | 7 +- 15 files changed, 195 insertions(+), 46 deletions(-) create mode 100644 packages/settings-library/src/settings_library/efs.py diff --git a/.env-devel b/.env-devel index b93c1ad9b9b..d50e0af58b5 100644 --- a/.env-devel +++ b/.env-devel @@ -66,6 +66,8 @@ DIRECTOR_REGISTRY_CACHING=True EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com EFS_MOUNTED_PATH=/tmp/efs +EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data +EFS_ONLY_ENABLED_FOR_USERIDS=[] # DIRECTOR_V2 ---- COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py index b5e8aa219e9..592959eb08c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py @@ -23,12 +23,14 @@ async def create_project_specific_data_dir( *, project_id: ProjectID, node_id: NodeID, + storage_directory_name: str, ) -> Path: output: Path = await rabbitmq_rpc_client.request( EFS_GUARDIAN_RPC_NAMESPACE, parse_obj_as(RPCMethodName, "create_project_specific_data_dir"), project_id=project_id, node_id=node_id, + storage_directory_name=storage_directory_name, timeout_s=_DEFAULT_TIMEOUT_S, ) return output diff --git a/packages/settings-library/src/settings_library/efs.py b/packages/settings-library/src/settings_library/efs.py new file mode 100644 index 00000000000..d09b8abb20f --- /dev/null +++ b/packages/settings-library/src/settings_library/efs.py @@ -0,0 +1,47 @@ +from pathlib import Path + +from pydantic import Field + +from .base import BaseCustomSettings + + +class AwsEfsSettings(BaseCustomSettings): + EFS_DNS_NAME: str = 
Field(
+        description="AWS Elastic File System DNS name",
+        example="fs-xxx.efs.us-east-1.amazonaws.com",
+    )
+    EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str
+    EFS_MOUNTED_PATH: Path = Field(
+        description="This is the path where EFS is mounted to the EC2 machine",
+    )
+    EFS_ONLY_ENABLED_FOR_USERIDS: list[int] = Field(
+        description="This is a temporary solution so we can enable it for specific users for testing purposes",
+        example=[1],
+    )
+
+
+NFS_PROTOCOL = "4.1"
+READ_SIZE = "1048576"
+WRITE_SIZE = "1048576"
+RECOVERY_MODE = "hard"
+NFS_REQUEST_TIMEOUT = "600"
+NUMBER_OF_RETRANSMISSIONS = "2"
+PORT_MODE = "noresvport"
+
+"""
+`sudo mount -t nfs -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport`
+
+Explanation:
+
+nfsvers=4.1: Specifies the NFS protocol version to use; here, it is version 4.1, which supports improved security features and performance optimizations over earlier versions.
+
+rsize=1048576 and wsize=1048576: Set the read and write buffer sizes in bytes, respectively. Here, both are set to 1,048,576 bytes (1 MB). Larger buffer sizes can improve performance over high-latency networks by allowing more data to be transferred with each read or write request.
+
+hard: Specifies the recovery behavior of the NFS client. If the NFS server becomes unreachable, the NFS client will retry the request until the server becomes available again. The alternative is soft, where the NFS client gives up after a certain number of retries, potentially leading to data corruption or loss.
+
+timeo=600: Sets the timeout value for NFS requests in deciseconds (tenths of a second). Here, 600 deciseconds means 60 seconds. This is how long the NFS client will wait for a response from the server before retrying or failing.
+
+retrans=2: Sets the number of retransmissions for each NFS request if a response is not received before the timeout. Here, it will retry each request twice.
+
+noresvport: Normally, NFS uses a reserved port (number below 1024) for communicating, which requires root privileges on the client side. noresvport allows using non-reserved ports, which can be helpful in environments where clients don't have root privileges.
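+
+For illustration, director-v2 assembles exactly these constants into the docker
+volume driver options (see _get_efs_volume_driver_config in
+simcore_service_director_v2.modules.dynamic_sidecar.volumes further below); the
+resulting config has the form:
+
+    type: nfs
+    o: addr=<EFS_DNS_NAME>,rw,nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport
+    device: :/<EFS_PROJECT_SPECIFIC_DATA_DIRECTORY>/<project_id>/<node_id>/<storage_directory_name>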
+""" diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py index 547a81484ce..35d1e9e7afb 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py @@ -11,6 +11,7 @@ ) from pydantic import Field, PositiveInt, validator from settings_library.base import BaseCustomSettings +from settings_library.efs import AwsEfsSettings from settings_library.r_clone import RCloneSettings as SettingsLibraryRCloneSettings from settings_library.utils_logging import MixinLoggingSettings from settings_library.utils_service import DEFAULT_FASTAPI_PORT @@ -124,6 +125,10 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR_EFS_SETTINGS: AwsEfsSettings | None = Field( + auto_default_from_env=True + ) + DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( auto_default_from_env=True ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 43dcef06f14..b26576068d8 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -16,6 +16,8 @@ from models_library.service_settings_labels import SimcoreServiceSettingsLabel from models_library.utils.json_serialization import json_dumps from pydantic import ByteSize, parse_obj_as +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from servicelib.utils import unused_port from settings_library.node_ports import StorageAuthSettings @@ -196,13 +198,14 @@ def get_prometheus_monitoring_networks( ) -def _get_mounts( +async def _get_mounts( *, scheduler_data: SchedulerData, dynamic_sidecar_settings: DynamicSidecarSettings, dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings, app_settings: AppSettings, has_quota_support: bool, + rpc_client: RabbitMQRPCClient, ) -> list[dict[str, Any]]: mounts: list[dict[str, Any]] = [ # docker socket needed to use the docker api @@ -252,10 +255,44 @@ def _get_mounts( volume_size_limit=volume_size_limits.get(f"{path_to_mount}"), ) ) + + # We check whether user has access to EFS feature + use_efs = False + efs_settings = dynamic_sidecar_settings.DYNAMIC_SIDECAR_EFS_SETTINGS + if ( + efs_settings + and scheduler_data.user_id in efs_settings.EFS_ONLY_ENABLED_FOR_USERIDS + ): + use_efs = True + # state paths now get mounted via different driver and are synced to s3 automatically for path_to_mount in scheduler_data.paths_mapping.state_paths: + if use_efs: + assert dynamic_sidecar_settings.DYNAMIC_SIDECAR_EFS_SETTINGS # nosec + + _storage_directory_name = DynamicSidecarVolumesPathsResolver.volume_name( + path_to_mount + ).strip("_") + await efs_manager.create_project_specific_data_dir( + rpc_client, + project_id=scheduler_data.project_id, + node_id=scheduler_data.node_uuid, + storage_directory_name=_storage_directory_name, + ) + mounts.append( + 
DynamicSidecarVolumesPathsResolver.mount_efs( + swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, + path=path_to_mount, + node_uuid=scheduler_data.node_uuid, + run_id=scheduler_data.run_id, + project_id=scheduler_data.project_id, + user_id=scheduler_data.user_id, + efs_settings=dynamic_sidecar_settings.DYNAMIC_SIDECAR_EFS_SETTINGS, + storage_directory_name=_storage_directory_name, + ) + ) # for now only enable this with dev features enabled - if app_settings.DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: + elif app_settings.DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: mounts.append( DynamicSidecarVolumesPathsResolver.mount_r_clone( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, @@ -351,7 +388,7 @@ def _get_ports( return ports -def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: PLR0913 +async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: PLR0913 scheduler_data: SchedulerData, dynamic_sidecar_settings: DynamicSidecarSettings, dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings, @@ -364,6 +401,7 @@ def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: PLR091 hardware_info: HardwareInfo | None, metrics_collection_allowed: bool, telemetry_enabled: bool, + rpc_client: RabbitMQRPCClient, ) -> AioDockerServiceSpec: """ The dynamic-sidecar is responsible for managing the lifecycle @@ -375,12 +413,13 @@ def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: PLR091 """ compose_namespace = get_compose_namespace(scheduler_data.node_uuid) - mounts = _get_mounts( + mounts = await _get_mounts( scheduler_data=scheduler_data, dynamic_services_scheduler_settings=dynamic_services_scheduler_settings, dynamic_sidecar_settings=dynamic_sidecar_settings, app_settings=app_settings, has_quota_support=has_quota_support, + rpc_client=rpc_client, ) ports = _get_ports( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index 572017c8567..6943f7a0852 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -18,7 +18,7 @@ from models_library.service_settings_labels import SimcoreServiceSettingsLabel from models_library.services import RunID from models_library.utils.json_serialization import json_dumps -from servicelib.rabbitmq import RabbitMQClient +from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from simcore_postgres_database.models.comp_tasks import NodeClass from .....core.dynamic_services_settings import DynamicServicesSettings @@ -222,9 +222,11 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: user_id=scheduler_data.user_id, product_name=scheduler_data.product_name ) + rpc_client: RabbitMQRPCClient = app.state.rabbitmq_rpc_client + # WARNING: do NOT log, this structure has secrets in the open # If you want to log, please use an obfuscator - dynamic_sidecar_service_spec_base: AioDockerServiceSpec = get_dynamic_sidecar_spec( + dynamic_sidecar_service_spec_base: AioDockerServiceSpec = await get_dynamic_sidecar_spec( scheduler_data=scheduler_data, dynamic_sidecar_settings=dynamic_sidecar_settings, 
dynamic_services_scheduler_settings=dynamic_services_scheduler_settings,
@@ -236,6 +238,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None:
             allow_internet_access=allow_internet_access,
             metrics_collection_allowed=metrics_collection_allowed,
             telemetry_enabled=is_telemetry_enabled,
+            rpc_client=rpc_client,
         )
 
         catalog_client = CatalogClient.instance(app)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py
index 986f71e1bb1..8a6d85c906b 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py
@@ -7,6 +7,16 @@
 from models_library.services import RunID
 from models_library.users import UserID
 from servicelib.docker_constants import PREFIX_DYNAMIC_SIDECAR_VOLUMES
+from settings_library.efs import (
+    NFS_PROTOCOL,
+    NFS_REQUEST_TIMEOUT,
+    NUMBER_OF_RETRANSMISSIONS,
+    PORT_MODE,
+    READ_SIZE,
+    RECOVERY_MODE,
+    WRITE_SIZE,
+    AwsEfsSettings,
+)
 from settings_library.r_clone import S3Provider
 
 from ...core.dynamic_services_settings.sidecar import RCloneSettings
@@ -75,6 +85,23 @@ def _get_s3_volume_driver_config(
     return driver_config
 
 
+def _get_efs_volume_driver_config(
+    efs_settings: AwsEfsSettings,
+    project_id: ProjectID,
+    node_uuid: NodeID,
+    storage_directory_name: str,
+) -> dict[str, Any]:
+    assert "/" not in storage_directory_name  # nosec
+    driver_config: dict[str, Any] = {
+        "Options": {
+            "type": "nfs",
+            "o": f"addr={efs_settings.EFS_DNS_NAME},rw,nfsvers={NFS_PROTOCOL},rsize={READ_SIZE},wsize={WRITE_SIZE},{RECOVERY_MODE},timeo={NFS_REQUEST_TIMEOUT},retrans={NUMBER_OF_RETRANSMISSIONS},{PORT_MODE}",
+            "device": f":/{efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY}/{project_id}/{node_uuid}/{storage_directory_name}",
+        },
+    }
+    return driver_config
+
+
 class DynamicSidecarVolumesPathsResolver:
     BASE_PATH: Path = Path("/dy-volumes")
 
@@ -85,7 +112,8 @@ def target(cls, path: Path) -> str:
         return f"{target_path}"
 
     @classmethod
-    def _volume_name(cls, path: Path) -> str:
+    def volume_name(cls, path: Path) -> str:
+        """Returns a volume name created from the path. There is no way to recover the original path from the volume name."""
         return f"{path}".replace(os.sep, "_")
 
     @classmethod
@@ -104,7 +132,7 @@ def source(cls, path: Path, node_uuid: NodeID, run_id: RunID) -> str:
         # NOTE: issues can occur when the paths of the mounted outputs, inputs
         # and state folders are very long and share the same subdirectory path.
         # Reversing volume name to prevent these issues from happening.
- reversed_volume_name = cls._volume_name(path)[::-1] + reversed_volume_name = cls.volume_name(path)[::-1] unique_name = f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{run_id}_{node_uuid}_{reversed_volume_name}" return unique_name[:255] @@ -217,7 +245,41 @@ def mount_r_clone( r_clone_settings=r_clone_settings, project_id=project_id, node_uuid=node_uuid, - storage_directory_name=cls._volume_name(path).strip("_"), + storage_directory_name=cls.volume_name(path).strip("_"), + ), + }, + } + + @classmethod + def mount_efs( + cls, + swarm_stack_name: str, + path: Path, + node_uuid: NodeID, + run_id: RunID, + project_id: ProjectID, + user_id: UserID, + efs_settings: AwsEfsSettings, + storage_directory_name: str, + ) -> dict[str, Any]: + return { + "Source": cls.source(path, node_uuid, run_id), + "Target": cls.target(path), + "Type": "volume", + "VolumeOptions": { + "Labels": { + "source": cls.source(path, node_uuid, run_id), + "run_id": f"{run_id}", + "node_uuid": f"{node_uuid}", + "study_id": f"{project_id}", + "user_id": f"{user_id}", + "swarm_stack_name": swarm_stack_name, + }, + "DriverConfig": _get_efs_volume_driver_config( + efs_settings=efs_settings, + project_id=project_id, + node_uuid=node_uuid, + storage_directory_name=storage_directory_name, ), }, } diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 9a0371e2d80..ee5f4a4f15a 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -6,6 +6,7 @@ import json from collections.abc import Mapping from typing import Any, cast +from unittest.mock import Mock import pytest import respx @@ -408,7 +409,7 @@ def expected_dynamic_sidecar_spec( } -def test_get_dynamic_proxy_spec( +async def test_get_dynamic_proxy_spec( mocked_catalog_service_api: respx.MockRouter, minimal_app: FastAPI, scheduler_data: SchedulerData, @@ -436,7 +437,7 @@ def test_get_dynamic_proxy_spec( for count in range(1, 11): # loop to check it does not repeat copies print(f"{count:*^50}") - dynamic_sidecar_spec: AioDockerServiceSpec = get_dynamic_sidecar_spec( + dynamic_sidecar_spec: AioDockerServiceSpec = await get_dynamic_sidecar_spec( scheduler_data=scheduler_data, dynamic_sidecar_settings=dynamic_sidecar_settings, dynamic_services_scheduler_settings=dynamic_services_scheduler_settings, @@ -448,6 +449,7 @@ def test_get_dynamic_proxy_spec( allow_internet_access=False, metrics_collection_allowed=True, telemetry_enabled=True, + rpc_client=Mock(), ) exclude_keys: Mapping[int | str, Any] = { @@ -530,7 +532,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( hardware_info: HardwareInfo, fake_service_specifications: dict[str, Any], ): - dynamic_sidecar_spec: AioDockerServiceSpec = get_dynamic_sidecar_spec( + dynamic_sidecar_spec: AioDockerServiceSpec = await get_dynamic_sidecar_spec( scheduler_data=scheduler_data, dynamic_sidecar_settings=dynamic_sidecar_settings, dynamic_services_scheduler_settings=dynamic_services_scheduler_settings, @@ -542,6 +544,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( allow_internet_access=False, metrics_collection_allowed=True, telemetry_enabled=True, + rpc_client=Mock(), ) assert dynamic_sidecar_spec dynamic_sidecar_spec_dict = dynamic_sidecar_spec.dict() diff --git a/services/docker-compose.yml 
b/services/docker-compose.yml index 194e76769e8..0d4f2fe390e 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -300,6 +300,11 @@ services: R_CLONE_OPTION_TRANSFERS: ${R_CLONE_OPTION_TRANSFERS} R_CLONE_PROVIDER: ${R_CLONE_PROVIDER} + EFS_DNS_NAME: ${EFS_DNS_NAME} + EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} + EFS_ONLY_ENABLED_FOR_USERIDS: ${EFS_ONLY_ENABLED_FOR_USERIDS} + EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY} + RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -358,6 +363,8 @@ services: RABBIT_USER: ${RABBIT_USER} EFS_DNS_NAME: ${EFS_DNS_NAME} EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} + EFS_ONLY_ENABLED_FOR_USERIDS: ${EFS_ONLY_ENABLED_FOR_USERIDS} + EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY} invitations: image: ${DOCKER_REGISTRY:-itisfoundation}/invitations:${DOCKER_IMAGE_TAG:-latest} diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py index 61b5e588b83..9fe0978471d 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/api/rpc/_efs_guardian.py @@ -12,14 +12,12 @@ @router.expose(reraise_if_error_type=()) async def create_project_specific_data_dir( - app: FastAPI, - *, - project_id: ProjectID, - node_id: NodeID, + app: FastAPI, *, project_id: ProjectID, node_id: NodeID, storage_directory_name: str ) -> Path: _efs_manager = get_efs_manager(app) return await _efs_manager.create_project_specific_data_dir( project_id=project_id, node_id=node_id, + storage_directory_name=storage_directory_name, ) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index e35bcd64d0a..adf172a4b0c 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -1,5 +1,4 @@ from functools import cached_property -from pathlib import Path from typing import Final, cast from fastapi import FastAPI @@ -11,6 +10,7 @@ ) from pydantic import Field, PositiveInt, validator from settings_library.base import BaseCustomSettings +from settings_library.efs import AwsEfsSettings from settings_library.rabbit import RabbitSettings from settings_library.utils_logging import MixinLoggingSettings @@ -19,18 +19,6 @@ EFS_GUARDIAN_ENV_PREFIX: Final[str] = "EFS_GUARDIAN_" -class AwsEfsSettings(BaseCustomSettings): - EFS_DNS_NAME: str = Field( - description="AWS Elastic File System DNS name", - example="fs-xxx.efs.us-east-1.amazonaws.com", - ) - EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str = Field(default="project-specific-data") - EFS_MOUNTED_PATH: Path = Field( - default=Path("/data/efs"), - description="This is the path where EFS is mounted to the EC2 machine", - ) - - class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py index 4e249188c47..d3c2d0802b5 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py +++ 
b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py
@@ -27,13 +27,14 @@ async def initialize_directories(self):
         Path.mkdir(_dir_path, parents=True, exist_ok=True)
 
     async def create_project_specific_data_dir(
-        self, project_id: ProjectID, node_id: NodeID
+        self, project_id: ProjectID, node_id: NodeID, storage_directory_name: str
     ) -> Path:
         _dir_path = (
             self._efs_mounted_path
             / self._project_specific_data_base_directory
             / f"{project_id}"
             / f"{node_id}"
+            / f"{storage_directory_name}"
         )
         Path.mkdir(_dir_path, parents=True, exist_ok=True)
         return _dir_path
diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py
index 9f0ded69552..e418d27cc59 100644
--- a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py
+++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_setup.py
@@ -2,7 +2,7 @@
 from typing import cast
 
 from fastapi import FastAPI
-from simcore_service_efs_guardian.core.settings import AwsEfsSettings
+from settings_library.efs import AwsEfsSettings
 from tenacity import (
     AsyncRetrying,
     before_sleep_log,
diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py
index 1030c433bd9..0b974e13645 100644
--- a/services/efs-guardian/tests/unit/conftest.py
+++ b/services/efs-guardian/tests/unit/conftest.py
@@ -98,6 +98,7 @@ def app_environment(
             "EFS_DNS_NAME": "fs-xxx.efs.us-east-1.amazonaws.com",
             "EFS_MOUNTED_PATH": "/tmp/efs",
             "EFS_PROJECT_SPECIFIC_DATA_DIRECTORY": "project-specific-data",
+            "EFS_ONLY_ENABLED_FOR_USERIDS": "[]",
         },
     )
 
@@ -139,17 +140,3 @@ async def rpc_client(
     rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]],
 ) -> RabbitMQRPCClient:
     return await rabbitmq_rpc_client("client")
-
-
-# @pytest.fixture
-# def mocked_setup_rabbitmq(mocker: MockerFixture):
-#     return (
-#         mocker.patch(
-#             "simcore_service_efs_guardian.core.application.setup_rabbitmq",
-#             autospec=True,
-#         ),
-#         mocker.patch(
-#             "simcore_service_efs_guardian.core.application.setup_rpc_routes",
-#             autospec=True,
-#         ),
-#     )
diff --git a/services/efs-guardian/tests/unit/test_efs_manager.py b/services/efs-guardian/tests/unit/test_efs_manager.py
index acc72a9790f..ede34e1824c 100644
--- a/services/efs-guardian/tests/unit/test_efs_manager.py
+++ b/services/efs-guardian/tests/unit/test_efs_manager.py
@@ -43,9 +43,13 @@ async def test_rpc_create_project_specific_data_dir(
 
     _project_id = faker.uuid4()
     _node_id = faker.uuid4()
+    _storage_directory_name = faker.name()
 
     result = await efs_manager.create_project_specific_data_dir(
-        rpc_client, project_id=_project_id, node_id=_node_id
+        rpc_client,
+        project_id=_project_id,
+        node_id=_node_id,
+        storage_directory_name=_storage_directory_name,
     )
     assert isinstance(result, Path)
     _expected_path = (
@@ -53,6 +57,7 @@
         / aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY
         / _project_id
         / _node_id
+        / _storage_directory_name
     )
     assert _expected_path == result
     assert _expected_path.exists()

From 9321b203c6def6a1e702fa5daa53a6dcafefc937 Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Mon, 10 Jun 2024 13:56:55 +0200
Subject: [PATCH 026/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20API=20creat?=
 =?UTF-8?q?ion=20Bugfix:=20Allow=20only=20future=20dates=20(#5929)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../class/osparc/desktop/preferences/window/CreateAPIKey.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js
index a1fff213357..3540f23b518 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js
@@ -46,8 +46,8 @@ qx.Class.define("osparc.desktop.preferences.window.CreateAPIKey", {
       expirationDate.addListener("changeValue", e => {
         const date = e.getData();
         if (date) {
-          // allow only today and future dates
-          if (new Date(date).getDate() < new Date().getDate()) {
+          // allow only future dates
+          if (new Date() > new Date(date)) {
             const msg = this.tr("Choose a future date");
             osparc.FlashMessenger.getInstance().logAs(msg, "WARNING");
             expirationDate.resetValue();

From 9e48b49df9ef2859ca893e2cd26ffac8085ae9ae Mon Sep 17 00:00:00 2001
From: Andrei Neagu <5694077+GitHK@users.noreply.github.com>
Date: Tue, 11 Jun 2024 08:31:31 +0200
Subject: [PATCH 027/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20rabbitmq=20queue?=
 =?UTF-8?q?=20names=20=E2=9A=A0=EF=B8=8F=F0=9F=9A=A8=20(#5931)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Andrei Neagu
---
 .../service-library/src/servicelib/rabbitmq/_utils.py | 3 +--
 services/docker-compose.yml | 8 ++++----
 .../notifications/_rabbitmq_consumers_common.py | 2 +-
 3 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/packages/service-library/src/servicelib/rabbitmq/_utils.py b/packages/service-library/src/servicelib/rabbitmq/_utils.py
index 4da4905392b..bab135eb146 100644
--- a/packages/service-library/src/servicelib/rabbitmq/_utils.py
+++ b/packages/service-library/src/servicelib/rabbitmq/_utils.py
@@ -1,5 +1,4 @@
 import logging
-import os
 import socket
 from typing import Any, Final
 
@@ -52,7 +51,7 @@ async def wait_till_rabbitmq_responsive(url: str) -> bool:
 
 
 def get_rabbitmq_client_unique_name(base_name: str) -> str:
-    return f"{base_name}_{socket.gethostname()}_{os.getpid()}"
+    return f"{base_name}_{socket.gethostname()}"
 
 
 async def declare_queue(
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 0d4f2fe390e..63f2cbfc8da 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -528,7 +528,7 @@ services:
   webserver:
     image: ${DOCKER_REGISTRY:-itisfoundation}/webserver:${DOCKER_IMAGE_TAG:-latest}
     init: true
-    hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
+    hostname: "wb-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931
     environment: &webserver_environment
       AIODEBUG_SLOW_DURATION_SECS: ${AIODEBUG_SLOW_DURATION_SECS}
@@ -734,7 +734,7 @@ services:
   wb-api-server:
     image: ${DOCKER_REGISTRY:-itisfoundation}/webserver:${DOCKER_IMAGE_TAG:-latest}
     init: true
-    hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
+    hostname: "api-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931
    environment:
      <<: *webserver_environment
      WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST}
@@ -747,7 +747,7 @@ services:
  wb-db-event-listener:
    image: ${DOCKER_REGISTRY:-itisfoundation}/webserver:${DOCKER_IMAGE_TAG:-latest}
    init: true
-    hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
+    hostname: "db-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931
     environment:
       WEBSERVER_LOGLEVEL: ${WB_DB_EL_LOGLEVEL}
@@ -831,7 +831,7 @@ services:
   wb-garbage-collector:
     image: ${DOCKER_REGISTRY:-itisfoundation}/webserver:${DOCKER_IMAGE_TAG:-latest}
     init: true
-    hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
+    hostname: "gc-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931
     environment:
       WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL}
diff --git a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_consumers_common.py b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_consumers_common.py
index 8838c26a6ef..9f033db621b 100644
--- a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_consumers_common.py
+++ b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_consumers_common.py
@@ -37,7 +37,7 @@ async def subscribe_to_rabbitmq(
             )
             for p in exchange_to_parser_config
         ),
-        reraise=False,
+        reraise=True,
     )
     return {
         exchange_name: queue_name

From 931776fb04d5abf2e5a2ab2b17fc10858ca46fc7 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Wed, 12 Jun 2024 08:28:06 +0200
Subject: [PATCH 028/219] =?UTF-8?q?=F0=9F=90=9B=20fix=20patching=20project?=
 =?UTF-8?q?=20workbench=20when=20changing=20data=20type=20=F0=9F=9A=A8=20(?=
 =?UTF-8?q?#5932)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../projects/utils.py | 2 +-
 .../tests/unit/isolated/test_project_utils.py | 33 ++++++++++++
 .../02/test_projects_nodes_handlers__patch.py | 53 +++++++++++++++++++
 .../resource_usage_tracker.py | 0
 .../tests/{ => sim4life}/sim4life.py | 0
 .../e2e-playwright/tests/ti_plan/conftest.py | 24 +++++++++
 .../tests/{ => ti_plan}/ti_plan.py | 7 +--
 7 files changed, 115 insertions(+), 4 deletions(-)
 rename tests/e2e-playwright/tests/{ => resource_usage_tracker}/resource_usage_tracker.py (100%)
 rename tests/e2e-playwright/tests/{ => sim4life}/sim4life.py (100%)
 create mode 100644 tests/e2e-playwright/tests/ti_plan/conftest.py
 rename tests/e2e-playwright/tests/{ => ti_plan}/ti_plan.py (97%)

diff --git a/services/web/server/src/simcore_service_webserver/projects/utils.py b/services/web/server/src/simcore_service_webserver/projects/utils.py
index 474cbd800c5..d54bc2b433d 100644
--- a/services/web/server/src/simcore_service_webserver/projects/utils.py
+++ b/services/web/server/src/simcore_service_webserver/projects/utils.py
@@ -359,7 +359,7 @@ def _cast_outputs_store(dict_data: dict[str, Any]) -> None:
             continue
         # if the entry was modified put the new one
         modified_entry = {k: new_dict[k]}
-        if isinstance(new_dict[k], dict):
+        if isinstance(current_dict[k], dict) and isinstance(new_dict[k], dict):
             modified_entry = {
                 k: find_changed_node_keys(
                     current_dict[k], new_dict[k], look_for_removed_keys=True
diff --git a/services/web/server/tests/unit/isolated/test_project_utils.py b/services/web/server/tests/unit/isolated/test_project_utils.py
index 7ee45b6a151..a5319cacd78 100644
--- a/services/web/server/tests/unit/isolated/test_project_utils.py
+++
b/services/web/server/tests/unit/isolated/test_project_utils.py
@@ -64,6 +64,39 @@
         },
         id="patch with 2x nested new data",
     ),
+    pytest.param(
+        {"inputs": {"in_1": 1}},
+        {
+            "inputs": {
+                "in_1": {
+                    "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597",
+                    "output": "out_1",
+                }
+            }
+        },
+        {
+            "inputs": {
+                "in_1": {
+                    "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597",
+                    "output": "out_1",
+                }
+            }
+        },
+        id="patch with new data type change int -> dict",
+    ),
+    pytest.param(
+        {
+            "inputs": {
+                "in_1": {
+                    "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597",
+                    "output": "out_1",
+                }
+            }
+        },
+        {"inputs": {"in_1": 1}},
+        {"inputs": {"in_1": 1}},
+        id="patch with new data type change dict -> int",
+    ),
     pytest.param(
         {"remove_entries_in_dict": {"outputs": {"out_1": 123, "out_3": True}}},
         {"remove_entries_in_dict": {"outputs": {}}},
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
index c0dc6bc2f08..1706d21f688 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
@@ -163,3 +163,56 @@
     assert _tested_node["inputsRequired"] == _patch_inputs_required["inputsRequired"]
     assert _tested_node["inputNodes"] == _patch_input_nodes["inputNodes"]
     assert _tested_node["bootOptions"] == _patch_boot_options["bootOptions"]
+
+
+@pytest.mark.parametrize(
+    "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)]
+)
+async def test_patch_project_node_inputs_with_data_type_change(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+    expected: HTTPStatus,
+    mock_catalog_api_get_services_for_user_in_product,
+    mock_project_uses_available_services,
+):
+    node_id = next(iter(user_project["workbench"]))
+    assert client.app
+    base_url = client.app.router["patch_project_node"].url_for(
+        project_id=user_project["uuid"], node_id=node_id
+    )
+    # inputs
+    _patch_inputs = {
+        "inputs": {
+            "input_3": 0.0,  # <-- Changing type
+            "input_2": 3.0,
+            "input_1": {  # <-- Changing type
+                "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597",
+                "output": "out_1",
+            },
+        }
+    }
+    resp = await client.patch(
+        f"{base_url}",
+        data=json.dumps(_patch_inputs),
+    )
+    await assert_status(resp, expected)
+    # NOTE: the PATCH replies 204 No Content, so there is no response body to verify here
+
+    # Change input data type
+    _patch_inputs = {
+        "inputs": {
+            "input_3": {  # <-- Changing type
+                "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597",
+                "output": "out_1",
+            },
+            "input_2": 3.0,
+            "input_1": 5.5,  # <-- Changing type
+        }
+    }
+    resp = await client.patch(
+        f"{base_url}",
+        data=json.dumps(_patch_inputs),
+    )
+    await assert_status(resp, expected)
+    # NOTE: the PATCH replies 204 No Content, so there is no response body to verify here
diff --git a/tests/e2e-playwright/tests/resource_usage_tracker.py b/tests/e2e-playwright/tests/resource_usage_tracker/resource_usage_tracker.py
similarity index 100%
rename from tests/e2e-playwright/tests/resource_usage_tracker.py
rename to tests/e2e-playwright/tests/resource_usage_tracker/resource_usage_tracker.py
diff --git a/tests/e2e-playwright/tests/sim4life.py b/tests/e2e-playwright/tests/sim4life/sim4life.py
similarity index 100%
rename from tests/e2e-playwright/tests/sim4life.py
rename to tests/e2e-playwright/tests/sim4life/sim4life.py
diff --git a/tests/e2e-playwright/tests/ti_plan/conftest.py
b/tests/e2e-playwright/tests/ti_plan/conftest.py new file mode 100644 index 00000000000..c8b45d3e932 --- /dev/null +++ b/tests/e2e-playwright/tests/ti_plan/conftest.py @@ -0,0 +1,24 @@ +# pylint: disable=redefined-outer-name +import pytest + + +def pytest_addoption(parser: pytest.Parser) -> None: + group = parser.getgroup( + "oSparc e2e options", description="oSPARC-e2e specific parameters" + ) + group.addoption( + "--service-opening-waiting-timeout", + action="store", + type=int, + default=300000, # 5 mins + help="Defines a waiting timeout in milliseconds for opening a service.", + ) + + +@pytest.fixture +def service_opening_waiting_timeout(request: pytest.FixtureRequest) -> int: + service_opening_waiting_timeout = request.config.getoption( + "--service-opening-waiting-timeout" + ) + assert isinstance(service_opening_waiting_timeout, int) + return service_opening_waiting_timeout diff --git a/tests/e2e-playwright/tests/ti_plan.py b/tests/e2e-playwright/tests/ti_plan/ti_plan.py similarity index 97% rename from tests/e2e-playwright/tests/ti_plan.py rename to tests/e2e-playwright/tests/ti_plan/ti_plan.py index 117d8d5bb8d..0c185519595 100644 --- a/tests/e2e-playwright/tests/ti_plan.py +++ b/tests/e2e-playwright/tests/ti_plan/ti_plan.py @@ -25,6 +25,7 @@ def test_tip( api_request_context: APIRequestContext, product_url: AnyUrl, product_billable: bool, + service_opening_waiting_timeout: int, ): handler = SocketIOOsparcMessagePrinter() # log_in_and_out is the initial websocket @@ -66,7 +67,7 @@ def test_tip( # Electrode Selector es_page = page.frame_locator(f'[osparc-test-id="iframe_{node_ids[0]}"]') expect(es_page.get_by_test_id("TargetStructure_Selector")).to_be_visible( - timeout=300000 + timeout=service_opening_waiting_timeout ) # Sometimes this iframe flicks and shows a white page. 
This wait will avoid it page.wait_for_timeout(5000) @@ -108,7 +109,7 @@ def test_tip( # Optimal Configuration Identification ti_page = page.frame_locator(f'[osparc-test-id="iframe_{node_ids[1]}"]') expect(ti_page.get_by_role("button", name="Run Optimization")).to_be_visible( - timeout=300000 + timeout=service_opening_waiting_timeout ) run_button = ti_page.get_by_role("button", name="Run Optimization") run_button.click() @@ -162,7 +163,7 @@ def test_tip( f'[osparc-test-id="iframe_{node_ids[2]}"]' ) expect(s4l_postpro_page.get_by_test_id("mode-button-postro")).to_be_visible( - timeout=300000 + timeout=service_opening_waiting_timeout ) # click on the postpro mode button s4l_postpro_page.get_by_test_id("mode-button-postro").click() From efe5bb113dc500b0cc0a6a922d7792140196ac6c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 12 Jun 2024 11:22:07 +0200 Subject: [PATCH 029/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Dashboard:?= =?UTF-8?q?=20Filter=20Services=20by=20Type=20(#5930)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/dashboard/CardBase.js | 23 +++++++ .../osparc/dashboard/ResourceBrowserBase.js | 5 ++ .../class/osparc/dashboard/ResourceFilter.js | 60 ++++++++++++++++++- .../class/osparc/dashboard/SearchBarFilter.js | 53 +++++++++++++--- .../source/class/osparc/filter/MFilterable.js | 2 +- .../class/osparc/filter/NodeTypeFilter.js | 2 +- 6 files changed, 132 insertions(+), 13 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index a6c532495fa..e6a6971f4c3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -117,6 +117,17 @@ qx.Class.define("osparc.dashboard.CardBase", { return false; }, + filterServiceType: function(resourceType, metaData, serviceType) { + if (serviceType && resourceType === "service") { + if (metaData && metaData.type) { + const matches = metaData.type === serviceType; + return !matches; + } + return false; + } + return false; + }, + filterClassifiers: function(checks, classifiers) { if (classifiers && classifiers.length) { const includesAll = classifiers.every(classifier => checks.includes(classifier)); @@ -901,6 +912,12 @@ qx.Class.define("osparc.dashboard.CardBase", { return this.self().filterSharedWith(checks, sharedWith); }, + _filterServiceType: function(serviceType) { + const resourceType = this.getResourceType(); + const resourceData = this.getResourceData(); + return this.self().filterServiceType(resourceType, resourceData, serviceType); + }, + _filterClassifiers: function(classifiers) { const checks = this.getClassifiers(); return this.self().filterClassifiers(checks, classifiers); @@ -921,6 +938,9 @@ qx.Class.define("osparc.dashboard.CardBase", { if (this._filterSharedWith(data.sharedWith)) { return true; } + if (this._filterServiceType(data.serviceType)) { + return true; + } if (this._filterClassifiers(data.classifiers)) { return true; } @@ -942,6 +962,9 @@ qx.Class.define("osparc.dashboard.CardBase", { if (data.sharedWith) { return true; } + if ("serviceType" in data) { + return true; + } if (data.classifiers && data.classifiers.length) { return true; } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js 
b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 3c91b5af6a3..f763d7f9c84 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -384,6 +384,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this._searchBarFilter.setTagsActiveFilter(selectedTagIds); }, this); + resourceFilter.addListener("changeServiceType", e => { + const serviceType = e.getData(); + this._searchBarFilter.setServiceTypeActiveFilter(serviceType.id, serviceType.label); + }, this); + this._searchBarFilter.addListener("filterChanged", e => { const filterData = e.getData(); resourceFilter.filterChanged(filterData); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js index 28f34825cb2..29f1e377292 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -25,6 +25,7 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { this.__resourceType = resourceType; this.__sharedWithButtons = []; this.__tagButtons = []; + this.__serviceTypeButtons = []; this._setLayout(new qx.ui.layout.VBox()); this.__buildLayout(); @@ -32,13 +33,15 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { events: { "changeSharedWith": "qx.event.type.Data", - "changeSelectedTags": "qx.event.type.Data" + "changeSelectedTags": "qx.event.type.Data", + "changeServiceType": "qx.event.type.Data" }, members: { __resourceType: null, __sharedWithButtons: null, __tagButtons: null, + __serviceTypeButtons: null, __buildLayout: function() { const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(40)); @@ -46,6 +49,9 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { if (this.__resourceType !== "service") { layout.add(this.__createTagsFilterLayout()); } + if (this.__resourceType === "service") { + layout.add(this.__createServiceTypeFilterLayout()); + } const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(layout); @@ -54,9 +60,12 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { }); }, + /* SHARED WITH */ __createSharedWithFilterLayout: function() { const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + const radioGroup = new qx.ui.form.RadioGroup(); + radioGroup.setAllowEmptySelection(false); const options = osparc.dashboard.SearchBarFilter.getSharedWithOptions(this.__resourceType); options.forEach(option => { @@ -82,11 +91,11 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { this.__sharedWithButtons.push(button); }); - radioGroup.setAllowEmptySelection(false); - return layout; }, + /* /SHARED WITH */ + /* TAGS */ __createTagsFilterLayout: function() { const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); @@ -151,6 +160,46 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { layout.add(showAllButton); } }, + /* /TAGS */ + + /* SERVICE TYPE */ + __createServiceTypeFilterLayout: function() { + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + + const radioGroup = new qx.ui.form.RadioGroup(); + radioGroup.setAllowEmptySelection(true); + + const serviceTypes = osparc.service.Utils.TYPES; + Object.keys(serviceTypes).forEach(serviceId => { + if (!["computational", "dynamic"].includes(serviceId)) { + return; + } + const 
serviceType = serviceTypes[serviceId]; + const iconSize = 20; + const button = new qx.ui.toolbar.RadioButton(serviceType.label, serviceType.icon+iconSize); + button.id = serviceId; + button.set({ + appearance: "filter-toggle-button", + value: false + }); + + layout.add(button); + radioGroup.add(button); + + button.addListener("execute", () => { + const checked = button.getValue(); + this.fireDataEvent("changeServiceType", { + id: checked ? serviceId : null, + label: checked ? serviceType.label : null + }); + }, this); + + this.__serviceTypeButtons.push(button); + }); + + return layout; + }, + /* /SERVICE TYPE */ filterChanged: function(filterData) { if ("sharedWith" in filterData) { @@ -164,6 +213,11 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { btn.setValue(filterData["tags"].includes(btn.id)); }); } + if ("serviceType" in filterData) { + this.__serviceTypeButtons.forEach(btn => { + btn.setValue(filterData["serviceType"] === btn.id); + }); + } } } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js index a65118e6a0f..4be366fc05d 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js @@ -140,19 +140,27 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { } const menu = this.__filtersMenu; menu.removeAll(); - const tagsButton = new qx.ui.menu.Button(this.tr("Tags"), "@FontAwesome5Solid/tags/12"); - osparc.utils.Utils.setIdToWidget(tagsButton, "searchBarFilter-tags-button"); - this.__addTags(tagsButton); - menu.add(tagsButton); const sharedWithButton = new qx.ui.menu.Button(this.tr("Shared with"), "@FontAwesome5Solid/share-alt/12"); this.__addSharedWith(sharedWithButton); menu.add(sharedWithButton); - const classifiersButton = new qx.ui.menu.Button(this.tr("Classifiers"), "@FontAwesome5Solid/search/12"); - osparc.utils.Utils.setIdToWidget(classifiersButton, "searchBarFilter-classifiers"); - this.__addClassifiers(classifiersButton); - menu.add(classifiersButton); + if (this.__resourceType !== "service") { + const tagsButton = new qx.ui.menu.Button(this.tr("Tags"), "@FontAwesome5Solid/tags/12"); + osparc.utils.Utils.setIdToWidget(tagsButton, "searchBarFilter-tags-button"); + this.__addTags(tagsButton); + menu.add(tagsButton); + + const classifiersButton = new qx.ui.menu.Button(this.tr("Classifiers"), "@FontAwesome5Solid/search/12"); + this.__addClassifiers(classifiersButton); + menu.add(classifiersButton); + } + + if (this.__resourceType === "service") { + const serviceTypeButton = new qx.ui.menu.Button(this.tr("Service Type"), "@FontAwesome5Solid/cogs/12"); + this.__addServiceTypes(serviceTypeButton); + menu.add(serviceTypeButton); + } }, __attachEventHandlers: function() { @@ -248,6 +256,22 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { } }, + __addServiceTypes: function(menuButton) { + const serviceTypeMenu = new qx.ui.menu.Menu(); + menuButton.setMenu(serviceTypeMenu); + const serviceTypes = osparc.service.Utils.TYPES; + Object.keys(serviceTypes).forEach(serviceId => { + if (!["computational", "dynamic"].includes(serviceId)) { + return; + } + const serviceType = serviceTypes[serviceId]; + const iconSize = 12; + const serviceTypeButton = new qx.ui.menu.Button(serviceType.label, serviceType.icon+iconSize); + serviceTypeMenu.add(serviceTypeButton); + serviceTypeButton.addListener("execute", () => 
this.__addChip("service-type", serviceId, serviceType.label), this); + }); + }, + addTagActiveFilter: function(tag) { this.__addChip("tag", tag.id, tag.name); }, @@ -272,6 +296,15 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { } }, + setServiceTypeActiveFilter: function(optionId, optionLabel) { + this.__removeChips("service-type"); + if (optionId && optionLabel) { + this.__addChip("service-type", optionId, optionLabel); + } else { + this.__filter(); + } + }, + __addChip: function(type, id, label) { const activeFilter = this.getChildControl("active-filters"); const chipFound = activeFilter.getChildren().find(chip => chip.type === type && chip.id === id); @@ -328,6 +361,7 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { tags: [], classifiers: [], sharedWith: null, + serviceType: null, text: "" }; const textFilter = this.getTextFilterValue(); @@ -343,6 +377,9 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { case "shared-with": filterData.sharedWith = chip.id === "show-all" ? null : chip.id; break; + case "service-type": + filterData.serviceType = chip.id; + break; } }); return filterData; diff --git a/services/static-webserver/client/source/class/osparc/filter/MFilterable.js b/services/static-webserver/client/source/class/osparc/filter/MFilterable.js index 2d0cec20571..543f1cce5aa 100644 --- a/services/static-webserver/client/source/class/osparc/filter/MFilterable.js +++ b/services/static-webserver/client/source/class/osparc/filter/MFilterable.js @@ -31,7 +31,7 @@ qx.Mixin.define("osparc.filter.MFilterable", { }, /** * Subscriber function for incoming messages. It implements the common filtering workflow of every - * filterable GUI element: If the filter state is appropiate, compare it with the own state and act + * filterable GUI element: If the filter state is appropriate, compare it with the own state and act * accordingly by applying the filter or removing it. * * @param {qx.event.message.Message} msg Message dispatched. 
diff --git a/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js b/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js index fc5ccea318b..80becd17ada 100644 --- a/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js +++ b/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.filter.NodeTypeFilter", { * @extends osparc.filter.TagsFilter */ construct: function(filterId, filterGroupId) { - this.base(arguments, this.tr("Node types"), filterId, filterGroupId); + this.base(arguments, this.tr("Service types"), filterId, filterGroupId); this._setLayout(new qx.ui.layout.HBox()); this.__buildMenu(); From 641b32884dbdd8b2dd8f424f26b7e62858788a21 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Wed, 12 Jun 2024 15:02:58 +0200 Subject: [PATCH 030/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor=20`RedisC?= =?UTF-8?q?lientsManager`=20and=20`RedisClientSDK`=20(#5888)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../deferred_tasks/_deferred_manager.py | 4 +- .../deferred_tasks/_redis_task_tracker.py | 16 +-- .../service-library/src/servicelib/redis.py | 107 +++++++++--------- .../src/servicelib/retry_policies.py | 12 +- .../tests/deferred_tasks/example_app.py | 14 +-- .../test__base_deferred_handler.py | 14 +-- .../test__redis_task_tracker.py | 6 +- packages/service-library/tests/test_redis.py | 20 +++- .../modules/redis.py | 11 +- .../api/rest/_dependencies.py | 4 +- .../api/rest/_health.py | 6 +- .../services/redis.py | 14 +-- .../src/simcore_service_webserver/redis.py | 19 ++-- 13 files changed, 129 insertions(+), 118 deletions(-) diff --git a/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py b/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py index 0447d7834c7..154d54f57e7 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py @@ -11,7 +11,7 @@ from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitRouter from pydantic import NonNegativeInt from servicelib.logging_utils import log_catch, log_context -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from settings_library.rabbit import RabbitSettings from ._base_deferred_handler import ( @@ -116,7 +116,7 @@ class DeferredManager: # pylint:disable=too-many-instance-attributes def __init__( self, rabbit_settings: RabbitSettings, - scheduler_redis_sdk: RedisClientSDKHealthChecked, + scheduler_redis_sdk: RedisClientSDK, *, globals_context: GlobalsContext, max_workers: NonNegativeInt = _DEFAULT_DEFERRED_MANAGER_WORKER_SLOTS, diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py index 528f7cc971c..87c6f86352e 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py @@ -3,7 +3,7 @@ from pydantic import NonNegativeInt -from ..redis import RedisClientSDKHealthChecked +from ..redis import RedisClientSDK from ..utils import logged_gather from ._base_task_tracker import BaseTaskTracker from ._models import TaskUID @@ -18,36 +18,36 @@ def 
_get_key(task_uid: TaskUID) -> str: class RedisTaskTracker(BaseTaskTracker): - def __init__(self, redis_sdk: RedisClientSDKHealthChecked) -> None: - self.redis_sdk = redis_sdk + def __init__(self, redis_client_sdk: RedisClientSDK) -> None: + self.redis_client_sdk = redis_client_sdk async def get_new_unique_identifier(self) -> TaskUID: candidate_already_exists = True while candidate_already_exists: candidate = f"{uuid4()}" candidate_already_exists = ( - await self.redis_sdk.redis.get(_get_key(candidate)) is not None + await self.redis_client_sdk.redis.get(_get_key(candidate)) is not None ) return TaskUID(candidate) async def _get_raw(self, redis_key: str) -> TaskScheduleModel | None: - found_data = await self.redis_sdk.redis.get(redis_key) + found_data = await self.redis_client_sdk.redis.get(redis_key) return None if found_data is None else TaskScheduleModel.parse_raw(found_data) async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None: return await self._get_raw(_get_key(task_uid)) async def save(self, task_uid: TaskUID, task_schedule: TaskScheduleModel) -> None: - await self.redis_sdk.redis.set(_get_key(task_uid), task_schedule.json()) + await self.redis_client_sdk.redis.set(_get_key(task_uid), task_schedule.json()) async def remove(self, task_uid: TaskUID) -> None: - await self.redis_sdk.redis.delete(_get_key(task_uid)) + await self.redis_client_sdk.redis.delete(_get_key(task_uid)) async def all(self) -> list[TaskScheduleModel]: return await logged_gather( *[ self._get_raw(x) - async for x in self.redis_sdk.redis.scan_iter( + async for x in self.redis_client_sdk.redis.scan_iter( match=f"{_TASK_TRACKER_PREFIX}*" ) ], diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index 5570d554c24..d90f124be97 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -20,12 +20,17 @@ from .background_task import periodic_task, start_periodic_task, stop_periodic_task from .logging_utils import log_catch, log_context from .retry_policies import RedisRetryPolicyUponInitialization -from .utils import logged_gather _DEFAULT_LOCK_TTL: Final[datetime.timedelta] = datetime.timedelta(seconds=10) _DEFAULT_SOCKET_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=30) +_DEFAULT_DECODE_RESPONSES: Final[bool] = True +_DEFAULT_HEALTH_CHECK_INTERVAL: Final[datetime.timedelta] = datetime.timedelta( + seconds=5 +) + + _logger = logging.getLogger(__name__) @@ -44,7 +49,12 @@ class CouldNotConnectToRedisError(BaseRedisError): @dataclass class RedisClientSDK: redis_dsn: str + decode_responses: bool = _DEFAULT_DECODE_RESPONSES + health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL + _client: aioredis.Redis = field(init=False) + _health_check_task: Task | None = None + _is_healthy: bool = False @property def redis(self) -> aioredis.Redis: @@ -63,7 +73,7 @@ def __post_init__(self): socket_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(), socket_connect_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(), encoding="utf-8", - decode_responses=True, + decode_responses=self.decode_responses, ) @retry(**RedisRetryPolicyUponInitialization(_logger).kwargs) @@ -71,6 +81,14 @@ async def setup(self) -> None: if not await self._client.ping(): await self.shutdown() raise CouldNotConnectToRedisError(dsn=self.redis_dsn) + + self._is_healthy = True + self._health_check_task = start_periodic_task( + self._check_health, + interval=self.health_check_interval, + 
task_name=f"redis_service_health_check_{self.redis_dsn}", + ) + _logger.info( "Connection to %s succeeded with %s", f"redis at {self.redis_dsn=}", @@ -78,6 +96,9 @@ async def setup(self) -> None: ) async def shutdown(self) -> None: + if self._health_check_task: + await stop_periodic_task(self._health_check_task) + # NOTE: redis-py does not yet completely fill all the needed types for mypy await self._client.aclose(close_connection_pool=True) # type: ignore[attr-defined] @@ -87,6 +108,21 @@ async def ping(self) -> bool: return True return False + async def _check_health(self) -> None: + self._is_healthy = await self.ping() + + @property + def is_healthy(self) -> bool: + """Returns the result of the last health check. + If redis becomes available, after being not available, + it will once more return ``True`` + + Returns: + ``False``: if the service is no longer reachable + ``True``: when service is reachable + """ + return self._is_healthy + @contextlib.asynccontextmanager async def lock_context( self, @@ -169,49 +205,11 @@ async def lock_value(self, lock_name: str) -> str | None: return output -class RedisClientSDKHealthChecked(RedisClientSDK): - """ - Provides access to ``is_healthy`` property, to be used for defining - health check handlers. - """ - - def __init__( - self, - redis_dsn: str, - health_check_interval: datetime.timedelta = datetime.timedelta(seconds=5), - ) -> None: - super().__init__(redis_dsn) - self.health_check_interval: datetime.timedelta = health_check_interval - self._health_check_task: Task | None = None - self._is_healthy: bool = True - - @property - def is_healthy(self) -> bool: - """Provides the status of Redis. - If redis becomes available, after being not available, - it will once more return ``True`` - - Returns: - ``False``: if the service is no longer reachable - ``True``: when service is reachable - """ - return self._is_healthy - - async def _check_health(self) -> None: - self._is_healthy = await self.ping() - - async def setup(self) -> None: - await super().setup() - self._health_check_task = start_periodic_task( - self._check_health, - interval=self.health_check_interval, - task_name="redis_service_health_check", - ) - - async def shutdown(self) -> None: - if self._health_check_task: - await stop_periodic_task(self._health_check_task) - await super().shutdown() +@dataclass(frozen=True) +class RedisManagerDBConfig: + database: RedisDatabase + decode_responses: bool = _DEFAULT_DECODE_RESPONSES + health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL @dataclass @@ -220,20 +218,27 @@ class RedisClientsManager: Manages the lifetime of redis client sdk connections """ - databases: set[RedisDatabase] + databases_configs: set[RedisManagerDBConfig] settings: RedisSettings _client_sdks: dict[RedisDatabase, RedisClientSDK] = field(default_factory=dict) async def setup(self) -> None: - for db in self.databases: - self._client_sdks[db] = client_sdk = RedisClientSDK( - redis_dsn=self.settings.build_redis_dsn(db) + for config in self.databases_configs: + self._client_sdks[config.database] = RedisClientSDK( + redis_dsn=self.settings.build_redis_dsn(config.database), + decode_responses=config.decode_responses, + health_check_interval=config.health_check_interval, ) - await client_sdk.setup() + + for client in self._client_sdks.values(): + await client.setup() async def shutdown(self) -> None: - await logged_gather(*(c.shutdown() for c in self._client_sdks.values())) + # NOTE: somehow using logged_gather is not an option + # doing so will make the 
shutdown procedure hang + for client in self._client_sdks.values(): + await client.shutdown() def client(self, database: RedisDatabase) -> RedisClientSDK: return self._client_sdks[database] diff --git a/packages/service-library/src/servicelib/retry_policies.py b/packages/service-library/src/servicelib/retry_policies.py index a85bc968322..b5848a990a4 100644 --- a/packages/service-library/src/servicelib/retry_policies.py +++ b/packages/service-library/src/servicelib/retry_policies.py @@ -21,12 +21,12 @@ class PostgresRetryPolicyUponInitialization: def __init__(self, logger: logging.Logger | None = None): logger = logger or log - self.kwargs = dict( - wait=wait_fixed(self.WAIT_SECS), - stop=stop_after_attempt(self.ATTEMPTS_COUNT), - before_sleep=before_sleep_log(logger, logging.WARNING), - reraise=True, - ) + self.kwargs = { + "wait": wait_fixed(self.WAIT_SECS), + "stop": stop_after_attempt(self.ATTEMPTS_COUNT), + "before_sleep": before_sleep_log(logger, logging.WARNING), + "reraise": True, + } class RedisRetryPolicyUponInitialization(PostgresRetryPolicyUponInitialization): diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index e363204d249..75850fddc2e 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -15,7 +15,7 @@ StartContext, TaskUID, ) -from servicelib.redis import RedisClientSDK, RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisDatabase, RedisSettings @@ -54,7 +54,7 @@ async def on_result(cls, result: str, context: DeferredContext) -> None: class InMemoryLists: def __init__(self, redis_settings: RedisSettings, port: int) -> None: - self.redis_sdk = RedisClientSDK( + self.redis_client_sdk = RedisClientSDK( redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS) ) self.port = port @@ -63,10 +63,10 @@ def _get_queue_name(self, queue_name: str) -> str: return f"in_memory_lists::{queue_name}.{self.port}" async def append_to(self, queue_name: str, value: Any) -> None: - await self.redis_sdk.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore + await self.redis_client_sdk.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore async def get_all_from(self, queue_name: str) -> list: - return await self.redis_sdk.redis.lrange( + return await self.redis_client_sdk.redis.lrange( self._get_queue_name(queue_name), 0, -1 ) # type: ignore @@ -79,18 +79,18 @@ def __init__( in_memory_lists: InMemoryLists, max_workers: NonNegativeInt, ) -> None: - self._redis_client = RedisClientSDKHealthChecked( + self._redis_client_sdk = RedisClientSDK( redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS) ) self._manager = DeferredManager( rabbit_settings, - self._redis_client, + self._redis_client_sdk, globals_context={"in_memory_lists": in_memory_lists}, max_workers=max_workers, ) async def setup(self) -> None: - await self._redis_client.setup() + await self._redis_client_sdk.setup() await self._manager.setup() diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py index cb45f25a329..d06294a2eb5 100644 --- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py +++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py @@ -24,7 
+24,7 @@ ) from servicelib.deferred_tasks._models import TaskResultError, TaskUID from servicelib.deferred_tasks._task_schedule import TaskState -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisDatabase, RedisSettings from tenacity._asyncio import AsyncRetrying @@ -49,12 +49,10 @@ class MockKeys(StrAutoEnum): @pytest.fixture -async def redis_sdk( +async def redis_client_sdk( redis_service: RedisSettings, -) -> AsyncIterable[RedisClientSDKHealthChecked]: - sdk = RedisClientSDKHealthChecked( - redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS) - ) +) -> AsyncIterable[RedisClientSDK]: + sdk = RedisClientSDK(redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS)) await sdk.setup() yield sdk await sdk.shutdown() @@ -68,12 +66,12 @@ def mocked_deferred_globals() -> dict[str, Any]: @pytest.fixture async def deferred_manager( rabbit_service: RabbitSettings, - redis_sdk: RedisClientSDKHealthChecked, + redis_client_sdk: RedisClientSDK, mocked_deferred_globals: dict[str, Any], ) -> AsyncIterable[DeferredManager]: manager = DeferredManager( rabbit_service, - redis_sdk, + redis_client_sdk, globals_context=mocked_deferred_globals, max_workers=10, ) diff --git a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py index 60f5a9d3fed..3ec3bde01ed 100644 --- a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py +++ b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py @@ -9,7 +9,7 @@ from servicelib.deferred_tasks._models import TaskUID from servicelib.deferred_tasks._redis_task_tracker import RedisTaskTracker from servicelib.deferred_tasks._task_schedule import TaskScheduleModel, TaskState -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from servicelib.utils import logged_gather pytest_simcore_core_services_selection = [ @@ -33,7 +33,7 @@ def task_schedule() -> TaskScheduleModel: async def test_task_tracker_workflow( - redis_client_sdk_deferred_tasks: RedisClientSDKHealthChecked, + redis_client_sdk_deferred_tasks: RedisClientSDK, task_schedule: TaskScheduleModel, ): task_tracker = RedisTaskTracker(redis_client_sdk_deferred_tasks) @@ -51,7 +51,7 @@ async def test_task_tracker_workflow( @pytest.mark.parametrize("count", [0, 1, 10, 100]) async def test_task_tracker_list_all_entries( - redis_client_sdk_deferred_tasks: RedisClientSDKHealthChecked, + redis_client_sdk_deferred_tasks: RedisClientSDK, task_schedule: TaskScheduleModel, count: int, ): diff --git a/packages/service-library/tests/test_redis.py b/packages/service-library/tests/test_redis.py index 29b039f5975..2c3057cd8f1 100644 --- a/packages/service-library/tests/test_redis.py +++ b/packages/service-library/tests/test_redis.py @@ -18,8 +18,8 @@ from servicelib.redis import ( CouldNotAcquireLockError, RedisClientSDK, - RedisClientSDKHealthChecked, RedisClientsManager, + RedisManagerDBConfig, ) from settings_library.redis import RedisDatabase, RedisSettings @@ -249,13 +249,17 @@ async def _inc_counter() -> None: async def test_redis_client_sdks_manager(redis_service: RedisSettings): - all_redis_databases: set[RedisDatabase] = set(RedisDatabase) - manager = RedisClientsManager(databases=all_redis_databases, settings=redis_service) + all_redis_configs: set[RedisManagerDBConfig] = { + RedisManagerDBConfig(db) 
for db in RedisDatabase + } + manager = RedisClientsManager( + databases_configs=all_redis_configs, settings=redis_service + ) await manager.setup() - for database in all_redis_databases: - assert manager.client(database) + for config in all_redis_configs: + assert manager.client(config.database) await manager.shutdown() @@ -263,9 +267,13 @@ async def test_redis_client_sdks_manager(redis_service: RedisSettings): async def test_redis_client_sdk_health_checked(redis_service: RedisSettings): # setup redis_resources_dns = redis_service.build_redis_dsn(RedisDatabase.RESOURCES) - client = RedisClientSDKHealthChecked(redis_resources_dns) + client = RedisClientSDK(redis_resources_dns) assert client assert client.redis_dsn == redis_resources_dns + + # ensure nothing happens if shutdown is called before setup + await client.shutdown() + await client.setup() await client._check_health() # noqa: SLF001 diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py index b111b8792ed..7cb6f86cc82 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py @@ -1,5 +1,5 @@ from fastapi import FastAPI -from servicelib.redis import RedisClientsManager +from servicelib.redis import RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase from ..core.settings import AppSettings @@ -10,9 +10,12 @@ async def on_startup() -> None: settings: AppSettings = app.state.settings app.state.redis_clients_manager = redis_clients_manager = RedisClientsManager( - databases={ - RedisDatabase.LOCKS, - RedisDatabase.DISTRIBUTED_IDENTIFIERS, + databases_configs={ + RedisManagerDBConfig(db) + for db in ( + RedisDatabase.LOCKS, + RedisDatabase.DISTRIBUTED_IDENTIFIERS, + ) }, settings=settings.REDIS, ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py index 286e66ef2f7..a76120eb964 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py @@ -1,7 +1,7 @@ from fastapi import Request from servicelib.fastapi.dependencies import get_app, get_reverse_url_mapper from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from simcore_service_dynamic_scheduler.services.redis import get_redis_client from ...services.rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_server @@ -18,7 +18,7 @@ def get_rabbitmq_rpc_server_from_request(request: Request) -> RabbitMQRPCClient: return get_rabbitmq_rpc_server(request.app) -def get_redis_client_from_request(request: Request) -> RedisClientSDKHealthChecked: +def get_redis_client_from_request(request: Request) -> RedisClientSDK: return get_redis_client(request.app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py index 93a220cb68a..515602aef7c 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py @@ -8,7 +8,7 @@ 
REDIS_CLIENT_UNHEALTHY_MSG, ) from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from ._dependencies import ( get_rabbitmq_client_from_request, @@ -29,9 +29,7 @@ async def healthcheck( rabbit_rpc_server: Annotated[ RabbitMQRPCClient, Depends(get_rabbitmq_rpc_server_from_request) ], - redis_client_sdk: Annotated[ - RedisClientSDKHealthChecked, Depends(get_redis_client_from_request) - ], + redis_client_sdk: Annotated[RedisClientSDK, Depends(get_redis_client_from_request)], ): if not rabbit_client.healthy or not rabbit_rpc_server.healthy: raise HealthCheckError(RABBITMQ_CLIENT_UNHEALTHY_MSG) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py index 30538d4a25f..7904d5e1a5d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py @@ -1,5 +1,5 @@ from fastapi import FastAPI -from servicelib.redis import RedisClientSDKHealthChecked +from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase, RedisSettings @@ -8,15 +8,11 @@ def setup_redis(app: FastAPI) -> None: async def on_startup() -> None: redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) - app.state.redis_client_sdk = client = RedisClientSDKHealthChecked( - redis_locks_dsn - ) + app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn) await client.setup() async def on_shutdown() -> None: - redis_client_sdk: None | RedisClientSDKHealthChecked = ( - app.state.redis_client_sdk - ) + redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk if redis_client_sdk: await redis_client_sdk.shutdown() @@ -24,6 +20,6 @@ async def on_shutdown() -> None: app.add_event_handler("shutdown", on_shutdown) -def get_redis_client(app: FastAPI) -> RedisClientSDKHealthChecked: - redis_client_sdk: RedisClientSDKHealthChecked = app.state.redis_client_sdk +def get_redis_client(app: FastAPI) -> RedisClientSDK: + redis_client_sdk: RedisClientSDK = app.state.redis_client_sdk return redis_client_sdk diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index fd1f891187f..deee93f1fbd 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -3,7 +3,7 @@ import redis.asyncio as aioredis from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from servicelib.redis import RedisClientSDK, RedisClientsManager +from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings from ._constants import APP_SETTINGS_KEY @@ -32,13 +32,16 @@ async def setup_redis_client(app: web.Application): """ redis_settings: RedisSettings = get_plugin_settings(app) app[_APP_REDIS_CLIENTS_MANAGER] = manager = RedisClientsManager( - databases={ - RedisDatabase.RESOURCES, - RedisDatabase.LOCKS, - RedisDatabase.VALIDATION_CODES, - RedisDatabase.SCHEDULED_MAINTENANCE, - RedisDatabase.USER_NOTIFICATIONS, - RedisDatabase.ANNOUNCEMENTS, + databases_configs={ + RedisManagerDBConfig(db) + for db in ( + RedisDatabase.RESOURCES, + RedisDatabase.LOCKS, + 
RedisDatabase.VALIDATION_CODES, + RedisDatabase.SCHEDULED_MAINTENANCE, + RedisDatabase.USER_NOTIFICATIONS, + RedisDatabase.ANNOUNCEMENTS, + ) }, settings=redis_settings, ) From ab8bb8945ea13b8257a5b7a62a6068864d36e06c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Thu, 13 Jun 2024 14:53:26 +0200 Subject: [PATCH 031/219] =?UTF-8?q?=F0=9F=8E=A8=20Improve=20profiling=20mi?= =?UTF-8?q?ddleware=20(#5935)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../servicelib/_utils_profiling_middleware.py | 29 ------- .../servicelib/aiohttp/profiler_middleware.py | 64 ++++++++------- .../src/servicelib/async_utils.py | 34 ++++++-- .../servicelib/fastapi/profiler_middleware.py | 80 +++++++++++++------ .../servicelib/utils_profiling_middleware.py | 67 ++++++++++++++++ .../director-v2/tests/integration/02/utils.py | 10 ++- tmp_urls.txt | 1 + 7 files changed, 191 insertions(+), 94 deletions(-) delete mode 100644 packages/service-library/src/servicelib/_utils_profiling_middleware.py create mode 100644 packages/service-library/src/servicelib/utils_profiling_middleware.py create mode 100644 tmp_urls.txt diff --git a/packages/service-library/src/servicelib/_utils_profiling_middleware.py b/packages/service-library/src/servicelib/_utils_profiling_middleware.py deleted file mode 100644 index bf477300298..00000000000 --- a/packages/service-library/src/servicelib/_utils_profiling_middleware.py +++ /dev/null @@ -1,29 +0,0 @@ -import json - -from servicelib.mimetype_constants import ( - MIMETYPE_APPLICATION_JSON, - MIMETYPE_APPLICATION_ND_JSON, -) - - -def append_profile(body: str, profile: str) -> str: - try: - json.loads(body) - body += "\n" if not body.endswith("\n") else "" - except json.decoder.JSONDecodeError: - pass - body += json.dumps({"profile": profile}) - return body - - -def check_response_headers( - response_headers: dict[bytes, bytes] -) -> list[tuple[bytes, bytes]]: - original_content_type: str = response_headers[b"content-type"].decode() - assert original_content_type in { - MIMETYPE_APPLICATION_ND_JSON.encode(), - MIMETYPE_APPLICATION_JSON.encode(), - } # nosec - headers: dict = {} - headers[b"content-type"] = MIMETYPE_APPLICATION_ND_JSON.encode() - return list(headers.items()) diff --git a/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py b/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py index f47c577d868..eab7d1fc598 100644 --- a/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py +++ b/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py @@ -1,43 +1,49 @@ from aiohttp.web import HTTPInternalServerError, Request, StreamResponse, middleware -from pyinstrument import Profiler from servicelib.mimetype_constants import ( MIMETYPE_APPLICATION_JSON, MIMETYPE_APPLICATION_ND_JSON, ) -from .._utils_profiling_middleware import append_profile +from ..utils_profiling_middleware import _is_profiling, _profiler, append_profile @middleware async def profiling_middleware(request: Request, handler): - profiler: Profiler | None = None if request.headers.get("x-profile") is not None: - profiler = Profiler(async_mode="enabled") - profiler.start() + try: + if _profiler.is_running or (_profiler.last_session is not None): + raise HTTPInternalServerError( + reason="Profiler is already running. 
Only a single request can be profiled at any given time.", + headers={}, + ) + _profiler.reset() + _is_profiling.set(True) - response = await handler(request) + with _profiler: + response = await handler(request) - if profiler is None: - return response - if response.content_type != MIMETYPE_APPLICATION_JSON: - raise HTTPInternalServerError( - reason=f"Profiling middleware is not compatible with {response.content_type=}", - headers={}, - ) + if response.content_type != MIMETYPE_APPLICATION_JSON: + raise HTTPInternalServerError( + reason=f"Profiling middleware is not compatible with {response.content_type=}", + headers={}, + ) - stream_response = StreamResponse( - status=response.status, - reason=response.reason, - headers=response.headers, - ) - stream_response.content_type = MIMETYPE_APPLICATION_ND_JSON - await stream_response.prepare(request) - await stream_response.write(response.body) - profiler.stop() - await stream_response.write( - append_profile( - "\n", profiler.output_text(unicode=True, color=True, show_all=True) - ).encode() - ) - await stream_response.write_eof() - return stream_response + stream_response = StreamResponse( + status=response.status, + reason=response.reason, + headers=response.headers, + ) + stream_response.content_type = MIMETYPE_APPLICATION_ND_JSON + await stream_response.prepare(request) + await stream_response.write(response.body) + await stream_response.write( + append_profile( + "\n", _profiler.output_text(unicode=True, color=True, show_all=True) + ).encode() + ) + await stream_response.write_eof() + finally: + _profiler.reset() + return stream_response + + return await handler(request) diff --git a/packages/service-library/src/servicelib/async_utils.py b/packages/service-library/src/servicelib/async_utils.py index 40f5ca32e0c..b25955da95a 100644 --- a/packages/service-library/src/servicelib/async_utils.py +++ b/packages/service-library/src/servicelib/async_utils.py @@ -4,7 +4,9 @@ from contextlib import suppress from dataclasses import dataclass from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Deque +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Deque + +from .utils_profiling_middleware import dont_profile, is_profiling, profile logger = logging.getLogger(__name__) @@ -30,6 +32,13 @@ class Context: task: asyncio.Task | None = None +@dataclass +class QueueElement: + do_profile: bool = False + input: Awaitable | None = None + output: Any | None = None + + _sequential_jobs_contexts: dict[str, Context] = {} @@ -138,15 +147,18 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any: if not context.initialized: context.initialized = True - async def worker(in_q: Queue, out_q: Queue) -> None: + async def worker(in_q: Queue[QueueElement], out_q: Queue) -> None: while True: - awaitable = await in_q.get() + element = await in_q.get() in_q.task_done() # check if requested to shutdown - if awaitable is None: - break try: - result = await awaitable + do_profile = element.do_profile + awaitable = element.input + if awaitable is None: + break + with profile(do_profile): + result = await awaitable except Exception as e: # pylint: disable=broad-except result = e await out_q.put(result) @@ -161,9 +173,15 @@ async def worker(in_q: Queue, out_q: Queue) -> None: worker(context.in_queue, context.out_queue) ) - await context.in_queue.put(decorated_function(*args, **kwargs)) + with dont_profile(): + # ensure profiler is disabled in order to capture profile of endpoint code + queue_input = QueueElement( + input=decorated_function(*args, 
**kwargs), + do_profile=is_profiling(), + ) + await context.in_queue.put(queue_input) + wrapped_result = await context.out_queue.get() - wrapped_result = await context.out_queue.get() if isinstance(wrapped_result, Exception): raise wrapped_result diff --git a/packages/service-library/src/servicelib/fastapi/profiler_middleware.py b/packages/service-library/src/servicelib/fastapi/profiler_middleware.py index 9e1e2bc14f0..cdf8c77206b 100644 --- a/packages/service-library/src/servicelib/fastapi/profiler_middleware.py +++ b/packages/service-library/src/servicelib/fastapi/profiler_middleware.py @@ -1,11 +1,16 @@ -from typing import Any +from typing import Any, Final from fastapi import FastAPI -from pyinstrument import Profiler +from servicelib.aiohttp import status from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from starlette.requests import Request -from .._utils_profiling_middleware import append_profile, check_response_headers +from ..utils_profiling_middleware import ( + _is_profiling, + _profiler, + append_profile, + check_response_headers, +) def is_last_response(response_headers: dict[bytes, bytes], message: dict[str, Any]): @@ -28,43 +33,66 @@ class ProfilerMiddleware: def __init__(self, app: FastAPI): self._app: FastAPI = app - self._profile_header_trigger: str = "x-profile" + self._profile_header_trigger: Final[str] = "x-profile" async def __call__(self, scope, receive, send): if scope["type"] != "http": await self._app(scope, receive, send) return - profiler: Profiler | None = None request: Request = Request(scope) request_headers = dict(request.headers) response_headers: dict[bytes, bytes] = {} - if request_headers.get(self._profile_header_trigger) is not None: + if request_headers.get(self._profile_header_trigger) is None: + await self._app(scope, receive, send) + return + + if _profiler.is_running or (_profiler.last_session is not None): + response = { + "type": "http.response.start", + "status": status.HTTP_500_INTERNAL_SERVER_ERROR, + "headers": [ + (b"content-type", b"text/plain"), + ], + } + await send(response) + response_body = { + "type": "http.response.body", + "body": b"Profiler is already running. 
Only a single request can be profiled at any given time.", } await send(response_body) return + try: request_headers.pop(self._profile_header_trigger) scope["headers"] = [ (k.encode("utf8"), v.encode("utf8")) for k, v in request_headers.items() ] - profiler = Profiler(async_mode="enabled") - profiler.start() + _profiler.start() + _is_profiling.set(True) - async def _send_wrapper(message): - if isinstance(profiler, Profiler): - nonlocal response_headers - if message["type"] == "http.response.start": - response_headers = dict(message.get("headers")) - message["headers"] = check_response_headers(response_headers) - elif message["type"] == "http.response.body": - if is_last_response(response_headers, message): - profiler.stop() - message["body"] = append_profile( - message["body"].decode(), - profiler.output_text( + async def _send_wrapper(message): + if _is_profiling.get(): + nonlocal response_headers + if message["type"] == "http.response.start": + response_headers = dict(message.get("headers")) + message["headers"] = check_response_headers(response_headers) + elif message["type"] == "http.response.body": + if is_last_response(response_headers, message): + _profiler.stop() + profile_text = _profiler.output_text( unicode=True, color=True, show_all=True - ), - ).encode() - else: - message["more_body"] = True - await send(message) + ) + _profiler.reset() + message["body"] = append_profile( + message["body"].decode(), profile_text + ).encode() + else: + message["more_body"] = True + await send(message) + + await self._app(scope, receive, _send_wrapper) - await self._app(scope, receive, _send_wrapper) + finally: + _profiler.reset() diff --git a/packages/service-library/src/servicelib/utils_profiling_middleware.py b/packages/service-library/src/servicelib/utils_profiling_middleware.py new file mode 100644 index 00000000000..bf4527eba9c --- /dev/null +++ b/packages/service-library/src/servicelib/utils_profiling_middleware.py @@ -0,0 +1,67 @@ +import contextvars +import json +from contextlib import contextmanager +from typing import Iterator + +from pyinstrument import Profiler +from servicelib.mimetype_constants import ( + MIMETYPE_APPLICATION_JSON, + MIMETYPE_APPLICATION_ND_JSON, +) + +_profiler = Profiler(async_mode="enabled") +_is_profiling = contextvars.ContextVar("_is_profiling", default=False) + + +def is_profiling() -> bool: + return _is_profiling.get() + + +@contextmanager +def profile(do_profile: bool | None = None) -> Iterator[None]: + """Context manager which runs the request profiler for the duration of the block (no-op when profiling is disabled)""" + if do_profile is None: + do_profile = _is_profiling.get() + if do_profile: + try: + _profiler.start() + yield + finally: + _profiler.stop() + else: + yield None + + +@contextmanager +def dont_profile() -> Iterator[None]: + if _is_profiling.get(): + try: + _profiler.stop() + yield + finally: + _profiler.start() + else: + yield + + +def append_profile(body: str, profile_text: str) -> str: + try: + json.loads(body) + body += "\n" if not body.endswith("\n") else "" + except json.decoder.JSONDecodeError: + pass + body += json.dumps({"profile": profile_text}) + return body + + +def check_response_headers( + response_headers: dict[bytes, bytes] +) -> list[tuple[bytes, bytes]]: + original_content_type: str = response_headers[b"content-type"].decode() + assert original_content_type in { + MIMETYPE_APPLICATION_ND_JSON, + MIMETYPE_APPLICATION_JSON, + } # nosec + headers: dict = {} + headers[b"content-type"] = MIMETYPE_APPLICATION_ND_JSON.encode() + return list(headers.items()) diff 
--git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 325398ddc9f..41efb6e84ec 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -529,13 +529,19 @@ async def _port_forward_legacy_service( # pylint: disable=redefined-outer-name # Legacy services are started --endpoint-mode dnsrr, it needs to # be changed to vip otherwise the port forward will not work result = run_command(f"docker service update {service_name} --endpoint-mode=vip") - assert "verify: Service converged" in result + assert ( + "verify: Service converged" in result + or f"verify: Service {service_name} converged" in result + ) # Finally forward the port on a random assigned port. result = run_command( f"docker service update {service_name} --publish-add :{internal_port}" ) - assert "verify: Service converged" in result + assert ( + "verify: Service converged" in result + or f"verify: Service {service_name} converged" in result + ) # inspect service and fetch the port async with aiodocker.Docker() as docker_client: diff --git a/tmp_urls.txt b/tmp_urls.txt new file mode 100644 index 00000000000..9edb50399e3 --- /dev/null +++ b/tmp_urls.txt @@ -0,0 +1 @@ +\"http://10.43.103.193.nip.io:8006/v0/me\" -X GET -H "accept: application/json" -H "Authorization: Basic dGVzdF9iODkxNjUwZmViZjY2OTNlZjc3MToxNzliM2E4OTRiNTY0ZGY5NjExYzY5ZmE4NDcxNjNiYzhmYzdkMGY0" From 0d4985e4de9ee5cf6c77fcae23fed5c96ec90368 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 14 Jun 2024 09:39:55 +0200 Subject: [PATCH 032/219] =?UTF-8?q?=F0=9F=90=9BMaintenance:=20disable=20CI?= =?UTF-8?q?=20problematic=20test=20(#5944)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/service-library/tests/test_redis_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/service-library/tests/test_redis_utils.py b/packages/service-library/tests/test_redis_utils.py index e1666f1b1a4..269c3807cc5 100644 --- a/packages/service-library/tests/test_redis_utils.py +++ b/packages/service-library/tests/test_redis_utils.py @@ -67,6 +67,7 @@ async def test_exclusive_sequentially( await _contained_client(get_redis_client_sdk, lock_name, task_duration) +@pytest.mark.skip(reason="ANE please check that one too") async def test_exclusive_parallel_lock_is_released_and_reacquired( get_redis_client_sdk: Callable[ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] From 7b2d66ab1af6fc7c7ff02983119d66a5ccbd52a3 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Fri, 14 Jun 2024 13:21:56 +0200 Subject: [PATCH 033/219] =?UTF-8?q?=F0=9F=90=9B=20disable=20health=20check?= =?UTF-8?q?=20for=20Redis=20client=20(#5946)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../service-library/src/servicelib/redis.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index d90f124be97..83e29d0b4f2 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -17,7 +17,7 @@ from settings_library.redis import RedisDatabase, RedisSettings from tenacity import retry -from .background_task import periodic_task, start_periodic_task, stop_periodic_task +from 
.background_task import periodic_task, stop_periodic_task from .logging_utils import log_catch, log_context from .retry_policies import RedisRetryPolicyUponInitialization @@ -54,7 +54,9 @@ class RedisClientSDK: _client: aioredis.Redis = field(init=False) _health_check_task: Task | None = None - _is_healthy: bool = False + _is_healthy: bool = ( + True # revert back to False when stop_periodic_task issue is fixed + ) @property def redis(self) -> aioredis.Redis: @@ -83,11 +85,12 @@ async def setup(self) -> None: raise CouldNotConnectToRedisError(dsn=self.redis_dsn) self._is_healthy = True - self._health_check_task = start_periodic_task( - self._check_health, - interval=self.health_check_interval, - task_name=f"redis_service_health_check_{self.redis_dsn}", - ) + # Disabled till issue with stop_periodic_task is fixed + # self._health_check_task = start_periodic_task( + # self._check_health, + # interval=self.health_check_interval, + # task_name=f"redis_service_health_check_{self.redis_dsn}", + # ) _logger.info( "Connection to %s succeeded with %s", From 90910149193a84c6716c615ebf2ae07e9e06c7fc Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 14 Jun 2024 14:42:45 +0200 Subject: [PATCH 034/219] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Maintenance:=20upg?= =?UTF-8?q?rade=20docker=20image=20base=20to=20latest=20Debian=20Bookworm?= =?UTF-8?q?=20&=20latest=20python=203.10.14=20=F0=9F=9A=A8=20(#5937)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/postgres-database/scripts/erd/Dockerfile | 4 ++-- packages/postgres-database/scripts/erd/Makefile | 2 +- .../postgres-database/tests/docker-compose.prod.yml | 1 - packages/postgres-database/tests/docker-compose.yml | 1 - packages/service-integration/Dockerfile | 4 ++-- .../tests/aiohttp/with_postgres/docker-compose.yml | 1 - packages/settings-library/tests/test_application.py | 2 +- requirements/how-to-upgrade-python.md | 2 +- requirements/tools/Dockerfile | 4 ++-- requirements/tools/Makefile | 2 +- scripts/apt-packages-versions/Dockerfile | 4 ++-- scripts/erd/Dockerfile | 4 ++-- scripts/maintenance/migrate_project/Dockerfile | 2 +- scripts/mypy/Dockerfile | 4 ++-- scripts/openapi-pydantic-models-generator.bash | 13 ++++--------- scripts/pydeps-docker/Dockerfile | 4 ++-- scripts/pydeps.bash | 12 ++++-------- scripts/pyupgrade.bash | 4 ++-- services/agent/Dockerfile | 4 ++-- services/api-server/Dockerfile | 4 ++-- .../tests/unit/_with_db/data/docker-compose.yml | 1 - services/autoscaling/Dockerfile | 6 +++--- .../utils/utils_docker.py | 1 - .../tests/manual/docker-compose-computational.yml | 1 - .../autoscaling/tests/manual/docker-compose.yml | 1 - .../autoscaling/tests/unit/test_utils_docker.py | 2 +- services/catalog/Dockerfile | 4 ++-- services/clusters-keeper/Dockerfile | 6 +++--- .../data/docker-compose.yml | 1 - services/dask-sidecar/Dockerfile | 4 ++-- services/datcore-adapter/Dockerfile | 4 ++-- services/director-v2/Dockerfile | 4 ++-- services/director-v2/docker-compose-extra.yml | 1 - .../core/dynamic_services_settings/scheduler.py | 2 +- services/docker-compose-build.yml | 1 - services/docker-compose-deploy.yml | 2 -- services/docker-compose-ops-ci.yml | 1 - services/docker-compose-ops.yml | 1 - services/docker-compose.devel-frontend.yml | 1 - services/docker-compose.devel.yml | 1 - services/docker-compose.local.yml | 1 - services/docker-compose.yml | 1 - services/dynamic-scheduler/Dockerfile | 4 ++-- services/dynamic-sidecar/Dockerfile | 8 ++++---- 
.../tests/unit/test_core_docker_compose_utils.py | 1 - .../tests/unit/test_core_docker_utils.py | 1 - .../dynamic-sidecar/tests/unit/test_core_utils.py | 1 - services/efs-guardian/Dockerfile | 6 +++--- services/invitations/Dockerfile | 4 ++-- services/migration/Dockerfile | 4 ++-- services/osparc-gateway-server/Dockerfile | 2 +- services/payments/Dockerfile | 4 ++-- services/resource-usage-tracker/Dockerfile | 4 ++-- .../client/tools/docker-compose.yml | 1 - services/storage/Dockerfile | 4 ++-- services/web/Dockerfile | 4 ++-- .../unit/isolated/test_application_settings.py | 2 +- .../tests/unit/with_dbs/docker-compose-devel.yml | 1 - .../server/tests/unit/with_dbs/docker-compose.yml | 1 - tests/e2e/docker-compose.yml | 1 - tests/environment-setup/test_used_docker_compose.py | 6 ++---- tests/performance/docker-compose.yml | 2 -- 62 files changed, 74 insertions(+), 112 deletions(-) diff --git a/packages/postgres-database/scripts/erd/Dockerfile b/packages/postgres-database/scripts/erd/Dockerfile index e3cf5d19c91..855ba38136b 100644 --- a/packages/postgres-database/scripts/erd/Dockerfile +++ b/packages/postgres-database/scripts/erd/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base RUN apt-get update \ && apt-get -y install --no-install-recommends\ diff --git a/packages/postgres-database/scripts/erd/Makefile b/packages/postgres-database/scripts/erd/Makefile index 1e781a30b77..c4adb54d2cb 100644 --- a/packages/postgres-database/scripts/erd/Makefile +++ b/packages/postgres-database/scripts/erd/Makefile @@ -3,7 +3,7 @@ # .DEFAULT_GOAL := help -PYTHON_VERSION=3.10.10 +PYTHON_VERSION=3.10.14 # locations REPODIR := $(shell git rev-parse --show-toplevel) diff --git a/packages/postgres-database/tests/docker-compose.prod.yml b/packages/postgres-database/tests/docker-compose.prod.yml index a35f7fb0fe1..cb0c8ffbe53 100644 --- a/packages/postgres-database/tests/docker-compose.prod.yml +++ b/packages/postgres-database/tests/docker-compose.prod.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: volumes: diff --git a/packages/postgres-database/tests/docker-compose.yml b/packages/postgres-database/tests/docker-compose.yml index 45f45b235de..cfaa1c2b9ba 100644 --- a/packages/postgres-database/tests/docker-compose.yml +++ b/packages/postgres-database/tests/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" diff --git a/packages/service-integration/Dockerfile b/packages/service-integration/Dockerfile index a2b55364427..0a45be3b4c4 100644 --- a/packages/service-integration/Dockerfile +++ b/packages/service-integration/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base LABEL maintainer=pcrespov diff --git a/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml b/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml index bb5e2785a39..22ebab6fa0c 100644 --- a/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml +++ b/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: image: 
"postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" diff --git a/packages/settings-library/tests/test_application.py b/packages/settings-library/tests/test_application.py index e2a3c35f01d..8447a253a21 100644 --- a/packages/settings-library/tests/test_application.py +++ b/packages/settings-library/tests/test_application.py @@ -17,7 +17,7 @@ def envs_from_docker_inspect() -> EnvVarsDict: "PATH=/home/scu/.venv/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", "LANG=C.UTF-8", "GPG_KEY=A035C8C19219BA821ECEA86B64E628F8D684696D", - "PYTHON_VERSION=3.10.10", + "PYTHON_VERSION=3.10.14", "PYTHON_PIP_VERSION=22.3.1", "PYTHON_SETUPTOOLS_VERSION=65.5.1", "PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/d5cb0afaf23b8520f1bbcfed521017b4a95f5c01/public/get-pip.py", diff --git a/requirements/how-to-upgrade-python.md b/requirements/how-to-upgrade-python.md index 27278c27397..90aaa6e1ef7 100644 --- a/requirements/how-to-upgrade-python.md +++ b/requirements/how-to-upgrade-python.md @@ -15,7 +15,7 @@ Both python and pip version are specified: - in the services/scripts ``Dockerfile``: ```Dockerfile ARG PYTHON_VERSION="3.9.12" - FROM python:${PYTHON_VERSION}-slim-buster as base + FROM python:${PYTHON_VERSION}-slim-bookworm as base ``` - in the CI ``.github/workflows/ci-testing-deploy.yml`` ```yaml diff --git a/requirements/tools/Dockerfile b/requirements/tools/Dockerfile index a35a9d33e55..64669258193 100644 --- a/requirements/tools/Dockerfile +++ b/requirements/tools/Dockerfile @@ -8,8 +8,8 @@ # - Can be installed with pyenv (SEE pyenv install --list ) # # -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base ENV VIRTUAL_ENV=/home/scu/.venv diff --git a/requirements/tools/Makefile b/requirements/tools/Makefile index e1dd0c246ef..8a39071579d 100644 --- a/requirements/tools/Makefile +++ b/requirements/tools/Makefile @@ -14,7 +14,7 @@ # .DEFAULT_GOAL := help -PYTHON_VERSION=3.10.10 +PYTHON_VERSION=3.10.14 # locations REPODIR := $(shell git rev-parse --show-toplevel) diff --git a/scripts/apt-packages-versions/Dockerfile b/scripts/apt-packages-versions/Dockerfile index 6a064320f79..c7c1724982e 100644 --- a/scripts/apt-packages-versions/Dockerfile +++ b/scripts/apt-packages-versions/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base RUN \ apt-get update && \ diff --git a/scripts/erd/Dockerfile b/scripts/erd/Dockerfile index 85fb18785d7..887ab055368 100644 --- a/scripts/erd/Dockerfile +++ b/scripts/erd/Dockerfile @@ -7,8 +7,8 @@ # - https://erdantic.drivendata.org/stable/ # -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base RUN apt-get update \ && apt-get -y install --no-install-recommends\ diff --git a/scripts/maintenance/migrate_project/Dockerfile b/scripts/maintenance/migrate_project/Dockerfile index bdef0d4c326..72afc85d823 100644 --- a/scripts/maintenance/migrate_project/Dockerfile +++ b/scripts/maintenance/migrate_project/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -FROM python:3.10.10-buster +FROM python:3.10.14-buster RUN curl https://rclone.org/install.sh | bash && \ rclone --version diff --git a/scripts/mypy/Dockerfile 
b/scripts/mypy/Dockerfile index 72fff2f9eec..06a82234250 100644 --- a/scripts/mypy/Dockerfile +++ b/scripts/mypy/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base COPY requirements.txt /requirements.txt diff --git a/scripts/openapi-pydantic-models-generator.bash b/scripts/openapi-pydantic-models-generator.bash index 3553bb9c561..9264a06b48c 100755 --- a/scripts/openapi-pydantic-models-generator.bash +++ b/scripts/openapi-pydantic-models-generator.bash @@ -6,13 +6,11 @@ set -o nounset set -o pipefail IFS=$'\n\t' -PYTHON_VERSION=3.10.10 +PYTHON_VERSION=3.10.14 IMAGE_NAME="local/datamodel-code-generator:${PYTHON_VERSION}" WORKDIR="$(pwd)" - -Build() -{ +Build() { docker buildx build \ --build-arg PYTHON_VERSION="${PYTHON_VERSION}" \ --build-arg HOME_DIR="/home/$USER" \ @@ -35,9 +33,7 @@ ENTRYPOINT ["datamodel-codegen", \ EOF } - -Run() -{ +Run() { docker run \ -it \ --workdir="/home/$USER/workdir" \ @@ -50,8 +46,7 @@ Run() } -Help() -{ +Help() { echo "Please check https://koxudaxi.github.io/datamodel-code-generator/ for help on usage" } diff --git a/scripts/pydeps-docker/Dockerfile b/scripts/pydeps-docker/Dockerfile index 8de828680e1..80c5777957a 100644 --- a/scripts/pydeps-docker/Dockerfile +++ b/scripts/pydeps-docker/Dockerfile @@ -8,8 +8,8 @@ # - Can be installed with pyenv (SEE pyenv install --list ) # # -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base RUN apt-get update \ diff --git a/scripts/pydeps.bash b/scripts/pydeps.bash index 18fcce0a469..affd21597d7 100755 --- a/scripts/pydeps.bash +++ b/scripts/pydeps.bash @@ -7,14 +7,12 @@ set -o nounset set -o pipefail IFS=$'\n\t' -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -PYTHON_VERSION=3.10.10 +SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) +PYTHON_VERSION=3.10.14 IMAGE_NAME="local/pydeps-devkit:${PYTHON_VERSION}" WORKDIR="$(pwd)" - -Build() -{ +Build() { docker buildx build \ --load \ --build-arg PYTHON_VERSION="${PYTHON_VERSION}" \ @@ -23,9 +21,7 @@ Build() "$SCRIPT_DIR/pydeps-docker" } - -Run() -{ +Run() { docker run \ -it \ --workdir="/home/$USER/workdir" \ diff --git a/scripts/pyupgrade.bash b/scripts/pyupgrade.bash index cb4420d6d3d..9a775686db3 100755 --- a/scripts/pyupgrade.bash +++ b/scripts/pyupgrade.bash @@ -8,7 +8,7 @@ IFS=$'\n\t' # # # NOTE: check --py* flag in CLI when PYTHON_VERSION is modified -PYTHON_VERSION=3.10.10 +PYTHON_VERSION=3.10.14 IMAGE_NAME="local/pyupgrade-devkit:${PYTHON_VERSION}" WORKDIR="$(pwd)" @@ -18,7 +18,7 @@ Build() { --build-arg HOME_DIR="/home/$USER" \ --tag "$IMAGE_NAME" \ - < /docker-pull.compose.yml" " && " 'echo "#!/bin/sh\necho Pulling started at \\$(date)\ndocker compose --project-name=autoscaleprepull --file=/docker-pull.compose.yml pull --ignore-pull-failures" > /docker-pull-script.sh' diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile index e7af8ae1712..ed5270c29b3 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # # USAGE: diff --git a/services/clusters-keeper/Dockerfile 
b/services/clusters-keeper/Dockerfile index f230abb4300..8a9fbc27f53 100644 --- a/services/clusters-keeper/Dockerfile +++ b/services/clusters-keeper/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: @@ -13,7 +13,7 @@ FROM python:${PYTHON_VERSION}-slim-buster as base LABEL maintainer=sanderegg # NOTE: to list the latest version run `make` inside `scripts/apt-packages-versions` -ENV DOCKER_APT_VERSION="5:24.0.5-1~debian.10~buster" +ENV DOCKER_APT_VERSION="5:26.1.4-1~debian.12~bookworm" # for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/] RUN rm -f /etc/apt/apt.conf.d/docker-clean && \ diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml index a5b24f37cef..fb9dff64131 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: dask-scheduler: image: ${DOCKER_REGISTRY:-itisfoundation}/dask-sidecar:${DOCKER_IMAGE_TAG} diff --git a/services/dask-sidecar/Dockerfile b/services/dask-sidecar/Dockerfile index fbfebce397d..3400eaa414e 100644 --- a/services/dask-sidecar/Dockerfile +++ b/services/dask-sidecar/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM --platform=${TARGETPLATFORM} python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM --platform=${TARGETPLATFORM} python:${PYTHON_VERSION}-slim-bookworm as base ARG TARGETPLATFORM ARG BUILDPLATFORM RUN echo "I am running on $BUILDPLATFORM, building for $TARGETPLATFORM" > /log diff --git a/services/datcore-adapter/Dockerfile b/services/datcore-adapter/Dockerfile index 13344b74cf1..f6c693651d0 100644 --- a/services/datcore-adapter/Dockerfile +++ b/services/datcore-adapter/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/director-v2/Dockerfile b/services/director-v2/Dockerfile index cb88ac688ed..1b74d8031ef 100644 --- a/services/director-v2/Dockerfile +++ b/services/director-v2/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/director-v2/docker-compose-extra.yml b/services/director-v2/docker-compose-extra.yml index 835409c3775..a997c7eab6a 100644 --- a/services/director-v2/docker-compose-extra.yml +++ b/services/director-v2/docker-compose-extra.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py index d157ca70f32..3efdee495e7 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py +++ 
b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py @@ -30,7 +30,7 @@ class DynamicServicesSchedulerSettings(BaseCustomSettings): ) DYNAMIC_SIDECAR_DOCKER_COMPOSE_VERSION: str = Field( - "3.8", description="docker-compose spec version used in the compose-specs" + "3.8", description="docker-compose spec version used in the compose-specs", deprecated=True ) DYNAMIC_SIDECAR_ENABLE_VOLUME_LIMITS: bool = Field( diff --git a/services/docker-compose-build.yml b/services/docker-compose-build.yml index e51ca8dbd7b..df66ec7a41c 100644 --- a/services/docker-compose-build.yml +++ b/services/docker-compose-build.yml @@ -9,7 +9,6 @@ # # NOTE: the dask-scheduler uses the same image as the dask-sidecar. there is no need to build it twice! # -version: "3.8" services: service-integration: image: local/service-integration:${BUILD_TARGET:?build_target_required} diff --git a/services/docker-compose-deploy.yml b/services/docker-compose-deploy.yml index f8e306b0ed2..a35002e47cb 100644 --- a/services/docker-compose-deploy.yml +++ b/services/docker-compose-deploy.yml @@ -1,5 +1,3 @@ -version: '3.8' -services: agent: image: ${DOCKER_REGISTRY:-itisfoundation}/agent:${DOCKER_IMAGE_TAG:-latest} api-server: diff --git a/services/docker-compose-ops-ci.yml b/services/docker-compose-ops-ci.yml index f2a244ba4f4..4edaf691488 100644 --- a/services/docker-compose-ops-ci.yml +++ b/services/docker-compose-ops-ci.yml @@ -17,7 +17,6 @@ # # NOTE: this stack cannot be called tools because it collides with default network created in services/static-webserver/client/tools/docker-compose.yml # IMPORTANT: This stack IS NOT used in the deployed version -version: "3.8" services: minio: diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index 4c6398e454a..ff4949f9c43 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -17,7 +17,6 @@ # # NOTE: this stack cannot be called tools because it collides with default network created in services/static-webserver/client/tools/docker-compose.yml # IMPORTANT: This stack IS NOT used in the deployed version -version: "3.8" services: adminer: diff --git a/services/docker-compose.devel-frontend.yml b/services/docker-compose.devel-frontend.yml index f662d567a65..bea76b28d7e 100644 --- a/services/docker-compose.devel-frontend.yml +++ b/services/docker-compose.devel-frontend.yml @@ -1,6 +1,5 @@ # This gets used only after services/docker-compose.local.yml and overrides the definition of # the static-webserver to be the only one running the dev image -version: "3.8" services: static-webserver: image: ${DOCKER_REGISTRY:-itisfoundation}/static-webserver:development diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 840497e81e3..542d9afa046 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -3,7 +3,6 @@ # NOTES: # - port 3000 used for ptsv # -version: "3.8" x-common-environment: &common-environment # Enforces *_DEBUG option in all services. ONLY allowed in devel-mode! DEBUG : "true" diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index b5859000ddb..2ba2aa37a2d 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -10,7 +10,6 @@ # - debug mode can be activated if SC_BOOT_MODE=debug-ptvsd (this is the default in devel). 
# - use vscode debugger "Python: Remote Attach *" config in ''.vscode-template/launch.json' # -version: "3.8" x-common-environment: &common_environment SWARM_STACK_NAME : ${SWARM_STACK_NAME:-simcore_local} services: diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 63f2cbfc8da..513224158c4 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" x-dask-tls-secrets: &dask_tls_secrets - source: dask_tls_key target: ${DASK_TLS_KEY} diff --git a/services/dynamic-scheduler/Dockerfile b/services/dynamic-scheduler/Dockerfile index 3cddd1080c8..f250232d1df 100644 --- a/services/dynamic-scheduler/Dockerfile +++ b/services/dynamic-scheduler/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile index 47b12160f03..0a6e223040f 100644 --- a/services/dynamic-sidecar/Dockerfile +++ b/services/dynamic-sidecar/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: # cd sercices/dynamic-sidecar @@ -12,8 +12,8 @@ FROM python:${PYTHON_VERSION}-slim-buster as base LABEL maintainer="Andrei Neagu " # NOTE: to list the latest version run `make` inside `scripts/apt-packages-versions` -ENV DOCKER_APT_VERSION="5:24.0.5-1~debian.10~buster" -ENV DOCKER_COMPOSE_APT_VERSION="2.20.2-1~debian.10~buster" +ENV DOCKER_APT_VERSION="5:26.1.4-1~debian.12~bookworm" +ENV DOCKER_COMPOSE_APT_VERSION="2.27.1-1~debian.12~bookworm" # for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/] RUN rm -f /etc/apt/apt.conf.d/docker-clean && \ diff --git a/services/dynamic-sidecar/tests/unit/test_core_docker_compose_utils.py b/services/dynamic-sidecar/tests/unit/test_core_docker_compose_utils.py index c44603b2057..115955a2d89 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_docker_compose_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_docker_compose_utils.py @@ -25,7 +25,6 @@ SLEEP_TIME_S = 60 COMPOSE_SPEC_SAMPLE = { - "version": "3.8", "services": { "my-test-container": { "environment": [ diff --git a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py index 900d1b2df6e..a7d2254425c 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py @@ -133,7 +133,6 @@ async def test_get_running_containers_count_from_names( COMPOSE_SPEC_SAMPLE = { - "version": "3.8", "services": { "my-test-container": { "environment": [ diff --git a/services/dynamic-sidecar/tests/unit/test_core_utils.py b/services/dynamic-sidecar/tests/unit/test_core_utils.py index 1e650ee3c48..76913bc9347 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_utils.py @@ -17,7 +17,6 @@ def cmd(tmp_path: Path, sleep: int): docker_compose = tmp_path / "docker_compose.yml" docker_compose.write_text( f"""\ -version: "3.8" services: my-container: environment: diff --git a/services/efs-guardian/Dockerfile b/services/efs-guardian/Dockerfile index d1468f443f2..4ab1ab6e8fa 100644 --- 
a/services/efs-guardian/Dockerfile +++ b/services/efs-guardian/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: @@ -13,7 +13,7 @@ FROM python:${PYTHON_VERSION}-slim-buster as base LABEL maintainer=sanderegg # NOTE: to list the latest version run `make` inside `scripts/apt-packages-versions` -ENV DOCKER_APT_VERSION="5:24.0.5-1~debian.10~buster" +ENV DOCKER_APT_VERSION="5:26.1.4-1~debian.12~bookworm" # for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/] RUN rm -f /etc/apt/apt.conf.d/docker-clean && \ diff --git a/services/invitations/Dockerfile b/services/invitations/Dockerfile index f511098eba3..5e96ede97e0 100644 --- a/services/invitations/Dockerfile +++ b/services/invitations/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/migration/Dockerfile b/services/migration/Dockerfile index bf015cea668..c40a4fd1dff 100644 --- a/services/migration/Dockerfile +++ b/services/migration/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base LABEL maintainer=sanderegg diff --git a/services/osparc-gateway-server/Dockerfile b/services/osparc-gateway-server/Dockerfile index dd040f4bf4e..f5e9ff32d93 100644 --- a/services/osparc-gateway-server/Dockerfile +++ b/services/osparc-gateway-server/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" +ARG PYTHON_VERSION="3.10.14" FROM python:${PYTHON_VERSION}-slim-bullseye as base ARG TARGETPLATFORM ARG BUILDPLATFORM diff --git a/services/payments/Dockerfile b/services/payments/Dockerfile index 692e6034022..e4ba13fe8f0 100644 --- a/services/payments/Dockerfile +++ b/services/payments/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/resource-usage-tracker/Dockerfile b/services/resource-usage-tracker/Dockerfile index 5af3d0add5e..459025da7f6 100644 --- a/services/resource-usage-tracker/Dockerfile +++ b/services/resource-usage-tracker/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: diff --git a/services/static-webserver/client/tools/docker-compose.yml b/services/static-webserver/client/tools/docker-compose.yml index 88cfbdeb7cd..c0533894c61 100644 --- a/services/static-webserver/client/tools/docker-compose.yml +++ b/services/static-webserver/client/tools/docker-compose.yml @@ -1,7 +1,6 @@ # # Used in development to compile source code using a running qooxdoo-kit container # -version: "3.8" services: qooxdoo-kit: image: itisfoundation/qooxdoo-kit:${QOOXDOO_KIT_TAG} diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index 72ceb513efb..b2e26143008 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -1,6 +1,6 @@ # 
syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: # cd sercices/storage diff --git a/services/web/Dockerfile b/services/web/Dockerfile index 55dd0dd8229..9cefb777a8c 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.10.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +ARG PYTHON_VERSION="3.10.14" +FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: # cd sercices/web diff --git a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py index 1b6e713cd4c..c97ff2a59b0 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings.py +++ b/services/web/server/tests/unit/isolated/test_application_settings.py @@ -78,7 +78,7 @@ def mock_env_dockerfile_build(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: PYTHON_GET_PIP_SHA256=6123659241292b2147b58922b9ffe11dda66b39d52d8a6f3aa310bc1d60ea6f7 PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/a1675ab6c2bd898ed82b1f58c486097f763c74a9/public/get-pip.py PYTHON_PIP_VERSION=21.1.3 - PYTHON_VERSION=3.10.10 + PYTHON_VERSION=3.10.14 PYTHONDONTWRITEBYTECODE=1 PYTHONOPTIMIZE=TRUE SC_BOOT_MODE=production diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml index 4437016287f..fb82b3be2da 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" diff --git a/services/web/server/tests/unit/with_dbs/docker-compose.yml b/services/web/server/tests/unit/with_dbs/docker-compose.yml index 0ed34d0fa51..dc57db07383 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: postgres: image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" diff --git a/tests/e2e/docker-compose.yml b/tests/e2e/docker-compose.yml index 549c43a0ac0..245eb07482b 100644 --- a/tests/e2e/docker-compose.yml +++ b/tests/e2e/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: registry: image: registry:2 diff --git a/tests/environment-setup/test_used_docker_compose.py b/tests/environment-setup/test_used_docker_compose.py index 08066b771d0..946da61d569 100644 --- a/tests/environment-setup/test_used_docker_compose.py +++ b/tests/environment-setup/test_used_docker_compose.py @@ -113,9 +113,7 @@ def test_validate_compose_file( ): assert compose_path.exists() compose = yaml.safe_load(compose_path.read_text()) - print( - str(compose_path.relative_to(repo_dir)), "-> version=", compose.get("version") - ) + print(str(compose_path.relative_to(repo_dir))) # NOTE: with docker stack config, the .env file MUST be alongside the docker-compose file @@ -134,4 +132,4 @@ def test_validate_compose_file( ) # About versioning https://docs.docker.com/compose/compose-file/compose-file-v3/ - assert compose["version"] == "3.8" + assert "version" not in compose diff --git a/tests/performance/docker-compose.yml b/tests/performance/docker-compose.yml index 
2aeed2782fc..2623454210e 100644 --- a/tests/performance/docker-compose.yml +++ b/tests/performance/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: master: image: itisfoundation/locust:${LOCUST_VERSION} From bfab27afaacd1cea38d68d05ea4785833f24228e Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 17 Jun 2024 11:09:32 +0200 Subject: [PATCH 035/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend=20bugfix]=20Li?= =?UTF-8?q?st=20everyone=20group=20and=20provide=20product-everyone=20grou?= =?UTF-8?q?p=20in=20potential=20collaborators=20(#5945)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/dashboard/CardBase.js | 2 +- .../class/osparc/dashboard/Dashboard.js | 2 +- .../class/osparc/dashboard/StudyBrowser.js | 37 ++++++------------ .../class/osparc/share/Collaborators.js | 21 +++++----- .../osparc/share/CollaboratorsService.js | 20 +++++++--- .../class/osparc/share/CollaboratorsStudy.js | 20 +++++++--- .../osparc/share/NewCollaboratorsManager.js | 17 +++++--- .../class/osparc/share/PublishTemplate.js | 39 +++++-------------- .../client/source/class/osparc/store/Store.js | 38 +++++++++--------- .../class/osparc/study/SaveAsTemplate.js | 10 ++--- 10 files changed, 98 insertions(+), 108 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index e6a6971f4c3..37d2d79be66 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -758,7 +758,7 @@ qx.Class.define("osparc.dashboard.CardBase", { Promise.all([ store.getGroupEveryone(), store.getProductEveryone(), - store.getVisibleMembers(), + store.getReachableMembers(), store.getGroupsOrganizations() ]) .then(values => { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js index 8519daadc14..e7887477da5 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js @@ -166,7 +166,7 @@ qx.Class.define("osparc.dashboard.Dashboard", { const preResourcePromises = []; const store = osparc.store.Store.getInstance(); - preResourcePromises.push(store.getVisibleMembers()); + preResourcePromises.push(store.getAllGroupsAndMembers()); preResourcePromises.push(store.getAllServices(true)); if (permissions.canDo("study.tag")) { preResourcePromises.push(osparc.data.Resources.get("tags")); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 76527248632..6197de76c7c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -72,15 +72,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // overridden initResources: function() { this._resourcesList = []; - const preResourcePromises = []; - const store = osparc.store.Store.getInstance(); - preResourcePromises.push(store.getVisibleMembers()); - preResourcePromises.push(store.getAllServices()); - if (osparc.data.Permissions.getInstance().canDo("study.tag")) { - 
preResourcePromises.push(osparc.data.Resources.get("tags")); - } - preResourcePromises.push(this.__getActiveStudy()); - Promise.all(preResourcePromises) + this.__getActiveStudy() .then(() => { this.getChildControl("resources-layout"); this.__attachEventHandlers(); @@ -100,23 +92,18 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __getActiveStudy: function() { - return new Promise(resolve => { - const params = { - url: { - tabId: osparc.utils.Utils.getClientSessionID() + const params = { + url: { + tabId: osparc.utils.Utils.getClientSessionID() + } + }; + return osparc.data.Resources.fetch("studies", "getActive", params) + .then(studyData => { + if (studyData) { + osparc.store.Store.getInstance().setCurrentStudyId(studyData["uuid"]); } - }; - osparc.data.Resources.fetch("studies", "getActive", params) - .then(studyData => { - if (studyData) { - osparc.store.Store.getInstance().setCurrentStudyId(studyData["uuid"]); - resolve(studyData["uuid"]); - } else { - resolve(null); - } - }) - .catch(err => console.error(err)); - }); + }) + .catch(err => console.error(err)); }, reloadResources: function() { diff --git a/services/static-webserver/client/source/class/osparc/share/Collaborators.js b/services/static-webserver/client/source/class/osparc/share/Collaborators.js index c0ed320a402..415fcec3888 100644 --- a/services/static-webserver/client/source/class/osparc/share/Collaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/Collaborators.js @@ -146,16 +146,6 @@ qx.Class.define("osparc.share.Collaborators", { } return vBox; - }, - - getEveryoneObj: function() { - return { - "gid": 1, - "label": qx.locale.Manager.tr("Public"), - "description": "", - "thumbnail": null, - "collabType": 0 - } } }, @@ -365,6 +355,11 @@ qx.Class.define("osparc.share.Collaborators", { _reloadCollaboratorsList: function() { this.__collaboratorsModel.removeAll(); + const store = osparc.store.Store.getInstance(); + const everyoneGIds = [ + store.getEveryoneProductGroup()["gid"], + store.getEveryoneGroup()["gid"] + ]; const accessRights = this._serializedDataCopy["accessRights"]; const collaboratorsList = []; Object.keys(accessRights).forEach(gid => { @@ -373,6 +368,7 @@ qx.Class.define("osparc.share.Collaborators", { // Do not override collaborator object const collaborator = osparc.utils.Utils.deepCloneObject(collab); if ("first_name" in collaborator) { + // user collaborator["thumbnail"] = osparc.utils.Avatar.getUrl(collaborator["login"], 32); collaborator["name"] = osparc.utils.Utils.firstsUp( `${"first_name" in collaborator && collaborator["first_name"] != null ? @@ -380,6 +376,11 @@ qx.Class.define("osparc.share.Collaborators", { `${"last_name" in collaborator && collaborator["last_name"] ? collaborator["last_name"] : ""}` ); + } else if (everyoneGIds.includes(parseInt(gid))) { + // everyone product or everyone + if (collaborator["thumbnail"] === null) { + collaborator["thumbnail"] = "@FontAwesome5Solid/globe/32"; + } } collaborator["accessRights"] = accessRights[gid]; collaborator["showOptions"] = (this._resourceType === "service") ? 
this._canIWrite() : this._canIDelete(); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js index 4532413de79..ae07e6b4ce7 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js @@ -34,13 +34,13 @@ qx.Class.define("osparc.share.CollaboratorsService", { this._resourceType = "service"; const serviceDataCopy = osparc.utils.Utils.deepCloneObject(serviceData); - if (serviceData.resourceType === "service") { - osparc.data.Roles.createServicesRolesResourceInfo(); - } + osparc.data.Roles.createServicesRolesResourceInfo(); - const initCollabs = this.self().getEveryoneObj(); + const initCollabs = []; + initCollabs.push(this.self().getEveryoneProductObj()); + initCollabs.push(this.self().getEveryoneObj()); - this.base(arguments, serviceDataCopy, [initCollabs]); + this.base(arguments, serviceDataCopy, initCollabs); }, events: { @@ -71,8 +71,16 @@ qx.Class.define("osparc.share.CollaboratorsService", { }; }, + getEveryoneProductObj: function() { + const everyoneProductGroup = osparc.store.Store.getInstance().getEveryoneProductGroup(); + const everyone = osparc.utils.Utils.deepCloneObject(everyoneProductGroup); + everyone["accessRights"] = this.getCollaboratorAccessRight(); + return everyone; + }, + getEveryoneObj: function() { - const everyone = osparc.share.Collaborators.getEveryoneObj(); + const everyoneGroup = osparc.store.Store.getInstance().getEveryoneGroup(); + const everyone = osparc.utils.Utils.deepCloneObject(everyoneGroup); everyone["accessRights"] = this.getCollaboratorAccessRight(); return everyone; } diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js index d1c2a02a008..e136c82dc19 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js @@ -35,13 +35,13 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { this._resourceType = studyData["resourceType"]; // study or template const studyDataCopy = osparc.data.model.Study.deepCloneStudyObject(studyData); + osparc.data.Roles.createRolesStudyResourceInfo(); + const initCollabs = []; if (osparc.data.Permissions.getInstance().canDo("study.everyone.share")) { + initCollabs.push(this.self().getEveryoneProductObj(this._resourceType === "study")); initCollabs.push(this.self().getEveryoneObj(this._resourceType === "study")); } - if (studyData.resourceType === "study" || studyData.resourceType === "template") { - osparc.data.Roles.createRolesStudyResourceInfo(); - } this.base(arguments, studyDataCopy, initCollabs); }, @@ -112,9 +112,17 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { return true; }, - getEveryoneObj: function(isResourceStudy) { - const everyone = osparc.share.Collaborators.getEveryoneObj(); - everyone["accessRights"] = isResourceStudy ? this.getCollaboratorAccessRight() : this.getViewerAccessRight(); + getEveryoneProductObj: function(isStudy) { + const everyoneProductGroup = osparc.store.Store.getInstance().getEveryoneProductGroup(); + const everyone = osparc.utils.Utils.deepCloneObject(everyoneProductGroup); + everyone["accessRights"] = isStudy ? 
this.getCollaboratorAccessRight() : this.getViewerAccessRight(); + return everyone; + }, + + getEveryoneObj: function(isStudy) { + const everyoneGroup = osparc.store.Store.getInstance().getEveryoneGroup(); + const everyone = osparc.utils.Utils.deepCloneObject(everyoneGroup); + everyone["accessRights"] = isStudy ? this.getCollaboratorAccessRight() : this.getViewerAccessRight(); return everyone; } }, diff --git a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js index fe42e46c1ed..4c11627b748 100644 --- a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js +++ b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js @@ -79,15 +79,20 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { }, __reloadCollaborators: function() { - let includeEveryone = false; + let includeProductEveryone = false; if (this.__showOrganizations === false) { - includeEveryone = false; + includeProductEveryone = false; + } else if (this.__resourceData && this.__resourceData["resourceType"] === "study") { + // studies can't be shared with ProductEveryone + includeProductEveryone = false; + } else if (this.__resourceData && this.__resourceData["resourceType"] === "template") { + // only users with permissions can share templates with ProductEveryone + includeProductEveryone = osparc.data.Permissions.getInstance().canDo("study.everyone.share"); + } else if (this.__resourceData && this.__resourceData["resourceType"] === "service") { - includeEveryone = true; - } else { - includeEveryone = osparc.data.Permissions.getInstance().canDo("study.everyone.share"); + // all users can share services with ProductEveryone + includeProductEveryone = true; } - osparc.store.Store.getInstance().getPotentialCollaborators(false, includeEveryone) + osparc.store.Store.getInstance().getPotentialCollaborators(false, includeProductEveryone) .then(potentialCollaborators => { this.__visibleCollaborators = potentialCollaborators; this.__addEditors(); diff --git a/services/static-webserver/client/source/class/osparc/share/PublishTemplate.js b/services/static-webserver/client/source/class/osparc/share/PublishTemplate.js index 00fdf240bc7..723407303f9 100644 --- a/services/static-webserver/client/source/class/osparc/share/PublishTemplate.js +++ b/services/static-webserver/client/source/class/osparc/share/PublishTemplate.js @@ -36,13 +36,11 @@ qx.Class.define("osparc.share.PublishTemplate", { const store = osparc.store.Store.getInstance(); Promise.all([ store.getGroupsMe(), - store.getProductEveryone(), - store.getGroupEveryone() + store.getProductEveryone() ]) .then(values => { const groupMe = values[0]; const groupProductEveryone = values[1]; - const groupEveryone = values[2]; this.__rbManager.getChildren().forEach(rb => { if (rb.contextId === this.self().SharingOpts["me"].contextId) { rb.gid = groupMe["gid"]; @@ -54,12 +52,6 @@ qx.Class.define("osparc.share.PublishTemplate", { rb.show(); } } - if (rb.contextId === this.self().SharingOpts["all"].contextId) { - if (osparc.data.Permissions.getInstance().canDo("studies.template.create.all")) { - rb.gid = groupEveryone["gid"]; - rb.show(); - } - } }); }); }, @@ -85,11 +77,7 @@ qx.Class.define("osparc.share.PublishTemplate", { }, "productAll": { contextId: 2, - label: "Public for Product users" - }, - "all": { - contextId: 3, - label: "Public" + label: "Available to all users" } } }, @@ -124,7 +112,6 @@
qx.Class.define("osparc.share.PublishTemplate", { break; } case "productAll": - case "all": rb.exclude(); this._add(rb); break; @@ -151,24 +138,18 @@ qx.Class.define("osparc.share.PublishTemplate", { return false; }, - __getSelectedOrgIDs: function() { - if (this.__isGroupSelected("orgs")) { - return this.__myOrgs.getSelectedOrgIDs(); - } - return []; - }, - getSelectedGroups: function() { let groupIDs = []; - const selection = this.__rbManager.getSelection(); - if (selection.length) { - switch (selection[0].contextId) { + const selections = this.__rbManager.getSelection(); + if (selections.length) { + const selection = selections[0]; + switch (selection.contextId) { case this.self().SharingOpts["me"].contextId: - case this.self().SharingOpts["orgs"].contextId: - groupIDs = this.__getSelectedOrgIDs(); + case this.self().SharingOpts["productAll"].contextId: + groupIDs = [selection.gid]; break; - case this.self().SharingOpts["all"].contextId: - groupIDs = [selection[0].gid]; + case this.self().SharingOpts["orgs"].contextId: + groupIDs = this.__myOrgs.getSelectedOrgIDs(); break; } } diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js index 973a741d776..69ab07d4dbb 100644 --- a/services/static-webserver/client/source/class/osparc/store/Store.js +++ b/services/static-webserver/client/source/class/osparc/store/Store.js @@ -176,6 +176,14 @@ qx.Class.define("osparc.store.Store", { check: "Object", init: {} }, + everyoneProductGroup: { + check: "Object", + init: {} + }, + everyoneGroup: { + check: "Object", + init: {} + }, clusters: { check: "Array", init: [], @@ -541,7 +549,7 @@ qx.Class.define("osparc.store.Store", { return new Promise(resolve => { const promises = []; promises.push(this.getGroupsMe()); - promises.push(this.getVisibleMembers()); + promises.push(this.getReachableMembers()); promises.push(this.getGroupsOrganizations()); promises.push(this.getProductEveryone()); promises.push(this.getGroupEveryone()); @@ -588,15 +596,12 @@ qx.Class.define("osparc.store.Store", { }); }, - getVisibleMembers: function(reload = false) { + getAllGroupsAndMembers: function() { return new Promise(resolve => { - const reachableMembers = this.getReachableMembers(); - if (!reload && Object.keys(reachableMembers).length) { - resolve(reachableMembers); - return; - } osparc.data.Resources.get("organizations") .then(resp => { + this.setEveryoneGroup(resp["all"]); + this.setEveryoneProductGroup(resp["product"]); const orgMembersPromises = []; const orgs = resp["organizations"]; orgs.forEach(org => { @@ -609,6 +614,7 @@ qx.Class.define("osparc.store.Store", { }); Promise.all(orgMembersPromises) .then(orgMemberss => { + const reachableMembers = this.getReachableMembers(); orgMemberss.forEach(orgMembers => { orgMembers.forEach(orgMember => { orgMember["label"] = osparc.utils.Utils.firstsUp( @@ -618,19 +624,18 @@ qx.Class.define("osparc.store.Store", { reachableMembers[orgMember["gid"]] = orgMember; }); }); - resolve(reachableMembers); + resolve(); }); }); }); }, - getPotentialCollaborators: function(includeMe = false, includeGlobalEveryone = false) { + getPotentialCollaborators: function(includeMe = false, includeProductEveryone = false) { return new Promise((resolve, reject) => { const promises = []; promises.push(this.getGroupsOrganizations()); - promises.push(this.getVisibleMembers()); - promises.push(this.getProductEveryone()); - promises.push(this.getGroupEveryone()); + promises.push(this.getReachableMembers()); + 
promises.push(this.getEveryoneProductGroup()); Promise.all(promises) .then(values => { const orgs = values[0]; // array @@ -657,15 +662,10 @@ qx.Class.define("osparc.store.Store", { }; } const productEveryone = values[2]; // entry - if (productEveryone && productEveryone["accessRights"]["read"]) { + if (includeProductEveryone && productEveryone) { productEveryone["collabType"] = 0; potentialCollaborators[productEveryone["gid"]] = productEveryone; } - const groupEveryone = values[3]; - if (includeGlobalEveryone && groupEveryone) { - groupEveryone["collabType"] = 0; - potentialCollaborators[groupEveryone["gid"]] = groupEveryone; - } resolve(potentialCollaborators); }) .catch(err => { @@ -696,7 +696,7 @@ qx.Class.define("osparc.store.Store", { getUser: function(uid) { return new Promise(resolve => { if (uid) { - this.getVisibleMembers() + this.getReachableMembers() .then(visibleMembers => { resolve(Object.values(visibleMembers).find(member => member.id === uid)); }) diff --git a/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js b/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js index f215f552edc..6ccec87e25a 100644 --- a/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js +++ b/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js @@ -68,14 +68,14 @@ qx.Class.define("osparc.study.SaveAsTemplate", { }, __publishTemplate: function() { + this.__studyDataClone["accessRights"] = {}; const selectedGroupIDs = this.__shareWith.getSelectedGroups(); selectedGroupIDs.forEach(gid => { - this.__studyDataClone["accessRights"][gid] = { - "read": true, - "write": false, - "delete": false - }; + this.__studyDataClone["accessRights"][gid] = osparc.share.CollaboratorsStudy.getViewerAccessRight(); }); + // Make publisher owner + const myGroupId = osparc.auth.Data.getInstance().getGroupId(); + this.__studyDataClone["accessRights"][myGroupId] = osparc.share.CollaboratorsStudy.getOwnerAccessRight(); this.fireDataEvent("publishTemplate", { "studyData": this.__studyDataClone, From f17ed4b9d976e417597835c9f578ba55be4b7b67 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 17 Jun 2024 17:46:41 +0200 Subject: [PATCH 036/219] =?UTF-8?q?=F0=9F=90=9B=3F=20[Frontend=20bugfix=3F?= =?UTF-8?q?]=20Directly=20switch=20to=20service's=20iframe=20when=20its=20?= =?UTF-8?q?status=20is=20``running``=20(#5948)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/data/model/Node.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 488c1b54f6f..940d1a27adb 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -1331,9 +1331,16 @@ qx.Class.define("osparc.data.model.Node", { // ping for some time until it is really ready fetch(srvUrl) .then(request => { + /* if (request.status >= 200 || request.status < 300) { this.__waitForServiceWebsite(srvUrl) } + */ + // instead of + // - requesting its frontend to make sure it is ready and ... 
+ // - waiting for the "load" event triggered by the content of the iframe + // we will skip those steps and directly switch its iframe + this.__serviceReadyIn(srvUrl); }) .catch(err => { this.getStatus().setInteractive("connecting"); From f909219202e7a0f73f31e825d07278fe47d6bc68 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 18:38:37 +0200 Subject: [PATCH 037/219] Bump codecov/codecov-action from 4.4.1 to 4.5.0 (#5954) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-testing-deploy.yml | 72 ++++++++++++------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 87c2ee0c7b2..4c8d9fe8d3c 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -336,7 +336,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/webserver.bash test_with_db 01 - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -375,7 +375,7 @@ jobs: run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 02 - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -414,7 +414,7 @@ jobs: run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 03 - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -456,7 +456,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/storage.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -500,7 +500,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/agent.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -581,7 +581,7 @@ jobs: - name: OAS backwards compatibility check if: always() run: ./ci/github/unit-testing/api-server.bash openapi-diff - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -623,7 +623,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/autoscaling.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -671,7 +671,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/catalog/test_failures - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -723,7 +723,7 @@ jobs: source .venv/bin/activate && \ pushd services/clusters-keeper && \ make test-ci-unit - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -771,7 +771,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/datcore-adapter/test_failures - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -806,7 +806,7 @@ jobs: run: ./ci/github/unit-testing/director.bash install - name: test run: ./ci/github/unit-testing/director.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -854,7 
+854,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/director-v2/test_failures - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -896,7 +896,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/aws-library.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -938,7 +938,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dask-task-models-library.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -980,7 +980,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dask-sidecar.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1032,7 +1032,7 @@ jobs: source .venv/bin/activate && \ pushd services/osparc-gateway-server && \ make test-ci-unit - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1074,7 +1074,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/payments.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1116,7 +1116,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dynamic-scheduler.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1168,7 +1168,7 @@ jobs: source .venv/bin/activate && \ pushd services/resource-usage-tracker && \ make test-ci-unit - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1210,7 +1210,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dynamic-sidecar.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1262,7 +1262,7 @@ jobs: source .venv/bin/activate && \ pushd services/efs-guardian && \ make test-ci-unit - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1376,7 +1376,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/postgres-database.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1418,7 +1418,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/invitations.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1460,7 +1460,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/service-integration.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1502,7 +1502,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/service-library.bash test_all - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1544,7 +1544,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/settings-library.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1585,7 +1585,7 @@ jobs: run: ./ci/github/unit-testing/models-library.bash typecheck - name: test run: ./ci/github/unit-testing/models-library.bash test - - uses: codecov/codecov-action@v4.4.1 + - 
uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1631,7 +1631,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/notifications-library.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1675,7 +1675,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/simcore-sdk.bash test - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: unittests #optional @@ -1784,7 +1784,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/webserver.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -1846,7 +1846,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/webserver.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -1908,7 +1908,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/director-v2.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -1974,7 +1974,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/director-v2.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -2038,7 +2038,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/dynamic-sidecar.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -2115,7 +2115,7 @@ jobs: run: | pushd services/osparc-gateway-server && \ make down - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests #optional @@ -2177,7 +2177,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/simcore-sdk.bash clean_up - - uses: codecov/codecov-action@v4.4.1 + - uses: codecov/codecov-action@v4.5.0 with: flags: integrationtests From bb3fe6b9063ee1992131586d41d07cbec6ec7c07 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 18 Jun 2024 12:06:31 +0200 Subject: [PATCH 038/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Request=20A?= =?UTF-8?q?ccount=20form=20for=20Billable=20products=20(#5951)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/CookiePolicy.js | 28 +++++++++++++++---- .../source/class/osparc/auth/LoginPage.js | 2 +- .../source/class/osparc/auth/ui/LoginView.js | 2 +- .../class/osparc/auth/ui/RequestAccount.js | 14 +++++++--- 4 files changed, 34 insertions(+), 12 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/CookiePolicy.js b/services/static-webserver/client/source/class/osparc/CookiePolicy.js index c719ec5f817..3d3b6d7d21c 100644 --- a/services/static-webserver/client/source/class/osparc/CookiePolicy.js +++ b/services/static-webserver/client/source/class/osparc/CookiePolicy.js @@ -52,6 +52,24 @@ qx.Class.define("osparc.CookiePolicy", { declineCookies: function() { osparc.utils.Utils.cookie.deleteCookie(this.COOKIES_ACCEPTED_NAME); + }, + + getITISPrivacyPolicyLink: function(linkText = "Privacy Policy") { + const color = qx.theme.manager.Color.getInstance().resolve("text"); + const link = `${linkText}`; + return link; + }, + + 
getS4LPrivacyPolicyLink: function(linkText = "Privacy Policy") { + const color = qx.theme.manager.Color.getInstance().resolve("text"); + const link = `${linkText}`; + return link; + }, + + getZMTEULALink: function(linkText = "end users license agreement (EULA)") { + const color = qx.theme.manager.Color.getInstance().resolve("text"); + const link = `${linkText}`; + return link; } }, @@ -60,9 +78,8 @@ qx.Class.define("osparc.CookiePolicy", { let control; switch (id) { case "cookie-text": { - const color = qx.theme.manager.Color.getInstance().resolve("text"); - const textLink = `Privacy Policy.`; - const text = this.tr("This website applies cookies to personalize your experience and to make our site easier to navigate. By visiting the site, you agree to the ") + textLink; + const link = osparc.CookiePolicy.getITISPrivacyPolicyLink("Privacy Policy"); + const text = this.tr("This website applies cookies to personalize your experience and to make our site easier to navigate. By visiting the site, you agree to the ") + link + "."; control = new qx.ui.basic.Label(text).set({ rich : true }); @@ -73,9 +90,8 @@ qx.Class.define("osparc.CookiePolicy", { break; } case "cookie-text-s4l": { - const color = qx.theme.manager.Color.getInstance().resolve("text"); - const textLink = `Privacy Policy.`; - const text = this.tr("This website applies cookies to personalize your experience and to make our site easier to navigate. By visiting the site, you agree to the ") + textLink; + const link = osparc.CookiePolicy.getS4LPrivacyPolicyLink("Privacy Policy"); + const text = this.tr("This website applies cookies to personalize your experience and to make our site easier to navigate. By visiting the site, you agree to the ") + link + "."; control = new qx.ui.basic.Label(text).set({ rich : true }); diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPage.js b/services/static-webserver/client/source/class/osparc/auth/LoginPage.js index 5b4f96e3086..6aaf505be0a 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginPage.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginPage.js @@ -212,7 +212,7 @@ qx.Class.define("osparc.auth.LoginPage", { const registration = this.getChildControl("registration-view"); const config = osparc.store.Store.getInstance().get("config"); let requestAccount = null; - if (config["invitation_required"] && osparc.product.Utils.isS4LProduct()) { + if (config["invitation_required"] && osparc.desktop.credits.Utils.areWalletsEnabled()) { requestAccount = this.getChildControl("request-account"); } const verifyPhoneNumber = this.getChildControl("verify-phone-number-view"); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index a55853dcfae..005557f9653 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -96,7 +96,7 @@ qx.Class.define("osparc.auth.ui.LoginView", { createAccountBtn.addListener("execute", () => { createAccountBtn.setEnabled(false); if (config["invitation_required"]) { - if (osparc.product.Utils.isS4LProduct()) { + if (osparc.desktop.credits.Utils.areWalletsEnabled()) { this.fireEvent("toRequestAccount"); } else { osparc.store.Support.openInvitationRequiredDialog(); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js 
b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js index 5323b7f2e93..d1f68963fcf 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js @@ -193,9 +193,13 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { doubleSpaced.push(hear); this._form.add(hear, this.tr("How did you hear about us?"), null, "hear"); - // eula links - const color = qx.theme.manager.Color.getInstance().resolve("text"); - const ppText = `I acknowledge that data will be processed in accordance with our privacy policy`; + // accept links + // Privacy Policy link + let ppLink = osparc.CookiePolicy.getS4LPrivacyPolicyLink("our privacy policy"); + if (osparc.product.Utils.isProduct("tis")) { + ppLink = osparc.CookiePolicy.getITISPrivacyPolicyLink("our privacy policy"); + } + const ppText = this.tr("I acknowledge that data will be processed in accordance with ") + ppLink; const privacyPolicy = new qx.ui.form.CheckBox().set({ required: true, value: false @@ -203,7 +207,9 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { doubleSpaced.push(privacyPolicy); this._form.add(privacyPolicy, ppText, null, "privacyPolicy") - const eulaText = `I accept the end users license agreement (EULA) and I will use the product in accordance with it.`; + // Eula link + const eulaLink = osparc.CookiePolicy.getZMTEULALink("end users license agreement (EULA)"); + const eulaText = "I accept the " + eulaLink + " and I will use the product in accordance with it"; const eula = new qx.ui.form.CheckBox().set({ required: true, value: false From 094dc138298d088f6744f5326695c7a046b9c114 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Tue, 18 Jun 2024 12:17:29 +0200 Subject: [PATCH 039/219] =?UTF-8?q?=F0=9F=90=9BBugfix:=20fix=20typo=20in?= =?UTF-8?q?=20docker-compose=20for=20CI=20deployment=20(#5958)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/docker-compose-deploy.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/docker-compose-deploy.yml b/services/docker-compose-deploy.yml index a35002e47cb..fb7adc69a9e 100644 --- a/services/docker-compose-deploy.yml +++ b/services/docker-compose-deploy.yml @@ -1,3 +1,4 @@ +services: agent: image: ${DOCKER_REGISTRY:-itisfoundation}/agent:${DOCKER_IMAGE_TAG:-latest} api-server: From 192e7314c6bc3fb572b18634992ed136f0fe8b40 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 18 Jun 2024 13:14:36 +0200 Subject: [PATCH 040/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend=20bugfix]=20pa?= =?UTF-8?q?tch=20Services=20access=20rights=20(#5949)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/service/ServiceListItem.js | 1 + .../client/source/class/osparc/share/CollaboratorsService.js | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js index 2de25faf1f3..c2bf481e19e 100644 --- a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js +++ b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js @@ -29,6 +29,7 @@ qx.Class.define("osparc.service.ServiceListItem", { allowGrowX: true }); + this.setResourceType("service"); if 
(serviceModel) { this.setServiceModel(serviceModel); } diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js index ae07e6b4ce7..c83ad202cb5 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js @@ -100,7 +100,7 @@ qx.Class.define("osparc.share.CollaboratorsService", { gids.forEach(gid => { newAccessRights[gid] = this.self().getCollaboratorAccessRight(); }); - osparc.info.ServiceUtils.patchServiceData(this._serializedDataCopy, "accessRights", this._serializedDataCopy) + osparc.info.ServiceUtils.patchServiceData(this._serializedDataCopy, "accessRights", newAccessRights) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); let text = this.tr("Editor(s) successfully added."); @@ -111,7 +111,7 @@ qx.Class.define("osparc.share.CollaboratorsService", { }) .catch(err => { console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went adding editor(s)"), "ERROR"); + osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong adding editor(s)"), "ERROR"); }) .finally(() => cb()); }, From 0bab157e6241017cdcac83f4cf760aa12101c8af Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 18 Jun 2024 14:41:25 +0200 Subject: [PATCH 041/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Splits=20models=5F?= =?UTF-8?q?library.services=20(#5921)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/extensions.json | 1 + .../scripts/create_node-meta-schema.py | 4 +- .../container_tasks/io.py | 3 +- .../api_schemas_catalog/services.py | 7 +- .../src/models_library/basic_regex.py | 2 + .../function_services_catalog/_utils.py | 12 +- .../function_services_catalog/api.py | 4 +- .../services/demo_units.py | 6 +- .../services/file_picker.py | 8 +- .../services/iter_range.py | 9 +- .../services/iter_sensitivity.py | 5 +- .../services/nodes_group.py | 5 +- .../services/parameters.py | 9 +- .../services/probes.py | 12 +- .../src/models_library/projects_nodes.py | 11 +- .../src/models_library/projects_nodes_io.py | 2 +- .../src/models_library/services.py | 569 ++---------------- .../src/models_library/services_access.py | 4 +- .../src/models_library/services_authoring.py | 41 ++ .../src/models_library/services_base.py | 47 ++ .../src/models_library/services_constants.py | 15 +- .../src/models_library/services_db.py | 7 +- .../src/models_library/services_enums.py | 21 + .../src/models_library/services_io.py | 252 ++++++++ .../src/models_library/services_metadata.py | 59 ++ .../src/models_library/services_regex.py | 44 ++ .../src/models_library/services_resources.py | 2 +- .../src/models_library/services_types.py | 82 +++ .../models_library/utils/common_validators.py | 3 +- .../src/models_library/utils/services_io.py | 2 +- .../tests/test__models_fit_schemas.py | 4 +- .../tests/test_function_services_catalog.py | 4 +- .../models-library/tests/test_services.py | 14 +- .../models-library/tests/test_services_io.py | 6 +- .../src/service_integration/_meta.py | 2 +- .../src/service_integration/osparc_config.py | 8 +- .../src/simcore_sdk/node_ports_v2/port.py | 6 +- .../node_ports_v2/ports_mapping.py | 6 +- scripts/common-package.Makefile | 1 - scripts/common.Makefile | 19 + .../models/schemas/solvers.py | 5 +- 
.../services/catalog.py | 4 +- .../test_api_routers_solvers_jobs.py | 4 +- services/catalog/openapi.json | 15 +- .../core/background_tasks.py | 14 +- .../services/access_rights.py | 10 +- .../services/function_services.py | 4 +- .../unit/test_services_function_services.py | 6 +- .../catalog/tests/unit/with_dbs/conftest.py | 6 +- .../test_api_routes_services__list.py | 10 +- .../with_dbs/test_services_access_rights.py | 6 +- .../computational_sidecar/models.py | 4 +- .../dask-sidecar/tests/unit/test_tasks.py | 4 +- .../models/comp_tasks.py | 8 +- .../db/repositories/comp_tasks/_utils.py | 17 +- .../utils/computations.py | 3 +- .../with_dbs/test_api_route_computations.py | 12 +- services/dynamic-sidecar/openapi.json | 3 +- .../api/v0/openapi.yaml | 28 +- .../meta_modeling/_iterations.py | 6 +- 60 files changed, 805 insertions(+), 682 deletions(-) create mode 100644 packages/models-library/src/models_library/services_authoring.py create mode 100644 packages/models-library/src/models_library/services_base.py create mode 100644 packages/models-library/src/models_library/services_io.py create mode 100644 packages/models-library/src/models_library/services_metadata.py create mode 100644 packages/models-library/src/models_library/services_regex.py create mode 100644 packages/models-library/src/models_library/services_types.py diff --git a/.vscode/extensions.json b/.vscode/extensions.json index cbf247328a8..106f454943c 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -2,6 +2,7 @@ "recommendations": [ "42Crunch.vscode-openapi", "charliermarsh.ruff", + "DevSoft.svg-viewer-vscode", "eamodio.gitlens", "exiasr.hadolint", "hediet.vscode-drawio", diff --git a/api/specs/director/schemas/scripts/create_node-meta-schema.py b/api/specs/director/schemas/scripts/create_node-meta-schema.py index 8089c17f9d0..29b4a02a9b2 100644 --- a/api/specs/director/schemas/scripts/create_node-meta-schema.py +++ b/api/specs/director/schemas/scripts/create_node-meta-schema.py @@ -8,14 +8,14 @@ from pathlib import Path import jsonref -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent if __name__ == "__main__": with Path.open(CURRENT_DIR.parent / "node-meta-v0.0.1-pydantic.json", "w") as f: - schema = ServiceDockerData.schema_json() + schema = ServiceMetaDataPublished.schema_json() schema_without_ref = jsonref.loads(schema) json.dump(schema_without_ref, f, indent=2) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 4b63d2f6911..ae72de765bf 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -3,9 +3,8 @@ from pathlib import Path from typing import Any, ClassVar, Union, cast -from models_library.basic_regex import MIME_TYPE_RE +from models_library.basic_regex import MIME_TYPE_RE, PROPERTY_KEY_RE from models_library.generics import DictModel -from models_library.services import PROPERTY_KEY_RE from pydantic import ( AnyUrl, BaseModel, diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index f491eb6bcaf..1d238e9d95c 100644 --- 
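A note on the renames in the two hunks above: `PROPERTY_KEY_RE` now lives in `basic_regex`, and `ServiceMetaDataPublished` is the new name for `ServiceDockerData`. A minimal sketch of the relocated imports (paths taken from this patch; the sample keys are made up):

```python
import re

# PROPERTY_KEY_RE moved into basic_regex (see the dask io.py hunk above);
# ServiceMetaDataPublished replaces the old ServiceDockerData name.
from models_library.basic_regex import PROPERTY_KEY_RE
from models_library.services import ServiceMetaDataPublished

assert re.match(PROPERTY_KEY_RE, "input_1")            # keys are [-_a-zA-Z0-9]+
assert re.match(PROPERTY_KEY_RE, "not a key") is None  # spaces are rejected

# what api/specs/.../create_node-meta-schema.py now dumps:
schema_json = ServiceMetaDataPublished.schema_json()
```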
a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -3,12 +3,13 @@ from pydantic import Extra from ..emails import LowerCaseEmailStr -from ..services import BaseServiceMetaData, ServiceDockerData +from ..services import ServiceMetaDataPublished from ..services_access import ServiceAccessRights +from ..services_metadata import ServiceMetaDataEditable from ..services_resources import ServiceResourcesDict -class ServiceUpdate(BaseServiceMetaData, ServiceAccessRights): +class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights): class Config: schema_extra: ClassVar[dict[str, Any]] = { "example": { @@ -61,7 +62,7 @@ class Config: class ServiceGet( - ServiceDockerData, ServiceAccessRights, BaseServiceMetaData + ServiceMetaDataPublished, ServiceAccessRights, ServiceMetaDataEditable ): # pylint: disable=too-many-ancestors owner: LowerCaseEmailStr | None diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index eb0af9b5646..c1d519a0231 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -75,3 +75,5 @@ r"(?::(?P[\w][\w.-]{0,127}))?" r"(?P\@sha256:[a-fA-F0-9]{32,64})?$" ) + +PROPERTY_KEY_RE = r"^[-_a-zA-Z0-9]+$" # TODO: PC->* it would be advisable to have this "variable friendly" (see VARIABLE_NAME_RE) diff --git a/packages/models-library/src/models_library/function_services_catalog/_utils.py b/packages/models-library/src/models_library/function_services_catalog/_utils.py index a89bdf170b7..4cd1275b5e0 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_utils.py +++ b/packages/models-library/src/models_library/function_services_catalog/_utils.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from urllib.parse import quote -from ..services import Author, ServiceDockerData, ServiceKey, ServiceVersion +from ..services import Author, ServiceKey, ServiceMetaDataPublished, ServiceVersion from ._settings import AUTHORS, FunctionServiceSettings _logger = logging.getLogger(__name__) @@ -30,7 +30,7 @@ class ServiceNotFound(KeyError): @dataclass class _Record: - meta: ServiceDockerData + meta: ServiceMetaDataPublished implementation: Callable | None = None is_under_development: bool = False @@ -44,14 +44,14 @@ def __init__(self, settings: FunctionServiceSettings | None = None): def add( self, - meta: ServiceDockerData, + meta: ServiceMetaDataPublished, implementation: Callable | None = None, is_under_development: bool = False, ): """ raises ValueError """ - if not isinstance(meta, ServiceDockerData): + if not isinstance(meta, ServiceMetaDataPublished): msg = f"Expected ServiceDockerData, got {type(meta)}" raise ValueError(msg) @@ -89,7 +89,7 @@ def _items( continue yield key, value - def iter_metadata(self) -> Iterator[ServiceDockerData]: + def iter_metadata(self) -> Iterator[ServiceMetaDataPublished]: """WARNING: this function might skip services marked as 'under development'""" for _, f in self._items(): yield f.meta @@ -115,7 +115,7 @@ def get_implementation( def get_metadata( self, service_key: ServiceKey, service_version: ServiceVersion - ) -> ServiceDockerData: + ) -> ServiceMetaDataPublished: """raises ServiceNotFound""" try: func = self._functions[(service_key, service_version)] diff --git a/packages/models-library/src/models_library/function_services_catalog/api.py 
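`FunctionServices.add` (above) now type-checks its `meta` argument against the renamed model. A hedged sketch of registering a front-end function service; the key, name, authors, and contact below are hypothetical, and only fields visible elsewhere in this patch are used:

```python
from models_library.function_services_catalog._utils import FunctionServices
from models_library.services import ServiceMetaDataPublished

# Hypothetical metadata for illustration only.
meta = ServiceMetaDataPublished.parse_obj(
    {
        "integration-version": "1.0.0",
        "key": "simcore/services/frontend/demo/probe",  # hypothetical key
        "version": "1.0.0",
        "type": "frontend",
        "name": "Demo probe",
        "description": "Illustration only",
        "authors": [{"name": "Jane Doe", "email": "jane.doe@example.com"}],
        "contact": "jane.doe@example.com",
        "inputs": {},
        "outputs": {},
    }
)

services = FunctionServices()
# add() raises ValueError unless meta is a ServiceMetaDataPublished instance
services.add(meta=meta, implementation=None, is_under_development=False)
```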
b/packages/models-library/src/models_library/function_services_catalog/api.py index ab11a2255a5..90c8e0224f9 100644 --- a/packages/models-library/src/models_library/function_services_catalog/api.py +++ b/packages/models-library/src/models_library/function_services_catalog/api.py @@ -8,7 +8,7 @@ from collections.abc import Iterator -from ..services import ServiceDockerData +from ..services import ServiceMetaDataPublished from ._key_labels import is_function_service, is_iterator_service from ._registry import catalog from .services.parameters import is_parameter_service @@ -20,7 +20,7 @@ assert is_probe_service # nosec -def iter_service_docker_data() -> Iterator[ServiceDockerData]: +def iter_service_docker_data() -> Iterator[ServiceMetaDataPublished]: for meta_obj in catalog.iter_metadata(): # NOTE: the originals are this way not modified from outside copied_meta_obj = meta_obj.copy(deep=True) diff --git a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py index 3e739d14b6e..298ac02c82b 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py @@ -1,10 +1,10 @@ from ...services import ( - LATEST_INTEGRATION_VERSION, - ServiceDockerData, ServiceInput, + ServiceMetaDataPublished, ServiceOutput, ServiceType, ) +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, PC, FunctionServices, create_fake_thumbnail_url @@ -15,7 +15,7 @@ # If this assumption cannot be guaranteed anymore the test must be updated. 
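As the api.py hunk above notes, `iter_service_docker_data` hands out `copy(deep=True)` snapshots so callers cannot corrupt the catalog's originals. A quick consistency check, as a sketch:

```python
from models_library.function_services_catalog.api import iter_service_docker_data

first_a = next(iter_service_docker_data())
first_b = next(iter_service_docker_data())

# Same content on every call, but a fresh object each time: mutating the
# returned metadata leaves the catalog's module-level originals untouched.
assert first_a == first_b
assert first_a is not first_b
```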
# -META = ServiceDockerData.parse_obj( +META = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/demo-units", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py index 8062b36fa56..0e0554842fb 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py @@ -1,10 +1,14 @@ from typing import Final -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ( + LATEST_INTEGRATION_VERSION, + ServiceMetaDataPublished, + ServiceType, +) from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices -META: Final = ServiceDockerData.parse_obj( +META: Final = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/file-picker", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py index b644c71af4f..662cbf327cf 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py @@ -1,15 +1,18 @@ from collections.abc import Iterator from ...projects_nodes import OutputID, OutputsDict -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ServiceMetaDataPublished, ServiceType +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices, create_fake_thumbnail_url -def create_metadata(type_name: str, prefix: str | None = None) -> ServiceDockerData: +def create_metadata( + type_name: str, prefix: str | None = None +) -> ServiceMetaDataPublished: prefix = prefix or type_name LABEL = f"{type_name.capitalize()} iterator" - return ServiceDockerData.parse_obj( + return ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/{prefix}-range", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py index 7df55e8bfb9..f0199389885 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py @@ -5,14 +5,15 @@ from pydantic import schema_of from ...projects_nodes import OutputID, OutputsDict -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ServiceMetaDataPublished, ServiceType +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import EN, OM, FunctionServices, create_fake_thumbnail_url LIST_NUMBERS_SCHEMA: dict[str, Any] = schema_of(list[float], title="list[number]") -META = ServiceDockerData.parse_obj( +META = 
ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/sensitivity", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py index c14ff00efb3..bfde87e52c3 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py @@ -1,4 +1,5 @@ -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ServiceMetaDataPublished, ServiceType +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices @@ -6,7 +7,7 @@ # NOTE: DO not mistake with simcore/services/frontend/nodes-group/macros/ # which needs to be redefined. # -META = ServiceDockerData.parse_obj( +META = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/nodes-group", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py index fe7a0593598..e0e25b6ee11 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py @@ -1,17 +1,18 @@ from typing import Final -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ServiceMetaDataPublished, ServiceType +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices, create_fake_thumbnail_url -def _create_metadata(type_name: str) -> ServiceDockerData: +def _create_metadata(type_name: str) -> ServiceMetaDataPublished: """ Represents a parameter (e.g. 
"x":5) in a study This is a parametrized node (or param-node in short) """ - meta = ServiceDockerData.parse_obj( + meta = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/{type_name}", @@ -44,7 +45,7 @@ def _create_metadata(type_name: str) -> ServiceDockerData: META_BOOL: Final = _create_metadata(type_name="boolean") META_INT: Final = _create_metadata(type_name="integer") META_STR: Final = _create_metadata(type_name="string") -META_ARRAY: Final = ServiceDockerData.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/array", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/probes.py b/packages/models-library/src/models_library/function_services_catalog/services/probes.py index 70db62691d1..e736efb2fb1 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/probes.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/probes.py @@ -1,12 +1,13 @@ from typing import Final -from ...services import LATEST_INTEGRATION_VERSION, ServiceDockerData, ServiceType +from ...services import ServiceMetaDataPublished, ServiceType +from ...services_constants import LATEST_INTEGRATION_VERSION from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, WVG, FunctionServices, create_fake_thumbnail_url -def _create_metadata(type_name: str) -> ServiceDockerData: - return ServiceDockerData.parse_obj( +def _create_metadata(type_name: str) -> ServiceMetaDataPublished: + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/{type_name}", @@ -30,13 +31,14 @@ def _create_metadata(type_name: str) -> ServiceDockerData: "outputs": {}, } ) + return obj META_NUMBER: Final = _create_metadata("number") META_BOOL: Final = _create_metadata("boolean") META_INT: Final = _create_metadata("integer") META_STR: Final = _create_metadata("string") -META_ARRAY: Final = ServiceDockerData.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/array", @@ -65,7 +67,7 @@ def _create_metadata(type_name: str) -> ServiceDockerData: } ) -META_FILE: Final = ServiceDockerData.parse_obj( +META_FILE: Final = ServiceMetaDataPublished.parse_obj( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/file", diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 27e302df721..38994e54b15 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -18,6 +18,7 @@ validator, ) +from .basic_regex import PROPERTY_KEY_RE from .basic_types import EnvVarKey, HttpUrlWithCustomMinLength from .projects_access import AccessEnum from .projects_nodes_io import ( @@ -29,15 +30,14 @@ ) from .projects_nodes_ui import Position from .projects_state import RunningState -from .services import PROPERTY_KEY_RE, ServiceKey, ServiceVersion - -# NOTE: WARNING the order here matters +from .services import ServiceKey, ServiceVersion InputTypes = Union[ + # NOTE: WARNING 
the order in Union[*] below matters! StrictBool, StrictInt, StrictFloat, - Json, # FIXME: remove if OM sends object/array. create project does NOT use pydantic + Json, str, PortLink, SimCoreFileLink | DatCoreFileLink, # *FileLink to service @@ -45,10 +45,11 @@ list[Any] | dict[str, Any], # arrays | object ] OutputTypes = Union[ + # NOTE: WARNING the order in Union[*] below matters! StrictBool, StrictInt, StrictFloat, - Json, # TODO: remove when OM sends object/array instead of json-formatted strings + Json, str, SimCoreFileLink | DatCoreFileLink, # *FileLink to service DownloadLink, diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 84f29f8e4f6..00a91cbca2b 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -23,11 +23,11 @@ from .basic_regex import ( DATCORE_FILE_ID_RE, + PROPERTY_KEY_RE, SIMCORE_S3_DIRECTORY_ID_RE, SIMCORE_S3_FILE_ID_RE, UUID_RE, ) -from .services import PROPERTY_KEY_RE if TYPE_CHECKING: pass diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py index 144d4b22d04..7b580de6555 100644 --- a/packages/models-library/src/models_library/services.py +++ b/packages/models-library/src/models_library/services.py @@ -1,486 +1,55 @@ -import re from datetime import datetime -from enum import Enum from typing import Any, ClassVar, Final, TypeAlias -from uuid import uuid4 -import arrow -from pydantic import ( - BaseModel, - ConstrainedStr, - Extra, - Field, - HttpUrl, - NonNegativeInt, - StrictBool, - StrictFloat, - StrictInt, - validator, -) +from pydantic import Extra, Field, NonNegativeInt -from .basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS, VERSION_RE +from .basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from .boot_options import BootOption, BootOptions from .emails import LowerCaseEmailStr -from .services_constants import FILENAME_RE, PROPERTY_TYPE_RE -from .services_ui import Widget -from .utils.json_schema import ( - InvalidJsonSchema, - any_ref_key, - jsonschema_validate_schema, -) - -# CONSTANTS ------------------------------------------- -# NOTE: move to _constants.py: SEE https://github.com/ITISFoundation/osparc-simcore/issues/3486 - -# e.g. simcore/services/comp/opencor -SERVICE_KEY_RE: Final[re.Pattern[str]] = re.compile( - r"^simcore/services/" - r"(?P(comp|dynamic|frontend))/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$" -) -# e.g. 
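The `WARNING the order in Union[*] below matters!` note above is load-bearing: pydantic v1 tries `Union` members left to right, which is why the strict scalar types lead `InputTypes`/`OutputTypes`. A self-contained sketch of the coercion the ordering prevents:

```python
from pydantic import BaseModel, StrictBool, StrictInt

class Permissive(BaseModel):
    value: int | bool  # bool is an int subclass: True gets coerced to 1

class Ordered(BaseModel):
    value: StrictBool | StrictInt  # strict members keep booleans booleans

assert type(Permissive(value=True).value) is int  # silently re-typed
assert Ordered(value=True).value is True          # preserved, as the ports expect
```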
simcore%2Fservices%2Fcomp%2Fopencor -SERVICE_ENCODED_KEY_RE: Final[re.Pattern[str]] = re.compile( - r"^simcore%2Fservices%2F" - r"(?P(comp|dynamic|frontend))%2F" - r"(?P[a-z0-9][a-z0-9_.-]*%2F)*" - r"(?P[a-z0-9-_]+[a-z0-9])$" +from .services_authoring import Author, Badge +from .services_base import ServiceBase, ServiceKeyVersion +from .services_constants import ANY_FILETYPE, LATEST_INTEGRATION_VERSION +from .services_enums import ServiceType +from .services_io import BaseServiceIOModel, ServiceInput, ServiceOutput +from .services_types import ( + DynamicServiceKey, + RunID, + ServiceKey, + ServicePortKey, + ServiceVersion, ) -DYNAMIC_SERVICE_KEY_RE = re.compile( - r"^simcore/services/dynamic/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$" +# NOTE: these asserts are here to avoid pre-commit to prune these imports +assert BaseServiceIOModel # nsoec +assert DynamicServiceKey # nosec +assert LATEST_INTEGRATION_VERSION # nosec +assert RunID # nosec +assert ServiceKey # nosec +assert ServiceType # nosec +assert ServiceVersion # nosec + +__all__: tuple[str, ...] = ( + "Author", + "Badge", + "BaseServiceIOModel", + "BootOptions", + "DynamicServiceKey", + "LATEST_INTEGRATION_VERSION", + "RunID", + "ServiceInput", + "ServiceKey", + "ServiceOutput", + "ServicePortKey", + "ServiceType", + "ServiceVersion", ) -DYNAMIC_SERVICE_KEY_FORMAT = "simcore/services/dynamic/{service_name}" - -COMPUTATIONAL_SERVICE_KEY_RE = re.compile( - r"^simcore/services/comp/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$" -) -COMPUTATIONAL_SERVICE_KEY_FORMAT = "simcore/services/comp/{service_name}" - -PROPERTY_KEY_RE = r"^[-_a-zA-Z0-9]+$" # TODO: PC->* it would be advisable to have this "variable friendly" (see VARIABLE_NAME_RE) - -LATEST_INTEGRATION_VERSION = "1.0.0" - - -# CONSTRAINT TYPES ------------------------------------------- -class ServicePortKey(ConstrainedStr): - regex = re.compile(PROPERTY_KEY_RE) - - class Config: - frozen = True - - -class FileName(ConstrainedStr): - regex = re.compile(FILENAME_RE) - - class Config: - frozen = True - - -class ServiceKey(ConstrainedStr): - regex = SERVICE_KEY_RE - - class Config: - frozen = True - - -class ServiceKeyEncoded(ConstrainedStr): - regex = re.compile(SERVICE_ENCODED_KEY_RE) - - class Config: - frozen = True - - -class DynamicServiceKey(ServiceKey): - regex = DYNAMIC_SERVICE_KEY_RE - - -class ComputationalServiceKey(ServiceKey): - regex = COMPUTATIONAL_SERVICE_KEY_RE - - -class ServiceVersion(ConstrainedStr): - regex = re.compile(VERSION_RE) - - class Config: - frozen = True - - -class RunID(str): - """ - Used to assign a unique identifier to the run of a service. - - Example usage: - The dynamic-sidecar uses this to distinguish between current - and old volumes for different runs. - Avoids overwriting data that left dropped on the node (due to an error) - and gives the osparc-agent an opportunity to back it up. 
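For reference, a standalone sketch of what the relocated `SERVICE_KEY_RE` accepts (pattern copied from above with the named groups dropped for brevity; the example keys are assumptions except `simcore/services/comp/opencor`, which the comment above cites):

```python
import re

SERVICE_KEY_RE = re.compile(
    r"^simcore/services/"
    r"(comp|dynamic|frontend)/"
    r"([a-z0-9][a-z0-9_.-]*/)*"
    r"([a-z0-9-_]+[a-z0-9])$"
)

assert SERVICE_KEY_RE.match("simcore/services/comp/opencor")
assert SERVICE_KEY_RE.match("simcore/services/dynamic/sub-dir/some-viewer")
assert not SERVICE_KEY_RE.match("simcore/services/comp/OpenCOR")  # uppercase rejected
assert not SERVICE_KEY_RE.match("simcore/services/backend/x")     # type outside the alternation
```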
- """ - - __slots__ = () - - @classmethod - def create(cls) -> "RunID": - # NOTE: there was a legacy version of this RunID - # legacy version: - # '0ac3ed64-665b-42d2-95f7-e59e0db34242' - # current version: - # '1690203099_0ac3ed64-665b-42d2-95f7-e59e0db34242' - utc_int_timestamp: int = arrow.utcnow().int_timestamp - run_id_format = f"{utc_int_timestamp}_{uuid4()}" - return cls(run_id_format) - - -class ServiceType(str, Enum): - COMPUTATIONAL = "computational" - DYNAMIC = "dynamic" - FRONTEND = "frontend" - BACKEND = "backend" - - -# TODO: create a flags enum that accounts for every column -# -# | service name | defininition | implementation | runs | ``ServiceType`` | | -# | --------------- | ------------ | -------------- | ----------------------- | ----------------------------- | --------------- | -# | ``file-picker`` | BE | FE | FE | ``ServiceType.FRONTEND`` | function | -# | ``isolve`` | DI-labels | DI | Dask-BE (own container) | ``ServiceType.COMPUTATIONAL`` | container | -# | ``jupyter-*`` | DI-labels | DI | DySC-BE (own container) | ``ServiceType.DYNAMIC`` | container | -# | ``iterator-*`` | BE | BE | BE (webserver) | ``ServiceType.BACKEND`` | function | -# | ``pyfun-*`` | BE | BE | Dask-BE (dask-sidecar) | ``ServiceType.COMPUTATIONAL`` | function | -# -# -# where FE (front-end), DI (docker image), Dask/DySC (dask/dynamic sidecar), BE (backend). - - -# MODELS ------------------------------------------- -class Badge(BaseModel): - name: str = Field( - ..., - description="Name of the subject", - examples=["travis-ci", "coverals.io", "github.io"], - ) - image: HttpUrl = Field( - ..., - description="Url to the badge", - examples=[ - "https://travis-ci.org/ITISFoundation/osparc-simcore.svg?branch=master", - "https://coveralls.io/repos/github/ITISFoundation/osparc-simcore/badge.svg?branch=master", - "https://img.shields.io/website-up-down-green-red/https/itisfoundation.github.io.svg?label=documentation", - ], - ) - url: HttpUrl = Field( - ..., - description="Link to the status", - examples=[ - "https://travis-ci.org/ITISFoundation/osparc-simcore 'State of CI: build, test and pushing images'", - "https://coveralls.io/github/ITISFoundation/osparc-simcore?branch=master 'Test coverage'", - "https://itisfoundation.github.io/", - ], - ) - - class Config: - extra = Extra.forbid - - -class Author(BaseModel): - name: str = Field(..., description="Name of the author", example="Jim Knopf") - email: LowerCaseEmailStr = Field( - ..., - examples=["sun@sense.eight", "deleen@minbar.bab"], - description="Email address", - ) - affiliation: str | None = Field( - None, examples=["Sense8", "Babylon 5"], description="Affiliation of the author" - ) - - class Config: - extra = Extra.forbid - - -class BaseServiceIOModel(BaseModel): - """ - Base class for service input/outputs - """ - - ## management - - ### human readable descriptors - display_order: float | None = Field( - None, - alias="displayOrder", - deprecated=True, - description="DEPRECATED: new display order is taken from the item position. 
This will be removed.", - ) - - label: str = Field(..., description="short name for the property", example="Age") - description: str = Field( - ..., - description="description of the property", - example="Age in seconds since 1970", - ) - - # mathematical and physics descriptors - property_type: str = Field( - ..., - alias="type", - description="data type expected on this input glob matching for data type is allowed", - examples=[ - "number", - "boolean", - "data:*/*", - "data:text/*", - "data:[image/jpeg,image/png]", - "data:application/json", - "data:application/json;schema=https://my-schema/not/really/schema.json", - "data:application/vnd.ms-excel", - "data:text/plain", - "data:application/hdf5", - "data:application/edu.ucdavis@ceclancy.xyz", - ], - regex=PROPERTY_TYPE_RE, - ) - - content_schema: dict[str, Any] | None = Field( - None, - description="jsonschema of this input/output. Required when type='ref_contentSchema'", - alias="contentSchema", - ) - - # value - file_to_key_map: dict[FileName, ServicePortKey] | None = Field( - None, - alias="fileToKeyMap", - description="Place the data associated with the named keys in files", - examples=[{"dir/input1.txt": "key_1", "dir33/input2.txt": "key2"}], - ) - - # TODO: should deprecate since content_schema include units - unit: str | None = Field( - None, - description="Units, when it refers to a physical quantity", - ) - - class Config: - extra = Extra.forbid - - @validator("content_schema") - @classmethod - def check_type_is_set_to_schema(cls, v, values): - if v is not None and (ptype := values["property_type"]) != "ref_contentSchema": - msg = f"content_schema is defined but set the wrong type.Expected type=ref_contentSchema but got ={ptype}." - raise ValueError(msg) - return v - - @validator("content_schema") - @classmethod - def check_valid_json_schema(cls, v): - if v is not None: - try: - jsonschema_validate_schema(schema=v) - - if any_ref_key(v): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3030 - msg = "Schemas with $ref are still not supported" - raise ValueError(msg) - - except InvalidJsonSchema as err: - failed_path = "->".join(map(str, err.path)) - msg = f"Invalid json-schema at {failed_path}: {err.message}" - raise ValueError(msg) from err - return v - - @classmethod - def _from_json_schema_base_implementation( - cls, port_schema: dict[str, Any] - ) -> dict[str, Any]: - description = port_schema.pop("description", port_schema["title"]) - return { - "label": port_schema["title"], - "description": description, - "type": "ref_contentSchema", - "contentSchema": port_schema, - } - - -class ServiceInput(BaseServiceIOModel): - """ - Metadata on a service input port - """ - - # TODO: should deprecate since content_schema include defaults as well - default_value: StrictBool | StrictInt | StrictFloat | str | None = Field( - None, alias="defaultValue", examples=["Dog", True] - ) - - widget: Widget | None = Field( - None, - description="custom widget to use instead of the default one determined from the data-type", - ) - - class Config(BaseServiceIOModel.Config): - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ - # file-wo-widget: - { - "displayOrder": 1, - "label": "Input files - file-wo-widget", - "description": "Files downloaded from service connected at the input", - "type": "data:*/*", - }, - # v2 - { - "displayOrder": 2, - "label": "Sleep Time - v2", - "description": "Time to wait before completion", - "type": "number", - "defaultValue": 0, - "unit": "second", - "widget": {"type": "TextArea", "details": 
{"minHeight": 3}}, - }, - # latest: - { - "label": "Sleep Time - latest", - "description": "Time to wait before completion", - "type": "number", - "defaultValue": 0, - "unit": "second", - "widget": {"type": "TextArea", "details": {"minHeight": 3}}, - }, - { - "label": "array_numbers", - "description": "Some array of numbers", - "type": "ref_contentSchema", - "contentSchema": { - "title": "list[number]", - "type": "array", - "items": {"type": "number"}, - }, - }, - { - "label": "my_object", - "description": "Some object", - "type": "ref_contentSchema", - "contentSchema": { - "title": "an object named A", - "type": "object", - "properties": { - "i": {"title": "Int", "type": "integer", "default": 3}, - "b": {"title": "Bool", "type": "boolean"}, - "s": {"title": "Str", "type": "string"}, - }, - "required": ["b", "s"], - }, - }, - ], - } - - @classmethod - def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput": - """Creates input port model from a json-schema""" - data = cls._from_json_schema_base_implementation(port_schema) - return cls.parse_obj(data) - - -class ServiceOutput(BaseServiceIOModel): - widget: Widget | None = Field( - None, - description="custom widget to use instead of the default one determined from the data-type", - deprecated=True, - ) - - class Config(BaseServiceIOModel.Config): - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ - { - "displayOrder": 2, - "label": "Time Slept", - "description": "Time the service waited before completion", - "type": "number", - }, - { - "displayOrder": 2, - "label": "Time Slept - units", - "description": "Time the service waited before completion", - "type": "number", - "unit": "second", - }, - { - "label": "Time Slept - w/o displayorder", - "description": "Time the service waited before completion", - "type": "number", - "unit": "second", - }, - { - "label": "Output file 1", - "displayOrder": 4.0, - "description": "Output file uploaded from the outputs folder", - "type": "data:*/*", - }, - ] - } - - @classmethod - def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput": - """Creates output port model from a json-schema""" - data = cls._from_json_schema_base_implementation(port_schema) - return cls.parse_obj(data) - - -class ServiceKeyVersion(BaseModel): - """This pair uniquely identifies a services""" - - key: ServiceKey = Field( - ..., - description="distinctive name for the node based on the docker registry path", - ) - version: ServiceVersion = Field( - ..., - description="service version number", - ) - - class Config: - frozen = True - - -class _BaseServiceCommonDataModel(BaseModel): - name: str = Field( - ..., - description="short, human readable name for the node", - example="Fast Counter", - ) - thumbnail: HttpUrl | None = Field( - None, - description="url to the thumbnail", - examples=[ - "https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png" - ], - ) - description: str = Field( - ..., - description="human readable description of the purpose of the node", - examples=[ - "Our best node type", - "The mother of all nodes, makes your numbers shine!", - ], - ) - - @validator("thumbnail", pre=True, always=False) - @classmethod - def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self-use - if value == "": - return None - return value ServiceInputsDict: TypeAlias = dict[ServicePortKey, ServiceInput] ServiceOutputsDict: TypeAlias = dict[ServicePortKey, ServiceOutput] -_EXAMPLE = { +_EXAMPLE: Final = { "name": "oSparc Python 
Runner", "key": "simcore/services/comp/osparc-python-runner", "type": "computational", @@ -506,7 +75,7 @@ def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self- "displayOrder": 1, "label": "Input data", "description": "Any code, requirements or data file", - "type": "data:*/*", + "type": ANY_FILETYPE, } }, "outputs": { @@ -514,7 +83,7 @@ def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self- "displayOrder": 1, "label": "Output data", "description": "All data produced by the script is zipped as output_data.zip", - "type": "data:*/*", + "type": ANY_FILETYPE, "fileToKeyMap": {"output_data.zip": "output_1"}, } }, @@ -527,14 +96,14 @@ def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self- "input_1": { "label": "Input data", "description": "Any code, requirements or data file", - "type": "data:*/*", + "type": ANY_FILETYPE, } }, "outputs": { "output_1": { "label": "Output data", "description": "All data produced by the script is zipped as output_data.zip", - "type": "data:*/*", + "type": ANY_FILETYPE, "fileToKeyMap": {"output_data.zip": "output_1"}, } }, @@ -550,9 +119,13 @@ def validate_thumbnail(cls, value): # pylint: disable=no-self-argument,no-self- } -class ServiceDockerData(ServiceKeyVersion, _BaseServiceCommonDataModel): +class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBase): """ - Static metadata for a service injected in the image labels + Service metadata at publication time + + - read-only (can only be changed overwriting the image labels in the registry) + - base metaddata + - injected in the image labels NOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image """ @@ -639,51 +212,3 @@ class Config: }, ] } - - -class BaseServiceMetaData(_BaseServiceCommonDataModel): - # Overrides all fields of _BaseServiceCommonDataModel: - # - for a partial update all members must be Optional - # FIXME: if API entry needs a schema to allow partial updates (e.g. patch/put), - # it should be implemented with a different model e.g. ServiceMetaDataUpdate - # - - name: str | None - thumbnail: HttpUrl | None - description: str | None - deprecated: datetime | None = Field( - default=None, - description="If filled with a date, then the service is to be deprecated at that date (e.g. 
cannot start anymore)", - ) - - # user-defined metatada - classifiers: list[str] | None - quality: dict[str, Any] = {} - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { - "example": { - "key": "simcore/services/dynamic/sim4life", - "version": "1.0.9", - "name": "sim4life", - "description": "s4l web", - "thumbnail": "https://thumbnailit.org/image", - "quality": { - "enabled": True, - "tsr_target": { - f"r{n:02d}": {"level": 4, "references": ""} - for n in range(1, 11) - }, - "annotations": { - "vandv": "", - "limitations": "", - "certificationLink": "", - "certificationStatus": "Uncertified", - }, - "tsr_current": { - f"r{n:02d}": {"level": 0, "references": ""} - for n in range(1, 11) - }, - }, - } - } diff --git a/packages/models-library/src/models_library/services_access.py b/packages/models-library/src/models_library/services_access.py index 9e121fad95a..4eec801de59 100644 --- a/packages/models-library/src/models_library/services_access.py +++ b/packages/models-library/src/models_library/services_access.py @@ -2,10 +2,12 @@ """ +from typing import TypeAlias + from pydantic import BaseModel, Field from pydantic.types import PositiveInt -GroupId = PositiveInt +GroupId: TypeAlias = PositiveInt class ServiceGroupAccessRights(BaseModel): diff --git a/packages/models-library/src/models_library/services_authoring.py b/packages/models-library/src/models_library/services_authoring.py new file mode 100644 index 00000000000..d57f0c27779 --- /dev/null +++ b/packages/models-library/src/models_library/services_authoring.py @@ -0,0 +1,41 @@ +from pydantic import BaseModel, Field, HttpUrl + +from .emails import LowerCaseEmailStr + + +class Badge(BaseModel): + name: str = Field( + ..., + description="Name of the subject", + examples=["travis-ci", "coverals.io", "github.io"], + ) + image: HttpUrl = Field( + ..., + description="Url to the badge", + examples=[ + "https://travis-ci.org/ITISFoundation/osparc-simcore.svg?branch=master", + "https://coveralls.io/repos/github/ITISFoundation/osparc-simcore/badge.svg?branch=master", + "https://img.shields.io/website-up-down-green-red/https/itisfoundation.github.io.svg?label=documentation", + ], + ) + url: HttpUrl = Field( + ..., + description="Link to the status", + examples=[ + "https://travis-ci.org/ITISFoundation/osparc-simcore 'State of CI: build, test and pushing images'", + "https://coveralls.io/github/ITISFoundation/osparc-simcore?branch=master 'Test coverage'", + "https://itisfoundation.github.io/", + ], + ) + + +class Author(BaseModel): + name: str = Field(..., description="Name of the author", example="Jim Knopf") + email: LowerCaseEmailStr = Field( + ..., + examples=["sun@sense.eight", "deleen@minbar.bab"], + description="Email address", + ) + affiliation: str | None = Field( + None, examples=["Sense8", "Babylon 5"], description="Affiliation of the author" + ) diff --git a/packages/models-library/src/models_library/services_base.py b/packages/models-library/src/models_library/services_base.py new file mode 100644 index 00000000000..874a4c05022 --- /dev/null +++ b/packages/models-library/src/models_library/services_base.py @@ -0,0 +1,47 @@ +from pydantic import BaseModel, Field, HttpUrl, validator + +from .services_types import ServiceKey, ServiceVersion +from .utils.common_validators import empty_str_to_none_pre_validator + + +class ServiceKeyVersion(BaseModel): + """This pair uniquely identifies a services""" + + key: ServiceKey = Field( + ..., + description="distinctive name for the node based on the docker registry path", + ) + version: 
ServiceVersion = Field( + ..., + description="service version number", + ) + + class Config: + frozen = True + + +class ServiceBase(BaseModel): + name: str = Field( + ..., + description="Display name: short, human readable name for the node", + example="Fast Counter", + ) + thumbnail: HttpUrl | None = Field( + None, + description="url to the thumbnail", + examples=[ + "https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png" + ], + ) + description: str = Field( + ..., + description="human readable description of the purpose of the node", + examples=[ + "Our best node type", + "The mother of all nodes, makes your numbers shine!", + ], + ) + + _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True, always=False)( + empty_str_to_none_pre_validator + ) diff --git a/packages/models-library/src/models_library/services_constants.py b/packages/models-library/src/models_library/services_constants.py index 8b5b21d8b53..049370611dd 100644 --- a/packages/models-library/src/models_library/services_constants.py +++ b/packages/models-library/src/models_library/services_constants.py @@ -1,13 +1,6 @@ -# -# NOTE: https://github.com/ITISFoundation/osparc-simcore/issues/3486 -# +from typing import Final -PROPERTY_TYPE_RE = r"^(number|integer|boolean|string|ref_contentSchema|data:([^/\s,]+/[^/\s,]+|\[[^/\s,]+/[^/\s,]+(,[^/\s]+/[^/,\s]+)*\]))$" -PROPERTY_TYPE_TO_PYTHON_TYPE_MAP = { - "integer": int, - "number": float, - "boolean": bool, - "string": str, -} +LATEST_INTEGRATION_VERSION: Final[str] = "1.0.0" -FILENAME_RE = r".+" + +ANY_FILETYPE: Final[str] = "data:*/*" diff --git a/packages/models-library/src/models_library/services_db.py b/packages/models-library/src/models_library/services_db.py index e4ceae79c1f..e7e51fe8955 100644 --- a/packages/models-library/src/models_library/services_db.py +++ b/packages/models-library/src/models_library/services_db.py @@ -9,8 +9,9 @@ from pydantic import Field from pydantic.types import PositiveInt -from .services import BaseServiceMetaData, ServiceKeyVersion from .services_access import ServiceGroupAccessRights +from .services_base import ServiceKeyVersion +from .services_metadata import ServiceMetaDataEditable # ------------------------------------------------------------------- # Databases models # - table services # - table services_access_rights -class ServiceMetaDataAtDB(ServiceKeyVersion, BaseServiceMetaData): +class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaDataEditable): # for a partial update all members must be Optional - classifiers: list[str] | None = Field([]) + classifiers: list[str] | None = Field(default_factory=list) owner: PositiveInt | None class Config: diff --git a/packages/models-library/src/models_library/services_enums.py b/packages/models-library/src/models_library/services_enums.py index 8a55c0a960b..50a83313482 100644 --- a/packages/models-library/src/models_library/services_enums.py +++ b/packages/models-library/src/models_library/services_enums.py @@ -40,3 +40,24 @@ def comparison_order() -> dict["ServiceState", int]: ServiceState.STOPPING: 5, ServiceState.COMPLETE: 6, } + + +class ServiceType(str, Enum): + COMPUTATIONAL = "computational" + DYNAMIC = "dynamic" + FRONTEND = "frontend" + BACKEND = "backend" + + +# NOTE on services: +# +# | service name | definition   | implementation | runs | ``ServiceType`` | | +# | --------------- | ------------ | -------------- | ----------------------- | ----------------------------- | --------------- | +# | ``file-picker`` | BE | FE | FE | 
``ServiceType.FRONTEND`` | function | +# | ``isolve`` | DI-labels | DI | Dask-BE (own container) | ``ServiceType.COMPUTATIONAL`` | container | +# | ``jupyter-*`` | DI-labels | DI | DySC-BE (own container) | ``ServiceType.DYNAMIC`` | container | +# | ``iterator-*`` | BE | BE | BE (webserver) | ``ServiceType.BACKEND`` | function | +# | ``pyfun-*`` | BE | BE | Dask-BE (dask-sidecar) | ``ServiceType.COMPUTATIONAL`` | function | +# +# +# where FE (front-end), DI (docker image), Dask/DySC (dask/dynamic sidecar), BE (backend). diff --git a/packages/models-library/src/models_library/services_io.py b/packages/models-library/src/models_library/services_io.py new file mode 100644 index 00000000000..9f05e3b5e55 --- /dev/null +++ b/packages/models-library/src/models_library/services_io.py @@ -0,0 +1,252 @@ +from typing import Any, ClassVar + +from pydantic import ( + BaseModel, + Extra, + Field, + StrictBool, + StrictFloat, + StrictInt, + validator, +) + +from .services_constants import ANY_FILETYPE +from .services_regex import PROPERTY_TYPE_RE +from .services_types import FileName, ServicePortKey +from .services_ui import Widget +from .utils.json_schema import ( + InvalidJsonSchema, + any_ref_key, + jsonschema_validate_schema, +) + + +class BaseServiceIOModel(BaseModel): + """ + Base class for service input/outputs + """ + + ## management + + ### human readable descriptors + display_order: float | None = Field( + None, + alias="displayOrder", + deprecated=True, + description="DEPRECATED: new display order is taken from the item position. This will be removed.", + ) + + label: str = Field(..., description="short name for the property", example="Age") + description: str = Field( + ..., + description="description of the property", + example="Age in seconds since 1970", + ) + + # mathematical and physics descriptors + property_type: str = Field( + ..., + alias="type", + description="data type expected on this input glob matching for data type is allowed", + examples=[ + "number", + "boolean", + ANY_FILETYPE, + "data:text/*", + "data:[image/jpeg,image/png]", + "data:application/json", + "data:application/json;schema=https://my-schema/not/really/schema.json", + "data:application/vnd.ms-excel", + "data:text/plain", + "data:application/hdf5", + "data:application/edu.ucdavis@ceclancy.xyz", + ], + regex=PROPERTY_TYPE_RE, + ) + + content_schema: dict[str, Any] | None = Field( + None, + description="jsonschema of this input/output. Required when type='ref_contentSchema'", + alias="contentSchema", + ) + + # value + file_to_key_map: dict[FileName, ServicePortKey] | None = Field( + None, + alias="fileToKeyMap", + description="Place the data associated with the named keys in files", + examples=[{"dir/input1.txt": "key_1", "dir33/input2.txt": "key2"}], + ) + + unit: str | None = Field( + None, + description="Units, when it refers to a physical quantity", + deprecated=True, # add x_unit in content_schema instead + ) + + class Config: + extra = Extra.forbid + + @validator("content_schema") + @classmethod + def _check_type_is_set_to_schema(cls, v, values): + if v is not None and (ptype := values["property_type"]) != "ref_contentSchema": + msg = f"content_schema is defined but set the wrong type. Expected type=ref_contentSchema but got ={ptype}." 
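# For illustration, a minimal sketch (not part of the patched file) of a port
# definition that satisfies this validator, assuming the models_library layout
# introduced above:
#
#   from models_library.services_io import ServiceInput
#
#   port = ServiceInput.parse_obj(
#       {
#           "label": "x",
#           "description": "a number",
#           "type": "ref_contentSchema",
#           "contentSchema": {"title": "X", "type": "number"},
#       }
#   )
#
# whereas e.g. type="number" combined with a contentSchema triggers the
# ValueError raised just below.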
+ raise ValueError(msg) + return v + + @validator("content_schema") + @classmethod + def _check_valid_json_schema(cls, v): + if v is not None: + try: + jsonschema_validate_schema(schema=v) + + if any_ref_key(v): + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3030 + msg = "Schemas with $ref are still not supported" + raise ValueError(msg) + + except InvalidJsonSchema as err: + failed_path = "->".join(map(str, err.path)) + msg = f"Invalid json-schema at {failed_path}: {err.message}" + raise ValueError(msg) from err + return v + + @classmethod + def _from_json_schema_base_implementation( + cls, port_schema: dict[str, Any] + ) -> dict[str, Any]: + description = port_schema.pop("description", port_schema["title"]) + return { + "label": port_schema["title"], + "description": description, + "type": "ref_contentSchema", + "contentSchema": port_schema, + } + + +class ServiceInput(BaseServiceIOModel): + """ + Metadata on a service input port + """ + + default_value: StrictBool | StrictInt | StrictFloat | str | None = Field( + None, + alias="defaultValue", + examples=["Dog", True], + deprecated=True, # Use content_schema defaults instead + ) + + widget: Widget | None = Field( + None, + description="custom widget to use instead of the default one determined from the data-type", + ) + + class Config(BaseServiceIOModel.Config): + schema_extra: ClassVar[dict[str, Any]] = { + "examples": [ + # file-wo-widget: + { + "displayOrder": 1, + "label": "Input files - file-wo-widget", + "description": "Files downloaded from service connected at the input", + "type": ANY_FILETYPE, + }, + # v2 + { + "displayOrder": 2, + "label": "Sleep Time - v2", + "description": "Time to wait before completion", + "type": "number", + "defaultValue": 0, + "unit": "second", + "widget": {"type": "TextArea", "details": {"minHeight": 3}}, + }, + # latest: + { + "label": "Sleep Time - latest", + "description": "Time to wait before completion", + "type": "number", + "defaultValue": 0, + "unit": "second", + "widget": {"type": "TextArea", "details": {"minHeight": 3}}, + }, + { + "label": "array_numbers", + "description": "Some array of numbers", + "type": "ref_contentSchema", + "contentSchema": { + "title": "list[number]", + "type": "array", + "items": {"type": "number"}, + }, + }, + { + "label": "my_object", + "description": "Some object", + "type": "ref_contentSchema", + "contentSchema": { + "title": "an object named A", + "type": "object", + "properties": { + "i": {"title": "Int", "type": "integer", "default": 3}, + "b": {"title": "Bool", "type": "boolean"}, + "s": {"title": "Str", "type": "string"}, + }, + "required": ["b", "s"], + }, + }, + ], + } + + @classmethod + def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput": + """Creates input port model from a json-schema""" + data = cls._from_json_schema_base_implementation(port_schema) + return cls.parse_obj(data) + + +class ServiceOutput(BaseServiceIOModel): + widget: Widget | None = Field( + None, + description="custom widget to use instead of the default one determined from the data-type", + deprecated=True, + ) + + class Config(BaseServiceIOModel.Config): + schema_extra: ClassVar[dict[str, Any]] = { + "examples": [ + { + "displayOrder": 2, + "label": "Time Slept", + "description": "Time the service waited before completion", + "type": "number", + }, + { + "displayOrder": 2, + "label": "Time Slept - units", + "description": "Time with units", + "type": "number", + "unit": "second", + }, + { + "label": "Time Slept - w/o displayorder", + 
"description": "Time without display order", + "type": "number", + "unit": "second", + }, + { + "label": "Output file 1", + "displayOrder": 4.0, + "description": "Output file uploaded from the outputs folder", + "type": ANY_FILETYPE, + }, + ] + } + + @classmethod + def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput": + """Creates output port model from a json-schema""" + data = cls._from_json_schema_base_implementation(port_schema) + return cls.parse_obj(data) diff --git a/packages/models-library/src/models_library/services_metadata.py b/packages/models-library/src/models_library/services_metadata.py new file mode 100644 index 00000000000..7ec688c448d --- /dev/null +++ b/packages/models-library/src/models_library/services_metadata.py @@ -0,0 +1,59 @@ +from datetime import datetime +from typing import Any, ClassVar + +from pydantic import Field, HttpUrl + +from .services_base import ServiceBase +from .services_constants import LATEST_INTEGRATION_VERSION +from .services_enums import ServiceType +from .services_types import DynamicServiceKey, ServiceKey, ServiceVersion + +assert DynamicServiceKey # nosec +assert LATEST_INTEGRATION_VERSION # nosec +assert ServiceKey # nosec +assert ServiceType # nosec +assert ServiceVersion # nosec + + +class ServiceMetaDataEditable(ServiceBase): + # Overrides all fields of ServiceBase for a partial update all members must be Optional + + name: str | None + thumbnail: HttpUrl | None + description: str | None + deprecated: datetime | None = Field( + default=None, + description="If filled with a date, then the service is to be deprecated at that date (e.g. cannot start anymore)", + ) + + # user-defined metatada + classifiers: list[str] | None + quality: dict[str, Any] = {} + + class Config: + schema_extra: ClassVar[dict[str, Any]] = { + "example": { + "key": "simcore/services/dynamic/sim4life", + "version": "1.0.9", + "name": "sim4life", + "description": "s4l web", + "thumbnail": "https://thumbnailit.org/image", + "quality": { + "enabled": True, + "tsr_target": { + f"r{n:02d}": {"level": 4, "references": ""} + for n in range(1, 11) + }, + "annotations": { + "vandv": "", + "limitations": "", + "certificationLink": "", + "certificationStatus": "Uncertified", + }, + "tsr_current": { + f"r{n:02d}": {"level": 0, "references": ""} + for n in range(1, 11) + }, + }, + } + } diff --git a/packages/models-library/src/models_library/services_regex.py b/packages/models-library/src/models_library/services_regex.py new file mode 100644 index 00000000000..c4c9e84d2e0 --- /dev/null +++ b/packages/models-library/src/models_library/services_regex.py @@ -0,0 +1,44 @@ +import re +from typing import Final + +PROPERTY_TYPE_RE = r"^(number|integer|boolean|string|ref_contentSchema|data:([^/\s,]+/[^/\s,]+|\[[^/\s,]+/[^/\s,]+(,[^/\s]+/[^/,\s]+)*\]))$" +PROPERTY_TYPE_TO_PYTHON_TYPE_MAP = { + "integer": int, + "number": float, + "boolean": bool, + "string": str, +} + +FILENAME_RE = r".+" + + +# e.g. simcore/services/comp/opencor +SERVICE_KEY_RE: Final[re.Pattern[str]] = re.compile( + r"^simcore/services/" + r"(?P(comp|dynamic|frontend))/" + r"(?P[a-z0-9][a-z0-9_.-]*/)*" + r"(?P[a-z0-9-_]+[a-z0-9])$" +) +# e.g. 
simcore%2Fservices%2Fcomp%2Fopencor +SERVICE_ENCODED_KEY_RE: Final[re.Pattern[str]] = re.compile( + r"^simcore%2Fservices%2F" + r"(?P<type>(comp|dynamic|frontend))%2F" + r"(?P<subdir>[a-z0-9][a-z0-9_.-]*%2F)*" + r"(?P<name>[a-z0-9-_]+[a-z0-9])$" +) + +DYNAMIC_SERVICE_KEY_RE: Final[re.Pattern[str]] = re.compile( + r"^simcore/services/dynamic/" + r"(?P<subdir>[a-z0-9][a-z0-9_.-]*/)*" + r"(?P<name>[a-z0-9-_]+[a-z0-9])$" +) +DYNAMIC_SERVICE_KEY_FORMAT = "simcore/services/dynamic/{service_name}" + + +# Computational regex & format +COMPUTATIONAL_SERVICE_KEY_RE: Final[re.Pattern[str]] = re.compile( + r"^simcore/services/comp/" + r"(?P<subdir>[a-z0-9][a-z0-9_.-]*/)*" + r"(?P<name>[a-z0-9-_]+[a-z0-9])$" +) +COMPUTATIONAL_SERVICE_KEY_FORMAT: Final[str] = "simcore/services/comp/{service_name}" diff --git a/packages/models-library/src/models_library/services_resources.py b/packages/models-library/src/models_library/services_resources.py index 7eac0b75032..7fe4f268f8c 100644 --- a/packages/models-library/src/models_library/services_resources.py +++ b/packages/models-library/src/models_library/services_resources.py @@ -42,7 +42,7 @@ class ResourceValue(BaseModel): @root_validator() @classmethod - def ensure_limits_are_equal_or_above_reservations(cls, values): + def _ensure_limits_are_equal_or_above_reservations(cls, values): if isinstance(values["reservation"], str): # in case of string, the limit is the same as the reservation values["limit"] = values["reservation"] diff --git a/packages/models-library/src/models_library/services_types.py b/packages/models-library/src/models_library/services_types.py new file mode 100644 index 00000000000..495193dd15d --- /dev/null +++ b/packages/models-library/src/models_library/services_types.py @@ -0,0 +1,82 @@ +import re +from uuid import uuid4 + +import arrow +from pydantic import ConstrainedStr + +from .basic_regex import PROPERTY_KEY_RE, VERSION_RE +from .services_regex import ( + COMPUTATIONAL_SERVICE_KEY_RE, + DYNAMIC_SERVICE_KEY_RE, + FILENAME_RE, + SERVICE_ENCODED_KEY_RE, + SERVICE_KEY_RE, +) + + +class ServicePortKey(ConstrainedStr): + regex = re.compile(PROPERTY_KEY_RE) + + class Config: + frozen = True + + +class FileName(ConstrainedStr): + regex = re.compile(FILENAME_RE) + + class Config: + frozen = True + + +class ServiceKey(ConstrainedStr): + regex = SERVICE_KEY_RE + + class Config: + frozen = True + + +class ServiceKeyEncoded(ConstrainedStr): + regex = re.compile(SERVICE_ENCODED_KEY_RE) + + class Config: + frozen = True + + +class DynamicServiceKey(ServiceKey): + regex = DYNAMIC_SERVICE_KEY_RE + + +class ComputationalServiceKey(ServiceKey): + regex = COMPUTATIONAL_SERVICE_KEY_RE + + +class ServiceVersion(ConstrainedStr): + regex = re.compile(VERSION_RE) + + class Config: + frozen = True + + +class RunID(str): + """ + Used to assign a unique identifier to the run of a service. + + Example usage: + The dynamic-sidecar uses this to distinguish between current + and old volumes for different runs. + Avoids overwriting data left behind on the node (e.g. due to an error) + and gives the osparc-agent an opportunity to back it up. 
+ """ + + __slots__ = () + + @classmethod + def create(cls) -> "RunID": + # NOTE: there was a legacy version of this RunID + # legacy version: + # '0ac3ed64-665b-42d2-95f7-e59e0db34242' + # current version: + # '1690203099_0ac3ed64-665b-42d2-95f7-e59e0db34242' + utc_int_timestamp: int = arrow.utcnow().int_timestamp + run_id_format = f"{utc_int_timestamp}_{uuid4()}" + return cls(run_id_format) diff --git a/packages/models-library/src/models_library/utils/common_validators.py b/packages/models-library/src/models_library/utils/common_validators.py index 09e3d7138b8..681253ae8e2 100644 --- a/packages/models-library/src/models_library/utils/common_validators.py +++ b/packages/models-library/src/models_library/utils/common_validators.py @@ -3,12 +3,13 @@ Example: from pydantic import BaseModel, validator + from models_library.utils.common_validators import empty_str_to_none_pre_validator class MyModel(BaseModel): thumbnail: str | None _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True)( - empty_str_to_none + empty_str_to_none_pre_validator ) SEE https://docs.pydantic.dev/usage/validators/#reuse-validators diff --git a/packages/models-library/src/models_library/utils/services_io.py b/packages/models-library/src/models_library/utils/services_io.py index 81de4a0a33c..ad3a00439b2 100644 --- a/packages/models-library/src/models_library/utils/services_io.py +++ b/packages/models-library/src/models_library/utils/services_io.py @@ -5,7 +5,7 @@ from pydantic import schema_of from ..services import ServiceInput, ServiceOutput -from ..services_constants import PROPERTY_TYPE_TO_PYTHON_TYPE_MAP +from ..services_regex import PROPERTY_TYPE_TO_PYTHON_TYPE_MAP PortKindStr = Literal["input", "output"] JsonSchemaDict = dict[str, Any] diff --git a/packages/models-library/tests/test__models_fit_schemas.py b/packages/models-library/tests/test__models_fit_schemas.py index b909a3c2890..31952e7108f 100644 --- a/packages/models-library/tests/test__models_fit_schemas.py +++ b/packages/models-library/tests/test__models_fit_schemas.py @@ -7,7 +7,7 @@ import pytest from models_library.projects import Project -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from pydantic.main import BaseModel @@ -15,7 +15,7 @@ @pytest.mark.parametrize( "pydantic_model, original_json_schema", [ - (ServiceDockerData, "node-meta-v0.0.1-pydantic.json"), + (ServiceMetaDataPublished, "node-meta-v0.0.1-pydantic.json"), (Project, "project-v0.0.1-pydantic.json"), ], ) diff --git a/packages/models-library/tests/test_function_services_catalog.py b/packages/models-library/tests/test_function_services_catalog.py index eafb6af0617..0844ed29a4e 100644 --- a/packages/models-library/tests/test_function_services_catalog.py +++ b/packages/models-library/tests/test_function_services_catalog.py @@ -15,14 +15,14 @@ is_function_service, iter_service_docker_data, ) -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished @pytest.mark.parametrize( "image_metadata", iter_service_docker_data(), ids=lambda obj: obj.name ) def test_create_frontend_services_metadata(image_metadata): - assert isinstance(image_metadata, ServiceDockerData) + assert isinstance(image_metadata, ServiceMetaDataPublished) assert is_function_service(image_metadata.key) diff --git a/packages/models-library/tests/test_services.py b/packages/models-library/tests/test_services.py index 8a3311daad0..8391159454b 100644 --- 
a/packages/models-library/tests/test_services.py +++ b/packages/models-library/tests/test_services.py @@ -10,14 +10,12 @@ import pytest from models_library.basic_regex import VERSION_RE -from models_library.services import ( +from models_library.services import BootOption, ServiceBase, ServiceMetaDataPublished +from models_library.services_regex import ( COMPUTATIONAL_SERVICE_KEY_FORMAT, DYNAMIC_SERVICE_KEY_FORMAT, SERVICE_ENCODED_KEY_RE, SERVICE_KEY_RE, - BootOption, - ServiceDockerData, - _BaseServiceCommonDataModel, ) @@ -32,7 +30,7 @@ def minimal_service_common_data() -> dict[str, Any]: def test_create_minimal_service_common_data( minimal_service_common_data: dict[str, Any] ): - service = _BaseServiceCommonDataModel(**minimal_service_common_data) + service = ServiceBase(**minimal_service_common_data) assert service.name == minimal_service_common_data["name"] assert service.description == minimal_service_common_data["description"] @@ -43,7 +41,7 @@ def test_node_with_empty_thumbnail(minimal_service_common_data: dict[str, Any]): service_data = minimal_service_common_data service_data.update({"thumbnail": ""}) - service = _BaseServiceCommonDataModel(**minimal_service_common_data) + service = ServiceBase(**minimal_service_common_data) assert service.name == minimal_service_common_data["name"] assert service.description == minimal_service_common_data["description"] @@ -58,7 +56,7 @@ def test_node_with_thumbnail(minimal_service_common_data: dict[str, Any]): } ) - service = _BaseServiceCommonDataModel(**minimal_service_common_data) + service = ServiceBase(**minimal_service_common_data) assert service.name == minimal_service_common_data["name"] assert service.description == minimal_service_common_data["description"] @@ -203,7 +201,7 @@ def test_service_docker_data_labels_convesion(): convension_breaking_fields: set[tuple[str, str]] = set() fiedls_with_aliases: list[tuple[str, str]] = [ - (x.name, x.alias) for x in ServiceDockerData.__fields__.values() + (x.name, x.alias) for x in ServiceMetaDataPublished.__fields__.values() ] for name, alias in fiedls_with_aliases: diff --git a/packages/models-library/tests/test_services_io.py b/packages/models-library/tests/test_services_io.py index 0d9b96e1553..b0db489a20d 100644 --- a/packages/models-library/tests/test_services_io.py +++ b/packages/models-library/tests/test_services_io.py @@ -5,7 +5,7 @@ from pathlib import Path import yaml -from models_library.services import ServiceDockerData, ServiceInput +from models_library.services import ServiceInput, ServiceMetaDataPublished from pint import Unit, UnitRegistry @@ -15,9 +15,9 @@ def test_service_port_units(project_tests_dir: Path): data = yaml.safe_load( (project_tests_dir / "data" / "metadata-sleeper-2.0.2.yaml").read_text() ) - print(ServiceDockerData.schema_json(indent=2)) + print(ServiceMetaDataPublished.schema_json(indent=2)) - service_meta = ServiceDockerData.parse_obj(data) + service_meta = ServiceMetaDataPublished.parse_obj(data) assert service_meta.inputs for input_nameid, input_meta in service_meta.inputs.items(): diff --git a/packages/service-integration/src/service_integration/_meta.py b/packages/service-integration/src/service_integration/_meta.py index 5ae5c009ccf..edd84d14f86 100644 --- a/packages/service-integration/src/service_integration/_meta.py +++ b/packages/service-integration/src/service_integration/_meta.py @@ -1,6 +1,6 @@ from importlib.metadata import distribution, version -from models_library.services import LATEST_INTEGRATION_VERSION +from models_library.services_constants 
import LATEST_INTEGRATION_VERSION current_distribution = distribution("simcore-service-integration") project_name: str = current_distribution.metadata["Name"] diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index b3eb998e7dd..cd4ecf6148b 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -24,12 +24,10 @@ RestartPolicy, ) from models_library.service_settings_nat_rule import NATRule -from models_library.services import ( +from models_library.services import BootOptions, ServiceMetaDataPublished, ServiceType +from models_library.services_regex import ( COMPUTATIONAL_SERVICE_KEY_FORMAT, DYNAMIC_SERVICE_KEY_FORMAT, - BootOptions, - ServiceDockerData, - ServiceType, ) from models_library.utils.labels_annotations import ( OSPARC_LABEL_PREFIXES, @@ -88,7 +86,7 @@ def from_yaml(cls, path: Path) -> "DockerComposeOverwriteConfig": return model -class MetadataConfig(ServiceDockerData): +class MetadataConfig(ServiceMetaDataPublished): """Content of metadata.yml configuration file Details about general info and I/O configuration of the service diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 5c97c4c5be5..5fe16c48ff2 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -1,12 +1,14 @@ import logging import os +from collections.abc import Callable from dataclasses import dataclass from pathlib import Path from pprint import pformat -from typing import Any, Callable +from typing import Any from models_library.api_schemas_storage import LinkType -from models_library.services import PROPERTY_KEY_RE, BaseServiceIOModel +from models_library.basic_regex import PROPERTY_KEY_RE +from models_library.services_io import BaseServiceIOModel from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator from pydantic.tools import parse_obj_as from servicelib.progress_bar import ProgressBarData diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py index e2a5ab5eecd..3f34ee06b2c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py @@ -1,7 +1,7 @@ import re -from typing import ItemsView, Iterator, KeysView, ValuesView +from collections.abc import ItemsView, Iterator, KeysView, ValuesView -from models_library.services import PROPERTY_KEY_RE +from models_library.basic_regex import PROPERTY_KEY_RE from pydantic import BaseModel, ConstrainedStr from ..node_ports_common.exceptions import UnboundPortError @@ -19,7 +19,7 @@ def __getitem__(self, key: int | PortKey) -> Port: if isinstance(key, int): if key < len(self.__root__): key = list(self.__root__.keys())[key] - if not key in self.__root__: + if key not in self.__root__: raise UnboundPortError(key) assert isinstance(key, str) # nosec return self.__root__[key] diff --git a/scripts/common-package.Makefile b/scripts/common-package.Makefile index 7dccecef79e..0fff1785d1c 100644 --- a/scripts/common-package.Makefile +++ b/scripts/common-package.Makefile @@ -38,7 +38,6 @@ info: ## displays package info @echo ' PACKAGE_VERSION : ${PACKAGE_VERSION}' - # # SUBTASKS # diff --git 
a/scripts/common.Makefile b/scripts/common.Makefile index 18e356a3361..3d29b122f9a 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -50,6 +50,11 @@ DOT_ENV_FILE = $(abspath $(REPO_BASE_DIR)/.env) # utils get_my_ip := $(shell hostname --all-ip-addresses | cut --delimiter=" " --fields=1) +IGNORE_DIR=.ignore + +$(IGNORE_DIR): # Used to produce .ignore folders which are auto excluded from version control (see .gitignore) + mkdir -p $(IGNORE_DIR) + # # SHORTCUTS # @@ -135,6 +140,19 @@ pyupgrade: ## upgrades python syntax for newer versions of the language (SEE htt pylint: $(REPO_BASE_DIR)/.pylintrc ## runs pylint (python linter) on src and tests folders @pylint --rcfile="$(REPO_BASE_DIR)/.pylintrc" -v $(CURDIR)/src $(CURDIR)/tests + +.PHONY: doc-uml +doc-uml: $(IGNORE_DIR) ## Create UML diagrams for classes and modules in current package. e.g. (export DOC_UML_PATH_SUFFIX="services*"; export DOC_UML_CLASS=models_library.api_schemas_catalog.services.ServiceGet; make doc-uml) + @pyreverse \ + --verbose \ + --output=svg \ + --output-directory=$(IGNORE_DIR) \ + --project=$(if ${PACKAGE_NAME},${PACKAGE_NAME},${SERVICE_NAME})${DOC_UML_PATH_SUFFIX} \ + $(if ${DOC_UML_CLASS},--class=${DOC_UML_CLASS},) \ + ${SRC_DIR}$(if ${DOC_UML_PATH_SUFFIX},/${DOC_UML_PATH_SUFFIX},) + @echo Outputs in $(realpath $(IGNORE_DIR)) + + .PHONY: ruff ruff: $(REPO_BASE_DIR)/.ruff.toml ## runs ruff (python fast linter) on src and tests folders @ruff check \ @@ -158,6 +176,7 @@ github-workflow-job: ## runs a github workflow job using act locally, run using $(SCRIPTS_DIR)/act.bash ../.. ${job} + .PHONY: version-patch version-minor version-major version-patch: ## commits version with bug fixes not affecting the cookiecuter config $(_bumpversion) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py index d671f765536..a99017852a5 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py @@ -3,7 +3,8 @@ import packaging.version from models_library.basic_regex import PUBLIC_VARIABLE_NAME_RE -from models_library.services import COMPUTATIONAL_SERVICE_KEY_RE, ServiceDockerData +from models_library.services import ServiceMetaDataPublished +from models_library.services_regex import COMPUTATIONAL_SERVICE_KEY_RE from packaging.version import Version from pydantic import BaseModel, ConstrainedStr, Extra, Field, HttpUrl @@ -67,7 +68,7 @@ class Config: } @classmethod - def create_from_image(cls, image_meta: ServiceDockerData) -> "Solver": + def create_from_image(cls, image_meta: ServiceMetaDataPublished) -> "Solver": data = image_meta.dict( include={"name", "key", "version", "description", "contact"}, ) diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py index 9043febef7f..3c9bf8f886f 100644 --- a/services/api-server/src/simcore_service_api_server/services/catalog.py +++ b/services/api-server/src/simcore_service_api_server/services/catalog.py @@ -8,7 +8,7 @@ from fastapi import FastAPI, status from models_library.emails import LowerCaseEmailStr -from models_library.services import ServiceDockerData, ServiceType +from models_library.services import ServiceMetaDataPublished, ServiceType from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as from settings_library.catalog import 
CatalogSettings @@ -23,7 +23,7 @@ SolverNameVersionPair = tuple[SolverKeyId, str] -class TruncatedCatalogServiceOut(ServiceDockerData): +class TruncatedCatalogServiceOut(ServiceMetaDataPublished): """ This model is used to truncate the response of the catalog, whose schema is in services/catalog/src/simcore_service_catalog/models/schemas/services.py::ServiceOut diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index db57f42dc97..5930db55a29 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -16,7 +16,7 @@ import pytest from faker import Faker from fastapi import FastAPI -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, HttpUrl, parse_obj_as from respx import MockRouter @@ -313,7 +313,7 @@ async def test_run_solver_job( example = next( e - for e in ServiceDockerData.Config.schema_extra["examples"] + for e in ServiceMetaDataPublished.Config.schema_extra["examples"] if "boot-options" in e ) diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json index 6d66c5918c7..bf8f8aee6b6 100644 --- a/services/catalog/openapi.json +++ b/services/catalog/openapi.json @@ -596,7 +596,6 @@ "description": "Affiliation of the author" } }, - "additionalProperties": false, "type": "object", "required": [ "name", @@ -628,7 +627,6 @@ "description": "Link to the status" } }, - "additionalProperties": false, "type": "object", "required": [ "name", @@ -2053,7 +2051,7 @@ "name": { "type": "string", "title": "Name", - "description": "short, human readable name for the node", + "description": "Display name: short, human readable name for the node", "example": "Fast Counter" }, "thumbnail": { @@ -2207,7 +2205,7 @@ "outputs" ], "title": "ServiceGet", - "description": "Static metadata for a service injected in the image labels\n\nNOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image", + "description": "Service metadata at publication time\n\n- read-only (can only be changed overwriting the image labels in the registry)\n- base metaddata\n- injected in the image labels\n\nNOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image", "example": { "name": "File Picker", "description": "description", @@ -2306,7 +2304,8 @@ "unit": { "type": "string", "title": "Unit", - "description": "Units, when it refers to a physical quantity" + "description": "Units, when it refers to a physical quantity", + "deprecated": true }, "defaultValue": { "anyOf": [ @@ -2323,7 +2322,8 @@ "type": "string" } ], - "title": "Defaultvalue" + "title": "Defaultvalue", + "deprecated": true }, "widget": { "allOf": [ @@ -2388,7 +2388,8 @@ "unit": { "type": "string", "title": "Unit", - "description": "Units, when it refers to a physical quantity" + "description": "Units, when it refers to a physical quantity", + "deprecated": true }, "widget": { "allOf": [ diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index 8461c7f3923..c9b80c2d5b4 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ 
b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -17,7 +17,7 @@ from fastapi import FastAPI from models_library.function_services_catalog.api import iter_service_docker_data -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB from packaging.version import Version from pydantic import ValidationError @@ -35,7 +35,7 @@ ServiceKey = NewType("ServiceKey", str) ServiceVersion = NewType("ServiceVersion", str) ServiceDockerDataMap: TypeAlias = dict[ - tuple[ServiceKey, ServiceVersion], ServiceDockerData + tuple[ServiceKey, ServiceVersion], ServiceMetaDataPublished ] @@ -52,7 +52,7 @@ async def _list_services_in_registry( } for service in registry_services: try: - service_data = ServiceDockerData.parse_obj(service) + service_data = ServiceMetaDataPublished.parse_obj(service) services[(service_data.key, service_data.version)] = service_data except ValidationError: # noqa: PERF203 @@ -79,7 +79,9 @@ async def _list_services_in_database( async def _create_services_in_database( app: FastAPI, service_keys: set[tuple[ServiceKey, ServiceVersion]], - services_in_registry: dict[tuple[ServiceKey, ServiceVersion], ServiceDockerData], + services_in_registry: dict[ + tuple[ServiceKey, ServiceVersion], ServiceMetaDataPublished + ], ) -> None: """Adds a new service in the database @@ -94,7 +96,7 @@ def _by_version(t: tuple[ServiceKey, ServiceVersion]) -> Version: sorted_services = sorted(service_keys, key=_by_version) for service_key, service_version in sorted_services: - service_metadata: ServiceDockerData = services_in_registry[ + service_metadata: ServiceMetaDataPublished = services_in_registry[ (service_key, service_version) ] ## Set deprecation date to null (is valid date value for postgres) @@ -129,7 +131,7 @@ async def _ensure_registry_and_database_are_synced(app: FastAPI) -> None: Notice that a services here refers to a 2-tuple (key, version) """ services_in_registry: dict[ - tuple[ServiceKey, ServiceVersion], ServiceDockerData + tuple[ServiceKey, ServiceVersion], ServiceMetaDataPublished ] = await _list_services_in_registry(app) services_in_db: set[ diff --git a/services/catalog/src/simcore_service_catalog/services/access_rights.py b/services/catalog/src/simcore_service_catalog/services/access_rights.py index 89d2b3ad751..a86e0627988 100644 --- a/services/catalog/src/simcore_service_catalog/services/access_rights.py +++ b/services/catalog/src/simcore_service_catalog/services/access_rights.py @@ -9,7 +9,7 @@ from urllib.parse import quote_plus from fastapi import FastAPI -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.services_db import ServiceAccessRightsAtDB from packaging.version import Version from pydantic.types import PositiveInt @@ -25,11 +25,11 @@ OLD_SERVICES_DATE: datetime = datetime(2020, 8, 19) -def _is_frontend_service(service: ServiceDockerData) -> bool: +def _is_frontend_service(service: ServiceMetaDataPublished) -> bool: return "/frontend/" in service.key -async def _is_old_service(app: FastAPI, service: ServiceDockerData) -> bool: +async def _is_old_service(app: FastAPI, service: ServiceMetaDataPublished) -> bool: # get service build date client = get_director_api(app) data = cast( @@ -48,7 +48,7 @@ async def _is_old_service(app: FastAPI, service: ServiceDockerData) -> bool: async def evaluate_default_policy( - app: FastAPI, 
service: ServiceDockerData + app: FastAPI, service: ServiceMetaDataPublished ) -> tuple[PositiveInt | None, list[ServiceAccessRightsAtDB]]: """Given a service, it returns the owner's group-id (gid) and a list of access rights following default access-rights policies @@ -101,7 +101,7 @@ async def evaluate_default_policy( async def evaluate_auto_upgrade_policy( - service_metadata: ServiceDockerData, services_repo: ServicesRepository + service_metadata: ServiceMetaDataPublished, services_repo: ServicesRepository ) -> list[ServiceAccessRightsAtDB]: # AUTO-UPGRADE PATCH policy: # diff --git a/services/catalog/src/simcore_service_catalog/services/function_services.py b/services/catalog/src/simcore_service_catalog/services/function_services.py index c8bd85f2952..ae6b7224a69 100644 --- a/services/catalog/src/simcore_service_catalog/services/function_services.py +++ b/services/catalog/src/simcore_service_catalog/services/function_services.py @@ -11,12 +11,12 @@ is_function_service, iter_service_docker_data, ) -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished assert is_function_service # nosec -def _as_dict(model_instance: ServiceDockerData) -> dict[str, Any]: +def _as_dict(model_instance: ServiceMetaDataPublished) -> dict[str, Any]: # FIXME: In order to convert to ServiceOut, now we have to convert back to front-end service because of alias # FIXME: set the same policy for f/e and director datasets! return cast(dict[str, Any], model_instance.dict(by_alias=True, exclude_unset=True)) diff --git a/services/catalog/tests/unit/test_services_function_services.py b/services/catalog/tests/unit/test_services_function_services.py index 17ce2c63721..4c4235ce9ae 100644 --- a/services/catalog/tests/unit/test_services_function_services.py +++ b/services/catalog/tests/unit/test_services_function_services.py @@ -7,7 +7,7 @@ import pytest -from models_library.api_schemas_catalog.services import ServiceDockerData +from models_library.api_schemas_catalog.services import ServiceMetaDataPublished from simcore_service_catalog.services.function_services import ( is_function_service, iter_service_docker_data, @@ -17,7 +17,7 @@ @pytest.mark.parametrize( "image_metadata", iter_service_docker_data(), ids=lambda obj: obj.name ) -def test_create_services_metadata(image_metadata: ServiceDockerData): - assert isinstance(image_metadata, ServiceDockerData) +def test_create_services_metadata(image_metadata: ServiceMetaDataPublished): + assert isinstance(image_metadata, ServiceMetaDataPublished) assert is_function_service(image_metadata.key) diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 3428ad188f9..25c5c038fe7 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -17,7 +17,7 @@ from fastapi import FastAPI from fastapi.testclient import TestClient from models_library.products import ProductName -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.users import UserID from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture @@ -369,7 +369,9 @@ def _fake_factory(**overrides): data = deepcopy(template) data.update(**overrides) - assert ServiceDockerData.parse_obj(data), "Invalid fake data. Out of sync!" + assert ServiceMetaDataPublished.parse_obj( + data + ), "Invalid fake data. Out of sync!" 
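# Usage sketch (hypothetical overrides; any key of the example template can be
# overridden this way):
#
#   fake_metadata = _fake_factory(name="my-service", version="1.0.0")
#   model = ServiceMetaDataPublished.parse_obj(fake_metadata)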
return data return _fake_factory diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py index 0edcda200a6..634b7550add 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py +++ b/services/catalog/tests/unit/with_dbs/test_api_routes_services__list.py @@ -11,7 +11,7 @@ from models_library.api_schemas_catalog.services import ServiceGet from models_library.products import ProductName -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.users import UserID from pydantic import parse_obj_as from respx.router import MockRouter @@ -55,7 +55,9 @@ async def test_list_services_with_details( url = URL("/v0/services").with_query({"user_id": user_id, "details": "true"}) # now fake the director such that it returns half the services - fake_registry_service_data = ServiceDockerData.Config.schema_extra["examples"][0] + fake_registry_service_data = ServiceMetaDataPublished.Config.schema_extra[ + "examples" + ][0] mocked_director_service_api.get("/services", name="list_services").respond( 200, @@ -254,7 +256,9 @@ async def test_list_services_that_are_deprecated( assert received_service.deprecated == deprecation_date # for details, the director must return the same service - fake_registry_service_data = ServiceDockerData.Config.schema_extra["examples"][0] + fake_registry_service_data = ServiceMetaDataPublished.Config.schema_extra[ + "examples" + ][0] mocked_director_service_api.get("/services", name="list_services").respond( 200, json={ diff --git a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py index 55bee7c0ff8..257690f3ba2 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py @@ -7,7 +7,7 @@ from fastapi import FastAPI from models_library.groups import GroupAtDB from models_library.products import ProductName -from models_library.services import ServiceDockerData, ServiceVersion +from models_library.services import ServiceMetaDataPublished, ServiceVersion from models_library.services_db import ServiceAccessRightsAtDB from pydantic import parse_obj_as from simcore_service_catalog.db.repositories.services import ServicesRepository @@ -111,8 +111,8 @@ async def test_auto_upgrade_policy( # SETUP --- MOST_UPDATED_EXAMPLE = -1 - new_service_metadata = ServiceDockerData.parse_obj( - ServiceDockerData.Config.schema_extra["examples"][MOST_UPDATED_EXAMPLE] + new_service_metadata = ServiceMetaDataPublished.parse_obj( + ServiceMetaDataPublished.Config.schema_extra["examples"][MOST_UPDATED_EXAMPLE] ) new_service_metadata.version = parse_obj_as(ServiceVersion, "1.0.11") diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py index 65a81b56f2d..c0448caa28f 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py @@ -1,7 +1,7 @@ import re from models_library.basic_types import VERSION_RE -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from packaging import version from pydantic import 
BaseModel, ByteSize, Extra, Field, validator @@ -105,5 +105,5 @@ def get_progress_regexp(self) -> re.Pattern[str]: assert set(ImageLabels.__fields__).issubset( - ServiceDockerData.__fields__ + ServiceMetaDataPublished.__fields__ ), "ImageLabels must be compatible with ServiceDockerData" diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 950913fdda3..4aff3a1fd3d 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -38,7 +38,7 @@ ) from faker import Faker from models_library.basic_types import EnvVarKey -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.services_resources import BootMode from packaging import version from pydantic import AnyUrl, SecretStr, parse_obj_as @@ -438,7 +438,7 @@ def mocked_get_image_labels( integration_version: version.Version, mocker: MockerFixture ) -> mock.Mock: labels: ImageLabels = parse_obj_as( - ImageLabels, ServiceDockerData.Config.schema_extra["examples"][0] + ImageLabels, ServiceMetaDataPublished.Config.schema_extra["examples"][0] ) labels.integration_version = f"{integration_version}" return mocker.patch( diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index e8fb39e0911..0fc2b7ee1a9 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -10,12 +10,8 @@ from models_library.projects_nodes import InputsDict, NodeID, OutputsDict from models_library.projects_state import RunningState from models_library.resource_tracker import HardwareInfo -from models_library.services import ( - SERVICE_KEY_RE, - ServiceInputsDict, - ServiceOutput, - ServicePortKey, -) +from models_library.services import ServiceInputsDict, ServiceOutput, ServicePortKey +from models_library.services_regex import SERVICE_KEY_RE from models_library.services_resources import BootMode from pydantic import BaseModel, ByteSize, Extra, Field, parse_obj_as, validator from pydantic.types import PositiveInt diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index 19641b217f1..a33f689e9da 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -22,9 +22,9 @@ SimcoreServiceSettingsLabel, ) from models_library.services import ( - ServiceDockerData, ServiceKey, ServiceKeyVersion, + ServiceMetaDataPublished, ServiceVersion, ) from models_library.services_resources import ( @@ -72,7 +72,7 @@ # # Examples are nodes like file-picker or parameter/* # -_FRONTEND_SERVICES_CATALOG: dict[str, ServiceDockerData] = { +_FRONTEND_SERVICES_CATALOG: dict[str, ServiceMetaDataPublished] = { meta.key: meta for meta in iter_service_docker_data() } @@ -82,14 +82,17 @@ async def _get_service_details( user_id: UserID, product_name: str, node: ServiceKeyVersion, -) -> ServiceDockerData: +) -> ServiceMetaDataPublished: service_details = await catalog_client.get_service( user_id, node.key, node.version, product_name, ) - return ServiceDockerData.construct(**service_details) + 
obj: ServiceMetaDataPublished = ServiceMetaDataPublished.construct( + **service_details + ) + return obj def _compute_node_requirements( @@ -133,7 +136,9 @@ async def _get_node_infos( user_id: UserID, product_name: str, node: ServiceKeyVersion, -) -> tuple[ServiceDockerData | None, ServiceExtras | None, SimcoreServiceLabels | None]: +) -> tuple[ + ServiceMetaDataPublished | None, ServiceExtras | None, SimcoreServiceLabels | None +]: if to_node_class(node.key) == NodeClass.FRONTEND: return ( _FRONTEND_SERVICES_CATALOG.get(node.key, None), @@ -142,7 +147,7 @@ async def _get_node_infos( ) result: tuple[ - ServiceDockerData, ServiceExtras, SimcoreServiceLabels + ServiceMetaDataPublished, ServiceExtras, SimcoreServiceLabels ] = await asyncio.gather( _get_service_details(catalog_client, user_id, product_name, node), director_client.get_service_extras(node.key, node.version), diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index bbb10c03600..0abbc18f593 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -3,7 +3,8 @@ from typing import Any from models_library.projects_state import RunningState -from models_library.services import SERVICE_KEY_RE, ServiceKeyVersion +from models_library.services import ServiceKeyVersion +from models_library.services_regex import SERVICE_KEY_RE from models_library.users import UserID from pydantic import parse_obj_as from servicelib.utils import logged_gather diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 290422710ac..e12bdd7bffa 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -40,7 +40,7 @@ from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.service_settings_labels import SimcoreServiceLabels -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.services_resources import ( DEFAULT_SINGLE_SERVICE_NAME, ServiceResourcesDict, @@ -99,11 +99,11 @@ def minimal_configuration( @pytest.fixture(scope="session") -def fake_service_details(mocks_dir: Path) -> ServiceDockerData: +def fake_service_details(mocks_dir: Path) -> ServiceMetaDataPublished: fake_service_path = mocks_dir / "fake_service.json" assert fake_service_path.exists() fake_service_data = json.loads(fake_service_path.read_text()) - return ServiceDockerData(**fake_service_data) + return ServiceMetaDataPublished(**fake_service_data) @pytest.fixture @@ -130,7 +130,7 @@ def fake_service_labels() -> dict[str, Any]: @pytest.fixture def mocked_director_service_fcts( minimal_app: FastAPI, - fake_service_details: ServiceDockerData, + fake_service_details: ServiceMetaDataPublished, fake_service_extras: ServiceExtras, fake_service_labels: dict[str, Any], ) -> Iterator[respx.MockRouter]: @@ -167,7 +167,7 @@ def mocked_director_service_fcts( @pytest.fixture def mocked_catalog_service_fcts( minimal_app: FastAPI, - fake_service_details: ServiceDockerData, + fake_service_details: ServiceMetaDataPublished, fake_service_resources: ServiceResourcesDict, ) -> Iterator[respx.MockRouter]: def 
_mocked_service_resources(request) -> httpx.Response: @@ -216,7 +216,7 @@ def _mocked_services_details( @pytest.fixture def mocked_catalog_service_fcts_deprecated( minimal_app: FastAPI, - fake_service_details: ServiceDockerData, + fake_service_details: ServiceMetaDataPublished, fake_service_extras: ServiceExtras, ): def _mocked_services_details( diff --git a/services/dynamic-sidecar/openapi.json b/services/dynamic-sidecar/openapi.json index f9e2a73aeee..f8f7305aa60 100644 --- a/services/dynamic-sidecar/openapi.json +++ b/services/dynamic-sidecar/openapi.json @@ -1189,7 +1189,8 @@ "unit": { "type": "string", "title": "Unit", - "description": "Units, when it refers to a physical quantity" + "description": "Units, when it refers to a physical quantity", + "deprecated": true }, "widget": { "allOf": [ diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 621a50ff25a..9a66487c8be 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -670,7 +670,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_models_library.services.ServicePortKey__' + $ref: '#/components/schemas/Envelope_list_models_library.services_types.ServicePortKey__' /v0/catalog/services/{service_key}/{service_version}/outputs: get: tags: @@ -698,7 +698,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_models_library.services.ServicePortKey__' + $ref: '#/components/schemas/Envelope_list_models_library.services_types.ServicePortKey__' /v0/catalog/services/{service_key}/{service_version}/outputs/{output_key}: get: tags: @@ -782,7 +782,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_models_library.services.ServicePortKey__' + $ref: '#/components/schemas/Envelope_list_models_library.services_types.ServicePortKey__' /v0/catalog/services/{service_key}/{service_version}/resources: get: tags: @@ -2991,7 +2991,7 @@ paths: '403': description: ProjectInvalidRightsError '404': - description: ProjectNotFoundError, UserDefaultWalletNotFoundError + description: UserDefaultWalletNotFoundError, ProjectNotFoundError '409': description: ProjectTooManyProjectOpenedError '422': @@ -5277,7 +5277,6 @@ components: title: Affiliation type: string description: Affiliation of the author - additionalProperties: false Badge: title: Badge required: @@ -5304,7 +5303,6 @@ components: type: string description: Link to the status format: uri - additionalProperties: false Body_service_submission: title: Body_service_submission required: @@ -6648,8 +6646,8 @@ components: $ref: '#/components/schemas/ProjectsCommentsAPI' error: title: Error - Envelope_list_models_library.services.ServicePortKey__: - title: Envelope[list[models_library.services.ServicePortKey]] + Envelope_list_models_library.services_types.ServicePortKey__: + title: Envelope[list[models_library.services_types.ServicePortKey]] type: object properties: data: @@ -9842,6 +9840,7 @@ components: title: Unit type: string description: Units, when it refers to a physical quantity + deprecated: true defaultValue: title: Defaultvalue anyOf: @@ -9849,6 +9848,7 @@ components: - type: integer - type: number - type: string + deprecated: true widget: title: Widget allOf: @@ -9946,6 +9946,7 @@ components: title: Unit type: string description: Units, when it refers to a physical quantity + 
deprecated: true widget: title: Widget allOf: @@ -11443,7 +11444,7 @@ components: name: title: Name type: string - description: short, human readable name for the node + description: 'Display name: short, human readable name for the node' example: Fast Counter thumbnail: title: Thumbnail @@ -11557,7 +11558,14 @@ components: title: Owner type: string format: email - description: 'Static metadata for a service injected in the image labels + description: 'Service metadata at publication time + + + - read-only (can only be changed overwriting the image labels in the registry) + + - base metaddata + + - injected in the image labels NOTE: This model is serialized in .osparc/metadata.yml and in the labels of diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py index 4f0f5a89203..186b2edcd76 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py @@ -14,7 +14,7 @@ from models_library.function_services_catalog import is_iterator_service from models_library.projects import ProjectID from models_library.projects_nodes import Node, NodeID, OutputID, OutputTypes -from models_library.services import ServiceDockerData +from models_library.services import ServiceMetaDataPublished from models_library.utils.json_serialization import json_dumps from pydantic import BaseModel, ValidationError from pydantic.fields import Field @@ -49,7 +49,9 @@ def _build_project_iterations(project_nodes: NodesDict) -> list[_ParametersNodes """ # select iterable nodes - iterable_nodes_defs: list[ServiceDockerData] = [] # schemas of iterable nodes + iterable_nodes_defs: list[ + ServiceMetaDataPublished + ] = [] # schemas of iterable nodes iterable_nodes: list[Node] = [] # iterable nodes iterable_nodes_ids: list[NodeID] = [] From 64cb5ee75243b52d8ddb6bf2d75148abfaefd45c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 18 Jun 2024 15:28:02 +0200 Subject: [PATCH 042/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Refuse=20fo?= =?UTF-8?q?lder=20drop=20on=20Workbech=20(#5957)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/file/FileDrop.js | 26 +++++++++++++++---- .../class/osparc/workbench/WorkbenchUI.js | 21 ++++++++------- 2 files changed, 32 insertions(+), 15 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/file/FileDrop.js b/services/static-webserver/client/source/class/osparc/file/FileDrop.js index 6f7adb2bd82..f7aa0734b2f 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileDrop.js +++ b/services/static-webserver/client/source/class/osparc/file/FileDrop.js @@ -87,6 +87,21 @@ qx.Class.define("osparc.file.FileDrop", { return { "border-width": "0px" }; + }, + + getFilesFromEvent: function(e) { + const files = []; + if (e.dataTransfer.items) { + const items = e.dataTransfer.items; + for (let i = 0; i < items.length; i++) { + // If dropped items aren't files, reject them + if (items[i].webkitGetAsEntry()["isFile"]) { + const file = items[i].getAsFile(); + files.push(file); + } + } + } + return files; } }, @@ -276,17 +291,18 @@ qx.Class.define("osparc.file.FileDrop", { this.__isDraggingFile = false; if ("dataTransfer" in e) { - const files = e.dataTransfer.files; - if (files.length === 1) { - const fileList = e.dataTransfer.files; 
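// A minimal, framework-free sketch (not taken from the patch) of the
// folder-rejection idea introduced above in FileDrop.getFilesFromEvent():
// dropped directories still appear in e.dataTransfer.items, so each item is
// checked through DataTransferItem.webkitGetAsEntry() -- non-standard but
// widely supported, hence the guard -- and only real files are kept.
// "dropZone" and "uploadFile" are illustrative names, not osparc API.
const dropZone = document.getElementById("drop-zone"); // illustrative target
dropZone.addEventListener("drop", e => {
  e.preventDefault();
  const files = [];
  for (const item of Array.from(e.dataTransfer.items || [])) {
    const entry = item.webkitGetAsEntry ? item.webkitGetAsEntry() : null;
    if (entry && entry.isFile) {
      files.push(item.getAsFile());
    }
  }
  if (files.length === 0) {
    console.error("Folders are not accepted. You might want to upload a zip file.");
  } else if (files.length > 1) {
    console.error("Only one file at a time is accepted.");
  } else {
    uploadFile(files[0]); // hypothetical upload hook
  }
});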
- if (fileList.length) { + const files = osparc.file.FileDrop.getFilesFromEvent(e); + if (files.length) { + if (files.length === 1) { this.fireDataEvent("localFileDropped", { data: files, pos: this.__pointerFileEventToScreenPos(e) }); + } else { + osparc.FlashMessenger.getInstance().logAs(this.tr("Only one file at a time is accepted."), "ERROR"); } } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Only one file is accepted"), "ERROR"); + osparc.FlashMessenger.getInstance().logAs(this.tr("Folders are not accepted. You might want to upload a zip file."), "ERROR"); } } }, diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index 9342375f4b6..0aae5eec4a9 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -1939,24 +1939,25 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { if ("dataTransfer" in e) { this.__isDraggingFile = false; - const files = e.dataTransfer.files; - if (files.length === 1) { - const pos = { - x: e.offsetX, - y: e.offsetY - }; - const fileList = e.dataTransfer.files; - if (fileList.length) { + const files = osparc.file.FileDrop.getFilesFromEvent(e); + if (files.length) { + if (files.length === 1) { + const pos = { + x: e.offsetX, + y: e.offsetY + }; const service = qx.data.marshal.Json.createModel(osparc.service.Utils.getFilePicker()); const nodeUI = await this.__addNode(service, pos); if (nodeUI) { const filePicker = new osparc.file.FilePicker(nodeUI.getNode(), "workbench"); - filePicker.uploadPendingFiles(fileList); + filePicker.uploadPendingFiles(files); filePicker.addListener("fileUploaded", () => this.fireDataEvent("nodeSelected", nodeUI.getNodeId()), this); } + } else { + osparc.FlashMessenger.getInstance().logAs(this.tr("Only one file at a time is accepted."), "ERROR"); } } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Only one file is accepted"), "ERROR"); + osparc.FlashMessenger.getInstance().logAs(this.tr("Folders are not accepted. 
You might want to upload a zip file."), "ERROR"); } } }, From 2bfa6bf70ec824afb4b18bf8913e1b94bb25f8d9 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 18 Jun 2024 16:21:51 +0200 Subject: [PATCH 043/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Guided=20To?= =?UTF-8?q?urs:=20add=20tours=20for=20TIP=20(#5933)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/About.js | 5 + .../source/class/osparc/FlashMessenger.js | 2 +- .../source/class/osparc/auth/LoginPage.js | 16 +- .../class/osparc/dashboard/NewStudies.js | 31 ++-- .../class/osparc/dashboard/ResourceFilter.js | 2 + .../class/osparc/dashboard/StudyBrowser.js | 8 +- .../osparc/desktop/credits/AutoRecharge.js | 13 +- .../desktop/credits/BillingCenterWindow.js | 3 + .../class/osparc/desktop/credits/Usage.js | 2 +- .../osparc/desktop/wallets/WalletListItem.js | 8 +- .../osparc/desktop/wallets/WalletsList.js | 11 +- .../class/osparc/navigation/UserMenu.js | 1 + .../osparc/notification/Notifications.js | 2 +- .../notification/NotificationsContainer.js | 3 +- .../source/class/osparc/product/Utils.js | 6 +- .../product/tours/{Utils.js => Tours.js} | 21 ++- .../class/osparc/product/tours/s4l/Tours.js | 40 ----- .../osparc/product/tours/s4llite/Tours.js | 40 ----- .../source/class/osparc/store/Support.js | 20 +-- .../client/source/class/osparc/task/Tasks.js | 2 +- .../source/class/osparc/theme/Appearance.js | 2 +- .../source/class/osparc/theme/ColorDark.js | 1 + .../source/class/osparc/theme/ColorLight.js | 3 +- .../client/source/class/osparc/tours/List.js | 4 +- .../source/class/osparc/tours/Manager.js | 16 +- .../class/osparc/ui/basic/FloatingHelper.js | 37 +++-- .../client/source/class/osparc/utils/Utils.js | 18 +++ .../client/source/resource/hint/hint.css | 30 ++-- .../source/resource/osparc/new_studies.json | 7 + .../resource/osparc/tours/s4l_tours.json | 27 ++-- .../resource/osparc/tours/tis_tours.json | 143 ++++++++++++++++++ 31 files changed, 327 insertions(+), 197 deletions(-) rename services/static-webserver/client/source/class/osparc/product/tours/{Utils.js => Tours.js} (52%) delete mode 100644 services/static-webserver/client/source/class/osparc/product/tours/s4l/Tours.js delete mode 100644 services/static-webserver/client/source/class/osparc/product/tours/s4llite/Tours.js create mode 100644 services/static-webserver/client/source/resource/osparc/tours/tis_tours.json diff --git a/services/static-webserver/client/source/class/osparc/About.js b/services/static-webserver/client/source/class/osparc/About.js index 9fbf145d01f..803d5caa9e8 100644 --- a/services/static-webserver/client/source/class/osparc/About.js +++ b/services/static-webserver/client/source/class/osparc/About.js @@ -51,6 +51,11 @@ qx.Class.define("osparc.About", { __buildLayout: function() { const color = qx.theme.manager.Color.getInstance().resolve("text"); + const createReleaseNotesLink = osparc.utils.Utils.createReleaseNotesLink().set({ + font: "link-label-14" + }); + this.add(createReleaseNotesLink); + const poweredByLabel = new qx.ui.basic.Label().set({ font: "text-14", maxWidth: this.self().MAX_WIDTH - 2*this.self().PADDING, diff --git a/services/static-webserver/client/source/class/osparc/FlashMessenger.js b/services/static-webserver/client/source/class/osparc/FlashMessenger.js index b41e94ff167..2256d3760dc 100644 --- a/services/static-webserver/client/source/class/osparc/FlashMessenger.js +++ 
b/services/static-webserver/client/source/class/osparc/FlashMessenger.js @@ -41,7 +41,7 @@ qx.Class.define("osparc.FlashMessenger", { this.__messages = new qx.data.Array(); this.__messageContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ - zIndex: 110000 + zIndex: osparc.utils.Utils.FLOATING_Z_INDEX }); const root = qx.core.Init.getApplication().getRoot(); root.add(this.__messageContainer, { diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPage.js b/services/static-webserver/client/source/class/osparc/auth/LoginPage.js index 6aaf505be0a..f177f56c011 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginPage.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginPage.js @@ -348,21 +348,11 @@ qx.Class.define("osparc.auth.LoginPage", { flex: 1 }); - const versionLink = new osparc.ui.basic.LinkLabel().set({ + const createReleaseNotesLink = osparc.utils.Utils.createReleaseNotesLink(); + createReleaseNotesLink.set({ textColor: "text-darker" }); - const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); - const platformVersion = osparc.utils.LibVersions.getPlatformVersion(); - let text = "osparc-simcore "; - text += (rData["tag"] && rData["tag"] !== "latest") ? rData["tag"] : platformVersion.version; - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); - text += platformName.length ? ` (${platformName})` : ""; - const url = rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); - versionLink.set({ - value: text, - url - }); - versionLinkLayout.add(versionLink); + versionLinkLayout.add(createReleaseNotesLink); const organizationLink = new osparc.ui.basic.LinkLabel().set({ textColor: "text-darker" diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js index a57ea790793..9651b75189b 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js @@ -18,10 +18,11 @@ qx.Class.define("osparc.dashboard.NewStudies", { extend: qx.ui.core.Widget, - construct: function(newStudies) { + construct: function(newStudies, groups) { this.base(arguments); this.__newStudies = newStudies; + this.__groups = groups || []; this._setLayout(new qx.ui.layout.VBox(10)); @@ -38,18 +39,11 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, properties: { - mode: { - check: ["grid", "list"], - init: "grid", - nullable: false, - event: "changeMode", - apply: "reloadCards" - }, - groupBy: { check: [null, "category"], init: null, - nullable: true + nullable: true, + apply: "reloadCards" } }, @@ -59,6 +53,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { members: { __newStudies: null, + __groups: null, __flatList: null, __groupedContainers: null, @@ -69,10 +64,8 @@ qx.Class.define("osparc.dashboard.NewStudies", { const noGroupContainer = this.__createGroupContainer("no-group", "No Group", "transparent"); this._add(noGroupContainer); - const categories = new Set([]); - this.__newStudies.forEach(newStudy => newStudy.category && categories.add(newStudy.category)); - Array.from(categories).forEach(category => { - const groupContainer = this.__createGroupContainer(category, qx.lang.String.firstUp(category), "transparent"); + Array.from(this.__groups).forEach(group => { + const groupContainer = this.__createGroupContainer(group.id, group.label, "transparent"); 
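// Sketch of the grouping behavior NewStudies gains in this hunk (plain data,
// no qooxdoo; study titles are illustrative): the new "groups" constructor
// argument comes from the "categories" array added to new_studies.json at the
// end of this patch, and each template is bucketed by its "category" field,
// falling back to the built-in "no-group" container.
const groups = [
  { id: "precomputed", label: "Precomputed" },
  { id: "personalized", label: "Personalized: Credits are required to run simulations." }
];
const newStudies = [
  { title: "Classic TI", category: "precomputed" },
  { title: "Personalized TI", category: "personalized" },
  { title: "Some Plan" } // no category -> "no-group"
];
function groupByCategory(studies, groupDefs) {
  const buckets = new Map([["no-group", []]]);
  groupDefs.forEach(g => buckets.set(g.id, []));
  studies.forEach(s => {
    const key = buckets.has(s.category) ? s.category : "no-group";
    buckets.get(key).push(s);
  });
  return buckets; // one entry per group container, in insertion order
}
console.log(groupByCategory(newStudies, groups));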
this._add(groupContainer); }); } else { @@ -84,7 +77,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { ].forEach(signalName => { flatList.addListener(signalName, e => this.fireDataEvent(signalName, e.getData()), this); }); - const spacing = this.getMode() === "grid" ? osparc.dashboard.GridButtonBase.SPACING : osparc.dashboard.ListButtonBase.SPACING; + const spacing = osparc.dashboard.GridButtonBase.SPACING; this.__flatList.getLayout().set({ spacingX: spacing, spacingY: spacing @@ -131,6 +124,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { headerColor, visibility: "excluded" }); + osparc.utils.Utils.setIdToWidget(groupContainer, groupId.toString() + "Group"); const atom = groupContainer.getChildControl("header"); atom.setFont("text-16"); this.__groupedContainers.push(groupContainer); @@ -148,14 +142,9 @@ qx.Class.define("osparc.dashboard.NewStudies", { __createCard: function(templateInfo) { const title = templateInfo.title; const desc = templateInfo.description; - const mode = this.getMode(); - const newPlanButton = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); + const newPlanButton = new osparc.dashboard.GridButtonNew(title, desc); newPlanButton.setCardKey(templateInfo.idToWidget); osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo.idToWidget); - if (this.getMode() === "list") { - const width = this.getBounds().width - 15; - newPlanButton.setWidth(width); - } newPlanButton.addListener("execute", () => this.fireDataEvent("newStudyClicked", templateInfo)) return newPlanButton; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js index 29f1e377292..0daf71e7282 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -22,6 +22,8 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { construct: function(resourceType) { this.base(arguments); + osparc.utils.Utils.setIdToWidget(this, "resourceFilter"); + this.__resourceType = resourceType; this.__sharedWithButtons = []; this.__tagButtons = []; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 6197de76c7c..e92961dc452 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -503,14 +503,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { newStudyBtn.setValue(false); const foundTemplates = newButtonsInfo.filter(newButtonInfo => templates.find(t => t.name === newButtonInfo.expectedTemplateLabel)); - const newStudies = new osparc.dashboard.NewStudies(foundTemplates); + const groups = newStudiesData[product].categories; + const newStudies = new osparc.dashboard.NewStudies(foundTemplates, groups); newStudies.setGroupBy("category"); - newStudies.setMode(this._resourcesContainer.getMode()); const winTitle = this.tr("New Plan"); const win = osparc.ui.window.Window.popUpInWindow(newStudies, winTitle, 640, 600).set({ clickAwayClose: false, - resizable: true, - showClose: true + resizable: true }); newStudies.addListener("newStudyClicked", e => { win.close(); @@ -520,6 +519,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { 
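// Why this commit sprinkles osparc.utils.Utils.setIdToWidget() calls around
// (newStudiesWindow, walletsList, billingCenterWindow, ...): the Guided Tours
// added for TIP locate their anchor widgets via DOM attribute selectors, as
// tours/List.js below does with document.querySelector on the step selector.
// Rough sketch of the mechanism, assuming setIdToWidget writes an
// "osparc-test-id" attribute on the widget's DOM element:
function setIdToWidget(domElement, id) {
  domElement.setAttribute("osparc-test-id", id);
}
function isTourStepAvailable(contextSelector) {
  // e.g. contextSelector === 'osparc-test-id="walletsList"'
  return document.querySelector(`[${contextSelector}]`) !== null;
}
// Usage: once the wallets list is rendered and tagged, the matching tour
// becomes selectable; steps whose selector finds no element are skipped.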
this.__newPlanBtnClicked(templateData, templateInfo.newStudyLabel); } }); + osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow"); }); } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js b/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js index fa461489579..2265aff3e9a 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js @@ -50,16 +50,19 @@ qx.Class.define("osparc.desktop.credits.AutoRecharge", { __paymentMethodField: null, __buildLayout: function() { - this.removeAll() + this.removeAll(); + + const titleText = this.tr("Auto-recharge"); + const introText = this.tr("Keep your balance running smoothly by automatically setting your credits to be recharged when it runs low."); this.__mainContent = new qx.ui.container.Composite(new qx.ui.layout.VBox(15).set({ alignX: "center" })) - const title = new qx.ui.basic.Label("Auto-recharge").set({ + const title = new qx.ui.basic.Label(titleText).set({ marginTop: 25, font: "title-18" }); - const subtitle = new qx.ui.basic.Label(this.tr("Keep your balance running smoothly by automatically setting your credits to be recharged when it runs low.")).set({ + const subtitle = new qx.ui.basic.Label(introText).set({ rich: true, font: "text-14", textAlign: "center" @@ -73,11 +76,11 @@ qx.Class.define("osparc.desktop.credits.AutoRecharge", { this.__noPaymentMethodsContent = new qx.ui.container.Composite(new qx.ui.layout.VBox(15).set({ alignX: "center" })) - this.__noPaymentMethodsContent.add(new qx.ui.basic.Label("Auto-recharge").set({ + this.__noPaymentMethodsContent.add(new qx.ui.basic.Label(titleText).set({ marginTop: 25, font: "title-18" })) - this.__noPaymentMethodsContent.add(new qx.ui.basic.Label(this.tr("Keep your balance running smoothly by automatically setting your credits to be recharged when it runs low.")).set({ + this.__noPaymentMethodsContent.add(new qx.ui.basic.Label(introText).set({ rich: true, font: "text-14", textAlign: "center" diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenterWindow.js b/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenterWindow.js index 94610184744..2fb6a148669 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenterWindow.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenterWindow.js @@ -21,6 +21,9 @@ qx.Class.define("osparc.desktop.credits.BillingCenterWindow", { construct: function() { this.base(arguments, "credits", this.tr("Billing Center")); + + osparc.utils.Utils.setIdToWidget(this, "billingCenterWindow"); + const width = 1035; const height = 700; this.set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/Usage.js b/services/static-webserver/client/source/class/osparc/desktop/credits/Usage.js index 598d46f7277..8f4eaf1075c 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/Usage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/Usage.js @@ -36,7 +36,7 @@ qx.Class.define("osparc.desktop.credits.Usage", { const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - const lbl = new qx.ui.basic.Label("Select a credit account:"); + const lbl = new qx.ui.basic.Label("Select a Credit Account:"); container.add(lbl); const selectBoxContainer = new 
qx.ui.container.Composite(new qx.ui.layout.HBox(5)); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js index 004600d5c79..3387638b3de 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js @@ -175,12 +175,13 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { __buildLayout() { this._removeAll(); - this.__autorechargeBtn = new qx.ui.form.ToggleButton("Autorecharge").set({ + this.__autorechargeBtn = new qx.ui.form.ToggleButton(this.tr("Auto-recharge")).set({ maxHeight: 30, alignX: "center", alignY: "middle", focusable: false }); + osparc.utils.Utils.setIdToWidget(this.__autorechargeBtn, "autorechargeBtn"); this.__autorechargeBtn.addListener("execute", () => { const autorecharge = new osparc.desktop.credits.AutoRecharge(this.getKey()); const win = osparc.ui.window.Window.popUpInWindow(autorecharge, "Auto-recharge", 400, 550).set({ @@ -200,7 +201,7 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { converter: ar => ar ? ar.enabled : false }); this.__autorechargeBtn.bind("value", this.__autorechargeBtn, "label", { - converter: value => value ? "Autorecharge: ON" : "Autorecharge: OFF" + converter: value => value ? this.tr("Auto-recharge: ON") : this.tr("Auto-recharge: OFF") }); this._add(this.__autorechargeBtn, { // Takes the status button place for the moment @@ -252,6 +253,7 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { alignY: "middle", visibility: this.__canIWrite() ? "visible" : "excluded", }); + osparc.utils.Utils.setIdToWidget(this.__buyBtn, "buyCreditsBtn"); this.bind("accessRights", this.__buyBtn, "enabled", { converter: aR => { const myAr = osparc.data.model.Wallet.getMyAccessRights(aR); @@ -333,7 +335,7 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { favouriteButtonIcon.setTextColor("strong-main"); } else { favouriteButton.set({ - toolTipText: this.tr("Switch to this credit account"), + toolTipText: this.tr("Switch to this Credit Account"), icon: "@FontAwesome5Solid/circle/20" }); favouriteButtonIcon.setTextColor("text"); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js index 1c1e243fc2b..0dfc8e88def 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js @@ -23,6 +23,8 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { this._setLayout(new qx.ui.layout.VBox(10)); + osparc.utils.Utils.setIdToWidget(this, "walletsList"); + this.__addHeader(this.tr("Personal"), true); this.__noPersonalWalletsLabel = new qx.ui.basic.Label().set({ value: this.tr("No personal Credit Account found"), @@ -30,7 +32,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { marginLeft: 10 }); this._add(this.__noPersonalWalletsLabel); - this.__personalWalletsModel = this.__addWalletsList() + this.__personalWalletsModel = this.__addWalletsList("personalWalletsList"); this.__addHeader(this.tr("Shared with me"), false); this.__noSharedWalletsLabel = new qx.ui.basic.Label().set({ @@ -39,7 +41,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { marginLeft: 10 }); this._add(this.__noSharedWalletsLabel); - 
this.__sharedWalletsModel = this.__addWalletsList({ flex: 1 }) + this.__sharedWalletsModel = this.__addWalletsList("sharedWalletsList"); this.loadWallets(); }, @@ -78,7 +80,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { return filter; }, - __addWalletsList: function(layoutOpts={}) { + __addWalletsList: function(widgetId) { const walletsUIList = new qx.ui.form.List().set({ decorator: "no-border", spacing: 3, @@ -86,6 +88,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { height: null, focusable: false }); + osparc.utils.Utils.setIdToWidget(walletsUIList, widgetId); const walletsModel = new qx.data.Array(); const walletsCtrl = new qx.data.controller.List(walletsModel, walletsUIList, "name"); walletsCtrl.setDelegate({ @@ -117,7 +120,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { } }); - this._add(walletsUIList, layoutOpts); + this._add(walletsUIList); return walletsModel; }, diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js index a934bbb987a..b226a949ae3 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js @@ -61,6 +61,7 @@ qx.Class.define("osparc.navigation.UserMenu", { break; case "billing-center": control = new qx.ui.menu.Button(this.tr("Billing Center")); + osparc.utils.Utils.setIdToWidget(control, "userMenuBillingCenterBtn"); control.addListener("execute", () => { const walletsEnabled = osparc.desktop.credits.Utils.areWalletsEnabled(); if (walletsEnabled) { diff --git a/services/static-webserver/client/source/class/osparc/notification/Notifications.js b/services/static-webserver/client/source/class/osparc/notification/Notifications.js index 54e9b8c2a9b..2f7ab34c146 100644 --- a/services/static-webserver/client/source/class/osparc/notification/Notifications.js +++ b/services/static-webserver/client/source/class/osparc/notification/Notifications.js @@ -101,7 +101,7 @@ qx.Class.define("osparc.notification.Notifications", { "category": "WALLET_SHARED", "actionable_path": "wallet/"+walletId, "title": "Credits shared", - "text": "A Credit account was shared with you" + "text": "A Credit Account was shared with you" }; return { ...baseNotification, diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js index dd05744a142..7cded74b4b0 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.notification.NotificationsContainer", { this._setLayout(new qx.ui.layout.Canvas()); this.set({ - zIndex: 110000, + zIndex: osparc.utils.Utils.FLOATING_Z_INDEX, maxWidth: osparc.notification.NotificationUI.MAX_WIDTH, maxHeight: 250, backgroundColor: "background-main-3" @@ -37,6 +37,7 @@ qx.Class.define("osparc.notification.NotificationsContainer", { }); const notificationsContainer = this.__container = new qx.ui.container.Composite(new qx.ui.layout.VBox(1)); + osparc.utils.Utils.setIdToWidget(notificationsContainer, "notificationsContainer"); const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(notificationsContainer); this._add(scrollContainer, { diff --git 
a/services/static-webserver/client/source/class/osparc/product/Utils.js b/services/static-webserver/client/source/class/osparc/product/Utils.js index 4b3799a51ef..eba9f0ff4b2 100644 --- a/services/static-webserver/client/source/class/osparc/product/Utils.js +++ b/services/static-webserver/client/source/class/osparc/product/Utils.js @@ -236,10 +236,10 @@ qx.Class.define("osparc.product.Utils", { }, showQuality: function() { - if (this.getProductName().includes("s4l")) { - return false; + if (this.isProduct("osparc")) { + return true; } - return true; + return false; }, showClassifiers: function() { diff --git a/services/static-webserver/client/source/class/osparc/product/tours/Utils.js b/services/static-webserver/client/source/class/osparc/product/tours/Tours.js similarity index 52% rename from services/static-webserver/client/source/class/osparc/product/tours/Utils.js rename to services/static-webserver/client/source/class/osparc/product/tours/Tours.js index 0a10bf87e1c..ab34e1fed7a 100644 --- a/services/static-webserver/client/source/class/osparc/product/tours/Utils.js +++ b/services/static-webserver/client/source/class/osparc/product/tours/Tours.js @@ -15,19 +15,34 @@ ************************************************************************ */ -qx.Class.define("osparc.product.tours.Utils", { +/** + * @asset(osparc/tours/s4llite_tours.json) + * @asset(osparc/tours/s4l_tours.json) + * @asset(osparc/tours/tis_tours.json) + */ + +qx.Class.define("osparc.product.tours.Tours", { type: "static", statics: { TOURS: { "s4llite": { - fetchTours: () => osparc.product.tours.s4llite.Tours.fetchTours() + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4llite_tours.json") }, "s4l": { - fetchTours: () => osparc.product.tours.s4l.Tours.fetchTours() + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4l_tours.json") + }, + "tis": { + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/tis_tours.json") } }, + fetchTours: function(link) { + return osparc.utils.Utils.fetchJSON(link) + .then(Object.values) + .catch(console.error); + }, + // it returns a promise getTours: function() { const pName = osparc.product.Utils.getProductName(); diff --git a/services/static-webserver/client/source/class/osparc/product/tours/s4l/Tours.js b/services/static-webserver/client/source/class/osparc/product/tours/s4l/Tours.js deleted file mode 100644 index a0f05e375be..00000000000 --- a/services/static-webserver/client/source/class/osparc/product/tours/s4l/Tours.js +++ /dev/null @@ -1,40 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/** - * @asset(osparc/tours/s4l_tours.json) - */ - -qx.Class.define("osparc.product.tours.s4l.Tours", { - type: "static", - - statics: { - fetchTours: function() { - return new Promise((resolve, reject) => { - osparc.utils.Utils.fetchJSON("/resource/osparc/tours/s4l_tours.json") - .then(toursObj => { - const tours = Object.values(toursObj); - resolve(tours); - }) - .catch(err => { - console.error(err); - reject(); - }); - }); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/product/tours/s4llite/Tours.js 
b/services/static-webserver/client/source/class/osparc/product/tours/s4llite/Tours.js deleted file mode 100644 index c71a2955eef..00000000000 --- a/services/static-webserver/client/source/class/osparc/product/tours/s4llite/Tours.js +++ /dev/null @@ -1,40 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/** - * @asset(osparc/tours/s4llite_tours.json) - */ - -qx.Class.define("osparc.product.tours.s4llite.Tours", { - type: "static", - - statics: { - fetchTours: function() { - return new Promise((resolve, reject) => { - osparc.utils.Utils.fetchJSON("/resource/osparc/tours/s4llite_tours.json") - .then(toursObj => { - const tours = Object.values(toursObj); - resolve(tours); - }) - .catch(err => { - console.error(err); - reject(); - }); - }); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/store/Support.js b/services/static-webserver/client/source/class/osparc/store/Support.js index 4fc01ae7100..508ad582644 100644 --- a/services/static-webserver/client/source/class/osparc/store/Support.js +++ b/services/static-webserver/client/source/class/osparc/store/Support.js @@ -25,7 +25,7 @@ qx.Class.define("osparc.store.Support", { addQuickStartToMenu: function(menu) { const quickStart = osparc.product.quickStart.Utils.getQuickStart(); if (quickStart) { - const qsButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Quick Start")); + const qsButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Quick Start"), "@FontAwesome5Solid/graduation-cap/14"); qsButton.getChildControl("label").set({ rich: true }); @@ -39,10 +39,10 @@ qx.Class.define("osparc.store.Support", { }, addGuidedToursToMenu: function(menu) { - const guidedToursButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Guided Tours")); + const guidedToursButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Guided Tours"), "@FontAwesome5Solid/graduation-cap/14"); guidedToursButton.exclude(); menu.add(guidedToursButton); - const fetchTours = osparc.product.tours.Utils.getTours(); + const fetchTours = osparc.product.tours.Tours.getTours(); if (fetchTours) { fetchTours .then(tours => { @@ -64,7 +64,7 @@ qx.Class.define("osparc.store.Support", { menuButton.setVisibility(manuals && manuals.length ? 
"visible" : "excluded"); } manuals.forEach(manual => { - const manualBtn = new qx.ui.menu.Button(manual.label); + const manualBtn = new qx.ui.menu.Button(manual.label, "@FontAwesome5Solid/book/14"); manualBtn.getChildControl("label").set({ rich: true }); @@ -81,7 +81,7 @@ qx.Class.define("osparc.store.Support", { } issues.forEach(issueInfo => { const label = issueInfo["label"]; - const issueButton = new qx.ui.menu.Button(label); + const issueButton = new qx.ui.menu.Button(label, "@FontAwesome5Solid/comments/14"); issueButton.getChildControl("label").set({ rich: true }); @@ -89,12 +89,12 @@ qx.Class.define("osparc.store.Support", { const issueConfirmationWindow = new osparc.ui.window.Dialog(label + " " + qx.locale.Manager.tr("Information"), null, qx.locale.Manager.tr("To create an issue, you must have an account and be already logged-in.") ); - const contBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Continue"), "@FontAwesome5Solid/external-link-alt/12"); + const contBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Continue"), "@FontAwesome5Solid/external-link-alt/14"); contBtn.addListener("execute", () => { window.open(issueInfo["new_url"]); issueConfirmationWindow.close(); }, this); - const loginBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Log in in ") + label, "@FontAwesome5Solid/external-link-alt/12"); + const loginBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Log in in ") + label, "@FontAwesome5Solid/external-link-alt/14"); loginBtn.addListener("execute", () => window.open(issueInfo["login_url"]), this); issueConfirmationWindow.addButton(contBtn); issueConfirmationWindow.addButton(loginBtn); @@ -117,15 +117,15 @@ qx.Class.define("osparc.store.Support", { let cb = null; switch (supportInfo["kind"]) { case "web": - icon = "@FontAwesome5Solid/link/12"; + icon = "@FontAwesome5Solid/link/14"; cb = () => window.open(supportInfo["url"]); break; case "forum": - icon = "@FontAwesome5Solid/comments/12"; + icon = "@FontAwesome5Solid/comments/14"; cb = () => window.open(supportInfo["url"]); break; case "email": - icon = "@FontAwesome5Solid/envelope/12"; + icon = "@FontAwesome5Solid/envelope/14"; cb = () => this.__openSendEmailFeedbackDialog(supportInfo["email"]); break; } diff --git a/services/static-webserver/client/source/class/osparc/task/Tasks.js b/services/static-webserver/client/source/class/osparc/task/Tasks.js index fefb476de50..8494db9754b 100644 --- a/services/static-webserver/client/source/class/osparc/task/Tasks.js +++ b/services/static-webserver/client/source/class/osparc/task/Tasks.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.task.Tasks", { this.__tasks = new qx.data.Array(); const tasksContainer = this.__tasksContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox(3)).set({ - zIndex: 110000, + zIndex: osparc.utils.Utils.FLOATING_Z_INDEX, visibility: "excluded" }); osparc.utils.Utils.setIdToWidget(tasksContainer, "tasks"); diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js index e2c4928bec3..e72b2cce9a7 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js +++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js @@ -1014,7 +1014,7 @@ qx.Theme.define("osparc.theme.Appearance", { */ "hint": { style: state => ({ - backgroundColor: "window-popup-background", + backgroundColor: "hint-background", decorator: "hint", padding: 5 }) diff --git 
a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js index 2eb444de0b2..e46c25d4f02 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js @@ -42,6 +42,7 @@ qx.Theme.define("osparc.theme.ColorDark", { "input_background": "#213248", "input_background_disable": "rgba(113, 157, 181, 0.25)", "window-popup-background": "rgba(66, 66, 66, 1)", + "hint-background": "rgba(82, 82, 82, 1)", "transparent_overlay": "rgba(1, 18, 26, 0.1)", "flash_message_bg": "input_background", diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js index daa221ff21e..16f6c8e3fd7 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js @@ -41,7 +41,8 @@ qx.Theme.define("osparc.theme.ColorLight", { "fab-background": "rgba(255, 255, 255, 1)", "input_background": "rgba(209, 214, 218, 1)", "input_background_disable": "rgba(113, 157, 181, 0.04)", - "window-popup-background": "rgba(255,255,255, 1)", + "window-popup-background": "rgba(255, 255, 255, 1)", + "hint-background": "rgba(201, 201, 201, 1)", "transparent_overlay": "rgba(1, 18, 26, 0.1)", "flash_message_bg": "input_background", diff --git a/services/static-webserver/client/source/class/osparc/tours/List.js b/services/static-webserver/client/source/class/osparc/tours/List.js index 89648baf42d..989d26c9aa5 100644 --- a/services/static-webserver/client/source/class/osparc/tours/List.js +++ b/services/static-webserver/client/source/class/osparc/tours/List.js @@ -56,8 +56,8 @@ qx.Class.define("osparc.tours.List", { return control || this.base(arguments, id); }, - __isSelectorVisible: function(selector) { - const element = document.querySelector(`[${selector}]`); + __isSelectorVisible: function(contextSelector) { + const element = document.querySelector(`[${contextSelector}]`); if (element) { const widget = qx.ui.core.Widget.getWidgetByElement(element); if (qx.ui.core.queue.Visibility.isVisible(widget)) { diff --git a/services/static-webserver/client/source/class/osparc/tours/Manager.js b/services/static-webserver/client/source/class/osparc/tours/Manager.js index e24382d38aa..58bff7a6c44 100644 --- a/services/static-webserver/client/source/class/osparc/tours/Manager.js +++ b/services/static-webserver/client/source/class/osparc/tours/Manager.js @@ -25,7 +25,7 @@ qx.Class.define("osparc.tours.Manager", { layout: new qx.ui.layout.VBox(20), contentPadding: 15, modal: true, - width: 300, + width: 400, height: 300, showMaximize: false, showMinimize: false @@ -128,6 +128,8 @@ qx.Class.define("osparc.tours.Manager", { const widget = qx.ui.core.Widget.getWidgetByElement(element); if (step.beforeClick.action) { widget[step.beforeClick.action](); + } else if (step.beforeClick.event) { + widget.fireEvent(step.beforeClick.event); } else { widget.execute(); } @@ -168,10 +170,11 @@ qx.Class.define("osparc.tours.Manager", { } else { // target not found, move to the next step this.__toStepCheck(this.__currentIdx+1); + return; } } else { + // intro text, it will be centered stepWidget.getChildControl("caret").exclude(); - stepWidget.moveToTheCenter(); } if (step.title) { stepWidget.setTitle(step.title); @@ -187,8 +190,13 @@ qx.Class.define("osparc.tours.Manager", { } 
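// Sketch of the theme/CSS handshake used for the hint caret below (framework
// calls stubbed out): the caret is drawn purely in CSS (hint.css now colors
// every caret border with var(--hint-caret-color)), so the JS theme layer
// publishes the resolved "hint-background" color into that custom property on
// every theme change -- otherwise the caret would keep its dark default on
// the light theme.
const THEMES = {
  dark: "rgba(82, 82, 82, 1)",    // matches the new ColorDark "hint-background"
  light: "rgba(201, 201, 201, 1)" // matches the new ColorLight "hint-background"
};
function applyHintCaretColor(themeName) {
  // stands in for colorManager.resolve("hint-background") in the patch
  const hintBg = THEMES[themeName];
  document.documentElement.style.setProperty("--hint-caret-color", hintBg);
}
applyHintCaretColor("dark"); // re-run this on every theme switch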
stepWidget.show(); - // eslint-disable-next-line no-underscore-dangle - setTimeout(() => stepWidget.__updatePosition(), 10); // Hacky: Execute async and give some time for the relevant properties to be set + setTimeout(() => { + if (stepWidget.getElement()) { + stepWidget.updatePosition(); + } else { + stepWidget.moveToTheCenter(); + } + }, 10); } } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/FloatingHelper.js b/services/static-webserver/client/source/class/osparc/ui/basic/FloatingHelper.js index 2b1b88e5408..4a418cc9203 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/FloatingHelper.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/FloatingHelper.js @@ -19,7 +19,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { this.set({ backgroundColor: "transparent", visibility: "excluded", - zIndex: 110000 + zIndex: osparc.utils.Utils.FLOATING_Z_INDEX }); if (caretSize) { @@ -62,6 +62,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { properties: { element: { check: "qx.ui.core.Widget", + init: null, apply: "__applyElement" }, @@ -98,8 +99,16 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { control = new qx.ui.container.Composite().set({ backgroundColor: "transparent" }); - const classPrefix = this.getCaretSize() === "large" ? "hint-large" : "hint"; + const classPrefix = this.getCaretSize() === "large" ? "hint-large" : "hint-small"; control.getContentElement().addClass(classPrefix); + const colorManager = qx.theme.manager.Color.getInstance(); + // override the css defined caret color depending on theme + const overrideCaretColor = () => { + const hintBg = colorManager.resolve("hint-background"); + document.documentElement.style.setProperty("--hint-caret-color", hintBg); + }; + colorManager.addListener("changeTheme", () => overrideCaretColor(), this); + overrideCaretColor(); break; } } @@ -109,7 +118,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { __buildWidget: function() { this._removeAll(); - const classPrefix = this.getCaretSize() === "large" ? "hint-large" : "hint"; + const classPrefix = this.getCaretSize() === "large" ? "hint-large" : "hint-small"; const hintContainer = this.getChildControl("hint-container"); const caret = this.getChildControl("caret"); @@ -117,10 +126,11 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { caret.getContentElement().removeClass(classPrefix+"-right"); caret.getContentElement().removeClass(classPrefix+"-bottom"); caret.getContentElement().removeClass(classPrefix+"-left"); + let caretClass = null; switch (this.getOrientation()) { case this.self().ORIENTATION.TOP: case this.self().ORIENTATION.LEFT: { - caret.getContentElement().addClass(this.getOrientation() === this.self().ORIENTATION.LEFT ? classPrefix+"-left" : classPrefix+"-top"); + caretClass = this.getOrientation() === this.self().ORIENTATION.LEFT ? classPrefix+"-left" : classPrefix+"-top"; this._setLayout(this.getOrientation() === this.self().ORIENTATION.LEFT ? new qx.ui.layout.HBox() : new qx.ui.layout.VBox()); this._add(hintContainer, { flex: 1 @@ -130,7 +140,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { } case this.self().ORIENTATION.RIGHT: case this.self().ORIENTATION.BOTTOM: { - caret.getContentElement().addClass(this.getOrientation() === this.self().ORIENTATION.RIGHT ? classPrefix+"-right" : classPrefix+"-bottom"); + caretClass = this.getOrientation() === this.self().ORIENTATION.RIGHT ? 
classPrefix+"-right" : classPrefix+"-bottom"; this._setLayout(this.getOrientation() === this.self().ORIENTATION.RIGHT ? new qx.ui.layout.HBox() : new qx.ui.layout.VBox()); this._add(caret); this._add(hintContainer, { @@ -139,6 +149,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { break; } } + caret.getContentElement().addClass(caretClass); const caretSize = this.getCaretSize() === "large" ? 10 : 5; switch (this.getOrientation()) { case this.self().ORIENTATION.RIGHT: @@ -154,8 +165,8 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { } }, - __updatePosition: function() { - if (this.isPropertyInitialized("element") && this.getElement().getContentElement()) { + updatePosition: function() { + if (this.getElement() && this.getElement().getContentElement()) { const element = this.getElement().getContentElement() .getDomElement(); const { @@ -193,8 +204,8 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { moveToTheCenter: function() { const properties = {}; const selfBounds = this.getHintBounds(); - properties.top = Math.floor((window.innerHeight - selfBounds.width) / 2); - properties.left = Math.floor((window.innerWidth - selfBounds.height) / 2); + properties.top = Math.floor((window.innerHeight - selfBounds.height) / 2); + properties.left = Math.floor((window.innerWidth - selfBounds.width) / 2); this.setLayoutProperties(properties); }, @@ -204,7 +215,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { __applyOrientation: function() { this.__buildWidget(); - this.__updatePosition(); + this.updatePosition(); }, // overwritten @@ -267,7 +278,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { if (this.isActive()) { this.show(); } - this.__updatePosition(); + this.updatePosition(); break; case "disappear": this.exclude(); @@ -276,7 +287,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { case "resize": case "scrollX": case "scrollY": - setTimeout(() => this.__updatePosition(), 20); // Hacky: Execute async and give some time for the relevant properties to be set + setTimeout(() => this.updatePosition(), 20); // Hacky: Execute async and give some time for the relevant properties to be set break; } }, @@ -284,7 +295,7 @@ qx.Class.define("osparc.ui.basic.FloatingHelper", { // overridden _applyVisibility: function(ne, old) { this.base(arguments, ne, old); - this.__updatePosition(); + this.updatePosition(); } } }); diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index c791cec625b..282fd83b940 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -89,6 +89,8 @@ qx.Class.define("osparc.utils.Utils", { } }, + FLOATING_Z_INDEX: 110000, + getDefaultFont: function() { const defaultFont = { family: null, @@ -412,6 +414,22 @@ qx.Class.define("osparc.utils.Utils", { return daysBetween; }, + createReleaseNotesLink: function() { + const versionLink = new osparc.ui.basic.LinkLabel(); + const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); + const platformVersion = osparc.utils.LibVersions.getPlatformVersion(); + let text = "osparc-simcore "; + text += (rData["tag"] && rData["tag"] !== "latest") ? rData["tag"] : platformVersion.version; + const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + text += platformName.length ? 
` (${platformName})` : ""; + const url = rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); + versionLink.set({ + value: text, + url + }); + return versionLink; + }, + expirationMessage: function(daysToExpiration) { let msg = ""; if (daysToExpiration === 0) { diff --git a/services/static-webserver/client/source/resource/hint/hint.css b/services/static-webserver/client/source/resource/hint/hint.css index f56a6322325..2ac5c1ee646 100644 --- a/services/static-webserver/client/source/resource/hint/hint.css +++ b/services/static-webserver/client/source/resource/hint/hint.css @@ -1,4 +1,8 @@ -.hint:after { +:root { + --hint-caret-color: rgba(82, 82, 82, 1); /* Dark caret, override it from JavaScript for the light theme */ +} + +.hint-small:after { border: 5px solid transparent; content: " "; height: 0; @@ -7,32 +11,32 @@ pointer-events: none; } -.hint-top:after { +.hint-small-top:after { top: 0; left: 50%; margin-left: -5px; - border-top-color: rgba(255, 255, 255, 0.01); + border-top-color: var(--hint-caret-color); } -.hint-right:after { +.hint-small-right:after { right: 0; top: 50%; margin-top: -5px; - border-right-color: rgba(255, 255, 255, 0.01); + border-right-color: var(--hint-caret-color); } -.hint-bottom:after { +.hint-small-bottom:after { bottom: 0; left: 50%; margin-left: -5px; - border-bottom-color: rgba(255, 255, 255, 0.01); + border-bottom-color: var(--hint-caret-color); } -.hint-left:after { +.hint-small-left:after { left: 0; top: 50%; margin-top: -5px; - border-left-color: rgba(255, 255, 255, 0.01); + border-left-color: var(--hint-caret-color); } @@ -49,26 +53,26 @@ top: 0; left: 50%; margin-left: -10px; - border-top-color: rgba(255, 255, 255, 0.01); + border-top-color: var(--hint-caret-color); } .hint-large-right:after { right: 0; top: 50%; margin-top: -10px; - border-right-color: rgba(255, 255, 255, 0.01); + border-right-color: var(--hint-caret-color); } .hint-large-bottom:after { bottom: 0; left: 50%; margin-left: -10px; - border-bottom-color: rgba(255, 255, 255, 0.01); + border-bottom-color: var(--hint-caret-color); } .hint-large-left:after { left: 0; top: 50%; margin-top: -10px; - border-left-color: rgba(255, 255, 255, 0.01); + border-left-color: var(--hint-caret-color); } diff --git a/services/static-webserver/client/source/resource/osparc/new_studies.json b/services/static-webserver/client/source/resource/osparc/new_studies.json index c371af4553a..14c6ffd45bd 100644 --- a/services/static-webserver/client/source/resource/osparc/new_studies.json +++ b/services/static-webserver/client/source/resource/osparc/new_studies.json @@ -43,6 +43,13 @@ "newStudyLabel": "Personalized Phase-Modulation TI", "category": "personalized", "idToWidget": "personalizationNewPMTIPlanButton" + }], + "categories": [{ + "id": "precomputed", + "label": "Precomputed" + }, { + "id": "personalized", + "label": "Personalized: Credits are required to run simulations."
}] }, "s4l": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json index 95f8b721213..7b883f8a78c 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json @@ -45,25 +45,28 @@ "description": "Introduction to the Navigation Bar", "context": "osparc-test-id=navigationBar", "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=notificationsButton", + "event": "tap" + }, + "anchorEl": "osparc-test-id=notificationsContainer", + "text": "By clicking on the Bell, you will see notifications about your Studies, Credits and Organizations.", + "placement": "bottom" + }, { "beforeClick": { "selector": "osparc-test-id=helpNavigationBtn", "action": "open" }, "anchorEl": "osparc-test-id=helpNavigationMenu", - "text": "In this menu you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other interactive Guides.", + "text": "Under the question mark, you find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", "placement": "left" - }, { - "anchorEl": "osparc-test-id=creditsNavigationBtn", - "title": "Credits", - "text": "It will open the Billing Center", - "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=userMenuBtn", "action": "open" }, "anchorEl": "osparc-test-id=userMenuMenu", - "text": "The User menu gives you access to Your Account, Preferences, Organizations and more.", + "text": "The User Menu gives you access to Your Account, Billing Center, Preferences, Organizations and more.", "placement": "left" }] }, @@ -76,15 +79,15 @@ "beforeClick": { "selector": "osparc-test-id=studiesTabBtn" }, - "anchorEl": "osparc-test-id=searchBarFilter-textField-study", - "title": "Project Filter and Search", - "text": "This tool allows you to filter Projects, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" - }, { "anchorEl": "osparc-test-id=startS4LButton", "title": "Start Sim4Life", "text": "Clicking on this (+) Start Sim4Life button allows you to create and open a new Sim4Life project", "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filter and Search", + "text": "This tool allows you to filter Projects, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
"placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=studyItemMenuButton", diff --git a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json new file mode 100644 index 00000000000..eff695cae7f --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json @@ -0,0 +1,143 @@ +{ + "navbar": { + "id": "navbar", + "name": "Navigation Bar", + "description": "Introduction to the Navigation Bar", + "context": "osparc-test-id=navigationBar", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=notificationsButton", + "event": "tap" + }, + "anchorEl": "osparc-test-id=notificationsContainer", + "text": "By clicking on the Bell, you will see notifications about your Studies, Credits and Organizations.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=helpNavigationBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=helpNavigationMenu", + "text": "Under the question mark, you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "placement": "left" + }, { + "beforeClick": { + "selector": "osparc-test-id=userMenuBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=userMenuMenu", + "text": "The User Menu gives you access to Your Account, Billing Center, Preferences, Organizations and more.", + "placement": "left" + }] + }, + "plans": { + "id": "plans", + "name": "Plans", + "description": "All you need to know about Plan handling", + "context": "osparc-test-id=studiesTabBtn", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=studiesTabBtn" + }, + "anchorEl": "osparc-test-id=newStudyBtn", + "title": "Start a new Plan", + "text": "Clicking on this (+) button, a window with all the Plans available will pop up.", + "placement": "right" + }, { + "beforeClick": { + "selector": "osparc-test-id=newStudyBtn" + }, + "anchorEl": "osparc-test-id=precomputedGroup", + "title": "Precomputed Plans", + "text": "These plans have been available in prior versions and continue to be free of charge. They utilize pre-installed models and do not support personalization. Please select these for basic stimulation optimization.", + "placement": "bottom" + }, { + "anchorEl": "osparc-test-id=personalizedGroup", + "title": "Personalized Plans", + "text": "These plans support personalization and allow for customized models. Running the simulations required for personalized optimization will incur charges. Choose these plans for advanced stimulation optimization. If you want to learn how to purchase credits, follow the 'Buy Credits' Guided Tour.", + "placement": "top" + }, { + "beforeClick": { + "selector": "osparc-test-id=newStudiesWindow", + "action": "close" + }, + "anchorEl": "osparc-test-id=studiesList", + "title": "Plans", + "text": "The Plans you create and the ones shared with you will be listed here.", + "placement": "top" + }, { + "anchorEl": "osparc-test-id=resourceFilter", + "title": "Filters", + "text": "These filters allow you to display a better organized Dashboard.
You can filter by whom the resources are shared with, and also by Tags if you assign them.", + "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filters and Search", + "text": "This tool allows you to search and filter Plans, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studyItemMenuButton", + "action": "open" + }, + "anchorEl": "osparc-test-id=studyItemMenuMenu", + "title": "More options button", + "text": "On the Plan card, you can use the three dots button to access more information and operations on the Plan.", + "placement": "left" + }, { + "anchorEl": "osparc-test-id=updateStudyBtn", + "title": "Update Services", + "text": "On the Plan card, you can use the Update button to update the corresponding service to its latest version.", + "placement": "bottom" + }] + }, + "credits": { + "id": "credits", + "name": "Buy Credits", + "description": "Buy Credits for Personalization", + "context": "osparc-test-id=userMenuBtn", + "steps": [{ + "title": "Credits", + "text": "In order to run Personalized TI Plans, credits are required.
In the next Guided Tour you will learn how to purchase them." + }, { + "beforeClick": { + "selector": "osparc-test-id=userMenuBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=userMenuBillingCenterBtn", + "text": "From the User Menu you can access the Billing Center.", + "placement": "left" + }, { + "beforeClick": { + "selector": "osparc-test-id=userMenuBillingCenterBtn", + "action": "execute" + }, + "anchorEl": "osparc-test-id=sharedWalletsList", + "title": "Credits Accounts", + "text": "This is the list of Credits Accounts you have access to. You have your own Account, but you could also use Credit Accounts that were shared with you.", + "placement": "bottom" + }, { + "anchorEl": "osparc-test-id=personalWalletsList", + "title": "Top-up Credits", + "text": "There are two ways to charge credits into your Credit Account: you can set up an Auto-recharge process or go for a One-time payment.", + "placement": "bottom" + }, { + "anchorEl": "osparc-test-id=buyCreditsBtn", + "title": "One-time payment", + "text": "For a one-off, non-recurring payment. After the payment is made, the available credits will be added to the Credit Account.
Also, the Payment Method can be saved for future payments.", + "placement": "bottom" + }, { + "anchorEl": "osparc-test-id=autorechargeBtn", + "title": "Auto-recharge", + "text": "Keep your balance running smoothly by automatically setting your credits to be recharged when it runs low.
Before the auto-recharge function can be activated you need to add your first payment method.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=billingCenterWindow", + "action": "close" + }, + "title": "Credits Indicator", + "text": "Once the payment is validated, the Credits will be added to the Credit Account. In the User Menu -> Preferences, you will be able to tune the visibility options of the Credits Indicator." + }] + } +} From b32581619bf9ea6799fbda2cb76373afa350a1a7 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 18 Jun 2024 21:54:21 +0200 Subject: [PATCH 044/219] [Frontend] Disclaimer on TIP login page (#5960) --- .../announcement/AnnouncementUIFactory.js | 74 +++++++++++-------- .../source/class/osparc/auth/ui/LoginView.js | 11 +++ 2 files changed, 53 insertions(+), 32 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js b/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js index e141513e44f..ebef5735983 100644 --- a/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js +++ b/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js @@ -29,6 +29,47 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { } }, + statics: { + createLoginAnnouncement: function(title, text) { + const loginAnnouncement = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ + backgroundColor: "strong-main", + alignX: "center", + padding: 12, + allowGrowX: true, + maxWidth: 300 + }); + loginAnnouncement.getContentElement().setStyles({ + "border-radius": "8px" + }); + + if (title) { + const titleLabel = new qx.ui.basic.Label().set({ + value: title, + font: "text-16", + textColor: "white", + alignX: "center", + rich: true, + wrap: true + }); + loginAnnouncement.add(titleLabel); + } + + if (text) { + const descriptionLabel = new qx.ui.basic.Label().set({ + value: text, + font: "text-14", + textColor: "white", + alignX: "center", + rich: true, + wrap: true + }); + loginAnnouncement.add(descriptionLabel); + } + + return loginAnnouncement; + } + }, + members: { __ribbonAnnouncement: null, @@ -72,38 +113,7 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { createLoginAnnouncement: function() { const announcement = this.getAnnouncement(); - - const loginAnnouncement = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ - backgroundColor: "strong-main", - alignX: "center", - padding: 12, - allowGrowX: true, - maxWidth: 300 - }); - loginAnnouncement.getContentElement().setStyles({ - "border-radius": "8px" - }); - - const titleLabel = new qx.ui.basic.Label().set({ - value: announcement.getTitle(), - font: "text-16", - textColor: "white", - alignX: "center", - rich: true, - wrap: true - }); - loginAnnouncement.add(titleLabel); - - const descriptionLabel = new qx.ui.basic.Label().set({ - value: announcement.getDescription(), - font: "text-14", - textColor: "white", - alignX: "center", - rich: true, - wrap: true - }); - loginAnnouncement.add(descriptionLabel); - + const loginAnnouncement = this.self().createLoginAnnouncement(announcement.getTitle(), announcement.getDescription()); return loginAnnouncement; }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 005557f9653..404e7e1b3cd 100644 --- 
a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -122,6 +122,17 @@ qx.Class.define("osparc.auth.ui.LoginView", { }); this.add(grp); + + if (osparc.product.Utils.isProduct("tis")) { + const text = ` + 1) The TIP tool is designed exclusively for research purposes and it is not intended for clinical use. +
+
+ 2) Users are responsible for ensuring the anonymization and protection of privacy of medical data. + `; + const disclaimer = osparc.announcement.AnnouncementUIFactory.createLoginAnnouncement(this.tr("Disclaimer"), text); + this.add(disclaimer); + } }, getEmail: function() { From ac0f9c46632137f3d2202f7381fa7d725fc5dff0 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 19 Jun 2024 13:51:28 +0200 Subject: [PATCH 045/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Switch=20to?= =?UTF-8?q?=20iframe's=20content=20also=20after=20302=20response=20(#5963)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/data/model/Node.js | 45 ++++++++++--------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 940d1a27adb..8bd121012cd 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -1327,31 +1327,36 @@ qx.Class.define("osparc.data.model.Node", { } }, - __waitForServiceReady: function(srvUrl) { - // ping for some time until it is really ready - fetch(srvUrl) - .then(request => { - /* - if (request.status >= 200 || request.status < 300) { - this.__waitForServiceWebsite(srvUrl) - } - */ + __waitForServiceReady: async function(srvUrl) { + const retry = () => { + this.getStatus().setInteractive("connecting"); + // Check if node is still there + if (this.getWorkbench().getNode(this.getNodeId()) === null) { + return; + } + const interval = 3000; + qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); + }; + + // ping for some time until it is really reachable + try { + const response = await fetch(srvUrl); + if (response.ok || response.status === 302) { + // ok = status in the range 200-299 + // some services might respond with a 302 which is also fine // instead of // - requesting its frontend to make sure it is ready and ... // - waiting for the "load" event triggered by the content of the iframe // we will skip those steps and directly switch its iframe this.__serviceReadyIn(srvUrl); - }) - .catch(err => { - this.getStatus().setInteractive("connecting"); - console.log("service not ready yet, waiting... " + err); - // Check if node is still there - if (this.getWorkbench().getNode(this.getNodeId()) === null) { - return; - } - const interval = 1000; - qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); - }) + } else { + console.log(`${srvUrl} is not reachable. 
Status: ${response.status}`); + retry(); + } + } catch (error) { + console.error(`Error while checking ${srvUrl}:`, error); + retry(); + } }, __waitForServiceWebsite: function(srvUrl) { From 417e66fa8df367668eb98685dabfca499f83b399 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 19 Jun 2024 16:16:47 +0200 Subject: [PATCH 046/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Highlight?= =?UTF-8?q?=20elements=20in=20Guided=20Tour=20(#5967)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../osparc/desktop/wallets/WalletsList.js | 33 ++++++++----- .../source/class/osparc/tours/Manager.js | 48 +++++++++++++++++++ .../resource/osparc/tours/tis_tours.json | 2 +- 3 files changed, 71 insertions(+), 12 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js index 0dfc8e88def..c8f1198eabd 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js @@ -23,25 +23,33 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { this._setLayout(new qx.ui.layout.VBox(10)); - osparc.utils.Utils.setIdToWidget(this, "walletsList"); + const listsLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + osparc.utils.Utils.setIdToWidget(listsLayout, "walletsList"); + this._add(listsLayout); - this.__addHeader(this.tr("Personal"), true); + const headerPersonal = this.__createHeader(this.tr("Personal"), true); + listsLayout.add(headerPersonal); this.__noPersonalWalletsLabel = new qx.ui.basic.Label().set({ value: this.tr("No personal Credit Account found"), font: "text-13", marginLeft: 10 }); - this._add(this.__noPersonalWalletsLabel); - this.__personalWalletsModel = this.__addWalletsList("personalWalletsList"); + listsLayout.add(this.__noPersonalWalletsLabel); + const listPersonal = this.__createWalletsList("personalWalletsList"); + listsLayout.add(listPersonal); + this.__personalWalletsModel = this.__createModelFromList(listPersonal); - this.__addHeader(this.tr("Shared with me"), false); + const headerShared = this.__createHeader(this.tr("Shared with me"), false); + listsLayout.add(headerShared); this.__noSharedWalletsLabel = new qx.ui.basic.Label().set({ value: this.tr("No shared Credit Accounts found"), font: "text-13", marginLeft: 10 }); - this._add(this.__noSharedWalletsLabel); - this.__sharedWalletsModel = this.__addWalletsList("sharedWalletsList"); + listsLayout.add(this.__noSharedWalletsLabel); + const listShared = this.__createWalletsList("sharedWalletsList"); + listsLayout.add(listShared); + this.__sharedWalletsModel = this.__createModelFromList(listShared); this.loadWallets(); }, @@ -80,7 +88,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { return filter; }, - __addWalletsList: function(widgetId) { + __createWalletsList: function(widgetId) { const walletsUIList = new qx.ui.form.List().set({ decorator: "no-border", spacing: 3, @@ -89,6 +97,10 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { focusable: false }); osparc.utils.Utils.setIdToWidget(walletsUIList, widgetId); + return walletsUIList; + }, + + __createModelFromList: function(walletsUIList) { const walletsModel = new qx.data.Array(); const walletsCtrl = new qx.data.controller.List(walletsModel, walletsUIList, "name"); walletsCtrl.setDelegate({ @@ -120,7 +132,6 @@ 
qx.Class.define("osparc.desktop.wallets.WalletsList", { } }); - this._add(walletsUIList); return walletsModel; }, @@ -206,7 +217,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { win.close(); }, - __addHeader: function(label, showCurrently) { + __createHeader: function(label, showCurrently) { const header = new qx.ui.container.Composite(new qx.ui.layout.HBox()); const userWallets = new qx.ui.basic.Label().set({ value: label, @@ -224,7 +235,7 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { }); header.add(selectColumn) } - this._add(header); + return header; } } }); diff --git a/services/static-webserver/client/source/class/osparc/tours/Manager.js b/services/static-webserver/client/source/class/osparc/tours/Manager.js index 58bff7a6c44..74a01b4ae04 100644 --- a/services/static-webserver/client/source/class/osparc/tours/Manager.js +++ b/services/static-webserver/client/source/class/osparc/tours/Manager.js @@ -31,6 +31,7 @@ qx.Class.define("osparc.tours.Manager", { showMinimize: false }); + this.__blankets = []; this.__buildLayout(); }, @@ -58,6 +59,7 @@ qx.Class.define("osparc.tours.Manager", { members: { __currentBubble: null, __currentIdx: null, + __blankets: null, _createChildControlImpl: function(id) { let control; @@ -95,6 +97,7 @@ qx.Class.define("osparc.tours.Manager", { stop: function() { this.setTour(null); this.__removeCurrentBubble(); + this.__removeBlankets(); }, __removeCurrentBubble: function() { @@ -105,6 +108,49 @@ qx.Class.define("osparc.tours.Manager", { } }, + __addBlankets: function(targetWidget) { + // the plan is to surround the targetWidget with dark blankets so it gets highlighted + const element = targetWidget.getContentElement().getDomElement(); + const { + top, + left + } = qx.bom.element.Location.get(element); + const { + width, + height + } = qx.bom.element.Dimension.getSize(element); + const windowW = window.innerWidth; + const windowH = window.innerHeight; + + const addBlanket = (w, h, l, t) => { + const blanket = new qx.ui.core.Widget().set({ + width: w, + height: h, + backgroundColor: "black", + opacity: 0.4, + zIndex: osparc.utils.Utils.FLOATING_Z_INDEX-1 + }); + qx.core.Init.getApplication().getRoot().add(blanket, { + left: l, + top: t + }); + return blanket; + }; + this.__blankets.push(addBlanket(left, windowH, 0, 0)); // left + this.__blankets.push(addBlanket(width, top, left, 0)); // top + this.__blankets.push(addBlanket(windowW-left-width, windowH, left+width, 0)); // right + this.__blankets.push(addBlanket(width, windowH-top-height, left, top+height)); // bottom + }, + + __removeBlankets: function() { + const nBlankets = this.__blankets.length; + for (let i=nBlankets-1; i>=0; i--) { + const blanket = this.__blankets[i]; + qx.core.Init.getApplication().getRoot().remove(blanket); + this.__blankets.splice(i, 1); + } + }, + __selectTour: function(tour) { this.close(); if ("steps" in tour) { @@ -121,6 +167,7 @@ qx.Class.define("osparc.tours.Manager", { } this.__removeCurrentBubble(); + this.__removeBlankets(); this.__currentIdx = idx; const step = steps[idx]; if (step.beforeClick && step.beforeClick.selector) { @@ -167,6 +214,7 @@ qx.Class.define("osparc.tours.Manager", { if (step.placement) { stepWidget.setOrientation(osparc.ui.basic.FloatingHelper.textToOrientation(step.placement)); } + this.__addBlankets(targetWidget); } else { // target not found, move to the next step this.__toStepCheck(this.__currentIdx+1); diff --git a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json 
b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json index eff695cae7f..62aa67cffa0 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json @@ -112,7 +112,7 @@ "selector": "osparc-test-id=userMenuBillingCenterBtn", "action": "execute" }, - "anchorEl": "osparc-test-id=sharedWalletsList", + "anchorEl": "osparc-test-id=walletsList", "title": "Credits Accounts", "text": "This is the list of Credits Accounts you have access to. You have your own Account, but you could also use Credit Accounts that were shared with you.", "placement": "bottom" From 90cba9f7a34995eeccedabe50709f7daa8281f19 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 19 Jun 2024 17:24:09 +0200 Subject: [PATCH 047/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Log=20fetch?= =?UTF-8?q?=20response=20to=20running=20service=20(#5968)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/data/model/Node.js | 12 +++++++++--- .../client/source/class/osparc/utils/Utils.js | 4 ---- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 8bd121012cd..c0fa0233bc0 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -1328,8 +1328,8 @@ qx.Class.define("osparc.data.model.Node", { }, __waitForServiceReady: async function(srvUrl) { + this.getStatus().setInteractive("connecting"); const retry = () => { - this.getStatus().setInteractive("connecting"); // Check if node is still there if (this.getWorkbench().getNode(this.getNodeId()) === null) { return; @@ -1340,7 +1340,13 @@ qx.Class.define("osparc.data.model.Node", { // ping for some time until it is really reachable try { + if (osparc.utils.Utils.isDevelopmentPlatform()) { + console.log("Connecting: about to fetch ", srvUrl); + } const response = await fetch(srvUrl); + if (osparc.utils.Utils.isDevelopmentPlatform()) { + console.log("Connecting: fetch's response ", response); + } if (response.ok || response.status === 302) { // ok = status in the range 200-299 // some services might respond with a 302 which is also fine @@ -1350,11 +1356,11 @@ qx.Class.define("osparc.data.model.Node", { // we will skip those steps and directly switch its iframe this.__serviceReadyIn(srvUrl); } else { - console.log(`${srvUrl} is not reachable. Status: ${response.status}`); + console.log(`Connecting: ${srvUrl} is not reachable. 
Status: ${response.status}`); retry(); } } catch (error) { - console.error(`Error while checking ${srvUrl}:`, error); + console.error(`Connecting: Error while checking ${srvUrl}:`, error); retry(); } }, diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index 282fd83b940..2f5cf539d89 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -477,10 +477,6 @@ qx.Class.define("osparc.utils.Utils", { return window.location.hostname.includes("speag"); }, - isDevelEnv: function() { - return window.location.hostname.includes("master.speag") || window.location.port === "9081"; - }, - addBorder: function(widget, width = 1, color = "transparent") { widget.getContentElement().setStyle("border", width+"px solid " + color); }, From e4f4980f8e19516469e045195ade5a1d523950cd Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 20 Jun 2024 08:36:33 +0200 Subject: [PATCH 048/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Stringify?= =?UTF-8?q?=20fetch=20response=20(#5970)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/data/model/Node.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index c0fa0233bc0..f0d0369ac11 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -1345,7 +1345,7 @@ qx.Class.define("osparc.data.model.Node", { } const response = await fetch(srvUrl); if (osparc.utils.Utils.isDevelopmentPlatform()) { - console.log("Connecting: fetch's response ", response); + console.log("Connecting: fetch's response ", JSON.stringify(response)); } if (response.ok || response.status === 302) { // ok = status in the range 200-299 From 81b6bd23a2b51783b98fb358c1fa8183835eb56d Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 20 Jun 2024 11:11:05 +0200 Subject: [PATCH 049/219] =?UTF-8?q?=F0=9F=8E=A8E2E:=20improvements=20on=20?= =?UTF-8?q?ClassicTIP=20test=20(#5955)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/extensions.json | 2 - .../src/pytest_simcore/logging_utils.py | 3 +- .../src/pytest_simcore/playwright_utils.py | 141 ++++++++++- .../autoscaled_monitor/cli.py | 1 + .../autoscaled_monitor/constants.py | 4 +- tests/e2e-playwright/.gitignore | 3 +- tests/e2e-playwright/Makefile | 103 ++++---- tests/e2e-playwright/tests/conftest.py | 77 +++--- .../tests/jupyterlabs/test_jupyterlab.py | 4 +- ...cker.py => test_resource_usage_tracker.py} | 0 .../{sim4life.py => test_sim4life.py} | 0 .../{sleepers.py => test_sleepers.py} | 10 +- .../e2e-playwright/tests/ti_plan/conftest.py | 24 -- tests/e2e-playwright/tests/ti_plan/ti_plan.py | 195 --------------- tests/e2e-playwright/tests/tip/conftest.py | 39 +++ .../e2e-playwright/tests/tip/test_ti_plan.py | 229 ++++++++++++++++++ 16 files changed, 515 insertions(+), 320 deletions(-) rename tests/e2e-playwright/tests/resource_usage_tracker/{resource_usage_tracker.py => test_resource_usage_tracker.py} (100%) rename tests/e2e-playwright/tests/sim4life/{sim4life.py => test_sim4life.py} 
(100%) rename tests/e2e-playwright/tests/sleepers/{sleepers.py => test_sleepers.py} (96%) delete mode 100644 tests/e2e-playwright/tests/ti_plan/conftest.py delete mode 100644 tests/e2e-playwright/tests/ti_plan/ti_plan.py create mode 100644 tests/e2e-playwright/tests/tip/conftest.py create mode 100644 tests/e2e-playwright/tests/tip/test_ti_plan.py diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 106f454943c..247fda7f675 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -5,12 +5,10 @@ "DevSoft.svg-viewer-vscode", "eamodio.gitlens", "exiasr.hadolint", - "hediet.vscode-drawio", "ms-azuretools.vscode-docker", "ms-python.black-formatter", "ms-python.pylint", "ms-python.python", - "ms-vscode.makefile-tools", "njpwerner.autodocstring", "samuelcolvin.jinjahtml", "timonwong.shellcheck", diff --git a/packages/pytest-simcore/src/pytest_simcore/logging_utils.py b/packages/pytest-simcore/src/pytest_simcore/logging_utils.py index 65502c56608..e6c786545fa 100644 --- a/packages/pytest-simcore/src/pytest_simcore/logging_utils.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging_utils.py @@ -124,8 +124,7 @@ def log_context( error_message = ( f"{ctx_msg.raised} ({_timedelta_as_minute_second_ms(elapsed_time)})" ) - logger.log( - logging.ERROR, + logger.exception( error_message, *args, **kwargs, diff --git a/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py b/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py index deea6867c2b..11fccadd431 100644 --- a/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py +++ b/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py @@ -1,15 +1,21 @@ +import contextlib import json import logging +import re +from collections import defaultdict from contextlib import ExitStack -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum, unique from typing import Any, Final -from playwright.sync_api import WebSocket +from playwright.sync_api import FrameLocator, Page, Request, WebSocket, expect from pytest_simcore.logging_utils import log_context SECOND: Final[int] = 1000 MINUTE: Final[int] = 60 * SECOND +NODE_START_REQUEST_PATTERN: Final[re.Pattern[str]] = re.compile( + r"/projects/[^/]+/nodes/[^:]+:start" +) @unique @@ -42,6 +48,28 @@ def is_running(self) -> bool: ) +@unique +class NodeProgressType(str, Enum): + # NOTE: this is a partial duplicate of models_library/rabbitmq_messages.py + # It must remain as such until that module is pydantic V2 compatible + CLUSTER_UP_SCALING = "CLUSTER_UP_SCALING" + SERVICE_INPUTS_PULLING = "SERVICE_INPUTS_PULLING" + SIDECARS_PULLING = "SIDECARS_PULLING" + SERVICE_OUTPUTS_PULLING = "SERVICE_OUTPUTS_PULLING" + SERVICE_STATE_PULLING = "SERVICE_STATE_PULLING" + SERVICE_IMAGES_PULLING = "SERVICE_IMAGES_PULLING" + + @classmethod + def required_types_for_started_service(cls) -> set["NodeProgressType"]: + return { + NodeProgressType.SERVICE_INPUTS_PULLING, + NodeProgressType.SIDECARS_PULLING, + NodeProgressType.SERVICE_OUTPUTS_PULLING, + NodeProgressType.SERVICE_STATE_PULLING, + NodeProgressType.SERVICE_IMAGES_PULLING, + } + + class ServiceType(str, Enum): DYNAMIC = "DYNAMIC" COMPUTATIONAL = "COMPUTATIONAL" @@ -84,6 +112,28 @@ def retrieve_project_state_from_decoded_message(event: SocketIOEvent) -> Running return RunningState(event.obj["data"]["state"]["value"]) +@dataclass(frozen=True, slots=True, kw_only=True) +class NodeProgressEvent: + node_id: str + progress_type: NodeProgressType + current_progress: float + 
total_progress: float + + +def retrieve_node_progress_from_decoded_message( + event: SocketIOEvent, +) -> NodeProgressEvent: + assert event.name == _OSparcMessages.NODE_PROGRESS.value + assert "progress_type" in event.obj + assert "progress_report" in event.obj + return NodeProgressEvent( + node_id=event.obj["node_id"], + progress_type=NodeProgressType(event.obj["progress_type"]), + current_progress=float(event.obj["progress_report"]["actual_value"]), + total_progress=float(event.obj["progress_report"]["total"]), + ) + + @dataclass class SocketIOProjectClosedWaiter: def __call__(self, message: str) -> bool: @@ -139,6 +189,44 @@ def __call__(self, message: str) -> None: print("WS Message:", decoded_message.name, decoded_message.obj) +@dataclass +class SocketIONodeProgressCompleteWaiter: + node_id: str + _current_progress: dict[NodeProgressType, float] = field( + default_factory=defaultdict + ) + + def __call__(self, message: str) -> bool: + with log_context(logging.DEBUG, msg=f"handling websocket {message=}") as ctx: + # socket.io encodes messages like so + # https://stackoverflow.com/questions/24564877/what-do-these-numbers-mean-in-socket-io-payload + if message.startswith(_SOCKETIO_MESSAGE_PREFIX): + decoded_message = decode_socketio_42_message(message) + if decoded_message.name == _OSparcMessages.NODE_PROGRESS.value: + node_progress_event = retrieve_node_progress_from_decoded_message( + decoded_message + ) + if node_progress_event.node_id == self.node_id: + self._current_progress[node_progress_event.progress_type] = ( + node_progress_event.current_progress + / node_progress_event.total_progress + ) + ctx.logger.info( + "current startup progress: %s", + f"{json.dumps({k:round(v,1) for k,v in self._current_progress.items()})}", + ) + + return all( + progress_type in self._current_progress + for progress_type in NodeProgressType.required_types_for_started_service() + ) and all( + round(progress, 1) == 1.0 + for progress in self._current_progress.values() + ) + + return False + + def wait_for_pipeline_state( current_state: RunningState, *, @@ -187,3 +275,52 @@ def on_web_socket_default_handler(ws) -> None: ws.on("framesent", lambda payload: ctx.logger.info("⬇️ %s", payload)) ws.on("framereceived", lambda payload: ctx.logger.info("⬆️ %s", payload)) ws.on("close", lambda payload: stack.close()) # noqa: ARG005 + + +def _node_started_predicate(request: Request) -> bool: + return bool( + re.search(NODE_START_REQUEST_PATTERN, request.url) + and request.method.upper() == "POST" + ) + + +def _trigger_service_start_if_button_available(page: Page, node_id: str) -> None: + # wait for the start button to auto-disappear if it is still around after the timeout, then we click it + with log_context(logging.INFO, msg="trigger start button if needed") as ctx: + start_button_locator = page.get_by_test_id(f"Start_{node_id}") + with contextlib.suppress(AssertionError, TimeoutError): + expect(start_button_locator).to_be_visible(timeout=5000) + expect(start_button_locator).to_be_enabled(timeout=5000) + with page.expect_request(_node_started_predicate): + start_button_locator.click() + ctx.logger.info("triggered start button") + + +def wait_for_service_running( + *, + page: Page, + node_id: str, + websocket: WebSocket, + timeout: int, +) -> FrameLocator: + """NOTE: if the service was already started this will not work as some of the required websocket events will not be emitted again + In which case this will need further adjustment""" + + waiter = SocketIONodeProgressCompleteWaiter(node_id=node_id) + with ( +
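+        # NOTE: the two context managers below wrap a single user action: the
+        # waiter accumulates NODE_PROGRESS events per NodeProgressType from the
+        # websocket frames and only returns True once every required progress
+        # type has been seen and reports 100%, while the per-node start button
+        # (if still shown) is clicked inside the same scope.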
log_context(logging.INFO, msg="Waiting for node to run"), + websocket.expect_event("framereceived", waiter, timeout=timeout), + ): + _trigger_service_start_if_button_available(page, node_id) + return page.frame_locator(f'[osparc-test-id="iframe_{node_id}"]') + + +def app_mode_trigger_next_app(page: Page) -> None: + with ( + log_context(logging.INFO, msg="triggering next app"), + page.expect_request(_node_started_predicate), + ): + # Move to next step (this auto starts the next service) + next_button_locator = page.get_by_test_id("AppMode_NextBtn") + if next_button_locator.is_visible() and next_button_locator.is_enabled(): + page.get_by_test_id("AppMode_NextBtn").click() diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py index 1cad40078dd..3d5e150e24a 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py @@ -81,6 +81,7 @@ def main( if "license" in file_path.name: continue # very bad HACK + rich.print(f"checking {file_path.name}") if ( any(_ in f"{file_path}" for _ in ("sim4life.io", "osparc-master")) and "openssh" not in f"{file_path}" diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py index 82c5978f1d5..5a367d1180c 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/constants.py @@ -8,7 +8,9 @@ str ] = r"osparc-computational-cluster-{role}-{swarm_stack_name}-user_id:{user_id:d}-wallet_id:{wallet_id:d}" DEFAULT_DYNAMIC_EC2_FORMAT: Final[str] = r"osparc-dynamic-autoscaled-worker-{key_name}" -DEPLOY_SSH_KEY_PARSER: Final[parse.Parser] = parse.compile(r"osparc-{random_name}.pem") +DEPLOY_SSH_KEY_PARSER: Final[parse.Parser] = parse.compile( + r"{base_name}-{random_name}.pem" +) MINUTE: Final[int] = 60 HOUR: Final[int] = 60 * MINUTE diff --git a/tests/e2e-playwright/.gitignore b/tests/e2e-playwright/.gitignore index 5c4694bc6a0..cf83940dd82 100644 --- a/tests/e2e-playwright/.gitignore +++ b/tests/e2e-playwright/.gitignore @@ -1,6 +1,5 @@ test-results assets report.html -.e2e-playwright-env.txt -.e2e-playwright-jupyterlab-env.txt +.e2e-playwright-*.txt report.xml diff --git a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index e5fb838e54e..c0745f26c1a 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -92,7 +92,7 @@ test-sleepers: _check_venv_active ## runs sleepers test on local deploy --product-url=http://$(get_my_ip):9081 \ --autoregister \ --tracing=retain-on-failure \ - $(CURDIR)/tests/sleepers/sleepers.py + $(CURDIR)/tests/sleepers/test_sleepers.py .PHONY: test-sleepers-dev @@ -104,63 +104,56 @@ test-sleepers-dev: _check_venv_active ## runs sleepers test on local deploy --product-url=http://$(get_my_ip):9081 \ --headed \ --autoregister \ - $(CURDIR)/tests/sleepers/sleepers.py + $(CURDIR)/tests/sleepers/test_sleepers.py + + +# Define the files where user input will be saved +SLEEPERS_INPUT_FILE := .e2e-playwright-sleepers-env.txt +JUPYTER_LAB_INPUT_FILE := .e2e-playwright-jupyterlab-env.txt +CLASSIC_TIP_INPUT_FILE := .e2e-playwright-classictip-env.txt + +# Prompt the user for input and store it into variables +$(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE): + @read -p "Enter your product URL: " 
PRODUCT_URL; \ + read -p "Is the product billable [y/n]: " BILLABLE; \ + read -p "Is the test running in autoscaled deployment [y/n]: " AUTOSCALED; \ + read -p "Enter your username: " USER_NAME; \ + read -s -p "Enter your password: " PASSWORD; echo ""; \ + echo "--product-url=$$PRODUCT_URL --user-name=$$USER_NAME --password=$$PASSWORD" > $@; \ + if [ "$$BILLABLE" = "y" ]; then \ + echo "--product-billable" >> $@; \ + fi; \ + if [ "$$AUTOSCALED" = "y" ]; then \ + echo "--autoscaled" >> $@; \ + fi; \ + if [ "$@" = "$(JUPYTER_LAB_INPUT_FILE)" ]; then \ + read -p "Enter the size of the large file (human readable form e.g. 3Gib): " LARGE_FILE_SIZE; \ + echo "--service-key=jupyter-math --large-file-size=$$LARGE_FILE_SIZE" >> $@; \ + elif [ "$@" = "$(SLEEPERS_INPUT_FILE)" ]; then \ + read -p "Enter the number of sleepers: " NUM_SLEEPERS; \ + echo "--num-sleepers=$$NUM_SLEEPERS" >> $@; \ + fi +# Run the tests +test-sleepers-anywhere: _check_venv_active $(SLEEPERS_INPUT_FILE) + @$(call run_test, $(SLEEPERS_INPUT_FILE), tests/sleepers/test_sleepers.py) -# Define the file where user input will be saved -USER_INPUT_FILE := .e2e-playwright-env.txt -$(USER_INPUT_FILE):## Prompt the user for input and store it into variables - @read -p "Enter your product URL: " PRODUCT_URL; \ - read -p "Is the product billable [y/n]: " BILLABLE; \ - read -p "Enter your username: " USER_NAME; \ - read -s -p "Enter your password: " PASSWORD; echo ""; \ - read -p "Enter the number of sleepers: " NUM_SLEEPERS; \ - echo "$$PRODUCT_URL $$USER_NAME $$PASSWORD $$NUM_SLEEPERS $$BILLABLE" > $(USER_INPUT_FILE) - -# Read user input from the file and run the test -test-sleepers-anywhere: _check_venv_active $(USER_INPUT_FILE) ## test sleepers anywhere and keeps a cache as to where - @IFS=' ' read -r PRODUCT_URL USER_NAME PASSWORD NUM_SLEEPERS BILLABLE < $(USER_INPUT_FILE); \ - BILLABLE_FLAG=""; \ - if [ "$$BILLABLE" = "y" ]; then \ - BILLABLE_FLAG="--product-billable"; \ - fi; \ - pytest -s tests/sleepers/sleepers.py \ - --color=yes \ - --product-url=$$PRODUCT_URL \ - --user-name=$$USER_NAME \ - --password=$$PASSWORD \ - --num-sleepers=$$NUM_SLEEPERS \ - $$BILLABLE_FLAG \ - --browser chromium \ - --headed - -# Define the file where user input will be saved -JUPYTER_USER_INPUT_FILE := .e2e-playwright-jupyterlab-env.txt -$(JUPYTER_USER_INPUT_FILE): ## Prompt the user for input and store it into variables - @read -p "Enter your product URL: " PRODUCT_URL; \ - read -p "Is the product billable [y/n]: " BILLABLE; \ - read -p "Enter your username: " USER_NAME; \ - read -s -p "Enter your password: " PASSWORD; echo ""; \ - read -p "Enter the size of the large file (human readable form e.g. 
3Gib): " LARGE_FILE_SIZE; \ - echo "$$PRODUCT_URL $$USER_NAME $$PASSWORD $$LARGE_FILE_SIZE $$BILLABLE" > $(JUPYTER_USER_INPUT_FILE) - -test-jupyterlab-anywhere: _check_venv_active $(JUPYTER_USER_INPUT_FILE) ## test jupyterlabs anywhere and keeps a cache as to where - @IFS=' ' read -r PRODUCT_URL USER_NAME PASSWORD LARGE_FILE_SIZE BILLABLE < $(JUPYTER_USER_INPUT_FILE); \ - BILLABLE_FLAG=""; \ - if [ "$$BILLABLE" = "y" ]; then \ - BILLABLE_FLAG="--product-billable"; \ - fi; \ - pytest -s tests/jupyterlabs/ \ +test-jupyterlab-anywhere: _check_venv_active $(JUPYTER_LAB_INPUT_FILE) + @$(call run_test, $(JUPYTER_LAB_INPUT_FILE), tests/jupyterlabs/test_jupyterlab.py) + +test-tip-anywhere: _check_venv_active $(CLASSIC_TIP_INPUT_FILE) + $(call run_test, $(CLASSIC_TIP_INPUT_FILE), tests/tip/test_ti_plan.py) + +# Define the common test running function +define run_test + TEST_ARGS=$$(cat $1 | xargs); \ + echo $$TEST_ARGS; \ + pytest -s $2 \ --color=yes \ - --product-url=$$PRODUCT_URL \ - --user-name=$$USER_NAME \ - --password=$$PASSWORD \ - --large-file-size=$$LARGE_FILE_SIZE \ - --service-key=jupyter-math \ - $$BILLABLE_FLAG \ --browser chromium \ - --headed + --headed \ + $$TEST_ARGS +endef clean: - @rm -rf $(USER_INPUT_FILE) - @rm -rf $(JUPYTER_USER_INPUT_FILE) + @rm -rf $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index fb14087138a..7b5c16da1e6 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -12,7 +12,7 @@ import re from collections.abc import Callable, Iterator from contextlib import ExitStack -from typing import Final +from typing import Any, Final import pytest from faker import Faker @@ -71,6 +71,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=False, help="Whether product is billable or not", ) + group.addoption( + "--autoscaled", + action="store_true", + default=False, + help="Whether test runs against autoscaled deployment or not", + ) group.addoption( "--service-test-id", action="store", @@ -141,6 +147,12 @@ def product_billable(request: pytest.FixtureRequest) -> bool: return TypeAdapter(bool).validate_python(billable) +@pytest.fixture(scope="session") +def autoscaled(request: pytest.FixtureRequest) -> bool: + autoscaled = request.config.getoption("--autoscaled") + return TypeAdapter(bool).validate_python(autoscaled) + + @pytest.fixture(scope="session") def service_test_id(request: pytest.FixtureRequest) -> str: if test_id := request.config.getoption("--service-test-id"): @@ -278,11 +290,7 @@ def log_in_and_out( logging.INFO, f"Logging out of {product_url=} using {user_name=}/{user_password=}", ): - # click anywher to remove modal windows - page.click( - "body", - position={"x": 0, "y": 0}, - ) + page.keyboard.press("Escape") page.get_by_test_id("userMenuBtn").click() with page.expect_response(re.compile(r"/auth/logout")) as response_info: page.get_by_test_id("userMenuLogoutBtn").click() @@ -298,7 +306,7 @@ def create_new_project_and_delete( product_billable: bool, api_request_context: APIRequestContext, product_url: AnyUrl, -) -> Iterator[Callable[[tuple[RunningState]], str]]: +) -> Iterator[Callable[[tuple[RunningState], bool], dict[str, Any]]]: """The first available service currently displayed in the dashboard will be opened NOTE: cannot be used multiple times or going back to dashboard will fail!! 
""" @@ -306,7 +314,8 @@ def create_new_project_and_delete( def _( expected_states: tuple[RunningState] = (RunningState.NOT_STARTED,), - ) -> str: + press_open: bool = True, + ) -> dict[str, Any]: assert ( len(created_project_uuids) == 0 ), "misuse of this fixture! only 1 study can be opened at a time. Otherwise please modify the fixture" @@ -315,13 +324,15 @@ def _( f"Opening project in {product_url=} as {product_billable=}", ) as ctx: waiter = SocketIOProjectStateUpdatedWaiter(expected_states=expected_states) - with log_in_and_out.expect_event( - "framereceived", waiter - ), page.expect_response( - re.compile(r"/projects/[^:]+:open") - ) as response_info: + with ( + log_in_and_out.expect_event("framereceived", waiter), + page.expect_response( + re.compile(r"/projects/[^:]+:open") + ) as response_info, + ): # Project detail view pop-ups shows - page.get_by_test_id("openResource").click() + if press_open: + page.get_by_test_id("openResource").click() if product_billable: # Open project with default resources page.get_by_test_id("openWithResources").click() @@ -335,7 +346,7 @@ def _( ) created_project_uuids.append(project_uuid) - return project_uuid + return project_data["data"] yield _ @@ -363,7 +374,6 @@ def _( logging.INFO, f"Deleting project with {project_uuid=} in {product_url=} as {product_billable=}", ): - response = api_request_context.delete( f"{product_url}v0/projects/{project_uuid}" ) @@ -378,7 +388,7 @@ def _( @pytest.fixture -def find_service_in_dashboard( +def find_and_start_service_in_dashboard( page: Page, ) -> Callable[[ServiceType, str, str | None], None]: def _( @@ -400,13 +410,15 @@ def _( @pytest.fixture def create_project_from_service_dashboard( - find_service_in_dashboard: Callable[[ServiceType, str, str | None], None], - create_new_project_and_delete: Callable[[tuple[RunningState]], str], -) -> Callable[[ServiceType, str, str | None], str]: + find_and_start_service_in_dashboard: Callable[[ServiceType, str, str | None], None], + create_new_project_and_delete: Callable[[tuple[RunningState]], dict[str, Any]], +) -> Callable[[ServiceType, str, str | None], dict[str, Any]]: def _( service_type: ServiceType, service_name: str, service_key_prefix: str | None - ) -> str: - find_service_in_dashboard(service_type, service_name, service_key_prefix) + ) -> dict[str, Any]: + find_and_start_service_in_dashboard( + service_type, service_name, service_key_prefix + ) expected_states = (RunningState.UNKNOWN,) if service_type is ServiceType.COMPUTATIONAL: expected_states = (RunningState.NOT_STARTED,) @@ -441,15 +453,18 @@ def _do() -> SocketIOEvent: # NOTE: Keep expect_request as an inner context. In case of timeout, we want # to know whether the POST was requested or not. 
- with log_in_and_out.expect_event( - "framereceived", - waiter, - timeout=_OUTER_CONTEXT_TIMEOUT_MS, - ) as event, page.expect_request( - lambda r: re.search(r"/computations", r.url) - and r.method.upper() == "POST", # type: ignore - timeout=_INNER_CONTEXT_TIMEOUT_MS, - ) as request_info: + with ( + log_in_and_out.expect_event( + "framereceived", + waiter, + timeout=_OUTER_CONTEXT_TIMEOUT_MS, + ) as event, + page.expect_request( + lambda r: re.search(r"/computations", r.url) + and r.method.upper() == "POST", # type: ignore + timeout=_INNER_CONTEXT_TIMEOUT_MS, + ) as request_info, + ): page.get_by_test_id("runStudyBtn").click() response = request_info.value.response() diff --git a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py index c3b84548232..f5fc42baff7 100644 --- a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py +++ b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py @@ -10,7 +10,7 @@ import re from collections.abc import Callable from dataclasses import dataclass -from typing import Final, Literal +from typing import Any, Final, Literal from playwright.sync_api import Page, WebSocket from pydantic import ByteSize @@ -65,7 +65,7 @@ def __call__(self, new_websocket: WebSocket) -> bool: def test_jupyterlab( page: Page, create_project_from_service_dashboard: Callable[ - [ServiceType, str, str | None], str + [ServiceType, str, str | None], dict[str, Any] ], service_key: str, large_file_size: ByteSize, diff --git a/tests/e2e-playwright/tests/resource_usage_tracker/resource_usage_tracker.py b/tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py similarity index 100% rename from tests/e2e-playwright/tests/resource_usage_tracker/resource_usage_tracker.py rename to tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py diff --git a/tests/e2e-playwright/tests/sim4life/sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py similarity index 100% rename from tests/e2e-playwright/tests/sim4life/sim4life.py rename to tests/e2e-playwright/tests/sim4life/test_sim4life.py diff --git a/tests/e2e-playwright/tests/sleepers/sleepers.py b/tests/e2e-playwright/tests/sleepers/test_sleepers.py similarity index 96% rename from tests/e2e-playwright/tests/sleepers/sleepers.py rename to tests/e2e-playwright/tests/sleepers/test_sleepers.py index cda5113aa8b..fb85309eb72 100644 --- a/tests/e2e-playwright/tests/sleepers/sleepers.py +++ b/tests/e2e-playwright/tests/sleepers/test_sleepers.py @@ -12,7 +12,7 @@ import logging import re from collections.abc import Callable -from typing import Final +from typing import Any, Final from packaging.version import Version from packaging.version import parse as parse_version @@ -76,13 +76,13 @@ def test_sleepers( page: Page, log_in_and_out: WebSocket, create_project_from_service_dashboard: Callable[ - [ServiceType, str, str | None], str + [ServiceType, str, str | None], dict[str, Any] ], start_and_stop_pipeline: Callable[..., SocketIOEvent], num_sleepers: int, input_sleep_time: int | None, ): - project_uuid = create_project_from_service_dashboard( + project_data = create_project_from_service_dashboard( ServiceType.COMPUTATIONAL, "sleeper", "itis" ) @@ -95,7 +95,9 @@ def test_sleepers( ), ): for _ in range(1, num_sleepers): - with page.expect_response(re.compile(rf"/projects/{project_uuid}/nodes")): + with page.expect_response( + re.compile(rf"/projects/{project_data['uuid']}/nodes") + ): 
page.get_by_test_id("newNodeBtn").click() page.get_by_placeholder("Filter").click() page.get_by_placeholder("Filter").fill("sleeper") diff --git a/tests/e2e-playwright/tests/ti_plan/conftest.py b/tests/e2e-playwright/tests/ti_plan/conftest.py deleted file mode 100644 index c8b45d3e932..00000000000 --- a/tests/e2e-playwright/tests/ti_plan/conftest.py +++ /dev/null @@ -1,24 +0,0 @@ -# pylint: disable=redefined-outer-name -import pytest - - -def pytest_addoption(parser: pytest.Parser) -> None: - group = parser.getgroup( - "oSparc e2e options", description="oSPARC-e2e specific parameters" - ) - group.addoption( - "--service-opening-waiting-timeout", - action="store", - type=int, - default=300000, # 5 mins - help="Defines a waiting timeout in milliseconds for opening a service.", - ) - - -@pytest.fixture -def service_opening_waiting_timeout(request: pytest.FixtureRequest) -> int: - service_opening_waiting_timeout = request.config.getoption( - "--service-opening-waiting-timeout" - ) - assert isinstance(service_opening_waiting_timeout, int) - return service_opening_waiting_timeout diff --git a/tests/e2e-playwright/tests/ti_plan/ti_plan.py b/tests/e2e-playwright/tests/ti_plan/ti_plan.py deleted file mode 100644 index 0c185519595..00000000000 --- a/tests/e2e-playwright/tests/ti_plan/ti_plan.py +++ /dev/null @@ -1,195 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments -# pylint: disable=too-many-statements -# pylint: disable=unnecessary-lambda - -import logging -import re -from http import HTTPStatus - -from playwright.sync_api import APIRequestContext, Page, WebSocket, expect -from pydantic import AnyUrl -from pytest_simcore.logging_utils import log_context -from pytest_simcore.playwright_utils import SocketIOOsparcMessagePrinter -from tenacity import Retrying -from tenacity.retry import retry_if_exception_type -from tenacity.stop import stop_after_attempt -from tenacity.wait import wait_fixed - - -def test_tip( - page: Page, - log_in_and_out: WebSocket, - api_request_context: APIRequestContext, - product_url: AnyUrl, - product_billable: bool, - service_opening_waiting_timeout: int, -): - handler = SocketIOOsparcMessagePrinter() - # log_in_and_out is the initial websocket - log_in_and_out.on("framereceived", handler) - - # open studies tab and filter - page.get_by_test_id("studiesTabBtn").click() - _textbox = page.get_by_test_id("searchBarFilter-textField-study") - _textbox.fill("Classic TI") - _textbox.press("Enter") - - with page.expect_response(re.compile(r"/projects/[^:]+:open")) as response_info: - page.get_by_test_id("newStudyBtn").click() - page.get_by_test_id("newTIPlanButton").click() - if product_billable: - # Open project with default resources - page.get_by_test_id("openWithResources").click() - page.wait_for_timeout(1000) - - project_data = response_info.value.json() - assert project_data - project_uuid = project_data["data"]["uuid"] - print("project uuid: ", project_uuid) - node_ids = [] - workbench = project_data["data"]["workbench"] - for node_id in workbench.keys(): - print("node_id: ", node_id) - print("key: ", workbench[node_id]["key"]) - print("version: ", workbench[node_id]["version"]) - node_ids.append(node_id) - - # let it start or force - with log_context(logging.INFO, "Starting with Electrode Selector"): - page.wait_for_timeout(5000) - start_button = page.get_by_test_id("Start_" + node_ids[0]) - if start_button.is_visible() and start_button.is_enabled(): - 
start_button.click() - - # Electrode Selector - es_page = page.frame_locator(f'[osparc-test-id="iframe_{node_ids[0]}"]') - expect(es_page.get_by_test_id("TargetStructure_Selector")).to_be_visible( - timeout=service_opening_waiting_timeout - ) - # Sometimes this iframe flicks and shows a white page. This wait will avoid it - page.wait_for_timeout(5000) - es_page.get_by_test_id("TargetStructure_Selector").click() - es_page.get_by_test_id( - "TargetStructure_Target_(Targets_combined) Hypothalamus" - ).click() - electrode_selections = [ - ["E1+", "FT9"], - ["E1-", "FT7"], - ["E2+", "T9"], - ["E2-", "T7"], - ] - for selection in electrode_selections: - group_id = "ElectrodeGroup_" + selection[0] + "_Start" - electrode_id = "Electrode_" + selection[1] - es_page.get_by_test_id(group_id).click() - es_page.get_by_test_id(electrode_id).click() - # configuration done, push output - page.wait_for_timeout(1000) - es_page.get_by_test_id("FinishSetUp").click() - page.wait_for_timeout(10000) - # check outputs - expected_outputs = ["output.json"] - text_on_output_button = f"Outputs ({len(expected_outputs)})" - page.get_by_test_id("outputsBtn").get_by_text(text_on_output_button).click() - page.wait_for_timeout(5000) - - # Move to next step - page.get_by_test_id("AppMode_NextBtn").click() - - with log_context(logging.INFO, "Continue with Classic TI"): - # let it start or force - page.wait_for_timeout(5000) - start_button = page.get_by_test_id("Start_" + node_ids[1]) - if start_button.is_visible() and start_button.is_enabled(): - start_button.click() - - # Optimal Configuration Identification - ti_page = page.frame_locator(f'[osparc-test-id="iframe_{node_ids[1]}"]') - expect(ti_page.get_by_role("button", name="Run Optimization")).to_be_visible( - timeout=service_opening_waiting_timeout - ) - run_button = ti_page.get_by_role("button", name="Run Optimization") - run_button.click() - for attempt in Retrying( - wait=wait_fixed(5), - stop=stop_after_attempt(20), # 5*20= 100 seconds - retry=retry_if_exception_type(AssertionError), - reraise=True, - ): - with attempt: - # When optimization finishes, the button changes color. 
- _run_button_style = run_button.evaluate( - "el => window.getComputedStyle(el)" - ) - assert ( - _run_button_style["backgroundColor"] == "rgb(0, 128, 0)" - ) # initial color: rgb(0, 144, 208) - - ti_page.get_by_role("button", name="Load Analysis").click() - page.wait_for_timeout(20000) - ti_page.get_by_role("button", name="Load").nth( - 1 - ).click() # Load Analysis is first - page.wait_for_timeout(20000) - ti_page.get_by_role("button", name="Add to Report (0)").nth(0).click() - page.wait_for_timeout(20000) - ti_page.get_by_role("button", name="Export to S4L").click() - page.wait_for_timeout(20000) - ti_page.get_by_role("button", name="Add to Report (1)").nth(1).click() - page.wait_for_timeout(20000) - ti_page.get_by_role("button", name="Export Report").click() - page.wait_for_timeout(20000) - # check outputs - expected_outputs = ["output_1.zip", "TIP_report.pdf", "results.csv"] - text_on_output_button = f"Outputs ({len(expected_outputs)})" - page.get_by_test_id("outputsBtn").get_by_text(text_on_output_button).click() - page.wait_for_timeout(5000) - - # Move to next step - page.get_by_test_id("AppMode_NextBtn").click() - - with log_context(logging.INFO, "Continue to Exposure Analysis"): - # let it start or force - page.wait_for_timeout(5000) - start_button = page.get_by_test_id("Start_ " + node_ids[2]) - if start_button.is_visible() and start_button.is_enabled(): - start_button.click() - - # Sim4Life PostPro - s4l_postpro_page = page.frame_locator( - f'[osparc-test-id="iframe_{node_ids[2]}"]' - ) - expect(s4l_postpro_page.get_by_test_id("mode-button-postro")).to_be_visible( - timeout=service_opening_waiting_timeout - ) - # click on the postpro mode button - s4l_postpro_page.get_by_test_id("mode-button-postro").click() - # click on the surface viewer - s4l_postpro_page.get_by_test_id("tree-item-ti_field.cache").click() - s4l_postpro_page.get_by_test_id("tree-item-SurfaceViewer").nth(0).click() - page.wait_for_timeout(5000) - - # Going back to dashboard - page.get_by_test_id("dashboardBtn").click() - page.get_by_test_id("confirmDashboardBtn").click() - page.wait_for_timeout(1000) - - # Going back to projects/studies view (In Sim4life projects:=studies) - page.get_by_test_id("studiesTabBtn").click() - page.wait_for_timeout(1000) - - # The project is closing, wait until it is closed and delete it (currently waits max=5 minutes) - for attempt in Retrying( - wait=wait_fixed(5), - stop=stop_after_attempt(60), # 5*60= 300 seconds - retry=retry_if_exception_type(AssertionError), - reraise=True, - ): - with attempt: - resp = api_request_context.delete( - f"{product_url}v0/projects/{project_uuid}" - ) - assert resp.status == HTTPStatus.NO_CONTENT diff --git a/tests/e2e-playwright/tests/tip/conftest.py b/tests/e2e-playwright/tests/tip/conftest.py new file mode 100644 index 00000000000..f7e3b928520 --- /dev/null +++ b/tests/e2e-playwright/tests/tip/conftest.py @@ -0,0 +1,39 @@ +# pylint: disable=redefined-outer-name +import logging +from collections.abc import Callable +from typing import Any + +import pytest +from playwright.sync_api import Page +from pytest_simcore.logging_utils import log_context +from pytest_simcore.playwright_utils import RunningState + + +@pytest.fixture +def find_and_start_tip_plan_in_dashboard( + page: Page, +) -> Callable[[str], None]: + def _( + plan_name_test_id: str, + ) -> None: + with log_context(logging.INFO, f"Finding {plan_name_test_id=} in dashboard"): + page.get_by_test_id("studiesTabBtn").click() + page.get_by_test_id("newStudyBtn").click() + 
page.get_by_test_id(plan_name_test_id).click() + + return _ + + +@pytest.fixture +def create_tip_plan_from_dashboard( + find_and_start_tip_plan_in_dashboard: Callable[[str], None], + create_new_project_and_delete: Callable[ + [tuple[RunningState], bool], dict[str, Any] + ], +) -> Callable[[str], dict[str, Any]]: + def _(plan_name_test_id: str) -> dict[str, Any]: + find_and_start_tip_plan_in_dashboard(plan_name_test_id) + expected_states = (RunningState.UNKNOWN,) + return create_new_project_and_delete(expected_states, press_open=False) + + return _ diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py new file mode 100644 index 00000000000..c135971e85b --- /dev/null +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -0,0 +1,229 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unnecessary-lambda +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import contextlib +import json +import logging +import re +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any, Final + +from playwright.sync_api import Page, WebSocket +from pytest_simcore.logging_utils import log_context +from pytest_simcore.playwright_utils import ( + MINUTE, + SECOND, + app_mode_trigger_next_app, + wait_for_service_running, +) + +_GET_NODE_OUTPUTS_REQUEST_PATTERN: Final[re.Pattern[str]] = re.compile( + r"/storage/locations/[^/]+/files" +) +_OUTER_EXPECT_TIMEOUT_RATIO: Final[float] = 1.1 +_EC2_STARTUP_MAX_WAIT_TIME: Final[int] = 1 * MINUTE + +_ELECTRODE_SELECTOR_MAX_STARTUP_TIME: Final[int] = 1 * MINUTE +_ELECTRODE_SELECTOR_DOCKER_PULLING_MAX_TIME: Final[int] = 3 * MINUTE +_ELECTRODE_SELECTOR_BILLABLE_MAX_STARTUP_TIME: Final[int] = ( + _EC2_STARTUP_MAX_WAIT_TIME + + _ELECTRODE_SELECTOR_DOCKER_PULLING_MAX_TIME + + _ELECTRODE_SELECTOR_MAX_STARTUP_TIME +) +_ELECTRODE_SELECTOR_FLICKERING_WAIT_TIME: Final[int] = 5 * SECOND + + +_JLAB_MAX_STARTUP_MAX_TIME: Final[int] = 3 * MINUTE +_JLAB_DOCKER_PULLING_MAX_TIME: Final[int] = 12 * MINUTE +_JLAB_BILLABLE_MAX_STARTUP_TIME: Final[int] = ( + _EC2_STARTUP_MAX_WAIT_TIME + + _JLAB_DOCKER_PULLING_MAX_TIME + + _JLAB_MAX_STARTUP_MAX_TIME +) +_JLAB_RUN_OPTIMIZATION_APPEARANCE_TIME: Final[int] = 1 * MINUTE +_JLAB_RUN_OPTIMIZATION_MAX_TIME: Final[int] = 1 * MINUTE +_JLAB_REPORTING_MAX_TIME: Final[int] = 20 * SECOND + + +_POST_PRO_MAX_STARTUP_TIME: Final[int] = 2 * MINUTE +_POST_PRO_DOCKER_PULLING_MAX_TIME: Final[int] = 12 * MINUTE +_POST_PRO_BILLABLE_MAX_STARTUP_TIME: Final[int] = ( + _EC2_STARTUP_MAX_WAIT_TIME + + _POST_PRO_DOCKER_PULLING_MAX_TIME + + _POST_PRO_MAX_STARTUP_TIME +) + + +@dataclass +class _JLabWaitForWebSocket: + def __call__(self, new_websocket: WebSocket) -> bool: + with log_context(logging.DEBUG, msg=f"received {new_websocket=}"): + if re.search(r"/api/kernels/[^/]+/channels", new_websocket.url): + return True + return False + + +@dataclass +class _JLabWebSocketWaiter: + expected_header_msg_type: str + expected_message_contents: str + + def __call__(self, message: str) -> bool: + with log_context(logging.DEBUG, msg=f"handling websocket {message=}"): + with contextlib.suppress(json.JSONDecodeError, UnicodeDecodeError): + decoded_message = json.loads(message) + msg_type: str = decoded_message.get("header", {}).get("msg_type", "") + msg_contents: str = decoded_message.get("content", {}).get("text", "") + if (msg_type == self.expected_header_msg_type) and ( + 
self.expected_message_contents in msg_contents + ): + return True + + return False + + +def test_tip( # noqa: PLR0915 + page: Page, + create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]], + log_in_and_out: WebSocket, + autoscaled: bool, +): + project_data = create_tip_plan_from_dashboard("newTIPlanButton") + assert "workbench" in project_data, "Expected workbench to be in project data!" + assert isinstance( + project_data["workbench"], dict + ), "Expected workbench to be a dict!" + node_ids: list[str] = list(project_data["workbench"]) + assert len(node_ids) >= 3, "Expected at least 3 nodes in the workbench!" + + with log_context(logging.INFO, "Electrode Selector step") as ctx: + electrode_selector_iframe = wait_for_service_running( + page=page, + node_id=node_ids[0], + websocket=log_in_and_out, + timeout=( + _ELECTRODE_SELECTOR_BILLABLE_MAX_STARTUP_TIME + if autoscaled + else _ELECTRODE_SELECTOR_MAX_STARTUP_TIME + ), # NOTE: this is actually not quite correct as we have billable product that do not autoscale + ) + # NOTE: Sometimes this iframe flicks and shows a white page. This wait will avoid it + page.wait_for_timeout(_ELECTRODE_SELECTOR_FLICKERING_WAIT_TIME) + + with log_context(logging.INFO, "Configure selector"): + electrode_selector_iframe.get_by_test_id("TargetStructure_Selector").click() + electrode_selector_iframe.get_by_test_id( + "TargetStructure_Target_(Targets_combined) Hypothalamus" + ).click() + electrode_selections = [ + ["E1+", "FT9"], + ["E1-", "FT7"], + ["E2+", "T9"], + ["E2-", "T7"], + ] + for selection in electrode_selections: + group_id = "ElectrodeGroup_" + selection[0] + "_Start" + electrode_id = "Electrode_" + selection[1] + electrode_selector_iframe.get_by_test_id(group_id).click() + electrode_selector_iframe.get_by_test_id(electrode_id).click() + # configuration done, push and wait for output + with ( + log_context(logging.INFO, "Check outputs"), + page.expect_request( + lambda r: bool( + re.search(_GET_NODE_OUTPUTS_REQUEST_PATTERN, r.url) + and r.method.upper() == "GET" + ) + ) as request_info, + ): + electrode_selector_iframe.get_by_test_id("FinishSetUp").click() + response = request_info.value.response() + assert response + assert response.ok, f"{response.json()}" + response_body = response.json() + ctx.logger.info("the following output was generated: %s", response_body) + + with log_context(logging.INFO, "Classic TI step") as ctx: + with page.expect_websocket( + _JLabWaitForWebSocket(), + timeout=_OUTER_EXPECT_TIMEOUT_RATIO + * ( + _JLAB_BILLABLE_MAX_STARTUP_TIME + if autoscaled + else _JLAB_MAX_STARTUP_MAX_TIME + ), + ) as ws_info: + # NOTE: separated calls, but dangerous as we could miss some socket event (which is highly unlikely though as the calls are one after the other) + app_mode_trigger_next_app(page) + ti_iframe = wait_for_service_running( + page=page, + node_id=node_ids[1], + websocket=log_in_and_out, + timeout=( + _JLAB_BILLABLE_MAX_STARTUP_TIME + if autoscaled + else _JLAB_MAX_STARTUP_MAX_TIME + ), # NOTE: this is actually not quite correct as we have billable product that do not autoscale + ) + jlab_websocket = ws_info.value + + with ( + log_context(logging.INFO, "Run optimization"), + jlab_websocket.expect_event( + "framereceived", + _JLabWebSocketWaiter( + expected_header_msg_type="stream", + expected_message_contents="All results evaluated", + ), + timeout=_JLAB_RUN_OPTIMIZATION_MAX_TIME + + _JLAB_RUN_OPTIMIZATION_APPEARANCE_TIME, + ), + ): + ti_iframe.get_by_role("button", name="Run Optimization").click( + 
timeout=_JLAB_RUN_OPTIMIZATION_APPEARANCE_TIME + ) + + with log_context(logging.INFO, "Create report"): + ti_iframe.get_by_role("button", name="Load Analysis").click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Load").nth(1).click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Add to Report (0)").nth(0).click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Export to S4L").click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Add to Report (1)").nth(1).click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Export Report").click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + + with log_context(logging.INFO, "Check outputs"): + expected_outputs = ["output_1.zip", "TIP_report.pdf", "results.csv"] + text_on_output_button = f"Outputs ({len(expected_outputs)})" + page.get_by_test_id("outputsBtn").get_by_text(text_on_output_button).click() + + with log_context(logging.INFO, "Exposure Analysis step"): + # NOTE: separated calls, but dangerous as we could miss some socket event (which is highly unlikely though as the calls are one after the other) + app_mode_trigger_next_app(page) + s4l_postpro_iframe = wait_for_service_running( + page=page, + node_id=node_ids[2], + websocket=log_in_and_out, + timeout=( + _POST_PRO_BILLABLE_MAX_STARTUP_TIME + if autoscaled + else _POST_PRO_MAX_STARTUP_TIME + ), # NOTE: this is actually not quite correct as we have billable product that do not autoscale + ) + + with log_context(logging.INFO, "Post process"): + # click on the postpro mode button + s4l_postpro_iframe.get_by_test_id("mode-button-postro").click() + # click on the surface viewer + s4l_postpro_iframe.get_by_test_id("tree-item-ti_field.cache").click() + s4l_postpro_iframe.get_by_test_id("tree-item-SurfaceViewer").nth(0).click() From b136bac43ba91d19a72f6bbc0dac37e75badcbcc Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Thu, 20 Jun 2024 11:57:47 +0200 Subject: [PATCH 050/219] =?UTF-8?q?=F0=9F=94=A8=20Enhance=20diagnostics=20?= =?UTF-8?q?info=20in=20e2e=20testing=20(#5962)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/README.md | 17 ++++-- tests/e2e-playwright/tests/conftest.py | 77 +++++++++++++++++++++++++- 2 files changed, 85 insertions(+), 9 deletions(-) diff --git a/tests/e2e-playwright/README.md b/tests/e2e-playwright/README.md index 73111eb7ecc..564fa44e474 100644 --- a/tests/e2e-playwright/README.md +++ b/tests/e2e-playwright/README.md @@ -1,14 +1,19 @@ -Auto generate new test +### Auto generate new test `playwright codegen sim4life.io` -Run test locally with headed mode -`pytest -s tests/sim4life.py --headed --browser chromium --product-billable --product-url https://sim4life.io/ --user-name YOUR_USERNAME --password YOUR_PASSWORD --service-key simcore/services/dynamic/sim4life-8-0-0-dy --service-test-id studyBrowserListItem_simcore/services/dynamic/sim4life-8-0-0-dy` +### Run test locally with headed mode +``` +pytest -s tests/sim4life.py --headed --browser chromium --product-billable --product-url https://sim4life.io/ --user-name YOUR_USERNAME --password YOUR_PASSWORD --service-key simcore/services/dynamic/sim4life-8-0-0-dy --service-test-id studyBrowserListItem_simcore/services/dynamic/sim4life-8-0-0-dy +``` -Check test results output +### Check test 
results output `playwright show-trace test-results/tests-sim4life-py-test-billable-sim4life-chromium/trace.zip` -Run debug mode +### Run debug mode `PWDEBUG=1 pytest -s tests/sim4life.py` -Run test in different browsers +### Run test in different browsers `pytest -s tests/sim4life.py --tracing on --html=report.html --browser chromium --browser firefox` + +### Runs in CI + - https://git.speag.com/oSparc/e2e-backend diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index 7b5c16da1e6..0dd4ce871ef 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -1,10 +1,11 @@ +# pylint: disable=no-name-in-module # pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable # pylint: disable=too-many-arguments # pylint: disable=too-many-statements -# pylint: disable=no-name-in-module +# pylint: disable=unused-argument +# pylint: disable=unused-variable +import datetime import json import logging import os @@ -14,11 +15,13 @@ from contextlib import ExitStack from typing import Any, Final +import arrow import pytest from faker import Faker from playwright.sync_api import APIRequestContext, BrowserContext, Page, WebSocket from playwright.sync_api._generated import Playwright from pydantic import AnyUrl, TypeAdapter +from pytest import Item from pytest_simcore.logging_utils import log_context from pytest_simcore.playwright_utils import ( MINUTE, @@ -100,6 +103,74 @@ def pytest_addoption(parser: pytest.Parser) -> None: ) +# Dictionary to store start times of tests +_test_start_times = {} + + +def pytest_runtest_setup(item): + """ + Hook to capture the start time of each test. + """ + _test_start_times[item.name] = arrow.now().datetime + + +_FORMAT: Final = "%Y-%m-%dT%H:%M:%S.%fZ" + + +def _construct_graylog_url( + product_url: str | None, start_time: datetime.datetime, end_time: datetime.datetime +) -> str: + # Deduce monitoring url + if product_url: + scheme, tail = product_url.split("://", 1) + else: + scheme, tail = "https", "" + monitoring_url = f"{scheme}://monitoring.{tail}" + + # build graylog URL + query = f"from={start_time.strftime(_FORMAT)}&to={end_time.strftime(_FORMAT)}" + return f"{monitoring_url}/graylog/search?{query}" + + +def pytest_runtest_makereport(item: Item, call): + """ + Hook to add extra information when a test fails. 
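+
+    On failure of the "call" phase, a small diagnostics dict is built with the
+    test name, its location and the product URL; if pytest_runtest_setup has
+    recorded a start time, the test duration and a Graylog URL covering the
+    start-to-end window are added, and the dict is printed as indented JSON.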
+ """ + + # Check if the test failed + if call.when == "call" and call.excinfo is not None: + test_name = item.name + test_location = item.location + product_url = item.config.getoption("--product-url", default=None) + + diagnostics = { + "test_name": test_name, + "test_location": test_location, + "product_url": product_url, + } + + # Get the start and end times of the test + start_time = _test_start_times.get(test_name) + end_time = arrow.now().datetime + + if start_time: + diagnostics["graylog_url"] = _construct_graylog_url( + product_url, start_time, end_time + ) + diagnostics["duration"] = str(end_time - start_time) + + # Print the diagnostics report + print(f"\nDiagnostics repoort for {test_name} ---") + print(json.dumps(diagnostics, indent=2)) + print("---") + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config): + config.pluginmanager.register(pytest_runtest_setup, "osparc_test_times_plugin") + config.pluginmanager.register(pytest_runtest_makereport, "osparc_makereport_plugin") + + @pytest.fixture(autouse=True) def osparc_test_id_attribute(playwright: Playwright) -> None: # Set a custom test id attribute From 5b226d15610113f3b63d34bc87ace6f43e059db8 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 20 Jun 2024 14:16:13 +0200 Subject: [PATCH 051/219] =?UTF-8?q?=F0=9F=90=9BE2E:=20--product-url=20is?= =?UTF-8?q?=20an=20url=20not=20a=20string=20(#5972)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index 0dd4ce871ef..afee65541f5 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -141,7 +141,7 @@ def pytest_runtest_makereport(item: Item, call): if call.when == "call" and call.excinfo is not None: test_name = item.name test_location = item.location - product_url = item.config.getoption("--product-url", default=None) + product_url = f"{item.config.getoption('--product-url', default=None)}" diagnostics = { "test_name": test_name, From a0c5c80417f0d0623c8bffc7f7d589741d6205c0 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 20 Jun 2024 14:51:46 +0200 Subject: [PATCH 052/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Team=20Blac?= =?UTF-8?q?k=20feedback=2020.06=20(#5971)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/desktop/WorkbenchView.js | 5 ++++- .../client/source/class/osparc/file/FilePicker.js | 10 +--------- .../source/class/osparc/form/renderer/PropForm.js | 3 ++- .../client/source/class/osparc/info/StudyUtils.js | 3 ++- .../osparc/notification/NotificationsContainer.js | 2 +- .../source/class/osparc/workbench/WorkbenchUI.js | 4 ++-- 6 files changed, 12 insertions(+), 15 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index f8b76a18be6..1945a9742c9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -985,7 +985,10 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.getChildControl("side-panel-right-tabs").setSelection([this.__serviceOptionsPage]); const spacing = 8; - 
const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(spacing*2)); + const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox().set({ + separator: "separator-vertical", + spacing: spacing*2 + })); // INPUTS FORM if (node.isPropertyInitialized("propsForm") && node.getPropsForm()) { diff --git a/services/static-webserver/client/source/class/osparc/file/FilePicker.js b/services/static-webserver/client/source/class/osparc/file/FilePicker.js index 573d599f6ae..fe6fd53cd5e 100644 --- a/services/static-webserver/client/source/class/osparc/file/FilePicker.js +++ b/services/static-webserver/client/source/class/osparc/file/FilePicker.js @@ -464,15 +464,7 @@ qx.Class.define("osparc.file.FilePicker", { }, __buildAppModeLayout: function() { - let msg = this.tr("In order to Select a File you have three options:"); - const options = [ - this.tr("- Upload a New File"), - this.tr("- Provide a File Link"), - this.tr("- Select a File from other ") + osparc.product.Utils.getStudyAlias() - ]; - for (let i=0; i this.__addInputPortButtonClicked()); this._add(addPortButton, { diff --git a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js index 2b0fc11524a..518e52d1447 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js @@ -190,7 +190,8 @@ qx.Class.define("osparc.info.StudyUtils", { const cb = new qx.ui.form.CheckBox().set({ value: "disableServiceAutoStart" in devObj ? !devObj["disableServiceAutoStart"] : true, label: qx.locale.Manager.tr("Autostart services"), - toolTipText: qx.locale.Manager.tr("This will help opening and closing studies faster"), + font: "text-14", + toolTipText: qx.locale.Manager.tr("Disabling this will help opening and closing studies/projects faster"), iconPosition: "right" }); cb.addListener("changeValue", e => { diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js index 7cded74b4b0..dd9e340a490 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js @@ -29,6 +29,7 @@ qx.Class.define("osparc.notification.NotificationsContainer", { maxHeight: 250, backgroundColor: "background-main-3" }); + osparc.utils.Utils.setIdToWidget(this, "notificationsContainer"); const root = qx.core.Init.getApplication().getRoot(); root.add(this, { @@ -37,7 +38,6 @@ qx.Class.define("osparc.notification.NotificationsContainer", { }); const notificationsContainer = this.__container = new qx.ui.container.Composite(new qx.ui.layout.VBox(1)); - osparc.utils.Utils.setIdToWidget(notificationsContainer, "notificationsContainer"); const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(notificationsContainer); this._add(scrollContainer, { diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index 0aae5eec4a9..6785803e02b 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -56,7 +56,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, statics: { - 
getDashedBorderSytle(isRight) { + getDashedBorderStyle(isRight) { const side = isRight ? "right" : "left"; const borderStyle = {}; borderStyle["background-image"] = `linear-gradient(to bottom, #3D3D3D 50%, rgba(255, 255, 255, 0) 0%)`; @@ -273,7 +273,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { allowGrowX: false, padding: [0, 6] }); - inputOutputNodesLayout.getContentElement().setStyles(this.self().getDashedBorderSytle(isInput)); + inputOutputNodesLayout.getContentElement().setStyles(this.self().getDashedBorderStyle(isInput)); const title = new qx.ui.basic.Label(label).set({ alignX: "center", margin: [15, 0], From e08c73eba6cd1490601d20838dc8598944a595e7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Thu, 20 Jun 2024 15:44:13 +0200 Subject: [PATCH 053/219] =?UTF-8?q?=F0=9F=8E=A8=20=F0=9F=91=BD=EF=B8=8F=20?= =?UTF-8?q?Improve=20start=20job=20endpoint=20in=20webserver=20and=20impro?= =?UTF-8?q?ve=20error=20handling=20in=20api-server=20(#5927)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/launch.template.json | 15 ++ api/specs/web-server/_computations.py | 19 +++ services/api-server/VERSION | 2 +- services/api-server/openapi.json | 134 ++++++++++++++++-- services/api-server/setup.cfg | 2 +- .../api/routes/solvers_jobs.py | 51 +++++-- .../api/routes/studies_jobs.py | 55 +++++-- .../exceptions/backend_errors.py | 100 +++++++++++++ .../exceptions/handlers/__init__.py | 3 + .../handlers/_handlers_backend_errors.py | 12 ++ .../exceptions/service_errors_utils.py | 21 +-- .../services/catalog.py | 31 +--- .../services/director_v2.py | 40 ++---- .../services/solver_job_outputs.py | 18 ++- .../services/webserver.py | 92 ++++++------ .../tests/mocks/start_solver_job.json | 80 +++++++++++ services/api-server/tests/unit/.swp | Bin 0 -> 12288 bytes .../api_solvers/test_api_routers_solvers.py | 3 +- .../test_api_routers_solvers_jobs.py | 13 +- .../test_api_routers_solvers_jobs_delete.py | 9 +- .../test_api_routers_solvers_jobs_logs.py | 3 +- .../test_api_routers_solvers_jobs_metadata.py | 13 +- .../test_api_routers_solvers_jobs_read.py | 5 +- .../test_api_routers_studies_jobs_metadata.py | 13 +- .../api_studies/test_api_routes_studies.py | 21 +-- .../test_api_routes_studies_jobs.py | 2 +- .../api-server/tests/unit/test__fastapi.py | 15 +- .../tests/unit/test_api__study_workflows.py | 4 +- .../tests/unit/test_api_solver_jobs.py | 44 +++++- .../api-server/tests/unit/test_exceptions.py | 11 +- services/director-v2/openapi.json | 24 ++-- .../api/routes/computations.py | 22 ++- .../integration/01/test_computation_api.py | 2 +- .../with_dbs/test_api_route_computations.py | 4 +- .../api/v0/openapi.yaml | 12 ++ 35 files changed, 662 insertions(+), 233 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py create mode 100644 services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py create mode 100644 services/api-server/tests/mocks/start_solver_job.json create mode 100644 services/api-server/tests/unit/.swp diff --git a/.vscode/launch.template.json b/.vscode/launch.template.json index d3c712d6595..6e74bb461fa 100644 --- a/.vscode/launch.template.json +++ b/.vscode/launch.template.json @@ -171,6 +171,21 @@ } ] }, + { + "name": "Python: Remote Attach wb-api-server", + "type": "debugpy", + "request": "attach", + "connect": { + "port": 3019, + "host": "127.0.0.1" + }, + "pathMappings": [ + { + 
"localRoot": "${workspaceFolder}", + "remoteRoot": "/devel" + } + ] + }, { "name": "Python: Remote Attach webserver-garbage-collector", "type": "debugpy", diff --git a/api/specs/web-server/_computations.py b/api/specs/web-server/_computations.py index 61e937080dd..2458f499d54 100644 --- a/api/specs/web-server/_computations.py +++ b/api/specs/web-server/_computations.py @@ -28,6 +28,25 @@ async def get_computation(project_id: ProjectID): @router.post( "/computations/{project_id}:start", response_model=Envelope[_ComputationStarted], + responses={ + status.HTTP_404_NOT_FOUND: { + "description": "Project/wallet/pricing details not found" + }, + status.HTTP_402_PAYMENT_REQUIRED: { + "description": "Insufficient osparc credits" + }, + status.HTTP_406_NOT_ACCEPTABLE: { + "description": "Cluster not found", + }, + status.HTTP_503_SERVICE_UNAVAILABLE: { + "description": "Service not available", + }, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Configuration error", + }, + status.HTTP_402_PAYMENT_REQUIRED: {"description": "Payment required"}, + status.HTTP_409_CONFLICT: {"description": "Project already started"}, + }, ) async def start_computation(project_id: ProjectID, _start: ComputationStart): ... diff --git a/services/api-server/VERSION b/services/api-server/VERSION index 4b9fcbec101..a918a2aa18d 100644 --- a/services/api-server/VERSION +++ b/services/api-server/VERSION @@ -1 +1 @@ -0.5.1 +0.6.0 diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index 7989d4a9c48..60b80ff2314 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -1925,7 +1925,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -2092,7 +2092,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -2220,7 +2220,7 @@ } ], "responses": { - "200": { + "202": { "description": "Successful Response", "content": { "application/json": { @@ -2241,7 +2241,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -2300,12 +2300,32 @@ } } }, + "200": { + "description": "Job already started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JobStatus" + } + } + } + }, + "406": { + "description": "Cluster not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { - "description": "Validation Error", + "description": "Configuration error", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/HTTPValidationError" + "$ref": "#/components/schemas/ErrorGet" } } } @@ -2379,7 +2399,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -2517,7 +2537,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -2656,7 +2676,7 @@ } }, "404": { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "content": { "application/json": { "schema": { @@ -3585,7 +3605,7 @@ } ], "responses": { - "200": { + "202": { "description": "Successful Response", "content": { "application/json": { @@ -3595,12 +3615,102 
@@ } } }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "404": { + "description": "Job/wallet/pricing details not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "200": { + "description": "Job already started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JobStatus" + } + } + } + }, + "406": { + "description": "Cluster not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { - "description": "Validation Error", + "description": "Configuration error", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/HTTPValidationError" + "$ref": "#/components/schemas/ErrorGet" } } } diff --git a/services/api-server/setup.cfg b/services/api-server/setup.cfg index 877b4c5aeca..81ab555ab95 100644 --- a/services/api-server/setup.cfg +++ b/services/api-server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.5.1 +current_version = 0.6.0 commit = True message = services/api-server version: {current_version} → {new_version} tag = False diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 82a461d91e7..baab5a40fef 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -5,11 +5,16 @@ from typing import Annotated, Any from fastapi import APIRouter, Depends, Header, Query, Request, status +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet from models_library.clusters import ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from pydantic.types import PositiveInt +from simcore_service_api_server.exceptions.backend_errors import ( + ProjectAlreadyStartedError, +) from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES from ...models.basic_types import VersionStr @@ -67,7 +72,7 @@ def _compose_job_resource_name(solver_key, solver_version, job_id) -> str: "model": ErrorGet, }, status.HTTP_404_NOT_FOUND: { - "description": "Job not found", + "description": "Job/wallet/pricing details not found", "model": ErrorGet, }, } | DEFAULT_BACKEND_SERVICE_STATUS_CODES @@ -157,8 +162,23 @@ 
async def delete_job( @router.post( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}:start", + status_code=status.HTTP_202_ACCEPTED, response_model=JobStatus, - responses=JOBS_STATUS_CODES, + responses=JOBS_STATUS_CODES + | { + status.HTTP_200_OK: { + "description": "Job already started", + "model": JobStatus, + }, + status.HTTP_406_NOT_ACCEPTABLE: { + "description": "Cluster not found", + "model": ErrorGet, + }, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Configuration error", + "model": ErrorGet, + }, + }, ) async def start_job( request: Request, @@ -173,18 +193,31 @@ async def start_job( """Starts job job_id created with the solver solver_key:version New in *version 0.4.3*: cluster_id + New in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation """ job_name = _compose_job_resource_name(solver_key, version, job_id) _logger.debug("Start Job '%s'", job_name) - await start_project( - request=request, - job_id=job_id, - expected_job_name=job_name, - webserver_api=webserver_api, - cluster_id=cluster_id, - ) + try: + await start_project( + request=request, + job_id=job_id, + expected_job_name=job_name, + webserver_api=webserver_api, + cluster_id=cluster_id, + ) + except ProjectAlreadyStartedError: + job_status = await inspect_job( + solver_key=solver_key, + version=version, + job_id=job_id, + user_id=user_id, + director2_api=director2_api, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, content=jsonable_encoder(job_status) + ) return await inspect_job( solver_key=solver_key, version=version, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index e320214867d..d48e67716e2 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -3,7 +3,8 @@ from typing import Annotated from fastapi import APIRouter, Depends, Header, Query, Request, status -from fastapi.responses import RedirectResponse +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse, RedirectResponse from models_library.api_schemas_webserver.projects import ProjectName, ProjectPatch from models_library.api_schemas_webserver.projects_nodes import NodeOutputs from models_library.clusters import ClusterID @@ -13,6 +14,10 @@ from models_library.projects_nodes_io import NodeID from pydantic import PositiveInt from servicelib.logging_utils import log_context +from simcore_service_api_server.api.routes.solvers_jobs import JOBS_STATUS_CODES +from simcore_service_api_server.exceptions.backend_errors import ( + ProjectAlreadyStartedError, +) from ...api.dependencies.authentication import get_current_user_id from ...api.dependencies.services import get_api_client @@ -181,7 +186,23 @@ async def delete_study_job( @router.post( "/{study_id:uuid}/jobs/{job_id:uuid}:start", + status_code=status.HTTP_202_ACCEPTED, response_model=JobStatus, + responses=JOBS_STATUS_CODES + | { + status.HTTP_200_OK: { + "description": "Job already started", + "model": JobStatus, + }, + status.HTTP_406_NOT_ACCEPTABLE: { + "description": "Cluster not found", + "model": ErrorGet, + }, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Configuration error", + "model": ErrorGet, + }, + }, ) async def start_study_job( request: Request, @@ -191,17 +212,31 @@ async def start_study_job( webserver_api: Annotated[AuthSession, 
Depends(get_webserver_session)], director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], cluster_id: ClusterID | None = None, -) -> JobStatus: +): + """ + New in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation + """ job_name = _compose_job_resource_name(study_id, job_id) with log_context(_logger, logging.DEBUG, f"Starting Job '{job_name}'"): - await start_project( - request=request, - job_id=job_id, - expected_job_name=job_name, - webserver_api=webserver_api, - cluster_id=cluster_id, - ) - job_status: JobStatus = await inspect_study_job( + try: + await start_project( + request=request, + job_id=job_id, + expected_job_name=job_name, + webserver_api=webserver_api, + cluster_id=cluster_id, + ) + except ProjectAlreadyStartedError: + job_status: JobStatus = await inspect_study_job( + study_id=study_id, + job_id=job_id, + user_id=user_id, + director2_api=director2_api, + ) + return JSONResponse( + content=jsonable_encoder(job_status), status_code=status.HTTP_200_OK + ) + job_status = await inspect_study_job( study_id=study_id, job_id=job_id, user_id=user_id, diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py new file mode 100644 index 00000000000..ca5d0711434 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py @@ -0,0 +1,100 @@ +from fastapi import status + +from ._base import ApiServerBaseError + + +class BaseBackEndError(ApiServerBaseError): + """status_code: the default return status which will be returned to the client calling the + api-server (in case this exception is raised)""" + + status_code = status.HTTP_502_BAD_GATEWAY + + +class ListSolversOrStudiesError(BaseBackEndError): + msg_template = "Cannot list solvers/studies" + status_code = status.HTTP_404_NOT_FOUND + + +class ListJobsError(BaseBackEndError): + msg_template = "Cannot list jobs" + status_code = status.HTTP_404_NOT_FOUND + + +class PaymentRequiredError(BaseBackEndError): + msg_template = "Payment required" + status_code = status.HTTP_402_PAYMENT_REQUIRED + + +class ProfileNotFoundError(BaseBackEndError): + msg_template = "Profile not found" + status_code = status.HTTP_404_NOT_FOUND + + +class SolverOrStudyNotFoundError(BaseBackEndError): + msg_template = "Could not get solver/study {name}:{version}" + status_code = status.HTTP_404_NOT_FOUND + + +class JobNotFoundError(BaseBackEndError): + msg_template = "Could not get solver/study job {project_id}" + status_code = status.HTTP_404_NOT_FOUND + + +class LogFileNotFoundError(BaseBackEndError): + msg_template = "Could not get logfile for solver/study job {project_id}" + status_code = status.HTTP_404_NOT_FOUND + + +class SolverOutputNotFoundError(BaseBackEndError): + msg_template = "Solver output of project {project_uuid} not found" + status_code = status.HTTP_404_NOT_FOUND + + +class ClusterNotFoundError(BaseBackEndError): + msg_template = "Cluster not found" + status_code = status.HTTP_406_NOT_ACCEPTABLE + + +class ConfigurationError(BaseBackEndError): + msg_template = "Configuration error" + status_code = status.HTTP_422_UNPROCESSABLE_ENTITY + + +class ProductPriceNotFoundError(BaseBackEndError): + msg_template = "Product price not found" + status_code = status.HTTP_404_NOT_FOUND + + +class WalletNotFoundError(BaseBackEndError): + msg_template = "Wallet not found" + status_code = status.HTTP_404_NOT_FOUND + + +class 
ForbiddenWalletError(BaseBackEndError): + msg_template = "User does not have access to wallet" + status_code = status.HTTP_403_FORBIDDEN + + +class ProjectPortsNotFoundError(BaseBackEndError): + msg_template = "The ports for the job/study {project_id} could not be found" + status_code = status.HTTP_404_NOT_FOUND + + +class ProjectMetadataNotFoundError(BaseBackEndError): + msg_template = "The metadata for the job/study {project_id} could not be found" + status_code = status.HTTP_404_NOT_FOUND + + +class PricingUnitNotFoundError(BaseBackEndError): + msg_template = "The pricing unit could not be found" + status_code = status.HTTP_404_NOT_FOUND + + +class PricingPlanNotFoundError(BaseBackEndError): + msg_template = "The pricing plan could not be found" + status_code = status.HTTP_404_NOT_FOUND + + +class ProjectAlreadyStartedError(BaseBackEndError): + msg_template = "Project already started" + status_code = status.HTTP_200_OK diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index 0814906fb8f..17b2bb54644 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,12 +1,14 @@ from fastapi import FastAPI from fastapi.exceptions import RequestValidationError from httpx import HTTPError as HttpxException +from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError from starlette import status from starlette.exceptions import HTTPException from ..custom_errors import CustomBaseError from ..log_streaming_errors import LogStreamingBaseError from ._custom_errors import custom_error_handler +from ._handlers_backend_errors import backend_error_handler from ._handlers_factory import make_handler_for_exception from ._http_exceptions import http_exception_handler from ._httpx_client_exceptions import handle_httpx_client_exceptions @@ -22,6 +24,7 @@ def setup(app: FastAPI, *, is_debug: bool = False): app.add_exception_handler(RequestValidationError, http422_error_handler) app.add_exception_handler(LogStreamingBaseError, log_handling_error_handler) app.add_exception_handler(CustomBaseError, custom_error_handler) + app.add_exception_handler(BaseBackEndError, backend_error_handler) # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy app.add_exception_handler( diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py new file mode 100644 index 00000000000..e1dc19c26ea --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py @@ -0,0 +1,12 @@ +from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError +from starlette.requests import Request +from starlette.responses import JSONResponse + +from ._utils import create_error_json_response + + +async def backend_error_handler( + request: Request, exc: BaseBackEndError +) -> JSONResponse: + assert request # nosec + return create_error_json_response(f"{exc}", status_code=exc.status_code) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/service_errors_utils.py b/services/api-server/src/simcore_service_api_server/exceptions/service_errors_utils.py index f71e15cd6f4..3f521332ccf 100644 --- 
a/services/api-server/src/simcore_service_api_server/exceptions/service_errors_utils.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/service_errors_utils.py @@ -2,11 +2,12 @@ from collections.abc import Callable, Mapping from contextlib import contextmanager from functools import wraps -from typing import Any, NamedTuple, TypeAlias +from typing import Any, NamedTuple, TypeAlias, TypeVar import httpx from fastapi import HTTPException, status from pydantic import ValidationError +from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError from ..models.schemas.errors import ErrorGet @@ -48,7 +49,8 @@ class ToApiTuple(NamedTuple): # service to public-api status maps -HttpStatusMap: TypeAlias = Mapping[ServiceHTTPStatus, ToApiTuple] +E = TypeVar("E", bound=BaseBackEndError) +HttpStatusMap: TypeAlias = Mapping[ServiceHTTPStatus, E] def _get_http_exception_kwargs( @@ -60,18 +62,9 @@ def _get_http_exception_kwargs( detail: str = "" headers: dict[str, str] = {} - if mapped := http_status_map.get(service_error.response.status_code): - in_api = ToApiTuple(*mapped) - status_code = in_api.status_code - if in_api.detail: - if callable(in_api.detail): - detail = f"{in_api.detail(detail_kwargs)}." - else: - detail = in_api.detail - else: - detail = f"{service_error}." - - elif service_error.response.status_code in { + if exception_type := http_status_map.get(service_error.response.status_code): + raise exception_type(**detail_kwargs) + if service_error.response.status_code in { status.HTTP_429_TOO_MANY_REQUESTS, status.HTTP_503_SERVICE_UNAVAILABLE, status.HTTP_504_GATEWAY_TIMEOUT, diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py index 3c9bf8f886f..3f3e0111eec 100644 --- a/services/api-server/src/simcore_service_api_server/services/catalog.py +++ b/services/api-server/src/simcore_service_api_server/services/catalog.py @@ -11,6 +11,10 @@ from models_library.services import ServiceMetaDataPublished, ServiceType from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as from settings_library.catalog import CatalogSettings +from simcore_service_api_server.exceptions.backend_errors import ( + ListSolversOrStudiesError, + SolverOrStudyNotFoundError, +) from ..exceptions.service_errors_utils import service_exception_mapper from ..models.basic_types import VersionStr @@ -76,14 +80,7 @@ class CatalogApi(BaseServiceClientApi): SEE osparc-simcore/services/catalog/openapi.json """ - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: "Could not list solvers/studies", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: ListSolversOrStudiesError}) async def list_solvers( self, *, @@ -123,14 +120,7 @@ async def list_solvers( ) return solvers - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get solver/study {kwargs['name']}:{kwargs['version']}", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: SolverOrStudyNotFoundError}) async def get_service( self, *, user_id: int, name: SolverKeyId, version: VersionStr, product_name: str ) -> Solver: @@ -159,14 +149,7 @@ async def get_service( solver: Solver = service.to_solver() return solver - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get ports for solver/study {kwargs['name']}:{kwargs['version']}", - ) - } - ) + 
@_exception_mapper({status.HTTP_404_NOT_FOUND: SolverOrStudyNotFoundError}) async def get_service_ports( self, *, user_id: int, name: SolverKeyId, version: VersionStr, product_name: str ): diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index 8d4a820c084..64fc03e537b 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -9,6 +9,10 @@ from models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, PositiveInt, parse_raw_as +from simcore_service_api_server.exceptions.backend_errors import ( + JobNotFoundError, + LogFileNotFoundError, +) from starlette import status from ..core.settings import DirectorV2Settings @@ -120,14 +124,7 @@ async def start_computation( task: ComputationTaskGet = ComputationTaskGet.parse_raw(response.text) return task - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get solver/study job {kwargs['project_id']}", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: JobNotFoundError}) async def get_computation( self, project_id: UUID, user_id: PositiveInt ) -> ComputationTaskGet: @@ -141,14 +138,7 @@ async def get_computation( task: ComputationTaskGet = ComputationTaskGet.parse_raw(response.text) return task - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get solver/study job {kwargs['project_id']}", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: JobNotFoundError}) async def stop_computation( self, project_id: UUID, user_id: PositiveInt ) -> ComputationTaskGet: @@ -162,14 +152,7 @@ async def stop_computation( task: ComputationTaskGet = ComputationTaskGet.parse_raw(response.text) return task - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get solver/study job {kwargs['project_id']}", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: JobNotFoundError}) async def delete_computation(self, project_id: UUID, user_id: PositiveInt): response = await self.client.request( "DELETE", @@ -181,14 +164,7 @@ async def delete_computation(self, project_id: UUID, user_id: PositiveInt): ) response.raise_for_status() - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"Could not get logfile for solver/study job {kwargs['project_id']}", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: LogFileNotFoundError}) async def get_computation_logs( self, user_id: PositiveInt, project_id: UUID ) -> dict[NodeName, DownloadLink]: diff --git a/services/api-server/src/simcore_service_api_server/services/solver_job_outputs.py b/services/api-server/src/simcore_service_api_server/services/solver_job_outputs.py index 2d064dbd82b..6ecfffd2044 100644 --- a/services/api-server/src/simcore_service_api_server/services/solver_job_outputs.py +++ b/services/api-server/src/simcore_service_api_server/services/solver_job_outputs.py @@ -2,14 +2,15 @@ from typing import TypeAlias import aiopg -from fastapi import status -from fastapi.exceptions import HTTPException -from models_library.projects import ProjectID +from models_library.projects import ProjectID, ProjectIDStr from 
models_library.projects_nodes import NodeID -from models_library.projects_nodes_io import BaseFileLink +from models_library.projects_nodes_io import BaseFileLink, NodeIDStr from pydantic import StrictBool, StrictFloat, StrictInt, parse_obj_as from simcore_sdk import node_ports_v2 from simcore_sdk.node_ports_v2 import DBManager, Nodeports +from simcore_service_api_server.exceptions.backend_errors import ( + SolverOutputNotFoundError, +) log = logging.getLogger(__name__) @@ -32,8 +33,8 @@ async def get_solver_output_results( try: solver: Nodeports = await node_ports_v2.ports( user_id=user_id, - project_id=f"{project_uuid}", - node_uuid=f"{node_uuid}", + project_id=ProjectIDStr(f"{project_uuid}"), + node_uuid=NodeIDStr(f"{node_uuid}"), db_manager=db_manager, ) solver_output_results = {} @@ -45,7 +46,4 @@ async def get_solver_output_results( return solver_output_results except node_ports_v2.exceptions.NodeNotFound as err: - raise HTTPException( - status.HTTP_404_NOT_FOUND, - detail=f"Solver {node_uuid} output of project {project_uuid} not found", - ) from err + raise SolverOutputNotFoundError(project_uuid=project_uuid) from err diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index ee29d81f582..42641d2affb 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -2,7 +2,6 @@ import logging import urllib.parse -from collections.abc import Mapping from dataclasses import dataclass from functools import partial from typing import Any @@ -50,6 +49,21 @@ X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from simcore_service_api_server.exceptions.backend_errors import ( + ConfigurationError, + ForbiddenWalletError, + ListJobsError, + PaymentRequiredError, + PricingPlanNotFoundError, + PricingUnitNotFoundError, + ProductPriceNotFoundError, + ProfileNotFoundError, + ProjectAlreadyStartedError, + ProjectMetadataNotFoundError, + ProjectPortsNotFoundError, + SolverOutputNotFoundError, + WalletNotFoundError, +) from tenacity import TryAgain from tenacity._asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -57,8 +71,8 @@ from tenacity.wait import wait_fixed from ..core.settings import WebServerSettings +from ..exceptions.backend_errors import ClusterNotFoundError, JobNotFoundError from ..exceptions.service_errors_utils import ( - ToApiTuple, service_exception_handler, service_exception_mapper, ) @@ -74,24 +88,16 @@ _exception_mapper = partial(service_exception_mapper, "Webserver") -_JOB_STATUS_MAP: Mapping = { - status.HTTP_402_PAYMENT_REQUIRED: (status.HTTP_402_PAYMENT_REQUIRED, None), - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"The job/study {kwargs['project_id']} could not be found", - ), +_JOB_STATUS_MAP = { + status.HTTP_402_PAYMENT_REQUIRED: PaymentRequiredError, + status.HTTP_404_NOT_FOUND: JobNotFoundError, } -_PROFILE_STATUS_MAP: Mapping = { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: "Could not find profile", - ) -} +_PROFILE_STATUS_MAP = {status.HTTP_404_NOT_FOUND: ProfileNotFoundError} -_WALLET_STATUS_MAP: Mapping = { - status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None), - status.HTTP_403_FORBIDDEN: (status.HTTP_403_FORBIDDEN, None), +_WALLET_STATUS_MAP = { + status.HTTP_404_NOT_FOUND: WalletNotFoundError, + status.HTTP_403_FORBIDDEN: ForbiddenWalletError, } 
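# NOTE: besides the decorator form, these maps also feed a context manager
# (service_exception_handler, used by _page_projects just below). A compact
# sketch of that form, under the same assumptions as the decorator sketch
# above (illustrative names, not the actual simcore implementation):
from contextlib import contextmanager

import httpx


class ListJobsError(Exception):
    ...


@contextmanager
def service_exception_handler(service_name: str, http_status_map, **ctx):
    try:
        yield
    except httpx.HTTPStatusError as err:
        if exc_cls := http_status_map.get(err.response.status_code):
            raise exc_cls(**ctx) from err
        raise  # unmapped upstream errors keep propagating


# usage: a 404 from the webserver surfaces as the mapped domain error
try:
    with service_exception_handler("Webserver", {404: ListJobsError}):
        raise httpx.HTTPStatusError(
            "not found",
            request=httpx.Request("GET", "http://webserver/projects"),
            response=httpx.Response(404),
        )
except ListJobsError:
    pass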
@@ -184,11 +190,7 @@ async def _page_projects( with service_exception_handler( service_name="Webserver", - http_status_map={ - status.HTTP_404_NOT_FOUND: ToApiTuple( - status.HTTP_404_NOT_FOUND, "Could not list jobs" - ) - }, + http_status_map={status.HTTP_404_NOT_FOUND: ListJobsError}, ): resp = await self.client.get( "/projects", @@ -343,14 +345,7 @@ async def delete_project(self, project_id: ProjectID) -> None: ) response.raise_for_status() - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"The ports for the job/study {kwargs['project_id']} could not be found", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: ProjectPortsNotFoundError}) async def get_project_metadata_ports( self, project_id: ProjectID ) -> list[StudyPort]: @@ -368,14 +363,7 @@ async def get_project_metadata_ports( assert isinstance(data, list) # nosec return data - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"The metadata for the job/study {kwargs['project_id']} could not be found", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: ProjectMetadataNotFoundError}) async def get_project_metadata(self, project_id: ProjectID) -> ProjectMetadataGet: response = await self.client.get( f"/projects/{project_id}/metadata", @@ -395,14 +383,7 @@ async def patch_project(self, *, project_id: UUID, patch_params: ProjectPatch): ) response.raise_for_status() - @_exception_mapper( - { - status.HTTP_404_NOT_FOUND: ( - status.HTTP_404_NOT_FOUND, - lambda kwargs: f"The metadata for the job/study {kwargs['project_id']} could not be found", - ) - } - ) + @_exception_mapper({status.HTTP_404_NOT_FOUND: ProjectMetadataNotFoundError}) async def update_project_metadata( self, project_id: ProjectID, metadata: dict[str, MetaValueType] ) -> ProjectMetadataGet: @@ -416,7 +397,7 @@ async def update_project_metadata( assert data is not None # nosec return data - @_exception_mapper({status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None)}) + @_exception_mapper({status.HTTP_404_NOT_FOUND: PricingUnitNotFoundError}) async def get_project_node_pricing_unit( self, project_id: UUID, node_id: UUID ) -> PricingUnitGet | None: @@ -430,7 +411,7 @@ async def get_project_node_pricing_unit( assert data is not None # nosec return data - @_exception_mapper({status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None)}) + @_exception_mapper({status.HTTP_404_NOT_FOUND: PricingUnitNotFoundError}) async def connect_pricing_unit_to_project_node( self, project_id: UUID, @@ -444,7 +425,14 @@ async def connect_pricing_unit_to_project_node( ) response.raise_for_status() - @_exception_mapper(_JOB_STATUS_MAP) + @_exception_mapper( + _JOB_STATUS_MAP + | { + status.HTTP_409_CONFLICT: ProjectAlreadyStartedError, + status.HTTP_406_NOT_ACCEPTABLE: ClusterNotFoundError, + status.HTTP_422_UNPROCESSABLE_ENTITY: ConfigurationError, + } + ) async def start_project( self, project_id: UUID, cluster_id: ClusterID | None = None ) -> None: @@ -494,7 +482,7 @@ async def get_project_inputs( assert data is not None # nosec return data - @_exception_mapper({status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None)}) + @_exception_mapper({status.HTTP_404_NOT_FOUND: SolverOutputNotFoundError}) async def get_project_outputs( self, project_id: ProjectID ) -> dict[NodeID, dict[str, Any]]: @@ -559,7 +547,7 @@ async def get_project_wallet(self, project_id: ProjectID) -> WalletGet | None: # PRODUCTS ------------------------------------------------- - 
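# NOTE: because the maps are now plain dicts of `status code -> exception
# class`, endpoint-specific variants compose with the dict-union operator
# (Python >= 3.9), as start_project does above with _JOB_STATUS_MAP. A tiny
# self-contained illustration (hypothetical error classes):
class PaymentRequiredError(Exception):
    ...


class JobNotFoundError(Exception):
    ...


class ProjectAlreadyStartedError(Exception):
    ...


_JOB_STATUS_MAP = {402: PaymentRequiredError, 404: JobNotFoundError}
_START_STATUS_MAP = _JOB_STATUS_MAP | {409: ProjectAlreadyStartedError}

assert _START_STATUS_MAP[404] is JobNotFoundError  # base entries preserved
assert _START_STATUS_MAP[409] is ProjectAlreadyStartedError  # extension added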
@_exception_mapper({status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None)}) + @_exception_mapper({status.HTTP_404_NOT_FOUND: ProductPriceNotFoundError}) async def get_product_price(self) -> NonNegativeDecimal | None: response = await self.client.get( "/credits-price", @@ -572,7 +560,7 @@ async def get_product_price(self) -> NonNegativeDecimal | None: # SERVICES ------------------------------------------------- - @_exception_mapper({status.HTTP_404_NOT_FOUND: (status.HTTP_404_NOT_FOUND, None)}) + @_exception_mapper({status.HTTP_404_NOT_FOUND: PricingPlanNotFoundError}) async def get_service_pricing_plan( self, solver_key: SolverKeyId, version: VersionStr ) -> ServicePricingPlanGet | None: diff --git a/services/api-server/tests/mocks/start_solver_job.json b/services/api-server/tests/mocks/start_solver_job.json new file mode 100644 index 00000000000..6c54ff0a058 --- /dev/null +++ b/services/api-server/tests/mocks/start_solver_job.json @@ -0,0 +1,80 @@ +[ + { + "name": "POST /computations/b9faf8d8-4928-4e50-af40-3690712c5481:start", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/computations/{project_id}:start", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "request_payload": {}, + "response_body": { + "data": { + "pipeline_id": "b9faf8d8-4928-4e50-af40-3690712c5481" + } + }, + "status_code": 409 + }, + { + "name": "GET /v2/computations/b9faf8d8-4928-4e50-af40-3690712c5481", + "description": "", + "method": "GET", + "host": "director-v2", + "path": { + "path": "/v2/computations/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": { + "id": "b9faf8d8-4928-4e50-af40-3690712c5481", + "state": "STARTED", + "result": null, + "pipeline_details": { + "adjacency_list": { + "d3a3c1e6-3d89-5e7a-af22-0f3ffcedef3d": [] + }, + "progress": 0.05, + "node_states": { + "d3a3c1e6-3d89-5e7a-af22-0f3ffcedef3d": { + "modified": true, + "dependencies": [], + "currentStatus": "STARTED", + "progress": 0.05 + } + } + }, + "iteration": 2, + "cluster_id": 0, + "started": "2024-06-18T20:33:46.482456+00:00", + "stopped": "2024-06-18T20:31:25.399647+00:00", + "submitted": "2024-06-18T20:33:46.384524+00:00", + "url": "http://director-v2/v2/computations/b9faf8d8-4928-4e50-af40-3690712c5481?user_id=1", + "stop_url": "http://director-v2/v2/computations/b9faf8d8-4928-4e50-af40-3690712c5481:stop?user_id=1" + } + } +] diff --git a/services/api-server/tests/unit/.swp new file mode 100644 index 0000000000000000000000000000000000000000..5c61b349e6521a19f55d69ce68a855384a7ec219 GIT binary patch (stray vim swapfile; unreadable binary payload omitted) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py index 5fb05633cc6..ebdcfc59950 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py +++ 
b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py @@ -10,6 +10,7 @@ import simcore_service_api_server.api.routes.solvers from pytest_mock import MockFixture from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.solvers import Solver from starlette import status @@ -67,7 +68,7 @@ async def test_list_solver_ports( auth: httpx.BasicAuth, ): resp = await client.get( - "/v0/solvers/simcore/services/comp/itis/sleeper/releases/2.1.4/ports", + f"/{API_VTAG}/solvers/simcore/services/comp/itis/sleeper/releases/2.1.4/ports", auth=auth, ) assert resp.status_code == status.HTTP_200_OK diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index 5930db55a29..ed3ae76cfbd 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -20,6 +20,7 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, HttpUrl, parse_obj_as from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.core.settings import ApplicationSettings from simcore_service_api_server.models.schemas.jobs import Job, JobInputs, JobStatus from simcore_service_api_server.services.director_v2 import ComputationTaskGet @@ -173,13 +174,13 @@ async def test_solver_logs( solver_key: str, solver_version: str, ): - resp = await client.get("/v0/meta") + resp = await client.get(f"/{API_VTAG}/meta") assert resp.status_code == 200 job_id = project_id resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/outputs/logfile", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/outputs/logfile", auth=auth, follow_redirects=True, ) @@ -335,12 +336,12 @@ async def test_run_solver_job( # --------------------------------------------------------------------------------------------------------- - resp = await client.get("/v0/meta") + resp = await client.get(f"/{API_VTAG}/meta") assert resp.status_code == 200 # Create Job resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth, json=JobInputs( values={ @@ -361,11 +362,11 @@ async def test_run_solver_job( # Start Job resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}:start", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}:start", auth=auth, params={"cluster_id": 1}, ) - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_202_ACCEPTED assert mocked_directorv2_service_api["inspect_computation"].called job_status = JobStatus.parse_obj(resp.json()) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py index 5e61c9d1b82..000a586836a 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py @@ -18,6 +18,7 @@ X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import 
Job, JobInputs from starlette import status @@ -69,7 +70,7 @@ async def test_delete_non_existing_solver_job( ): # Cannot delete if it does not exists resp = await client.delete( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{faker.uuid4()}", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{faker.uuid4()}", auth=auth, ) assert resp.status_code == status.HTTP_404_NOT_FOUND @@ -129,7 +130,7 @@ async def test_create_and_delete_solver_job( ): # create Job resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth, json=JobInputs( values={ @@ -143,7 +144,7 @@ async def test_create_and_delete_solver_job( # Delete Job after creation resp = await client.delete( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}", auth=auth, ) assert resp.status_code == status.HTTP_204_NO_CONTENT @@ -214,7 +215,7 @@ def create_project_side_effect(request: httpx.Request): if parent_node_id is not None: header_dict[X_SIMCORE_PARENT_NODE_ID] = f"{parent_node_id}" resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth, params={"hidden": f"{hidden}"}, headers=header_dict, diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py index 0aacc9de686..6a9a066948c 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py @@ -20,6 +20,7 @@ from pytest_mock import MockFixture from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.api.dependencies.rabbitmq import get_log_distributor from simcore_service_api_server.models.schemas.jobs import JobID, JobLog @@ -106,7 +107,7 @@ async def test_log_streaming( collected_messages: list[str] = [] async with client.stream( "GET", - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/logstream", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/logstream", auth=auth, ) as response: response.raise_for_status() diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py index e275a639792..ccf9b40b565 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py @@ -13,6 +13,7 @@ from pydantic import parse_file_as from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import ( Job, JobInputs, @@ -102,7 +103,7 @@ async def test_get_and_update_job_metadata( ): # create Job resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth, json=JobInputs( values={ @@ -118,7 +119,7 @@ async def test_get_and_update_job_metadata( 
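# NOTE: the test changes in this commit consistently replace the hard-coded
# "/v0" URL prefix with the API_VTAG constant from the package metadata, so
# the tests track the service's version tag in one place. A tiny sketch of
# the convention (assuming API_VTAG currently resolves to "v0"):
API_VTAG: str = "v0"  # in simcore: simcore_service_api_server._meta.API_VTAG


def job_metadata_url(solver_key: str, version: str, job_id: str) -> str:
    return f"/{API_VTAG}/solvers/{solver_key}/releases/{version}/jobs/{job_id}/metadata"


assert job_metadata_url(
    "simcore/services/comp/itis/sleeper", "2.1.4", "1234"
).startswith("/v0/solvers/")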
# Get metadata resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_200_OK @@ -129,7 +130,7 @@ async def test_get_and_update_job_metadata( # Update metadata my_metadata = {"number": 3.14, "integer": 42, "string": "foo", "boolean": True} resp = await client.patch( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", auth=auth, json=JobMetadataUpdate(metadata=my_metadata).dict(), ) @@ -140,7 +141,7 @@ async def test_get_and_update_job_metadata( # Get metadata after update resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_200_OK @@ -150,14 +151,14 @@ async def test_get_and_update_job_metadata( # Delete job resp = await client.delete( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}", auth=auth, ) assert resp.status_code == status.HTTP_204_NO_CONTENT # Get metadata -> job not found! resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_404_NOT_FOUND diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py index 8fb9673dbcb..1dbf8b3fa0f 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py @@ -10,6 +10,7 @@ from pydantic import parse_file_as, parse_obj_as from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.pagination import Page from simcore_service_api_server.models.schemas.jobs import Job from starlette import status @@ -74,14 +75,14 @@ async def test_list_solver_jobs( ): # list jobs (w/o pagination) resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth ) assert resp.status_code == status.HTTP_200_OK jobs = parse_obj_as(list[Job], resp.json()) # list jobs (w/ pagination) resp = await client.get( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/page", + f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/page", auth=auth, params={"limits": 20}, ) diff --git a/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py index 526f3320154..098718c3738 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py +++ b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py @@ -16,6 +16,7 @@ from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from 
pytest_simcore.helpers.httpx_calls_capture_parameters import PathDescription from respx import MockRouter +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import ( Job, JobMetadata, @@ -136,7 +137,7 @@ async def test_get_and_update_study_job_metadata( # Creates a job (w/o running it) resp = await client.post( - f"/v0/studies/{study_id}/jobs", + f"/{API_VTAG}/studies/{study_id}/jobs", auth=auth, json={"values": {}}, ) @@ -145,7 +146,7 @@ async def test_get_and_update_study_job_metadata( # Get metadata resp = await client.get( - f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + f"/{API_VTAG}/studies/{study_id}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_200_OK @@ -161,7 +162,7 @@ async def test_get_and_update_study_job_metadata( "boolean": True, } resp = await client.put( - f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + f"/{API_VTAG}/studies/{study_id}/jobs/{job.id}/metadata", auth=auth, json=jsonable_encoder(JobMetadataUpdate(metadata=my_metadata)), ) @@ -172,7 +173,7 @@ async def test_get_and_update_study_job_metadata( # Get metadata after update resp = await client.get( - f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + f"/{API_VTAG}/studies/{study_id}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_200_OK @@ -182,14 +183,14 @@ async def test_get_and_update_study_job_metadata( # Delete job resp = await client.delete( - f"/v0/studies/{study_id}/jobs/{job.id}", + f"/{API_VTAG}/studies/{study_id}/jobs/{job.id}", auth=auth, ) assert resp.status_code == status.HTTP_204_NO_CONTENT # Get metadata -> job not found! resp = await client.get( - f"/v0/studies/{study_id}/jobs/{job.id}/metadata", + f"/{API_VTAG}/studies/{study_id}/jobs/{job.id}/metadata", auth=auth, ) assert resp.status_code == status.HTTP_404_NOT_FOUND diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py index 858787953b7..1893e6e068e 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py @@ -19,6 +19,7 @@ X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.errors import ErrorGet from simcore_service_api_server.models.schemas.studies import Study, StudyID, StudyPort @@ -80,7 +81,7 @@ async def test_studies_read_workflow( study_id = StudyID("25531b1a-2565-11ee-ab43-02420a000031") # list_studies - resp = await client.get("/v0/studies", auth=auth) + resp = await client.get(f"/{API_VTAG}/studies", auth=auth) assert resp.status_code == status.HTTP_200_OK studies = parse_obj_as(list[Study], resp.json()["items"]) @@ -88,18 +89,18 @@ async def test_studies_read_workflow( assert studies[0].uid == study_id # create_study doest NOT exist -> needs to be done via GUI - resp = await client.post("/v0/studies", auth=auth) + resp = await client.post(f"/{API_VTAG}/studies", auth=auth) assert resp.status_code == status.HTTP_405_METHOD_NOT_ALLOWED # get_study - resp = await client.get(f"/v0/studies/{study_id}", auth=auth) + resp = await client.get(f"/{API_VTAG}/studies/{study_id}", auth=auth) assert resp.status_code == status.HTTP_200_OK study = parse_obj_as(Study, resp.json()) assert study.uid == study_id # get ports - resp = await client.get(f"/v0/studies/{study_id}/ports", auth=auth) + resp = await 
client.get(f"/{API_VTAG}/studies/{study_id}/ports", auth=auth) assert resp.status_code == status.HTTP_200_OK ports = parse_obj_as(list[StudyPort], resp.json()["items"]) @@ -107,12 +108,14 @@ async def test_studies_read_workflow( # get_study with non-existing uuid inexistent_study_id = StudyID("15531b1a-2565-11ee-ab43-02420a000031") - resp = await client.get(f"/v0/studies/{inexistent_study_id}", auth=auth) + resp = await client.get(f"/{API_VTAG}/studies/{inexistent_study_id}", auth=auth) assert resp.status_code == status.HTTP_404_NOT_FOUND error = parse_obj_as(ErrorGet, resp.json()) assert f"{inexistent_study_id}" in error.errors[0] - resp = await client.get(f"/v0/studies/{inexistent_study_id}/ports", auth=auth) + resp = await client.get( + f"/{API_VTAG}/studies/{inexistent_study_id}/ports", auth=auth + ) assert resp.status_code == status.HTTP_404_NOT_FOUND error = parse_obj_as(ErrorGet, resp.json()) assert f"{inexistent_study_id}" in error.errors[0] @@ -136,7 +139,7 @@ async def test_list_study_ports( ) # list_study_ports - resp = await client.get(f"/v0/studies/{study_id}/ports", auth=auth) + resp = await client.get(f"/{API_VTAG}/studies/{study_id}/ports", auth=auth) assert resp.status_code == status.HTTP_200_OK assert resp.json() == {"items": fake_study_ports, "total": len(fake_study_ports)} @@ -186,7 +189,7 @@ def clone_project_side_effect(request: httpx.Request): if parent_node_id is not None: _headers[X_SIMCORE_PARENT_NODE_ID] = f"{parent_node_id}" resp = await client.post( - f"/v0/studies/{study_id}:clone", headers=_headers, auth=auth + f"/{API_VTAG}/studies/{study_id}:clone", headers=_headers, auth=auth ) assert mocked_webserver_service_api_base["create_projects"].called @@ -212,7 +215,7 @@ async def test_clone_study_not_found( # tests unknown study unknown_study_id = faker.uuid4() - resp = await client.post(f"/v0/studies/{unknown_study_id}:clone", auth=auth) + resp = await client.post(f"/{API_VTAG}/studies/{unknown_study_id}:clone", auth=auth) assert resp.status_code == status.HTTP_404_NOT_FOUND diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py index 443fa943548..7196e808d5c 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py @@ -177,7 +177,7 @@ def _check_response(response: httpx.Response, status_code: int): f"{API_VTAG}/studies/{fake_study_id}/jobs/{job_id}:start", auth=auth, ) - _check_response(response, status.HTTP_200_OK) + _check_response(response, status.HTTP_202_ACCEPTED) # stop study job response = await client.post( diff --git a/services/api-server/tests/unit/test__fastapi.py b/services/api-server/tests/unit/test__fastapi.py index 66303f34492..6cf2e6f13c9 100644 --- a/services/api-server/tests/unit/test__fastapi.py +++ b/services/api-server/tests/unit/test__fastapi.py @@ -25,6 +25,7 @@ from faker import Faker from fastapi import APIRouter, FastAPI, status from fastapi.testclient import TestClient +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.solvers import ( Solver, SolverKeyId, @@ -79,7 +80,7 @@ async def stop_job( } the_app = FastAPI() - the_app.include_router(router, prefix="/v0") + the_app.include_router(router, prefix=f"/{API_VTAG}") return TestClient(the_app) @@ -91,7 +92,9 @@ def test_fastapi_route_paths_in_paths(client: TestClient, faker: Faker): # can be raw raw_solver_key = 
solver_key - resp = client.get(f"/v0/solvers/{raw_solver_key}/releases/{version}/jobs/{job_id}") + resp = client.get( + f"/{API_VTAG}/solvers/{raw_solver_key}/releases/{version}/jobs/{job_id}" + ) assert resp.status_code == status.HTTP_200_OK assert resp.json() == { "action": "get_job", @@ -103,7 +106,7 @@ def test_fastapi_route_paths_in_paths(client: TestClient, faker: Faker): # can be quoted quoted_solver_key = urllib.parse.quote_plus("simcore/services/comp/itis/isolve") resp = client.get( - f"/v0/solvers/{quoted_solver_key}/releases/{version}/jobs/{job_id}" + f"/{API_VTAG}/solvers/{quoted_solver_key}/releases/{version}/jobs/{job_id}" ) assert resp.status_code == status.HTTP_200_OK assert resp.json() == { @@ -129,12 +132,14 @@ def test_fastapi_route_name_parsing(client: TestClient, app: FastAPI, faker: Fak f"{action}_job", solver_key=solver_key, version=version, job_id=job_id ) resp = client.post( - f"/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}:{action}" + f"/{API_VTAG}/solvers/{solver_key}/releases/{version}/jobs/{job_id}:{action}" ) assert resp.url.path == expected_path assert resp.status_code == status.HTTP_200_OK assert resp.json()["action"] == f"{action}_job" - resp = client.get(f"/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}") + resp = client.get( + f"/{API_VTAG}/solvers/{solver_key}/releases/{version}/jobs/{job_id}" + ) assert resp.status_code == status.HTTP_200_OK assert resp.json()["action"] == "get_job" diff --git a/services/api-server/tests/unit/test_api__study_workflows.py b/services/api-server/tests/unit/test_api__study_workflows.py index 0bd5a6b7097..b0b7d306e77 100644 --- a/services/api-server/tests/unit/test_api__study_workflows.py +++ b/services/api-server/tests/unit/test_api__study_workflows.py @@ -88,7 +88,7 @@ class StudiesTestApi(_BaseTestApi): @_handle_http_status_error async def list_study_ports(self, study_id): resp = await self._client.get( - f"/v0/studies/{study_id}/ports", + f"/{API_VTAG}/studies/{study_id}/ports", **self._request_kwargs, ) resp.raise_for_status() @@ -116,7 +116,7 @@ async def start_study_job(self, study_id, job_id) -> JobStatus: @_handle_http_status_error async def inspect_study_job(self, study_id, job_id) -> JobStatus: resp = await self._client.post( - f"/v0/studies/{study_id}/jobs/{job_id}:inspect", + f"/{API_VTAG}/studies/{study_id}/jobs/{job_id}:inspect", **self._request_kwargs, ) resp.raise_for_status() diff --git a/services/api-server/tests/unit/test_api_solver_jobs.py b/services/api-server/tests/unit/test_api_solver_jobs.py index 586ece58c44..524adc7300c 100644 --- a/services/api-server/tests/unit/test_api_solver_jobs.py +++ b/services/api-server/tests/unit/test_api_solver_jobs.py @@ -194,7 +194,7 @@ def _get_pricing_unit_side_effect( @pytest.mark.parametrize( "capture_name,expected_status_code", [ - ("start_job_with_payment.json", 200), + ("start_job_with_payment.json", 202), ("start_job_not_enough_credit.json", 402), ], ) @@ -248,7 +248,7 @@ def _put_pricing_plan_and_unit_side_effect( _put_pricing_plan_and_unit_side_effect, _start_job_side_effect, ] - if expected_status_code == status.HTTP_200_OK: + if expected_status_code == status.HTTP_202_ACCEPTED: callbacks.append(get_inspect_job_side_effect(job_id=_job_id)) _put_pricing_plan_and_unit_side_effect.was_called = False @@ -270,7 +270,7 @@ def _put_pricing_plan_and_unit_side_effect( }, ) assert response.status_code == expected_status_code - if expected_status_code == status.HTTP_200_OK: + if expected_status_code == status.HTTP_202_ACCEPTED: assert 
_put_pricing_plan_and_unit_side_effect.was_called assert response.json()["job_id"] == _job_id @@ -306,10 +306,46 @@ async def test_get_solver_job_pricing_unit_no_payment( auth=auth, ) - assert response.status_code == status.HTTP_200_OK + assert response.status_code == status.HTTP_202_ACCEPTED assert response.json()["job_id"] == _job_id +async def test_start_solver_job_conflict( + client: AsyncClient, + mocked_webserver_service_api_base, + mocked_directorv2_service_api_base, + mocked_groups_extra_properties, + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + assert mocked_groups_extra_properties + _solver_key: str = "simcore/services/comp/itis/sleeper" + _version: str = "2.0.2" + _job_id: str = "b9faf8d8-4928-4e50-af40-3690712c5481" + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_directorv2_service_api_base, + mocked_webserver_service_api_base, + ], + capture_path=project_tests_dir / "mocks" / "start_solver_job.json", + side_effects_callbacks=[ + _start_job_side_effect, + get_inspect_job_side_effect(job_id=_job_id), + ], + ) + + response = await client.post( + f"{API_VTAG}/solvers/{_solver_key}/releases/{_version}/jobs/{_job_id}:start", + auth=auth, + ) + + assert response.status_code == status.HTTP_200_OK + job_status = JobStatus.parse_obj(response.json()) + assert f"{job_status.job_id}" == _job_id + + async def test_stop_job( client: AsyncClient, mocked_directorv2_service_api_base, diff --git a/services/api-server/tests/unit/test_exceptions.py b/services/api-server/tests/unit/test_exceptions.py index c64741d8987..d9d8b771293 100644 --- a/services/api-server/tests/unit/test_exceptions.py +++ b/services/api-server/tests/unit/test_exceptions.py @@ -10,6 +10,7 @@ from fastapi import FastAPI, HTTPException, status from httpx import HTTPStatusError, Request, Response from simcore_service_api_server.exceptions import setup_exception_handlers +from simcore_service_api_server.exceptions.backend_errors import ProfileNotFoundError from simcore_service_api_server.exceptions.custom_errors import MissingWalletError from simcore_service_api_server.exceptions.service_errors_utils import ( service_exception_mapper, @@ -20,12 +21,7 @@ async def test_backend_service_exception_mapper(): @service_exception_mapper( "DummyService", - { - status.HTTP_400_BAD_REQUEST: ( - status.HTTP_200_OK, - lambda kwargs: "error message", - ) - }, + {status.HTTP_400_BAD_REQUEST: ProfileNotFoundError}, ) async def my_endpoint(status_code: int): raise HTTPStatusError( @@ -34,9 +30,8 @@ async def my_endpoint(status_code: int): response=Response(status_code), ) - with pytest.raises(HTTPException) as exc_info: + with pytest.raises(ProfileNotFoundError): await my_endpoint(status.HTTP_400_BAD_REQUEST) - assert exc_info.value.status_code == status.HTTP_200_OK with pytest.raises(HTTPException) as exc_info: await my_endpoint(status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/services/director-v2/openapi.json b/services/director-v2/openapi.json index 5be4cf92355..b2e27ac6a70 100644 --- a/services/director-v2/openapi.json +++ b/services/director-v2/openapi.json @@ -88,15 +88,23 @@ } } }, + "404": { + "description": "Project or pricing details not found" + }, + "406": { + "description": "Cluster not found" + }, + "503": { + "description": "Service not available" + }, "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } + "description": "Configuration 
error" + }, + "402": { + "description": "Payment required" + }, + "409": { + "description": "Project already started" } } } diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 52988271471..16d2a69ce91 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -222,7 +222,7 @@ async def _try_start_pipeline( # 2 options here: either we have cycles in the graph or it's really done if find_computational_node_cycles(complete_dag): raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, + status_code=status.HTTP_409_CONFLICT, detail=f"Project {computation.project_id} contains cycles with computational services which are currently not supported! Please remove them.", ) # there is nothing else to be run here, so we are done @@ -267,6 +267,22 @@ async def _try_start_pipeline( summary="Create and optionally start a new computation", response_model=ComputationGet, status_code=status.HTTP_201_CREATED, + responses={ + status.HTTP_404_NOT_FOUND: { + "description": "Project or pricing details not found", + }, + status.HTTP_406_NOT_ACCEPTABLE: { + "description": "Cluster not found", + }, + status.HTTP_503_SERVICE_UNAVAILABLE: { + "description": "Service not available", + }, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Configuration error", + }, + status.HTTP_402_PAYMENT_REQUIRED: {"description": "Payment required"}, + status.HTTP_409_CONFLICT: {"description": "Project already started"}, + }, ) # NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state. @run_sequentially_in_context(target_args=["computation.project_id"]) @@ -420,7 +436,9 @@ async def create_computation( # noqa: PLR0913 status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=f"{e}" ) from e except ConfigurationError as e: - raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=f"{e}") from e + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"{e}" + ) from e except WalletNotEnoughCreditsError as e: raise HTTPException( status_code=status.HTTP_402_PAYMENT_REQUIRED, detail=f"{e}" diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 8fb75a6c307..652b9fb6dff 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -1035,7 +1035,7 @@ async def test_pipeline_with_cycle_containing_a_computational_service_is_forbidd }, ) assert ( - response.status_code == status.HTTP_403_FORBIDDEN + response.status_code == status.HTTP_409_CONFLICT ), f"response code is {response.status_code}, error: {response.text}" # still this pipeline shall be createable if we do not want to start it diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index e12bdd7bffa..7fe67666267 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -571,7 +571,7 @@ async def test_create_computation_with_wallet( "default_pricing_plan", [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], ) -async def 
test_create_computation_with_wallet_with_invalid_pricing_unit_name_raises_409( +async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_raises_422( minimal_configuration: None, mocked_director_service_fcts: respx.MockRouter, mocked_catalog_service_fcts: respx.MockRouter, @@ -601,7 +601,7 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai ) ), ) - assert response.status_code == status.HTTP_409_CONFLICT, response.text + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text mocked_clusters_keeper_service_get_instance_type_details_with_invalid_name.assert_called_once() diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 9a66487c8be..857ae652973 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -1040,6 +1040,18 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope__ComputationStarted_' + '404': + description: Project/wallet/pricing details not found + '402': + description: Payment required + '406': + description: Cluster not found + '503': + description: Service not available + '422': + description: Configuration error + '409': + description: Project already started /v0/computations/{project_id}:stop: post: tags: From ad2d3e391d9072fb9b6b555fcd7ad8f8421d6363 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 20 Jun 2024 17:38:44 +0200 Subject: [PATCH 054/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=F0=9F=90=9BStorage:?= =?UTF-8?q?=20disable=20handling=20of=20dangling=20multipart=20uploads=20(?= =?UTF-8?q?#5978)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .coveragerc | 2 +- .../simcore_service_storage/simcore_s3_dsm.py | 152 +++--------------- .../storage/tests/fixtures/data_models.py | 2 +- .../storage/tests/unit/test_dsm_dsmcleaner.py | 111 +------------ .../tests/unit/test_handlers_simcore_s3.py | 22 +-- 5 files changed, 40 insertions(+), 249 deletions(-) diff --git a/.coveragerc b/.coveragerc index 5718fd589c1..3299a90950a 100644 --- a/.coveragerc +++ b/.coveragerc @@ -22,7 +22,7 @@ exclude_lines = # Don't complain if non-runnable code isn't run: if 0: if __name__ == .__main__.: - + if __name__ == __main__.: # Don't complain about abstract methods, they aren't run: @(abc\.)?abstract(((class|static)?method)|property) diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 8059bb6165e..301579040c5 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -10,6 +10,7 @@ from pathlib import Path from typing import Any, Final, cast +import arrow from aiohttp import web from aiopg.sa import Engine from aiopg.sa.connection import SAConnection @@ -37,7 +38,6 @@ APP_DB_ENGINE_KEY, DATCORE_ID, EXPAND_DIR_MAX_ITEM_COUNT, - MAX_CONCURRENT_DB_TASKS, MAX_CONCURRENT_S3_TASKS, MAX_LINK_CHUNK_BYTE_SIZE, S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, @@ -71,11 +71,7 @@ from .s3_client import S3MetaData, StorageS3Client from .s3_utils import S3TransferDataCB, update_task_progress from .settings import Settings -from .simcore_s3_dsm_utils import ( - expand_directory, - get_directory_file_id, - get_simcore_directory, -) +from 
.simcore_s3_dsm_utils import expand_directory, get_directory_file_id from .utils import ( convert_db_to_model, download_to_file_or_raise, @@ -83,6 +79,7 @@ is_valid_managed_multipart_upload, ) +_NO_CONCURRENCY: Final[int] = 1 _MAX_PARALLEL_S3_CALLS: Final[NonNegativeInt] = 10 _logger = logging.getLogger(__name__) @@ -577,7 +574,7 @@ async def delete_project_simcore_s3( self.simcore_bucket_name, project_id, node_id ) - async def deep_copy_project_simcore_s3( + async def deep_copy_project_simcore_s3( # noqa: C901 self, user_id: UserID, src_project: dict[str, Any], @@ -788,19 +785,20 @@ async def create_soft_link( async def synchronise_meta_data_table( self, *, dry_run: bool ) -> list[StorageFileID]: - file_ids_to_remove = [] + async with self.engine.acquire() as conn: _logger.warning( "Total number of entries to check %d", await db_file_meta_data.total(conn), ) # iterate over all entries to check if there is a file in the S3 backend - async for fmd in db_file_meta_data.list_valid_uploads(conn): + file_ids_to_remove = [ + fmd.file_id + async for fmd in db_file_meta_data.list_valid_uploads(conn) if not await get_s3_client(self.app).file_exists( self.simcore_bucket_name, s3_object=fmd.object_name - ): - # this file does not exist in S3 - file_ids_to_remove.append(fmd.file_id) + ) + ] if not dry_run: await db_file_meta_data.delete(conn, file_ids_to_remove) @@ -832,7 +830,7 @@ async def _clean_expired_uploads(self) -> None: 1. will try to update the entry from S3 backend if exists 2. will delete the entry if nothing exists in S3 backend. """ - now = datetime.datetime.utcnow() + now = arrow.utcnow().datetime async with self.engine.acquire() as conn: list_of_expired_uploads = await db_file_meta_data.list_fmds( conn, expired_after=now @@ -853,7 +851,7 @@ async def _clean_expired_uploads(self) -> None: ), reraise=False, log=_logger, - max_concurrency=MAX_CONCURRENT_DB_TASKS, + max_concurrency=_NO_CONCURRENCY, ) list_of_fmds_to_delete = [ expired_fmd @@ -867,16 +865,24 @@ async def _clean_expired_uploads(self) -> None: async def _revert_file( conn: SAConnection, fmd: FileMetaDataAtDB ) -> FileMetaDataAtDB: + if is_valid_managed_multipart_upload(fmd.upload_id): + assert fmd.upload_id # nosec + await s3_client.abort_multipart_upload( + bucket=fmd.bucket_name, + file_id=fmd.file_id, + upload_id=fmd.upload_id, + ) await s3_client.undelete_file(fmd.bucket_name, fmd.file_id) return await self._update_database_from_storage(conn, fmd) s3_client = get_s3_client(self.app) async with self.engine.acquire() as conn: + # NOTE: no concurrency here as we want to run low resources reverted_fmds = await logged_gather( *(_revert_file(conn, fmd) for fmd in list_of_fmds_to_delete), reraise=False, log=_logger, - max_concurrency=MAX_CONCURRENT_DB_TASKS, + max_concurrency=_NO_CONCURRENCY, ) list_of_fmds_to_delete = [ fmd @@ -892,123 +898,17 @@ async def _revert_file( "following unfinished/incomplete uploads will now be deleted : [%s]", [fmd.file_id for fmd in list_of_fmds_to_delete], ) - await logged_gather( - *( - self.delete_file(fmd.user_id, fmd.file_id) - for fmd in list_of_fmds_to_delete - if fmd.user_id is not None - ), - log=_logger, - max_concurrency=MAX_CONCURRENT_DB_TASKS, - ) + for fmd in list_of_fmds_to_delete: + if fmd.user_id is not None: + await self.delete_file(fmd.user_id, fmd.file_id) + _logger.warning( "pending/incomplete uploads of [%s] removed", [fmd.file_id for fmd in list_of_fmds_to_delete], ) - async def _clean_dangling_multipart_uploads(self): - """this method removes any dangling multipart upload 
that - was initiated on S3 backend if it does not exist in file_meta_data - table. - Use-cases: - - presigned multipart upload: a multipart upload is created after the entry in the table ( - if the expiry date is still in the future we do not remove the upload - ) - - S3 external or internal potentially multipart upload (using S3 direct access we do not know - if they create multipart uploads and have no control over it, the only thing we know is the upload - expiry date) - --> we only remove dangling upload IDs which expiry date is in the past or that have no upload in process - or no entry at all in the database - - """ - current_multipart_uploads: list[ - tuple[UploadID, SimcoreS3FileID] - ] = await get_s3_client(self.app).list_ongoing_multipart_uploads( - self.simcore_bucket_name - ) - if not current_multipart_uploads: - return - _logger.debug("found %s", f"{current_multipart_uploads=}") - - # there are some multipart uploads, checking if - # there is a counterpart in file_meta_data - # NOTE: S3 url encode file uuid with specific characters - async with self.engine.acquire() as conn: - # files have a 1 to 1 entry in the file_meta_data table - file_ids: list[SimcoreS3FileID] = [ - SimcoreS3FileID(urllib.parse.unquote(f)) - for _, f in current_multipart_uploads - ] - # if a file is part of directory, check if this directory is present in - # the file_meta_data table; extracting the SimcoreS3DirectoryID from - # the file path to find it's equivalent - directory_and_file_ids: list[SimcoreS3FileID] = file_ids + [ - SimcoreS3FileID(get_simcore_directory(file_id)) for file_id in file_ids - ] - - list_of_known_metadata_entries: list[ - FileMetaDataAtDB - ] = await db_file_meta_data.list_fmds( - conn, file_ids=list(set(directory_and_file_ids)) - ) - _logger.debug("metadata entries %s", f"{list_of_known_metadata_entries=}") - - # known uploads do have an expiry date (regardless of upload ID that we do not always know) - list_of_known_uploads = [ - fmd for fmd in list_of_known_metadata_entries if fmd.upload_expires_at - ] - - # To compile the list of valid uploads, check that the s3_object is - # part of the known uploads. - # The known uploads is composed of entries for files or for directories. - # checking if the s3_object is part of either one of those - list_of_valid_upload_ids: list[str] = [] - known_directory_names: set[str] = { - x.object_name for x in list_of_known_uploads if x.is_directory is True - } - known_file_names: set[str] = { - x.object_name for x in list_of_known_uploads if x.is_directory is False - } - for upload_id, s3_object_name in current_multipart_uploads: - file_id = SimcoreS3FileID(urllib.parse.unquote(s3_object_name)) - if ( - file_id in known_file_names - or get_simcore_directory(file_id) in known_directory_names - ): - list_of_valid_upload_ids.append(upload_id) - - _logger.debug("found the following %s", f"{list_of_valid_upload_ids=}") - if list_of_invalid_uploads := [ - ( - upload_id, - file_id, - ) - for upload_id, file_id in current_multipart_uploads - if upload_id not in list_of_valid_upload_ids - ]: - _logger.debug( - "the following %s was found and will now be aborted", - f"{list_of_invalid_uploads=}", - ) - await logged_gather( - *( - get_s3_client(self.app).abort_multipart_upload( - self.simcore_bucket_name, file_id, upload_id - ) - for upload_id, file_id in list_of_invalid_uploads - ), - max_concurrency=MAX_CONCURRENT_S3_TASKS, - ) - _logger.warning( - "Dangling multipart uploads '%s', were aborted. 
" - "TIP: There were multipart uploads active on S3 with no counter-part in the file_meta_data database. " - "This might indicate that something went wrong in how storage handles multipart uploads!!", - f"{list_of_invalid_uploads}", - ) - async def clean_expired_uploads(self) -> None: await self._clean_expired_uploads() - await self._clean_dangling_multipart_uploads() async def _update_database_from_storage( self, conn: SAConnection, fmd: FileMetaDataAtDB @@ -1171,7 +1071,7 @@ async def _create_fmd_for_upload( is_directory: bool, sha256_checksum: SHA256Str | None, ) -> FileMetaDataAtDB: - now = datetime.datetime.utcnow() + now = arrow.utcnow().datetime upload_expiration_date = now + datetime.timedelta( seconds=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS ) diff --git a/services/storage/tests/fixtures/data_models.py b/services/storage/tests/fixtures/data_models.py index 66690264ad5..9fb00685e84 100644 --- a/services/storage/tests/fixtures/data_models.py +++ b/services/storage/tests/fixtures/data_models.py @@ -188,7 +188,7 @@ async def random_project_with_files( upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], faker: Faker, ) -> Callable[ - [int, tuple[ByteSize, ...]], + [int, tuple[ByteSize, ...], tuple[SHA256Str, ...]], Awaitable[ tuple[ dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]] diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index e9d1220da77..36c6ae7342f 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -9,9 +9,11 @@ import asyncio import datetime import urllib.parse +from collections.abc import Awaitable, Callable from pathlib import Path -from typing import Awaitable, Callable, Final +from typing import Final +import arrow import pytest from aiopg.sa.engine import Engine from faker import Faker @@ -54,41 +56,6 @@ def simcore_directory_id(simcore_file_id: SimcoreS3FileID) -> SimcoreS3FileID: ) -async def test_clean_expired_uploads_aborts_dangling_multipart_uploads( - disabled_dsm_cleaner_task, - storage_s3_client: StorageS3Client, - storage_s3_bucket: S3BucketName, - simcore_s3_dsm: SimcoreS3DataManager, -): - """in this test we create a purely dangling multipart upload with no correspongin - entry in file_metadata table - """ - file_id = _faker.file_name() - file_size = parse_obj_as(ByteSize, "100Mib") - upload_links = await storage_s3_client.create_multipart_upload_links( - storage_s3_bucket, - file_id, - file_size, - expiration_secs=3600, - sha256_checksum=parse_obj_as(SHA256Str, _faker.sha256()), - ) - - # ensure we have now an upload id - all_ongoing_uploads = await storage_s3_client.list_ongoing_multipart_uploads( - storage_s3_bucket - ) - assert len(all_ongoing_uploads) == 1 - ongoing_upload_id, ongoing_file_id = all_ongoing_uploads[0] - assert upload_links.upload_id == ongoing_upload_id - assert ongoing_file_id == file_id - - # now run the cleaner - await simcore_s3_dsm.clean_expired_uploads() - - # since there is no entry in the db, this upload shall be cleaned up - assert not await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket) - - @pytest.mark.parametrize( "file_size", [ByteSize(0), parse_obj_as(ByteSize, "10Mib"), parse_obj_as(ByteSize, "100Mib")], @@ -227,7 +194,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == file_or_directory_id) - 
.values(upload_expires_at=datetime.datetime.utcnow()) + .values(upload_expires_at=arrow.utcnow().datetime) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -314,7 +281,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == file_id) - .values(upload_expires_at=datetime.datetime.utcnow()) + .values(upload_expires_at=arrow.utcnow().datetime) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -356,7 +323,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation the cleaner in between to ensure the cleaner does not break the mechanism""" file_or_directory_id = simcore_directory_id if is_directory else simcore_file_id - later_than_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) + later_than_now = arrow.utcnow().datetime + datetime.timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, file_or_directory_id, @@ -425,69 +392,3 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation ) assert len(all_ongoing_uploads_after_clean) == len(file_ids_to_upload) assert all_ongoing_uploads == all_ongoing_uploads_after_clean - - -@pytest.mark.parametrize( - "file_size", - [parse_obj_as(ByteSize, "100Mib")], - ids=byte_size_ids, -) -@pytest.mark.parametrize("checksum", [_faker.sha256(), None]) -async def test_clean_expired_uploads_cleans_dangling_multipart_uploads_if_no_corresponding_upload_found( - disabled_dsm_cleaner_task, - aiopg_engine: Engine, - simcore_s3_dsm: SimcoreS3DataManager, - simcore_file_id: SimcoreS3FileID, - user_id: UserID, - file_size: ByteSize, - storage_s3_client: StorageS3Client, - storage_s3_bucket: S3BucketName, - checksum: SHA256Str | None, -): - """This test reproduces what create_file_upload_links in dsm does, but running - the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) - fmd = FileMetaData.from_simcore_node( - user_id, - simcore_file_id, - storage_s3_bucket, - simcore_s3_dsm.location_id, - simcore_s3_dsm.location_name, - upload_expires_at=later_than_now, - sha256_checksum=checksum, - ) - # we create the entry in the db - async with aiopg_engine.acquire() as conn: - await db_file_meta_data.upsert(conn, fmd) - - # ensure the database is correctly set up - fmd_in_db = await db_file_meta_data.get(conn, simcore_file_id) - assert fmd_in_db - assert fmd_in_db.upload_expires_at - # we create the multipart upload link - upload_links = await storage_s3_client.create_multipart_upload_links( - storage_s3_bucket, - simcore_file_id, - file_size, - expiration_secs=3600, - sha256_checksum=parse_obj_as(SHA256Str, _faker.sha256()), - ) - - # ensure we have now an upload id - all_ongoing_uploads = await storage_s3_client.list_ongoing_multipart_uploads( - storage_s3_bucket - ) - assert len(all_ongoing_uploads) == 1 - ongoing_upload_id, ongoing_file_id = all_ongoing_uploads[0] - assert upload_links.upload_id == ongoing_upload_id - assert urllib.parse.unquote(ongoing_file_id) == simcore_file_id - - # now cleanup, we do not have an explicit upload_id in the database yet - await simcore_s3_dsm.clean_expired_uploads() - - # ensure we STILL have the same upload id - all_ongoing_uploads_after_clean = ( - await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket) - ) - assert len(all_ongoing_uploads_after_clean) == 1 - assert all_ongoing_uploads == 
all_ongoing_uploads_after_clean diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index ee3396f97da..348e214d5b5 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -6,9 +6,10 @@ # pylint:disable=too-many-nested-blocks import sys +from collections.abc import Awaitable, Callable from copy import deepcopy from pathlib import Path -from typing import Any, Awaitable, Callable, Literal +from typing import Any, Literal import pytest import sqlalchemy as sa @@ -24,7 +25,6 @@ from models_library.utils.change_case import camel_to_snake from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, parse_file_as, parse_obj_as -from pytest_mock import MockerFixture from pytest_simcore.helpers.utils_assert import assert_status from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import long_running_task_request @@ -192,7 +192,7 @@ async def test_copy_folders_from_valid_project_with_one_large_file( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], aiopg_engine: Engine, random_project_with_files: Callable[ - ..., + [int, tuple[ByteSize], tuple[SHA256Str]], Awaitable[ tuple[ dict[str, Any], @@ -206,9 +206,9 @@ async def test_copy_folders_from_valid_project_with_one_large_file( SHA256Str, "0b3216d95ec5a36c120ba16c88911dcf5ff655925d0fbdbc74cf95baf86de6fc" ) src_project, src_projects_list = await random_project_with_files( - num_nodes=1, - file_sizes=tuple([parse_obj_as(ByteSize, "210Mib")]), - file_checksums=tuple([sha256_checksum]), + 1, + (parse_obj_as(ByteSize, "210Mib"),), + (sha256_checksum,), ) # 2. create a dst project without files dst_project, nodes_map = clone_project_data(src_project) @@ -381,16 +381,6 @@ async def _create_and_delete_folders_from_project( assert not data -@pytest.fixture -def mock_check_project_exists(mocker: MockerFixture): - # NOTE: this avoid having to inject project in database - mock = mocker.patch( - "simcore_service_storage.dsm._check_project_exists", - autospec=True, - return_value=None, - ) - - @pytest.mark.parametrize( "project", [pytest.param(prj, id=prj.name) for prj in _get_project_with_data()], From 68314e477c8924c8b00f1713714610dc424f9f9b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Thu, 20 Jun 2024 19:16:53 +0200 Subject: [PATCH 055/219] =?UTF-8?q?=E2=9C=A8=20Auto=20inject=20osparc=20en?= =?UTF-8?q?vironments=20to=20dynamic=20services=20(#5966)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../models_library/service_settings_labels.py | 8 +- .../dynamic_sidecar/docker_compose_specs.py | 4 + .../modules/osparc_variables/substitutions.py | 34 ++++- services/director-v2/tests/conftest.py | 33 +++++ .../02/test_dynamic_services_routes.py | 3 + ...t_dynamic_sidecar_nodeports_integration.py | 1 + ...ixed_dynamic_sidecar_and_legacy_project.py | 1 + .../unit/test_modules_osparc_variables.py | 120 +++++++++++++----- 8 files changed, 166 insertions(+), 38 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index b919f6db8e9..adeedfdec7e 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -395,7 
+395,7 @@ def _containers_allowed_outgoing_internet_in_compose_spec(cls, v, values): @validator("callbacks_mapping") @classmethod - def ensure_callbacks_mapping_container_names_defined_in_compose_spec( + def _ensure_callbacks_mapping_container_names_defined_in_compose_spec( cls, v: CallbacksMapping, values ): if v is None: @@ -423,12 +423,12 @@ def ensure_callbacks_mapping_container_names_defined_in_compose_spec( @validator("user_preferences_path", pre=True) @classmethod - def deserialize_from_json(cls, v): + def _deserialize_from_json(cls, v): return f"{v}".removeprefix('"').removesuffix('"') @validator("user_preferences_path") @classmethod - def user_preferences_path_no_included_in_other_volumes( + def _user_preferences_path_no_included_in_other_volumes( cls, v: CallbacksMapping, values ): paths_mapping: PathMappingsLabel | None = values.get("paths_mapping", None) @@ -447,7 +447,7 @@ def user_preferences_path_no_included_in_other_volumes( @root_validator @classmethod - def not_allowed_in_both_specs(cls, values): + def _not_allowed_in_both_specs(cls, values): match_keys = { "containers_allowed_outgoing_internet", "containers_allowed_outgoing_permit_list", diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py index b0a8dfc04ab..e35400e2a28 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py @@ -28,6 +28,7 @@ from ...core.dynamic_services_settings.egress_proxy import EgressProxySettings from ..osparc_variables.substitutions import ( + auto_inject_environments, resolve_and_substitute_session_variables_in_model, resolve_and_substitute_session_variables_in_specs, substitute_vendor_secrets_in_model, @@ -364,6 +365,9 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 assigned_limits=assigned_limits, ) + # resolve service-spec + service_spec = auto_inject_environments(service_spec) + service_spec = await substitute_vendor_secrets_in_specs( app=app, specs=service_spec, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py index 0b3f87d46ea..c78ac5e04a2 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py @@ -4,7 +4,7 @@ import functools import logging from copy import deepcopy -from typing import Any +from typing import Any, Final from fastapi import FastAPI from models_library.osparc_variable_identifier import ( @@ -15,6 +15,7 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.service_settings_labels import ComposeSpecLabelDict from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.utils.specs_substitution import SpecsSubstitutionsResolver @@ -137,6 +138,37 @@ def create(cls, app: FastAPI): return table +_NEW_ENVIRONMENTS: Final = { + "OSPARC_API_BASE_URL": "$OSPARC_VARIABLE_API_HOST", + "OSPARC_API_KEY": "$OSPARC_VARIABLE_API_KEY", + "OSPARC_API_SECRET": 
"$OSPARC_VARIABLE_API_SECRET", + "OSPARC_STUDY_ID": "$OSPARC_VARIABLE_STUDY_UUID", + "OSPARC_NODE_ID": "$OSPARC_VARIABLE_NODE_ID", +} + + +def auto_inject_environments( + compose_spec: ComposeSpecLabelDict, +) -> ComposeSpecLabelDict: + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5925 + for service in compose_spec.get("services", {}).values(): + current_environment = deepcopy(service.get("environment", {})) + + # if _NEW_ENVIRONMENTS are already defined, then do not change them + if isinstance(current_environment, dict): + service["environment"] = { + **_NEW_ENVIRONMENTS, + **current_environment, + } + elif isinstance(current_environment, list): + service["environment"] += [ + f"{name}={value}" + for name, value in _NEW_ENVIRONMENTS.items() + if not any(e.startswith(name) for e in current_environment) + ] + return compose_spec + + async def resolve_and_substitute_session_variables_in_model( app: FastAPI, model: BaseModel, diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 27f488957dc..3ea9811f961 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -9,6 +9,7 @@ import os from collections.abc import AsyncIterable, AsyncIterator from copy import deepcopy +from datetime import timedelta from pathlib import Path from typing import Any from unittest.mock import AsyncMock @@ -19,7 +20,10 @@ from asgi_lifespan import LifespanManager from faker import Faker from fastapi import FastAPI +from models_library.api_schemas_webserver.auth import ApiKeyGet +from models_library.products import ProductName from models_library.projects import Node, NodesDict +from models_library.users import UserID from pytest_mock import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.utils_envs import setenvs_from_dict, setenvs_from_envfile @@ -323,3 +327,32 @@ async def wrapper(*args, **kwargs): "simcore_service_director_v2.modules.dynamic_sidecar.scheduler._core._scheduler" ) mocker.patch(f"{module_base}.exclusive", side_effect=_mock_exclusive) + + +@pytest.fixture +def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: + + fake_data = ApiKeyGet.parse_obj(ApiKeyGet.Config.schema_extra["examples"][0]) + + async def _create( + app: FastAPI, + *, + product_name: ProductName, + user_id: UserID, + name: str, + expiration: timedelta, + ): + assert app + assert product_name + assert user_id + assert expiration is None + + fake_data.display_name = name + return fake_data + + # mocks RPC interface + mocker.patch( + "simcore_service_director_v2.modules.osparc_variables._api_auth.get_or_create_api_key_and_secret", + side_effect=_create, + autospec=True, + ) diff --git a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py index 9c4f740aaf5..0bdfb73b5c8 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py +++ b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py @@ -1,5 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import asyncio import json @@ -302,6 +304,7 @@ async def test_start_status_stop( mock_projects_repository: None, mocked_service_awaits_manual_interventions: None, mock_resource_usage_tracker: None, + mock_osparc_variables_api_auth_rpc: None, ): # NOTE: this test does not like it when the 
catalog is not fully ready!!! diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index a86aeb2ff2e..0d187de43d2 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -893,6 +893,7 @@ async def test_nodeports_integration( projects_networks_db: None, mocked_service_awaits_manual_interventions: None, mock_resource_usage_tracker: None, + mock_osparc_variables_api_auth_rpc: None, initialized_app: FastAPI, update_project_workbench_with_comp_tasks: Callable, async_client: httpx.AsyncClient, diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 7afa5eb5aad..759e9fd620e 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -227,6 +227,7 @@ async def test_legacy_and_dynamic_sidecar_run( service_resources: ServiceResourcesDict, mocked_service_awaits_manual_interventions: None, mock_resource_usage_tracker: None, + mock_osparc_variables_api_auth_rpc: None, ): """ The test will start 3 dynamic services in the same project and check diff --git a/services/director-v2/tests/unit/test_modules_osparc_variables.py b/services/director-v2/tests/unit/test_modules_osparc_variables.py index 9ed3187a25a..9ed659f00ad 100644 --- a/services/director-v2/tests/unit/test_modules_osparc_variables.py +++ b/services/director-v2/tests/unit/test_modules_osparc_variables.py @@ -9,15 +9,14 @@ import json from collections.abc import AsyncIterable from contextlib import asynccontextmanager -from datetime import timedelta +from copy import deepcopy from unittest.mock import AsyncMock, Mock import pytest from asgi_lifespan import LifespanManager from faker import Faker from fastapi import FastAPI -from models_library.api_schemas_webserver.auth import ApiKeyGet -from models_library.products import ProductName +from models_library.service_settings_labels import ComposeSpecLabelDict from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.utils.specs_substitution import SubstitutionValue @@ -30,6 +29,9 @@ from simcore_service_director_v2.api.dependencies.database import RepoType from simcore_service_director_v2.modules.osparc_variables import substitutions from simcore_service_director_v2.modules.osparc_variables.substitutions import ( + _NEW_ENVIRONMENTS, + OsparcSessionVariablesTable, + auto_inject_environments, resolve_and_substitute_session_variables_in_specs, substitute_vendor_secrets_in_specs, ) @@ -138,35 +140,6 @@ def mock_user_repo(mocker: MockerFixture, mock_repo_db_engine: None) -> None: mocker.patch(f"{base}.UsersRepo.get_email", return_value="e@ma.il") -@pytest.fixture -def mock_api_key_manager(mocker: MockerFixture) -> None: - - fake_data = ApiKeyGet.parse_obj(ApiKeyGet.Config.schema_extra["examples"][0]) - - async def _create( - app: FastAPI, - *, - product_name: ProductName, - user_id: UserID, - name: str, - expiration: timedelta, - ): - assert app - assert product_name - assert user_id - assert expiration is None - - fake_data.display_name = name - return fake_data - - # mocks RPC 
interface - mocker.patch( - "simcore_service_director_v2.modules.osparc_variables._api_auth.get_or_create_api_key_and_secret", - side_effect=_create, - autospec=True, - ) - - @pytest.fixture async def fake_app(faker: Faker) -> AsyncIterable[FastAPI]: app = FastAPI() @@ -183,7 +156,10 @@ async def fake_app(faker: Faker) -> AsyncIterable[FastAPI]: async def test_resolve_and_substitute_session_variables_in_specs( - mock_user_repo: None, mock_api_key_manager: None, fake_app: FastAPI, faker: Faker + mock_user_repo: None, + mock_osparc_variables_api_auth_rpc: None, + fake_app: FastAPI, + faker: Faker, ): specs = { "product_name": "${OSPARC_VARIABLE_PRODUCT_NAME}", @@ -241,3 +217,81 @@ async def test_substitute_vendor_secrets_in_specs( print("REPLACED SPECS\n", replaced_specs) assert VENDOR_SECRET_PREFIX not in f"{replaced_specs}" + + +@pytest.fixture +def compose_spec(): + return { + "version": "3.7", + "services": { + "jupyter-math": { + "environment": [ + "OSPARC_API_KEY=$OSPARC_VARIABLE_API_KEY", + "OSPARC_API_SECRET=$OSPARC_VARIABLE_API_SECRET", + "FOO=33", + ], + "image": "${SIMCORE_REGISTRY}/simcore/services/dynamic/jupyter-math:${SERVICE_VERSION}", + "networks": {"dy-sidecar_10e1b317-de62-44ca-979e-09bf15663834": None}, + "deploy": { + "resources": { + "reservations": {"cpus": "0.1", "memory": "2147483648"}, + "limits": {"cpus": "4.0", "memory": "17179869184"}, + } + }, + "labels": [ + "io.simcore.runtime.cpu-limit=4.0", + "io.simcore.runtime.memory-limit=17179869184", + "io.simcore.runtime.node-id=10e1b317-de62-44ca-979e-09bf15663834", + "io.simcore.runtime.product-name=osparc", + "io.simcore.runtime.project-id=e341df9e-2e38-11ef-894b-0242ac140025", + "io.simcore.runtime.simcore-user-agent=undefined", + "io.simcore.runtime.swarm-stack-name=master-simcore", + "io.simcore.runtime.user-id=1", + ], + } + }, + "networks": { + "dy-sidecar_10e1b317-de62-44ca-979e-09bf15663834": { + "name": "dy-sidecar_10e1b317-de62-44ca-979e-09bf15663834", + "external": True, + "driver": "overlay", + }, + "master-simcore_interactive_services_subnet": { + "name": "master-simcore_interactive_services_subnet", + "external": True, + "driver": "overlay", + }, + }, + } + + +def test_auto_inject_environments_added_to_all_services_in_compose( + compose_spec: ComposeSpecLabelDict, +): + + before = deepcopy(compose_spec) + + after = auto_inject_environments(compose_spec) + + assert before != after + assert after == compose_spec + + auto_injected_envs = set(_NEW_ENVIRONMENTS.keys()) + for name, service in compose_spec.get("services", {}).items(): + + # all services have environment specs + assert service["environment"], f"expected in {name} service" + + # injected? 
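+        # NOTE: `environment` may be either a dict ({NAME: value}) or a list of
+        # "NAME=value" strings, so the assertion below compares against the
+        # str() form to cover both layouts with a single check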
+ for env_name in auto_injected_envs: + assert env_name in str(service["environment"]) + + +def test_auto_inject_environments_are_registered(): + app = FastAPI() + table = OsparcSessionVariablesTable.create(app) + + registered_osparc_variables = set(table.variables_names()) + auto_injected_osparc_variables = {_.lstrip("$") for _ in _NEW_ENVIRONMENTS.values()} + + assert auto_injected_osparc_variables.issubset(registered_osparc_variables) From 68901097ee7fd3530b1569decaae1c44f498eace Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 21 Jun 2024 13:36:59 +0200 Subject: [PATCH 056/219] =?UTF-8?q?=F0=9F=8E=A8=20Drafts=20new=20web-api?= =?UTF-8?q?=20for=20catalog=20services=20(#5969)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/specs/web-server/_catalog.py | 29 ++ .../api_schemas_catalog/services.py | 2 +- .../api_schemas_webserver/catalog.py | 53 ++- .../src/models_library/services.py | 195 +--------- .../src/models_library/services_db.py | 2 +- .../src/models_library/services_history.py | 45 +++ ...adata.py => services_metadata_editable.py} | 0 .../services_metadata_published.py | 182 +++++++++ packages/models-library/tests/conftest.py | 8 + .../models-library/tests/test_services.py | 3 +- .../models-library/tests/test_services_io.py | 6 +- .../tests/test_utils_labels_annotations.py} | 4 +- .../source/class/osparc/info/ServiceLarge.js | 2 +- .../api/v0/openapi.yaml | 356 ++++++++++++++++++ .../catalog/_handlers.py | 85 ++++- ...=> test_catalog_handlers__pricing_plan.py} | 0 .../01/test_catalog_handlers__services.py | 102 +++++ ...t_catalog_handlers__services_resources.py} | 0 18 files changed, 876 insertions(+), 198 deletions(-) create mode 100644 packages/models-library/src/models_library/services_history.py rename packages/models-library/src/models_library/{services_metadata.py => services_metadata_editable.py} (100%) create mode 100644 packages/models-library/src/models_library/services_metadata_published.py rename packages/{service-integration/tests/test_labels_annotations.py => models-library/tests/test_utils_labels_annotations.py} (91%) rename services/web/server/tests/unit/with_dbs/01/{test_catalog_api__pricing_plan.py => test_catalog_handlers__pricing_plan.py} (100%) create mode 100644 services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py rename services/web/server/tests/unit/with_dbs/01/{test_catalog_api.py => test_catalog_handlers__services_resources.py} (100%) diff --git a/api/specs/web-server/_catalog.py b/api/specs/web-server/_catalog.py index 6432a76a1d2..50489394ae7 100644 --- a/api/specs/web-server/_catalog.py +++ b/api/specs/web-server/_catalog.py @@ -3,6 +3,7 @@ from fastapi import APIRouter, Depends from models_library.api_schemas_api_server.pricing_plans import ServicePricingPlanGet from models_library.api_schemas_webserver.catalog import ( + DEVServiceGet, ServiceGet, ServiceInputGet, ServiceInputKey, @@ -14,6 +15,7 @@ from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.catalog._handlers import ( + ListServiceParams, ServicePathParams, _FromServiceOutputParams, _ServiceInputsPathParams, @@ -34,6 +36,33 @@ # +@router.get( + "/dev/catalog/services/-/latest", + response_model=Envelope[list[DEVServiceGet]], +) +def dev_list_services_latest(_query_params: Annotated[ListServiceParams, Depends()]): + pass + + +@router.get( + 
"/dev/catalog/services/{service_key}/{service_version}", + response_model=Envelope[DEVServiceGet], +) +def dev_get_service(_path_params: Annotated[ServicePathParams, Depends()]): + ... + + +@router.patch( + "/dev/catalog/services/{service_key}/{service_version}", + response_model=Envelope[DEVServiceGet], +) +def dev_update_service( + _path_params: Annotated[ServicePathParams, Depends()], + _update: ServiceUpdate, +): + ... + + @router.get( "/catalog/services", response_model=Envelope[list[ServiceGet]], diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index 1d238e9d95c..55493c7223c 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -5,7 +5,7 @@ from ..emails import LowerCaseEmailStr from ..services import ServiceMetaDataPublished from ..services_access import ServiceAccessRights -from ..services_metadata import ServiceMetaDataEditable +from ..services_metadata_editable import ServiceMetaDataEditable from ..services_resources import ServiceResourcesDict diff --git a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py index 84a89ee5c91..929165329c2 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py @@ -5,7 +5,9 @@ from pydantic.main import BaseModel from ..api_schemas_catalog import services as api_schemas_catalog_services -from ..services import ServiceInput, ServiceOutput, ServicePortKey +from ..services_history import ServiceRelease +from ..services_io import ServiceInput, ServiceOutput +from ..services_types import ServicePortKey from ..utils.change_case import snake_to_camel from ..utils.json_serialization import json_dumps, json_loads from ._base import InputSchema, OutputSchema @@ -133,3 +135,52 @@ class Config(InputSchema.Config): class ServiceResourcesGet(api_schemas_catalog_services.ServiceResourcesGet): class Config(OutputSchema.Config): ... + + +class DEVServiceGet(ServiceGet): + # pylint: disable=too-many-ancestors + + history: list[ServiceRelease] = Field( + default=[], + description="history of releases for this service at this point in time, starting from the newest to the oldest." 
+ " It includes current release.", + ) + + class Config(OutputSchema.Config): + schema_extra: ClassVar[dict[str, Any]] = { + "example": { + **_EXAMPLE, # 1.0.0 + "history": [ + { + "version": "1.0.5", + "version_display": "Summer Release", + "release_date": "2024-07-20T15:00:00", + }, + { + "version": _EXAMPLE["version"], + "compatibility": { + "can_update_to": "1.0.5", + }, + }, + {"version": "0.9.11"}, + {"version": "0.9.10"}, + { + "version": "0.9.8", + "compatibility": { + "can_update_to": "0.9.10", + }, + }, + { + "version": "0.9.1", + "version_display": "Matterhorn", + "release_date": "2024-01-20T18:49:17", + "compatibility": { + "can_update_to": "0.9.10", + }, + }, + {"version": "0.9.0"}, + {"version": "0.8.0"}, + {"version": "0.1.0"}, + ], + } + } diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py index 7b580de6555..23874571f96 100644 --- a/packages/models-library/src/models_library/services.py +++ b/packages/models-library/src/models_library/services.py @@ -1,16 +1,10 @@ -from datetime import datetime -from typing import Any, ClassVar, Final, TypeAlias - -from pydantic import Extra, Field, NonNegativeInt - -from .basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from .boot_options import BootOption, BootOptions -from .emails import LowerCaseEmailStr from .services_authoring import Author, Badge -from .services_base import ServiceBase, ServiceKeyVersion -from .services_constants import ANY_FILETYPE, LATEST_INTEGRATION_VERSION +from .services_base import ServiceKeyVersion +from .services_constants import LATEST_INTEGRATION_VERSION from .services_enums import ServiceType from .services_io import BaseServiceIOModel, ServiceInput, ServiceOutput +from .services_metadata_published import ServiceInputsDict, ServiceMetaDataPublished from .services_types import ( DynamicServiceKey, RunID, @@ -19,196 +13,23 @@ ServiceVersion, ) -# NOTE: these asserts are here to avoid pre-commit to prune these imports -assert BaseServiceIOModel # nsoec -assert DynamicServiceKey # nosec -assert LATEST_INTEGRATION_VERSION # nosec -assert RunID # nosec -assert ServiceKey # nosec -assert ServiceType # nosec -assert ServiceVersion # nosec - __all__: tuple[str, ...] 
= ( "Author", "Badge", "BaseServiceIOModel", + "BootOption", "BootOptions", "DynamicServiceKey", "LATEST_INTEGRATION_VERSION", "RunID", "ServiceInput", + "ServiceInputsDict", "ServiceKey", + "ServiceKeyVersion", + "ServiceMetaDataPublished", "ServiceOutput", "ServicePortKey", "ServiceType", "ServiceVersion", ) - - -ServiceInputsDict: TypeAlias = dict[ServicePortKey, ServiceInput] -ServiceOutputsDict: TypeAlias = dict[ServicePortKey, ServiceOutput] - - -_EXAMPLE: Final = { - "name": "oSparc Python Runner", - "key": "simcore/services/comp/osparc-python-runner", - "type": "computational", - "integration-version": "1.0.0", - "progress_regexp": "^(?:\\[?PROGRESS\\]?:?)?\\s*(?P[0-1]?\\.\\d+|\\d+\\s*(?P%))", - "version": "1.7.0", - "description": "oSparc Python Runner", - "contact": "smith@company.com", - "authors": [ - { - "name": "John Smith", - "email": "smith@company.com", - "affiliation": "Company", - }, - { - "name": "Richard Brown", - "email": "brown@uni.edu", - "affiliation": "University", - }, - ], - "inputs": { - "input_1": { - "displayOrder": 1, - "label": "Input data", - "description": "Any code, requirements or data file", - "type": ANY_FILETYPE, - } - }, - "outputs": { - "output_1": { - "displayOrder": 1, - "label": "Output data", - "description": "All data produced by the script is zipped as output_data.zip", - "type": ANY_FILETYPE, - "fileToKeyMap": {"output_data.zip": "output_1"}, - } - }, -} - -_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER = { - **_EXAMPLE, - "description": "oSparc Python Runner with boot options", - "inputs": { - "input_1": { - "label": "Input data", - "description": "Any code, requirements or data file", - "type": ANY_FILETYPE, - } - }, - "outputs": { - "output_1": { - "label": "Output data", - "description": "All data produced by the script is zipped as output_data.zip", - "type": ANY_FILETYPE, - "fileToKeyMap": {"output_data.zip": "output_1"}, - } - }, - "boot-options": { - "example_service_defined_boot_mode": BootOption.Config.schema_extra["examples"][ - 0 - ], - "example_service_defined_theme_selection": BootOption.Config.schema_extra[ - "examples" - ][1], - }, - "min-visible-inputs": 2, -} - - -class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBase): - """ - Service metadata at publication time - - - read-only (can only be changed overwriting the image labels in the registry) - - base metaddata - - injected in the image labels - - NOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image - """ - - version_display: str | None = Field( - None, - description="A user-friendly or marketing name for the release." - " This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.'" - " This name is not used for version comparison but is useful for communication and documentation purposes.", - ) - - release_date: datetime | None = Field( - None, - description="A timestamp when the specific version of the service was released." - " This field helps in tracking the timeline of releases and understanding the sequence of updates." 
- " A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]", - ) - - integration_version: str | None = Field( - None, - alias="integration-version", - description="This version is used to maintain backward compatibility when there are changes in the way a service is integrated into the framework", - regex=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS, - ) - - service_type: ServiceType = Field( - ..., - alias="type", - description="service type", - examples=["computational"], - ) - - badges: list[Badge] | None = Field(None) - - authors: list[Author] = Field(..., min_items=1) - contact: LowerCaseEmailStr = Field( - ..., - description="email to correspond to the authors about the node", - examples=["lab@net.flix"], - ) - inputs: ServiceInputsDict | None = Field( - ..., description="definition of the inputs of this node" - ) - outputs: ServiceOutputsDict | None = Field( - ..., description="definition of the outputs of this node" - ) - - boot_options: BootOptions | None = Field( - None, - alias="boot-options", - description="Service defined boot options. These get injected in the service as env variables.", - ) - - min_visible_inputs: NonNegativeInt | None = Field( - None, - alias="min-visible-inputs", - description=( - "The number of 'data type inputs' displayed by default in the UI. " - "When None all 'data type inputs' are displayed." - ), - ) - - progress_regexp: str | None = Field( - None, - alias="progress_regexp", - description="regexp pattern for detecting computational service's progress", - ) - - class Config: - description = "Description of a simcore node 'class' with input and output" - extra = Extra.forbid - frozen = False # overrides config from ServiceKeyVersion. - allow_population_by_field_name = True - - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ - _EXAMPLE, - _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, - # latest - { - **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, - "version_display": "Matterhorn Release", - "release_date": "2024-05-31T13:45:30", - }, - ] - } +# nopycln: file diff --git a/packages/models-library/src/models_library/services_db.py b/packages/models-library/src/models_library/services_db.py index e7e51fe8955..95531f0c112 100644 --- a/packages/models-library/src/models_library/services_db.py +++ b/packages/models-library/src/models_library/services_db.py @@ -11,7 +11,7 @@ from .services_access import ServiceGroupAccessRights from .services_base import ServiceKeyVersion -from .services_metadata import ServiceMetaDataEditable +from .services_metadata_editable import ServiceMetaDataEditable # ------------------------------------------------------------------- # Databases models diff --git a/packages/models-library/src/models_library/services_history.py b/packages/models-library/src/models_library/services_history.py new file mode 100644 index 00000000000..c12c27ff3a6 --- /dev/null +++ b/packages/models-library/src/models_library/services_history.py @@ -0,0 +1,45 @@ +from datetime import datetime +from typing import Any, ClassVar, TypeAlias + +from pydantic import BaseModel, Field + +from .services_types import ServiceVersion + + +class Compatibility(BaseModel): + # NOTE: as an object it is more maintainable than a list + can_update_to: ServiceVersion = Field( + ..., + description="Latest compatible version at this moment." 
+ "Current service can update to this version and still work", + ) + + +class ServiceRelease(BaseModel): + # from ServiceMetaDataPublished + version: ServiceVersion + version_display: str | None = Field(default=None) + release_date: datetime | None = Field(default=None) + + # computed compatibility + compatibility: Compatibility | None = Field(default=None) + + class Config: + schema_extra: ClassVar[dict[str, Any]] = { + "examples": [ + # minimal + { + "version": "0.9.0", + }, + # complete + { + "version": "0.9.1", + "version_display": "Matterhorn", + "release_date": "2024-06-20T18:49:17", + "compatibility": {"can_update_to": "0.9.10"}, + }, + ] + } + + +ReleaseHistory: TypeAlias = list[ServiceRelease] diff --git a/packages/models-library/src/models_library/services_metadata.py b/packages/models-library/src/models_library/services_metadata_editable.py similarity index 100% rename from packages/models-library/src/models_library/services_metadata.py rename to packages/models-library/src/models_library/services_metadata_editable.py diff --git a/packages/models-library/src/models_library/services_metadata_published.py b/packages/models-library/src/models_library/services_metadata_published.py new file mode 100644 index 00000000000..4809f865e06 --- /dev/null +++ b/packages/models-library/src/models_library/services_metadata_published.py @@ -0,0 +1,182 @@ +from datetime import datetime +from typing import Any, ClassVar, Final, TypeAlias + +from pydantic import Extra, Field, NonNegativeInt + +from .basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS +from .boot_options import BootOption, BootOptions +from .emails import LowerCaseEmailStr +from .services_authoring import Author, Badge +from .services_base import ServiceBase, ServiceKeyVersion +from .services_constants import ANY_FILETYPE +from .services_enums import ServiceType +from .services_io import ServiceInput, ServiceOutput +from .services_types import ServicePortKey + +ServiceInputsDict: TypeAlias = dict[ServicePortKey, ServiceInput] +ServiceOutputsDict: TypeAlias = dict[ServicePortKey, ServiceOutput] + + +_EXAMPLE: Final = { + "name": "oSparc Python Runner", + "key": "simcore/services/comp/osparc-python-runner", + "type": "computational", + "integration-version": "1.0.0", + "progress_regexp": "^(?:\\[?PROGRESS\\]?:?)?\\s*(?P[0-1]?\\.\\d+|\\d+\\s*(?P%))", + "version": "1.7.0", + "description": "oSparc Python Runner", + "contact": "smith@company.com", + "authors": [ + { + "name": "John Smith", + "email": "smith@company.com", + "affiliation": "Company", + }, + { + "name": "Richard Brown", + "email": "brown@uni.edu", + "affiliation": "University", + }, + ], + "inputs": { + "input_1": { + "displayOrder": 1, + "label": "Input data", + "description": "Any code, requirements or data file", + "type": ANY_FILETYPE, + } + }, + "outputs": { + "output_1": { + "displayOrder": 1, + "label": "Output data", + "description": "All data produced by the script is zipped as output_data.zip", + "type": ANY_FILETYPE, + "fileToKeyMap": {"output_data.zip": "output_1"}, + } + }, +} + +_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER = { + **_EXAMPLE, + "description": "oSparc Python Runner with boot options", + "inputs": { + "input_1": { + "label": "Input data", + "description": "Any code, requirements or data file", + "type": ANY_FILETYPE, + } + }, + "outputs": { + "output_1": { + "label": "Output data", + "description": "All data produced by the script is zipped as output_data.zip", + "type": ANY_FILETYPE, + "fileToKeyMap": {"output_data.zip": "output_1"}, + } + 
},
+    "boot-options": {
+        "example_service_defined_boot_mode": BootOption.Config.schema_extra["examples"][
+            0
+        ],
+        "example_service_defined_theme_selection": BootOption.Config.schema_extra[
+            "examples"
+        ][1],
+    },
+    "min-visible-inputs": 2,
+}
+
+
+class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBase):
+    """
+    Service metadata at publication time
+
+    - read-only (can only be changed overwriting the image labels in the registry)
+    - base metadata
+    - injected in the image labels
+
+    NOTE: This model is serialized in .osparc/metadata.yml and in the labels of the docker image
+    """
+
+    version_display: str | None = Field(
+        None,
+        description="A user-friendly or marketing name for the release."
+        " This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.'"
+        " This name is not used for version comparison but is useful for communication and documentation purposes.",
+    )
+
+    release_date: datetime | None = Field(
+        None,
+        description="A timestamp when the specific version of the service was released."
+        " This field helps in tracking the timeline of releases and understanding the sequence of updates."
+        " A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]",
+    )
+
+    integration_version: str | None = Field(
+        None,
+        alias="integration-version",
+        description="This version is used to maintain backward compatibility when there are changes in the way a service is integrated into the framework",
+        regex=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS,
+    )
+
+    service_type: ServiceType = Field(
+        ...,
+        alias="type",
+        description="service type",
+        examples=["computational"],
+    )
+
+    badges: list[Badge] | None = Field(None)
+
+    authors: list[Author] = Field(..., min_items=1)
+    contact: LowerCaseEmailStr = Field(
+        ...,
+        description="email to correspond to the authors about the node",
+        examples=["lab@net.flix"],
+    )
+    inputs: ServiceInputsDict | None = Field(
+        ..., description="definition of the inputs of this node"
+    )
+    outputs: ServiceOutputsDict | None = Field(
+        ..., description="definition of the outputs of this node"
+    )
+
+    boot_options: BootOptions | None = Field(
+        None,
+        alias="boot-options",
+        description="Service defined boot options. These get injected in the service as env variables.",
+    )
+
+    min_visible_inputs: NonNegativeInt | None = Field(
+        None,
+        alias="min-visible-inputs",
+        description=(
+            "The number of 'data type inputs' displayed by default in the UI. "
+            "When None all 'data type inputs' are displayed."
+        ),
+    )
+
+    progress_regexp: str | None = Field(
+        None,
+        alias="progress_regexp",
+        description="regexp pattern for detecting computational service's progress",
+    )
+
+    class Config:
+        description = "Description of a simcore node 'class' with input and output"
+        extra = Extra.forbid
+        frozen = False  # overrides config from ServiceKeyVersion.
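+        # NOTE: several fields above use aliases (e.g. "integration-version",
+        # "boot-options", "min-visible-inputs") to match the label keys;
+        # population by field name is kept so instances can also be
+        # constructed from python identifiers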
+ allow_population_by_field_name = True + + schema_extra: ClassVar[dict[str, Any]] = { + "examples": [ + _EXAMPLE, + _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, + # latest + { + **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, + "version_display": "Matterhorn Release", + "release_date": "2024-05-31T13:45:30", + }, + ] + } diff --git a/packages/models-library/tests/conftest.py b/packages/models-library/tests/conftest.py index 7044dae9ad1..9169e570b51 100644 --- a/packages/models-library/tests/conftest.py +++ b/packages/models-library/tests/conftest.py @@ -31,3 +31,11 @@ def project_slug_dir() -> Path: assert folder.exists() assert any(folder.glob("src/models_library")) return folder + + +@pytest.fixture +def tests_data_dir(project_tests_dir: Path) -> Path: + path = project_tests_dir / "data" + assert path.exists() + assert path.is_dir() + return path diff --git a/packages/models-library/tests/test_services.py b/packages/models-library/tests/test_services.py index 8391159454b..1d6e78b2d65 100644 --- a/packages/models-library/tests/test_services.py +++ b/packages/models-library/tests/test_services.py @@ -10,7 +10,8 @@ import pytest from models_library.basic_regex import VERSION_RE -from models_library.services import BootOption, ServiceBase, ServiceMetaDataPublished +from models_library.services import BootOption, ServiceMetaDataPublished +from models_library.services_base import ServiceBase from models_library.services_regex import ( COMPUTATIONAL_SERVICE_KEY_FORMAT, DYNAMIC_SERVICE_KEY_FORMAT, diff --git a/packages/models-library/tests/test_services_io.py b/packages/models-library/tests/test_services_io.py index b0db489a20d..acfb02a05b1 100644 --- a/packages/models-library/tests/test_services_io.py +++ b/packages/models-library/tests/test_services_io.py @@ -9,12 +9,10 @@ from pint import Unit, UnitRegistry -def test_service_port_units(project_tests_dir: Path): +def test_service_port_units(tests_data_dir: Path): ureg = UnitRegistry() - data = yaml.safe_load( - (project_tests_dir / "data" / "metadata-sleeper-2.0.2.yaml").read_text() - ) + data = yaml.safe_load((tests_data_dir / "metadata-sleeper-2.0.2.yaml").read_text()) print(ServiceMetaDataPublished.schema_json(indent=2)) service_meta = ServiceMetaDataPublished.parse_obj(data) diff --git a/packages/service-integration/tests/test_labels_annotations.py b/packages/models-library/tests/test_utils_labels_annotations.py similarity index 91% rename from packages/service-integration/tests/test_labels_annotations.py rename to packages/models-library/tests/test_utils_labels_annotations.py index f92b9e75c0e..68509a43a37 100644 --- a/packages/service-integration/tests/test_labels_annotations.py +++ b/packages/models-library/tests/test_utils_labels_annotations.py @@ -12,7 +12,9 @@ @pytest.fixture def metadata_config(tests_data_dir: Path): - config = yaml.safe_load((tests_data_dir / "metadata.yml").read_text()) + config = yaml.safe_load( + (tests_data_dir / "metadata-sleeper-2.0.2.yaml").read_text() + ) # adds some env-vars # FIXME: if version is set as '1.0' then pydantic will resolve it as a float!! 
config.update({"schema-version": "1.0.0", "build-date": "${BUILD_DATE}"}) diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js index 6df9c40738e..eae233f1b01 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js @@ -230,7 +230,7 @@ qx.Class.define("osparc.info.ServiceLarge", { osparc.metadata.Quality.isEnabled(this.getService()["quality"]) ) { extraInfo.push({ - label: this.tr("QUAILITY"), + label: this.tr("QUALITY"), view: this.__createQuality(), action: { button: osparc.utils.Utils.getEditButton(), diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 857ae652973..96ca5231d72 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -485,6 +485,99 @@ paths: application/json: schema: {} image/png: {} + /v0/dev/catalog/services/-/latest: + get: + tags: + - catalog + summary: Dev List Services Latest + operationId: dev_list_services_latest + parameters: + - required: false + schema: + title: Limit + exclusiveMaximum: true + minimum: 1 + type: integer + default: 20 + maximum: 50 + name: limit + in: query + - required: false + schema: + title: Offset + minimum: 0 + type: integer + default: 0 + name: offset + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_list_models_library.api_schemas_webserver.catalog.DEVServiceGet__' + /v0/dev/catalog/services/{service_key}/{service_version}: + get: + tags: + - catalog + summary: Dev Get Service + operationId: dev_get_service + parameters: + - required: true + schema: + title: Service Key + pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ + type: string + name: service_key + in: path + - required: true + schema: + title: Service Version + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + type: string + name: service_version + in: path + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_DEVServiceGet_' + patch: + tags: + - catalog + summary: Dev Update Service + operationId: dev_update_service + parameters: + - required: true + schema: + title: Service Key + pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ + type: string + name: service_key + in: path + - required: true + schema: + title: Service Version + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + type: string + name: service_version + in: path + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ServiceUpdate' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_DEVServiceGet_' /v0/catalog/services: get: tags: @@ -5692,6 +5785,18 @@ components: next_url: title: Next Url type: string + Compatibility: + title: Compatibility + required: + - 
can_update_to + type: object + properties: + can_update_to: + title: Can Update To + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + type: string + description: Latest compatible version at this moment.Current service can + update to this version and still work ComputationStart: title: ComputationStart type: object @@ -5827,6 +5932,219 @@ components: title: Comment maxLength: 100 type: string + DEVServiceGet: + title: DEVServiceGet + required: + - name + - description + - key + - version + - type + - authors + - contact + - inputs + - outputs + type: object + properties: + name: + title: Name + type: string + description: 'Display name: short, human readable name for the node' + example: Fast Counter + thumbnail: + title: Thumbnail + maxLength: 2083 + minLength: 1 + type: string + description: url to the thumbnail + format: uri + description: + title: Description + type: string + description: human readable description of the purpose of the node + deprecated: + title: Deprecated + type: string + description: If filled with a date, then the service is to be deprecated + at that date (e.g. cannot start anymore) + format: date-time + classifiers: + title: Classifiers + type: array + items: + type: string + quality: + title: Quality + type: object + default: {} + accessRights: + title: Accessrights + type: object + additionalProperties: + $ref: '#/components/schemas/ServiceGroupAccessRights' + description: service access rights per group id + key: + title: Key + pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ + type: string + description: distinctive name for the node based on the docker registry + path + version: + title: Version + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + type: string + description: service version number + versionDisplay: + title: Versiondisplay + type: string + description: A user-friendly or marketing name for the release. This can + be used to reference the release in a more readable and recognizable format, + such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' This + name is not used for version comparison but is useful for communication + and documentation purposes. + releaseDate: + title: Releasedate + type: string + description: "A timestamp when the specific version of the service was released.\ + \ This field helps in tracking the timeline of releases and understanding\ + \ the sequence of updates. 
A timestamp string should be formatted as YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z\ + \ or [\xB1]HH[:]MM]" + format: date-time + integration-version: + title: Integration-Version + pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$ + type: string + description: This version is used to maintain backward compatibility when + there are changes in the way a service is integrated into the framework + type: + allOf: + - $ref: '#/components/schemas/ServiceType' + description: service type + badges: + title: Badges + type: array + items: + $ref: '#/components/schemas/Badge' + authors: + title: Authors + minItems: 1 + type: array + items: + $ref: '#/components/schemas/Author' + contact: + title: Contact + type: string + description: email to correspond to the authors about the node + format: email + inputs: + title: Inputs + type: object + description: inputs with extended information + outputs: + title: Outputs + type: object + description: outputs with extended information + boot-options: + title: Boot-Options + type: object + description: Service defined boot options. These get injected in the service + as env variables. + min-visible-inputs: + title: Min-Visible-Inputs + minimum: 0 + type: integer + description: The number of 'data type inputs' displayed by default in the + UI. When None all 'data type inputs' are displayed. + progress_regexp: + title: Progress Regexp + type: string + description: regexp pattern for detecting computational service's progress + owner: + title: Owner + type: string + format: email + history: + title: History + type: array + items: + $ref: '#/components/schemas/ServiceRelease' + description: history of releases for this service at this point in time, + starting from the newest to the oldest. It includes current release. 
+ default: [] + description: 'Service metadata at publication time + + + - read-only (can only be changed overwriting the image labels in the registry) + + - base metaddata + + - injected in the image labels + + + NOTE: This model is serialized in .osparc/metadata.yml and in the labels of + the docker image' + example: + name: File Picker + description: description + classifiers: [] + quality: {} + accessRights: + '1': + execute_access: true + write_access: false + '4': + execute_access: true + write_access: true + key: simcore/services/frontend/file-picker + version: 1.0.0 + type: dynamic + authors: + - name: Red Pandas + email: redpandas@wonderland.com + contact: redpandas@wonderland.com + inputs: + input0: + label: Acceleration + description: acceleration with units + type: ref_contentSchema + contentSchema: + title: Acceleration + type: number + x_unit: m/s**2 + keyId: input_1 + unitLong: meter/second3 + unitShort: m/s3 + outputs: + outFile: + displayOrder: 2 + label: Time Slept + description: Time the service waited before completion + type: number + unit: second + unitLong: seconds + unitShort: sec + keyId: output_2 + owner: redpandas@wonderland.com + history: + - version: 1.0.5 + version_display: Summer Release + release_date: '2024-07-20T15:00:00' + - version: 1.0.0 + compatibility: + can_update_to: 1.0.5 + - version: 0.9.11 + - version: 0.9.10 + - version: 0.9.8 + compatibility: + can_update_to: 0.9.10 + - version: 0.9.1 + version_display: Matterhorn + release_date: '2024-01-20T18:49:17' + compatibility: + can_update_to: 0.9.10 + - version: 0.9.0 + - version: 0.8.0 + - version: 0.1.0 DatCoreFileLink: title: DatCoreFileLink required: @@ -6002,6 +6320,14 @@ components: $ref: '#/components/schemas/ComputationTaskGet' error: title: Error + Envelope_DEVServiceGet_: + title: Envelope[DEVServiceGet] + type: object + properties: + data: + $ref: '#/components/schemas/DEVServiceGet' + error: + title: Error Envelope_Error_: title: Envelope[Error] type: object @@ -6537,6 +6863,17 @@ components: $ref: '#/components/schemas/FileMetaDataGet' error: title: Error + Envelope_list_models_library.api_schemas_webserver.catalog.DEVServiceGet__: + title: Envelope[list[models_library.api_schemas_webserver.catalog.DEVServiceGet]] + type: object + properties: + data: + title: Data + type: array + items: + $ref: '#/components/schemas/DEVServiceGet' + error: + title: Error Envelope_list_models_library.api_schemas_webserver.catalog.ServiceGet__: title: Envelope[list[models_library.api_schemas_webserver.catalog.ServiceGet]] type: object @@ -10019,6 +10356,25 @@ components: type: array items: $ref: '#/components/schemas/PricingUnitGet' + ServiceRelease: + title: ServiceRelease + required: + - version + type: object + properties: + version: + title: Version + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + type: string + version_display: + title: Version Display + type: string + release_date: + title: Release Date + type: string + format: date-time + compatibility: + $ref: '#/components/schemas/Compatibility' ServiceRunGet: title: ServiceRunGet required: diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py index 073d13e62c1..fb55630c048 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py @@ 
-11,12 +11,14 @@
 from aiohttp.web import Request, RouteTableDef
 from models_library.api_schemas_webserver.catalog import (
+    DEVServiceGet,
     ServiceGet,
     ServiceInputKey,
     ServiceOutputKey,
     ServiceUpdate,
 )
 from models_library.api_schemas_webserver.resource_usage import PricingPlanGet
+from models_library.rest_pagination import PageQueryParameters
 from models_library.services import ServiceKey, ServiceVersion
 from models_library.services_resources import (
     ServiceResourcesDict,
@@ -25,10 +27,14 @@
 from models_library.utils.json_serialization import json_loads
 from pydantic import BaseModel, Extra, Field, parse_obj_as, validator
 from servicelib.aiohttp.requests_validation import (
+    parse_request_body_as,
     parse_request_path_parameters_as,
     parse_request_query_parameters_as,
 )
 from servicelib.rest_constants import RESPONSE_MODEL_POLICY
+from simcore_service_webserver.application_settings_utils import (
+    requires_dev_feature_enabled,
+)

 from .._meta import API_VTAG
 from ..login.decorators import login_required
@@ -42,6 +48,7 @@
 _logger = logging.getLogger(__name__)

 VTAG: Final[str] = f"/{API_VTAG}"
+VTAG_DEV: Final[str] = f"{VTAG}/dev"

 routes = RouteTableDef()

@@ -63,6 +70,81 @@ def ensure_unquoted(cls, v):
         return v


+class ListServiceParams(PageQueryParameters):
+    ...
+
+
+@routes.get(
+    f"{VTAG_DEV}/catalog/services/-/latest",
+    name="dev_list_services_latest",
+)
+@requires_dev_feature_enabled
+@login_required
+@permission_required("services.catalog.*")
+async def dev_list_services_latest(request: Request):
+    ctx = CatalogRequestContext.create(request)
+    query_params = parse_request_query_parameters_as(ListServiceParams, request)
+
+    assert ctx  # nosec
+    assert query_params  # nosec
+
+    _logger.debug("Mocking response for %s...", request)
+    got = [
+        parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"]),
+    ]
+
+    return envelope_json_response(
+        got[query_params.offset : query_params.offset + query_params.limit]
+    )
+
+
+@routes.get(
+    f"{VTAG_DEV}/catalog/services/{{service_key}}/{{service_version}}",
+    name="dev_get_service",
+)
+@requires_dev_feature_enabled
+@login_required
+@permission_required("services.catalog.*")
+async def dev_get_service(request: Request):
+    ctx = CatalogRequestContext.create(request)
+    path_params = parse_request_path_parameters_as(ServicePathParams, request)
+
+    assert ctx  # nosec
+    assert path_params  # nosec
+
+    _logger.debug("Mocking response for %s...", request)
+    got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"])
+    got.version = path_params.service_version
+    got.key = path_params.service_key
+
+    return envelope_json_response(got)
+
+
+@routes.patch(
+    f"{VTAG_DEV}/catalog/services/{{service_key}}/{{service_version}}",
+    name="dev_update_service",
+)
+@requires_dev_feature_enabled
+@login_required
+@permission_required("services.catalog.*")
+async def dev_update_service(request: Request):
+    ctx = CatalogRequestContext.create(request)
+    path_params = parse_request_path_parameters_as(ServicePathParams, request)
+    update: ServiceUpdate = await parse_request_body_as(ServiceUpdate, request)
+
+    assert ctx  # nosec
+    assert path_params  # nosec
+    assert update  # nosec
+
+    _logger.debug("Mocking response for %s...", request)
+    got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"])
+    got.version = path_params.service_version
+    got.key = path_params.service_key
+    updated = got.copy(update=update.dict(exclude_unset=True))
+
+    return envelope_json_response(updated)
+
+
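+# NOTE: the dev_* handlers above are placeholders: they exercise the full
+# request-validation chain (dev-feature flag, login, permissions and the
+# path/query/body models) but respond with the example payload from
+# DEVServiceGet.Config.schema_extra instead of querying the catalog service.
+# Pagination in dev_list_services_latest is a plain slice of that one-item
+# list using the offset/limit query parameters.
+
+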
@routes.get(f"{VTAG}/catalog/services", name="list_services") @login_required @permission_required("services.catalog.*") @@ -86,7 +168,8 @@ async def list_services(request: Request): @routes.get( - f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}", name="get_service" + f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}", + name="get_service", ) @login_required @permission_required("services.catalog.*") diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_api__pricing_plan.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py similarity index 100% rename from services/web/server/tests/unit/with_dbs/01/test_catalog_api__pricing_plan.py rename to services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py new file mode 100644 index 00000000000..8d5ac4100fa --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py @@ -0,0 +1,102 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import urllib.parse + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_catalog.services import ServiceUpdate +from models_library.api_schemas_webserver.catalog import DEVServiceGet +from models_library.utils.fastapi_encoders import jsonable_encoder +from pydantic import parse_obj_as +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.utils_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "WEBSERVER_DEV_FEATURES_ENABLED": "1", + }, + ) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_dev_list_latest_services( + client: TestClient, + logged_user: UserInfoDict, +): + assert client.app + assert client.app.router + + # LIST latest + url = client.app.router["dev_list_services_latest"].url_for() + assert url.path.endswith("/catalog/services/-/latest") + + response = await client.get(f"{url}", params={"offset": "0", "limit": "1"}) + data, error = await assert_status(response, status.HTTP_200_OK) + assert data + assert error is None + model = parse_obj_as(list[DEVServiceGet], data) + assert model + assert len(model) == 1 + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_dev_get_and_patch_service( + client: TestClient, + logged_user: UserInfoDict, +): + + assert client.app + assert client.app.router + + service_key = "simcore/services/dynamic/someservice" + service_version = "3.4.5" + + url = client.app.router["dev_get_service"].url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + ) + + # GET + response = await client.get( + f"{url}", + ) + data, error = await assert_status(response, status.HTTP_200_OK) + + assert data + assert error is None + model = parse_obj_as(DEVServiceGet, data) + assert model.key == service_key + assert model.version == service_version + + # PATCH + update = 
ServiceUpdate(name="foo", thumbnail=None, description="bar") + response = await client.patch( + f"{url}", json=jsonable_encoder(update, exclude_unset=True) + ) + + data, error = await assert_status(response, status.HTTP_200_OK) + assert data + assert error is None + model = parse_obj_as(DEVServiceGet, data) + assert model.key == service_key + assert model.version == service_version + assert model.name == "foo" + assert model.description == "bar" diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_api.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py similarity index 100% rename from services/web/server/tests/unit/with_dbs/01/test_catalog_api.py rename to services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py From 59ed935728da7d9600281381525bd074428a4bb1 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 21 Jun 2024 13:53:35 +0200 Subject: [PATCH 057/219] =?UTF-8?q?=E2=99=BB=EF=B8=8FE2E:=20Classic=20TI?= =?UTF-8?q?=20wait=20a=20bit=20longer=20on=20Run=20Optimization=20step=20(?= =?UTF-8?q?#5984)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/tests/tip/test_ti_plan.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index c135971e85b..49b18a4dc39 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -45,8 +45,8 @@ + _JLAB_DOCKER_PULLING_MAX_TIME + _JLAB_MAX_STARTUP_MAX_TIME ) -_JLAB_RUN_OPTIMIZATION_APPEARANCE_TIME: Final[int] = 1 * MINUTE -_JLAB_RUN_OPTIMIZATION_MAX_TIME: Final[int] = 1 * MINUTE +_JLAB_RUN_OPTIMIZATION_APPEARANCE_TIME: Final[int] = 2 * MINUTE +_JLAB_RUN_OPTIMIZATION_MAX_TIME: Final[int] = 2 * MINUTE _JLAB_REPORTING_MAX_TIME: Final[int] = 20 * SECOND From d543cd9ff0433aab68856c665f8a4dbd034602d7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 24 Jun 2024 13:41:56 +0200 Subject: [PATCH 058/219] =?UTF-8?q?=F0=9F=90=9B=20Fixes=20error=20while=20?= =?UTF-8?q?updated=20study=20with=20long=20description=20(#5989)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../api_schemas_webserver/projects.py | 22 +++++++++---------- .../src/models_library/basic_types.py | 17 ++++++++++++++ .../models-library/tests/test_projects.py | 17 ++++++++++++++ .../api/routes/studies_jobs.py | 4 ++-- .../api/v0/openapi.yaml | 4 ---- 5 files changed, 47 insertions(+), 17 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 4fbf11cb7ea..651445ee433 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -10,7 +10,11 @@ from pydantic import Field, validator from ..api_schemas_long_running_tasks.tasks import TaskGet -from ..basic_types import HttpUrlWithCustomMinLength, IDStr +from ..basic_types import ( + HttpUrlWithCustomMinLength, + LongTruncatedStr, + ShortTruncatedStr, +) from ..emails import LowerCaseEmailStr from ..projects import ClassifierID, DateTimeStr, NodesDict, ProjectID from ..projects_access import AccessRights, GroupIDStr @@ -85,8 +89,8 @@ class 
ProjectListItem(ProjectGet): class ProjectReplace(InputSchema): uuid: ProjectID - name: str - description: str + name: ShortTruncatedStr + description: LongTruncatedStr thumbnail: HttpUrlWithCustomMinLength | None creation_date: DateTimeStr last_change_date: DateTimeStr @@ -107,8 +111,8 @@ class ProjectReplace(InputSchema): class ProjectUpdate(InputSchema): - name: str = FieldNotRequired() - description: str = FieldNotRequired() + name: ShortTruncatedStr = FieldNotRequired() + description: LongTruncatedStr = FieldNotRequired() thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired() workbench: NodesDict = FieldNotRequired() access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired() @@ -118,13 +122,9 @@ class ProjectUpdate(InputSchema): quality: dict[str, Any] = FieldNotRequired() -ProjectName: TypeAlias = IDStr -ProjectDescription: TypeAlias = IDStr - - class ProjectPatch(InputSchema): - name: ProjectName = FieldNotRequired() - description: ProjectDescription = FieldNotRequired() + name: ShortTruncatedStr = FieldNotRequired() + description: LongTruncatedStr = FieldNotRequired() thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired() access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired() classifiers: list[ClassifierID] = FieldNotRequired() diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py index 7814f7952f9..e74d046db37 100644 --- a/packages/models-library/src/models_library/basic_types.py +++ b/packages/models-library/src/models_library/basic_types.py @@ -80,6 +80,23 @@ class IDStr(ConstrainedStr): max_length = 100 +class ShortTruncatedStr(ConstrainedStr): + # NOTE: Use to input e.g. titles or display names + # A truncated string: + # - Strips whitespace and truncates strings that exceed the specified character limit (curtail_length). + # - Ensures that the **input** data length to the API is controlled and prevents exceeding large inputs silently, i.e. without raising errors. + # SEE https://github.com/ITISFoundation/osparc-simcore/pull/5989#discussion_r1650506583 + strip_whitespace = True + curtail_length = 600 + + +class LongTruncatedStr(ConstrainedStr): + # NOTE: Use to input e.g.
descriptions or summaries + # Analogous to ShortTruncatedStr + strip_whitespace = True + curtail_length = 65536 # same as github description + + # auto-incremented primary-key IDs IdInt: TypeAlias = PositiveInt PrimaryKeyInt: TypeAlias = PositiveInt diff --git a/packages/models-library/tests/test_projects.py b/packages/models-library/tests/test_projects.py index 6ed0c877f13..8b646345c2d 100644 --- a/packages/models-library/tests/test_projects.py +++ b/packages/models-library/tests/test_projects.py @@ -7,6 +7,7 @@ import pytest from faker import Faker +from models_library.api_schemas_webserver.projects import LongTruncatedStr, ProjectPatch from models_library.projects import Project @@ -40,3 +41,19 @@ def test_project_with_thumbnail_as_empty_string(minimal_project: dict[str, Any]) assert project assert project.thumbnail is None + + +def test_project_patch_truncates_description(): + # NOTE: checks https://github.com/ITISFoundation/osparc-simcore/issues/5988 + assert LongTruncatedStr.curtail_length + len_truncated = int(LongTruncatedStr.curtail_length) + + long_description = "X" * (len_truncated + 10) + assert len(long_description) > len_truncated + + update = ProjectPatch(description=long_description) + assert len(update.description) == len_truncated + + short_description = "X" + update = ProjectPatch(description=short_description) + assert len(update.description) == len(short_description) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index d48e67716e2..3e8d734d200 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -5,7 +5,7 @@ from fastapi import APIRouter, Depends, Header, Query, Request, status from fastapi.encoders import jsonable_encoder from fastapi.responses import JSONResponse, RedirectResponse -from models_library.api_schemas_webserver.projects import ProjectName, ProjectPatch +from models_library.api_schemas_webserver.projects import ProjectPatch from models_library.api_schemas_webserver.projects_nodes import NodeOutputs from models_library.clusters import ClusterID from models_library.function_services_catalog.services import file_picker @@ -116,7 +116,7 @@ async def create_study_job( ) await webserver_api.patch_project( - project_id=job.id, patch_params=ProjectPatch(name=ProjectName(job.name)) + project_id=job.id, patch_params=ProjectPatch(name=job.name) ) project_inputs = await webserver_api.get_project_inputs(project_id=project.uuid) diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 96ca5231d72..af5028cf780 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -9549,13 +9549,9 @@ components: properties: name: title: Name - maxLength: 100 - minLength: 1 type: string description: title: Description - maxLength: 100 - minLength: 1 type: string thumbnail: title: Thumbnail From 4bbb76c894701b426aeb03f6c70e7d0527f2bb9b Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 24 Jun 2024 15:08:11 +0200 Subject: [PATCH 059/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Fix:=20From?= =?UTF-8?q?=20service=20running=20to=20connect=20to=20iframe=20(#5987)?= MIME-Version: 1.0 Content-Type:
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/data/model/Node.js | 71 +++++++------------ .../source/class/osparc/info/MergedLarge.js | 3 +- .../source/class/osparc/info/ServiceLarge.js | 3 +- .../source/class/osparc/info/StudyLarge.js | 3 +- 4 files changed, 33 insertions(+), 47 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index f0d0369ac11..67c3cd85d83 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -342,6 +342,7 @@ qx.Class.define("osparc.data.model.Node", { __posY: null, __unresponsiveRetries: null, __stopRequestingStatus: null, + __retriesLeft: null, getWorkbench: function() { return this.getStudy().getWorkbench(); @@ -1245,6 +1246,7 @@ qx.Class.define("osparc.data.model.Node", { } = osparc.utils.Utils.computeServiceUrl(data); this.setDynamicV2(isDynamicV2); if (srvUrl) { + this.__retriesLeft = 40; this.__waitForServiceReady(srvUrl); } break; @@ -1327,68 +1329,49 @@ qx.Class.define("osparc.data.model.Node", { } }, - __waitForServiceReady: async function(srvUrl) { + __waitForServiceReady: function(srvUrl) { this.getStatus().setInteractive("connecting"); + + if (this.__retriesLeft === 0) { + return; + } + const retry = () => { + this.__retriesLeft--; + // Check if node is still there if (this.getWorkbench().getNode(this.getNodeId()) === null) { return; } - const interval = 3000; + const interval = 5000; qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); }; // ping for some time until it is really reachable try { if (osparc.utils.Utils.isDevelopmentPlatform()) { - console.log("Connecting: about to fetch ", srvUrl); - } - const response = await fetch(srvUrl); - if (osparc.utils.Utils.isDevelopmentPlatform()) { - console.log("Connecting: fetch's response ", JSON.stringify(response)); + console.log("Connecting: about to fetch", srvUrl); } - if (response.ok || response.status === 302) { - // ok = status in the range 200-299 - // some services might respond with a 302 which is also fine - // instead of - // - requesting its frontend to make sure it is ready and ... - // - waiting for the "load" event triggered by the content of the iframe - // we will skip those steps and directly switch its iframe - this.__serviceReadyIn(srvUrl); - } else { - console.log(`Connecting: ${srvUrl} is not reachable. Status: ${response.status}`); - retry(); - } - } catch (error) { - console.error(`Connecting: Error while checking ${srvUrl}:`, error); - retry(); - } - }, - - __waitForServiceWebsite: function(srvUrl) { - // request the frontend to make sure it is ready - let retries = 5 - const openAndSend = () => { - if (retries === 0) { - return - } - retries-- fetch(srvUrl) - .then(request => { - if (request.status >= 200 || request.status < 300) { - this.__serviceReadyIn(srvUrl) + .then(response => { + if (osparc.utils.Utils.isDevelopmentPlatform()) { + console.log("Connecting: fetch's response status", response.status); + } + if (response.status < 400) { + this.__serviceReadyIn(srvUrl); } else { - retry() // eslint-disable-line no-use-before-define + console.log(`Connecting: ${srvUrl} is not reachable. 
Status: ${response.status}`); + retry(); } }) - .catch(() => { - retry() // eslint-disable-line no-use-before-define - }) + .catch(err => { + console.error("Connecting: Error", err); + retry(); + }); + } catch (error) { + console.error(`Connecting: Error while checking ${srvUrl}:`, error); + retry(); } - const retry = () => { - setTimeout(() => openAndSend(), 2000) - }; - openAndSend() }, __serviceReadyIn: function(srvUrl) { diff --git a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js index 39d909633c9..5c3c33e55bf 100644 --- a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js @@ -407,7 +407,8 @@ qx.Class.define("osparc.info.MergedLarge", { }) .catch(err => { console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error while updating the information."), "ERROR"); + const msg = err.message || this.tr("There was an error while updating the information."); + osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); }); } } diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js index eae233f1b01..3bcc8f76d84 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js @@ -430,7 +430,8 @@ qx.Class.define("osparc.info.ServiceLarge", { }) .catch(err => { console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error while updating the information."), "ERROR"); + const msg = err.message || this.tr("There was an error while updating the information."); + osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); }); } } diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js index fb9e4b942e5..6b0e4a4927d 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js @@ -328,7 +328,8 @@ qx.Class.define("osparc.info.StudyLarge", { }) .catch(err => { console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error while updating the information."), "ERROR"); + const msg = err.message || this.tr("There was an error while updating the information."); + osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); }); } } From 8f141cecdb605c4f72327ac9ebdfb38895721d09 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Tue, 25 Jun 2024 11:13:39 +0200 Subject: [PATCH 060/219] =?UTF-8?q?=F0=9F=91=BD=EF=B8=8F=20Add=20endpoint?= =?UTF-8?q?=20for=20getting=20credit-price=20and=20study-job=20log=20files?= =?UTF-8?q?=20from=20api=20server=20(#5985)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../helpers/httpx_calls_capture_models.py | 2 +- services/api-server/openapi.json | 154 ++- .../simcore_service_api_server/api/root.py | 2 + .../api/routes/credits.py | 16 + .../api/routes/solvers_jobs_getters.py | 12 +- .../api/routes/studies_jobs.py | 32 +- .../models/schemas/studies.py | 21 +- .../services/director_v2.py | 14 +- .../services/webserver.py | 5 +- .../tests/mocks/create_study_job.json | 2 +- 
.../mocks/for_test_api_routes_studies.json | 1210 ++++++++--------- .../for_test_get_and_update_job_metadata.json | 24 +- .../tests/mocks/get_credits_price.json | 19 + .../tests/mocks/get_default_wallet.json | 2 +- .../get_job_pricing_unit_invalid_solver.json | 2 +- .../mocks/get_job_pricing_unit_success.json | 2 +- .../tests/mocks/get_solver_outputs.json | 6 +- .../tests/mocks/get_study_job_logs.json | 31 + .../api-server/tests/mocks/on_create_job.json | 18 +- .../api-server/tests/mocks/on_list_jobs.json | 12 +- .../tests/mocks/run_study_workflow.json | 2 +- .../mocks/start_job_not_enough_credit.json | 2 +- .../tests/mocks/start_job_with_payment.json | 2 +- ...est_get_and_update_study_job_metadata.json | 2 +- .../test_api_routes_studies_jobs.py | 28 +- .../api-server/tests/unit/test_credits.py | 26 + 26 files changed, 971 insertions(+), 677 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/credits.py create mode 100644 services/api-server/tests/mocks/get_credits_price.json create mode 100644 services/api-server/tests/mocks/get_study_job_logs.json create mode 100644 services/api-server/tests/unit/test_credits.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_models.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_models.py index 814a2771abe..60e048f56d3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_models.py @@ -24,7 +24,7 @@ class HttpApiCallCaptureModel(BaseModel): path: PathDescription | str query: str | None = None request_payload: dict[str, Any] | None = None - response_body: dict[str, Any] | list | None = None + response_body: list[Any] | dict[str, Any] | None = None status_code: HTTPStatus = Field(default=status.HTTP_200_OK) @classmethod diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index 60b80ff2314..87a31578289 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "osparc.io web API", "description": "osparc-simcore public API specifications", - "version": "0.5.1" + "version": "0.6.0" }, "paths": { "/v0/meta": { @@ -2175,7 +2175,7 @@ "solvers" ], "summary": "Start Job", - "description": "Starts job job_id created with the solver solver_key:version\n\nNew in *version 0.4.3*: cluster_id", + "description": "Starts job job_id created with the solver solver_key:version\n\nNew in *version 0.4.3*: cluster_id\nNew in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation", "operationId": "start_job", "parameters": [ { @@ -3571,6 +3571,7 @@ "studies" ], "summary": "Start Study Job", + "description": "New in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation", "operationId": "start_study_job", "parameters": [ { @@ -3896,6 +3897,90 @@ } ] } + }, + "/v0/studies/{study_id}/jobs/{job_id}/outputs/log-links": { + "get": { + "tags": [ + "studies" + ], + "summary": "Get download links for study job log files", + "operationId": "get_study_job_output_logfile", + "parameters": [ + { + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Study Id" + }, + "name": "study_id", + "in": "path" + }, + { + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Job Id" + }, + "name": "job_id", + "in": "path" + } + ], + "responses": { + "200": { + 
"description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JobLogsMap" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, + "/v0/credits/price": { + "get": { + "tags": [ + "credits" + ], + "summary": "Get Credits Price", + "operationId": "get_credits_price", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetCreditPrice" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } } }, "components": { @@ -4097,6 +4182,33 @@ ], "title": "FileUploadData" }, + "GetCreditPrice": { + "properties": { + "productName": { + "type": "string", + "title": "Productname" + }, + "usdPerCredit": { + "type": "number", + "minimum": 0.0, + "title": "Usdpercredit", + "description": "Price of a credit in USD. If None, then this product's price is UNDEFINED" + }, + "minPaymentAmountUsd": { + "type": "integer", + "minimum": 0, + "title": "Minpaymentamountusd", + "description": "Minimum amount (included) in USD that can be paid for this productCan be None if this product's price is UNDEFINED" + } + }, + "type": "object", + "required": [ + "productName", + "usdPerCredit", + "minPaymentAmountUsd" + ], + "title": "GetCreditPrice" + }, "Groups": { "properties": { "me": { @@ -4259,6 +4371,23 @@ } } }, + "JobLogsMap": { + "properties": { + "log_links": { + "items": { + "$ref": "#/components/schemas/LogLink" + }, + "type": "array", + "title": "Log Links", + "description": "Array of download links" + } + }, + "type": "object", + "required": [ + "log_links" + ], + "title": "JobLogsMap" + }, "JobOutputs": { "properties": { "job_id": { @@ -4397,6 +4526,27 @@ "type": "object", "title": "Links" }, + "LogLink": { + "properties": { + "node_name": { + "type": "string", + "title": "Node Name" + }, + "download_link": { + "type": "string", + "maxLength": 65536, + "minLength": 1, + "format": "uri", + "title": "Download Link" + } + }, + "type": "object", + "required": [ + "node_name", + "download_link" + ], + "title": "LogLink" + }, "Meta": { "properties": { "name": { diff --git a/services/api-server/src/simcore_service_api_server/api/root.py b/services/api-server/src/simcore_service_api_server/api/root.py index 5010a286c22..aa715ade554 100644 --- a/services/api-server/src/simcore_service_api_server/api/root.py +++ b/services/api-server/src/simcore_service_api_server/api/root.py @@ -3,6 +3,7 @@ from fastapi import APIRouter from ..core.settings import ApplicationSettings +from .routes import credits as _credits from .routes import ( files, health, @@ -38,6 +39,7 @@ def create_router(settings: ApplicationSettings): router.include_router(studies.router, tags=["studies"], prefix="/studies") router.include_router(studies_jobs.router, tags=["studies"], prefix="/studies") router.include_router(wallets.router, tags=["wallets"], prefix="/wallets") + router.include_router(_credits.router, tags=["credits"], prefix="/credits") # NOTE: multiple-files upload is currently disabled # Web form to upload files at http://localhost:8000/v0/upload-form-view diff --git a/services/api-server/src/simcore_service_api_server/api/routes/credits.py b/services/api-server/src/simcore_service_api_server/api/routes/credits.py new file mode 100644 index 00000000000..c0ef51feea4 --- /dev/null +++ 
b/services/api-server/src/simcore_service_api_server/api/routes/credits.py @@ -0,0 +1,16 @@ +from typing import Annotated + +from fastapi import APIRouter, Depends, status +from models_library.api_schemas_webserver.product import GetCreditPrice + +from ..dependencies.webserver import AuthSession, get_webserver_session + +router = APIRouter() + + +@router.get("/price", status_code=status.HTTP_200_OK, response_model=GetCreditPrice) +async def get_credits_price( + webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], +): + product_price = await webserver_api.get_product_price() + return product_price diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py index 952b126a173..73da939cf83 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_getters.py @@ -31,7 +31,7 @@ from ...models.schemas.jobs import ArgumentTypes, Job, JobID, JobMetadata, JobOutputs from ...models.schemas.solvers import SolverKeyId from ...services.catalog import CatalogApi -from ...services.director_v2 import DirectorV2Api, DownloadLink, NodeName +from ...services.director_v2 import DirectorV2Api from ...services.jobs import ( get_custom_metadata, raise_if_job_not_associated_with_solver, @@ -231,7 +231,7 @@ async def get_job_outputs( assert len(node_ids) == 1 # nosec product_price = await webserver_api.get_product_price() - if product_price is not None: + if product_price.usd_per_credit is not None: wallet = await webserver_api.get_project_wallet(project_id=project.uuid) if wallet is None: raise MissingWalletError(job_id=project.uuid) @@ -295,16 +295,17 @@ async def get_job_output_logfile( project_id = job_id - logs_urls: dict[NodeName, DownloadLink] = await director2_api.get_computation_logs( + log_link_map = await director2_api.get_computation_logs( user_id=user_id, project_id=project_id ) + logs_urls = log_link_map.log_links _logger.debug( "Found %d logfiles for %s %s: %s", len(logs_urls), f"{project_id=}", f"{user_id=}", - list(logs_urls.keys()), + list(elm.download_link for elm in logs_urls), ) # if more than one node? should rezip all of them?? 
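For orientation, a minimal client-side sketch (not part of this patch set) of how the new log-links endpoint introduced here could be consumed once deployed. The host, credentials, and local file names are hypothetical placeholders; httpx is assumed only because this repository's test suite already uses it, and the response shape follows the JobLogsMap/LogLink schemas added to openapi.json above.

    from pathlib import Path

    import httpx


    async def download_study_job_logs(study_id: str, job_id: str) -> None:
        # hypothetical deployment URL and API credentials
        auth = httpx.BasicAuth(username="<api_key>", password="<api_secret>")
        async with httpx.AsyncClient(base_url="https://api.example.com", auth=auth) as client:
            # JobLogsMap payload: {"log_links": [{"node_name": ..., "download_link": ...}, ...]}
            response = await client.get(f"/v0/studies/{study_id}/jobs/{job_id}/outputs/log-links")
            response.raise_for_status()
            log_links = response.json()["log_links"]
        # the download links are presigned, so no API auth is needed to fetch them
        async with httpx.AsyncClient(follow_redirects=True) as plain_client:
            for link in log_links:
                logfile = await plain_client.get(link["download_link"])
                logfile.raise_for_status()
                # arbitrary local naming: one file per task/node
                Path(f"{link['node_name']}.log").write_bytes(logfile.content)

Run it with, e.g., asyncio.run(download_study_job_logs(study_id, job_id)) after a study job has been created and started.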
@@ -312,7 +313,8 @@ async def get_job_output_logfile( len(logs_urls) <= 1 ), "Current version only supports one node per solver" - for presigned_download_link in logs_urls.values(): + for log_link in logs_urls: + presigned_download_link = log_link.download_link _logger.info( "Redirecting '%s' to %s ...", f"{solver_key}/releases/{version}/jobs/{job_id}/outputs/logfile", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index 3e8d734d200..8b735009cf5 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -4,7 +4,7 @@ from fastapi import APIRouter, Depends, Header, Query, Request, status from fastapi.encoders import jsonable_encoder -from fastapi.responses import JSONResponse, RedirectResponse +from fastapi.responses import JSONResponse from models_library.api_schemas_webserver.projects import ProjectPatch from models_library.api_schemas_webserver.projects_nodes import NodeOutputs from models_library.clusters import ClusterID @@ -33,7 +33,7 @@ JobOutputs, JobStatus, ) -from ...models.schemas.studies import Study, StudyID +from ...models.schemas.studies import JobLogsMap, Study, StudyID from ...services.director_v2 import DirectorV2Api from ...services.jobs import ( get_custom_metadata, @@ -302,15 +302,27 @@ async def get_study_job_outputs( return job_outputs -@router.post( - "/{study_id}/jobs/{job_id}/outputs/logfile", - response_class=RedirectResponse, - include_in_schema=API_SERVER_DEV_FEATURES_ENABLED, - status_code=status.HTTP_501_NOT_IMPLEMENTED, +@router.get( + "/{study_id}/jobs/{job_id}/outputs/log-links", + response_model=JobLogsMap, + status_code=status.HTTP_200_OK, + summary="Get download links for study job log files", ) -async def get_study_job_output_logfile(study_id: StudyID, job_id: JobID): - msg = f"get study job output logfile study_id={study_id!r} job_id={job_id!r}. SEE https://github.com/ITISFoundation/osparc-simcore/issues/4177" - raise NotImplementedError(msg) +async def get_study_job_output_logfile( + study_id: StudyID, + job_id: JobID, + user_id: Annotated[PositiveInt, Depends(get_current_user_id)], + director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], +): + with log_context( + logger=_logger, + level=logging.DEBUG, + msg=f"get study job output logfile study_id={study_id!r} job_id={job_id!r}.", + ): + log_link_map = await director2_api.get_computation_logs( + user_id=user_id, project_id=job_id + ) + return log_link_map @router.get( diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/studies.py b/services/api-server/src/simcore_service_api_server/models/schemas/studies.py index 43704a68548..a14d39ed64e 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/studies.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/studies.py @@ -1,16 +1,18 @@ -import typing +from typing import TypeAlias -import pydantic from models_library import projects, projects_nodes_io from models_library.utils import pydantic_tools_extension +from pydantic import AnyUrl, BaseModel, Field from .. import api_resources from . 
import solvers -StudyID: typing.TypeAlias = projects.ProjectID +StudyID: TypeAlias = projects.ProjectID +NodeName: TypeAlias = str +DownloadLink: TypeAlias = AnyUrl -class Study(pydantic.BaseModel): +class Study(BaseModel): uid: StudyID title: str = pydantic_tools_extension.FieldNotRequired() description: str = pydantic_tools_extension.FieldNotRequired() @@ -21,9 +23,18 @@ def compose_resource_name(cls, study_key) -> api_resources.RelativeResourceName: class StudyPort(solvers.SolverPort): - key: projects_nodes_io.NodeID = pydantic.Field( + key: projects_nodes_io.NodeID = Field( ..., description="port identifier name." "Correponds to the UUID of the parameter/probe node in the study", title="Key name", ) + + +class LogLink(BaseModel): + node_name: NodeName + download_link: DownloadLink + + +class JobLogsMap(BaseModel): + log_links: list[LogLink] = Field(..., description="Array of download links") diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index 64fc03e537b..4628e617099 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -19,6 +19,7 @@ from ..db.repositories.groups_extra_properties import GroupsExtraPropertiesRepository from ..exceptions.service_errors_utils import service_exception_mapper from ..models.schemas.jobs import PercentageInt +from ..models.schemas.studies import JobLogsMap, LogLink from ..utils.client_base import BaseServiceClientApi, setup_client_instance logger = logging.getLogger(__name__) @@ -60,9 +61,6 @@ class TaskLogFileGet(BaseModel): ) -NodeName = str -DownloadLink = AnyUrl - # API CLASS --------------------------------------------- _exception_mapper = partial(service_exception_mapper, "Director V2") @@ -167,7 +165,7 @@ async def delete_computation(self, project_id: UUID, user_id: PositiveInt): @_exception_mapper({status.HTTP_404_NOT_FOUND: LogFileNotFoundError}) async def get_computation_logs( self, user_id: PositiveInt, project_id: UUID - ) -> dict[NodeName, DownloadLink]: + ) -> JobLogsMap: response = await self.client.get( f"/v2/computations/{project_id}/tasks/-/logfile", params={ @@ -178,12 +176,14 @@ async def get_computation_logs( # probably not found response.raise_for_status() - node_to_links: dict[NodeName, DownloadLink] = {} + log_links: list[LogLink] = [] for r in parse_raw_as(list[TaskLogFileGet], response.text or "[]"): if r.download_link: - node_to_links[f"{r.task_id}"] = r.download_link + log_links.append( + LogLink(node_name=f"{r.task_id}", download_link=r.download_link) + ) - return node_to_links + return JobLogsMap(log_links=log_links) # MODULES APP SETUP ------------------------------------------------------------- diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index 42641d2affb..2626a22d740 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -36,7 +36,6 @@ WalletGet, WalletGetWithAvailableCredits, ) -from models_library.basic_types import NonNegativeDecimal from models_library.clusters import ClusterID from models_library.generics import Envelope from models_library.projects import ProjectID @@ -548,7 +547,7 @@ async def get_project_wallet(self, project_id: ProjectID) -> WalletGet | 
None: # PRODUCTS ------------------------------------------------- @_exception_mapper({status.HTTP_404_NOT_FOUND: ProductPriceNotFoundError}) - async def get_product_price(self) -> NonNegativeDecimal | None: + async def get_product_price(self) -> GetCreditPrice: response = await self.client.get( "/credits-price", cookies=self.session_cookies, @@ -556,7 +555,7 @@ async def get_product_price(self) -> NonNegativeDecimal | None: response.raise_for_status() data = Envelope[GetCreditPrice].parse_raw(response.text).data assert data is not None # nosec - return data.usd_per_credit + return data # SERVICES ------------------------------------------------- diff --git a/services/api-server/tests/mocks/create_study_job.json b/services/api-server/tests/mocks/create_study_job.json index b8670fc2601..c47c19e6972 100644 --- a/services/api-server/tests/mocks/create_study_job.json +++ b/services/api-server/tests/mocks/create_study_job.json @@ -81,7 +81,7 @@ "creationDate": "2024-05-14T09:43:20.099Z", "lastChangeDate": "2024-05-14T09:43:20.099Z", "workbench": {}, - "prjOwner": "bisgaard@itis.swiss", + "prjOwner": "frubio@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/for_test_api_routes_studies.json b/services/api-server/tests/mocks/for_test_api_routes_studies.json index d91e3090070..4543853948d 100644 --- a/services/api-server/tests/mocks/for_test_api_routes_studies.json +++ b/services/api-server/tests/mocks/for_test_api_routes_studies.json @@ -1,619 +1,619 @@ [ - { - "name": "get_me", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/me", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "first_name": "collins", - "last_name": "drew", - "id": 1, - "login": "collinsdrew@example.com", - "role": "User", - "groups": { - "me": { - "gid": 3, - "label": "collinsdrew", - "description": "primary group", - "thumbnail": null, - "accessRights": { - "read": true, - "write": false, - "delete": false - }, - "inclusionRules": {} - }, - "organizations": [ - { - "gid": 2, - "label": "osparc", - "description": "osparc product group", - "thumbnail": null, - "accessRights": { - "read": false, - "write": false, - "delete": false - }, - "inclusionRules": {} - } - ], - "all": { - "gid": 1, - "label": "Everyone", - "description": "all users", - "thumbnail": null, - "accessRights": { - "read": true, - "write": false, - "delete": false - }, - "inclusionRules": {} - } - }, - "gravatar_id": "aa33f6ec77ea434c2ea4fb92d0fd379e" + { + "name": "get_me", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/me", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "first_name": "collins", + "last_name": "drew", + "id": 1, + "login": "sarahlopez@example.net", + "role": "User", + "groups": { + "me": { + "gid": 3, + "label": "collinsdrew", + "description": "primary group", + "thumbnail": null, + "accessRights": { + "read": true, + "write": false, + "delete": false + }, + "inclusionRules": {} + }, + "organizations": [ + { + "gid": 2, + "label": "osparc", + "description": "osparc product group", + "thumbnail": null, + "accessRights": { + "read": false, + "write": false, + "delete": false + }, + "inclusionRules": {} } + ], + "all": { + "gid": 1, + "label": "Everyone", + "description": "all users", + "thumbnail": null, + "accessRights": { + "read": true, + "write": false, + "delete": false + }, + "inclusionRules": {} + } }, - "status_code": 200 + "gravatar_id": 
"aa33f6ec77ea434c2ea4fb92d0fd379e" + } }, - { - "name": "list_projects", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects", - "query": "type=user&show_hidden=false&limit=20&offset=0", - "request_payload": null, - "response_body": { - "_meta": { - "limit": 20, - "total": 1, - "offset": 0, - "count": 1 - }, - "_links": { - "self": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0", - "first": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0", - "prev": null, - "next": null, - "last": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0" - }, - "data": [ - { - "uuid": "25531b1a-2565-11ee-ab43-02420a000031", - "name": "Brett Montoya", - "description": "", - "thumbnail": "", - "creationDate": "2023-07-18T12:18:04.314Z", - "lastChangeDate": "2023-07-20T20:02:42.535Z", - "workbench": { - "deea006c-a223-4103-b46e-7b677428de9f": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker", - "progress": 0.0, - "thumbnail": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": {}, - "parent": null - }, - "3e700053-3d10-4089-90f7-f0865167c5b2": { - "key": "simcore/services/frontend/parameter/integer", - "version": "1.0.0", - "label": "Integer Parameter", - "thumbnail": null, - "runHash": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": { - "out_1": 1 - }, - "parent": null - }, - "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { - "key": "simcore/services/frontend/parameter/boolean", - "version": "1.0.0", - "label": "Boolean Parameter", - "thumbnail": null, - "runHash": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": { - "out_1": true - }, - "parent": null - }, - "09e17caa-a538-40d4-9b8e-9a221b8367d3": { - "key": "simcore/services/frontend/iterator-consumer/probe/integer", - "version": "1.0.0", - "label": "Integer probe", - "thumbnail": null, - "inputs": { - "in_1": 0 - }, - "inputsUnits": {}, - "inputNodes": [], - "parent": null - } - }, - "prjOwner": "watsonchelsea@example.org", - "accessRights": { - "3": { - "read": true, - "write": true, - "delete": true - } - }, - "tags": [], - "classifiers": [], - "state": { - "locked": { - "value": false, - "status": "CLOSED" - }, - "state": { - "value": "UNKNOWN" - } - }, - "ui": { - "workbench": { - "09e17caa-a538-40d4-9b8e-9a221b8367d3": { - "position": { - "x": 540, - "y": 240 - } - }, - "3e700053-3d10-4089-90f7-f0865167c5b2": { - "position": { - "x": 180, - "y": 280 - } - }, - "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { - "position": { - "x": 180, - "y": 360 - } - }, - "deea006c-a223-4103-b46e-7b677428de9f": { - "position": { - "x": 160, - "y": 120 - } - } - }, - "slideshow": {}, - "currentNodeId": "25531b1a-2565-11ee-ab43-02420a000031", - "mode": "workbench" - }, - "quality": { - "enabled": true, - "tsr_target": { - "r01": { - "level": 4, - "references": "" - }, - "r02": { - "level": 4, - "references": "" - }, - "r03": { - "level": 4, - "references": "" - }, - "r04": { - "level": 4, - "references": "" - }, - "r05": { - "level": 4, - "references": "" - }, - "r06": { - "level": 4, - "references": "" - }, - "r07": { - "level": 4, - "references": "" - }, - "r08": { - "level": 4, - "references": "" - }, - "r09": { - "level": 4, - "references": "" - }, - "r10": { - "level": 4, - "references": "" - } - }, - "annotations": { - "vandv": "", - "limitations": "", - "certificationLink": "", - "certificationStatus": "Uncertified" - }, - 
"tsr_current": { - "r01": { - "level": 0, - "references": "" - }, - "r02": { - "level": 0, - "references": "" - }, - "r03": { - "level": 0, - "references": "" - }, - "r04": { - "level": 0, - "references": "" - }, - "r05": { - "level": 0, - "references": "" - }, - "r06": { - "level": 0, - "references": "" - }, - "r07": { - "level": 0, - "references": "" - }, - "r08": { - "level": 0, - "references": "" - }, - "r09": { - "level": 0, - "references": "" - }, - "r10": { - "level": 0, - "references": "" - } - } - }, - "dev": {} + "status_code": 200 + }, + { + "name": "list_projects", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects", + "query": "type=user&show_hidden=false&limit=20&offset=0", + "request_payload": null, + "response_body": { + "_meta": { + "limit": 20, + "total": 1, + "offset": 0, + "count": 1 + }, + "_links": { + "self": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0", + "first": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0", + "prev": null, + "next": null, + "last": "http://webserver:8080/v0/projects?type=user&show_hidden=false&limit=20&offset=0" + }, + "data": [ + { + "uuid": "25531b1a-2565-11ee-ab43-02420a000031", + "name": "Alexandra Brown", + "description": "", + "thumbnail": "", + "creationDate": "2023-07-18T12:18:04.314Z", + "lastChangeDate": "2023-07-20T20:02:42.535Z", + "workbench": { + "deea006c-a223-4103-b46e-7b677428de9f": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker", + "progress": 0.0, + "thumbnail": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": {}, + "parent": null + }, + "3e700053-3d10-4089-90f7-f0865167c5b2": { + "key": "simcore/services/frontend/parameter/integer", + "version": "1.0.0", + "label": "Integer Parameter", + "thumbnail": null, + "runHash": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": { + "out_1": 1 + }, + "parent": null + }, + "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { + "key": "simcore/services/frontend/parameter/boolean", + "version": "1.0.0", + "label": "Boolean Parameter", + "thumbnail": null, + "runHash": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": { + "out_1": true + }, + "parent": null + }, + "09e17caa-a538-40d4-9b8e-9a221b8367d3": { + "key": "simcore/services/frontend/iterator-consumer/probe/integer", + "version": "1.0.0", + "label": "Integer probe", + "thumbnail": null, + "inputs": { + "in_1": 0 + }, + "inputsUnits": {}, + "inputNodes": [], + "parent": null + } + }, + "prjOwner": "melinda11@example.org", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "locked": { + "value": false, + "status": "CLOSED" + }, + "state": { + "value": "UNKNOWN" + } + }, + "ui": { + "workbench": { + "09e17caa-a538-40d4-9b8e-9a221b8367d3": { + "position": { + "x": 540, + "y": 240 } - ] - }, - "status_code": 200 + }, + "3e700053-3d10-4089-90f7-f0865167c5b2": { + "position": { + "x": 180, + "y": 280 + } + }, + "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { + "position": { + "x": 180, + "y": 360 + } + }, + "deea006c-a223-4103-b46e-7b677428de9f": { + "position": { + "x": 160, + "y": 120 + } + } + }, + "slideshow": {}, + "currentNodeId": "25531b1a-2565-11ee-ab43-02420a000031", + "mode": "workbench" + }, + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + 
"references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + } + }, + "annotations": { + "vandv": "", + "limitations": "", + "certificationLink": "", + "certificationStatus": "Uncertified" + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + } + } + }, + "dev": {} + } + ] }, - { - "name": "get_project", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/25531b1a-2565-11ee-ab43-02420a000031", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "uuid": "25531b1a-2565-11ee-ab43-02420a000031", - "name": "Tracy Sullivan", - "description": "This is an interesting study", - "thumbnail": "", - "creationDate": "2023-07-18T12:18:04.314Z", - "lastChangeDate": "2023-07-20T20:04:05.607Z", - "workbench": { - "deea006c-a223-4103-b46e-7b677428de9f": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker", - "progress": 0.0, - "thumbnail": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": {}, - "parent": null - }, - "3e700053-3d10-4089-90f7-f0865167c5b2": { - "key": "simcore/services/frontend/parameter/integer", - "version": "1.0.0", - "label": "Integer Parameter", - "thumbnail": null, - "runHash": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": { - "out_1": 1 - }, - "parent": null - }, - "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { - "key": "simcore/services/frontend/parameter/boolean", - "version": "1.0.0", - "label": "Boolean Parameter", - "thumbnail": null, - "runHash": null, - "inputs": {}, - "inputsUnits": {}, - "inputNodes": [], - "outputs": { - "out_1": true - }, - "parent": null - }, - "09e17caa-a538-40d4-9b8e-9a221b8367d3": { - "key": "simcore/services/frontend/iterator-consumer/probe/integer", - "version": "1.0.0", - "label": "Integer probe", - "thumbnail": null, - "inputs": { - "in_1": 0 - }, - "inputsUnits": {}, - "inputNodes": [], - "parent": null - } - }, - "prjOwner": "rromero@example.org", - "accessRights": { - "3": { - "read": true, - "write": true, - "delete": true - } - }, - "tags": [], - "classifiers": [], - "state": { - "locked": { - "value": false, - "status": "CLOSED" - }, - "state": { - "value": "UNKNOWN" - } - }, - "ui": { - "workbench": { - "09e17caa-a538-40d4-9b8e-9a221b8367d3": { - "position": { - "x": 540, - "y": 240 - } - }, - "3e700053-3d10-4089-90f7-f0865167c5b2": { - "position": { - "x": 180, - "y": 280 - } - }, - "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { - "position": { - "x": 180, - "y": 360 - } - }, - "deea006c-a223-4103-b46e-7b677428de9f": { - "position": { - "x": 160, - "y": 120 - } - } - }, - "slideshow": {}, - "currentNodeId": "25531b1a-2565-11ee-ab43-02420a000031", - "mode": "workbench" - }, - "quality": { - "enabled": 
true, - "tsr_target": { - "r01": { - "level": 4, - "references": "" - }, - "r02": { - "level": 4, - "references": "" - }, - "r03": { - "level": 4, - "references": "" - }, - "r04": { - "level": 4, - "references": "" - }, - "r05": { - "level": 4, - "references": "" - }, - "r06": { - "level": 4, - "references": "" - }, - "r07": { - "level": 4, - "references": "" - }, - "r08": { - "level": 4, - "references": "" - }, - "r09": { - "level": 4, - "references": "" - }, - "r10": { - "level": 4, - "references": "" - } - }, - "annotations": { - "vandv": "", - "limitations": "", - "certificationLink": "", - "certificationStatus": "Uncertified" - }, - "tsr_current": { - "r01": { - "level": 0, - "references": "" - }, - "r02": { - "level": 0, - "references": "" - }, - "r03": { - "level": 0, - "references": "" - }, - "r04": { - "level": 0, - "references": "" - }, - "r05": { - "level": 0, - "references": "" - }, - "r06": { - "level": 0, - "references": "" - }, - "r07": { - "level": 0, - "references": "" - }, - "r08": { - "level": 0, - "references": "" - }, - "r09": { - "level": 0, - "references": "" - }, - "r10": { - "level": 0, - "references": "" - } - } - }, - "dev": {} + "status_code": 200 + }, + { + "name": "get_project", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/25531b1a-2565-11ee-ab43-02420a000031", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "uuid": "25531b1a-2565-11ee-ab43-02420a000031", + "name": "Richard Perez", + "description": "This is an interesting study", + "thumbnail": "", + "creationDate": "2023-07-18T12:18:04.314Z", + "lastChangeDate": "2023-07-20T20:04:05.607Z", + "workbench": { + "deea006c-a223-4103-b46e-7b677428de9f": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker", + "progress": 0.0, + "thumbnail": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": {}, + "parent": null + }, + "3e700053-3d10-4089-90f7-f0865167c5b2": { + "key": "simcore/services/frontend/parameter/integer", + "version": "1.0.0", + "label": "Integer Parameter", + "thumbnail": null, + "runHash": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": { + "out_1": 1 + }, + "parent": null + }, + "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { + "key": "simcore/services/frontend/parameter/boolean", + "version": "1.0.0", + "label": "Boolean Parameter", + "thumbnail": null, + "runHash": null, + "inputs": {}, + "inputsUnits": {}, + "inputNodes": [], + "outputs": { + "out_1": true + }, + "parent": null + }, + "09e17caa-a538-40d4-9b8e-9a221b8367d3": { + "key": "simcore/services/frontend/iterator-consumer/probe/integer", + "version": "1.0.0", + "label": "Integer probe", + "thumbnail": null, + "inputs": { + "in_1": 0 + }, + "inputsUnits": {}, + "inputNodes": [], + "parent": null + } + }, + "prjOwner": "mitchellamber@example.org", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "locked": { + "value": false, + "status": "CLOSED" + }, + "state": { + "value": "UNKNOWN" + } + }, + "ui": { + "workbench": { + "09e17caa-a538-40d4-9b8e-9a221b8367d3": { + "position": { + "x": 540, + "y": 240 + } + }, + "3e700053-3d10-4089-90f7-f0865167c5b2": { + "position": { + "x": 180, + "y": 280 + } + }, + "60d76eed-2e50-42e7-8c2d-580566b2f4c6": { + "position": { + "x": 180, + "y": 360 + } + }, + "deea006c-a223-4103-b46e-7b677428de9f": { + "position": { + "x": 160, + "y": 120 + } } + }, + "slideshow": 
{}, + "currentNodeId": "25531b1a-2565-11ee-ab43-02420a000031", + "mode": "workbench" }, - "status_code": 200 - }, - { - "name": "get_invalid_project", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/15531b1a-2565-11ee-ab43-02420a000031", - "query": null, - "request_payload": null, - "response_body": { - "data": null, - "error": { - "logs": [ - { - "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found", - "level": "ERROR", - "logger": "user" - } - ], - "errors": [ - { - "code": "HTTPNotFound", - "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found", - "resource": null, - "field": null - } - ], - "status": 404, - "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found" + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + } + }, + "annotations": { + "vandv": "", + "limitations": "", + "certificationLink": "", + "certificationStatus": "Uncertified" + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" } + } }, - "status_code": 404 + "dev": {} + } }, - { - "name": "get_project_ports", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/25531b1a-2565-11ee-ab43-02420a000031/metadata/ports", - "query": null, - "request_payload": null, - "response_body": { - "data": [ - { - "key": "3e700053-3d10-4089-90f7-f0865167c5b2", - "kind": "input", - "content_schema": { - "title": "Integer Parameter", - "type": "integer", - "description": "Parameter of type integer" - } - }, - { - "key": "60d76eed-2e50-42e7-8c2d-580566b2f4c6", - "kind": "input", - "content_schema": { - "title": "Boolean Parameter", - "type": "boolean", - "description": "Parameter of type boolean" - } - }, - { - "key": "09e17caa-a538-40d4-9b8e-9a221b8367d3", - "kind": "output", - "content_schema": { - "title": "Integer probe", - "type": "integer", - "description": "Captures integer values attached to it", - "default": 0 - } - } - ] - }, - "status_code": 200 + "status_code": 200 + }, + { + "name": "get_invalid_project", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/15531b1a-2565-11ee-ab43-02420a000031", + "query": null, + "request_payload": null, + "response_body": { + "data": null, + "error": { + "logs": [ + { + "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found", + "level": "ERROR", + "logger": "user" + } + ], + "errors": [ + { + "code": "HTTPNotFound", + "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found", + "resource": null, + "field": null + } + ], + "status": 404, + "message": "Project 15531b1a-2565-11ee-ab43-02420a000031 not found" 
+ } }, - { - "name": "get_invalid_project_ports", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/15531b1a-2565-11ee-ab43-02420a000031/metadata/ports", - "query": null, - "request_payload": null, - "response_body": { - "data": null, - "error": { - "logs": [ - { - "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found", - "level": "ERROR", - "logger": "user" - } - ], - "errors": [ - { - "code": "HTTPNotFound", - "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found", - "resource": null, - "field": null - } - ], - "status": 404, - "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found" - } + "status_code": 404 + }, + { + "name": "get_project_ports", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/25531b1a-2565-11ee-ab43-02420a000031/metadata/ports", + "query": null, + "request_payload": null, + "response_body": { + "data": [ + { + "key": "3e700053-3d10-4089-90f7-f0865167c5b2", + "kind": "input", + "content_schema": { + "title": "Integer Parameter", + "type": "integer", + "description": "Parameter of type integer" + } }, - "status_code": 404 - } + { + "key": "60d76eed-2e50-42e7-8c2d-580566b2f4c6", + "kind": "input", + "content_schema": { + "title": "Boolean Parameter", + "type": "boolean", + "description": "Parameter of type boolean" + } + }, + { + "key": "09e17caa-a538-40d4-9b8e-9a221b8367d3", + "kind": "output", + "content_schema": { + "title": "Integer probe", + "type": "integer", + "description": "Captures integer values attached to it", + "default": 0 + } + } + ] + }, + "status_code": 200 + }, + { + "name": "get_invalid_project_ports", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/15531b1a-2565-11ee-ab43-02420a000031/metadata/ports", + "query": null, + "request_payload": null, + "response_body": { + "data": null, + "error": { + "logs": [ + { + "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found", + "level": "ERROR", + "logger": "user" + } + ], + "errors": [ + { + "code": "HTTPNotFound", + "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found", + "resource": null, + "field": null + } + ], + "status": 404, + "message": "Project '15531b1a-2565-11ee-ab43-02420a000031' not found" + } + }, + "status_code": 404 + } ] diff --git a/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json b/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json index f5976c83fb9..f14044c398c 100644 --- a/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json +++ b/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json @@ -12,7 +12,7 @@ "first_name": "crespo", "last_name": "", "id": 1, - "login": "williamsmartin@example.net", + "login": "rturner@example.net", "role": "User", "groups": { "me": { @@ -80,22 +80,22 @@ "type": "computational", "authors": [ { - "name": "Erin Lee", - "email": "sandraramirez@example.org", - "affiliation": "Harris PLC" + "name": "Joshua Boone", + "email": "sharon60@example.net", + "affiliation": "Johnson Inc" }, { - "name": "Lynn Lopez", - "email": "nreid@example.com", - "affiliation": "Gonzalez, Whitney and Lynch" + "name": "Kenneth Alvarez", + "email": "ncollins@example.com", + "affiliation": "Singh LLC" }, { - "name": "Riley Ray", - "email": "christina74@example.com", - "affiliation": "Davenport Inc" + "name": "Jennifer Howard", + "email": "amyhood@example.org", + "affiliation": "Campos-Weaver" } ], - "contact": 
"jared54@example.com", + "contact": "sharon91@example.com", "inputs": { "input_1": { "displayOrder": 1.0, @@ -312,7 +312,7 @@ } } }, - "prjOwner": "hguerrero@example.com", + "prjOwner": "brownlisa@example.com", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/get_credits_price.json b/services/api-server/tests/mocks/get_credits_price.json new file mode 100644 index 00000000000..220952437e8 --- /dev/null +++ b/services/api-server/tests/mocks/get_credits_price.json @@ -0,0 +1,19 @@ +[ + { + "name": "GET /credits-price", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/credits-price", + "path_parameters": [] + }, + "response_body": { + "data": { + "productName": "osparc", + "usdPerCredit": null, + "minPaymentAmountUsd": null + } + } + } +] diff --git a/services/api-server/tests/mocks/get_default_wallet.json b/services/api-server/tests/mocks/get_default_wallet.json index 502385e4c29..8fcddffb1be 100644 --- a/services/api-server/tests/mocks/get_default_wallet.json +++ b/services/api-server/tests/mocks/get_default_wallet.json @@ -13,7 +13,7 @@ "response_body": { "data": { "walletId": 1, - "name": "Randall Daniels", + "name": "Mike Medina", "description": "Credits purchased by Bisgaard end up in here", "owner": 3, "thumbnail": null, diff --git a/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json b/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json index b355c1c73a6..43071193260 100644 --- a/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json +++ b/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json @@ -64,7 +64,7 @@ } } }, - "prjOwner": "dkramer@example.org", + "prjOwner": "ptrevino@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/get_job_pricing_unit_success.json b/services/api-server/tests/mocks/get_job_pricing_unit_success.json index 368666913c5..f1e166bebae 100644 --- a/services/api-server/tests/mocks/get_job_pricing_unit_success.json +++ b/services/api-server/tests/mocks/get_job_pricing_unit_success.json @@ -64,7 +64,7 @@ } } }, - "prjOwner": "josebruce@example.net", + "prjOwner": "imitchell@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/get_solver_outputs.json b/services/api-server/tests/mocks/get_solver_outputs.json index e5d7f9ba7d7..2e9f02a235c 100644 --- a/services/api-server/tests/mocks/get_solver_outputs.json +++ b/services/api-server/tests/mocks/get_solver_outputs.json @@ -72,7 +72,7 @@ } } }, - "prjOwner": "ypatton@example.com", + "prjOwner": "greenrichard@example.org", "accessRights": { "3": { "read": true, @@ -162,7 +162,7 @@ "response_body": { "data": { "walletId": 1, - "name": "Hannah Johnson", + "name": "Derek Nguyen", "description": "Credits purchased by Bisgaard end up in here", "owner": 3, "thumbnail": null, @@ -205,7 +205,7 @@ "response_body": { "data": { "walletId": 1, - "name": "Kimberly Hoffman", + "name": "Eric Hunter", "description": "Credits purchased by Bisgaard end up in here", "owner": 3, "thumbnail": null, diff --git a/services/api-server/tests/mocks/get_study_job_logs.json b/services/api-server/tests/mocks/get_study_job_logs.json new file mode 100644 index 00000000000..85f07b476c9 --- /dev/null +++ b/services/api-server/tests/mocks/get_study_job_logs.json @@ -0,0 +1,31 @@ +[ + { + "name": "GET /v2/computations/1a4145e2-2fca-11ef-a199-0242ac14002a/tasks/-/logfile", + "description": "", + "method": "GET", + 
"host": "director-v2", + "path": { + "path": "/v2/computations/{project_id}/tasks/-/logfile", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": [ + { + "task_id": "2da40b78-d529-5657-95c5-ab663cbd890d", + "download_link": "http://www.jensen-boyle.com/" + } + ] + } +] diff --git a/services/api-server/tests/mocks/on_create_job.json b/services/api-server/tests/mocks/on_create_job.json index 85b1e744c22..9820285afad 100644 --- a/services/api-server/tests/mocks/on_create_job.json +++ b/services/api-server/tests/mocks/on_create_job.json @@ -20,17 +20,17 @@ "type": "computational", "authors": [ { - "name": "Francisco Higgins", - "email": "linda35@example.net", - "affiliation": "Grant Ltd" + "name": "Kimberly Wilson", + "email": "christopher72@example.org", + "affiliation": "Ford, Collins and Villarreal" }, { - "name": "Stacey Lee", - "email": "michael80@example.net", - "affiliation": "Blevins-Cantrell" + "name": "Daniel Jones", + "email": "fschmitt@example.net", + "affiliation": "Ayala-Anderson" } ], - "contact": "luis44@example.org", + "contact": "jlozano@example.net", "inputs": { "input_1": { "displayOrder": 1.0, @@ -119,7 +119,7 @@ "bootOptions": null } }, - "prjOwner": "ginamiller@example.org", + "prjOwner": "robertsmith@example.org", "accessRights": {}, "tags": [], "classifiers": [], @@ -248,7 +248,7 @@ } } }, - "prjOwner": "sbarnes@example.com", + "prjOwner": "rhondakelly@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/on_list_jobs.json b/services/api-server/tests/mocks/on_list_jobs.json index 3c629daed77..48eadf3612b 100644 --- a/services/api-server/tests/mocks/on_list_jobs.json +++ b/services/api-server/tests/mocks/on_list_jobs.json @@ -20,12 +20,12 @@ "type": "computational", "authors": [ { - "name": "William Long", - "email": "brownkaren@example.net", - "affiliation": "Harris-Wright" + "name": "Julia Lewis", + "email": "ecantrell@example.org", + "affiliation": "Eaton LLC" } ], - "contact": "grayrachel@example.com", + "contact": "moorezachary@example.com", "inputs": { "input_1": { "displayOrder": 1.0, @@ -129,7 +129,7 @@ } } }, - "prjOwner": "mark71@example.org", + "prjOwner": "madison17@example.org", "accessRights": { "3": { "read": true, @@ -199,7 +199,7 @@ } } }, - "prjOwner": "heather83@example.org", + "prjOwner": "jacksonnicole@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/run_study_workflow.json b/services/api-server/tests/mocks/run_study_workflow.json index 4c1ca7ac590..7c77a69ade0 100644 --- a/services/api-server/tests/mocks/run_study_workflow.json +++ b/services/api-server/tests/mocks/run_study_workflow.json @@ -496,7 +496,7 @@ ] } }, - "prjOwner": "thomashall@example.org", + "prjOwner": "harpercynthia@example.com", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/start_job_not_enough_credit.json b/services/api-server/tests/mocks/start_job_not_enough_credit.json index 04fbf86075b..6f433ebeee4 100644 --- a/services/api-server/tests/mocks/start_job_not_enough_credit.json +++ b/services/api-server/tests/mocks/start_job_not_enough_credit.json @@ -64,7 +64,7 @@ } } }, - "prjOwner": "allisonsoto@example.net", + "prjOwner": "austin66@example.org", "accessRights": { "3": { "read": true, diff --git 
a/services/api-server/tests/mocks/start_job_with_payment.json b/services/api-server/tests/mocks/start_job_with_payment.json index fb07f8cf1df..8e52a63a964 100644 --- a/services/api-server/tests/mocks/start_job_with_payment.json +++ b/services/api-server/tests/mocks/start_job_with_payment.json @@ -64,7 +64,7 @@ } } }, - "prjOwner": "wademiguel@example.com", + "prjOwner": "freemanryan@example.net", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json index 1f6eb91a6d8..05c81c5cfc5 100644 --- a/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json +++ b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json @@ -219,7 +219,7 @@ "inputNodes": [] } }, - "prjOwner": "krobinson@example.com", + "prjOwner": "joshualam@example.com", "accessRights": { "3": { "read": true, diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py index 7196e808d5c..1287926327a 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py @@ -26,7 +26,7 @@ ) from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import Job, JobOutputs -from simcore_service_api_server.models.schemas.studies import Study, StudyID +from simcore_service_api_server.models.schemas.studies import JobLogsMap, Study, StudyID _faker = Faker() @@ -333,3 +333,29 @@ async def test_get_study_job_outputs( assert str(job_outputs.job_id) == job_id assert job_outputs.results == {} + + +async def test_get_job_logs( + client: httpx.AsyncClient, + mocked_webserver_service_api_base, + mocked_directorv2_service_api_base, + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + _study_id = "7171cbf8-2fc9-11ef-95d3-0242ac140018" + _job_id = "1a4145e2-2fca-11ef-a199-0242ac14002a" + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_directorv2_service_api_base, + ], + capture_path=project_tests_dir / "mocks" / "get_study_job_logs.json", + side_effects_callbacks=[], + ) + + response = await client.get( + f"{API_VTAG}/studies/{_study_id}/jobs/{_job_id}/outputs/log-links", auth=auth + ) + assert response.status_code == status.HTTP_200_OK + _ = JobLogsMap.parse_obj(response.json()) diff --git a/services/api-server/tests/unit/test_credits.py b/services/api-server/tests/unit/test_credits.py new file mode 100644 index 00000000000..8c2dfd7dd74 --- /dev/null +++ b/services/api-server/tests/unit/test_credits.py @@ -0,0 +1,26 @@ +from pathlib import Path + +from fastapi import status +from httpx import AsyncClient, BasicAuth +from models_library.api_schemas_webserver.product import GetCreditPrice +from pytest_simcore.helpers.httpx_calls_capture_models import CreateRespxMockCallback +from simcore_service_api_server._meta import API_VTAG + + +async def test_get_credits_price( + client: AsyncClient, + auth: BasicAuth, + mocked_webserver_service_api_base, + create_respx_mock_from_capture: CreateRespxMockCallback, + project_tests_dir: Path, +): + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_service_api_base], + capture_path=project_tests_dir / "mocks" / "get_credits_price.json", + side_effects_callbacks=[], + ) + + response = await 
client.get(f"{API_VTAG}/credits/price", auth=auth) + assert response.status_code == status.HTTP_200_OK + _ = GetCreditPrice.parse_obj(response.json()) From ad25d00d8c12c91849932ba0c1920c616d36f182 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 25 Jun 2024 13:55:11 +0200 Subject: [PATCH 061/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Maintenance:=20`py?= =?UTF-8?q?test-simcore`=20initial=20cleanup=20(#5986)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/aws-library/tests/conftest.py | 4 +- packages/aws-library/tests/test_s3_client.py | 2 +- .../tests/conftest.py | 2 +- .../tests/test_utils_string_substitution.py | 2 +- .../notifications-library/tests/conftest.py | 7 +- .../tests/email/conftest.py | 2 +- packages/postgres-database/tests/conftest.py | 2 +- .../tests/products/conftest.py | 2 +- .../tests/projects/conftest.py | 2 +- .../tests/test_classifiers.py | 2 +- .../postgres-database/tests/test_clusters.py | 2 +- .../tests/test_delete_projects_and_users.py | 2 +- .../postgres-database/tests/test_groups.py | 2 +- .../tests/test_models_api_keys.py | 2 +- .../tests/test_models_payments_methods.py | 2 +- .../test_models_payments_transactions.py | 2 +- .../tests/test_models_products_prices.py | 2 +- .../tests/test_services_consume_filetypes.py | 2 +- .../tests/test_uniqueness_in_comp_tasks.py | 2 +- .../postgres-database/tests/test_users.py | 2 +- .../tests/test_users_details.py | 2 +- .../tests/test_utils_payments_autorecharge.py | 2 +- .../tests/test_utils_services.py | 2 +- .../tests/test_utils_tags.py | 2 +- .../tests/test_utils_user_preferences.py | 2 +- .../tests/test_utils_users.py | 2 +- .../src/pytest_simcore/__init__.py | 8 +- .../src/pytest_simcore/aioresponses_mocker.py | 2 +- .../src/pytest_simcore/aws_server.py | 4 +- .../src/pytest_simcore/cli_runner.py | 2 +- .../{container_pause.py => docker.py} | 0 .../src/pytest_simcore/docker_compose.py | 17 +- .../src/pytest_simcore/docker_registry.py | 4 +- .../src/pytest_simcore/docker_swarm.py | 10 +- .../src/pytest_simcore/environment_configs.py | 2 +- .../src/pytest_simcore/faker_payments_data.py | 2 +- .../src/pytest_simcore/faker_products_data.py | 2 +- .../src/pytest_simcore/faker_users_data.py | 2 +- .../{utils_assert.py => assert_checks.py} | 0 ...ils_environs.py => deprecated_environs.py} | 0 .../helpers/{utils_dict.py => dict_tools.py} | 7 +- .../helpers/{utils_docker.py => docker.py} | 0 ..._docker_registry.py => docker_registry.py} | 0 .../{rawdata_fakers.py => faker_factories.py} | 0 .../helpers/{utils_host.py => host.py} | 0 .../{logging_utils.py => helpers/logging.py} | 0 .../{utils_envs.py => monkeypatch_envs.py} | 0 ...arametrizations.py => parametrizations.py} | 0 .../playwright.py} | 2 +- .../{utils_tags.py => postgres_tags.py} | 0 .../{utils_postgres.py => postgres_tools.py} | 0 ...unch_citations.py => scrunch_citations.py} | 0 ...ils_public_api.py => typing_public_api.py} | 0 .../helpers/utils_rate_limit.py | 186 ------------------ ...server.py => webserver_fake_ports_data.py} | 0 ...ces.py => webserver_fake_services_data.py} | 0 .../{utils_login.py => webserver_login.py} | 4 +- ...th_db.py => webserver_parametrizations.py} | 0 ...tils_projects.py => webserver_projects.py} | 2 +- .../{utils_tokens.py => webserver_tokens.py} | 0 .../src/pytest_simcore/httpbin_service.py | 2 +- .../src/pytest_simcore/httpx_calls_capture.py | 6 +- .../src/pytest_simcore/minio_service.py | 6 +- 
.../src/pytest_simcore/monkeypatch_extra.py | 25 --- .../src/pytest_simcore/postgres_service.py | 8 +- .../src/pytest_simcore/rabbit_service.py | 4 +- .../src/pytest_simcore/redis_service.py | 4 +- .../pytest_simcore/simcore_dask_service.py | 4 +- .../src/pytest_simcore/simcore_services.py | 30 ++- .../pytest_simcore/simcore_storage_service.py | 4 +- .../simcore_webserver_service.py | 2 +- .../{pytest_socketio.py => socketio.py} | 0 ...websocket_client.py => socketio_client.py} | 2 +- .../src/pytest_simcore/tmp_path_extra.py | 25 --- .../src/pytest_simcore/traefik_service.py | 2 +- .../tests/test_helpers_utils_dict.py | 2 +- .../tests/test_helpers_utils_envs.py | 2 +- .../service-integration/tests/conftest.py | 2 +- .../aiohttp/long_running_tasks/conftest.py | 2 +- .../test_long_running_tasks.py | 2 +- .../test_long_running_tasks_client.py | 2 +- ...st_long_running_tasks_with_task_context.py | 2 +- packages/service-library/tests/conftest.py | 3 +- .../tests/fastapi/test_rabbitmq.py | 2 +- .../src/settings_library/comp_services.py | 4 +- .../src/settings_library/docker_registry.py | 2 +- .../src/settings_library/email.py | 4 +- .../src/settings_library/node_ports.py | 4 +- .../tests/test_application.py | 2 +- packages/settings-library/tests/test_base.py | 2 +- .../tests/test_base_w_postgres.py | 2 +- packages/settings-library/tests/test_email.py | 2 +- .../tests/test_node_ports_settings.py | 2 +- .../settings-library/tests/test_twilio.py | 2 +- .../settings-library/tests/test_utils_cli.py | 2 +- packages/simcore-sdk/tests/conftest.py | 3 +- .../simcore-sdk/tests/integration/conftest.py | 2 +- .../test_node_ports_common_filemanager.py | 2 +- .../test_node_ports_common_storage_client.py | 2 +- .../tests/unit/test_storage_client.py | 2 +- services/api-server/tests/conftest.py | 3 +- .../tests/unit/_with_db/conftest.py | 4 +- .../_with_db/test_core_settings__with_db.py | 2 +- .../tests/unit/api_studies/conftest.py | 2 +- services/api-server/tests/unit/conftest.py | 4 +- .../tests/unit/test_core_settings.py | 2 +- .../tests/unit/test_services_rabbitmq.py | 2 +- services/autoscaling/tests/unit/conftest.py | 5 +- .../autoscaling/tests/unit/test_api_health.py | 2 +- .../tests/unit/test_core_settings.py | 2 +- services/autoscaling/tests/unit/test_main.py | 2 +- ...test_modules_auto_scaling_computational.py | 2 +- .../unit/test_modules_auto_scaling_dynamic.py | 2 +- .../unit/test_modules_auto_scaling_task.py | 2 +- .../tests/unit/test_modules_dask.py | 2 +- .../unit/test_utils_auto_scaling_core.py | 2 +- .../tests/unit/test_utils_docker.py | 2 +- services/catalog/tests/unit/conftest.py | 3 +- .../tests/unit/test_services_director.py | 2 +- .../catalog/tests/unit/with_dbs/conftest.py | 6 +- .../clusters-keeper/tests/unit/conftest.py | 5 +- .../tests/unit/test_api_health.py | 2 +- .../tests/unit/test_core_settings.py | 2 +- .../clusters-keeper/tests/unit/test_main.py | 2 +- .../tests/unit/test_modules_clusters.py | 2 +- .../test_modules_clusters_management_core.py | 2 +- .../test_modules_clusters_management_task.py | 2 +- .../tests/unit/test_modules_remote_debug.py | 2 +- .../tests/unit/test_utils_clusters.py | 2 +- .../tests/unit/test_utils_ec2.py | 2 +- services/dask-sidecar/tests/unit/conftest.py | 3 +- services/director-v2/tests/conftest.py | 8 +- .../integration/01/test_computation_api.py | 4 +- .../02/test_dynamic_services_routes.py | 4 +- ...t_dynamic_sidecar_nodeports_integration.py | 4 +- ...ixed_dynamic_sidecar_and_legacy_project.py | 4 +- .../director-v2/tests/integration/02/utils.py | 2 +- 
...ic_sidecar_docker_service_specs_sidecar.py | 2 +- .../test_modules_dynamic_sidecar_observer.py | 2 +- ...s_dynamic_sidecar_scheduler_core_events.py | 2 +- ..._modules_dynamic_sidecar_scheduler_task.py | 2 +- .../tests/unit/test_modules_notifier.py | 2 +- .../test_api_route_computations_tasks.py | 2 +- ...db_repositories_groups_extra_properties.py | 2 +- .../test_modules_db_repositories_projects.py | 2 +- ...test_modules_dynamic_sidecar_docker_api.py | 2 +- ...es_dynamic_sidecar_docker_service_specs.py | 2 +- services/director/tests/conftest.py | 1 - services/dynamic-scheduler/tests/conftest.py | 3 +- .../dynamic-scheduler/tests/unit/test_cli.py | 2 +- services/dynamic-sidecar/tests/conftest.py | 5 +- .../tests/integration/conftest.py | 2 +- .../test_modules_long_running_tasks.py | 6 +- .../test_modules_user_services_preferences.py | 4 +- .../dynamic-sidecar/tests/unit/conftest.py | 2 +- .../tests/unit/test__oas_spec.py | 2 +- .../tests/unit/test_api_containers.py | 2 +- .../test_api_containers_long_running_tasks.py | 2 +- .../tests/unit/test_api_prometheus_metrics.py | 2 +- .../unit/test_api_workflow_service_metrics.py | 2 +- .../unit/test_core_external_dependencies.py | 2 +- .../tests/unit/test_core_reserved_space.py | 2 +- .../tests/unit/test_core_settings.py | 2 +- .../unit/test_modules_outputs_manager.py | 2 +- .../test_modules_system_monitor__notifier.py | 2 +- services/efs-guardian/tests/unit/conftest.py | 3 +- .../tests/unit/test_api_health.py | 2 +- .../tests/unit/test_core_settings.py | 2 +- .../tests/unit/test_efs_manager.py | 2 +- services/efs-guardian/tests/unit/test_main.py | 2 +- services/invitations/tests/unit/conftest.py | 2 +- services/invitations/tests/unit/test_cli.py | 2 +- .../tests/unit/test_core_settings.py | 2 +- services/migration/tests/conftest.py | 1 - .../tests/integration/conftest.py | 2 +- .../tests/integration/test_clusters.py | 2 +- .../tests/integration/test_dask_sidecar.py | 2 +- .../tests/integration/test_gateway.py | 2 +- .../tests/system/test_deploy.py | 2 +- services/payments/tests/conftest.py | 5 +- .../api/test__one_time_payment_workflows.py | 2 +- .../api/test__payment_method_workflows.py | 2 +- .../unit/api/test_rest_acknowledgements.py | 2 +- services/payments/tests/unit/conftest.py | 2 +- services/payments/tests/unit/test_cli.py | 2 +- .../unit/test_db_payments_methods_repo.py | 2 +- .../test_db_payments_transactions_repo.py | 2 +- .../tests/unit/test_db_payments_users_repo.py | 4 +- .../payments/tests/unit/test_rpc_payments.py | 2 +- .../tests/unit/test_rpc_payments_methods.py | 2 +- .../test_services_auto_recharge_listener.py | 4 +- .../tests/unit/test_services_notifier.py | 4 +- .../unit/test_services_notifier_email.py | 2 +- .../tests/unit/test_services_payments.py | 2 +- .../test_services_payments__get_invoice.py | 4 +- .../unit/test_services_payments_gateway.py | 2 +- .../test_services_resource_usage_tracker.py | 2 +- .../tests/unit/test_services_stripe.py | 2 +- .../tests/unit/conftest.py | 3 +- .../tests/unit/test_web_main.py | 2 +- .../tests/unit/with_dbs/conftest.py | 2 +- services/storage/tests/conftest.py | 3 +- .../storage/tests/fixtures/data_models.py | 2 +- services/storage/tests/unit/conftest.py | 2 +- .../storage/tests/unit/test__openapi_specs.py | 2 +- .../storage/tests/unit/test_dsm_dsmcleaner.py | 2 +- .../tests/unit/test_handlers_datasets.py | 4 +- .../storage/tests/unit/test_handlers_files.py | 4 +- .../unit/test_handlers_files_metadata.py | 2 +- .../tests/unit/test_handlers_health.py | 2 +- 
.../tests/unit/test_handlers_locations.py | 2 +- .../tests/unit/test_handlers_simcore_s3.py | 2 +- services/storage/tests/unit/test_s3_client.py | 2 +- services/storage/tests/unit/test_s3_utils.py | 2 +- services/web/server/tests/conftest.py | 11 +- .../01/test_exporter_requests_handlers.py | 4 +- .../integration/01/test_garbage_collection.py | 4 +- .../server/tests/integration/02/conftest.py | 2 +- .../notifications/test_rabbitmq_consumers.py | 2 +- .../02/scicrunch/test_scicrunch__resolver.py | 2 +- .../02/scicrunch/test_scicrunch__rest.py | 5 +- .../tests/integration/02/test_computation.py | 2 +- .../web/server/tests/integration/conftest.py | 6 +- services/web/server/tests/unit/conftest.py | 4 +- .../server/tests/unit/isolated/conftest.py | 4 +- .../tests/unit/isolated/test__configs.py | 2 +- .../tests/unit/isolated/test_activity.py | 2 +- .../isolated/test_application_settings.py | 5 +- .../test_application_settings_utils.py | 2 +- .../isolated/test_diagnostics_healthcheck.py | 2 +- .../unit/isolated/test_login_settings.py | 2 +- .../server/tests/unit/isolated/test_rest.py | 2 +- .../isolated/test_studies_dispatcher_core.py | 2 +- ..._studies_dispatcher_projects_permalinks.py | 2 +- .../test_studies_dispatcher_settings.py | 2 +- .../01/clusters/test_clusters_handlers.py | 4 +- ...fications__db_comp_tasks_listening_task.py | 2 +- .../01/studies_dispatcher/conftest.py | 2 +- .../test_studies_dispatcher_handlers.py | 4 +- .../test_studies_dispatcher_projects.py | 6 +- .../test_studies_dispatcher_studies_access.py | 8 +- .../tests/unit/with_dbs/01/test_api_keys.py | 4 +- .../unit/with_dbs/01/test_api_keys_rpc.py | 4 +- .../01/test_catalog_handlers__pricing_plan.py | 4 +- .../01/test_catalog_handlers__services.py | 6 +- ...st_catalog_handlers__services_resources.py | 4 +- .../with_dbs/01/test_director_v2_handlers.py | 6 +- .../tests/unit/with_dbs/01/test_groups.py | 6 +- .../01/test_groups_handlers_classifers.py | 2 +- .../with_dbs/01/test_long_running_tasks.py | 4 +- .../01/test_resource_manager_user_sessions.py | 2 +- .../tests/unit/with_dbs/01/test_storage.py | 2 +- .../server/tests/unit/with_dbs/02/conftest.py | 8 +- .../unit/with_dbs/02/test_announcements.py | 4 +- .../02/test_projects_cancellations.py | 6 +- .../02/test_projects_comments_handlers.py | 4 +- .../02/test_projects_crud_handlers.py | 6 +- .../02/test_projects_crud_handlers__clone.py | 4 +- .../02/test_projects_crud_handlers__delete.py | 6 +- .../02/test_projects_crud_handlers__list.py | 4 +- ...s_crud_handlers__list_with_query_params.py | 4 +- .../02/test_projects_crud_handlers__patch.py | 4 +- .../02/test_projects_metadata_handlers.py | 6 +- .../02/test_projects_nodes_handler.py | 6 +- .../02/test_projects_nodes_handlers__patch.py | 4 +- ...rojects_nodes_handlers__services_access.py | 2 +- ...st_projects_nodes_pricing_unit_handlers.py | 4 +- .../02/test_projects_ports_handlers.py | 8 +- .../02/test_projects_states_handlers.py | 8 +- .../02/test_projects_wallet_handlers.py | 4 +- .../server/tests/unit/with_dbs/03/conftest.py | 2 +- .../test_resource_manager.py | 10 +- .../unit/with_dbs/03/invitations/conftest.py | 2 +- .../03/invitations/test_invitations.py | 4 +- ...login_handlers_registration_invitations.py | 2 +- .../test_products__invitations_handlers.py | 6 +- .../tests/unit/with_dbs/03/login/conftest.py | 4 +- .../unit/with_dbs/03/login/test_login_2fa.py | 6 +- .../03/login/test_login_2fa_resend.py | 6 +- .../unit/with_dbs/03/login/test_login_auth.py | 4 +- .../03/login/test_login_change_email.py | 4 +- 
.../03/login/test_login_change_password.py | 4 +- .../with_dbs/03/login/test_login_logout.py | 4 +- .../03/login/test_login_registration.py | 6 +- .../login/test_login_registration_handlers.py | 4 +- .../03/login/test_login_reset_password.py | 4 +- .../test_meta_modeling_iterations.py | 6 +- .../unit/with_dbs/03/products/conftest.py | 2 +- .../03/products/test_products_handlers.py | 4 +- .../with_dbs/03/products/test_products_rpc.py | 4 +- .../with_dbs/03/resource_usage/conftest.py | 2 +- .../test_admin_pricing_plans.py | 4 +- .../03/resource_usage/test_pricing_plans.py | 4 +- .../test_usage_services__export.py | 2 +- .../test_usage_services__list.py | 4 +- .../tests/unit/with_dbs/03/tags/conftest.py | 2 +- .../tests/unit/with_dbs/03/tags/test_tags.py | 8 +- .../unit/with_dbs/03/test__openapi_specs.py | 2 +- .../tests/unit/with_dbs/03/test_email.py | 6 +- .../tests/unit/with_dbs/03/test_project_db.py | 6 +- .../tests/unit/with_dbs/03/test_session.py | 4 +- .../tests/unit/with_dbs/03/test_socketio.py | 6 +- .../unit/with_dbs/03/test_storage_handlers.py | 4 +- .../tests/unit/with_dbs/03/test_users.py | 8 +- .../with_dbs/03/test_users__notifications.py | 15 +- .../03/test_users__preferences_api.py | 4 +- .../03/test_users__preferences_handlers.py | 6 +- .../03/test_users__preferences_models.py | 2 +- .../unit/with_dbs/03/test_users__tokens.py | 8 +- .../tests/unit/with_dbs/03/test_users_api.py | 6 +- .../with_dbs/03/version_control/conftest.py | 9 +- .../test_version_control_handlers.py | 2 +- .../unit/with_dbs/03/wallets/conftest.py | 4 +- .../with_dbs/03/wallets/payments/conftest.py | 4 +- .../03/wallets/payments/test_payments.py | 4 +- .../wallets/payments/test_payments_methods.py | 2 +- .../03/wallets/payments/test_payments_rpc.py | 4 +- .../unit/with_dbs/03/wallets/test_wallets.py | 4 +- .../03/wallets/test_wallets_groups.py | 4 +- .../server/tests/unit/with_dbs/conftest.py | 8 +- tests/e2e-playwright/tests/conftest.py | 4 +- .../tests/jupyterlabs/test_jupyterlab.py | 4 +- .../test_resource_usage_tracker.py | 2 +- .../tests/sim4life/test_sim4life.py | 4 +- .../tests/sleepers/test_sleepers.py | 4 +- tests/e2e-playwright/tests/tip/conftest.py | 4 +- .../e2e-playwright/tests/tip/test_ti_plan.py | 4 +- tests/public-api/conftest.py | 14 +- tests/public-api/test_solvers_api.py | 2 +- tests/public-api/test_solvers_jobs_api.py | 2 +- tests/public-api/test_users_api.py | 2 +- tests/swarm-deploy/conftest.py | 3 +- 332 files changed, 533 insertions(+), 765 deletions(-) rename packages/pytest-simcore/src/pytest_simcore/{container_pause.py => docker.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_assert.py => assert_checks.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_environs.py => deprecated_environs.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_dict.py => dict_tools.py} (89%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_docker.py => docker.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_docker_registry.py => docker_registry.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{rawdata_fakers.py => faker_factories.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_host.py => host.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/{logging_utils.py => helpers/logging.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_envs.py => monkeypatch_envs.py} (100%) rename 
packages/pytest-simcore/src/pytest_simcore/helpers/{utils_parametrizations.py => parametrizations.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/{playwright_utils.py => helpers/playwright.py} (99%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_tags.py => postgres_tags.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_postgres.py => postgres_tools.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_scrunch_citations.py => scrunch_citations.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_public_api.py => typing_public_api.py} (100%) delete mode 100644 packages/pytest-simcore/src/pytest_simcore/helpers/utils_rate_limit.py rename packages/pytest-simcore/src/pytest_simcore/helpers/{faker_webserver.py => webserver_fake_ports_data.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_services.py => webserver_fake_services_data.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_login.py => webserver_login.py} (98%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_webserver_unit_with_db.py => webserver_parametrizations.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_projects.py => webserver_projects.py} (99%) rename packages/pytest-simcore/src/pytest_simcore/helpers/{utils_tokens.py => webserver_tokens.py} (100%) delete mode 100644 packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py rename packages/pytest-simcore/src/pytest_simcore/{pytest_socketio.py => socketio.py} (100%) rename packages/pytest-simcore/src/pytest_simcore/{websocket_client.py => socketio_client.py} (97%) delete mode 100644 packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py diff --git a/packages/aws-library/tests/conftest.py b/packages/aws-library/tests/conftest.py index e3aca9457e9..af911581920 100644 --- a/packages/aws-library/tests/conftest.py +++ b/packages/aws-library/tests/conftest.py @@ -7,13 +7,13 @@ import pytest pytest_plugins = [ - "pytest_simcore.aws_server", "pytest_simcore.aws_ec2_service", "pytest_simcore.aws_s3_service", + "pytest_simcore.aws_server", "pytest_simcore.environment_configs", - "pytest_simcore.repository_paths", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", ] diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index a6ee8c3033f..92fc77cbfef 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -17,7 +17,7 @@ from models_library.api_schemas_storage import S3BucketName from moto.server import ThreadedMotoServer from pydantic import AnyUrl -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from settings_library.s3 import S3Settings from types_aiobotocore_s3 import S3Client diff --git a/packages/dask-task-models-library/tests/conftest.py b/packages/dask-task-models-library/tests/conftest.py index b98e60ad64d..e551898ea95 100644 --- a/packages/dask-task-models-library/tests/conftest.py +++ b/packages/dask-task-models-library/tests/conftest.py @@ -7,9 +7,9 @@ import pytest pytest_plugins = [ - "pytest_simcore.repository_paths", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", ] diff --git a/packages/models-library/tests/test_utils_string_substitution.py 
b/packages/models-library/tests/test_utils_string_substitution.py index 59aade68d2b..cfc627ae819 100644 --- a/packages/models-library/tests/test_utils_string_substitution.py +++ b/packages/models-library/tests/test_utils_string_substitution.py @@ -15,7 +15,7 @@ substitute_all_legacy_identifiers, upgrade_identifier, ) -from pytest_simcore.helpers.utils_envs import load_dotenv +from pytest_simcore.helpers.monkeypatch_envs import load_dotenv @pytest.mark.parametrize( diff --git a/packages/notifications-library/tests/conftest.py b/packages/notifications-library/tests/conftest.py index 0be979be507..4570801119a 100644 --- a/packages/notifications-library/tests/conftest.py +++ b/packages/notifications-library/tests/conftest.py @@ -17,15 +17,14 @@ from simcore_postgres_database.models.products import Vendor pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", - "pytest_simcore.repository_paths", "pytest_simcore.faker_payments_data", "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", - "pytest_simcore.docker_compose", "pytest_simcore.postgres_service", - "pytest_simcore.docker_swarm", - "pytest_simcore.tmp_path_extra", + "pytest_simcore.repository_paths", ] diff --git a/packages/notifications-library/tests/email/conftest.py b/packages/notifications-library/tests/email/conftest.py index cd05fc6eddd..f0f47f0e3d2 100644 --- a/packages/notifications-library/tests/email/conftest.py +++ b/packages/notifications-library/tests/email/conftest.py @@ -3,8 +3,8 @@ import pytest from pydantic import EmailStr from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict @pytest.fixture diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index 65e36b91406..72b4ed4cf11 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -16,7 +16,7 @@ from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy, RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import ( +from pytest_simcore.helpers.faker_factories import ( random_group, random_project, random_user, diff --git a/packages/postgres-database/tests/products/conftest.py b/packages/postgres-database/tests/products/conftest.py index 60eaf7a736c..eb3d213c249 100644 --- a/packages/postgres-database/tests/products/conftest.py +++ b/packages/postgres-database/tests/products/conftest.py @@ -9,7 +9,7 @@ import pytest from aiopg.sa.exc import ResourceClosedError from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_product +from pytest_simcore.helpers.faker_factories import random_product from simcore_postgres_database.webserver_models import products from sqlalchemy.dialects.postgresql import insert as pg_insert diff --git a/packages/postgres-database/tests/projects/conftest.py b/packages/postgres-database/tests/projects/conftest.py index f1623ac4be6..fb507557fbf 100644 --- a/packages/postgres-database/tests/projects/conftest.py +++ b/packages/postgres-database/tests/projects/conftest.py @@ -10,7 +10,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy, RowProxy -from pytest_simcore.helpers.rawdata_fakers import random_project, random_user +from 
pytest_simcore.helpers.faker_factories import random_project, random_user from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.users import users diff --git a/packages/postgres-database/tests/test_classifiers.py b/packages/postgres-database/tests/test_classifiers.py index 91e8b1e4cc0..f53740a124d 100644 --- a/packages/postgres-database/tests/test_classifiers.py +++ b/packages/postgres-database/tests/test_classifiers.py @@ -10,7 +10,7 @@ import pytest import sqlalchemy as sa from aiopg.sa.engine import Engine -from pytest_simcore.helpers.rawdata_fakers import random_group +from pytest_simcore.helpers.faker_factories import random_group from simcore_postgres_database.models.classifiers import group_classifiers from simcore_postgres_database.models.groups import groups from sqlalchemy import func, literal_column diff --git a/packages/postgres-database/tests/test_clusters.py b/packages/postgres-database/tests/test_clusters.py index a83898d1c74..6dccc8ef153 100644 --- a/packages/postgres-database/tests/test_clusters.py +++ b/packages/postgres-database/tests/test_clusters.py @@ -8,7 +8,7 @@ import sqlalchemy as sa from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.errors import ForeignKeyViolation, NotNullViolation from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups from simcore_postgres_database.models.clusters import ClusterType, clusters diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py index c86c567e2ca..b0e0edacef8 100644 --- a/packages/postgres-database/tests/test_delete_projects_and_users.py +++ b/packages/postgres-database/tests/test_delete_projects_and_users.py @@ -9,7 +9,7 @@ from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy, RowProxy from psycopg2.errors import ForeignKeyViolation -from pytest_simcore.helpers.rawdata_fakers import random_project, random_user +from pytest_simcore.helpers.faker_factories import random_project, random_user from simcore_postgres_database.webserver_models import projects, users from sqlalchemy import func diff --git a/packages/postgres-database/tests/test_groups.py b/packages/postgres-database/tests/test_groups.py index 2b222f8b896..649e2111867 100644 --- a/packages/postgres-database/tests/test_groups.py +++ b/packages/postgres-database/tests/test_groups.py @@ -13,7 +13,7 @@ from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy, RowProxy from psycopg2.errors import ForeignKeyViolation, RaiseException, UniqueViolation -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.models.base import metadata from simcore_postgres_database.webserver_models import ( GroupType, diff --git a/packages/postgres-database/tests/test_models_api_keys.py b/packages/postgres-database/tests/test_models_api_keys.py index ab5cb83b114..2c5de1a56a8 100644 --- a/packages/postgres-database/tests/test_models_api_keys.py +++ b/packages/postgres-database/tests/test_models_api_keys.py @@ -9,7 +9,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy -from pytest_simcore.helpers.rawdata_fakers import ( +from 
pytest_simcore.helpers.faker_factories import ( random_api_key, random_product, random_user, diff --git a/packages/postgres-database/tests/test_models_payments_methods.py b/packages/postgres-database/tests/test_models_payments_methods.py index 2dc3760a71d..100c0e5431b 100644 --- a/packages/postgres-database/tests/test_models_payments_methods.py +++ b/packages/postgres-database/tests/test_models_payments_methods.py @@ -9,7 +9,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_payment_method +from pytest_simcore.helpers.faker_factories import random_payment_method from simcore_postgres_database.errors import UniqueViolation from simcore_postgres_database.models.payments_methods import ( InitPromptAckFlowState, diff --git a/packages/postgres-database/tests/test_models_payments_transactions.py b/packages/postgres-database/tests/test_models_payments_transactions.py index 278dbb62fa5..6a0e6892bc9 100644 --- a/packages/postgres-database/tests/test_models_payments_transactions.py +++ b/packages/postgres-database/tests/test_models_payments_transactions.py @@ -11,7 +11,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_payment_transaction, utcnow +from pytest_simcore.helpers.faker_factories import random_payment_transaction, utcnow from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, payments_transactions, diff --git a/packages/postgres-database/tests/test_models_products_prices.py b/packages/postgres-database/tests/test_models_products_prices.py index 9c1f28070ab..7112f31b612 100644 --- a/packages/postgres-database/tests/test_models_products_prices.py +++ b/packages/postgres-database/tests/test_models_products_prices.py @@ -9,7 +9,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_product +from pytest_simcore.helpers.faker_factories import random_product from simcore_postgres_database.errors import CheckViolation, ForeignKeyViolation from simcore_postgres_database.models.products import products from simcore_postgres_database.models.products_prices import products_prices diff --git a/packages/postgres-database/tests/test_services_consume_filetypes.py b/packages/postgres-database/tests/test_services_consume_filetypes.py index d1ee3638af9..88c68dadc7c 100644 --- a/packages/postgres-database/tests/test_services_consume_filetypes.py +++ b/packages/postgres-database/tests/test_services_consume_filetypes.py @@ -11,7 +11,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.exc import ResourceClosedError from aiopg.sa.result import ResultProxy, RowProxy -from pytest_simcore.helpers.utils_services import ( +from pytest_simcore.helpers.webserver_fake_services_data import ( FAKE_FILE_CONSUMER_SERVICES, list_supported_filetypes, ) diff --git a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py index 4058ae99c9c..80b9b28e514 100644 --- a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py +++ b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py @@ -8,7 +8,7 @@ import pytest import sqlalchemy as sa from psycopg2.errors import UniqueViolation # pylint: disable=no-name-in-module -from 
pytest_simcore.helpers.rawdata_fakers import fake_pipeline, fake_task_factory +from pytest_simcore.helpers.faker_factories import fake_pipeline, fake_task_factory from simcore_postgres_database.models.base import metadata from simcore_postgres_database.webserver_models import comp_pipeline, comp_tasks diff --git a/packages/postgres-database/tests/test_users.py b/packages/postgres-database/tests/test_users.py index 078c964fc52..97bfa3b2f99 100644 --- a/packages/postgres-database/tests/test_users.py +++ b/packages/postgres-database/tests/test_users.py @@ -10,7 +10,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.errors import InvalidTextRepresentation, UniqueViolation from simcore_postgres_database.models.users import ( _USER_ROLE_TO_LEVEL, diff --git a/packages/postgres-database/tests/test_users_details.py b/packages/postgres-database/tests/test_users_details.py index 6a19cd007f5..f99ab6396f0 100644 --- a/packages/postgres-database/tests/test_users_details.py +++ b/packages/postgres-database/tests/test_users_details.py @@ -10,7 +10,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import ( +from pytest_simcore.helpers.faker_factories import ( random_pre_registration_details, random_user, ) diff --git a/packages/postgres-database/tests/test_utils_payments_autorecharge.py b/packages/postgres-database/tests/test_utils_payments_autorecharge.py index fa8a67dd0e8..1746b8720cc 100644 --- a/packages/postgres-database/tests/test_utils_payments_autorecharge.py +++ b/packages/postgres-database/tests/test_utils_payments_autorecharge.py @@ -11,7 +11,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_payment_method, utcnow +from pytest_simcore.helpers.faker_factories import random_payment_method, utcnow from simcore_postgres_database.models.payments_methods import ( InitPromptAckFlowState, payments_methods, diff --git a/packages/postgres-database/tests/test_utils_services.py b/packages/postgres-database/tests/test_utils_services.py index 4c9c86a50ca..70b102fea70 100644 --- a/packages/postgres-database/tests/test_utils_services.py +++ b/packages/postgres-database/tests/test_utils_services.py @@ -9,7 +9,7 @@ import pytest import sqlalchemy as sa from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_group +from pytest_simcore.helpers.faker_factories import random_group from simcore_postgres_database.models.groups import GroupType, groups from simcore_postgres_database.models.products import products from simcore_postgres_database.models.services import ( diff --git a/packages/postgres-database/tests/test_utils_tags.py b/packages/postgres-database/tests/test_utils_tags.py index 191d2ddb549..86fca7dfaef 100644 --- a/packages/postgres-database/tests/test_utils_tags.py +++ b/packages/postgres-database/tests/test_utils_tags.py @@ -10,7 +10,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy -from pytest_simcore.helpers.utils_tags import create_tag, create_tag_access +from pytest_simcore.helpers.postgres_tags import create_tag, create_tag_access from 
simcore_postgres_database.models.tags import tags_to_groups from simcore_postgres_database.models.users import UserRole, UserStatus from simcore_postgres_database.utils_tags import ( diff --git a/packages/postgres-database/tests/test_utils_user_preferences.py b/packages/postgres-database/tests/test_utils_user_preferences.py index 83764ecc02c..d432708e9e9 100644 --- a/packages/postgres-database/tests/test_utils_user_preferences.py +++ b/packages/postgres-database/tests/test_utils_user_preferences.py @@ -8,7 +8,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.models.users import UserRole, users from simcore_postgres_database.utils_user_preferences import ( BasePreferencesRepo, diff --git a/packages/postgres-database/tests/test_utils_users.py b/packages/postgres-database/tests/test_utils_users.py index 1f6960e6748..5ee6cf11038 100644 --- a/packages/postgres-database/tests/test_utils_users.py +++ b/packages/postgres-database/tests/test_utils_users.py @@ -9,7 +9,7 @@ import pytest from aiopg.sa.connection import SAConnection from faker import Faker -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.models.users import UserRole, users from simcore_postgres_database.utils_users import UserNotFoundInRepoError, UsersRepo diff --git a/packages/pytest-simcore/src/pytest_simcore/__init__.py b/packages/pytest-simcore/src/pytest_simcore/__init__.py index 5fa2fa4edd5..60638a8946e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/__init__.py +++ b/packages/pytest-simcore/src/pytest_simcore/__init__.py @@ -1,6 +1,5 @@ # Collection of tests fixtures for integration testing from importlib.metadata import version -from pathlib import Path import pytest @@ -16,8 +15,11 @@ def pytest_addoption(parser: pytest.Parser): help="Keep stack/registry up after fixtures closes", ) - # DUMMY - parser.addini("HELLO", "Dummy pytest.ini setting") + +@pytest.fixture(scope="session") +def keep_docker_up(request: pytest.FixtureRequest) -> bool: + flag: bool = bool(request.config.getoption(name="--keep-docker-up", default=False)) + return flag @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/aioresponses_mocker.py b/packages/pytest-simcore/src/pytest_simcore/aioresponses_mocker.py index f74d4db6617..a81a87acfea 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aioresponses_mocker.py +++ b/packages/pytest-simcore/src/pytest_simcore/aioresponses_mocker.py @@ -3,7 +3,7 @@ import pytest from aioresponses import aioresponses as AioResponsesMock # noqa: N812 -from .helpers.utils_host import get_localhost_ip +from .helpers.host import get_localhost_ip # WARNING: any request done through the client will go through aioresponses. It is # unfortunate but that means any valid request (like calling the test server) prefix must be set as passthrough. 
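The passthrough warning in the hunk above is the one subtlety of that fixture: aioresponses intercepts every request made through an aiohttp client, so any URL prefix that must reach a real endpoint (typically the locally running test server) has to be declared as passthrough. A minimal sketch of such a fixture, assuming only the `passthrough` argument documented by the aioresponses library and the `get_localhost_ip` helper imported in the hunk above; the fixture name and the exact prefix list here are illustrative, not the file's verbatim contents:

    from collections.abc import Iterator

    import pytest
    from aioresponses import aioresponses as AioResponsesMock  # noqa: N812

    from .helpers.host import get_localhost_ip


    @pytest.fixture
    def aioresponses_mocker() -> Iterator[AioResponsesMock]:
        # Every request the aiohttp client makes is intercepted, except URLs
        # matching one of these prefixes, so calls to the local test server
        # (and websocket upgrades) still reach the real endpoint.
        passthrough = [f"http://{get_localhost_ip()}", "http://127.0.0.1", "ws://"]
        with AioResponsesMock(passthrough=passthrough) as mock:
            yield mock

Anything not covered by a passthrough prefix can then be mocked as usual inside a test, e.g. `aioresponses_mocker.get("http://fake-service/v0/health", payload={"status": "ok"})`.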
diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index aaf3932c80a..96e05999abf 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -13,8 +13,8 @@ from settings_library.ec2 import EC2Settings from settings_library.s3 import S3Settings -from .helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from .helpers.utils_host import get_localhost_ip +from .helpers.host import get_localhost_ip +from .helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict @pytest.fixture(scope="module") diff --git a/packages/pytest-simcore/src/pytest_simcore/cli_runner.py b/packages/pytest-simcore/src/pytest_simcore/cli_runner.py index 0c643102b65..81dbbfdd98e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/cli_runner.py +++ b/packages/pytest-simcore/src/pytest_simcore/cli_runner.py @@ -6,7 +6,7 @@ # Based on https://github.com/Stranger6667/pytest-click -from typing import Iterator +from collections.abc import Iterator import pytest from typer.testing import CliRunner diff --git a/packages/pytest-simcore/src/pytest_simcore/container_pause.py b/packages/pytest-simcore/src/pytest_simcore/docker.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/container_pause.py rename to packages/pytest-simcore/src/pytest_simcore/docker.py diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 138f458bf48..a242e819b19 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -28,9 +28,20 @@ FIXTURE_CONFIG_OPS_SERVICES_SELECTION, ) from .helpers.constants import HEADER_STR +from .helpers.docker import run_docker_compose_config, save_docker_infos +from .helpers.host import get_localhost_ip from .helpers.typing_env import EnvVarsDict -from .helpers.utils_docker import run_docker_compose_config, save_docker_infos -from .helpers.utils_host import get_localhost_ip + + +@pytest.fixture(scope="module") +def temp_folder( + request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory +) -> Path: + """**Module scoped** temporary folder""" + prefix = __name__.replace(".", "_") + return tmp_path_factory.mktemp( + basename=f"{prefix}_temp_folder_{request.module.__name__}", numbered=True + ) @pytest.fixture(scope="session") @@ -88,8 +99,8 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: @pytest.fixture(scope="module") def env_file_for_testing( - testing_environ_vars: dict[str, str], temp_folder: Path, + testing_environ_vars: dict[str, str], osparc_simcore_root_dir: Path, ) -> Iterator[Path]: """Dumps all the environment variables into an $(temp_folder)/.env.test file diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py index eb8e9968988..47508c9d8e2 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py @@ -16,11 +16,11 @@ import jsonschema import pytest import tenacity +from pytest_simcore.helpers.logging import log_context from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.logging_utils import log_context from settings_library.docker_registry import RegistrySettings -from .helpers.utils_host import get_localhost_ip +from .helpers.host 
import get_localhost_ip log = logging.getLogger(__name__) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index b06c51aa77c..2ce2bc522bd 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -23,9 +23,9 @@ from tenacity.wait import wait_fixed, wait_random_exponential from .helpers.constants import HEADER_STR, MINUTE +from .helpers.dict_tools import copy_from_dict +from .helpers.host import get_localhost_ip from .helpers.typing_env import EnvVarsDict -from .helpers.utils_dict import copy_from_dict -from .helpers.utils_host import get_localhost_ip log = logging.getLogger(__name__) @@ -146,12 +146,6 @@ def docker_client() -> Iterator[docker.client.DockerClient]: client.close() -@pytest.fixture(scope="session") -def keep_docker_up(request: pytest.FixtureRequest) -> bool: - flag: bool = request.config.getoption(name="--keep-docker-up", default=False) - return flag - - @pytest.fixture(scope="module") def docker_swarm( docker_client: docker.client.DockerClient, keep_docker_up: bool diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index 7317f18530b..1f5b9334ac4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -9,8 +9,8 @@ import pytest +from .helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from .helpers.typing_env import EnvVarsDict -from .helpers.utils_envs import load_dotenv, setenvs_from_dict def pytest_addoption(parser: pytest.Parser): diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py index 55e65cd9655..9d675c45e11 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py @@ -29,7 +29,7 @@ PaymentTransactionState, ) -from .helpers.rawdata_fakers import random_payment_transaction +from .helpers.faker_factories import random_payment_transaction @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py index ea6965ff64f..f82636b6633 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py @@ -16,7 +16,7 @@ from models_library.products import ProductName, StripePriceID, StripeTaxRateID from pydantic import EmailStr, parse_obj_as -from .helpers.rawdata_fakers import random_product +from .helpers.faker_factories import random_product _MESSAGE = ( "If set, it overrides the fake value of `{}` fixture." diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py index c5fd1e6da89..6ba011db47c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py @@ -16,7 +16,7 @@ from models_library.users import UserID from pydantic import EmailStr, parse_obj_as -from .helpers.rawdata_fakers import DEFAULT_TEST_PASSWORD, random_user +from .helpers.faker_factories import DEFAULT_TEST_PASSWORD, random_user _MESSAGE = ( "If set, it overrides the fake value of `{}` fixture." 
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_assert.py b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_assert.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_environs.py b/packages/pytest-simcore/src/pytest_simcore/helpers/deprecated_environs.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_environs.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/deprecated_environs.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_dict.py b/packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py similarity index 89% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_dict.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py index 02d451b7da1..b31123d5ff5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_dict.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py @@ -1,7 +1,7 @@ """ Utils to operate with dicts """ from copy import deepcopy -from typing import Any, Mapping, Optional, Union +from typing import Any, Mapping ConfigDict = dict[str, Any] @@ -20,10 +20,7 @@ def copy_from_dict_ex(data: dict[str, Any], exclude: set[str]) -> dict[str, Any] def copy_from_dict( - data: dict[str, Any], - *, - include: Optional[Union[set, dict]] = None, - deep: bool = False + data: dict[str, Any], *, include: set | dict | None = None, deep: bool = False ): # # Analogous to advanced includes from pydantic exports diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py b/packages/pytest-simcore/src/pytest_simcore/helpers/docker.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/docker.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/helpers/docker_registry.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker_registry.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/docker_registry.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_host.py b/packages/pytest-simcore/src/pytest_simcore/helpers/host.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_host.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/host.py diff --git a/packages/pytest-simcore/src/pytest_simcore/logging_utils.py b/packages/pytest-simcore/src/pytest_simcore/helpers/logging.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/logging_utils.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/logging.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_envs.py b/packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py similarity index 100% rename from 
packages/pytest-simcore/src/pytest_simcore/helpers/utils_envs.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_parametrizations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_parametrizations.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py diff --git a/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py similarity index 99% rename from packages/pytest-simcore/src/pytest_simcore/playwright_utils.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py index 11fccadd431..9f7dca473ee 100644 --- a/packages/pytest-simcore/src/pytest_simcore/playwright_utils.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py @@ -9,7 +9,7 @@ from typing import Any, Final from playwright.sync_api import FrameLocator, Page, Request, WebSocket, expect -from pytest_simcore.logging_utils import log_context +from pytest_simcore.helpers.logging import log_context SECOND: Final[int] = 1000 MINUTE: Final[int] = 60 * SECOND diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_tags.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tags.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_tags.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tags.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_scrunch_citations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/scrunch_citations.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_scrunch_citations.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/scrunch_citations.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_public_api.py b/packages/pytest-simcore/src/pytest_simcore/helpers/typing_public_api.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_public_api.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/typing_public_api.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_rate_limit.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_rate_limit.py deleted file mode 100644 index 03e3c987d92..00000000000 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_rate_limit.py +++ /dev/null @@ -1,186 +0,0 @@ -import asyncio -import logging -import math -import time -from functools import wraps -from typing import Awaitable - -from aiohttp import ClientResponse, ClientSession, ClientTimeout - -log = logging.getLogger() - - -def function_duration(func): - @wraps(func) - async def wrapper(*args, **kwargs): - start = time.time() - result = await func(*args, **kwargs) - end = time.time() - elapsed = end - start - log.info("Function '%s' execution took '%0.2f' seconds", func.__name__, elapsed) - return result - - return wrapper - - -def is_rate_limit_reached(result: ClientResponse) -> 
bool: - return "Retry-After" in result.headers - - -async def get_request_result( - client: ClientSession, endpoint_to_check: str -) -> ClientResponse: - result = await client.get(endpoint_to_check) - log.debug("%s\n%s\n%s", result, await result.text(), dict(result.headers)) - return result - - -async def assert_burst_request( - client: ClientSession, - endpoint_to_check: str, - burst: int, -): - functions = [get_request_result(client, endpoint_to_check) for x in range(burst)] - results = await asyncio.gather(*functions) - for result in results: - assert is_rate_limit_reached(result) is False - - -@function_duration -async def assert_burst_rate_limit( - endpoint_to_check: str, average: int, period_sec: int, burst: int -) -> float: - """ - Runs 2 burst sequences with a pause in between and expects the - next request to fail. - """ - - max_rate = period_sec / average - # sleeping 2 times the burst window - burst_window = period_sec / burst - sleep_interval = 2 * burst_window - - log.info( - "Sleeping params: burst_window=%s, sleep_interval=%s, max_rate=%s", - burst_window, - sleep_interval, - max_rate, - ) - - timeout = ClientTimeout(total=10, connect=1, sock_connect=1) - async with ClientSession(timeout=timeout) as client: - - # check can burst in timeframe - await assert_burst_request( - client=client, endpoint_to_check=endpoint_to_check, burst=burst - ) - - log.info("First burst finished") - - await asyncio.sleep(sleep_interval) - - # check that burst in timeframe is ok - await assert_burst_request( - client=client, endpoint_to_check=endpoint_to_check, burst=burst - ) - - log.info("Second burst finished") - - # check that another request after the burst fails - result = await get_request_result(client, endpoint_to_check) - assert is_rate_limit_reached(result) is True - - return sleep_interval - - -@function_duration -async def assert_steady_rate_in_5_seconds( - endpoint_to_check: str, average: int, period_sec: int, **_ -) -> float: - """Creates requests at a continuous rate without considering burst limits""" - # run tests for at least 5 seconds - max_rate = period_sec / average # seconds per request - requests_to_make = int(math.ceil(max_rate * 5)) - - sleep_interval = max_rate - - log.info( - "Steady rate params: sleep_interval=%s, max_rate=%s, requests_to_make=%s", - sleep_interval, - max_rate, - requests_to_make, - ) - - timeout = ClientTimeout(total=10, connect=1, sock_connect=1) - async with ClientSession(timeout=timeout) as client: - - for i in range(requests_to_make): - log.info("Request %s", i) - result = await get_request_result(client, endpoint_to_check) - assert is_rate_limit_reached(result) is False - log.info("Sleeping for %s s", sleep_interval) - await asyncio.sleep(sleep_interval) - - return sleep_interval - - -CHECKS_TO_RUN: list[Awaitable] = [ - assert_steady_rate_in_5_seconds, - assert_burst_rate_limit, -] - - -@function_duration -async def run_rate_limit_configuration_checks( - endpoint_to_check: str, average: int = 0, period_sec: int = 1, burst: int = 1 -): - """ - Runner to start all the checks for the firewall configuration - - All tests must return the period to sleep before the next test can start.
- - All defaults are taken from Traefik's docs - SEE https://doc.traefik.io/traefik/middlewares/ratelimit/ - """ - - log.warning( - "Runtime will vary based on the rate limit configuration of the service\n" - ) - - for awaitable in CHECKS_TO_RUN: - log.info("<<<< Starting test '%s'...", awaitable.__name__) - sleep_before_next_test = await awaitable( - endpoint_to_check=endpoint_to_check, - average=average, - period_sec=period_sec, - burst=burst, - ) - log.info(">>>> Finished testing '%s'\n", awaitable.__name__) - - log.info(">>>> Sleeping '%s' seconds before next test", sleep_before_next_test) - await asyncio.sleep(sleep_before_next_test) - - log.info("All tests completed") - - -if __name__ == "__main__": - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s %(levelname)s %(threadName)s [%(name)s] %(message)s", - ) - - # How to use: the parameters below are derived from the following labels: - # - traefik.http.middlewares.ratelimit-${SWARM_STACK_NAME}_api-server.ratelimit.average=1 - # - traefik.http.middlewares.ratelimit-${SWARM_STACK_NAME}_api-server.ratelimit.period=1m - # - traefik.http.middlewares.ratelimit-${SWARM_STACK_NAME}_api-server.ratelimit.burst=10 - # Will result in: average=1, period_sec=60, burst=10 - # WARNING: in the above example the test will run for 5 hours :\ - - asyncio.get_event_loop().run_until_complete( - run_rate_limit_configuration_checks( - endpoint_to_check="http://localhost:10081/", - average=1, - period_sec=60, - burst=10, - ) - ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_webserver.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_ports_data.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/faker_webserver.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_ports_data.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_services.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_services.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py similarity index 98% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py index 92e4fb3ada1..062a33d693a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py @@ -15,8 +15,8 @@ from simcore_service_webserver.security.api import clean_auth_policy_cache from yarl import URL -from .rawdata_fakers import DEFAULT_FAKER, DEFAULT_TEST_PASSWORD, random_user -from .utils_assert import assert_status +from .assert_checks import assert_status +from .faker_factories import DEFAULT_FAKER, DEFAULT_TEST_PASSWORD, random_user # WARNING: DO NOT use UserDict; it is already in https://docs.python.org/3/library/collections.html#collections.UserDict class UserRowDict(TypedDict):
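The removed utils_rate_limit.py helper above derives its timing from Traefik's ratelimit middleware labels: `average` requests allowed per `period`, with a `burst` allowance on top. A minimal sketch of that derivation, using the example label values from the module's __main__ block; the period_to_seconds helper below is hypothetical, not part of the codebase:

def period_to_seconds(period: str) -> int:
    # Convert a Traefik duration such as "30s" or "1m" into seconds (hypothetical helper).
    units = {"s": 1, "m": 60, "h": 3600}
    return int(period[:-1]) * units[period[-1]]

# From the labels: ratelimit.average=1, ratelimit.period=1m, ratelimit.burst=10
average, period_sec, burst = 1, period_to_seconds("1m"), 10

steady_interval = period_sec / average   # 60.0 s between steady-rate requests
burst_window = period_sec / burst        # 6.0 s per burst slot
pause_between_bursts = 2 * burst_window  # 12.0 s; the helper sleeps twice the burst window

print(steady_interval, burst_window, pause_between_bursts)

With these values the steady-rate check alone issues ceil(60 * 5) = 300 requests spaced one minute apart, which is why the module warns that a full run can take about 5 hours.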
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_parametrizations.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_unit_with_db.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_parametrizations.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py similarity index 99% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index 1df38757bbb..63c57e00559 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -20,7 +20,7 @@ from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.utils import now_str -from .utils_assert import assert_status +from .assert_checks import assert_status def empty_project_data(): diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_tokens.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_tokens.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/helpers/utils_tokens.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/webserver_tokens.py diff --git a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py index 62baba3623e..b6c0a5aad3b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py @@ -21,7 +21,7 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from .helpers.utils_host import get_localhost_ip +from .helpers.host import get_localhost_ip @pytest.fixture(scope="session") diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index f2c316fa289..5c8df1ff6c5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -40,10 +40,10 @@ import yaml from pydantic import parse_obj_as from pytest_mock import MockerFixture, MockType +from pytest_simcore.helpers.docker import get_service_published_port +from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.httpx_client_base_dev import AsyncClientCaptureWrapper -from pytest_simcore.helpers.utils_docker import get_service_published_port -from pytest_simcore.helpers.utils_envs import EnvVarsDict -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from .helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 0ef03b40e19..46cee6fbeeb 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -4,10 +4,10 @@ import pytest from faker import Faker +from pytest_simcore.helpers.docker import get_service_published_port +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_docker import get_service_published_port -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip from
settings_library.s3 import S3Settings diff --git a/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py b/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py deleted file mode 100644 index 3e373216283..00000000000 --- a/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py +++ /dev/null @@ -1,25 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -import warnings -from typing import Iterator - -import pytest - -warnings.warn( - f"{__name__} is deprecated, we highly recommend to use pytest.monkeypatch at function-scope level." - "Large scopes lead to complex problems during tests", - DeprecationWarning, -) -# Some extras to overcome https://github.com/pytest-dev/pytest/issues/363 -# SEE https://github.com/pytest-dev/pytest/issues/363#issuecomment-289830794 - - -@pytest.fixture(scope="module") -def monkeypatch_module(request: pytest.FixtureRequest) -> Iterator[pytest.MonkeyPatch]: - assert request.scope == "module" - - mpatch_module = pytest.MonkeyPatch() - yield mpatch_module - mpatch_module.undo() diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py index a64e21d206c..6e9f06a146a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py @@ -10,15 +10,15 @@ import pytest import sqlalchemy as sa import tenacity +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip -from .helpers.utils_postgres import PostgresTestConfig, migrated_pg_tables_context +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip +from .helpers.postgres_tools import PostgresTestConfig, migrated_pg_tables_context _TEMPLATE_DB_TO_RESTORE = "template_simcore_db" diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 7856547cc9b..47188400e79 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -15,9 +15,9 @@ from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip from .helpers.typing_env import EnvVarsDict -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip _logger = logging.getLogger(__name__) diff --git a/packages/pytest-simcore/src/pytest_simcore/redis_service.py b/packages/pytest-simcore/src/pytest_simcore/redis_service.py index 9651255198b..3a84f0ceb03 100644 --- a/packages/pytest-simcore/src/pytest_simcore/redis_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/redis_service.py @@ -15,8 +15,8 @@ from tenacity.wait import wait_fixed from yarl import URL -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip log 
= logging.getLogger(__name__) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_dask_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_dask_service.py index 2fc5220104a..26f16b02a41 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_dask_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_dask_service.py @@ -12,8 +12,8 @@ from models_library.clusters import InternalClusterAuthentication, TLSAuthentication from pydantic import AnyUrl -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 51f278fbde4..cbaa9ee0a32 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -7,6 +7,7 @@ import logging import warnings from dataclasses import dataclass +from typing import Iterator import aiohttp import pytest @@ -18,9 +19,9 @@ from yarl import URL from .helpers.constants import MINUTE +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip from .helpers.typing_env import EnvVarsDict -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip log = logging.getLogger(__name__) @@ -178,9 +179,28 @@ def simcore_services_ready( monkeypatch.setenv(f"{env_prefix}_PORT", str(endpoint.port)) +@pytest.fixture(scope="module") +def _monkeypatch_module(request: pytest.FixtureRequest) -> Iterator[pytest.MonkeyPatch]: + # WARNING: Temporarily ONLY for simcore_services_ready_module + assert request.scope == "module" + + warnings.warn( + f"{__name__} is deprecated, we highly recommend using pytest.monkeypatch at function-scope level. "
+ "Large scopes lead to complex problems during tests", + DeprecationWarning, + stacklevel=1, + ) + # Some extras to overcome https://github.com/pytest-dev/pytest/issues/363 + # SEE https://github.com/pytest-dev/pytest/issues/363#issuecomment-289830794 + + mpatch_module = pytest.MonkeyPatch() + yield mpatch_module + mpatch_module.undo() + + @pytest.fixture(scope="module") def simcore_services_ready_module( - services_endpoint: dict[str, URL], monkeypatch_module: pytest.MonkeyPatch + services_endpoint: dict[str, URL], _monkeypatch_module: pytest.MonkeyPatch ) -> None: warnings.warn( "This fixture uses deprecated monkeypatch_module fixture" @@ -195,5 +215,5 @@ def simcore_services_ready_module( assert endpoint.host - monkeypatch_module.setenv(f"{env_prefix}_HOST", endpoint.host) - monkeypatch_module.setenv(f"{env_prefix}_PORT", str(endpoint.port)) + _monkeypatch_module.setenv(f"{env_prefix}_HOST", endpoint.host) + _monkeypatch_module.setenv(f"{env_prefix}_PORT", str(endpoint.port)) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index f5ab4302dd1..9628d1058c9 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -15,8 +15,8 @@ from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL -from .helpers.utils_docker import get_service_published_port -from .helpers.utils_host import get_localhost_ip +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip @pytest.fixture(scope="module") diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py index fb1699fabd3..3d5d083edfe 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py @@ -9,7 +9,7 @@ from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL -from .helpers.utils_docker import get_service_published_port +from .helpers.docker import get_service_published_port @pytest.fixture(scope="module") diff --git a/packages/pytest-simcore/src/pytest_simcore/pytest_socketio.py b/packages/pytest-simcore/src/pytest_simcore/socketio.py similarity index 100% rename from packages/pytest-simcore/src/pytest_simcore/pytest_socketio.py rename to packages/pytest-simcore/src/pytest_simcore/socketio.py diff --git a/packages/pytest-simcore/src/pytest_simcore/websocket_client.py b/packages/pytest-simcore/src/pytest_simcore/socketio_client.py similarity index 97% rename from packages/pytest-simcore/src/pytest_simcore/websocket_client.py rename to packages/pytest-simcore/src/pytest_simcore/socketio_client.py index eb582b607d3..1dbe8d111b0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/websocket_client.py +++ b/packages/pytest-simcore/src/pytest_simcore/socketio_client.py @@ -9,7 +9,7 @@ import pytest import socketio from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import status from yarl import URL diff --git a/packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py b/packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py deleted file mode 100644 index 7986ead3ad8..00000000000 --- 
diff --git a/packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py b/packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py deleted file mode 100644 index 7986ead3ad8..00000000000 --- a/packages/pytest-simcore/src/pytest_simcore/tmp_path_extra.py +++ /dev/null @@ -1,25 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -""" - Extends the tmp_path fixture - https://docs.pytest.org/en/6.2.x/tmpdir.html#the-tmp-path-fixture - - NOTE: use tmp_path instead of tmpdir - NOTE: default base temporary directory can be set as `pytest --basetemp=mydir` - -""" -from pathlib import Path - -import pytest - - -@pytest.fixture(scope="module") -def temp_folder( - request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory -) -> Path: - """Module scoped temporary folder""" - prefix = __name__.replace(".", "_") - return tmp_path_factory.mktemp( - basename=f"{prefix}_temp_folder_{request.module.__name__}", numbered=True - ) diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py index 8bbaf3913be..462dfdb29e2 100644 --- a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py @@ -11,7 +11,7 @@ from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL -from .helpers.utils_docker import get_service_published_port +from .helpers.docker import get_service_published_port @pytest.fixture(scope="module") diff --git a/packages/pytest-simcore/tests/test_helpers_utils_dict.py b/packages/pytest-simcore/tests/test_helpers_utils_dict.py index 524d6a44e30..9fa34442a99 100644 --- a/packages/pytest-simcore/tests/test_helpers_utils_dict.py +++ b/packages/pytest-simcore/tests/test_helpers_utils_dict.py @@ -7,8 +7,8 @@ import sys import pytest +from pytest_simcore.helpers.dict_tools import copy_from_dict, get_from_dict from pytest_simcore.helpers.typing_docker import TaskDict -from pytest_simcore.helpers.utils_dict import copy_from_dict, get_from_dict @pytest.fixture diff --git a/packages/pytest-simcore/tests/test_helpers_utils_envs.py b/packages/pytest-simcore/tests/test_helpers_utils_envs.py index 7117bf99d60..458a72bb063 100644 --- a/packages/pytest-simcore/tests/test_helpers_utils_envs.py +++ b/packages/pytest-simcore/tests/test_helpers_utils_envs.py @@ -1,7 +1,7 @@ from pathlib import Path from textwrap import dedent -from pytest_simcore.helpers.utils_envs import EnvVarsDict, load_dotenv +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, load_dotenv def test_load_envfile(tmp_path: Path): diff --git a/packages/service-integration/tests/conftest.py b/packages/service-integration/tests/conftest.py index 07e4652b2ea..2f148482ea5 100644 --- a/packages/service-integration/tests/conftest.py +++ b/packages/service-integration/tests/conftest.py @@ -14,8 +14,8 @@ pytest_plugins = [ "pytest_simcore.pydantic_models", - "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", ] _CURRENT_DIR = (
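The tmp_path_extra plugin deleted above provided a module-scoped temp_folder built on tmp_path_factory; its removal points tests toward pytest's built-ins. A minimal sketch of both replacements; the test and fixture names are illustrative, not from the codebase:

from pathlib import Path

import pytest


def test_writes_a_file(tmp_path: Path) -> None:
    # Function-scoped: the built-in tmp_path gives each test a fresh directory.
    (tmp_path / "data.txt").write_text("hello")
    assert (tmp_path / "data.txt").read_text() == "hello"


@pytest.fixture(scope="module")
def module_tmp_dir(tmp_path_factory: pytest.TempPathFactory) -> Path:
    # Wider scopes can still use the session-scoped tmp_path_factory directly.
    return tmp_path_factory.mktemp("module_tmp_dir", numbered=True)

As the removed module's docstring noted, the base temporary directory for both can be overridden with `pytest --basetemp=mydir`.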
diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index 7357f5f1ff9..8e260d81d49 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -10,7 +10,7 @@ from aiohttp.test_utils import TestClient from faker import Faker from pydantic import BaseModel, parse_obj_as -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskId from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index 255f4fef7fc..b4fda99695e 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -19,7 +19,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from pydantic import parse_obj_as -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId from servicelib.aiohttp.rest_middlewares import append_rest_middlewares diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py index c5a72c5645d..4aaca5e8d84 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py @@ -7,7 +7,7 @@ import pytest from aiohttp import ClientResponseError, web from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks import client as lr_client from servicelib.aiohttp.long_running_tasks.client import ( diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py index 14eebf23212..941ae31359d 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py @@ -19,7 +19,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from pydantic import create_model, parse_obj_as -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks._server import ( RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index 7329ecee428..81734453cc4 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -16,10 +16,10 @@ from settings_library.redis import RedisDatabase, RedisSettings pytest_plugins = [ - "pytest_simcore.container_pause", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.docker", "pytest_simcore.environment_configs", "pytest_simcore.file_extra", "pytest_simcore.pytest_global_environs", @@ -28,7 +28,6 @@ "pytest_simcore.repository_paths", "pytest_simcore.schemas",
"pytest_simcore.simcore_service_library_fixtures", - "pytest_simcore.tmp_path_extra", ] diff --git a/packages/service-library/tests/fastapi/test_rabbitmq.py b/packages/service-library/tests/fastapi/test_rabbitmq.py index 2112f2bafdf..9c94cfa0766 100644 --- a/packages/service-library/tests/fastapi/test_rabbitmq.py +++ b/packages/service-library/tests/fastapi/test_rabbitmq.py @@ -12,8 +12,8 @@ from models_library.rabbitmq_messages import LoggerRabbitMessage, RabbitMessageBase from pydantic import ValidationError from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from servicelib.fastapi.rabbitmq import get_rabbitmq_client, setup_rabbit from servicelib.rabbitmq import BIND_TO_ALL_TOPICS, RabbitMQClient from settings_library.rabbit import RabbitSettings diff --git a/packages/settings-library/src/settings_library/comp_services.py b/packages/settings-library/src/settings_library/comp_services.py index 787882143f5..e3cb628f7b7 100644 --- a/packages/settings-library/src/settings_library/comp_services.py +++ b/packages/settings-library/src/settings_library/comp_services.py @@ -17,14 +17,14 @@ class ComputationalServices(BaseCustomSettings): @validator("DEFAULT_MAX_NANO_CPUS", pre=True) @classmethod - def set_default_cpus_if_negative(cls, v): + def _set_default_cpus_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: v = _DEFAULT_MAX_NANO_CPUS_VALUE return v @validator("DEFAULT_MAX_MEMORY", pre=True) @classmethod - def set_default_memory_if_negative(cls, v): + def _set_default_memory_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: v = _DEFAULT_MAX_MEMORY_VALUE return v diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index 3043e46f54a..bb365cb9785 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -25,7 +25,7 @@ class RegistrySettings(BaseCustomSettings): @validator("REGISTRY_PATH", pre=True) @classmethod - def escape_none_string(cls, v) -> Any | None: + def _escape_none_string(cls, v) -> Any | None: return None if v == "None" else v @cached_property diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index bd5ed0ab261..b15bf209405 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -33,7 +33,7 @@ class SMTPSettings(BaseCustomSettings): @root_validator @classmethod - def both_credentials_must_be_set(cls, values): + def _both_credentials_must_be_set(cls, values): username = values.get("SMTP_USERNAME") password = values.get("SMTP_PASSWORD") @@ -45,7 +45,7 @@ def both_credentials_must_be_set(cls, values): @root_validator @classmethod - def enabled_tls_required_authentication(cls, values): + def _enabled_tls_required_authentication(cls, values): smtp_protocol = values.get("SMTP_PROTOCOL") username = values.get("SMTP_USERNAME") diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index f2f3f5cd9aa..2a5d12f1bd7 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -21,9 +21,9 @@ def 
auth_required(self) -> bool: # for details see https://github.com/ITISFoundation/osparc-issues/issues/1264 return self.STORAGE_USERNAME is not None and self.STORAGE_PASSWORD is not None - @classmethod @root_validator - def validate_auth_fields(cls, values): + @classmethod + def _validate_auth_fields(cls, values): username = values["STORAGE_USERNAME"] password = values["STORAGE_PASSWORD"] if (username is None) != (password is None): diff --git a/packages/settings-library/tests/test_application.py b/packages/settings-library/tests/test_application.py index 8447a253a21..8f847e5dd24 100644 --- a/packages/settings-library/tests/test_application.py +++ b/packages/settings-library/tests/test_application.py @@ -5,8 +5,8 @@ import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.application import BaseApplicationSettings diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index f6afeeee093..aa1ffa95a23 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -14,7 +14,7 @@ from pydantic import BaseModel, BaseSettings, ValidationError from pydantic.fields import Field from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import setenvs_from_envfile +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import ( _DEFAULTS_TO_NONE_MSG, BaseCustomSettings, diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index 288f121bec4..d54d40bf925 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -7,7 +7,7 @@ import pytest from pydantic import Field, ValidationError -from pytest_simcore.helpers.utils_envs import setenvs_from_envfile +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import BaseCustomSettings, DefaultFromEnvFactoryError from settings_library.basic_types import PortInt diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index 449218857c0..1cd3978503e 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -7,8 +7,8 @@ import pytest from pydantic import ValidationError +from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import delenvs_from_dict from settings_library.email import EmailProtocol, SMTPSettings diff --git a/packages/settings-library/tests/test_node_ports_settings.py b/packages/settings-library/tests/test_node_ports_settings.py index 1ac178a93ef..0adbd4efb4d 100644 --- a/packages/settings-library/tests/test_node_ports_settings.py +++ b/packages/settings-library/tests/test_node_ports_settings.py @@ -1,5 +1,5 @@ import pytest -from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from settings_library.node_ports import StorageAuthSettings diff --git a/packages/settings-library/tests/test_twilio.py b/packages/settings-library/tests/test_twilio.py index 0d77ac0bd39..6f2830ea4aa 100644 --- a/packages/settings-library/tests/test_twilio.py 
+++ b/packages/settings-library/tests/test_twilio.py @@ -3,7 +3,7 @@ # pylint: disable=unused-variable import pytest -from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from settings_library.twilio import TwilioSettings diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 0e6d5398cf7..665358ea3b4 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -12,8 +12,8 @@ import typer from dotenv import dotenv_values from pydantic import Field, SecretStr +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_envfile from settings_library.base import BaseCustomSettings from settings_library.utils_cli import ( create_json_encoder_wo_secrets, diff --git a/packages/simcore-sdk/tests/conftest.py b/packages/simcore-sdk/tests/conftest.py index 88f9b296442..e5cde2bc51c 100644 --- a/packages/simcore-sdk/tests/conftest.py +++ b/packages/simcore-sdk/tests/conftest.py @@ -10,8 +10,8 @@ import pytest import simcore_sdk +from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_postgres import PostgresTestConfig from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -31,7 +31,6 @@ "pytest_simcore.services_api_mocks_for_aiohttp_clients", "pytest_simcore.simcore_services", "pytest_simcore.simcore_storage_service", - "pytest_simcore.tmp_path_extra", ] diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index 20510ff7f23..8ced749bcf3 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -18,7 +18,7 @@ from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID from pydantic import parse_obj_as -from pytest_simcore.helpers.rawdata_fakers import random_project, random_user +from pytest_simcore.helpers.faker_factories import random_project, random_user from settings_library.r_clone import RCloneSettings, S3Provider from settings_library.s3 import S3Settings from simcore_postgres_database.models.comp_pipeline import comp_pipeline diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index bb796481897..67e4bed3c68 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -21,7 +21,7 @@ from models_library.users import UserID from pydantic import ByteSize, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_parametrizations import byte_size_ids +from pytest_simcore.helpers.parametrizations import byte_size_ids from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings from simcore_sdk.node_ports_common import exceptions, filemanager diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py 
b/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py index ea08391b826..3fb97424daa 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py @@ -9,7 +9,7 @@ from aiohttp import ClientResponseError, ClientSession from aiohttp.client_exceptions import ClientConnectionError from aioresponses import aioresponses -from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from simcore_sdk.node_ports_common.storage_client import retry_request _ROUTE_ALWAYS_200_OK: Final[str] = "http://always-200-ok" diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index 156ef27f847..91e46c5bd61 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -21,7 +21,7 @@ from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, parse_obj_as -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.aiohttp import status from simcore_sdk.node_ports_common import exceptions from simcore_sdk.node_ports_common._filemanager import _get_https_link_if_storage_secure diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index e60cf7f53c4..25180a27491 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -8,7 +8,7 @@ import pytest import simcore_service_api_server from dotenv import dotenv_values -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -27,7 +27,6 @@ "pytest_simcore.repository_paths", "pytest_simcore.schemas", "pytest_simcore.services_api_mocks_for_aiohttp_clients", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py index 783933db3cb..57450561ce4 100644 --- a/services/api-server/tests/unit/_with_db/conftest.py +++ b/services/api-server/tests/unit/_with_db/conftest.py @@ -24,13 +24,13 @@ from models_library.api_schemas_api_server.api_keys import ApiKeyInDB from pydantic import PositiveInt from pytest_mock import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import ( +from pytest_simcore.helpers.faker_factories import ( random_api_key, random_product, random_user, ) +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_postgres_database.models.api_keys import api_keys from simcore_postgres_database.models.base import metadata from simcore_postgres_database.models.products import products diff --git a/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py b/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py index 4cb770979f0..f61c1ae4153 100644 --- a/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py +++ b/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py @@ -5,7 +5,7 @@ import logging 
-from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_api_server.core.settings import ApplicationSettings, BootModeEnum from yarl import URL diff --git a/services/api-server/tests/unit/api_studies/conftest.py b/services/api-server/tests/unit/api_studies/conftest.py index 50ec7b18c57..e25d0aedd79 100644 --- a/services/api-server/tests/unit/api_studies/conftest.py +++ b/services/api-server/tests/unit/api_studies/conftest.py @@ -9,7 +9,7 @@ import pytest from faker import Faker -from pytest_simcore.helpers.faker_webserver import ( +from pytest_simcore.helpers.webserver_fake_ports_data import ( PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA, ) from simcore_service_api_server.models.schemas.studies import StudyID diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 7d7017fe1cd..d4ed6f04229 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -38,8 +38,8 @@ from packaging.version import Version from pydantic import EmailStr, HttpUrl, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from requests.auth import HTTPBasicAuth from respx import MockRouter diff --git a/services/api-server/tests/unit/test_core_settings.py b/services/api-server/tests/unit/test_core_settings.py index d95ea6066f6..fbb9f875b65 100644 --- a/services/api-server/tests/unit/test_core_settings.py +++ b/services/api-server/tests/unit/test_core_settings.py @@ -4,7 +4,7 @@ import pytest -from pytest_simcore.helpers.utils_envs import ( +from pytest_simcore.helpers.monkeypatch_envs import ( EnvVarsDict, delenvs_from_dict, setenvs_from_dict, diff --git a/services/api-server/tests/unit/test_services_rabbitmq.py b/services/api-server/tests/unit/test_services_rabbitmq.py index 34c7ae48af0..6f67b3386bd 100644 --- a/services/api-server/tests/unit/test_services_rabbitmq.py +++ b/services/api-server/tests/unit/test_services_rabbitmq.py @@ -28,7 +28,7 @@ from models_library.users import UserID from pydantic import ValidationError from pytest_mock import MockerFixture, MockFixture -from pytest_simcore.helpers.utils_envs import ( +from pytest_simcore.helpers.monkeypatch_envs import ( EnvVarsDict, delenvs_from_dict, setenvs_from_dict, diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 1ae3a3953fc..5a1fb3d1d64 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -45,8 +45,8 @@ ) from pydantic import ByteSize, PositiveInt, parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from settings_library.rabbit import RabbitSettings from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ( @@ -81,7 +81,6 @@ "pytest_simcore.environment_configs", 
"pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/autoscaling/tests/unit/test_api_health.py b/services/autoscaling/tests/unit/test_api_health.py index 9ac0dccac01..6fa53018e85 100644 --- a/services/autoscaling/tests/unit/test_api_health.py +++ b/services/autoscaling/tests/unit/test_api_health.py @@ -5,7 +5,7 @@ import httpx import pytest from moto.server import ThreadedMotoServer -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_autoscaling.api.health import _StatusGet from starlette import status diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py index 89c12efc602..f6bdb8cbc9f 100644 --- a/services/autoscaling/tests/unit/test_core_settings.py +++ b/services/autoscaling/tests/unit/test_core_settings.py @@ -8,7 +8,7 @@ import pytest from faker import Faker from pydantic import ValidationError -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_autoscaling.core.settings import ( ApplicationSettings, EC2InstancesSettings, diff --git a/services/autoscaling/tests/unit/test_main.py b/services/autoscaling/tests/unit/test_main.py index 4f899fdb1fb..525748023ec 100644 --- a/services/autoscaling/tests/unit/test_main.py +++ b/services/autoscaling/tests/unit/test_main.py @@ -3,7 +3,7 @@ # pylint:disable=redefined-outer-name -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict def test_main_app(app_environment: EnvVarsDict): diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index 5ef0f820902..31b18631044 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -33,7 +33,7 @@ from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage from pydantic import ByteSize, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import EC2InstanceData from simcore_service_autoscaling.modules.auto_scaling_core import auto_scale_cluster diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py index e454c9d2c8d..a947fda1026 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py @@ -35,7 +35,7 @@ from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage from pydantic import ByteSize, parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import AssociatedInstance, Cluster from 
simcore_service_autoscaling.modules.auto_scaling_core import ( diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py index 8395792d052..a065df08425 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py @@ -10,7 +10,7 @@ import pytest from fastapi import FastAPI from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_autoscaling.core.settings import ApplicationSettings _FAST_POLL_INTERVAL = 1 diff --git a/services/autoscaling/tests/unit/test_modules_dask.py b/services/autoscaling/tests/unit/test_modules_dask.py index b6b78fdd7b6..096dca26d2b 100644 --- a/services/autoscaling/tests/unit/test_modules_dask.py +++ b/services/autoscaling/tests/unit/test_modules_dask.py @@ -18,7 +18,7 @@ TLSAuthentication, ) from pydantic import AnyUrl, ByteSize, parse_obj_as -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from simcore_service_autoscaling.core.errors import ( DaskNoWorkersError, DaskSchedulerNotFoundError, diff --git a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py index 95879b19158..5755024f830 100644 --- a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py +++ b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py @@ -17,8 +17,8 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import Node as DockerNode from pydantic import parse_obj_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_autoscaling.core.errors import Ec2InvalidDnsNameError from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import AssociatedInstance, EC2InstanceData diff --git a/services/autoscaling/tests/unit/test_utils_docker.py b/services/autoscaling/tests/unit/test_utils_docker.py index ecea7297ebd..8012d932872 100644 --- a/services/autoscaling/tests/unit/test_utils_docker.py +++ b/services/autoscaling/tests/unit/test_utils_docker.py @@ -32,7 +32,7 @@ ) from pydantic import ByteSize, parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from servicelib.docker_utils import to_datetime from settings_library.docker_registry import RegistrySettings from simcore_service_autoscaling.core.settings import ApplicationSettings diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index 7d7ecac82b0..8ab832aa65f 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -13,8 +13,8 @@ import simcore_service_catalog from asgi_lifespan import LifespanManager from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_catalog.core.application import create_app from 
simcore_service_catalog.core.settings import ApplicationSettings @@ -29,7 +29,6 @@ "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/catalog/tests/unit/test_services_director.py b/services/catalog/tests/unit/test_services_director.py index a6307f2a5fe..a683be39525 100644 --- a/services/catalog/tests/unit/test_services_director.py +++ b/services/catalog/tests/unit/test_services_director.py @@ -10,8 +10,8 @@ import pytest import respx from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx.router import MockRouter from simcore_service_catalog.api.dependencies.director import get_director_api from simcore_service_catalog.core.settings import ApplicationSettings diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 25c5c038fe7..af5ba8c5580 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -21,12 +21,12 @@ from models_library.users import UserID from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_postgres import ( +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import ( PostgresTestConfig, insert_and_get_row_lifespan, ) +from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import users from simcore_service_catalog.core.settings import ApplicationSettings diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index 533bb74c94b..31d9575a2bd 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -23,7 +23,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.ec2 import EC2Settings from settings_library.rabbit import RabbitSettings @@ -39,7 +39,7 @@ pytest_plugins = [ "pytest_simcore.aws_ec2_service", "pytest_simcore.aws_server", - "pytest_simcore.container_pause", + "pytest_simcore.docker", "pytest_simcore.dask_scheduler", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", @@ -48,7 +48,6 @@ "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/clusters-keeper/tests/unit/test_api_health.py b/services/clusters-keeper/tests/unit/test_api_health.py index dcd67cb4e98..734620afa1b 100644 --- a/services/clusters-keeper/tests/unit/test_api_health.py +++ b/services/clusters-keeper/tests/unit/test_api_health.py @@ -5,7 +5,7 @@ import httpx import pytest from moto.server import ThreadedMotoServer -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from 
pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_clusters_keeper.api.health import _StatusGet from starlette import status diff --git a/services/clusters-keeper/tests/unit/test_core_settings.py b/services/clusters-keeper/tests/unit/test_core_settings.py index 4e1df0c9e06..7f97afad58b 100644 --- a/services/clusters-keeper/tests/unit/test_core_settings.py +++ b/services/clusters-keeper/tests/unit/test_core_settings.py @@ -9,7 +9,7 @@ import pytest from aws_library.ec2.models import EC2InstanceBootSpecific from pydantic import ValidationError -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_clusters_keeper.core.settings import ApplicationSettings from types_aiobotocore_ec2.literals import InstanceTypeType diff --git a/services/clusters-keeper/tests/unit/test_main.py b/services/clusters-keeper/tests/unit/test_main.py index 4cf5db92500..96d7fb8507d 100644 --- a/services/clusters-keeper/tests/unit/test_main.py +++ b/services/clusters-keeper/tests/unit/test_main.py @@ -3,7 +3,7 @@ # pylint:disable=redefined-outer-name -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict def test_main_app(app_environment: EnvVarsDict): diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters.py b/services/clusters-keeper/tests/unit/test_modules_clusters.py index ea9329d35a8..47a266dbb4d 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters.py @@ -15,7 +15,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from parse import Result, search -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_clusters_keeper._meta import VERSION as APP_VERSION from simcore_service_clusters_keeper.core.errors import Ec2InstanceNotFoundError from simcore_service_clusters_keeper.core.settings import ( diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py b/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py index 4ece631188f..de92fd8a64d 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py @@ -17,8 +17,8 @@ from models_library.users import UserID from models_library.wallets import WalletID from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_clusters_keeper.core.settings import ApplicationSettings from simcore_service_clusters_keeper.modules.clusters import ( cluster_heartbeat, diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py b/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py index d738a5cc05e..0c9c52eab4c 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py @@ -10,7 +10,7 @@ import pytest from fastapi import FastAPI from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, 
setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_clusters_keeper.core.settings import ApplicationSettings _FAST_POLL_INTERVAL = 1 diff --git a/services/clusters-keeper/tests/unit/test_modules_remote_debug.py b/services/clusters-keeper/tests/unit/test_modules_remote_debug.py index 4a676451334..dbb5a91922e 100644 --- a/services/clusters-keeper/tests/unit/test_modules_remote_debug.py +++ b/services/clusters-keeper/tests/unit/test_modules_remote_debug.py @@ -4,8 +4,8 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict @pytest.fixture diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index 206d751bb03..437774a8c17 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -24,7 +24,7 @@ ) from models_library.utils.json_serialization import json_dumps from pydantic import ByteSize, parse_obj_as -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_clusters_keeper.core.settings import ApplicationSettings from simcore_service_clusters_keeper.utils.clusters import ( _prepare_environment_variables, diff --git a/services/clusters-keeper/tests/unit/test_utils_ec2.py b/services/clusters-keeper/tests/unit/test_utils_ec2.py index e7d1be224d3..cc466d113ac 100644 --- a/services/clusters-keeper/tests/unit/test_utils_ec2.py +++ b/services/clusters-keeper/tests/unit/test_utils_ec2.py @@ -6,7 +6,7 @@ from faker import Faker from models_library.users import UserID from models_library.wallets import WalletID -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_clusters_keeper.core.settings import ApplicationSettings from simcore_service_clusters_keeper.utils.ec2 import ( _APPLICATION_TAG_KEY, diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index 377b2ad3255..edc92c87969 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ b/services/dask-sidecar/tests/unit/conftest.py @@ -22,8 +22,8 @@ from pydantic import AnyUrl, parse_obj_as from pytest_localftpserver.servers import ProcessFTPServer from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.s3 import S3Settings from simcore_service_dask_sidecar.file_utils import _s3fs_settings_from_s3_settings from yarl import URL @@ -37,7 +37,6 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 3ea9811f961..eafe6bb15fc 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -25,8 +25,11 @@ from models_library.projects import Node, NodesDict from models_library.users import UserID from pytest_mock import MockerFixture +from 
pytest_simcore.helpers.monkeypatch_envs import ( + setenvs_from_dict, + setenvs_from_envfile, +) from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict, setenvs_from_envfile from simcore_service_director_v2.core.application import init_app from simcore_service_director_v2.core.settings import AppSettings from starlette.testclient import ASGI3App, TestClient @@ -44,7 +47,7 @@ "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", - "pytest_simcore.pytest_socketio", + "pytest_simcore.socketio", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", @@ -52,7 +55,6 @@ "pytest_simcore.simcore_dask_service", "pytest_simcore.simcore_services", "pytest_simcore.simcore_storage_service", - "pytest_simcore.tmp_path_extra", ] logger = logging.getLogger(__name__) diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 652b9fb6dff..9c510d5f23f 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -28,9 +28,9 @@ from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.users import UserID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_postgres import PostgresTestConfig from settings_library.rabbit import RabbitSettings from starlette import status from starlette.testclient import TestClient diff --git a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py index 0bdfb73b5c8..ed43a5c4451 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py +++ b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py @@ -26,9 +26,9 @@ ) from models_library.users import UserID from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip from servicelib.common_headers import ( X_DYNAMIC_SIDECAR_REQUEST_DNS, X_DYNAMIC_SIDECAR_REQUEST_SCHEME, diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 0d187de43d2..e8a877b3763 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -48,9 +48,9 @@ from models_library.users import UserID from pydantic import AnyHttpUrl, parse_obj_as from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from 
pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip from servicelib.fastapi.long_running_tasks.client import ( Client, ProgressMessage, diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 759e9fd620e..134b9eaea74 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -19,9 +19,9 @@ from models_library.services_resources import ServiceResourcesDict from models_library.users import UserID from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.host import get_localhost_ip +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_host import get_localhost_ip from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from utils import ( diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 41efb6e84ec..2b388ef552a 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -21,7 +21,7 @@ ) from models_library.users import UserID from pydantic import PositiveInt, parse_obj_as -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from servicelib.common_headers import ( X_DYNAMIC_SIDECAR_REQUEST_DNS, X_DYNAMIC_SIDECAR_REQUEST_SCHEME, diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py index 2e05c858f79..4001403c684 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py @@ -5,7 +5,7 @@ from typing import Any, Final import pytest -from pytest_simcore.helpers.utils_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from simcore_service_director_v2.core.settings import AppSettings from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from simcore_service_director_v2.modules.dynamic_sidecar.docker_service_specs.sidecar import ( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_observer.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_observer.py index 30e419a0cc6..19ba5f72bcf 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_observer.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_observer.py @@ -9,7 +9,7 @@ from faker import Faker from fastapi import FastAPI from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_director_v2.core.settings import AppSettings from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from 
simcore_service_director_v2.modules.dynamic_sidecar.api_client import ( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_core_events.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_core_events.py index 4d41ad966f2..2bd05535e6a 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_core_events.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_core_events.py @@ -11,8 +11,8 @@ from fastapi import FastAPI from pydantic import PositiveFloat, PositiveInt from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from servicelib.exception_utils import _SKIPS_MESSAGE from servicelib.fastapi.http_client_thin import BaseHttpClientError from simcore_service_director_v2.models.dynamic_services_scheduler import ( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py index 05fd4e0b280..124b156ff0e 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py @@ -15,8 +15,8 @@ from faker import Faker from fastapi import FastAPI from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx.router import MockRouter from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from simcore_service_director_v2.modules.dynamic_sidecar.api_client._public import ( diff --git a/services/director-v2/tests/unit/test_modules_notifier.py b/services/director-v2/tests/unit/test_modules_notifier.py index 3a194d98659..46d0879cebc 100644 --- a/services/director-v2/tests/unit/test_modules_notifier.py +++ b/services/director-v2/tests/unit/test_modules_notifier.py @@ -20,7 +20,7 @@ from models_library.wallets import WalletID from pydantic import NonNegativeInt, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather from settings_library.rabbit import RabbitSettings from simcore_service_director_v2.core.settings import AppSettings diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index 2c7c77f9156..6f75f43c59f 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -22,8 +22,8 @@ from models_library.projects_nodes_io import NodeID from models_library.users import UserID from pydantic import parse_obj_as, parse_raw_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_director_v2.core.settings import AppSettings from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB from 
simcore_service_director_v2.models.comp_tasks import CompTaskAtDB diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_groups_extra_properties.py b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_groups_extra_properties.py index 0d02d53d69a..31baae6de57 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_groups_extra_properties.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_groups_extra_properties.py @@ -7,8 +7,8 @@ import sqlalchemy as sa from faker import Faker from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_postgres_database.models.groups import groups from simcore_postgres_database.models.groups_extra_properties import ( groups_extra_properties, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py index d23932422a8..3cdc76f4150 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py @@ -10,8 +10,8 @@ from fastapi import FastAPI from models_library.projects import ProjectAtDB from models_library.projects_nodes_io import NodeID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_postgres_database.utils_projects_nodes import ProjectNodesNodeNotFoundError from simcore_service_director_v2.modules.db.repositories.projects import ( ProjectsRepository, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 3ea1cb1cad8..df52d3cb49c 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -22,7 +22,7 @@ from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceState from models_library.users import UserID -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_director_v2.constants import ( DYNAMIC_PROXY_SERVICE_PREFIX, DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index ee5f4a4f15a..ce475fa5555 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -27,8 +27,8 @@ from models_library.services import RunID, ServiceKeyVersion from models_library.utils.json_serialization import json_dumps from models_library.wallets import WalletInfo +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.s3 import S3Settings 
from simcore_service_director_v2.core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index f4cf8ab6aed..eecb693e0de 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -18,7 +18,6 @@ "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", "pytest_simcore.pytest_global_environs", ] diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 641e26559b5..ff72140f5ee 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -11,8 +11,8 @@ from asgi_lifespan import LifespanManager from fastapi import FastAPI from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_dynamic_scheduler.core.application import create_app pytest_plugins = [ @@ -23,7 +23,6 @@ "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/dynamic-scheduler/tests/unit/test_cli.py b/services/dynamic-scheduler/tests/unit/test_cli.py index 637c0c124fb..2e812f7e118 100644 --- a/services/dynamic-scheduler/tests/unit/test_cli.py +++ b/services/dynamic-scheduler/tests/unit/test_cli.py @@ -3,8 +3,8 @@ import os import pytest +from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import load_dotenv, setenvs_from_dict from simcore_service_dynamic_scheduler._meta import API_VERSION from simcore_service_dynamic_scheduler.cli import main as cli_main from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index a88c402442f..24bb48e4a26 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -21,7 +21,7 @@ from models_library.utils.json_serialization import json_dumps from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import ( +from pytest_simcore.helpers.monkeypatch_envs import ( EnvVarsDict, setenvs_from_dict, setenvs_from_envfile, @@ -39,11 +39,10 @@ "pytest_simcore.faker_users_data", "pytest_simcore.minio_service", "pytest_simcore.pytest_global_environs", - "pytest_simcore.pytest_socketio", + "pytest_simcore.socketio", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", - "pytest_simcore.tmp_path_extra", ] CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent diff --git a/services/dynamic-sidecar/tests/integration/conftest.py b/services/dynamic-sidecar/tests/integration/conftest.py index a39368a34c4..8c7e5e79533 100644 --- a/services/dynamic-sidecar/tests/integration/conftest.py +++ b/services/dynamic-sidecar/tests/integration/conftest.py @@ -3,7 +3,7 @@ import pytest import sqlalchemy as sa from models_library.users import UserID -from pytest_simcore.helpers.rawdata_fakers import random_user +from pytest_simcore.helpers.faker_factories 
import random_user from simcore_postgres_database.models.users import users pytest_plugins = [ diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index 9123679d3e5..5e70b0a6f79 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -25,9 +25,9 @@ from models_library.users import UserID from pydantic import AnyUrl, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import random_project -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_postgres import PostgresTestConfig +from pytest_simcore.helpers.faker_factories import random_project +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from servicelib.fastapi.long_running_tasks.server import TaskProgress from servicelib.utils import logged_gather from settings_library.s3 import S3Settings diff --git a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py index 462467cb0ad..094b3014404 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py @@ -15,8 +15,8 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from pydantic import parse_obj_as -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_postgres import PostgresTestConfig +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from simcore_service_dynamic_sidecar.core.application import create_app from simcore_service_dynamic_sidecar.modules.user_services_preferences import ( load_user_services_preferences, diff --git a/services/dynamic-sidecar/tests/unit/conftest.py b/services/dynamic-sidecar/tests/unit/conftest.py index 6df34b37c63..b6e590f71eb 100644 --- a/services/dynamic-sidecar/tests/unit/conftest.py +++ b/services/dynamic-sidecar/tests/unit/conftest.py @@ -11,7 +11,7 @@ from aiodocker.volumes import DockerVolume from async_asgi_testclient import TestClient from fastapi import FastAPI -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_dynamic_sidecar.core.application import AppState, create_app from simcore_service_dynamic_sidecar.core.docker_compose_utils import ( docker_compose_down, diff --git a/services/dynamic-sidecar/tests/unit/test__oas_spec.py b/services/dynamic-sidecar/tests/unit/test__oas_spec.py index a25734b30f2..b5fbc3328a4 100644 --- a/services/dynamic-sidecar/tests/unit/test__oas_spec.py +++ b/services/dynamic-sidecar/tests/unit/test__oas_spec.py @@ -7,7 +7,7 @@ import pytest from fastapi import FastAPI -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict @pytest.fixture diff --git a/services/dynamic-sidecar/tests/unit/test_api_containers.py 
b/services/dynamic-sidecar/tests/unit/test_api_containers.py index 5615e2c1a24..29ca97093c5 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_containers.py +++ b/services/dynamic-sidecar/tests/unit/test_api_containers.py @@ -26,7 +26,7 @@ from models_library.services import ServiceOutput from models_library.services_creation import CreateServiceMetricsAdditionalParams from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME from servicelib.fastapi.long_running_tasks.client import TaskId from simcore_service_dynamic_sidecar._meta import API_VTAG diff --git a/services/dynamic-sidecar/tests/unit/test_api_containers_long_running_tasks.py b/services/dynamic-sidecar/tests/unit/test_api_containers_long_running_tasks.py index 27c1985f0aa..4069eac1fe7 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_containers_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/unit/test_api_containers_long_running_tasks.py @@ -23,7 +23,7 @@ from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from servicelib.fastapi.long_running_tasks.client import ( Client, TaskClientResultError, diff --git a/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py index 1d2a7635b58..75bf997142e 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py @@ -15,7 +15,7 @@ from models_library.callbacks_mapping import CallbacksMapping from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, parse_obj_as -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( Client, TaskId, diff --git a/services/dynamic-sidecar/tests/unit/test_api_workflow_service_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_workflow_service_metrics.py index 61dcedbf1d7..bfbebda6bf2 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_workflow_service_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_workflow_service_metrics.py @@ -30,7 +30,7 @@ from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( Client, TaskClientResultError, diff --git a/services/dynamic-sidecar/tests/unit/test_core_external_dependencies.py b/services/dynamic-sidecar/tests/unit/test_core_external_dependencies.py index 614c0462f93..3de1c0acf6a 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_external_dependencies.py +++ b/services/dynamic-sidecar/tests/unit/test_core_external_dependencies.py @@ -9,7 +9,7 @@ from fastapi import FastAPI from models_library.projects 
import ProjectID from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_dynamic_sidecar.core.application import create_app from simcore_service_dynamic_sidecar.core.external_dependencies import ( CouldNotReachExternalDependenciesError, diff --git a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py index 568cbc9785f..123f21864a0 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py +++ b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py @@ -3,7 +3,7 @@ from pydantic import ByteSize, parse_obj_as -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_dynamic_sidecar.core.application import create_base_app from simcore_service_dynamic_sidecar.core.reserved_space import ( _RESERVED_DISK_SPACE_NAME, diff --git a/services/dynamic-sidecar/tests/unit/test_core_settings.py b/services/dynamic-sidecar/tests/unit/test_core_settings.py index 0f3b8116ad4..4512abf71d6 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_settings.py +++ b/services/dynamic-sidecar/tests/unit/test_core_settings.py @@ -2,7 +2,7 @@ # pylint: disable=redefined-outer-name import pytest -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from settings_library.utils_service import DEFAULT_AIOHTTP_PORT from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py index e8ca825986e..40a3db6d3f9 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py @@ -17,7 +17,7 @@ from models_library.services import RunID from pydantic import PositiveFloat from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_sdk.node_ports_common.exceptions import S3TransferError from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py index e567fd9553b..73184a1b3cb 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py @@ -23,7 +23,7 @@ from models_library.users import UserID from pydantic import ByteSize, NonNegativeInt, parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather from settings_library.rabbit import RabbitSettings from simcore_service_dynamic_sidecar.core.application import create_app diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py index 
0b974e13645..62e4352e1cc 100644 --- a/services/efs-guardian/tests/unit/conftest.py +++ b/services/efs-guardian/tests/unit/conftest.py @@ -14,7 +14,7 @@ from asgi_lifespan import LifespanManager from fastapi import FastAPI from httpx import ASGITransport -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.rabbit import RabbitSettings from simcore_service_efs_guardian.core.application import create_app @@ -30,7 +30,6 @@ "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", ] diff --git a/services/efs-guardian/tests/unit/test_api_health.py b/services/efs-guardian/tests/unit/test_api_health.py index 22bc3377a8f..621543e2d80 100644 --- a/services/efs-guardian/tests/unit/test_api_health.py +++ b/services/efs-guardian/tests/unit/test_api_health.py @@ -4,8 +4,8 @@ import httpx import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.rabbit import RabbitSettings from starlette import status diff --git a/services/efs-guardian/tests/unit/test_core_settings.py b/services/efs-guardian/tests/unit/test_core_settings.py index a3496b381b5..f8fd28355d7 100644 --- a/services/efs-guardian/tests/unit/test_core_settings.py +++ b/services/efs-guardian/tests/unit/test_core_settings.py @@ -3,7 +3,7 @@ # pylint: disable=unused-variable -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_efs_guardian.core.settings import ApplicationSettings diff --git a/services/efs-guardian/tests/unit/test_efs_manager.py b/services/efs-guardian/tests/unit/test_efs_manager.py index ede34e1824c..cd5b9a755a3 100644 --- a/services/efs-guardian/tests/unit/test_efs_manager.py +++ b/services/efs-guardian/tests/unit/test_efs_manager.py @@ -9,8 +9,8 @@ import pytest from faker import Faker from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from simcore_service_efs_guardian.core.settings import AwsEfsSettings diff --git a/services/efs-guardian/tests/unit/test_main.py b/services/efs-guardian/tests/unit/test_main.py index 475673488be..bbdb41096c8 100644 --- a/services/efs-guardian/tests/unit/test_main.py +++ b/services/efs-guardian/tests/unit/test_main.py @@ -3,7 +3,7 @@ # pylint:disable=redefined-outer-name -from pytest_simcore.helpers.utils_envs import EnvVarsDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict def test_main_app(app_environment: EnvVarsDict): diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index 9a533536dc9..1b6ea4ee6e9 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -10,8 +10,8 @@ from cryptography.fernet import Fernet from faker import Faker from models_library.products import ProductName +from 
pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_invitations.services.invitations import InvitationInputs pytest_plugins = [ diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py index 11c599019ac..e77247115cf 100644 --- a/services/invitations/tests/unit/test_cli.py +++ b/services/invitations/tests/unit/test_cli.py @@ -8,8 +8,8 @@ import pytest from faker import Faker from models_library.products import ProductName +from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import load_dotenv, setenvs_from_dict from simcore_service_invitations._meta import API_VERSION from simcore_service_invitations.cli import main from simcore_service_invitations.core.settings import ApplicationSettings diff --git a/services/invitations/tests/unit/test_core_settings.py b/services/invitations/tests/unit/test_core_settings.py index ca91b5bd00b..7c68e809eda 100644 --- a/services/invitations/tests/unit/test_core_settings.py +++ b/services/invitations/tests/unit/test_core_settings.py @@ -5,8 +5,8 @@ import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_invitations.core.settings import ( ApplicationSettings, MinimalApplicationSettings, diff --git a/services/migration/tests/conftest.py b/services/migration/tests/conftest.py index dd0a28d12c5..2ad21378f67 100644 --- a/services/migration/tests/conftest.py +++ b/services/migration/tests/conftest.py @@ -3,5 +3,4 @@ "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/osparc-gateway-server/tests/integration/conftest.py b/services/osparc-gateway-server/tests/integration/conftest.py index 7ac15707662..cac3d567864 100644 --- a/services/osparc-gateway-server/tests/integration/conftest.py +++ b/services/osparc-gateway-server/tests/integration/conftest.py @@ -19,7 +19,7 @@ OSPARC_SCHEDULER_API_PORT, OSPARC_SCHEDULER_DASHBOARD_PORT, ) -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from tenacity._asyncio import AsyncRetrying from tenacity.wait import wait_fixed diff --git a/services/osparc-gateway-server/tests/integration/test_clusters.py b/services/osparc-gateway-server/tests/integration/test_clusters.py index 0d1306b0b2a..295754cd9a6 100644 --- a/services/osparc-gateway-server/tests/integration/test_clusters.py +++ b/services/osparc-gateway-server/tests/integration/test_clusters.py @@ -9,7 +9,7 @@ from aiodocker import Docker from dask_gateway import Gateway from faker import Faker -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from tenacity._asyncio import AsyncRetrying from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed diff --git a/services/osparc-gateway-server/tests/integration/test_dask_sidecar.py b/services/osparc-gateway-server/tests/integration/test_dask_sidecar.py index cbf54edb1c0..f506f0554b4 100644 --- a/services/osparc-gateway-server/tests/integration/test_dask_sidecar.py +++ 
b/services/osparc-gateway-server/tests/integration/test_dask_sidecar.py @@ -8,7 +8,7 @@ import aiodocker import pytest from faker import Faker -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from tenacity._asyncio import AsyncRetrying from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed diff --git a/services/osparc-gateway-server/tests/integration/test_gateway.py b/services/osparc-gateway-server/tests/integration/test_gateway.py index c1f89ee76e8..7009c12cb5b 100644 --- a/services/osparc-gateway-server/tests/integration/test_gateway.py +++ b/services/osparc-gateway-server/tests/integration/test_gateway.py @@ -7,7 +7,7 @@ from dask_gateway_server.app import DaskGateway from faker import Faker from osparc_gateway_server.backend.osparc import OsparcBackend -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip @pytest.fixture( diff --git a/services/osparc-gateway-server/tests/system/test_deploy.py b/services/osparc-gateway-server/tests/system/test_deploy.py index 79cc1221884..b6145462b89 100644 --- a/services/osparc-gateway-server/tests/system/test_deploy.py +++ b/services/osparc-gateway-server/tests/system/test_deploy.py @@ -12,7 +12,7 @@ import dask_gateway import pytest from faker import Faker -from pytest_simcore.helpers.utils_host import get_localhost_ip +from pytest_simcore.helpers.host import get_localhost_ip from tenacity._asyncio import AsyncRetrying from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py index 26092690de9..042ac85f968 100644 --- a/services/payments/tests/conftest.py +++ b/services/payments/tests/conftest.py @@ -11,8 +11,8 @@ from faker import Faker from models_library.users import GroupID from pydantic import parse_obj_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from servicelib.utils_secrets import generate_token_secret_key pytest_plugins = [ @@ -25,10 +25,9 @@ "pytest_simcore.faker_users_data", "pytest_simcore.httpbin_service", "pytest_simcore.postgres_service", - "pytest_simcore.pytest_socketio", + "pytest_simcore.socketio", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", ] diff --git a/services/payments/tests/unit/api/test__one_time_payment_workflows.py b/services/payments/tests/unit/api/test__one_time_payment_workflows.py index 153ae6d69b4..2052dee31f6 100644 --- a/services/payments/tests/unit/api/test__one_time_payment_workflows.py +++ b/services/payments/tests/unit/api/test__one_time_payment_workflows.py @@ -17,8 +17,8 @@ from models_library.wallets import WalletID from pydantic import EmailStr, parse_obj_as from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S diff --git a/services/payments/tests/unit/api/test__payment_method_workflows.py b/services/payments/tests/unit/api/test__payment_method_workflows.py index 15cedd186d9..5b92bee8b17 100644 --- 
a/services/payments/tests/unit/api/test__payment_method_workflows.py +++ b/services/payments/tests/unit/api/test__payment_method_workflows.py @@ -18,8 +18,8 @@ from models_library.wallets import WalletID from pydantic import EmailStr, parse_obj_as from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S diff --git a/services/payments/tests/unit/api/test_rest_acknowledgements.py b/services/payments/tests/unit/api/test_rest_acknowledgements.py index 58614a1ac53..4df30829f93 100644 --- a/services/payments/tests/unit/api/test_rest_acknowledgements.py +++ b/services/payments/tests/unit/api/test_rest_acknowledgements.py @@ -16,8 +16,8 @@ PaymentNotFoundError, ) from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_payments.models.schemas.acknowledgements import ( AckPayment, AckPaymentMethod, diff --git a/services/payments/tests/unit/conftest.py b/services/payments/tests/unit/conftest.py index aeb9c9a446a..63f4ed97bbd 100644 --- a/services/payments/tests/unit/conftest.py +++ b/services/payments/tests/unit/conftest.py @@ -25,7 +25,7 @@ from models_library.wallets import WalletID from pydantic import parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import random_payment_method_view +from pytest_simcore.helpers.faker_factories import random_payment_method_view from pytest_simcore.helpers.typing_env import EnvVarsDict from respx import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient diff --git a/services/payments/tests/unit/test_cli.py b/services/payments/tests/unit/test_cli.py index c0c62c3190c..2d01fd0fc31 100644 --- a/services/payments/tests/unit/test_cli.py +++ b/services/payments/tests/unit/test_cli.py @@ -6,8 +6,8 @@ import os import pytest +from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import load_dotenv, setenvs_from_dict from simcore_service_payments._meta import API_VERSION from simcore_service_payments.cli import main as cli_main from simcore_service_payments.core.settings import ApplicationSettings diff --git a/services/payments/tests/unit/test_db_payments_methods_repo.py b/services/payments/tests/unit/test_db_payments_methods_repo.py index 76166c5d0be..f64570cf598 100644 --- a/services/payments/tests/unit/test_db_payments_methods_repo.py +++ b/services/payments/tests/unit/test_db_payments_methods_repo.py @@ -7,8 +7,8 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_payments.db.payments_methods_repo import PaymentsMethodsRepo from simcore_service_payments.models.db import InitPromptAckFlowState, PaymentsMethodsDB diff --git a/services/payments/tests/unit/test_db_payments_transactions_repo.py b/services/payments/tests/unit/test_db_payments_transactions_repo.py index 
23bd548446a..62e217a9e7a 100644 --- a/services/payments/tests/unit/test_db_payments_transactions_repo.py +++ b/services/payments/tests/unit/test_db_payments_transactions_repo.py @@ -7,8 +7,8 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_payments.db.payments_transactions_repo import ( PaymentsTransactionsRepo, ) diff --git a/services/payments/tests/unit/test_db_payments_users_repo.py b/services/payments/tests/unit/test_db_payments_users_repo.py index 4db76e25992..a695af251c4 100644 --- a/services/payments/tests/unit/test_db_payments_users_repo.py +++ b/services/payments/tests/unit/test_db_payments_users_repo.py @@ -11,9 +11,9 @@ import pytest from fastapi import FastAPI from models_library.users import GroupID, UserID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_postgres import insert_and_get_row_lifespan from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import users diff --git a/services/payments/tests/unit/test_rpc_payments.py b/services/payments/tests/unit/test_rpc_payments.py index 96fe89c6bff..3e5b3ad1c2a 100644 --- a/services/payments/tests/unit/test_rpc_payments.py +++ b/services/payments/tests/unit/test_rpc_payments.py @@ -14,8 +14,8 @@ from models_library.payments import UserInvoiceAddress from models_library.rabbitmq_basic_types import RPCMethodName from pydantic import parse_obj_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S diff --git a/services/payments/tests/unit/test_rpc_payments_methods.py b/services/payments/tests/unit/test_rpc_payments_methods.py index 5f27c754b4a..9ecb10d9976 100644 --- a/services/payments/tests/unit/test_rpc_payments_methods.py +++ b/services/payments/tests/unit/test_rpc_payments_methods.py @@ -19,8 +19,8 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import EmailStr, parse_obj_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S diff --git a/services/payments/tests/unit/test_services_auto_recharge_listener.py b/services/payments/tests/unit/test_services_auto_recharge_listener.py index a3eb8f7cf8d..9db559e8660 100644 --- a/services/payments/tests/unit/test_services_auto_recharge_listener.py +++ b/services/payments/tests/unit/test_services_auto_recharge_listener.py @@ -24,13 +24,13 @@ from models_library.rabbitmq_messages import WalletCreditsMessage from models_library.users import UserID 
from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import ( +from pytest_simcore.helpers.faker_factories import ( random_payment_autorecharge, random_payment_method, random_payment_transaction, ) +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient, RPCRouter from simcore_postgres_database.models.payments_autorecharge import payments_autorecharge diff --git a/services/payments/tests/unit/test_services_notifier.py b/services/payments/tests/unit/test_services_notifier.py index 3cdeb7224c2..5aab90f9f0f 100644 --- a/services/payments/tests/unit/test_services_notifier.py +++ b/services/payments/tests/unit/test_services_notifier.py @@ -22,9 +22,9 @@ from models_library.users import GroupID, UserID from pydantic import parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import random_payment_transaction +from pytest_simcore.helpers.faker_factories import random_payment_transaction +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.rabbit import RabbitSettings from simcore_service_payments.models.db import PaymentsTransactionsDB from simcore_service_payments.models.db_to_api import to_payments_api_model diff --git a/services/payments/tests/unit/test_services_notifier_email.py b/services/payments/tests/unit/test_services_notifier_email.py index f7d30007df1..d078d902b1b 100644 --- a/services/payments/tests/unit/test_services_notifier_email.py +++ b/services/payments/tests/unit/test_services_notifier_email.py @@ -15,8 +15,8 @@ from models_library.users import UserID from pydantic import EmailStr from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from settings_library.email import SMTPSettings from simcore_postgres_database.models.products import Vendor from simcore_service_payments.db.payment_users_repo import PaymentsUsersRepo diff --git a/services/payments/tests/unit/test_services_payments.py b/services/payments/tests/unit/test_services_payments.py index 881db142aff..94452d9c772 100644 --- a/services/payments/tests/unit/test_services_payments.py +++ b/services/payments/tests/unit/test_services_payments.py @@ -18,8 +18,8 @@ from models_library.wallets import WalletID from pydantic import EmailStr from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from respx import MockRouter from simcore_service_payments.db.payments_methods_repo import PaymentsMethodsRepo from simcore_service_payments.db.payments_transactions_repo import ( diff --git a/services/payments/tests/unit/test_services_payments__get_invoice.py b/services/payments/tests/unit/test_services_payments__get_invoice.py index a4dfcdc3352..7a391b22351 100644 --- a/services/payments/tests/unit/test_services_payments__get_invoice.py +++ b/services/payments/tests/unit/test_services_payments__get_invoice.py @@ -18,9 +18,9 @@ 
 from models_library.users import UserID
 from models_library.wallets import WalletID
 from pydantic import HttpUrl
-from pytest_simcore.helpers.rawdata_fakers import random_payment_transaction
+from pytest_simcore.helpers.faker_factories import random_payment_transaction
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from respx import MockRouter
 from simcore_postgres_database.models.payments_transactions import (
     PaymentTransactionState,
diff --git a/services/payments/tests/unit/test_services_payments_gateway.py b/services/payments/tests/unit/test_services_payments_gateway.py
index ceb1ec48065..f210d1e7258 100644
--- a/services/payments/tests/unit/test_services_payments_gateway.py
+++ b/services/payments/tests/unit/test_services_payments_gateway.py
@@ -9,7 +9,7 @@
 from faker import Faker
 from fastapi import FastAPI, status
 from models_library.payments import UserInvoiceAddress
-from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from respx import MockRouter
 from simcore_service_payments.core.settings import ApplicationSettings
 from simcore_service_payments.models.payments_gateway import (
diff --git a/services/payments/tests/unit/test_services_resource_usage_tracker.py b/services/payments/tests/unit/test_services_resource_usage_tracker.py
index 7af987fa4d2..0959f535cc1 100644
--- a/services/payments/tests/unit/test_services_resource_usage_tracker.py
+++ b/services/payments/tests/unit/test_services_resource_usage_tracker.py
@@ -11,7 +11,7 @@
 from asgi_lifespan import LifespanManager
 from faker import Faker
 from fastapi import FastAPI
-from pytest_simcore.helpers.utils_envs import EnvVarsDict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
 from respx import MockRouter
 from simcore_service_payments.core.application import create_app
 from simcore_service_payments.core.settings import ApplicationSettings
diff --git a/services/payments/tests/unit/test_services_stripe.py b/services/payments/tests/unit/test_services_stripe.py
index d9c3d982d91..fec78997f53 100644
--- a/services/payments/tests/unit/test_services_stripe.py
+++ b/services/payments/tests/unit/test_services_stripe.py
@@ -7,7 +7,7 @@
 import pytest
 from fastapi import FastAPI, status
 from models_library.payments import StripeInvoiceID
-from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from respx import MockRouter
 from simcore_service_payments.core.settings import ApplicationSettings
 from simcore_service_payments.services.stripe import StripeApi, setup_stripe
diff --git a/services/resource-usage-tracker/tests/unit/conftest.py b/services/resource-usage-tracker/tests/unit/conftest.py
index aeb5f36ec4a..6d70142d5e4 100644
--- a/services/resource-usage-tracker/tests/unit/conftest.py
+++ b/services/resource-usage-tracker/tests/unit/conftest.py
@@ -20,8 +20,8 @@
 from fastapi import FastAPI
 from fastapi.testclient import TestClient
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from settings_library.rabbit import RabbitSettings
 from simcore_service_resource_usage_tracker.core.application import create_app
 from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings
@@ -37,7 +37,6 @@
     "pytest_simcore.pytest_global_environs",
     "pytest_simcore.rabbit_service",
     "pytest_simcore.repository_paths",
-    "pytest_simcore.tmp_path_extra",
     "pytest_simcore.aws_s3_service",
     "pytest_simcore.aws_server",
 ]
diff --git a/services/resource-usage-tracker/tests/unit/test_web_main.py b/services/resource-usage-tracker/tests/unit/test_web_main.py
index c56231e0c0b..a58f4a6139a 100644
--- a/services/resource-usage-tracker/tests/unit/test_web_main.py
+++ b/services/resource-usage-tracker/tests/unit/test_web_main.py
@@ -3,7 +3,7 @@
 # pylint:disable=redefined-outer-name
 
 
-from pytest_simcore.helpers.utils_envs import EnvVarsDict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
 
 
 def test_main_app(app_environment: EnvVarsDict):
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
index 3d476e01d6d..a0962022d80 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
@@ -19,8 +19,8 @@
     RabbitResourceTrackingMessageType,
     RabbitResourceTrackingStartedMessage,
 )
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from servicelib.rabbitmq import RabbitMQRPCClient
 from settings_library.rabbit import RabbitSettings
 from simcore_postgres_database.models.resource_tracker_credit_transactions import (
diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py
index d40e621c305..ec6940b1fb2 100644
--- a/services/storage/tests/conftest.py
+++ b/services/storage/tests/conftest.py
@@ -41,7 +41,7 @@
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import ByteSize, parse_obj_as
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from simcore_postgres_database.storage_models import file_meta_data, projects, users
 from simcore_service_storage.application import create
@@ -71,7 +71,6 @@
     "pytest_simcore.postgres_service",
     "pytest_simcore.pytest_global_environs",
     "pytest_simcore.repository_paths",
-    "pytest_simcore.tmp_path_extra",
     "tests.fixtures.data_models",
     "tests.fixtures.datcore_adapter",
 ]
diff --git a/services/storage/tests/fixtures/data_models.py b/services/storage/tests/fixtures/data_models.py
index 9fb00685e84..dc5fbadf9e1 100644
--- a/services/storage/tests/fixtures/data_models.py
+++ b/services/storage/tests/fixtures/data_models.py
@@ -19,7 +19,7 @@
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
 from models_library.users import UserID
 from pydantic import ByteSize, parse_obj_as
-from pytest_simcore.helpers.rawdata_fakers import random_project, random_user
+from pytest_simcore.helpers.faker_factories import random_project, random_user
 from servicelib.utils import logged_gather
 from simcore_postgres_database.storage_models import projects, users
 
diff --git a/services/storage/tests/unit/conftest.py b/services/storage/tests/unit/conftest.py
index 91c7d2cdade..222c9913234 100644
--- a/services/storage/tests/unit/conftest.py
+++ b/services/storage/tests/unit/conftest.py
@@ -24,7 +24,7 @@
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import ByteSize
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from simcore_service_storage._meta import API_VTAG
 from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY
diff --git a/services/storage/tests/unit/test__openapi_specs.py b/services/storage/tests/unit/test__openapi_specs.py
index 3d4bc0f3509..8e384e5a0d7 100644
--- a/services/storage/tests/unit/test__openapi_specs.py
+++ b/services/storage/tests/unit/test__openapi_specs.py
@@ -10,8 +10,8 @@
 from aiohttp import web
 from faker import Faker
 from openapi_core import Spec as OpenApiSpecs
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from simcore_service_storage.settings import Settings
 
 
diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py
index 36c6ae7342f..40eac9dba06 100644
--- a/services/storage/tests/unit/test_dsm_dsmcleaner.py
+++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py
@@ -22,7 +22,7 @@
 from models_library.projects_nodes_io import SimcoreS3DirectoryID, SimcoreS3FileID
 from models_library.users import UserID
 from pydantic import ByteSize, parse_obj_as
-from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
+from pytest_simcore.helpers.parametrizations import byte_size_ids
 from simcore_postgres_database.storage_models import file_meta_data
 from simcore_service_storage import db_file_meta_data
 from simcore_service_storage.exceptions import (
diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py
index 3a4dc574bac..52e3c3ed348 100644
--- a/services/storage/tests/unit/test_handlers_datasets.py
+++ b/services/storage/tests/unit/test_handlers_datasets.py
@@ -17,8 +17,8 @@
 from models_library.users import UserID
 from pydantic import ByteSize, parse_obj_as
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.parametrizations import byte_size_ids
 from servicelib.aiohttp import status
 from tests.helpers.file_utils import parametrized_file_size
 
diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py
index ebea214b569..ea966634770 100644
--- a/services/storage/tests/unit/test_handlers_files.py
+++ b/services/storage/tests/unit/test_handlers_files.py
@@ -44,8 +44,8 @@
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import AnyHttpUrl, ByteSize, HttpUrl, parse_obj_as
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.parametrizations import byte_size_ids
 from servicelib.aiohttp import status
 from simcore_service_storage.constants import (
     MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
diff --git a/services/storage/tests/unit/test_handlers_files_metadata.py b/services/storage/tests/unit/test_handlers_files_metadata.py
index 87cbfcc1999..55cf83dbcac 100644
--- a/services/storage/tests/unit/test_handlers_files_metadata.py
+++ b/services/storage/tests/unit/test_handlers_files_metadata.py
@@ -15,7 +15,7 @@
 from models_library.projects import ProjectID
 from models_library.users import UserID
 from pydantic import ByteSize, parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 
 pytest_simcore_core_services_selection = ["postgres"]
diff --git a/services/storage/tests/unit/test_handlers_health.py b/services/storage/tests/unit/test_handlers_health.py
index 1447bd46846..3504575c205 100644
--- a/services/storage/tests/unit/test_handlers_health.py
+++ b/services/storage/tests/unit/test_handlers_health.py
@@ -8,7 +8,7 @@
 from models_library.api_schemas_storage import S3BucketName
 from models_library.app_diagnostics import AppStatusCheck
 from moto.server import ThreadedMotoServer
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from simcore_service_storage.handlers_health import HealthCheck
 from simcore_service_storage.s3_client import StorageS3Client
diff --git a/services/storage/tests/unit/test_handlers_locations.py b/services/storage/tests/unit/test_handlers_locations.py
index c541be0d75e..cc236499ee9 100644
--- a/services/storage/tests/unit/test_handlers_locations.py
+++ b/services/storage/tests/unit/test_handlers_locations.py
@@ -9,7 +9,7 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from models_library.users import UserID
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from tests.helpers.utils import has_datcore_tokens
 
diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py
index 348e214d5b5..916f7d3cdba 100644
--- a/services/storage/tests/unit/test_handlers_simcore_s3.py
+++ b/services/storage/tests/unit/test_handlers_simcore_s3.py
@@ -25,7 +25,7 @@
 from models_library.utils.change_case import camel_to_snake
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import ByteSize, parse_file_as, parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.long_running_tasks.client import long_running_task_request
 from servicelib.utils import logged_gather
diff --git a/services/storage/tests/unit/test_s3_client.py b/services/storage/tests/unit/test_s3_client.py
index 3d44547f3e1..20dab8581bc 100644
--- a/services/storage/tests/unit/test_s3_client.py
+++ b/services/storage/tests/unit/test_s3_client.py
@@ -30,7 +30,7 @@
 from models_library.projects_nodes_io import SimcoreS3FileID
 from pydantic import ByteSize, parse_obj_as
 from pytest_mock import MockFixture
-from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
+from pytest_simcore.helpers.parametrizations import byte_size_ids
 from simcore_service_storage.models import MultiPartUploadLinks, S3BucketName
 from simcore_service_storage.s3_client import (
     StorageS3Client,
diff --git a/services/storage/tests/unit/test_s3_utils.py b/services/storage/tests/unit/test_s3_utils.py
index 8f159d45790..ddcb866b383 100644
--- a/services/storage/tests/unit/test_s3_utils.py
+++ b/services/storage/tests/unit/test_s3_utils.py
@@ -6,7 +6,7 @@
 
 import pytest
 from pydantic import ByteSize, parse_obj_as
-from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
+from pytest_simcore.helpers.parametrizations import byte_size_ids
 from simcore_service_storage.s3_utils import (
     _MULTIPART_MAX_NUMBER_OF_PARTS,
     _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE,
diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py
index 5415e67e6c6..cd6b0142b33 100644
--- a/services/web/server/tests/conftest.py
+++ b/services/web/server/tests/conftest.py
@@ -21,10 +21,10 @@
 from models_library.projects_nodes_io import NodeID
 from models_library.projects_state import ProjectState
 from models_library.utils.json_serialization import json_dumps
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_dict import ConfigDict
-from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict
-from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.dict_tools import ConfigDict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
 from pytest_simcore.simcore_webserver_projects_rest_api import NEW_PROJECT
 from servicelib.aiohttp import status
 from servicelib.aiohttp.long_running_tasks.server import TaskStatus
@@ -73,8 +73,7 @@
     "pytest_simcore.services_api_mocks_for_aiohttp_clients",
     "pytest_simcore.simcore_service_library_fixtures",
     "pytest_simcore.simcore_services",
-    "pytest_simcore.tmp_path_extra",
-    "pytest_simcore.websocket_client",
+    "pytest_simcore.socketio_client",
 ]
 
 
diff --git a/services/web/server/tests/integration/01/test_exporter_requests_handlers.py b/services/web/server/tests/integration/01/test_exporter_requests_handlers.py
index fb5d4d72cc5..d51945ba960 100644
--- a/services/web/server/tests/integration/01/test_exporter_requests_handlers.py
+++ b/services/web/server/tests/integration/01/test_exporter_requests_handlers.py
@@ -15,8 +15,8 @@
 import pytest
 import redis.asyncio as aioredis
 from aiohttp.test_utils import TestClient
-from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict
-from pytest_simcore.helpers.utils_projects import (
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.webserver_projects import (
     create_project,
     delete_all_projects,
     empty_project_data,
diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py
index 6f23eb1ae12..adf3849444d 100644
--- a/services/web/server/tests/integration/01/test_garbage_collection.py
+++ b/services/web/server/tests/integration/01/test_garbage_collection.py
@@ -22,8 +22,8 @@
 from aioresponses import aioresponses
 from models_library.projects_state import RunningState
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_login import UserInfoDict, log_client_in
-from pytest_simcore.helpers.utils_projects import create_project, empty_project_data
+from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in
+from pytest_simcore.helpers.webserver_projects import create_project, empty_project_data
 from servicelib.aiohttp.application import create_safe_application
 from settings_library.rabbit import RabbitSettings
 from settings_library.redis import RedisDatabase, RedisSettings
diff --git a/services/web/server/tests/integration/02/conftest.py b/services/web/server/tests/integration/02/conftest.py
index 920a7c7abbc..d82d86242cf 100644
--- a/services/web/server/tests/integration/02/conftest.py
+++ b/services/web/server/tests/integration/02/conftest.py
@@ -9,7 +9,7 @@
 
 import pytest
 from models_library.projects import ProjectID
-from pytest_simcore.helpers.utils_projects import NewProject
+from pytest_simcore.helpers.webserver_projects import NewProject
 
 
 @pytest.fixture(scope="session")
diff --git a/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py b/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py
index a26af00fc4c..913656e8638 100644
--- a/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py
+++ b/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py
@@ -31,7 +31,7 @@
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from redis import Redis
 from servicelib.aiohttp.application import create_safe_application
 from servicelib.aiohttp.monitor_services import (
diff --git a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py
index 228fb448855..99683e08cff 100644
--- a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py
+++ b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py
@@ -6,7 +6,7 @@
 import pytest
 from aiohttp import ClientSession
 from aiohttp.client import ClientTimeout
-from pytest_simcore.helpers.utils_scrunch_citations import (
+from pytest_simcore.helpers.scrunch_citations import (
     ANTIBODY_CITATIONS,
     ORGANISM_CITATIONS,
     PLAMID_CITATIONS,
diff --git a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py
index 1a30c32bd00..014418a25fb 100644
--- a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py
+++ b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py
@@ -19,10 +19,7 @@
 
 import pytest
 from aiohttp import ClientResponseError, ClientSession
-from pytest_simcore.helpers.utils_scrunch_citations import (
-    NOT_TOOL_CITATIONS,
-    TOOL_CITATIONS,
-)
+from pytest_simcore.helpers.scrunch_citations import NOT_TOOL_CITATIONS, TOOL_CITATIONS
 from servicelib.aiohttp import status
 from simcore_service_webserver.scicrunch._rest import (
     ListOfResourceHits,
diff --git a/services/web/server/tests/integration/02/test_computation.py b/services/web/server/tests/integration/02/test_computation.py
index 360e3f26958..2531d5761f9 100644
--- a/services/web/server/tests/integration/02/test_computation.py
+++ b/services/web/server/tests/integration/02/test_computation.py
@@ -17,7 +17,7 @@
 from aiohttp.test_utils import TestClient
 from models_library.projects_state import RunningState
 from models_library.utils.json_serialization import json_dumps
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
 from servicelib.status_utils import get_code_display_name
diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py
index 8ea2ce01377..114c8b65d23 100644
--- a/services/web/server/tests/integration/conftest.py
+++ b/services/web/server/tests/integration/conftest.py
@@ -25,9 +25,9 @@
 import yaml
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers import FIXTURE_CONFIG_CORE_SERVICES_SELECTION
-from pytest_simcore.helpers.utils_dict import ConfigDict
-from pytest_simcore.helpers.utils_docker import get_service_published_port
-from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict
+from pytest_simcore.helpers.dict_tools import ConfigDict
+from pytest_simcore.helpers.docker import get_service_published_port
+from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
 from simcore_service_webserver.groups.api import (
     add_user_in_group,
     create_user_group,
diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py
index 80d3ac3a3c0..40dd2bd79b9 100644
--- a/services/web/server/tests/unit/conftest.py
+++ b/services/web/server/tests/unit/conftest.py
@@ -15,8 +15,8 @@
 import pytest
 import yaml
 from openapi_core.schema.specs.models import Spec as OpenApiSpecs
-from pytest_simcore.helpers.utils_dict import ConfigDict
-from pytest_simcore.helpers.utils_projects import empty_project_data
+from pytest_simcore.helpers.dict_tools import ConfigDict
+from pytest_simcore.helpers.webserver_projects import empty_project_data
 from simcore_service_webserver.rest._utils import (
     get_openapi_specs_path,
     load_openapi_specs,
diff --git a/services/web/server/tests/unit/isolated/conftest.py b/services/web/server/tests/unit/isolated/conftest.py
index cdf98e21faa..f4436d35fa1 100644
--- a/services/web/server/tests/unit/isolated/conftest.py
+++ b/services/web/server/tests/unit/isolated/conftest.py
@@ -6,9 +6,9 @@
 import pytest
 from faker import Faker
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.dict_tools import ConfigDict
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_dict import ConfigDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 
 
 @pytest.fixture
diff --git a/services/web/server/tests/unit/isolated/test__configs.py b/services/web/server/tests/unit/isolated/test__configs.py
index f8db0c504d6..fe2984f62ae 100644
--- a/services/web/server/tests/unit/isolated/test__configs.py
+++ b/services/web/server/tests/unit/isolated/test__configs.py
@@ -10,7 +10,7 @@
 from typing import Any
 
 import pytest
-from pytest_simcore.helpers.utils_environs import eval_service_environ
+from pytest_simcore.helpers.deprecated_environs import eval_service_environ
 from servicelib.aiohttp.application_setup import is_setup_function
 
 
diff --git a/services/web/server/tests/unit/isolated/test_activity.py b/services/web/server/tests/unit/isolated/test_activity.py
index 17821d28918..b8d97b92c67 100644
--- a/services/web/server/tests/unit/isolated/test_activity.py
+++ b/services/web/server/tests/unit/isolated/test_activity.py
@@ -13,7 +13,7 @@
 from aiohttp.client_exceptions import ClientConnectionError
 from aiohttp.test_utils import TestClient
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
 from simcore_service_webserver.activity.plugin import setup_activity
diff --git a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py
index c97ff2a59b0..66c5a75200e 100644
--- a/services/web/server/tests/unit/isolated/test_application_settings.py
+++ b/services/web/server/tests/unit/isolated/test_application_settings.py
@@ -9,8 +9,11 @@
 from aiohttp import web
 from models_library.utils.json_serialization import json_dumps
 from pydantic import HttpUrl, parse_obj_as
+from pytest_simcore.helpers.monkeypatch_envs import (
+    setenvs_from_dict,
+    setenvs_from_envfile,
+)
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict, setenvs_from_envfile
 from simcore_service_webserver.application_settings import (
     APP_SETTINGS_KEY,
     ApplicationSettings,
diff --git a/services/web/server/tests/unit/isolated/test_application_settings_utils.py b/services/web/server/tests/unit/isolated/test_application_settings_utils.py
index 3b06d0e7a7a..f4f0f901199 100644
--- a/services/web/server/tests/unit/isolated/test_application_settings_utils.py
+++ b/services/web/server/tests/unit/isolated/test_application_settings_utils.py
@@ -1,7 +1,7 @@
 from typing import Callable
 
 import pytest
-from pytest_simcore.helpers.utils_dict import ConfigDict
+from pytest_simcore.helpers.dict_tools import ConfigDict
 from simcore_service_webserver.application_settings import ApplicationSettings
 from simcore_service_webserver.application_settings_utils import (
     convert_to_app_config,
diff --git a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py
index dee75fafb55..e70f153e369 100644
--- a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py
+++ b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py
@@ -13,7 +13,7 @@
 import simcore_service_webserver
 from aiohttp import web
 from aiohttp.test_utils import TestClient
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
 from simcore_service_webserver._constants import APP_SETTINGS_KEY
diff --git a/services/web/server/tests/unit/isolated/test_login_settings.py b/services/web/server/tests/unit/isolated/test_login_settings.py
index 0fc59f8ad3c..b6872fce92d 100644
--- a/services/web/server/tests/unit/isolated/test_login_settings.py
+++ b/services/web/server/tests/unit/isolated/test_login_settings.py
@@ -8,7 +8,7 @@
 import pytest
 from models_library.errors import ErrorDict
 from pydantic import ValidationError
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from settings_library.email import SMTPSettings
 from simcore_postgres_database.models.products import ProductLoginSettingsDict
 from simcore_service_webserver.login.settings import (
diff --git a/services/web/server/tests/unit/isolated/test_rest.py b/services/web/server/tests/unit/isolated/test_rest.py
index bedb61da1f5..a10592b7757 100644
--- a/services/web/server/tests/unit/isolated/test_rest.py
+++ b/services/web/server/tests/unit/isolated/test_rest.py
@@ -11,8 +11,8 @@
 from aiohttp import web
 from aiohttp.test_utils import TestClient
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
 from simcore_service_webserver.application_settings import setup_settings
diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py
index 074c9d2d5e4..8faada91005 100644
--- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py
+++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py
@@ -15,7 +15,7 @@
 from pydantic import validator
 from pydantic.main import BaseModel
 from pydantic.networks import HttpUrl
-from pytest_simcore.helpers.utils_services import list_fake_file_consumers
+from pytest_simcore.helpers.webserver_fake_services_data import list_fake_file_consumers
 from simcore_service_webserver.studies_dispatcher._projects import (
     UserInfo,
     ViewerInfo,
diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
index 62bcde16dca..03a0eb5920f 100644
--- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
+++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py
@@ -11,8 +11,8 @@
 from aiohttp import web
 from aiohttp.test_utils import make_mocked_request
 from faker import Faker
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from simcore_service_webserver.application_settings import setup_settings
 from simcore_service_webserver.studies_dispatcher._projects_permalinks import (
     PermalinkNotAllowedError,
diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py
index bf3d8a575e6..91364e64beb 100644
--- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py
+++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py
@@ -10,8 +10,8 @@
 import pytest
 from models_library.errors import ErrorDict
 from pydantic import ValidationError
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from simcore_service_webserver.studies_dispatcher.settings import (
     StudiesDispatcherSettings,
 )
diff --git a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
index be01e2fd5a7..c3f6b1d8570 100644
--- a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
@@ -23,8 +23,8 @@
 )
 from models_library.clusters import CLUSTER_ADMIN_RIGHTS, Cluster, SimpleAuthentication
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (  # nopycln: import
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_parametrizations import (  # nopycln: import
     ExpectedResponse,
     standard_role_response,
 )
diff --git a/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py b/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
index ac207d5943f..160b39f4c97 100644
--- a/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
+++ b/services/web/server/tests/unit/with_dbs/01/notifications/test_notifications__db_comp_tasks_listening_task.py
@@ -16,7 +16,7 @@
 from faker import Faker
 from models_library.projects import ProjectAtDB, ProjectID
 from pytest_mock.plugin import MockerFixture
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY
 from simcore_postgres_database.models.comp_pipeline import StateType
 from simcore_postgres_database.models.comp_tasks import NodeClass, comp_tasks
diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/conftest.py
index 09001ddc27b..d80957cca89 100644
--- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/conftest.py
@@ -8,8 +8,8 @@
 
 import pytest
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from simcore_service_webserver.log import setup_logging
 from simcore_service_webserver.studies_dispatcher.settings import (
     StudiesDispatcherSettings,
diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py
index a7fecb40dcd..8157dc8248a 100644
--- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py
@@ -20,8 +20,8 @@
 from models_library.utils.json_serialization import json_dumps
 from pydantic import BaseModel, ByteSize, parse_obj_as
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict, UserRole
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole
 from pytest_simcore.pydantic_models import iter_model_examples_in_module
 from servicelib.aiohttp import status
 from settings_library.redis import RedisSettings
diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py
index b89e1a0f224..48aacf56c6c 100644
--- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py
+++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py
@@ -14,9 +14,9 @@
 from models_library.projects import Project, ProjectID
 from models_library.projects_nodes_io import NodeID
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_login import NewUser
-from pytest_simcore.helpers.utils_projects import delete_all_projects
-from pytest_simcore.helpers.utils_services import list_fake_file_consumers
+from pytest_simcore.helpers.webserver_fake_services_data import list_fake_file_consumers
+from pytest_simcore.helpers.webserver_login import NewUser
+from pytest_simcore.helpers.webserver_projects import delete_all_projects
 from simcore_service_webserver.groups.api import auto_add_user_to_groups
 from simcore_service_webserver.projects.projects_api import get_project_for_user
 from simcore_service_webserver.studies_dispatcher._models import ServiceInfo
diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py
index 1abf8926a57..6291f766b56 100644
--- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py
+++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_studies_access.py
@@ -21,10 +21,10 @@
 from models_library.projects_state import ProjectLocked, ProjectStatus
 from pytest_mock import MockerFixture
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict, UserRole
-from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects
-from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole
+from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
+from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects
 from servicelib.aiohttp import status
 from servicelib.aiohttp.long_running_tasks.client import LRTask
 from servicelib.aiohttp.long_running_tasks.server import TaskProgress
diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py
index 4a19bf8ebe9..aa9e1a14065 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py
@@ -11,8 +11,8 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from models_library.products import ProductName
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
 from servicelib.aiohttp import status
 from simcore_service_webserver.api_keys._api import (
     get_api_key,
diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py
index b70812f09ae..7ad51c739d7 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py
@@ -14,9 +14,9 @@
 from models_library.rabbitmq_basic_types import RPCMethodName
 from pydantic import parse_obj_as
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.rabbitmq import RabbitMQRPCClient
 from settings_library.rabbit import RabbitSettings
 from simcore_postgres_database.models.users import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py
index b6a8f9bd82b..b328ddc4c7d 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py
@@ -15,8 +15,8 @@
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import parse_obj_as
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings
 from simcore_service_webserver.db.models import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py
index 8d5ac4100fa..933e2bf54f8 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py
@@ -10,10 +10,10 @@
 from models_library.api_schemas_webserver.catalog import DEVServiceGet
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import parse_obj_as
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from simcore_service_webserver.db.models import UserRole
 
diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py
index 3d9cc977658..afffca3652a 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py
@@ -15,8 +15,8 @@
 )
 from pydantic import parse_obj_as
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from settings_library.catalog import CatalogSettings
 from simcore_service_webserver.catalog.settings import get_plugin_settings
diff --git a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
index cc6b4b7ece4..8cbcfbdf739 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py
@@ -7,9 +7,9 @@
 from aiohttp.test_utils import TestClient
 from faker import Faker
 from models_library.projects import ProjectID
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups.py b/services/web/server/tests/unit/with_dbs/01/test_groups.py
index 0403e5e303b..f6e41225ff7 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_groups.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_groups.py
@@ -13,9 +13,9 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from faker import Faker
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser, NewUser, UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py
index 5caecca99f5..2f79fb3c402 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py
@@ -8,7 +8,7 @@
 import pytest
 from aiohttp import web_exceptions
 from aioresponses.core import aioresponses
-from pytest_simcore.helpers.utils_dict import ConfigDict
+from pytest_simcore.helpers.dict_tools import ConfigDict
 
 
 @pytest.fixture
diff --git a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py
index 4c5e3c108ab..4e3f10a9c4d 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py
@@ -7,8 +7,8 @@
 
 import pytest
 from aiohttp.test_utils import TestClient
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
diff --git a/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py b/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py
index 79d88422c58..403dfbc6a2e 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py
@@ -12,7 +12,7 @@
 import pytest
 import redis.asyncio as aioredis
 from aiohttp import web
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from servicelib.aiohttp.application import create_safe_application
 from servicelib.aiohttp.application_setup import is_setup_completed
 from simcore_service_webserver.application_settings import setup_settings
diff --git a/services/web/server/tests/unit/with_dbs/01/test_storage.py b/services/web/server/tests/unit/with_dbs/01/test_storage.py
index 1d645083aac..e6977b67c6d 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_storage.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_storage.py
@@ -7,7 +7,7 @@
 import pytest
 from aiohttp import web
 from faker import Faker
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
 from simcore_postgres_database.models.users import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py
index 7a311221770..4e776efc341 100644
--- a/services/web/server/tests/unit/with_dbs/02/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/02/conftest.py
@@ -26,11 +26,11 @@
 )
 from pydantic import parse_obj_as
 from pytest_mock import MockerFixture
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects
 from settings_library.catalog import CatalogSettings
 from simcore_service_webserver.application_settings import get_application_settings
 from simcore_service_webserver.catalog.settings import get_plugin_settings
diff --git a/services/web/server/tests/unit/with_dbs/02/test_announcements.py b/services/web/server/tests/unit/with_dbs/02/test_announcements.py
index a6a33c3ad62..cd87e2526c6 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_announcements.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_announcements.py
@@ -17,9 +17,9 @@
 from aiohttp.test_utils import TestClient
 from faker import Faker
 from pydantic import BaseModel, ValidationError
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
 from pytest_simcore.pydantic_models import iter_model_examples_in_module
 from servicelib.aiohttp import status
 from settings_library.redis import RedisDatabase, RedisSettings
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
index abe8611bf69..832bceb1f77 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
@@ -10,10 +10,10 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from pydantic import ByteSize, parse_obj_as
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     MockedStorageSubsystem,
     standard_role_response,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
index b4a29386231..23b654dc684 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
@@ -11,8 +11,8 @@
 import pytest
 import sqlalchemy as sa
 from aiohttp.test_utils import TestClient
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
 from servicelib.aiohttp import status
 from simcore_postgres_database.models.projects import projects
 from simcore_service_webserver._meta import api_version_prefix
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index 38f638692c5..482a4ced098 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -24,9 +24,9 @@
 from models_library.services import ServiceKey
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     MockedStorageSubsystem,
     standard_role_response,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py
index 6215ae57226..be67e5c5bc9 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py
@@ -15,8 +15,8 @@
 from models_library.api_schemas_webserver.projects import ProjectGet
 from models_library.projects import ProjectID
 from pydantic import parse_obj_as
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
 from servicelib.aiohttp.long_running_tasks.client import long_running_task_request
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects.models import ProjectDict
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
index f54537a6d0e..b177bcc16e0 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
@@ -21,9 +21,9 @@
 )
 from models_library.projects import ProjectID
 from models_library.projects_state import ProjectStatus
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     MockedStorageSubsystem,
     standard_role_response,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py
index 57c8317ebda..dae689d1974 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py
@@ -12,8 +12,8 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from aioresponses import aioresponses
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
index 77fefa326f8..b0a23fb420e 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
@@ -17,11 +17,11 @@
 from models_library.users import UserID
 from pydantic import BaseModel, PositiveInt
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_projects import create_project
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
+from pytest_simcore.helpers.webserver_projects import create_project
 from simcore_service_webserver._meta import api_version_prefix
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects.models import ProjectDict
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
index 756f60184a8..f522bc91f46 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
@@ -12,8 +12,8 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from pytest_mock.plugin import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from simcore_service_webserver._meta import api_version_prefix
 from simcore_service_webserver.db.models import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
index 66e9f97a534..7a7056d7bbc 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
@@ -19,9 +19,9 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from pydantic import parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     MockedStorageSubsystem,
     standard_user_role_response,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
index 9ceec731870..806115ed971 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
@@ -34,9 +34,9 @@
 )
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import NonNegativeFloat, NonNegativeInt, parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     MockedStorageSubsystem,
     standard_role_response,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
index 1706d21f688..7136a86e837 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
@@ -12,8 +12,8 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from pytest_mock.plugin import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from simcore_service_webserver._meta import api_version_prefix
 from simcore_service_webserver.db.models import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
index 869f3b78b5c..44188b708e5 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
@@ -14,7 +14,7 @@
     ServiceAccessRightsGet,
 )
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects.models import ProjectDict
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py
index 1a3e2ba3862..5806a4bbfe3 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py
@@ -21,8 +21,8 @@
 from pydantic import parse_obj_as
 from pytest_mock.plugin import MockerFixture
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
 from servicelib.aiohttp import status
 from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings
 from simcore_service_webserver._meta import api_version_prefix
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py
index b7e25ee3393..ae1b62e0558 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py
@@ -16,12 +16,12 @@
 from models_library.api_schemas_webserver.projects import ProjectGet
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import parse_obj_as
-from pytest_simcore.helpers.faker_webserver import (
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_fake_ports_data import (
     PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA,
 )
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
 from servicelib.aiohttp import status
 from servicelib.aiohttp.long_running_tasks.client import long_running_task_request
 from simcore_service_webserver.db.models import UserRole
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index d95fb1313e1..8613fbc8319 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -41,14 +41,14 @@
     ServiceResourcesDictHelpers,
 )
 from models_library.utils.fastapi_encoders import jsonable_encoder
+from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict, log_client_in
-from pytest_simcore.helpers.utils_projects import assert_get_same_project
-from pytest_simcore.helpers.utils_webserver_unit_with_db import (
+from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in
+from pytest_simcore.helpers.webserver_parametrizations import (
     ExpectedResponse,
     standard_role_response,
 )
+from pytest_simcore.helpers.webserver_projects import assert_get_same_project
 from servicelib.aiohttp import status
 from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
 from simcore_postgres_database.models.products import products
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
index 432eaff4e2c..4fdf9a88e42 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
@@ -13,8 +13,8 @@
 from aiohttp.test_utils import TestClient
 from models_library.api_schemas_webserver.wallets import WalletGet
 from pydantic import parse_obj_as
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
 from servicelib.aiohttp import status
 from simcore_postgres_database.models.wallets import wallets
 from simcore_service_webserver._meta import api_version_prefix
diff --git a/services/web/server/tests/unit/with_dbs/03/conftest.py b/services/web/server/tests/unit/with_dbs/03/conftest.py
index 851d48865a7..ec215a41e79 100644
--- a/services/web/server/tests/unit/with_dbs/03/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/conftest.py
@@ -14,7 +14,7 @@
 from aiopg.sa.connection import SAConnection
 from faker import Faker
 from models_library.products import ProductName
-from pytest_simcore.helpers.rawdata_fakers import random_product
+from pytest_simcore.helpers.faker_factories import random_product
 from simcore_postgres_database.models.products import products
 from simcore_postgres_database.models.products_prices import products_prices
 from simcore_postgres_database.models.user_preferences import user_preferences_frontend
diff --git a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py
index 27ad5fe04f5..2fe84ed67aa 100644
--- a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py
+++ b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py
@@ -25,11 +25,11 @@
 )
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_envs import setenvs_from_dict
-from pytest_simcore.helpers.utils_login import UserInfoDict
-from pytest_simcore.helpers.utils_projects import NewProject
-from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem
+from pytest_simcore.helpers.webserver_projects import NewProject
 from redis.asyncio import Redis
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application import create_safe_application
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
index 018e10fb9f1..84350848be5 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
@@ -22,7 +22,7 @@
 )
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from servicelib.aiohttp import status
 from simcore_service_webserver.application_settings import ApplicationSettings
 from simcore_service_webserver.invitations.settings import (
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
index 49822276e43..ad31dda87c3 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
@@ -9,7 +9,7 @@
 from models_library.api_schemas_invitations.invitations import ApiInvitationContent
 from pytest_mock import MockerFixture
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_login import NewUser
+from pytest_simcore.helpers.webserver_login import NewUser
 from simcore_service_webserver.groups.api import auto_add_user_to_product_group
 from simcore_service_webserver.invitations._client import (
     InvitationsServiceApi,
@@ -95,7 +95,7 @@ async def test_invalid_invitation_if_guest_is_already_registered_in_product(
 
     # user exists, and we skip product registration to do this test
     mocker.patch(
-        "pytest_simcore.helpers.utils_login.auto_add_user_to_product_group",
+        "pytest_simcore.helpers.webserver_login.auto_add_user_to_product_group",
         return_value=f"Mocked in {__file__}. SKIPPED auto_add_user_to_product_group",
         autospec=True,
     )
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
index 23caa38c36a..7fa3ee144a7 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
@@ -14,7 +14,7 @@
 from pydantic import HttpUrl
 from pytest_mock import MockerFixture
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.assert_checks import assert_status
 from servicelib.aiohttp import status
 from servicelib.rest_constants import X_PRODUCT_NAME_HEADER
 from simcore_service_webserver.invitations.api import generate_invitation
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py
index 822437e99d9..c3246184dd0 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py
@@ -17,9 +17,9 @@
 )
 from pydantic import PositiveInt
 from pytest_simcore.aioresponses_mocker import AioResponsesMock
-from pytest_simcore.helpers.rawdata_fakers import DEFAULT_TEST_PASSWORD
-from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import UserInfoDict
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.faker_factories import DEFAULT_TEST_PASSWORD
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
 from simcore_postgres_database.models.users import UserRole
 
diff --git a/services/web/server/tests/unit/with_dbs/03/login/conftest.py b/services/web/server/tests/unit/with_dbs/03/login/conftest.py
index 2cb46dfd091..167315facb4 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/conftest.py
@@ -11,8 +11,8 @@
 from aiohttp.test_utils import TestClient
 from faker import Faker
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict
-from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
+from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
 from simcore_postgres_database.models.users import users
 from simcore_postgres_database.models.wallets import wallets
 from simcore_service_webserver.login.settings import LoginOptions, get_plugin_options
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
index b21fb08c594..d6dc34bcdfe 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
@@ -14,9 +14,9 @@
 from faker import Faker
 from models_library.authentification import TwoFactorAuthentificationMethod
 from pytest_mock import MockerFixture
-from pytest_simcore.helpers.utils_assert import assert_status
-from
pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import NewUser, parse_link, parse_test_marks +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import NewUser, parse_link, parse_test_marks from servicelib.aiohttp import status from servicelib.utils_secrets import generate_passcode from simcore_postgres_database.models.products import ProductLoginSettingsDict, products diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa_resend.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa_resend.py index 19845ef83af..7139811a6b1 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa_resend.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa_resend.py @@ -8,9 +8,9 @@ from aiohttp.test_utils import TestClient from pydantic import parse_obj_as from pytest_mock import MockFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.products import ProductLoginSettingsDict, products from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py index 7785c186acd..c73020d0638 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py @@ -10,8 +10,8 @@ from aiohttp.test_utils import TestClient from cryptography import fernet from faker import Faker -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import NewUser from servicelib.aiohttp import status from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME from simcore_service_webserver._constants import APP_SETTINGS_KEY diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py index 14adbcefbae..77b6bbd0b0e 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py @@ -4,8 +4,8 @@ import pytest from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser, NewUser, parse_link +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, parse_link from servicelib.aiohttp import status from simcore_service_webserver._constants import INDEX_RESOURCE_NAME from simcore_service_webserver.login._constants import ( diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py 
b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py index adba9c7cecf..41f90807925 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py @@ -5,8 +5,8 @@ import pytest from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser from servicelib.aiohttp import status from servicelib.aiohttp.rest_responses import unwrap_envelope from simcore_service_webserver.login._constants import ( diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py index 31d4362a88a..910a7cb0b1b 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py @@ -3,8 +3,8 @@ # pylint: disable=unused-variable from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser from servicelib.aiohttp import status from simcore_service_webserver.login.storage import AsyncpgStorage diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py index bb0af10d859..61ade5ec24b 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py @@ -11,9 +11,9 @@ from faker import Faker from models_library.products import ProductName from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_error, assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import NewInvitation, NewUser, parse_link +from pytest_simcore.helpers.assert_checks import assert_error, assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import NewInvitation, NewUser, parse_link from servicelib.aiohttp import status from servicelib.aiohttp.rest_responses import unwrap_envelope from simcore_service_webserver.db.models import UserStatus diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py index 9ae32c0bf99..8e076f89513 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py @@ -13,8 +13,8 @@ from faker import Faker from models_library.api_schemas_webserver.auth import AccountRequestInfo from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp 
import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.login._constants import MSG_USER_DELETED diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py index 3d208e92c51..dd4a49a698a 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py @@ -7,8 +7,8 @@ import pytest from aiohttp.test_utils import TestClient, TestServer -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import NewUser, parse_link, parse_test_marks +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import NewUser, parse_link, parse_test_marks from servicelib.aiohttp import status from servicelib.utils_secrets import generate_password from simcore_service_webserver.db.models import ConfirmationAction, UserStatus diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py index f36e7ce7447..05bd373b15f 100644 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py +++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py @@ -13,9 +13,9 @@ from models_library.services_resources import ServiceResourcesDict from models_library.utils.json_serialization import json_dumps from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.simcore_webserver_projects_rest_api import ( NEW_PROJECT, REPLACE_PROJECT_ON_MODIFIED, diff --git a/services/web/server/tests/unit/with_dbs/03/products/conftest.py b/services/web/server/tests/unit/with_dbs/03/products/conftest.py index 7650497aaf9..99f086477a5 100644 --- a/services/web/server/tests/unit/with_dbs/03/products/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/products/conftest.py @@ -4,8 +4,8 @@ import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/03/products/test_products_handlers.py b/services/web/server/tests/unit/with_dbs/03/products/test_products_handlers.py index 95698dbe66b..a36fc493ad6 100644 --- a/services/web/server/tests/unit/with_dbs/03/products/test_products_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/products/test_products_handlers.py @@ -12,8 +12,8 @@ from aiohttp.test_utils import TestClient from models_library.api_schemas_webserver.product import GetProduct from models_library.products import ProductName -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from 
pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from simcore_postgres_database.constants import QUANTIZE_EXP_ARG diff --git a/services/web/server/tests/unit/with_dbs/03/products/test_products_rpc.py b/services/web/server/tests/unit/with_dbs/03/products/test_products_rpc.py index 0c513ccabef..3de1f7a95c8 100644 --- a/services/web/server/tests/unit/with_dbs/03/products/test_products_rpc.py +++ b/services/web/server/tests/unit/with_dbs/03/products/test_products_rpc.py @@ -12,9 +12,9 @@ from models_library.rabbitmq_basic_types import RPCMethodName from pydantic import parse_obj_as from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.users import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py index a763db35434..4dc9da94974 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py @@ -4,8 +4,8 @@ import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict API_VERSION = "v0" RESOURCE_NAME = "projects" diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py index e5f38cc05d8..6e67883e357 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py @@ -19,8 +19,8 @@ from models_library.resource_tracker import PricingPlanClassification from pydantic import parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py index ddf02b2aded..7b25e33a799 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py @@ -17,8 +17,8 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import parse_obj_as from pytest_simcore.aioresponses_mocker import AioResponsesMock -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from 
pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py index bc1fabfe5ab..6a80bccca0d 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from pydantic import AnyUrl, parse_obj_as from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py index e5fe848543b..33b9d9146f5 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py @@ -18,8 +18,8 @@ ServiceRunPage, ) from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py index 4bef4fa16c9..11708b59922 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py @@ -8,7 +8,7 @@ import pytest from aioresponses import aioresponses -from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects from servicelib.aiohttp.application import create_safe_application from simcore_service_webserver.application_settings import setup_settings from simcore_service_webserver.db.plugin import setup_db diff --git a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py index 926b982f55e..84c6647a4c5 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py @@ -20,10 +20,10 @@ RunningState, ) from models_library.utils.fastapi_encoders import jsonable_encoder -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict -from pytest_simcore.helpers.utils_projects import assert_get_same_project -from pytest_simcore.helpers.utils_tags import create_tag, delete_tag +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.postgres_tags import create_tag, delete_tag 
+from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_projects import assert_get_same_project from servicelib.aiohttp import status from simcore_postgres_database.models.tags import tags from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py index 2b4ad41b803..ebd268074ab 100644 --- a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py +++ b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py @@ -9,8 +9,8 @@ from aiohttp import web from faker import Faker from openapi_core.schema.specs.models import Spec as OpenApiSpecs +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_service_webserver.application import create_application from simcore_service_webserver.application_settings import get_application_settings diff --git a/services/web/server/tests/unit/with_dbs/03/test_email.py b/services/web/server/tests/unit/with_dbs/03/test_email.py index 619ce8d309c..bb1ef162001 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_email.py +++ b/services/web/server/tests/unit/with_dbs/03/test_email.py @@ -18,10 +18,10 @@ from faker import Faker from pydantic import ValidationError from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict, UserRole +from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole from servicelib.aiohttp import status from settings_library.email import EmailProtocol, SMTPSettings from simcore_service_webserver._meta import API_VTAG diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index ebf46bee580..f5586f08561 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -20,10 +20,10 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from psycopg2.errors import UniqueViolation +from pytest_simcore.helpers.dict_tools import copy_from_dict_ex +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_dict import copy_from_dict_ex -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict, log_client_in +from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in from servicelib.utils import logged_gather from simcore_postgres_database.models.projects import ProjectType, projects from simcore_postgres_database.models.projects_to_products import projects_to_products diff --git a/services/web/server/tests/unit/with_dbs/03/test_session.py b/services/web/server/tests/unit/with_dbs/03/test_session.py index 86e590d6441..3d92c32acd2 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_session.py +++ 
b/services/web/server/tests/unit/with_dbs/03/test_session.py @@ -13,9 +13,9 @@ from aiohttp.test_utils import TestClient from aiohttp_session.cookie_storage import EncryptedCookieStorage from cryptography.fernet import Fernet +from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_dict import ConfigDict -from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.webserver_login import NewUser from simcore_service_webserver.application import create_application from simcore_service_webserver.session.api import get_session from simcore_service_webserver.session.settings import SessionSettings diff --git a/services/web/server/tests/unit/with_dbs/03/test_socketio.py b/services/web/server/tests/unit/with_dbs/03/test_socketio.py index 8beab6a5619..05be09f7749 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_socketio.py +++ b/services/web/server/tests/unit/with_dbs/03/test_socketio.py @@ -7,10 +7,10 @@ import socketio from aiohttp.test_utils import TestClient, TestServer from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py b/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py index 5031c483b60..b047fd2c9a7 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py @@ -16,9 +16,9 @@ ) from pydantic import AnyUrl, ByteSize, parse_obj_as from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp.rest_responses import wrap_as_envelope from simcore_postgres_database.models.users import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/test_users.py b/services/web/server/tests/unit/with_dbs/03/test_users.py index 4139eaa4ed5..dac6f6696c4 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users.py @@ -20,13 +20,13 @@ from models_library.api_schemas_webserver.auth import AccountRequestInfo from models_library.generics import Envelope from psycopg2 import OperationalError -from pytest_simcore.helpers.rawdata_fakers import ( +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.faker_factories import ( DEFAULT_TEST_PASSWORD, random_pre_registration_details, ) -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import 
UserInfoDict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_postgres_database.models.users import UserRole, UserStatus diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py index f359a0c2f49..77aaccade51 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py @@ -19,9 +19,9 @@ from aiohttp.test_utils import TestClient from models_library.products import ProductName from pydantic import parse_obj_as -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.redis import get_redis_user_notifications_client @@ -92,14 +92,11 @@ async def _create_notifications( redis_client: aioredis.Redis, logged_user: UserInfoDict, product_name: ProductName, - count: int + count: int, ) -> AsyncIterator[list[UserNotification]]: user_notifications: list[UserNotification] = [ - _create_notification( - logged_user=logged_user, - product_name=product_name - ) + _create_notification(logged_user=logged_user, product_name=product_name) for _ in range(count) ] @@ -312,7 +309,7 @@ async def test_create_user_notification_per_product( logged_user=logged_user, product_name="s4l", count=n_notifications_per_product, - ) as _ + ) as _, ): user_id = logged_user["id"] diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py index f52cc497e7c..3835883af8b 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py @@ -17,8 +17,8 @@ from models_library.users import UserID from pydantic import BaseModel from pydantic.fields import ModelField -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import NewUser from simcore_postgres_database.models.groups_extra_properties import ( groups_extra_properties, ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py index be8773b7cd6..73910d7a2c1 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py @@ -13,9 +13,9 @@ from faker import Faker from models_library.user_preferences import FrontendUserPreference, PreferenceIdentifier from models_library.users import UserID -from pytest_simcore.helpers.utils_assert import assert_status -from 
pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole, UserStatus from simcore_service_webserver.users._preferences_models import ALL_FRONTEND_PREFERENCES diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py index e049bffad1b..bdfe1af8d81 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py @@ -13,7 +13,7 @@ PreferenceIdentifier, PreferenceName, ) -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_webserver._constants import APP_SETTINGS_KEY from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.users._preferences_models import ( diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py index a2cbc2a7486..315f4884bc0 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py @@ -13,10 +13,10 @@ import pytest from aiohttp.test_utils import TestClient from faker import Faker -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict -from pytest_simcore.helpers.utils_tokens import ( +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_tokens import ( create_token_in_db, delete_all_tokens_from_db, get_token_from_db, diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_api.py b/services/web/server/tests/unit/with_dbs/03/test_users_api.py index 5fa3d77ab20..28b70592ce8 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_api.py @@ -7,9 +7,9 @@ import pytest from aiohttp.test_utils import TestClient from faker import Faker -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_login import NewUser from servicelib.aiohttp import status from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY from simcore_postgres_database.models.users import UserStatus diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py index 3419d4ac281..634f1d7ca74 100644 --- 
a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py @@ -3,9 +3,10 @@ # pylint: disable=unused-variable import logging +from collections.abc import AsyncIterator, Awaitable, Callable from copy import deepcopy from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable +from typing import Any from unittest import mock from uuid import UUID @@ -19,9 +20,9 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockerFixture -from pytest_simcore.helpers.rawdata_fakers import random_project -from pytest_simcore.helpers.utils_login import UserInfoDict -from pytest_simcore.helpers.utils_projects import NewProject +from pytest_simcore.helpers.faker_factories import random_project +from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_projects import NewProject from servicelib.aiohttp import status from simcore_postgres_database.models.projects_version_control import ( projects_vc_repos, diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py index ad2d74ced1c..05ab31ccdf8 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py @@ -13,7 +13,7 @@ from models_library.rest_pagination import Page from models_library.users import UserID from pydantic.main import BaseModel -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import status from simcore_service_webserver._meta import API_VTAG as VX from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py b/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py index 25c73d9f005..b5717c5998a 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/conftest.py @@ -10,9 +10,9 @@ import pytest import sqlalchemy as sa from aioresponses import aioresponses +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py index a675dbaecb8..2b59b77c3b5 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py @@ -30,8 +30,8 @@ from models_library.wallets import WalletID from pydantic import EmailStr, HttpUrl from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import UserInfoDict +from 
pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.users_details import ( diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py index c87de0b0273..ed8b2868481 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py @@ -19,8 +19,8 @@ from models_library.rest_pagination import Page from pydantic import parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser, NewUser, UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py index eed6244b26a..0980e45caa2 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py @@ -24,7 +24,7 @@ from models_library.wallets import WalletID from pydantic import parse_obj_as from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import status from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState from simcore_service_webserver.payments._methods_api import ( diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_rpc.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_rpc.py index b512e19b60f..756c008adba 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_rpc.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_rpc.py @@ -13,9 +13,9 @@ from models_library.payments import InvoiceDataGet from models_library.rabbitmq_basic_types import RPCMethodName from pydantic import parse_obj_as +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_envs import setenvs_from_dict -from pytest_simcore.helpers.utils_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.rabbit import RabbitSettings from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets.py b/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets.py index 22918c65753..b5ddcaf6f31 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets.py @@ -23,8 +23,8 @@ ) from models_library.products import ProductName 
from pytest_mock import MockerFixture -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.login.utils import notify_user_confirmation diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets_groups.py b/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets_groups.py index 2264838643c..cf47474daaa 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets_groups.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/test_wallets_groups.py @@ -9,8 +9,8 @@ import pytest from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index b42d20f62e1..c4b5807d58f 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -40,11 +40,11 @@ from models_library.services_enums import ServiceState from pydantic import ByteSize, parse_obj_as from pytest_mock import MockerFixture +from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.utils_dict import ConfigDict -from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict -from pytest_simcore.helpers.utils_projects import NewProject -from pytest_simcore.helpers.utils_webserver_unit_with_db import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_projects import NewProject from redis import Redis from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY from servicelib.aiohttp.long_running_tasks.client import LRTask diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index afee65541f5..a278ba9166f 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -22,8 +22,8 @@ from playwright.sync_api._generated import Playwright from pydantic import AnyUrl, TypeAdapter from pytest import Item -from pytest_simcore.logging_utils import log_context -from pytest_simcore.playwright_utils import ( +from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.playwright import ( MINUTE, AutoRegisteredUser, RunningState, diff --git a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py index f5fc42baff7..7a56b89e594 100644 --- a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py +++ b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py @@ -14,8 +14,8 @@ from playwright.sync_api import Page, WebSocket from pydantic import ByteSize 
-from pytest_simcore.logging_utils import log_context -from pytest_simcore.playwright_utils import MINUTE, SECOND, ServiceType +from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.playwright import MINUTE, SECOND, ServiceType _WAITING_FOR_SERVICE_TO_START: Final[int] = ( 10 * MINUTE diff --git a/tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py b/tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py index d09ad5880ae..88092c91bb6 100644 --- a/tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py +++ b/tests/e2e-playwright/tests/resource_usage_tracker/test_resource_usage_tracker.py @@ -14,7 +14,7 @@ import pytest from playwright.sync_api import APIRequestContext from pydantic import AnyUrl -from pytest_simcore.logging_utils import test_logger +from pytest_simcore.helpers.logging import test_logger from tenacity import Retrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay diff --git a/tests/e2e-playwright/tests/sim4life/test_sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py index 7a27a771466..d60ec45d083 100644 --- a/tests/e2e-playwright/tests/sim4life/test_sim4life.py +++ b/tests/e2e-playwright/tests/sim4life/test_sim4life.py @@ -13,8 +13,8 @@ from playwright.sync_api import APIRequestContext, Page from pydantic import AnyUrl -from pytest_simcore.logging_utils import test_logger -from pytest_simcore.playwright_utils import on_web_socket_default_handler +from pytest_simcore.helpers.logging import test_logger +from pytest_simcore.helpers.playwright import on_web_socket_default_handler from tenacity import Retrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_attempt diff --git a/tests/e2e-playwright/tests/sleepers/test_sleepers.py b/tests/e2e-playwright/tests/sleepers/test_sleepers.py index fb85309eb72..4e7ac4d3756 100644 --- a/tests/e2e-playwright/tests/sleepers/test_sleepers.py +++ b/tests/e2e-playwright/tests/sleepers/test_sleepers.py @@ -17,8 +17,8 @@ from packaging.version import Version from packaging.version import parse as parse_version from playwright.sync_api import Page, WebSocket -from pytest_simcore.logging_utils import ContextMessages, log_context, test_logger -from pytest_simcore.playwright_utils import ( +from pytest_simcore.helpers.logging import ContextMessages, log_context, test_logger +from pytest_simcore.helpers.playwright import ( MINUTE, RunningState, ServiceType, diff --git a/tests/e2e-playwright/tests/tip/conftest.py b/tests/e2e-playwright/tests/tip/conftest.py index f7e3b928520..a9e48ff45b4 100644 --- a/tests/e2e-playwright/tests/tip/conftest.py +++ b/tests/e2e-playwright/tests/tip/conftest.py @@ -5,8 +5,8 @@ import pytest from playwright.sync_api import Page -from pytest_simcore.logging_utils import log_context -from pytest_simcore.playwright_utils import RunningState +from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.playwright import RunningState @pytest.fixture diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index 49b18a4dc39..7b09ec85dac 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -14,8 +14,8 @@ from typing import Any, Final from playwright.sync_api import Page, WebSocket -from pytest_simcore.logging_utils import log_context -from pytest_simcore.playwright_utils import ( +from 
pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.playwright import ( MINUTE, SECOND, app_mode_trigger_next_app, diff --git a/tests/public-api/conftest.py b/tests/public-api/conftest.py index 5226461c28e..935d63a18a8 100644 --- a/tests/public-api/conftest.py +++ b/tests/public-api/conftest.py @@ -9,15 +9,15 @@ import logging import os import time +from collections.abc import Callable, Iterable, Iterator from pprint import pformat -from typing import Callable, Iterable, Iterator import httpx import osparc import pytest +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from pytest_simcore.helpers.typing_docker import UrlStr -from pytest_simcore.helpers.utils_envs import EnvVarsDict -from pytest_simcore.helpers.utils_public_api import ( +from pytest_simcore.helpers.typing_public_api import ( RegisteredUserDict, ServiceInfoDict, ServiceNameStr, @@ -38,12 +38,10 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", - "pytest_simcore.monkeypatch_extra", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", "pytest_simcore.schemas", "pytest_simcore.simcore_services", - "pytest_simcore.tmp_path_extra", ] @@ -196,7 +194,7 @@ def services_registry( "description": "A service which awaits for time to pass, two times.", "inputs": { "input_1": { - "description": "Pick a file containing only one " "integer", + "description": "Pick a file containing only one integer", "displayOrder": 1, "fileToKeyMap": {"single_number.txt": "input_1"}, "label": "File with int number", @@ -220,7 +218,7 @@ def services_registry( }, "input_4": { "defaultValue": 0, - "description": "It will first walk the distance to " "bed", + "description": "It will first walk the distance to bed", "displayOrder": 4, "label": "Distance to bed", "type": "integer", @@ -239,7 +237,7 @@ def services_registry( "type": "data:text/plain", }, "output_2": { - "description": "Interval is generated in range " "[1-9]", + "description": "Interval is generated in range [1-9]", "displayOrder": 2, "label": "Random sleep interval", "type": "integer", diff --git a/tests/public-api/test_solvers_api.py b/tests/public-api/test_solvers_api.py index 4d6f3f78541..9698b4c327b 100644 --- a/tests/public-api/test_solvers_api.py +++ b/tests/public-api/test_solvers_api.py @@ -11,7 +11,7 @@ import osparc import pytest from packaging.version import parse as parse_version -from pytest_simcore.helpers.utils_public_api import ServiceInfoDict, ServiceNameStr +from pytest_simcore.helpers.typing_public_api import ServiceInfoDict, ServiceNameStr class NameTagTuple(NamedTuple): diff --git a/tests/public-api/test_solvers_jobs_api.py b/tests/public-api/test_solvers_jobs_api.py index e861f30c982..ddd6445f11e 100644 --- a/tests/public-api/test_solvers_jobs_api.py +++ b/tests/public-api/test_solvers_jobs_api.py @@ -19,7 +19,7 @@ import osparc import pytest -from pytest_simcore.helpers.utils_public_api import ServiceInfoDict, ServiceNameStr +from pytest_simcore.helpers.typing_public_api import ServiceInfoDict, ServiceNameStr osparc_VERSION = tuple(map(int, osparc.__version__.split("."))) assert osparc_VERSION >= (0, 4, 3) diff --git a/tests/public-api/test_users_api.py b/tests/public-api/test_users_api.py index 82fae651f55..0b953ddf182 100644 --- a/tests/public-api/test_users_api.py +++ b/tests/public-api/test_users_api.py @@ -8,7 +8,7 @@ import osparc import pytest -from pytest_simcore.helpers.utils_public_api import RegisteredUserDict +from 
pytest_simcore.helpers.typing_public_api import RegisteredUserDict @pytest.fixture(scope="module") diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index 687157f9381..c295e328fc8 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -10,7 +10,7 @@ from docker import DockerClient from docker.models.services import Service from pytest_simcore.docker_swarm import assert_service_is_running -from pytest_simcore.helpers.utils_environs import EnvVarsDict +from pytest_simcore.helpers.deprecated_environs import EnvVarsDict from tenacity import Retrying from tenacity.before_sleep import before_sleep_log from tenacity.stop import stop_after_delay @@ -25,7 +25,6 @@ "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", "pytest_simcore.simcore_webserver_service", - "pytest_simcore.tmp_path_extra", "pytest_simcore.traefik_service", ] From 9d7e144aa0abe28521ef0fee6f420aa632a7b6b9 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:34:30 +0200 Subject: [PATCH 062/219] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20TIP=20v3=20?= =?UTF-8?q?Feedback=20(#5980)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../static-webserver/client/Manifest.json | 2 +- services/static-webserver/client/qx-lock.json | 6 +- .../source/class/osparc/auth/ui/LoginView.js | 22 +++ .../class/osparc/auth/ui/RequestAccount.js | 155 ++++++++++-------- .../class/osparc/dashboard/GridButtonBase.js | 3 +- .../class/osparc/dashboard/NewStudies.js | 31 +++- .../desktop/credits/CreditsIndicatorButton.js | 100 +++++++++++ .../desktop/credits/CreditsNavBarContainer.js | 83 ++++++++++ .../class/osparc/desktop/credits/Utils.js | 12 ++ .../desktop/credits/WalletsMiniViewer.js | 95 ----------- .../class/osparc/navigation/NavigationBar.js | 14 +- .../source/class/osparc/theme/Appearance.js | 10 ++ .../source/class/osparc/theme/Decoration.js | 7 + .../source/resource/osparc/new_studies.json | 9 +- 14 files changed, 375 insertions(+), 174 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js create mode 100644 services/static-webserver/client/source/class/osparc/desktop/credits/CreditsNavBarContainer.js delete mode 100644 services/static-webserver/client/source/class/osparc/desktop/credits/WalletsMiniViewer.js diff --git a/services/static-webserver/client/Manifest.json b/services/static-webserver/client/Manifest.json index 80463514298..21d82cd8cd3 100644 --- a/services/static-webserver/client/Manifest.json +++ b/services/static-webserver/client/Manifest.json @@ -41,7 +41,7 @@ "requires": { "@qooxdoo/compiler": "^1.0.0-beta", "@qooxdoo/framework": "^6.0.0-beta", - "ITISFoundation/qx-iconfont-fontawesome5": "^0.2.0", + "ITISFoundation/qx-iconfont-fontawesome5": "^1.0.0", "ITISFoundation/qx-osparc-theme": "^0.5.6", "qooxdoo/qxl.testtapper": "^0.4.3", "qooxdoo/qxl.apiviewer": "^1.0.0-beta", diff --git a/services/static-webserver/client/qx-lock.json b/services/static-webserver/client/qx-lock.json index d52dff39e77..6b9be87bd53 100644 --- a/services/static-webserver/client/qx-lock.json +++ b/services/static-webserver/client/qx-lock.json @@ -2,11 +2,11 @@ "libraries": [ { "library_name": "qx-iconfont-fontawesome5", - "library_version": "0.2.0", - "path": "qx_packages/ITISFoundation_qx-iconfont-fontawesome5_v0_2_0", + "library_version": "1.0.0", + "path": "qx_packages/ITISFoundation_qx-iconfont-fontawesome5_v1_0_0", 
"uri": "ITISFoundation/qx-iconfont-fontawesome5", "repo_name": "ITISFoundation/qx-iconfont-fontawesome5", - "repo_tag": "v0.2.0" + "repo_tag": "v1.0.0" }, { "library_name": "qx-osparc-theme", diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 404e7e1b3cd..c3ce4587b27 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -132,6 +132,28 @@ qx.Class.define("osparc.auth.ui.LoginView", { `; const disclaimer = osparc.announcement.AnnouncementUIFactory.createLoginAnnouncement(this.tr("Disclaimer"), text); this.add(disclaimer); + + this.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + + const poweredByLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ + alignX: "center", + allowGrowX: false, + cursor: "pointer" + }); + poweredByLayout.addListener("tap", () => window.open("https://sim4life.swiss/")); + const label = new qx.ui.basic.Label(this.tr("powered by")); + poweredByLayout.add(label); + const s4lLogo = new qx.ui.basic.Image("osparc/Sim4Life_full_logo_white.svg"); + s4lLogo.set({ + width: osparc.auth.LoginPage.LOGO_WIDTH/2, + height: osparc.auth.LoginPage.LOGO_HEIGHT/2, + scale: true, + alignX: "center" + }); + poweredByLayout.add(s4lLogo); + this.add(poweredByLayout); } }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js index d1f68963fcf..13b022b26d1 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js @@ -52,40 +52,48 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { }); this._form.add(lastName, this.tr("Last Name"), null, "lastName"); + const email = new qx.ui.form.TextField().set({ required: true }); - if ( - osparc.product.Utils.isProduct("s4lacad") || - osparc.product.Utils.isProduct("s4ldesktopacad") - ) { - this._form.add(email, this.tr("University Email"), qx.util.Validate.email(), "email"); - } else { - this._form.add(email, this.tr("Email"), qx.util.Validate.email(), "email"); + switch (osparc.product.Utils.getProductName()) { + case "s4l": + case "tis": + this._form.add(email, this.tr("Email"), qx.util.Validate.email(), "email"); + break; + case "s4lacad": + case "s4ldesktopacad": + this._form.add(email, this.tr("University Email"), qx.util.Validate.email(), "email"); + break; } const phone = new qx.ui.form.TextField(); this._form.add(phone, this.tr("Phone Number"), null, "phone"); - if ( - osparc.product.Utils.isProduct("s4lacad") || - osparc.product.Utils.isProduct("s4ldesktopacad") - ) { - const university = new qx.ui.form.TextField(); - doubleSpaced.push(university); - this._form.add(university, this.tr("University"), null, "university"); - } else { - const company = new qx.ui.form.TextField(); - doubleSpaced.push(company); - this._form.add(company, this.tr("Company Name"), null, "company"); + + const organization = new qx.ui.form.TextField(); + doubleSpaced.push(organization); + switch (osparc.product.Utils.getProductName()) { + case "s4l": + this._form.add(organization, this.tr("Company Name"), null, "company"); + break; + case "s4lacad": + case "s4ldesktopacad": + this._form.add(organization, this.tr("University"), null, "university"); + break; + case "tis": + 
this._form.add(organization, this.tr("Organization"), null, "organization"); + break; } + const address = new qx.ui.form.TextField().set({ required: true }); doubleSpaced.push(address); this._form.add(address, this.tr("Address"), null, "address"); + const city = new qx.ui.form.TextField().set({ required: true }); @@ -96,6 +104,7 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { }); this._form.add(postalCode, this.tr("Postal code"), null, "postalCode"); + const country = new qx.ui.form.SelectBox().set({ required: true }); @@ -117,58 +126,67 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { }); this._form.add(country, this.tr("Country"), null, "country"); - const application = new qx.ui.form.SelectBox(); - [{ - id: "Antenna_Design_for_Wireless_Communication", - label: "Antenna Design for Wireless Communication" - }, { - id: "Bioelectronics,_Electroceuticals_and_Neuroprosthetics", - label: "Bioelectronics, Electroceuticals & Neuroprosthetics" - }, { - id: "Safety_and_Efficacy_Assessment", - label: "Safety & Efficacy Assessment" - }, { - id: "Exposure_and_Compliance", - label: "Exposure & Compliance" - }, { - id: "Focused_Ultrasound", - label: "Focused Ultrasound" - }, { - id: "In_Silico_Trials", - label: "In Silico Trials" - }, { - id: "Implant_Design", - label: "Implant Design" - }, { - id: "Magnetic_Resonance_Imaging", - label: "Magnetic Resonance Imaging" - }, { - id: "Neurostimulation", - label: "Neurostimulation" - }, { - id: "Personalized_Medicine", - label: "Personalized Medicine" - }, { - id: "Thermal_Therapies", - label: "Thermal Therapies" - }, { - id: "Wireless_Power_Transfer_Systems", - label: "Wireless Power Transfer Systems" - }, { - id: "Vascular_Flow_and_Perfusion", - label: "Vascular Flow & Perfusion" - }].forEach(appData => { - const lItem = new qx.ui.form.ListItem(appData.label, null, appData.id).set({ - rich: true + + if ( + osparc.product.Utils.isProduct("s4l") || + osparc.product.Utils.isProduct("s4lacad") || + osparc.product.Utils.isProduct("s4ldesktopacad") + ) { + const application = new qx.ui.form.SelectBox(); + [{ + id: "Antenna_Design_for_Wireless_Communication", + label: "Antenna Design for Wireless Communication" + }, { + id: "Bioelectronics,_Electroceuticals_and_Neuroprosthetics", + label: "Bioelectronics, Electroceuticals & Neuroprosthetics" + }, { + id: "Safety_and_Efficacy_Assessment", + label: "Safety & Efficacy Assessment" + }, { + id: "Exposure_and_Compliance", + label: "Exposure & Compliance" + }, { + id: "Focused_Ultrasound", + label: "Focused Ultrasound" + }, { + id: "In_Silico_Trials", + label: "In Silico Trials" + }, { + id: "Implant_Design", + label: "Implant Design" + }, { + id: "Magnetic_Resonance_Imaging", + label: "Magnetic Resonance Imaging" + }, { + id: "Neurostimulation", + label: "Neurostimulation" + }, { + id: "Personalized_Medicine", + label: "Personalized Medicine" + }, { + id: "Thermal_Therapies", + label: "Thermal Therapies" + }, { + id: "Wireless_Power_Transfer_Systems", + label: "Wireless Power Transfer Systems" + }, { + id: "Vascular_Flow_and_Perfusion", + label: "Vascular Flow & Perfusion" + }].forEach(appData => { + const lItem = new qx.ui.form.ListItem(appData.label, null, appData.id).set({ + rich: true + }); + application.add(lItem); }); - application.add(lItem); - }); - doubleSpaced.push(application); - this._form.add(application, this.tr("Application"), null, "application"); + doubleSpaced.push(application); + this._form.add(application, this.tr("Application"), null, "application"); + + + const description = new 
qx.ui.form.TextField(); + doubleSpaced.push(description); + this._form.add(description, this.tr("Description"), null, "description"); + } - const description = new qx.ui.form.TextField(); - doubleSpaced.push(description); - this._form.add(description, this.tr("Description"), null, "description"); const hear = new qx.ui.form.SelectBox(); [{ @@ -193,6 +211,7 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { doubleSpaced.push(hear); this._form.add(hear, this.tr("How did you hear about us?"), null, "hear"); + // accept links // Privacy Policy link let ppLink = osparc.CookiePolicy.getS4LPrivacyPolicyLink("our privacy policy"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js index e29291e8715..61292dd7171 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js @@ -48,6 +48,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { SPACING: 15, // TITLE_MAX_HEIGHT: 34, // two lines in Roboto TITLE_MAX_HEIGHT: 40, // two lines in Manrope + ICON_SIZE: 50, POS: { TITLE: { row: 0, @@ -287,7 +288,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { // overridden _applyIcon: function(value, old) { if (value.includes("@FontAwesome5Solid/")) { - value += "50"; + value += this.self().ICON_SIZE; const image = this.getChildControl("icon").getChildControl("image"); image.set({ source: value diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js index 9651b75189b..9fc4f8441ab 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js @@ -140,12 +140,41 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, __createCard: function(templateInfo) { + const newStudyClicked = () => this.fireDataEvent("newStudyClicked", templateInfo); + const title = templateInfo.title; const desc = templateInfo.description; const newPlanButton = new osparc.dashboard.GridButtonNew(title, desc); newPlanButton.setCardKey(templateInfo.idToWidget); osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo.idToWidget); - newPlanButton.addListener("execute", () => this.fireDataEvent("newStudyClicked", templateInfo)) + if (templateInfo.billable) { + osparc.desktop.credits.Utils.setCreditsIconToButton(newPlanButton); + newPlanButton.addListener("execute", () => { + const store = osparc.store.Store.getInstance(); + const credits = store.getContextWallet().getCreditsAvailable() + const preferencesSettings = osparc.Preferences.getInstance(); + const warningThreshold = preferencesSettings.getCreditsWarningThreshold(); + if (credits <= warningThreshold) { + const msg = this.tr("This Plan requires Credits to run Sim4Life powered simulations. 
You can top up in the Billing Center."); + const win = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Credits required"), + confirmText: this.tr("Start, I'll get them later"), + confirmAction: "create" + }); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + this.fireDataEvent("newStudyClicked", templateInfo); + } + }); + } else { + newStudyClicked(); + } + }); + } else { + newPlanButton.addListener("execute", () => newStudyClicked()); + } return newPlanButton; }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js new file mode 100644 index 00000000000..e331547fbca --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js @@ -0,0 +1,100 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { + extend: qx.ui.form.Button, + + construct: function() { + this.base(arguments); + + this.set({ + backgroundColor: "transparent" + }); + + const store = osparc.store.Store.getInstance(); + store.bind("contextWallet", this, "wallet"); + + this.__creditsContainer = new osparc.desktop.credits.CreditsNavBarContainer(); + this.__creditsContainer.exclude(); + + this.addListener("tap", this.__buttonTapped, this); + }, + + properties: { + wallet: { + check: "osparc.data.model.Wallet", + init: null, + nullable: true, + event: "changeWallet", + apply: "__applyWallet" + } + }, + + members: { + __creditsContainer: null, + __tappedOut: null, + + __applyWallet: function() { + osparc.desktop.credits.Utils.setCreditsIconToButton(this); + }, + + __buttonTapped: function() { + if (this.__tappedOut) { + this.__tappedOut = false; + return; + } + this.__showCreditsContainer(); + }, + + __showCreditsContainer: function() { + const tapListener = event => { + // If the tap landed on the credits container, propagate the event so it can be handled there + if (osparc.utils.Utils.isMouseOnElement(this.__creditsContainer, event)) { + return; + } + // I somehow can't stop the propagation of the event so workaround: + // If the user tapped on the button itself we don't want to show the container again + if (osparc.utils.Utils.isMouseOnElement(this, event)) { + this.__tappedOut = true; + } + this.__hideCreditsContainer(); + document.removeEventListener("mousedown", tapListener, this); + }; + + const bounds = this.getBounds(); + const cel = this.getContentElement(); + if (cel) { + const domeEle = cel.getDomElement(); + if (domeEle) { + const rect = domeEle.getBoundingClientRect(); + bounds.left = parseInt(rect.x); + bounds.top = parseInt(rect.y); + } + } + const bottom = bounds.top+bounds.height; + const right = bounds.left+bounds.width; + this.__creditsContainer.setPosition(right, bottom); + this.__creditsContainer.show(); + + document.addEventListener("mousedown", tapListener, this); + }, + + __hideCreditsContainer: function() { + this.__creditsContainer.exclude(); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsNavBarContainer.js 
b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsNavBarContainer.js new file mode 100644 index 00000000000..4bc6eceb91f --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsNavBarContainer.js @@ -0,0 +1,83 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.desktop.credits.CreditsNavBarContainer", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.Grow()); + + this.set({ + appearance: "floating-menu", + padding: 8, + maxWidth: this.self().WIDTH + }); + osparc.utils.Utils.setIdToWidget(this, "creditsNavBarContainer"); + + const layout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + + const creditsIndicator = new osparc.desktop.credits.CreditsIndicator(); + const store = osparc.store.Store.getInstance(); + store.bind("contextWallet", creditsIndicator, "wallet"); + layout.add(creditsIndicator, { + flex: 1 + }); + + const buttonSize = 26; + const billingCenterButton = new qx.ui.form.Button().set({ + appearance: "form-button-outlined", + width: buttonSize, + height: buttonSize, + alignX: "center", + alignY: "middle", + center: true, + icon: "@FontAwesome5Solid/ellipsis-v/12" + }); + // make it circular + billingCenterButton.getContentElement().setStyles({ + "border-radius": `${buttonSize / 2}px` + }); + billingCenterButton.addListener("execute", () => { + osparc.desktop.credits.BillingCenterWindow.openWindow(); + this.exclude(); + }); + layout.add(billingCenterButton); + + this._add(layout); + + const root = qx.core.Init.getApplication().getRoot(); + root.add(this, { + top: 0, + right: 0 + }); + }, + + statics: { + WIDTH: 200 + }, + + members: { + setPosition: function(x, y) { + this.setLayoutProperties({ + left: x - this.self().WIDTH, + top: y + }); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js index 3a207e6c6b6..fc9539e6213 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js @@ -20,12 +20,24 @@ qx.Class.define("osparc.desktop.credits.Utils", { statics: { DANGER_ZONE: 25, // one hour consumption + CREDITS_ICON: "@FontAwesome5Solid/database/", areWalletsEnabled: function() { const statics = osparc.store.Store.getInstance().get("statics"); return Boolean(statics && statics["isPaymentEnabled"]); }, + setCreditsIconToButton: function(button) { + button.setIcon(osparc.desktop.credits.Utils.CREDITS_ICON); + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (contextWallet) { + contextWallet.bind("creditsAvailable", button, "textColor", { + converter: c => osparc.desktop.credits.Utils.creditsToColor(c, "strong-main") + }); + } + }, + getNoWriteAccessInformationLabel: function() { return new qx.ui.basic.Label().set({ value: qx.locale.Manager.tr("You can't access this information"), diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/WalletsMiniViewer.js 
b/services/static-webserver/client/source/class/osparc/desktop/credits/WalletsMiniViewer.js deleted file mode 100644 index ca9e3077736..00000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/WalletsMiniViewer.js +++ /dev/null @@ -1,95 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.desktop.credits.WalletsMiniViewer", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - this._setLayout(new qx.ui.layout.VBox(2)); - - osparc.utils.Utils.setIdToWidget(this, "walletsMiniViewer"); - - this.set({ - alignX: "center", - margin: 6, - marginRight: 20, - cursor: "pointer" - }); - - this.__buildLayout(); - }, - - properties: { - contextWallet: { - check: "osparc.data.model.Wallet", - init: null, - nullable: true, - apply: "__reloadLayout" - } - }, - - members: { - __buildLayout: function() { - const store = osparc.store.Store.getInstance(); - // there is a bug with the binding the second time a user logs in - store.bind("contextWallet", this, "contextWallet"); - }, - - __reloadLayout: function() { - this._removeAll(); - const contextWallet = this.getContextWallet(); - if (contextWallet) { - this.__showOneWallet(contextWallet); - } else { - this.__showSelectWallet(); - } - }, - - __showOneWallet: function(wallet) { - const creditsIndicator = new osparc.desktop.credits.CreditsIndicator(wallet); - creditsIndicator.addListener("tap", () => { - const walletsEnabled = osparc.desktop.credits.Utils.areWalletsEnabled(); - if (walletsEnabled) { - osparc.desktop.credits.BillingCenterWindow.openWindow(); - } - }, this); - this._add(creditsIndicator, { - flex: 1 - }); - }, - - __showSelectWallet: function() { - const iconSrc = "@MaterialIcons/account_balance_wallet/26"; - const walletsButton = new qx.ui.basic.Image(iconSrc).set({ - toolTipText: this.tr("Select Credit Account"), - textColor: "danger-red" - }); - walletsButton.addListener("tap", () => { - const walletsEnabled = osparc.desktop.credits.Utils.areWalletsEnabled(); - if (walletsEnabled) { - const billingCenterWindow = osparc.desktop.credits.BillingCenterWindow.openWindow(); - billingCenterWindow.openWallets(); - } - }, this); - this._add(walletsButton, { - flex: 1 - }); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js index f28868f302d..db55016a71e 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js +++ b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js @@ -138,7 +138,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { this.getChildControl("expiration-icon"); this.getChildControl("help"); if (osparc.desktop.credits.Utils.areWalletsEnabled()) { - this.getChildControl("credits-menu-button"); + this.getChildControl("credits-button"); } this.getChildControl("log-in-button"); this.getChildControl("user-menu"); @@ -235,10 +235,20 @@ qx.Class.define("osparc.navigation.NavigationBar", { currentUsage, maxHeight: this.self().HEIGHT }); - osparc.utils.Utils.setIdToWidget(control, "creditsNavigationBtn"); 
this.getChildControl("right-items").add(control); break; } + case "credits-button": + control = new osparc.desktop.credits.CreditsIndicatorButton().set({ + maxHeight: 32 + }); + control.getChildControl("icon").set({ + maxHeight: 24, + scale: true + }); + osparc.utils.Utils.setIdToWidget(control, "creditsNavigationBtn"); + this.getChildControl("right-items").add(control); + break; case "tasks-button": control = new osparc.task.TasksButton(); this.getChildControl("right-items").add(control); diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js index e72b2cce9a7..ebf773d2934 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js +++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js @@ -177,6 +177,16 @@ qx.Theme.define("osparc.theme.Appearance", { "none": {}, + "floating-menu": { + style: function() { + return { + backgroundColor: "background-main", + padding: 4, + decorator: "border-simple" + } + } + }, + /* --------------------------------------------------------------------------- WINDOW-SMALL-CAP CHOOSER diff --git a/services/static-webserver/client/source/class/osparc/theme/Decoration.js b/services/static-webserver/client/source/class/osparc/theme/Decoration.js index 6cda4485919..12c85bc3ec6 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Decoration.js +++ b/services/static-webserver/client/source/class/osparc/theme/Decoration.js @@ -236,6 +236,13 @@ qx.Theme.define("osparc.theme.Decoration", { } }, + "border-simple": { + include: "border", + style: { + radius: 4 + } + }, + "no-border": { style: { radius: 4, diff --git a/services/static-webserver/client/source/resource/osparc/new_studies.json b/services/static-webserver/client/source/resource/osparc/new_studies.json index 14c6ffd45bd..3123f2ae328 100644 --- a/services/static-webserver/client/source/resource/osparc/new_studies.json +++ b/services/static-webserver/client/source/resource/osparc/new_studies.json @@ -28,21 +28,24 @@ "description": "Start new Personalized Classic TI planning", "newStudyLabel": "Personalized Classic TI", "category": "personalized", - "idToWidget": "personalizationNewTIPlanButton" + "idToWidget": "personalizationNewTIPlanButton", + "billable": true }, { "expectedTemplateLabel": "personalized mcTI Planning Tool", "title": "Personalized MC TI", "description": "Start new Personalized Multichannel TI planning", "newStudyLabel": "Personalized Multichannel TI", "category": "personalized", - "idToWidget": "personalizationNewMTIPlanButton" + "idToWidget": "personalizationNewMTIPlanButton", + "billable": true }, { "expectedTemplateLabel": "personalized pmTI Planning Tool", "title": "Personalized PM TI", "description": "Start new Personalized Phase-Modulation TI planning", "newStudyLabel": "Personalized Phase-Modulation TI", "category": "personalized", - "idToWidget": "personalizationNewPMTIPlanButton" + "idToWidget": "personalizationNewPMTIPlanButton", + "billable": true }], "categories": [{ "id": "precomputed", From 42cc5e6e0d5fc691bd36a1da5c8a29de6fe50c10 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 25 Jun 2024 18:55:02 +0200 Subject: [PATCH 063/219] =?UTF-8?q?=F0=9F=90=9B=20Fixes=20mismatch=20on=20?= =?UTF-8?q?error=20status-code=20for=20start-computation=20(#5994)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
api/specs/web-server/_computations.py | 19 ++++++------------- .../api/routes/computations.py | 6 +++--- .../integration/01/test_computation_api.py | 2 +- .../class/osparc/desktop/StudyEditor.js | 2 +- .../tests/integration/02/test_computation.py | 12 ++++++------ 5 files changed, 17 insertions(+), 24 deletions(-) diff --git a/api/specs/web-server/_computations.py b/api/specs/web-server/_computations.py index 2458f499d54..36600f1efac 100644 --- a/api/specs/web-server/_computations.py +++ b/api/specs/web-server/_computations.py @@ -29,23 +29,16 @@ async def get_computation(project_id: ProjectID): "/computations/{project_id}:start", response_model=Envelope[_ComputationStarted], responses={ - status.HTTP_404_NOT_FOUND: { - "description": "Project/wallet/pricing details not found" - }, status.HTTP_402_PAYMENT_REQUIRED: { - "description": "Insufficient osparc credits" - }, - status.HTTP_406_NOT_ACCEPTABLE: { - "description": "Cluster not found", + "description": "Insufficient credits to run computation" }, - status.HTTP_503_SERVICE_UNAVAILABLE: { - "description": "Service not available", - }, - status.HTTP_422_UNPROCESSABLE_ENTITY: { - "description": "Configuration error", + status.HTTP_404_NOT_FOUND: { + "description": "Project/wallet/pricing details were not found" }, - status.HTTP_402_PAYMENT_REQUIRED: {"description": "Payment required"}, + status.HTTP_406_NOT_ACCEPTABLE: {"description": "Cluster not found"}, status.HTTP_409_CONFLICT: {"description": "Project already started"}, + status.HTTP_422_UNPROCESSABLE_ENTITY: {"description": "Configuration error"}, + status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Service not available"}, }, ) async def start_computation(project_id: ProjectID, _start: ComputationStart): diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 16d2a69ce91..cea6e18770d 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -98,7 +98,7 @@ router = APIRouter() -async def _check_pipeline_not_running( +async def _check_pipeline_not_running_or_raise_409( comp_tasks_repo: CompTasksRepository, computation: ComputationCreate ) -> None: pipeline_state = utils.get_pipeline_state_from_task_states( @@ -106,7 +106,7 @@ async def _check_pipeline_not_running( ) if utils.is_pipeline_running(pipeline_state): raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, + status_code=status.HTTP_409_CONFLICT, detail=f"Project {computation.project_id} already started, current state is {pipeline_state}", ) @@ -324,7 +324,7 @@ async def create_computation( # noqa: PLR0913 project: ProjectAtDB = await project_repo.get_project(computation.project_id) # check if current state allow to modify the computation - await _check_pipeline_not_running(comp_tasks_repo, computation) + await _check_pipeline_not_running_or_raise_409(comp_tasks_repo, computation) # create the complete DAG graph complete_dag = create_complete_dag(project.workbench) diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 9c510d5f23f..110dbd5f89b 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -835,7 +835,7 @@ async def test_update_and_delete_computation( ), f"pipeline is 
not in the expected starting state but in {task_out.state}" # now try to update the pipeline, is expected to be forbidden - with pytest.raises(httpx.HTTPStatusError, match=f"{status.HTTP_403_FORBIDDEN}"): + with pytest.raises(httpx.HTTPStatusError, match=f"{status.HTTP_409_CONFLICT}"): await create_pipeline( async_client, project=sleepers_project, diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js index 4b0848288d2..b504551a869 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js @@ -472,7 +472,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.getStudy().setPipelineRunning(false); }, this); req.addListener("fail", async e => { - if (e.getTarget().getStatus() == "403") { + if (e.getTarget().getStatus() == "409") { this.getStudyLogger().error(null, "Pipeline is already running"); } else if (e.getTarget().getStatus() == "422") { this.getStudyLogger().info(null, "The pipeline is up-to-date"); diff --git a/services/web/server/tests/integration/02/test_computation.py b/services/web/server/tests/integration/02/test_computation.py index 2531d5761f9..23fe812b9df 100644 --- a/services/web/server/tests/integration/02/test_computation.py +++ b/services/web/server/tests/integration/02/test_computation.py @@ -85,7 +85,7 @@ class _ExpectedResponseTuple(NamedTuple): ok: int created: int no_content: int - forbidden: int + conflict: int # pylint: disable=no-member def __str__(self) -> str: @@ -105,7 +105,7 @@ def standard_role_response(): ok=status.HTTP_401_UNAUTHORIZED, created=status.HTTP_401_UNAUTHORIZED, no_content=status.HTTP_401_UNAUTHORIZED, - forbidden=status.HTTP_401_UNAUTHORIZED, + conflict=status.HTTP_401_UNAUTHORIZED, ), ), pytest.param( @@ -114,7 +114,7 @@ def standard_role_response(): ok=status.HTTP_200_OK, created=status.HTTP_201_CREATED, no_content=status.HTTP_204_NO_CONTENT, - forbidden=status.HTTP_403_FORBIDDEN, + conflict=status.HTTP_409_CONFLICT, ), ), pytest.param( @@ -123,7 +123,7 @@ def standard_role_response(): ok=status.HTTP_200_OK, created=status.HTTP_201_CREATED, no_content=status.HTTP_204_NO_CONTENT, - forbidden=status.HTTP_403_FORBIDDEN, + conflict=status.HTTP_409_CONFLICT, ), ), pytest.param( @@ -132,7 +132,7 @@ def standard_role_response(): ok=status.HTTP_200_OK, created=status.HTTP_201_CREATED, no_content=status.HTTP_204_NO_CONTENT, - forbidden=status.HTTP_403_FORBIDDEN, + conflict=status.HTTP_409_CONFLICT, ), ), ], @@ -390,7 +390,7 @@ async def test_start_stop_computation( if not error: # starting again should be disallowed, since it's already running resp = await client.post(f"{url_start}") - assert resp.status == expected.forbidden + assert resp.status == expected.conflict assert "pipeline_id" in data assert data["pipeline_id"] == project_id From b84b85f42dde2816500eff8d28a966d5409029da Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Wed, 26 Jun 2024 10:45:18 +0200 Subject: [PATCH 064/219] =?UTF-8?q?=F0=9F=8E=A8=20Maintenance:=20Add=20new?= =?UTF-8?q?=20concurrency=20tooling=20(#5997)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../service-library/src/servicelib/utils.py | 148 ++++++++++++++- .../tests/test_archiving_utils.py | 13 +- .../tests/test_archiving_utils_extra.py | 13 +- packages/service-library/tests/test_utils.py | 177 
+++++++++++++++--- scripts/mypy.bash | 6 +- scripts/mypy/Dockerfile | 26 ++- .../modules/nodeports.py | 2 +- 7 files changed, 342 insertions(+), 43 deletions(-) diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index 0cd9c89613e..0f96e7af3a0 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -4,19 +4,24 @@ I order to avoid cyclic dependences, please DO NOT IMPORT ANYTHING from . """ + import asyncio import logging import os import socket from collections.abc import Awaitable, Coroutine, Generator, Iterable from pathlib import Path -from typing import Any, Final, cast +from typing import Any, AsyncGenerator, AsyncIterable, Final, TypeVar, cast import toolz from pydantic import NonNegativeInt _logger = logging.getLogger(__name__) +_DEFAULT_GATHER_TASKS_GROUP_PREFIX: Final[str] = "gathered" +_DEFAULT_LOGGER: Final[logging.Logger] = _logger +_DEFAULT_LIMITED_CONCURRENCY: Final[int] = 1 + def is_production_environ() -> bool: """ @@ -175,3 +180,144 @@ def unused_port() -> int: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind(("127.0.0.1", 0)) return cast(int, s.getsockname()[1]) + + +T = TypeVar("T") + + +async def limited_as_completed( + awaitables: Iterable[Awaitable[T]] | AsyncIterable[Awaitable[T]], + *, + limit: int = _DEFAULT_LIMITED_CONCURRENCY, + tasks_group_prefix: str | None = None, +) -> AsyncGenerator[asyncio.Future[T], None]: + """Runs awaitables using limited concurrent tasks and returns + result futures unordered. + + Arguments: + awaitables -- The awaitables to limit the concurrency of. + + Keyword Arguments: + limit -- The maximum number of awaitables to run concurrently. + 0 or negative values disable the limit. (default: {1}) + tasks_group_prefix -- The prefix to use for the name of the asyncio tasks group. + If None, no name is used. (default: {None}) + + Returns: + nothing + + Yields: + Future[T]: the future of the awaitables as they appear. 
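+
+    Example:
+        a minimal usage sketch -- the `fetch` coroutine and `urls` list below
+        are illustrative assumptions, not part of this module:
+
+            async for future in limited_as_completed(
+                (fetch(url) for url in urls), limit=5
+            ):
+                result = await future  # re-raises here if the awaitable failed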
+ + + """ + try: + awaitable_iterator = aiter(awaitables) # type: ignore[arg-type] + is_async = True + except TypeError: + assert isinstance(awaitables, Iterable) # nosec + awaitable_iterator = iter(awaitables) # type: ignore[assignment] + is_async = False + + completed_all_awaitables = False + pending_futures: set[asyncio.Future] = set() + + try: + while pending_futures or not completed_all_awaitables: + while ( + limit < 1 or len(pending_futures) < limit + ) and not completed_all_awaitables: + try: + aw = ( + await anext(awaitable_iterator) + if is_async + else next(awaitable_iterator) # type: ignore[call-overload] + ) + future = asyncio.ensure_future(aw) + if tasks_group_prefix: + future.set_name(f"{tasks_group_prefix}-{future.get_name()}") + pending_futures.add(future) + except (StopIteration, StopAsyncIteration): # noqa: PERF203 + completed_all_awaitables = True + if not pending_futures: + return + done, pending_futures = await asyncio.wait( + pending_futures, return_when=asyncio.FIRST_COMPLETED + ) + + for future in done: + yield future + except asyncio.CancelledError: + for future in pending_futures: + future.cancel() + await asyncio.gather(*pending_futures, return_exceptions=True) + raise + + +async def _wrapped( + awaitable: Awaitable[T], *, index: int, reraise: bool, logger: logging.Logger +) -> tuple[int, T | BaseException]: + try: + return index, await awaitable + except asyncio.CancelledError: + logger.debug( + "Cancelled %i-th concurrent task %s", + index + 1, + f"{awaitable=}", + ) + raise + except BaseException as exc: # pylint: disable=broad-exception-caught + logger.warning( + "Error in %i-th concurrent task %s: %s", + index + 1, + f"{awaitable=}", + f"{exc=}", + ) + if reraise: + raise + return index, exc + + +async def limited_gather( + *awaitables: Awaitable[T], + reraise: bool = True, + log: logging.Logger = _DEFAULT_LOGGER, + limit: int = _DEFAULT_LIMITED_CONCURRENCY, + tasks_group_prefix: str | None = None, +) -> list[T | BaseException | None]: + """runs all the awaitables using the limited concurrency and returns them in the same order + + Arguments: + awaitables -- The awaitables to limit the concurrency of. + + Keyword Arguments: + limit -- The maximum number of awaitables to run concurrently. + setting 0 or negative values disable (default: {1}) + reraise -- if True will raise at the first exception + The remaining tasks will continue as in standard asyncio gather. + If False, then the exceptions will be returned (default: {True}) + log -- the logger to use for logging the exceptions (default: {_logger}) + tasks_group_prefix -- The prefix to use for the name of the asyncio tasks group. + If None, 'gathered' prefix is used. 
(default: {None}) + + Returns: + the results of the awaitables keeping the order + + special thanks to: https://death.andgravity.com/limit-concurrency + """ + + indexed_awaitables = [ + _wrapped(awaitable, reraise=reraise, index=index, logger=log) + for index, awaitable in enumerate(awaitables) + ] + + results: list[T | BaseException | None] = [None] * len(indexed_awaitables) + async for future in limited_as_completed( + indexed_awaitables, + limit=limit, + tasks_group_prefix=tasks_group_prefix or _DEFAULT_GATHER_TASKS_GROUP_PREFIX, + ): + index, result = await future + results[index] = result + + return results diff --git a/packages/service-library/tests/test_archiving_utils.py b/packages/service-library/tests/test_archiving_utils.py index 84bcafb9572..f6886ea509a 100644 --- a/packages/service-library/tests/test_archiving_utils.py +++ b/packages/service-library/tests/test_archiving_utils.py @@ -14,7 +14,7 @@ from concurrent.futures import ProcessPoolExecutor from dataclasses import dataclass from pathlib import Path -from typing import Callable, Iterable, Iterator, Optional +from typing import Callable, Iterable, Iterator import pytest from faker import Faker @@ -23,7 +23,12 @@ from servicelib import archiving_utils from servicelib.archiving_utils import ArchiveError, archive_dir, unarchive_dir -from .test_utils import print_tree + +def _print_tree(path: Path, level=0): + tab = " " * level + print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}") + for p in path.glob("*"): + _print_tree(p, level + 1) @pytest.fixture @@ -96,7 +101,7 @@ def exclude_patterns_validation_dir(tmp_path: Path, faker: Faker) -> Path: (base_dir / "d1" / "sd1" / "f2.txt").write_text(faker.text()) print("exclude_patterns_validation_dir ---") - print_tree(base_dir) + _print_tree(base_dir) return base_dir @@ -174,7 +179,7 @@ def _escape_undecodable_path(path: Path) -> Path: async def assert_same_directory_content( dir_to_compress: Path, output_dir: Path, - inject_relative_path: Optional[Path] = None, + inject_relative_path: Path | None = None, unsupported_replace: bool = False, ) -> None: def _relative_path(input_path: Path) -> Path: diff --git a/packages/service-library/tests/test_archiving_utils_extra.py b/packages/service-library/tests/test_archiving_utils_extra.py index a428b5db4aa..bc2959c2e5b 100644 --- a/packages/service-library/tests/test_archiving_utils_extra.py +++ b/packages/service-library/tests/test_archiving_utils_extra.py @@ -13,7 +13,12 @@ unarchive_dir, ) -from .test_utils import print_tree + +def _print_tree(path: Path, level=0): + tab = " " * level + print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}") + for p in path.glob("*"): + _print_tree(p, level + 1) @pytest.fixture @@ -32,7 +37,7 @@ def state_dir(tmp_path) -> Path: (base_dir / "d1" / "d1_1" / "d1_1_1" / "f6").touch() print("state-dir ---") - print_tree(base_dir) + _print_tree(base_dir) # + /tmp/pytest-of-crespo/pytest-95/test_override_and_prune_from_a1/original # + empty # + d1 @@ -64,7 +69,7 @@ def new_state_dir(tmp_path) -> Path: # f6 deleted -> d1/d1_1/d2_2 remains empty and should be pruned print("new-state-dir ---") - print_tree(base_dir) + _print_tree(base_dir) # + /tmp/pytest-of-crespo/pytest-95/test_override_and_prune_from_a1/updated # + d1 # + d1_1 @@ -120,7 +125,7 @@ def test_override_and_prune_folder(state_dir: Path, new_state_dir: Path): assert old_paths != got_paths print("after ----") - print_tree(state_dir) + _print_tree(state_dir) @pytest.mark.parametrize( diff --git 
a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py index 005465c35df..7bfcd4cee69 100644 --- a/packages/service-library/tests/test_utils.py +++ b/packages/service-library/tests/test_utils.py @@ -3,29 +3,37 @@ # pylint:disable=redefined-outer-name import asyncio -from collections.abc import Awaitable, Coroutine -from copy import copy -from pathlib import Path +from collections.abc import AsyncIterator, Awaitable, Coroutine, Iterator +from copy import copy, deepcopy from random import randint +from typing import NoReturn +from unittest import mock import pytest from faker import Faker -from servicelib.utils import ensure_ends_with, fire_and_forget_task, logged_gather +from pytest_mock import MockerFixture +from servicelib.utils import ( + ensure_ends_with, + fire_and_forget_task, + limited_as_completed, + limited_gather, + logged_gather, +) -async def _value_error(uid, *, delay=1): - await _succeed(delay) +async def _value_error(uid: int, *, delay: int = 1) -> NoReturn: + await _succeed(uid, delay=delay) msg = f"task#{uid}" raise ValueError(msg) -async def _runtime_error(uid, *, delay=1): - await _succeed(delay) +async def _runtime_error(uid: int, *, delay: int = 1) -> NoReturn: + await _succeed(uid, delay=delay) msg = f"task#{uid}" raise RuntimeError(msg) -async def _succeed(uid, *, delay=1): +async def _succeed(uid: int, *, delay: int = 1) -> int: print(f"task#{uid} begin") await asyncio.sleep(delay) print(f"task#{uid} end") @@ -33,19 +41,19 @@ async def _succeed(uid, *, delay=1): @pytest.fixture -def coros(): +def coros() -> list[Coroutine]: return [ _succeed(0), - _value_error(1, delay=2), + _value_error(1, delay=4), _succeed(2), - _runtime_error(3), - _value_error(4, delay=0), + _runtime_error(3, delay=0), + _value_error(4, delay=2), _succeed(5), ] @pytest.fixture -def mock_logger(mocker): +def mock_logger(mocker: MockerFixture) -> Iterator[mock.Mock]: mock_logger = mocker.Mock() yield mock_logger @@ -57,7 +65,11 @@ def mock_logger(mocker): ), "Expected all 3 errors ALWAYS logged as warnings" -async def test_logged_gather(event_loop, coros, mock_logger): +async def test_logged_gather( + event_loop: asyncio.AbstractEventLoop, + coros: list[Coroutine], + mock_logger: mock.Mock, +): with pytest.raises(ValueError) as excinfo: # noqa: PT011 await logged_gather(*coros, reraise=True, log=mock_logger) @@ -79,7 +91,7 @@ async def test_logged_gather(event_loop, coros, mock_logger): assert not task.cancelled() -async def test_logged_gather_wo_raising(coros, mock_logger): +async def test_logged_gather_wo_raising(coros: list[Coroutine], mock_logger: mock.Mock): results = await logged_gather(*coros, reraise=False, log=mock_logger) assert results[0] == 0 @@ -90,13 +102,6 @@ async def test_logged_gather_wo_raising(coros, mock_logger): assert results[5] == 5 -def print_tree(path: Path, level=0): - tab = " " * level - print(f"{tab}{'+' if path.is_dir() else '-'} {path if level==0 else path.name}") - for p in path.glob("*"): - print_tree(p, level + 1) - - @pytest.fixture() async def coroutine_that_cancels() -> asyncio.Future | Awaitable: async def _self_cancelling() -> None: @@ -142,7 +147,7 @@ async def test_fire_and_forget_cancellation_no_errors_raised( async def test_fire_and_forget_1000s_tasks(faker: Faker): tasks_collection = set() - async def _some_task(n: int): + async def _some_task(n: int) -> str: await asyncio.sleep(randint(1, 3)) return f"I'm great since I slept a bit, and by the way I'm task {n}" @@ -175,3 +180,127 @@ def 
test_ensure_ends_with(original: str, termination: str, expected: str): assert original_copy == original assert terminated_string.endswith(termination) assert terminated_string == expected + + +@pytest.fixture +def uids(faker: Faker) -> list[int]: + return [faker.pyint() for _ in range(10)] + + +@pytest.fixture +def long_delay() -> int: + return 10 + + +@pytest.fixture +def slow_successful_coros_list(uids: list[int], long_delay: int) -> list[Coroutine]: + return [_succeed(uid, delay=long_delay) for uid in uids] + + +@pytest.fixture +def successful_coros_list(uids: list[int]) -> list[Coroutine]: + return [_succeed(uid) for uid in uids] + + +@pytest.fixture +async def successful_coros_gen(uids: list[int]) -> AsyncIterator[Coroutine]: + async def as_async_iter(it): + for x in it: + yield x + + return as_async_iter(_succeed(uid) for uid in uids) + + +@pytest.fixture(params=["list", "generator"]) +async def successful_coros( + successful_coros_list: list[Coroutine], + successful_coros_gen: AsyncIterator[Coroutine], + request: pytest.FixtureRequest, +) -> list[Coroutine] | AsyncIterator[Coroutine]: + return successful_coros_list if request.param == "list" else successful_coros_gen + + +@pytest.mark.parametrize("limit", [0, 2, 5, 10]) +async def test_limited_as_completed( + uids: list[int], + successful_coros: list[Coroutine] | AsyncIterator[Coroutine], + limit: int, +): + expected_uids = deepcopy(uids) + async for future in limited_as_completed(successful_coros, limit=limit): + result = await future + assert result is not None + assert result in expected_uids + expected_uids.remove(result) + assert len(expected_uids) == 0 + + +async def test_limited_as_completed_empty_coros(): + results = [await result async for result in limited_as_completed([])] + assert results == [] + + +@pytest.mark.parametrize("limit", [0, 2, 5, 10]) +async def test_limited_gather_limits( + uids: list[int], + successful_coros_list: list[Coroutine], + limit: int, +): + results = await limited_gather(*successful_coros_list, limit=limit) + assert results == uids + + +async def test_limited_gather( + event_loop: asyncio.AbstractEventLoop, + coros: list[Coroutine], + mock_logger: mock.Mock, +): + with pytest.raises(RuntimeError) as excinfo: + await limited_gather(*coros, reraise=True, log=mock_logger, limit=0) + + # NOTE: #3 fails first + assert "task#3" in str(excinfo.value) + + # NOTE: only first error in the list is raised, since it is not RuntimeError, that task + assert isinstance(excinfo.value, RuntimeError) + + unfinished_tasks = [ + task + for task in asyncio.all_tasks(event_loop) + if task is not asyncio.current_task() + ] + final_results = await asyncio.gather(*unfinished_tasks, return_exceptions=True) + for result in final_results: + if isinstance(result, Exception): + assert isinstance(result, ValueError | RuntimeError) + + +async def test_limited_gather_wo_raising( + coros: list[Coroutine], mock_logger: mock.Mock +): + results = await limited_gather(*coros, reraise=False, log=mock_logger, limit=0) + + assert results[0] == 0 + assert isinstance(results[1], ValueError) + assert results[2] == 2 + assert isinstance(results[3], RuntimeError) + assert isinstance(results[4], ValueError) + assert results[5] == 5 + + +async def test_limited_gather_cancellation( + event_loop: asyncio.AbstractEventLoop, slow_successful_coros_list: list[Coroutine] +): + task = asyncio.create_task(limited_gather(*slow_successful_coros_list, limit=0)) + await asyncio.sleep(3) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await 
task + + # check all coros are cancelled + unfinished_tasks = [ + task + for task in asyncio.all_tasks(event_loop) + if task is not asyncio.current_task() + ] + assert not unfinished_tasks diff --git a/scripts/mypy.bash b/scripts/mypy.bash index 0647a7de4ea..e25b504fc4b 100755 --- a/scripts/mypy.bash +++ b/scripts/mypy.bash @@ -29,13 +29,11 @@ echo_requirements() { --interactive \ --rm \ --user="$(id --user "$USER")":"$(id --group "$USER")" \ - --entrypoint="pip" \ + --entrypoint="uv" \ "$IMAGE_NAME" \ - --no-cache-dir freeze + --no-cache-dir pip freeze } - - run() { echo Using "$(docker run --rm "$IMAGE_NAME" --version)" echo Mypy config "${MYPY_CONFIG}" diff --git a/scripts/mypy/Dockerfile b/scripts/mypy/Dockerfile index 06a82234250..930ebf7110b 100644 --- a/scripts/mypy/Dockerfile +++ b/scripts/mypy/Dockerfile @@ -1,13 +1,29 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.10.14" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base +# Sets utf-8 encoding for Python et al +ENV LANG=C.UTF-8 -COPY requirements.txt /requirements.txt +# Turns off writing .pyc files; superfluous on an ephemeral container. +ENV PYTHONDONTWRITEBYTECODE=1 \ + VIRTUAL_ENV=/home/scu/.venv +# Ensures that the python and pip executables used in the image will be +# those from our virtualenv. +ENV PATH="${VIRTUAL_ENV}/bin:$PATH" + + +# NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ - pip install --upgrade pip \ - && pip install -r requirements.txt \ - && pip freeze + pip install uv~=0.1 + +RUN \ + --mount=type=cache,mode=0755,target=/root/.cache/uv \ + --mount=type=bind,source=./requirements.txt,target=requirements.txt \ + uv venv "${VIRTUAL_ENV}" \ + && uv pip install --upgrade pip wheel setuptools \ + && uv pip install -r requirements.txt \ + && uv pip list ENTRYPOINT ["mypy", "--config-file", "/config/mypy.ini", "--warn-unused-configs"] diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index 23c3d437cd5..44f8ad9bfab 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -134,7 +134,7 @@ async def upload_outputs( # generic case let's create an archive # only the filtered out files will be zipped tmp_folder = Path( - await stack.enter_async_context(AioTemporaryDirectory()) + await stack.enter_async_context(AioTemporaryDirectory()) # type: ignore[arg-type] ) tmp_file = tmp_folder / f"{src_folder.stem}.zip" From 25fbe837f754f443209f2f2a4874d9bfec1e3444 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 26 Jun 2024 12:33:19 +0200 Subject: [PATCH 065/219] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Coins=20icon?= =?UTF-8?q?=20for=20the=20credits=20indicator=20(#5993)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/GridButtonBase.js | 8 + .../class/osparc/dashboard/NewStudies.js | 9 +- .../osparc/desktop/credits/CreditsImage.js | 49 +++++ .../desktop/credits/CreditsIndicatorButton.js | 24 +-- .../class/osparc/desktop/credits/Utils.js | 11 -- .../class/osparc/navigation/NavigationBar.js | 8 +- .../source/class/osparc/ui/basic/SVGImage.js | 175 ++++++++++++++++++ 
.../source/class/osparc/ui/basic/Thumbnail.js | 31 +--- .../class/osparc/ui/layout/CenteredGrid.js | 64 +++++++ .../source/resource/osparc/coins-solid.svg | 1 + 10 files changed, 315 insertions(+), 65 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/desktop/credits/CreditsImage.js create mode 100644 services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js create mode 100644 services/static-webserver/client/source/class/osparc/ui/layout/CenteredGrid.js create mode 100644 services/static-webserver/client/source/resource/osparc/coins-solid.svg diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js index 61292dd7171..3a7744e76a0 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js @@ -349,6 +349,14 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { iconLayout.recheckSize(); }, + replaceIcon: function(newIcon) { + const plusIcon = this.getChildControl("icon"); + plusIcon.exclude(); + + const bodyLayout = this.getChildControl("body"); + bodyLayout.add(newIcon, {flex: 1}); + }, + /** * Event handler for the pointer over event. */ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js index 9fc4f8441ab..de323bfaaa6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js @@ -148,7 +148,14 @@ qx.Class.define("osparc.dashboard.NewStudies", { newPlanButton.setCardKey(templateInfo.idToWidget); osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo.idToWidget); if (templateInfo.billable) { - osparc.desktop.credits.Utils.setCreditsIconToButton(newPlanButton); + // replace the plus button with the creditsImage + const creditsImage = new osparc.desktop.credits.CreditsImage(); + creditsImage.getChildControl("image").set({ + width: 60, + height: 60 + }) + newPlanButton.replaceIcon(creditsImage); + newPlanButton.addListener("execute", () => { const store = osparc.store.Store.getInstance(); const credits = store.getContextWallet().getCreditsAvailable() diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsImage.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsImage.js new file mode 100644 index 00000000000..fca14011705 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsImage.js @@ -0,0 +1,49 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.desktop.credits.CreditsImage", { + extend: osparc.ui.basic.SVGImage, + + construct: function() { + this.base(arguments, "osparc/coins-solid.svg"); + + const store = osparc.store.Store.getInstance(); + store.addListener("changeContextWallet", this.__updateWallet, this); + this.__updateWallet(); + }, + + members: { + __updateWallet: function() { + const store = 
osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (contextWallet) { + contextWallet.addListener("changeCreditsAvailable", this.__updateColor, this); + this.__updateColor(); + } + }, + + __updateColor: function() { + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (contextWallet) { + const credits = contextWallet.getCreditsAvailable(); + const creditsColorKeyword = osparc.desktop.credits.Utils.creditsToColor(credits, "strong-main"); + this.setImageColor(creditsColorKeyword); + } + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js index e331547fbca..b612461d939 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js @@ -16,17 +16,20 @@ ************************************************************************ */ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { - extend: qx.ui.form.Button, + extend: osparc.desktop.credits.CreditsImage, construct: function() { this.base(arguments); this.set({ - backgroundColor: "transparent" + cursor: "pointer", + padding: [3, 8] }); - const store = osparc.store.Store.getInstance(); - store.bind("contextWallet", this, "wallet"); + this.getChildControl("image").set({ + width: 24, + height: 24 + }); this.__creditsContainer = new osparc.desktop.credits.CreditsNavBarContainer(); this.__creditsContainer.exclude(); @@ -34,24 +37,11 @@ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { this.addListener("tap", this.__buttonTapped, this); }, - properties: { - wallet: { - check: "osparc.data.model.Wallet", - init: null, - nullable: true, - event: "changeWallet", - apply: "__applyWallet" - } - }, members: { __creditsContainer: null, __tappedOut: null, - __applyWallet: function() { - osparc.desktop.credits.Utils.setCreditsIconToButton(this); - }, - __buttonTapped: function() { if (this.__tappedOut) { this.__tappedOut = false; diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js index fc9539e6213..e05d53427da 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js @@ -27,17 +27,6 @@ qx.Class.define("osparc.desktop.credits.Utils", { return Boolean(statics && statics["isPaymentEnabled"]); }, - setCreditsIconToButton: function(button) { - button.setIcon(osparc.desktop.credits.Utils.CREDITS_ICON); - const store = osparc.store.Store.getInstance(); - const contextWallet = store.getContextWallet(); - if (contextWallet) { - contextWallet.bind("creditsAvailable", button, "textColor", { - converter: c => osparc.desktop.credits.Utils.creditsToColor(c, "strong-main") - }); - } - }, - getNoWriteAccessInformationLabel: function() { return new qx.ui.basic.Label().set({ value: qx.locale.Manager.tr("You can't access this information"), diff --git a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js index db55016a71e..89662fee8dd 100644 --- 
a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js +++ b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js @@ -239,13 +239,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { break; } case "credits-button": - control = new osparc.desktop.credits.CreditsIndicatorButton().set({ - maxHeight: 32 - }); - control.getChildControl("icon").set({ - maxHeight: 24, - scale: true - }); + control = new osparc.desktop.credits.CreditsIndicatorButton(); osparc.utils.Utils.setIdToWidget(control, "creditsNavigationBtn"); this.getChildControl("right-items").add(control); break; diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js new file mode 100644 index 00000000000..541b0283029 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js @@ -0,0 +1,175 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Widget that displays a SVG image and supports changing its color. + * It is meant to be used for those images that are not available in the catalogs of font icons we include. + */ + + +qx.Class.define("osparc.ui.basic.SVGImage", { + extend: osparc.ui.layout.CenteredGrid, + + /** + * @param source + */ + construct: function(source) { + this.base(arguments); + + if (source) { + this.setSource(source); + } + }, + + properties: { + source: { + check: "String", + init: null, + nullable: false, + apply: "__applySource" + }, + + imageColor: { + check: "String", + init: null, + nullable: false, + event: "changeImageColor", + apply: "__applyImageColor" + }, + }, + + statics: { + keywordToCSSFilter: function(keyword) { + // use the following link to extended supported colors + // https://isotropic.co/tool/hex-color-to-css-filter/ + let filter = null; + switch (keyword) { + case "danger-red": // "#FF2D2D" + filter = "invert(13%) sepia(89%) saturate(5752%) hue-rotate(346deg) brightness(85%) contrast(109%)"; + break; + case "warning-yellow": // #F8DB1F + filter = "invert(90%) sepia(99%) saturate(7500%) hue-rotate(331deg) brightness(95%) contrast(108%)"; + break; + case "ready-green": // #58A6FF + filter = "invert(66%) sepia(24%) saturate(5763%) hue-rotate(188deg) brightness(101%) contrast(101%)"; + break; + case "text": // light or dark + if (qx.theme.manager.Meta.getInstance().getTheme().basename === "ThemeLight") { + // ThemeLight #282828 + filter = "invert(10%) sepia(4%) saturate(19%) hue-rotate(354deg) brightness(102%) contrast(86%)"; + } else { + // ThemeDark #D8D8D8 + filter = "invert(66%) sepia(24%) saturate(5763%) hue-rotate(188deg) brightness(101%) contrast(101%)"; + } + break; + case "strong-main": // it depends on the product + if (qx.theme.manager.Meta.getInstance().getTheme().name.includes(".s4l.")) { + // "rgba(0, 144, 208, 1)" + filter = "invert(55%) sepia(73%) saturate(6976%) hue-rotate(177deg) brightness(100%) contrast(102%)"; + } else if (qx.theme.manager.Meta.getInstance().getTheme().name.includes(".tis.")) { + // "rgba(105, 105, 255, 1)" + filter = "invert(36%) sepia(74%) saturate(2007%) hue-rotate(225deg) brightness(102%) contrast(104%)"; + } else { + // "rgba(131, 0, 
191, 1)" osparc + filter = "invert(13%) sepia(95%) saturate(6107%) hue-rotate(282deg) brightness(77%) contrast(115%)"; + } + } + return filter; + }, + + // not very accurate + rgbToCSSFilter: function(rgb) { + const [r, g, b] = rgb.split(",").map(Number); + + let [rf, gf, bf] = [r / 255, g / 255, b / 255]; + let [mi, ma] = [Math.min(rf, gf, bf), Math.max(rf, gf, bf)]; + let [h, s, l] = [0, 0, (mi + ma) / 2]; + + if (mi !== ma) { + s = l < 0.5 ? (ma - mi) / (ma + mi) : (ma - mi) / (2 - ma - mi); + switch (ma) { + case rf: + h = (gf - bf) / (ma - mi); + break; + case gf: + h = 2 + (bf - rf) / (ma - mi); + break; + case bf: + h = 4 + (rf - gf) / (ma - mi); + break; + } + } + + h = Math.round(h * 60); + if (h < 0) { + h += 360; + } + s = Math.round(s * 100); + l = Math.round(l * 100); + + const invertValue = l2 => 100 - l2; + const sepiaValue = s2 => s2; + const saturateValue = s3 => s3; + const brightnessValue = l3 => l3; + const contrastValue = l4 => l4 > 50 ? 50 : l4; + return `invert(${invertValue(l)}%) sepia(${sepiaValue(s)}%) saturate(${saturateValue(s)}%) hue-rotate(${h}deg) brightness(${brightnessValue(l)}%) contrast(${contrastValue(l)}%)`; + } + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "image": + control = new qx.ui.basic.Image().set({ + scale: true, + allowStretchX: true, + allowStretchY: true, + allowGrowX: true, + allowGrowY: true, + alignX: "center", + alignY: "middle" + }); + this.addCenteredWidget(control); + break; + } + return control || this.base(arguments, id); + }, + + __applySource: function(src) { + if (src && src.includes(".svg")) { + this.getChildControl("image").setSource(src); + } + }, + + /** + * @param keywordOrRgb {string} predefined keyword or rgb in the folloing format "0,255,0" + */ + __applyImageColor: function(keywordOrRgb) { + let filterValue = this.self().keywordToCSSFilter(keywordOrRgb); + if (filterValue === null) { + const hexColor = qx.theme.manager.Color.getInstance().resolve(keywordOrRgb); + const rgbColor = qx.util.ColorUtil.hexStringToRgb(hexColor); + filterValue = this.self().rgbToCSSFilter(rgbColor); + } + const myStyle = { + "filter": filterValue + }; + this.getChildControl("image").getContentElement().setStyles(myStyle); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js index cc83795b37a..53e9d77285c 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js @@ -23,7 +23,7 @@ * |_____x_____|flex Spacer|_____x_____| */ qx.Class.define("osparc.ui.basic.Thumbnail", { - extend: qx.ui.core.Widget, + extend: osparc.ui.layout.CenteredGrid, /** * @param {String} source Source of the Image @@ -33,30 +33,6 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { construct: function(source, maxWidth, maxHeight) { this.base(arguments); - const layout = new qx.ui.layout.Grid(); - layout.setRowFlex(0, 1); - layout.setRowFlex(2, 1); - layout.setColumnFlex(0, 1); - layout.setColumnFlex(2, 1); - this._setLayout(layout); - - [ - [0, 0], - [0, 1], - [0, 2], - [1, 0], - [1, 2], - [2, 0], - [2, 1], - [2, 2] - ].forEach(quad => { - const empty = new qx.ui.core.Spacer(); - this._add(empty, { - row: quad[0], - column: quad[1] - }); - }); - if (source) { this.setSource(source); } @@ -98,10 +74,7 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { alignX: "center", alignY: "middle" }); - 
this._add(control, { - row: 1, - column: 1 - }); + this.addCenteredWidget(control); break; } return control || this.base(arguments, id); diff --git a/services/static-webserver/client/source/class/osparc/ui/layout/CenteredGrid.js b/services/static-webserver/client/source/class/osparc/ui/layout/CenteredGrid.js new file mode 100644 index 00000000000..8c4e98d351d --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/layout/CenteredGrid.js @@ -0,0 +1,64 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Grid layout that shows an element well centered + * ___________________________________ + * |flex Spacer|flex Spacer|flex Spacer| + * |flex Spacer| element |flex Spacer| + * |flex Spacer|flex Spacer|flex Spacer| + */ +qx.Class.define("osparc.ui.layout.CenteredGrid", { + extend: qx.ui.container.Composite, + + construct: function() { + this.base(arguments); + + const layout = new qx.ui.layout.Grid(); + layout.setRowFlex(0, 1); + layout.setRowFlex(2, 1); + layout.setColumnFlex(0, 1); + layout.setColumnFlex(2, 1); + this._setLayout(layout); + + [ + [0, 0], + [0, 1], + [0, 2], + [1, 0], + [1, 2], + [2, 0], + [2, 1], + [2, 2] + ].forEach(quad => { + const empty = new qx.ui.core.Spacer(); + this._add(empty, { + row: quad[0], + column: quad[1] + }); + }); + }, + + members: { + addCenteredWidget: function(widget) { + this._add(widget, { + row: 1, + column: 1 + }); + } + } +}); diff --git a/services/static-webserver/client/source/resource/osparc/coins-solid.svg b/services/static-webserver/client/source/resource/osparc/coins-solid.svg new file mode 100644 index 00000000000..b4799adeb41 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/coins-solid.svg @@ -0,0 +1 @@ + From b5d82e0deafda5271c761e731778801bc7683ff3 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 26 Jun 2024 13:56:39 +0200 Subject: [PATCH 066/219] =?UTF-8?q?=F0=9F=8E=A8=20Fixes=20=20Deprecation?= =?UTF-8?q?=20Warning=20on=20redis=20(#5996)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/pytest_simcore/redis_service.py | 6 +-- .../servicelib/aiohttp/rest_middlewares.py | 16 ++++---- .../src/servicelib/aiohttp/rest_models.py | 2 +- .../src/servicelib/aiohttp/rest_responses.py | 38 ++++++++++++++----- .../tests/aiohttp/test_rest_responses.py | 31 +++++++++++++++ .../director_v2/_handlers.py | 30 ++++++++++----- .../integration/01/test_garbage_collection.py | 2 +- .../server/tests/unit/with_dbs/conftest.py | 2 +- 8 files changed, 94 insertions(+), 33 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/redis_service.py b/packages/pytest-simcore/src/pytest_simcore/redis_service.py index 3a84f0ceb03..a94a7a38223 100644 --- a/packages/pytest-simcore/src/pytest_simcore/redis_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/redis_service.py @@ -69,7 +69,7 @@ async def redis_client( yield client await client.flushall() - await client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) @pytest.fixture() @@ -86,7 +86,7 @@ async def redis_locks_client( yield client await client.flushall() - await 
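The hunks in this commit migrate from the deprecated redis-py Redis.close() to Redis.aclose(). A minimal sketch of the teardown pattern repeated across these fixtures, assuming redis-py >= 5 with its asyncio client:

    import redis.asyncio as aioredis

    async def cleanup(client: aioredis.Redis) -> None:
        await client.flushall()
        # aclose() supersedes the deprecated close(); also dispose of the pool
        await client.aclose(close_connection_pool=True)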
client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) @tenacity.retry( @@ -103,4 +103,4 @@ async def wait_till_redis_responsive(redis_url: URL | str) -> None: msg = f"{redis_url=} not available" raise ConnectionError(msg) finally: - await client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) diff --git a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py index 8c12f7b3491..e68db2cbb46 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py @@ -18,7 +18,7 @@ from .rest_models import ErrorItemType, ErrorType, LogMessageType from .rest_responses import ( create_data_response, - create_error_response, + create_http_error, is_enveloped_from_map, is_enveloped_from_text, wrap_as_envelope, @@ -44,7 +44,7 @@ def error_middleware_factory( _is_prod: bool = is_production_environ() def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception): - resp = create_error_response( + http_error = create_http_error( err, "Unexpected Server error", web.HTTPInternalServerError, @@ -58,11 +58,11 @@ def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception request.remote, request.method, request.path, - resp.status, + http_error.status, exc_info=err, stack_info=True, ) - raise resp + raise http_error @web.middleware async def _middleware_handler(request: web.Request, handler: Handler): @@ -115,22 +115,22 @@ async def _middleware_handler(request: web.Request, handler: Handler): raise except NotImplementedError as err: - error_response = create_error_response( + http_error = create_http_error( err, f"{err}", web.HTTPNotImplemented, skip_internal_error_details=_is_prod, ) - raise error_response from err + raise http_error from err except asyncio.TimeoutError as err: - error_response = create_error_response( + http_error = create_http_error( err, f"{err}", web.HTTPGatewayTimeout, skip_internal_error_details=_is_prod, ) - raise error_response from err + raise http_error from err except Exception as err: # pylint: disable=broad-except _process_and_raise_unexpected_error(request, err) diff --git a/packages/service-library/src/servicelib/aiohttp/rest_models.py b/packages/service-library/src/servicelib/aiohttp/rest_models.py index f35cabe4394..36902f17b77 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_models.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_models.py @@ -27,4 +27,4 @@ class ErrorType: logs: list[LogMessageType] = field(default_factory=list) errors: list[ErrorItemType] = field(default_factory=list) status: int = 400 - message: str = "Unexpected client error" + message: str = "Unexpected error" diff --git a/packages/service-library/src/servicelib/aiohttp/rest_responses.py b/packages/service-library/src/servicelib/aiohttp/rest_responses.py index 313590a22bc..569ab56fbca 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_responses.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_responses.py @@ -13,6 +13,7 @@ from servicelib.aiohttp.status import HTTP_200_OK from ..mimetype_constants import MIMETYPE_APPLICATION_JSON +from ..status_utils import get_code_description from .rest_models import ErrorItemType, ErrorType _ENVELOPE_KEYS = ("data", "error") @@ -65,18 +66,20 @@ def create_data_response( response = web.json_response(payload, dumps=json_dumps, 
status=status) except (TypeError, ValueError) as err: - response = create_error_response( - [ - err, - ], - str(err), - web.HTTPInternalServerError, - skip_internal_error_details=skip_internal_error_details, + response = exception_to_response( + create_http_error( + [ + err, + ], + str(err), + web.HTTPInternalServerError, + skip_internal_error_details=skip_internal_error_details, + ) ) return response -def create_error_response( +def create_http_error( errors: list[Exception] | Exception, reason: str | None = None, http_error_cls: type[HTTPError] = web.HTTPInternalServerError, @@ -94,18 +97,23 @@ def create_error_response( # TODO: guarantee no throw! is_internal_error: bool = http_error_cls == web.HTTPInternalServerError + default_message = reason or get_code_description(http_error_cls.status_code) if is_internal_error and skip_internal_error_details: error = ErrorType( errors=[], status=http_error_cls.status_code, + message=default_message, ) else: + items = [ErrorItemType.from_error(err) for err in errors] error = ErrorType( - errors=[ErrorItemType.from_error(err) for err in errors], + errors=items, status=http_error_cls.status_code, + message=items[0].message if items else default_message, ) + assert not http_error_cls.empty_body # nosec payload = wrap_as_envelope(error=asdict(error)) return http_error_cls( @@ -115,6 +123,18 @@ def create_error_response( ) +def exception_to_response(exc: HTTPError) -> web.Response: + # Returning web.HTTPException is deprecated so here we have a converter to a response + # so it can be used as a return value + # SEE https://github.com/aio-libs/aiohttp/issues/2415 + return web.Response( + status=exc.status, + headers=exc.headers, + reason=exc.reason, + text=exc.text, + ) + + # Inverse map from code to HTTPException classes def _collect_http_exceptions(exception_cls: type[HTTPException] = HTTPException): def _pred(obj) -> bool: diff --git a/packages/service-library/tests/aiohttp/test_rest_responses.py b/packages/service-library/tests/aiohttp/test_rest_responses.py index ce47777cd4f..7077a93cb0f 100644 --- a/packages/service-library/tests/aiohttp/test_rest_responses.py +++ b/packages/service-library/tests/aiohttp/test_rest_responses.py @@ -5,6 +5,7 @@ import itertools import pytest +from aiohttp import web from aiohttp.web_exceptions import ( HTTPBadRequest, HTTPError, @@ -14,10 +15,14 @@ HTTPNotModified, HTTPOk, ) +from servicelib.aiohttp import status from servicelib.aiohttp.rest_responses import ( _STATUS_CODE_TO_HTTP_ERRORS, + create_http_error, + exception_to_response, get_http_error, ) +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON # @@ -53,3 +58,29 @@ def test_collected_http_errors_map(status_code: int, http_error_cls: type[HTTPEr assert http_error_cls != HTTPError assert issubclass(http_error_cls, HTTPError) + + +@pytest.mark.parametrize("skip_details", [True, False]) +def tests_exception_to_response(skip_details: bool): + exception = create_http_error( + errors=[RuntimeError("foo")], + reason="Something went wrong", + http_error_cls=web.HTTPInternalServerError, + skip_internal_error_details=skip_details, + ) + + # For now until deprecated SEE https://github.com/aio-libs/aiohttp/issues/2415 + assert isinstance(exception, Exception) + assert isinstance(exception, web.Response) + assert hasattr(exception, "__http_exception__") + + # until they have exception.make_response(), we use + response = exception_to_response(exception) + assert isinstance(response, web.Response) + assert not isinstance(response, Exception) + assert not 
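create_http_error still builds a raise-able HTTPError carrying the enveloped error payload, while the new exception_to_response converts it into a plain web.Response, since returning web.HTTPException from handlers is deprecated (the aiohttp issue #2415 referenced above). A minimal sketch of the intended call-site pattern, assuming a generic aiohttp handler and a hypothetical upstream call:

    import asyncio

    from aiohttp import web
    from servicelib.aiohttp.rest_responses import create_http_error, exception_to_response

    async def fetch_thing() -> dict:  # hypothetical upstream call
        raise asyncio.TimeoutError

    async def get_thing(request: web.Request) -> web.Response:
        try:
            return web.json_response({"data": await fetch_thing()})
        except asyncio.TimeoutError as err:
            # build the enveloped HTTPError, then return it as a plain Response
            return exception_to_response(
                create_http_error(
                    err,
                    reason="upstream took too long",
                    http_error_cls=web.HTTPGatewayTimeout,
                )
            )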
hasattr(response, "__http_exception__") + + assert response.content_type == MIMETYPE_APPLICATION_JSON + assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR + assert response.text + assert response.body diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py index 6886c2408c2..ccd523fc750 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py @@ -10,7 +10,11 @@ from models_library.utils.json_serialization import json_dumps from pydantic import BaseModel, Field, ValidationError, parse_obj_as from pydantic.types import NonNegativeInt -from servicelib.aiohttp.rest_responses import create_error_response, get_http_error +from servicelib.aiohttp.rest_responses import ( + create_http_error, + exception_to_response, + get_http_error, +) from servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, @@ -162,15 +166,21 @@ async def start_computation(request: web.Request) -> web.Response: return envelope_json_response(data, status_cls=web.HTTPCreated) except DirectorServiceError as exc: - return create_error_response( - exc, - reason=exc.reason, - http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable, + return exception_to_response( + create_http_error( + exc, + reason=exc.reason, + http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable, + ) ) except UserDefaultWalletNotFoundError as exc: - return create_error_response(exc, http_error_cls=web.HTTPNotFound) + return exception_to_response( + create_http_error(exc, http_error_cls=web.HTTPNotFound) + ) except WalletNotEnoughCreditsError as exc: - return create_error_response(exc, http_error_cls=web.HTTPPaymentRequired) + return exception_to_response( + create_http_error(exc, http_error_cls=web.HTTPPaymentRequired) + ) @routes.post(f"/{VTAG}/computations/{{project_id}}:stop", name="stop_computation") @@ -203,7 +213,7 @@ async def stop_computation(request: web.Request) -> web.Response: raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON) except DirectorServiceError as exc: - return create_error_response( + return create_http_error( exc, reason=exc.reason, http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable, @@ -252,10 +262,10 @@ async def get_computation(request: web.Request) -> web.Response: dumps=json_dumps, ) except DirectorServiceError as exc: - return create_error_response( + return create_http_error( exc, reason=exc.reason, http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable, ) except ValidationError as exc: - return create_error_response(exc, http_error_cls=web.HTTPInternalServerError) + return create_http_error(exc, http_error_cls=web.HTTPInternalServerError) diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index adf3849444d..9aac48842a6 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -87,7 +87,7 @@ async def __delete_all_redis_keys__(redis_settings: RedisSettings): decode_responses=True, ) await client.flushall() - await client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) @pytest.fixture(scope="session") diff --git 
a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index c4b5807d58f..1217e6f39b0 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -579,7 +579,7 @@ async def redis_locks_client( yield client await client.flushall() - await client.close(close_connection_pool=True) + await client.aclose(close_connection_pool=True) # SOCKETS FIXTURES -------------------------------------------------------- From 87d335448c2690fb283950431cdad758c2234f66 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Thu, 27 Jun 2024 07:54:53 +0200 Subject: [PATCH 067/219] =?UTF-8?q?=E2=9C=A8=20dynamic-sidecar=20logs=20ch?= =?UTF-8?q?anges=20to=20input=20ports=20(#5999)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../src/servicelib/file_utils.py | 55 +++++++++++++++- .../service-library/tests/test_file_utils.py | 62 ++++++++++++++++++- .../modules/long_running_tasks.py | 22 ++++--- 3 files changed, 127 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/file_utils.py b/packages/service-library/src/servicelib/file_utils.py index f05f35af329..c90468cba2a 100644 --- a/packages/service-library/src/servicelib/file_utils.py +++ b/packages/service-library/src/servicelib/file_utils.py @@ -1,8 +1,10 @@ import asyncio import hashlib import shutil +from contextlib import contextmanager +from logging import Logger from pathlib import Path -from typing import Final, Protocol +from typing import Final, Iterator, Protocol # https://docs.python.org/3/library/shutil.html#shutil.rmtree # https://docs.python.org/3/library/os.html#os.remove @@ -60,10 +62,59 @@ async def create_sha256_checksum( async def _eval_hash_async( async_stream: AsyncStream, - hasher: "hashlib._Hash", # noqa: SLF001 + hasher: "hashlib._Hash", chunk_size: ByteSize, ) -> str: while chunk := await async_stream.read(chunk_size): hasher.update(chunk) digest = hasher.hexdigest() return f"{digest}" + + +def _get_file_properties(path: Path) -> tuple[float, int]: + stats = path.stat() + return stats.st_mtime, stats.st_size + + +def _get_directory_snapshot(path: Path) -> dict[str, tuple[float, int]]: + return { + f"{p.relative_to(path)}": _get_file_properties(p) + for p in path.rglob("*") + if p.is_file() + } + + +@contextmanager +def log_directory_changes(path: Path, logger: Logger, log_level: int) -> Iterator[None]: + before: dict[str, tuple[float, int]] = _get_directory_snapshot(path) + yield + after: dict[str, tuple[float, int]] = _get_directory_snapshot(path) + + after_keys: set[str] = set(after.keys()) + before_keys: set[str] = set(before.keys()) + common_keys = before_keys & after_keys + + added_elements = after_keys - before_keys + removed_elements = before_keys - after_keys + content_changed_elements = {x for x in common_keys if before[x] != after[x]} + + if added_elements or removed_elements or content_changed_elements: + logger.log(log_level, "File changes in path: '%s'", f"{path}") + if added_elements: + logger.log( + log_level, + "Files added:\n%s", + "\n".join([f"+ {x}" for x in sorted(added_elements)]), + ) + if removed_elements: + logger.log( + log_level, + "Files removed:\n%s", + "\n".join([f"- {x}" for x in sorted(removed_elements)]), + ) + if content_changed_elements: + logger.log( + log_level, + "File content changed:\n%s", + "\n".join([f"* {x}" for x in 
sorted(content_changed_elements)]), + ) diff --git a/packages/service-library/tests/test_file_utils.py b/packages/service-library/tests/test_file_utils.py index 454106c22b4..b5feff78603 100644 --- a/packages/service-library/tests/test_file_utils.py +++ b/packages/service-library/tests/test_file_utils.py @@ -1,11 +1,14 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +import logging from pathlib import Path import pytest from faker import Faker -from servicelib.file_utils import remove_directory +from servicelib.file_utils import log_directory_changes, remove_directory + +_logger = logging.getLogger(__name__) @pytest.fixture @@ -80,3 +83,60 @@ async def test_remove_not_existing_directory_rasing_error( await remove_directory( path=missing_path, only_children=only_children, ignore_errors=False ) + + +async def test_log_directory_changes(caplog: pytest.LogCaptureFixture, some_dir: Path): + # directory creation triggers no changes + caplog.clear() + with log_directory_changes(some_dir, _logger, logging.ERROR): + (some_dir / "a-dir").mkdir(parents=True, exist_ok=True) + assert "File changes in path" not in caplog.text + assert "Files added:" not in caplog.text + assert "Files removed:" not in caplog.text + assert "File content changed" not in caplog.text + + # files were added + caplog.clear() + with log_directory_changes(some_dir, _logger, logging.ERROR): + (some_dir / "hoho").touch() + assert "File changes in path" in caplog.text + assert "Files added:" in caplog.text + assert "Files removed:" not in caplog.text + assert "File content changed" not in caplog.text + + # files were removed + caplog.clear() + with log_directory_changes(some_dir, _logger, logging.ERROR): + await remove_directory(path=some_dir) + assert "File changes in path" in caplog.text + assert "Files removed:" in caplog.text + assert "Files added:" not in caplog.text + assert "File content changed" not in caplog.text + + # nothing changed + caplog.clear() + with log_directory_changes(some_dir, _logger, logging.ERROR): + pass + assert caplog.text == "" + + # files added and removed + caplog.clear() + some_dir.mkdir(parents=True, exist_ok=True) + (some_dir / "som_other_file").touch() + with log_directory_changes(some_dir, _logger, logging.ERROR): + (some_dir / "som_other_file").unlink() + (some_dir / "som_other_file_2").touch() + assert "File changes in path" in caplog.text + assert "Files added:" in caplog.text + assert "Files removed:" in caplog.text + assert "File content changed" not in caplog.text + + # file content changed + caplog.clear() + (some_dir / "file_to_change").touch() + with log_directory_changes(some_dir, _logger, logging.ERROR): + (some_dir / "file_to_change").write_text("ab") + assert "File changes in path" in caplog.text + assert "Files added:" not in caplog.text + assert "Files removed:" not in caplog.text + assert "File content changed" in caplog.text diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index cc0b76c9197..592ce9c39c3 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -11,6 +11,7 @@ from models_library.rabbitmq_messages import ProgressType, SimcorePlatformStatus from pydantic import PositiveInt from servicelib.fastapi.long_running_tasks.server import TaskProgress +from 
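log_directory_changes snapshots each file as a (st_mtime, st_size) pair, which is why the tests above detect additions, removals and rewrites; an edit that preserves both size and timestamp would, however, go unreported. A minimal sketch of that blind spot, assuming an already existing file:

    import os
    from pathlib import Path

    f = Path("data.bin")
    stat = f.stat()
    f.write_bytes(b"X" * stat.st_size)           # same size, different content
    os.utime(f, (stat.st_atime, stat.st_mtime))  # restore the original mtime
    # the before/after snapshots now compare equal, so nothing is logged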
servicelib.file_utils import log_directory_changes from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -476,15 +477,18 @@ async def task_ports_inputs_pull( ), description="pulling inputs", ) as root_progress: - transferred_bytes = await nodeports.download_target_ports( - nodeports.PortTypeName.INPUTS, - mounted_volumes.disk_inputs_path, - port_keys=port_keys, - io_log_redirect_cb=functools.partial( - post_sidecar_log_message, app, log_level=logging.INFO - ), - progress_bar=root_progress, - ) + with log_directory_changes( + mounted_volumes.disk_inputs_path, _logger, logging.INFO + ): + transferred_bytes = await nodeports.download_target_ports( + nodeports.PortTypeName.INPUTS, + mounted_volumes.disk_inputs_path, + port_keys=port_keys, + io_log_redirect_cb=functools.partial( + post_sidecar_log_message, app, log_level=logging.INFO + ), + progress_bar=root_progress, + ) await post_sidecar_log_message( app, "Finished pulling inputs", log_level=logging.INFO ) From c4ed0514c37d47d329bfd6004937644e28f7b167 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 28 Jun 2024 08:18:26 +0200 Subject: [PATCH 068/219] =?UTF-8?q?=E2=99=BB=EF=B8=8FStorage:=20refactorin?= =?UTF-8?q?g=20(#5981)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu <5694077+GitHK@users.noreply.github.com> --- packages/aws-library/requirements/_test.in | 1 + packages/aws-library/requirements/_test.txt | 5 + .../src/aws_library/s3/__init__.py | 38 + .../aws-library/src/aws_library/s3/_client.py | 460 ++++++ .../src/aws_library/s3/_constants.py | 10 + .../src/aws_library/s3/_error_handler.py | 137 ++ .../aws-library/src/aws_library/s3/_errors.py | 34 + .../aws-library/src/aws_library/s3/_models.py | 67 + .../aws-library/src/aws_library/s3/_utils.py | 36 + .../aws-library/src/aws_library/s3/client.py | 75 - .../aws-library/src/aws_library/s3/errors.py | 66 - packages/aws-library/tests/conftest.py | 1 + .../aws-library/tests}/test_aiobotocore.py | 0 packages/aws-library/tests/test_s3_client.py | 1328 ++++++++++++++++- .../aws-library/tests}/test_s3_utils.py | 6 +- .../src/models_library/api_schemas_storage.py | 4 +- .../src/pytest_simcore/file_extra.py | 56 +- .../helpers/parametrizations.py | 8 +- .../src/pytest_simcore/helpers/s3.py | 59 +- .../modules/s3.py | 3 +- .../services/resource_tracker_service_runs.py | 2 +- .../simcore_service_storage/application.py | 9 +- .../src/simcore_service_storage/constants.py | 10 +- .../handlers_health.py | 14 +- .../long_running_tasks.py | 2 +- .../src/simcore_service_storage/models.py | 15 +- .../storage/src/simcore_service_storage/s3.py | 76 +- .../src/simcore_service_storage/s3_client.py | 441 ------ .../src/simcore_service_storage/s3_utils.py | 32 - .../simcore_service_storage/simcore_s3_dsm.py | 158 +- .../simcore_s3_dsm_utils.py | 31 +- .../simcore_service_storage/utils_handlers.py | 2 +- services/storage/tests/conftest.py | 385 +++-- .../storage/tests/fixtures/data_models.py | 25 +- .../tests/helpers/utils_file_meta_data.py | 4 +- .../storage/tests/helpers/utils_project.py | 2 +- services/storage/tests/unit/conftest.py | 216 --- .../storage/tests/unit/test__openapi_specs.py | 11 +- services/storage/tests/unit/test_dsm.py | 25 +- .../storage/tests/unit/test_dsm_dsmcleaner.py | 47 +- .../tests/unit/test_handlers_datasets.py | 8 +- .../storage/tests/unit/test_handlers_files.py | 245 +-- 
.../tests/unit/test_handlers_health.py | 21 +- .../tests/unit/test_handlers_simcore_s3.py | 47 +- .../test_handlers_simcore_s3_benchmark.py | 421 ------ services/storage/tests/unit/test_s3.py | 24 - services/storage/tests/unit/test_s3_client.py | 988 ------------ services/storage/tests/unit/test_settings.py | 17 - .../storage/tests/unit/test_simcore_s3_dsm.py | 12 +- .../storage/tests/unit/test_utils_handlers.py | 2 +- 50 files changed, 2834 insertions(+), 2852 deletions(-) create mode 100644 packages/aws-library/src/aws_library/s3/_client.py create mode 100644 packages/aws-library/src/aws_library/s3/_constants.py create mode 100644 packages/aws-library/src/aws_library/s3/_error_handler.py create mode 100644 packages/aws-library/src/aws_library/s3/_errors.py create mode 100644 packages/aws-library/src/aws_library/s3/_models.py create mode 100644 packages/aws-library/src/aws_library/s3/_utils.py delete mode 100644 packages/aws-library/src/aws_library/s3/client.py delete mode 100644 packages/aws-library/src/aws_library/s3/errors.py rename {services/storage/tests/unit => packages/aws-library/tests}/test_aiobotocore.py (100%) rename {services/storage/tests/unit => packages/aws-library/tests}/test_s3_utils.py (97%) rename services/storage/tests/helpers/file_utils.py => packages/pytest-simcore/src/pytest_simcore/helpers/s3.py (63%) delete mode 100644 services/storage/src/simcore_service_storage/s3_client.py delete mode 100644 services/storage/tests/unit/conftest.py delete mode 100644 services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py delete mode 100644 services/storage/tests/unit/test_s3.py delete mode 100644 services/storage/tests/unit/test_s3_client.py delete mode 100644 services/storage/tests/unit/test_settings.py diff --git a/packages/aws-library/requirements/_test.in b/packages/aws-library/requirements/_test.in index 2dad2a71008..e488be8a23f 100644 --- a/packages/aws-library/requirements/_test.in +++ b/packages/aws-library/requirements/_test.in @@ -15,6 +15,7 @@ moto[server] pint pytest pytest-asyncio +pytest-benchmark pytest-cov pytest-icdiff pytest-instafail diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index 47527b71a51..c8c50583a28 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -148,6 +148,8 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff +py-cpuinfo==9.0.0 + # via pytest-benchmark py-partiql-parser==0.5.4 # via moto pycparser==2.22 @@ -163,6 +165,7 @@ pytest==8.2.0 # via # -r requirements/_test.in # pytest-asyncio + # pytest-benchmark # pytest-cov # pytest-icdiff # pytest-instafail @@ -172,6 +175,8 @@ pytest-asyncio==0.21.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in +pytest-benchmark==4.0.0 + # via -r requirements/_test.in pytest-cov==5.0.0 # via -r requirements/_test.in pytest-icdiff==0.9 diff --git a/packages/aws-library/src/aws_library/s3/__init__.py b/packages/aws-library/src/aws_library/s3/__init__.py index e69de29bb2d..929a446d211 100644 --- a/packages/aws-library/src/aws_library/s3/__init__.py +++ b/packages/aws-library/src/aws_library/s3/__init__.py @@ -0,0 +1,38 @@ +from ._client import SimcoreS3API +from ._constants import PRESIGNED_LINK_MAX_SIZE, S3_MAX_FILE_SIZE +from ._errors import ( + S3AccessError, + S3BucketInvalidError, + S3DestinationNotEmptyError, + S3KeyNotFoundError, + S3NotConnectedError, + S3RuntimeError, + S3UploadNotFoundError, +) +from ._models import ( + 
MultiPartUploadLinks, + S3DirectoryMetaData, + S3MetaData, + S3ObjectKey, + UploadID, +) + +__all__: tuple[str, ...] = ( + "SimcoreS3API", + "PRESIGNED_LINK_MAX_SIZE", + "S3_MAX_FILE_SIZE", + "S3AccessError", + "S3BucketInvalidError", + "S3DestinationNotEmptyError", + "S3KeyNotFoundError", + "S3NotConnectedError", + "S3RuntimeError", + "S3UploadNotFoundError", + "S3DirectoryMetaData", + "S3MetaData", + "S3ObjectKey", + "MultiPartUploadLinks", + "UploadID", +) + +# nopycln: file diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py new file mode 100644 index 00000000000..67478f4e204 --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -0,0 +1,460 @@ +import asyncio +import contextlib +import logging +import urllib.parse +from collections.abc import AsyncGenerator, Callable, Sequence +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Final, cast + +import aioboto3 +from aiobotocore.session import ClientCreatorContext +from boto3.s3.transfer import TransferConfig +from botocore import exceptions as botocore_exc +from botocore.client import Config +from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart +from models_library.basic_types import SHA256Str +from pydantic import AnyUrl, ByteSize, parse_obj_as +from servicelib.logging_utils import log_catch, log_context +from servicelib.utils import limited_gather +from settings_library.s3 import S3Settings +from types_aiobotocore_s3 import S3Client +from types_aiobotocore_s3.literals import BucketLocationConstraintType +from types_aiobotocore_s3.type_defs import ObjectIdentifierTypeDef + +from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from ._error_handler import s3_exception_handler, s3_exception_handler_async_gen +from ._errors import S3DestinationNotEmptyError, S3KeyNotFoundError +from ._models import ( + MultiPartUploadLinks, + S3DirectoryMetaData, + S3MetaData, + S3ObjectKey, + UploadID, +) +from ._utils import compute_num_file_chunks + +_logger = logging.getLogger(__name__) + +_S3_MAX_CONCURRENCY_DEFAULT: Final[int] = 10 +_DEFAULT_AWS_REGION: Final[str] = "us-east-1" +_MAX_ITEMS_PER_PAGE: Final[int] = 500 +_MAX_CONCURRENT_COPY: Final[int] = 4 +_AWS_MAX_ITEMS_PER_PAGE: Final[int] = 1000 + + +@dataclass(frozen=True) +class SimcoreS3API: # pylint: disable=too-many-public-methods + _client: S3Client + _session: aioboto3.Session + _exit_stack: contextlib.AsyncExitStack = field( + default_factory=contextlib.AsyncExitStack + ) + transfer_max_concurrency: int = _S3_MAX_CONCURRENCY_DEFAULT + + @classmethod + async def create( + cls, settings: S3Settings, s3_max_concurrency: int = _S3_MAX_CONCURRENCY_DEFAULT + ) -> "SimcoreS3API": + session = aioboto3.Session() + session_client = session.client( + "s3", + endpoint_url=settings.S3_ENDPOINT, + aws_access_key_id=settings.S3_ACCESS_KEY, + aws_secret_access_key=settings.S3_SECRET_KEY, + region_name=settings.S3_REGION, + config=Config(signature_version="s3v4"), + ) + assert isinstance(session_client, ClientCreatorContext) # nosec + exit_stack = contextlib.AsyncExitStack() + s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client)) + # NOTE: this triggers a botocore.exception.ClientError in case the connection is not made to the S3 backend + await s3_client.list_buckets() + + return cls(s3_client, session, exit_stack, s3_max_concurrency) + + async def close(self) -> None: + await self._exit_stack.aclose() + + async def 
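SimcoreS3API.create opens the aioboto3 client through an AsyncExitStack and calls list_buckets() up front, so misconfigured credentials or endpoints fail at creation time; close() unwinds the stack. A minimal lifecycle sketch, assuming an already populated S3Settings:

    from aws_library.s3 import SimcoreS3API
    from settings_library.s3 import S3Settings

    async def run(settings: S3Settings) -> None:
        s3 = await SimcoreS3API.create(settings)  # raises if the backend is unreachable
        try:
            ...  # use the client, e.g. the bucket/object helpers defined below
        finally:
            await s3.close()  # unwinds the AsyncExitStack and closes the client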
http_check_bucket_connected(self, *, bucket: S3BucketName) -> bool: + with log_catch(_logger, reraise=False): + return await self.bucket_exists(bucket=bucket) + return False + + @s3_exception_handler(_logger) + async def create_bucket( + self, *, bucket: S3BucketName, region: BucketLocationConstraintType + ) -> None: + with log_context( + _logger, logging.INFO, msg=f"Create bucket {bucket} in {region}" + ): + try: + # NOTE: see https://github.com/boto/boto3/issues/125 why this is so... (sic) + # setting it for the us-east-1 creates issue when creating buckets + create_bucket_config: dict[str, Any] = {"Bucket": f"{bucket}"} + if region != _DEFAULT_AWS_REGION: + create_bucket_config["CreateBucketConfiguration"] = { + "LocationConstraint": region + } + + await self._client.create_bucket(**create_bucket_config) + + except self._client.exceptions.BucketAlreadyOwnedByYou: + _logger.info( + "Bucket %s already exists and is owned by us", + bucket, + ) + + @s3_exception_handler(_logger) + async def bucket_exists(self, *, bucket: S3BucketName) -> bool: + """ + :raises: S3AccessError for any other error + """ + try: + await self._client.head_bucket(Bucket=bucket) + return True + except botocore_exc.ClientError as exc: + status_code = exc.response.get("Error", {}).get("Code", -1) + if status_code == "404": + return False + raise + + @s3_exception_handler(_logger) + async def object_exists( + self, *, bucket: S3BucketName, object_key: S3ObjectKey + ) -> bool: + # SEE https://www.peterbe.com/plog/fastest-way-to-find-out-if-a-file-exists-in-s3 + response = await self._client.list_objects_v2(Bucket=bucket, Prefix=object_key) + return len(response.get("Contents", [])) > 0 + + @s3_exception_handler(_logger) + async def get_object_metadata( + self, *, bucket: S3BucketName, object_key: S3ObjectKey + ) -> S3MetaData: + response = await self._client.head_object( + Bucket=bucket, Key=object_key, ChecksumMode="ENABLED" + ) + return S3MetaData.from_botocore_head_object(object_key, response) + + @s3_exception_handler(_logger) + async def get_directory_metadata( + self, *, bucket: S3BucketName, prefix: str + ) -> S3DirectoryMetaData: + size = 0 + async for s3_object in self._list_all_objects(bucket=bucket, prefix=prefix): + size += s3_object.size + return S3DirectoryMetaData(size=size) + + @s3_exception_handler_async_gen(_logger) + async def list_objects_paginated( + self, + bucket: S3BucketName, + prefix: str, + *, + items_per_page: int = _MAX_ITEMS_PER_PAGE, + ) -> AsyncGenerator[list[S3MetaData], None]: + if items_per_page > _AWS_MAX_ITEMS_PER_PAGE: + msg = f"items_per_page must be <= {_AWS_MAX_ITEMS_PER_PAGE}" + raise ValueError(msg) + async for page in self._client.get_paginator("list_objects_v2").paginate( + Bucket=bucket, + Prefix=prefix, + PaginationConfig={ + "PageSize": items_per_page, + }, + ): + yield [ + S3MetaData.from_botocore_list_objects(obj) + for obj in page.get("Contents", []) + ] + + async def _list_all_objects( + self, *, bucket: S3BucketName, prefix: str + ) -> AsyncGenerator[S3MetaData, None]: + async for s3_objects in self.list_objects_paginated( + bucket=bucket, prefix=prefix + ): + for obj in s3_objects: + yield obj + + @s3_exception_handler(_logger) + async def delete_objects_recursively( + self, *, bucket: S3BucketName, prefix: str + ) -> None: + # NOTE: deletion of objects is done in batches of max 1000 elements, + # the maximum accepted by the S3 API + with log_context( + _logger, logging.DEBUG, f"deleting objects in {prefix=}", log_duration=True + ): + async for s3_objects in 
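Note that list_objects_paginated yields whole pages (lists of S3MetaData) rather than individual objects, and items_per_page may not exceed AWS's hard cap of 1000 keys per page. A minimal consumer sketch, assuming an initialized client and a plain str standing in for S3BucketName; this is essentially what get_directory_metadata does internally:

    from aws_library.s3 import SimcoreS3API

    async def folder_size(s3: SimcoreS3API, bucket: str, prefix: str) -> int:
        total = 0
        async for page in s3.list_objects_paginated(bucket, prefix, items_per_page=500):
            total += sum(meta.size for meta in page)  # one page = up to 500 objects
        return total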
self.list_objects_paginated( + bucket=bucket, prefix=prefix + ): + objects_to_delete: Sequence[ObjectIdentifierTypeDef] = [ + {"Key": f"{_.object_key}"} for _ in s3_objects + ] + if objects_to_delete: + await self._client.delete_objects( + Bucket=bucket, + Delete={"Objects": objects_to_delete}, + ) + + @s3_exception_handler(_logger) + async def delete_object( + self, *, bucket: S3BucketName, object_key: S3ObjectKey + ) -> None: + await self._client.delete_object(Bucket=bucket, Key=object_key) + + @s3_exception_handler(_logger) + async def undelete_object( + self, *, bucket: S3BucketName, object_key: S3ObjectKey + ) -> None: + """this allows to restore a file that was deleted. + **NOT to restore previous versions!""" + with log_context( + _logger, logging.DEBUG, msg=f"undeleting {bucket}/{object_key}" + ): + response = await self._client.list_object_versions( + Bucket=bucket, Prefix=object_key, MaxKeys=1 + ) + _logger.debug("%s", f"{response=}") + if not response["IsTruncated"] and all( + _ not in response for _ in ("Versions", "DeleteMarkers") + ): + raise S3KeyNotFoundError(key=object_key, bucket=bucket) + if "DeleteMarkers" in response: + # we have something to undelete + latest_version = response["DeleteMarkers"][0] + assert "IsLatest" in latest_version # nosec + assert "VersionId" in latest_version # nosec + await self._client.delete_object( + Bucket=bucket, + Key=object_key, + VersionId=latest_version["VersionId"], + ) + _logger.debug("restored %s", f"{bucket}/{object_key}") + + @s3_exception_handler(_logger) + async def create_single_presigned_download_link( + self, + *, + bucket: S3BucketName, + object_key: S3ObjectKey, + expiration_secs: int, + ) -> AnyUrl: + # NOTE: ensure the bucket/object exists, this will raise if not + await self._client.head_bucket(Bucket=bucket) + await self._client.head_object(Bucket=bucket, Key=object_key) + generated_link = await self._client.generate_presigned_url( + "get_object", + Params={"Bucket": bucket, "Key": object_key}, + ExpiresIn=expiration_secs, + ) + url: AnyUrl = parse_obj_as(AnyUrl, generated_link) + return url + + @s3_exception_handler(_logger) + async def create_single_presigned_upload_link( + self, *, bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int + ) -> AnyUrl: + # NOTE: ensure the bucket/object exists, this will raise if not + await self._client.head_bucket(Bucket=bucket) + generated_link = await self._client.generate_presigned_url( + "put_object", + Params={"Bucket": bucket, "Key": object_key}, + ExpiresIn=expiration_secs, + ) + url: AnyUrl = parse_obj_as(AnyUrl, generated_link) + return url + + @s3_exception_handler(_logger) + async def create_multipart_upload_links( + self, + *, + bucket: S3BucketName, + object_key: S3ObjectKey, + file_size: ByteSize, + expiration_secs: int, + sha256_checksum: SHA256Str | None, + ) -> MultiPartUploadLinks: + # NOTE: ensure the bucket exists, this will raise if not + await self._client.head_bucket(Bucket=bucket) + # first initiate the multipart upload + create_input: dict[str, Any] = {"Bucket": bucket, "Key": object_key} + if sha256_checksum: + create_input["Metadata"] = {"sha256_checksum": sha256_checksum} + response = await self._client.create_multipart_upload(**create_input) + upload_id = response["UploadId"] + # compute the number of links, based on the announced file size + num_upload_links, chunk_size = compute_num_file_chunks(file_size) + # now create the links + upload_links = parse_obj_as( + list[AnyUrl], + await asyncio.gather( + *( + 
self._client.generate_presigned_url( + "upload_part", + Params={ + "Bucket": bucket, + "Key": object_key, + "PartNumber": i + 1, + "UploadId": upload_id, + }, + ExpiresIn=expiration_secs, + ) + for i in range(num_upload_links) + ), + ), + ) + return MultiPartUploadLinks( + upload_id=upload_id, chunk_size=chunk_size, urls=upload_links + ) + + @s3_exception_handler(_logger) + async def list_ongoing_multipart_uploads( + self, + *, + bucket: S3BucketName, + ) -> list[tuple[UploadID, S3ObjectKey]]: + """Returns all the currently ongoing multipart uploads + + NOTE: minio does not implement the same behaviour as AWS here and will + only return the uploads if a prefix or object name is given [minio issue](https://github.com/minio/minio/issues/7632). + + :return: list of AWS uploads see [boto3 documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.list_multipart_uploads) + """ + response = await self._client.list_multipart_uploads( + Bucket=bucket, + ) + + return [ + ( + upload.get("UploadId", "undefined-uploadid"), + S3ObjectKey(upload.get("Key", "undefined-key")), + ) + for upload in response.get("Uploads", []) + ] + + @s3_exception_handler(_logger) + async def abort_multipart_upload( + self, *, bucket: S3BucketName, object_key: S3ObjectKey, upload_id: UploadID + ) -> None: + await self._client.abort_multipart_upload( + Bucket=bucket, Key=object_key, UploadId=upload_id + ) + + @s3_exception_handler(_logger) + async def complete_multipart_upload( + self, + *, + bucket: S3BucketName, + object_key: S3ObjectKey, + upload_id: UploadID, + uploaded_parts: list[UploadedPart], + ) -> ETag: + inputs: dict[str, Any] = { + "Bucket": bucket, + "Key": object_key, + "UploadId": upload_id, + "MultipartUpload": { + "Parts": [ + {"ETag": part.e_tag, "PartNumber": part.number} + for part in uploaded_parts + ] + }, + } + response = await self._client.complete_multipart_upload(**inputs) + return response["ETag"] + + @s3_exception_handler(_logger) + async def upload_file( + self, + *, + bucket: S3BucketName, + file: Path, + object_key: S3ObjectKey, + bytes_transfered_cb: Callable[[int], None] | None, + ) -> None: + """upload a file using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)""" + upload_options = { + "Bucket": bucket, + "Key": object_key, + "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), + } + if bytes_transfered_cb: + upload_options |= {"Callback": bytes_transfered_cb} + await self._client.upload_file(f"{file}", **upload_options) + + @s3_exception_handler(_logger) + async def copy_object( + self, + *, + bucket: S3BucketName, + src_object_key: S3ObjectKey, + dst_object_key: S3ObjectKey, + bytes_transfered_cb: Callable[[int], None] | None, + ) -> None: + """copy a file in S3 using aioboto3 transfer manager (e.g. 
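The multipart methods above form a three-step protocol: request presigned part links, upload each part with a plain HTTP PUT while collecting the returned ETags, then complete the upload (or abort it on failure). A minimal sketch of that flow, assuming httpx for the PUTs, chunks pre-split to links.chunk_size, and that UploadedPart is constructed from its field names:

    import httpx
    from models_library.api_schemas_storage import UploadedPart

    async def multipart_upload(s3, bucket: str, key: str, file_size, chunks) -> str:
        links = await s3.create_multipart_upload_links(
            bucket=bucket, object_key=key, file_size=file_size,
            expiration_secs=3600, sha256_checksum=None,
        )
        parts: list[UploadedPart] = []
        async with httpx.AsyncClient() as client:
            for number, (url, chunk) in enumerate(zip(links.urls, chunks), start=1):
                r = await client.put(f"{url}", content=chunk)
                r.raise_for_status()
                parts.append(UploadedPart(number=number, e_tag=r.headers["ETag"]))
        # returns the final ETag; call abort_multipart_upload instead on failure
        return await s3.complete_multipart_upload(
            bucket=bucket, object_key=key,
            upload_id=links.upload_id, uploaded_parts=parts,
        )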
works >5Gb and creates multiple threads)""" + copy_options = { + "CopySource": {"Bucket": bucket, "Key": src_object_key}, + "Bucket": bucket, + "Key": dst_object_key, + "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), + } + if bytes_transfered_cb: + copy_options |= {"Callback": bytes_transfered_cb} + await self._client.copy(**copy_options) + + @s3_exception_handler(_logger) + async def copy_objects_recursively( + self, + *, + bucket: S3BucketName, + src_prefix: str, + dst_prefix: str, + bytes_transfered_cb: Callable[[int], None] | None, + ) -> None: + """copy from 1 location in S3 to another recreating the same structure""" + dst_metadata = await self.get_directory_metadata( + bucket=bucket, prefix=dst_prefix + ) + if dst_metadata.size > 0: + raise S3DestinationNotEmptyError(dst_prefix=dst_prefix) + try: + + await limited_gather( + *[ + self.copy_object( + bucket=bucket, + src_object_key=s3_object.object_key, + dst_object_key=s3_object.object_key.replace( + src_prefix, dst_prefix + ), + bytes_transfered_cb=bytes_transfered_cb, + ) + async for s3_object in self._list_all_objects( + bucket=bucket, prefix=src_prefix + ) + ], + limit=_MAX_CONCURRENT_COPY, + ) + + except Exception: + # rollback changes + with log_catch(_logger, reraise=False), log_context( + _logger, + logging.ERROR, + msg="Unexpected error while copying files recursively, deleting partially copied files", + ): + await self.delete_objects_recursively(bucket=bucket, prefix=dst_prefix) + raise + + @staticmethod + def is_multipart(file_size: ByteSize) -> bool: + return file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE + + @staticmethod + def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl: + url: AnyUrl = parse_obj_as( + AnyUrl, f"s3://{bucket}/{urllib.parse.quote(object_key)}" + ) + return url diff --git a/packages/aws-library/src/aws_library/s3/_constants.py b/packages/aws-library/src/aws_library/s3/_constants.py new file mode 100644 index 00000000000..cc246e8d0ba --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_constants.py @@ -0,0 +1,10 @@ +from typing import Final + +from pydantic import ByteSize, parse_obj_as + +# NOTE: AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html +MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") + + +PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5GiB") +S3_MAX_FILE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5TiB") diff --git a/packages/aws-library/src/aws_library/s3/_error_handler.py b/packages/aws-library/src/aws_library/s3/_error_handler.py new file mode 100644 index 00000000000..023d1553c27 --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_error_handler.py @@ -0,0 +1,137 @@ +import functools +import inspect +import logging +from collections.abc import AsyncGenerator, Callable, Coroutine +from typing import Any, Concatenate, ParamSpec, TypeVar + +from botocore import exceptions as botocore_exc + +from ._errors import ( + S3AccessError, + S3BucketInvalidError, + S3KeyNotFoundError, + S3UploadNotFoundError, +) + + +def _map_botocore_client_exception( + botocore_error: botocore_exc.ClientError, **kwargs +) -> S3AccessError: + status_code = int( + botocore_error.response.get("ResponseMetadata", {}).get("HTTPStatusCode") + or botocore_error.response.get("Error", {}).get("Code", -1) + ) + operation_name = botocore_error.operation_name + match status_code, operation_name: + case 404, "HeadObject": + return S3KeyNotFoundError( + 
bucket=kwargs["bucket"], + key=kwargs.get("object_key") or kwargs.get("src_object_key"), + ) + case (404, "HeadBucket") | (403, "HeadBucket"): + return S3BucketInvalidError(bucket=kwargs["bucket"]) + case (404, "AbortMultipartUpload") | ( + 500, + "CompleteMultipartUpload", + ): + return S3UploadNotFoundError( + bucket=kwargs["bucket"], key=kwargs["object_key"] + ) + case _: + return S3AccessError() + + +P = ParamSpec("P") +R = TypeVar("R") +T = TypeVar("T") + + +def s3_exception_handler( + logger: logging.Logger, +) -> Callable[ # type: ignore[name-defined] + [Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]]], + Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]], +]: + """ + Raises: + S3BucketInvalidError: + S3KeyNotFoundError: + S3BucketInvalidError: + S3UploadNotFoundError: + S3AccessError: + """ + + def decorator( + func: Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]] # type: ignore[name-defined] # noqa: F821 + ) -> Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]]: # type: ignore[name-defined] # noqa: F821 + @functools.wraps(func) + async def wrapper(self: "SimcoreS3API", *args: P.args, **kwargs: P.kwargs) -> R: # type: ignore[name-defined] # noqa: F821 + try: + return await func(self, *args, **kwargs) + except ( + self._client.exceptions.NoSuchBucket # pylint: disable=protected-access + ) as exc: + raise S3BucketInvalidError( + bucket=exc.response.get("Error", {}).get("BucketName", "undefined") + ) from exc + except botocore_exc.ClientError as exc: + raise _map_botocore_client_exception(exc, **kwargs) from exc + except botocore_exc.EndpointConnectionError as exc: + raise S3AccessError from exc + except botocore_exc.BotoCoreError as exc: + logger.exception("Unexpected error in s3 client: ") + raise S3AccessError from exc + + wrapper.__doc__ = f"{func.__doc__}\n\n{s3_exception_handler.__doc__}" + + return wrapper + + return decorator + + +def s3_exception_handler_async_gen( + logger: logging.Logger, +) -> Callable[ # type: ignore[name-defined] + [Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]]], # noqa: F821 + Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]], # noqa: F821 +]: + """ + Raises: + S3BucketInvalidError: + S3KeyNotFoundError: + S3BucketInvalidError: + S3UploadNotFoundError: + S3AccessError: + """ + + def decorator( + func: Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]] # type: ignore[name-defined] # noqa: F821 + ) -> Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]]: # type: ignore[name-defined] # noqa: F821 + @functools.wraps(func) + async def async_generator_wrapper( + self: "SimcoreS3API", *args: P.args, **kwargs: P.kwargs # type: ignore[name-defined] # noqa: F821 + ) -> AsyncGenerator[T, None]: + try: + assert inspect.isasyncgenfunction(func) # nosec + async for item in func(self, *args, **kwargs): + yield item + except ( + self._client.exceptions.NoSuchBucket # pylint: disable=protected-access + ) as exc: + raise S3BucketInvalidError( + bucket=exc.response.get("Error", {}).get("BucketName", "undefined") + ) from exc + except botocore_exc.ClientError as exc: + raise _map_botocore_client_exception(exc, **kwargs) from exc + except botocore_exc.EndpointConnectionError as exc: + raise S3AccessError from exc + except botocore_exc.BotoCoreError as exc: + logger.exception("Unexpected error in s3 client: ") + raise S3AccessError from exc + + async_generator_wrapper.__doc__ = ( + f"{func.__doc__}\n\n{s3_exception_handler.__doc__}" + ) + return 
async_generator_wrapper + + return decorator diff --git a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py new file mode 100644 index 00000000000..f297b04b64d --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_errors.py @@ -0,0 +1,34 @@ +from pydantic.errors import PydanticErrorMixin + + +class S3RuntimeError(PydanticErrorMixin, RuntimeError): + msg_template: str = "S3 client unexpected error" + + +class S3NotConnectedError(S3RuntimeError): + msg_template: str = "Cannot connect with s3 server" + + +class S3AccessError(S3RuntimeError): + code = "s3_access.error" + msg_template: str = "Unexpected error while accessing S3 backend" + + +class S3BucketInvalidError(S3AccessError): + code = "s3_bucket.invalid_error" + msg_template: str = "The bucket '{bucket}' is invalid" + + +class S3KeyNotFoundError(S3AccessError): + code = "s3_key.not_found_error" + msg_template: str = "The file {key} in {bucket} was not found" + + +class S3UploadNotFoundError(S3AccessError): + code = "s3_upload.not_found_error" + msg_template: str = "The upload for {key} in {bucket} was not found" + + +class S3DestinationNotEmptyError(S3AccessError): + code = "s3_destination.not_empty_error" + msg_template: str = "The destination {dst_prefix} is not empty" diff --git a/packages/aws-library/src/aws_library/s3/_models.py b/packages/aws-library/src/aws_library/s3/_models.py new file mode 100644 index 00000000000..63e7ad15699 --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_models.py @@ -0,0 +1,67 @@ +import datetime +from dataclasses import dataclass +from typing import TypeAlias + +from models_library.api_schemas_storage import ETag +from models_library.basic_types import SHA256Str +from pydantic import AnyUrl, BaseModel, ByteSize +from types_aiobotocore_s3.type_defs import HeadObjectOutputTypeDef, ObjectTypeDef + +S3ObjectKey: TypeAlias = str +UploadID: TypeAlias = str + + +@dataclass(frozen=True, slots=True, kw_only=True) +class S3MetaData: + object_key: S3ObjectKey + last_modified: datetime.datetime + e_tag: ETag + sha256_checksum: SHA256Str | None + size: int + + @staticmethod + def from_botocore_head_object( + object_key: S3ObjectKey, obj: HeadObjectOutputTypeDef + ) -> "S3MetaData": + return S3MetaData( + object_key=object_key, + last_modified=obj["LastModified"], + e_tag=obj["ETag"].strip('"'), + sha256_checksum=( + SHA256Str(obj.get("ChecksumSHA256")) + if obj.get("ChecksumSHA256") + else None + ), + size=obj["ContentLength"], + ) + + @staticmethod + def from_botocore_list_objects( + obj: ObjectTypeDef, + ) -> "S3MetaData": + assert "Key" in obj # nosec + assert "LastModified" in obj # nosec + assert "ETag" in obj # nosec + assert "Size" in obj # nosec + return S3MetaData( + object_key=obj["Key"], + last_modified=obj["LastModified"], + e_tag=obj["ETag"].strip('"'), + sha256_checksum=( + SHA256Str(obj.get("ChecksumSHA256")) + if obj.get("ChecksumSHA256") + else None + ), + size=obj["Size"], + ) + + +@dataclass(frozen=True) +class S3DirectoryMetaData: + size: int + + +class MultiPartUploadLinks(BaseModel): + upload_id: UploadID + chunk_size: ByteSize + urls: list[AnyUrl] diff --git a/packages/aws-library/src/aws_library/s3/_utils.py b/packages/aws-library/src/aws_library/s3/_utils.py new file mode 100644 index 00000000000..00a1bcc59bb --- /dev/null +++ b/packages/aws-library/src/aws_library/s3/_utils.py @@ -0,0 +1,36 @@ +from typing import Final + +from pydantic import ByteSize, parse_obj_as + +_MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000 + 
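S3 caps a multipart upload at 10000 parts, so the ladder of target part sizes defined just below prefers small parts (more parallel upload requests) and escalates only when the part count would exceed that cap; compute_num_file_chunks walks the ladder. A worked sketch, assuming the helper as defined below:

    from pydantic import ByteSize, parse_obj_as

    from aws_library.s3._utils import compute_num_file_chunks  # private helper

    # 1 GiB fits in 10 MiB parts: ceil(1 GiB / 10 MiB) = 103 links
    assert compute_num_file_chunks(parse_obj_as(ByteSize, "1GiB")) == (103, 10485760)
    # 5 TiB in 10 MiB parts would need 524288 links; the ladder settles on 600 MiB
    assert compute_num_file_chunks(parse_obj_as(ByteSize, "5TiB")) == (8739, 629145600)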
+# this is artificially defined: if possible we keep a maximum number of requests for parallel
+# uploading. If that is not possible, then we create as many upload parts as the max part size allows
+_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [
+    parse_obj_as(ByteSize, x)
+    for x in [
+        "10Mib",
+        "50Mib",
+        "100Mib",
+        "200Mib",
+        "400Mib",
+        "600Mib",
+        "800Mib",
+        "1Gib",
+        "2Gib",
+        "3Gib",
+        "4Gib",
+        "5Gib",
+    ]
+]
+
+
+def compute_num_file_chunks(file_size: ByteSize) -> tuple[int, ByteSize]:
+    for chunk in _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE:
+        num_upload_links = int(file_size / chunk) + (1 if file_size % chunk > 0 else 0)
+        if num_upload_links < _MULTIPART_MAX_NUMBER_OF_PARTS:
+            return (num_upload_links, chunk)
+    msg = f"Could not determine number of upload links for {file_size=}"
+    raise ValueError(
+        msg,
+    )
diff --git a/packages/aws-library/src/aws_library/s3/client.py b/packages/aws-library/src/aws_library/s3/client.py
deleted file mode 100644
index 5779d76ca1b..00000000000
--- a/packages/aws-library/src/aws_library/s3/client.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import contextlib
-import logging
-from dataclasses import dataclass
-from typing import cast
-
-import aioboto3
-from aiobotocore.session import ClientCreatorContext
-from botocore.client import Config
-from models_library.api_schemas_storage import S3BucketName
-from pydantic import AnyUrl, parse_obj_as
-from settings_library.s3 import S3Settings
-from types_aiobotocore_s3 import S3Client
-
-from .errors import s3_exception_handler
-
-_logger = logging.getLogger(__name__)
-
-_S3_MAX_CONCURRENCY_DEFAULT = 10
-
-
-@dataclass(frozen=True)
-class SimcoreS3API:
-    client: S3Client
-    session: aioboto3.Session
-    exit_stack: contextlib.AsyncExitStack
-    transfer_max_concurrency: int
-
-    @classmethod
-    async def create(
-        cls, settings: S3Settings, s3_max_concurrency: int = _S3_MAX_CONCURRENCY_DEFAULT
-    ) -> "SimcoreS3API":
-        session = aioboto3.Session()
-        session_client = session.client(
-            "s3",
-            endpoint_url=settings.S3_ENDPOINT,
-            aws_access_key_id=settings.S3_ACCESS_KEY,
-            aws_secret_access_key=settings.S3_SECRET_KEY,
-            region_name=settings.S3_REGION,
-            config=Config(signature_version="s3v4"),
-        )
-        assert isinstance(session_client, ClientCreatorContext)  # nosec
-        exit_stack = contextlib.AsyncExitStack()
-        s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client))
-        # NOTE: this triggers a botocore.exception.ClientError in case the connection is not made to the S3 backend
-        await s3_client.list_buckets()
-
-        return cls(s3_client, session, exit_stack, s3_max_concurrency)
-
-    async def close(self) -> None:
-        await self.exit_stack.aclose()
-
-    async def http_check_bucket_connected(self, bucket: S3BucketName) -> bool:
-        try:
-            _logger.debug("Head bucket: %s", bucket)
-            await self.client.head_bucket(Bucket=bucket)
-            return True
-        except Exception:  # pylint: disable=broad-except
-            return False
-
-    @s3_exception_handler(_logger)
-    async def create_single_presigned_download_link(
-        self,
-        bucket_name: S3BucketName,
-        object_key: str,
-        expiration_secs: int,
-    ) -> AnyUrl:
-        # NOTE: ensure the bucket/object exists, this will raise if not
-        await self.client.head_bucket(Bucket=bucket_name)
-        generated_link = await self.client.generate_presigned_url(
-            "get_object",
-            Params={"Bucket": bucket_name, "Key": object_key},
-            ExpiresIn=expiration_secs,
-        )
-        url: AnyUrl = parse_obj_as(AnyUrl, generated_link)
-        return url
diff --git a/packages/aws-library/src/aws_library/s3/errors.py
b/packages/aws-library/src/aws_library/s3/errors.py deleted file mode 100644 index 13e60fd0b0b..00000000000 --- a/packages/aws-library/src/aws_library/s3/errors.py +++ /dev/null @@ -1,66 +0,0 @@ -import functools -import logging - -from botocore import exceptions as botocore_exc -from pydantic.errors import PydanticErrorMixin - - -class S3RuntimeError(PydanticErrorMixin, RuntimeError): - msg_template: str = "S3 client unexpected error" - - -class S3NotConnectedError(S3RuntimeError): - msg_template: str = "Cannot connect with s3 server" - - -class S3AccessError(S3RuntimeError): - code = "s3_access.error" - msg_template: str = "Unexpected error while accessing S3 backend" - - -class S3BucketInvalidError(S3AccessError): - code = "s3_bucket.invalid_error" - msg_template: str = "The bucket '{bucket}' is invalid" - - -class S3KeyNotFoundError(S3AccessError): - code = "s3_key.not_found_error" - msg_template: str = "The file {key} in {bucket} was not found" - - -def s3_exception_handler(log: logging.Logger): - """converts typical aiobotocore/boto exceptions to storage exceptions - NOTE: this is a work in progress as more exceptions might arise in different - use-cases - """ - - def decorator(func): # noqa: C901 - @functools.wraps(func) - async def wrapper(self, *args, **kwargs): - try: - return await func(self, *args, **kwargs) - except self.client.exceptions.NoSuchBucket as exc: - raise S3BucketInvalidError( - bucket=exc.response.get("Error", {}).get("BucketName", "undefined") - ) from exc - except botocore_exc.ClientError as exc: - status_code = int(exc.response.get("Error", {}).get("Code", -1)) - operation_name = exc.operation_name - - match status_code, operation_name: - case 404, "HeadObject": - raise S3KeyNotFoundError(bucket=args[0], key=args[1]) from exc - case (404, "HeadBucket") | (403, "HeadBucket"): - raise S3BucketInvalidError(bucket=args[0]) from exc - case _: - raise S3AccessError from exc - except botocore_exc.EndpointConnectionError as exc: - raise S3AccessError from exc - - except botocore_exc.BotoCoreError as exc: - log.exception("Unexpected error in s3 client: ") - raise S3AccessError from exc - - return wrapper - - return decorator diff --git a/packages/aws-library/tests/conftest.py b/packages/aws-library/tests/conftest.py index af911581920..a43c3c7ec2b 100644 --- a/packages/aws-library/tests/conftest.py +++ b/packages/aws-library/tests/conftest.py @@ -11,6 +11,7 @@ "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", "pytest_simcore.environment_configs", + "pytest_simcore.file_extra", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/services/storage/tests/unit/test_aiobotocore.py b/packages/aws-library/tests/test_aiobotocore.py similarity index 100% rename from services/storage/tests/unit/test_aiobotocore.py rename to packages/aws-library/tests/test_aiobotocore.py diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index 92fc77cbfef..747a2a9b85c 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -2,39 +2,430 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name # pylint:disable=too-many-arguments +# pylint:disable=protected-access # pylint:disable=no-name-in-module -import csv -import os -from collections.abc import AsyncIterator +import asyncio +import filecmp +import json +import logging +from collections import defaultdict +from collections.abc import 
AsyncIterator, Awaitable, Callable
+from dataclasses import dataclass
 from pathlib import Path
+from typing import Any
 
 import botocore.exceptions
 import pytest
-from aws_library.s3.client import SimcoreS3API
+from aiohttp import ClientSession
+from aws_library.s3._client import S3ObjectKey, SimcoreS3API
+from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
+from aws_library.s3._errors import (
+    S3BucketInvalidError,
+    S3DestinationNotEmptyError,
+    S3KeyNotFoundError,
+    S3UploadNotFoundError,
+)
+from aws_library.s3._models import MultiPartUploadLinks
 from faker import Faker
-from models_library.api_schemas_storage import S3BucketName
+from models_library.api_schemas_storage import S3BucketName, UploadedPart
+from models_library.basic_types import SHA256Str
 from moto.server import ThreadedMotoServer
-from pydantic import AnyUrl
-from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
+from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pytest_benchmark.plugin import BenchmarkFixture
+from pytest_simcore.helpers.logging import log_context
+from pytest_simcore.helpers.parametrizations import (
+    byte_size_ids,
+    parametrized_file_size,
+)
+from pytest_simcore.helpers.s3 import (
+    delete_all_object_versions,
+    upload_file_to_presigned_link,
+)
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from servicelib.utils import limited_as_completed
 from settings_library.s3 import S3Settings
 from types_aiobotocore_s3 import S3Client
+from types_aiobotocore_s3.literals import BucketLocationConstraintType
 
 
 @pytest.fixture
 async def simcore_s3_api(
     mocked_s3_server_settings: S3Settings,
+    mocked_s3_server_envs: EnvVarsDict,
 ) -> AsyncIterator[SimcoreS3API]:
     s3 = await SimcoreS3API.create(settings=mocked_s3_server_settings)
     assert s3
-    assert s3.client
-    assert s3.exit_stack
-    assert s3.session
+    assert s3._client  # noqa: SLF001
+    assert s3._exit_stack  # noqa: SLF001
+    assert s3._session  # noqa: SLF001
     yield s3
     await s3.close()
 
 
+@pytest.fixture
+def bucket_name(faker: Faker) -> S3BucketName:
+    # NOTE: the faker name is transformed: S3 bucket names must be lowercase, without underscores
+    return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower())
+
+
+@pytest.fixture
+async def ensure_bucket_name_deleted(
+    bucket_name: S3BucketName, s3_client: S3Client
+) -> AsyncIterator[None]:
+    yield
+    await s3_client.delete_bucket(Bucket=bucket_name)
+
+
+@pytest.fixture
+async def with_s3_bucket(
+    s3_client: S3Client, bucket_name: S3BucketName
+) -> AsyncIterator[S3BucketName]:
+    await s3_client.create_bucket(Bucket=bucket_name)
+    yield bucket_name
+    await s3_client.delete_bucket(Bucket=bucket_name)
+
+
+@pytest.fixture
+def non_existing_s3_bucket(faker: Faker) -> S3BucketName:
+    return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower())
+
+
+@pytest.fixture
+async def upload_to_presigned_link(
+    s3_client: S3Client,
+) -> AsyncIterator[
+    Callable[[Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None]]
+]:
+    uploaded_object_keys: dict[S3BucketName, list[S3ObjectKey]] = defaultdict(list)
+
+    async def _(
+        file: Path, presigned_url: AnyUrl, bucket: S3BucketName, s3_object: S3ObjectKey
+    ) -> None:
+        await upload_file_to_presigned_link(
+            file,
+            MultiPartUploadLinks(
+                upload_id="fake",
+                chunk_size=parse_obj_as(ByteSize, file.stat().st_size),
+                urls=[presigned_url],
+            ),
+        )
+        uploaded_object_keys[bucket].append(s3_object)
+
+    yield _
+
+    for bucket, object_keys in uploaded_object_keys.items():
+        await delete_all_object_versions(s3_client, bucket, object_keys)
+
+
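Aside: the upload_to_presigned_link fixture above reuses the multipart-upload helper for a plain
presigned link by wrapping the single URL into a one-part MultiPartUploadLinks. A minimal sketch of
the same trick; the wrapper function name upload_via_single_link is illustrative, and the "fake"
upload id is a placeholder that never reaches S3:

    from pathlib import Path

    from aws_library.s3._models import MultiPartUploadLinks
    from pydantic import AnyUrl, ByteSize, parse_obj_as
    from pytest_simcore.helpers.s3 import upload_file_to_presigned_link


    async def upload_via_single_link(file: Path, presigned_url: AnyUrl) -> None:
        # a single part covering the whole file, served by the one presigned URL
        single_part_links = MultiPartUploadLinks(
            upload_id="fake",  # placeholder: no multipart upload was actually initiated
            chunk_size=parse_obj_as(ByteSize, file.stat().st_size),
            urls=[presigned_url],
        )
        await upload_file_to_presigned_link(file, single_part_links)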
+@dataclass(frozen=True, slots=True, kw_only=True) +class UploadedFile: + local_path: Path + s3_key: S3ObjectKey + + +@pytest.fixture +async def with_uploaded_file_on_s3( + create_file_of_size: Callable[[ByteSize], Path], + s3_client: S3Client, + with_s3_bucket: S3BucketName, +) -> AsyncIterator[UploadedFile]: + test_file = create_file_of_size(parse_obj_as(ByteSize, "10Kib")) + await s3_client.upload_file( + Filename=f"{test_file}", + Bucket=with_s3_bucket, + Key=test_file.name, + ) + + yield UploadedFile(local_path=test_file, s3_key=test_file.name) + + await delete_all_object_versions(s3_client, with_s3_bucket, [test_file.name]) + + +@pytest.fixture +def default_expiration_time_seconds(faker: Faker) -> int: + return faker.pyint(min_value=10) + + +@pytest.mark.parametrize("region", ["us-east-1", "us-east-2", "us-west-1", "us-west-2"]) +async def test_create_bucket( + simcore_s3_api: SimcoreS3API, + bucket_name: S3BucketName, + ensure_bucket_name_deleted: None, + region: BucketLocationConstraintType, +): + assert not await simcore_s3_api.bucket_exists(bucket=bucket_name) + await simcore_s3_api.create_bucket(bucket=bucket_name, region=region) + assert await simcore_s3_api.bucket_exists(bucket=bucket_name) + # calling again works and silently does nothing + await simcore_s3_api.create_bucket(bucket=bucket_name, region=region) + + +@pytest.fixture +async def with_versioning_enabled( + s3_client: S3Client, + with_s3_bucket: S3BucketName, +) -> None: + await s3_client.put_bucket_versioning( + Bucket=with_s3_bucket, + VersioningConfiguration={"MFADelete": "Disabled", "Status": "Enabled"}, + ) + + +@pytest.fixture +async def upload_file_to_multipart_presigned_link_without_completing( + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + create_file_of_size: Callable[[ByteSize], Path], + faker: Faker, + default_expiration_time_seconds: int, + s3_client: S3Client, +) -> AsyncIterator[ + Callable[ + ..., Awaitable[tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]] + ] +]: + possibly_updated_files: list[S3ObjectKey] = [] + + async def _uploader( + file_size: ByteSize, + object_key: S3ObjectKey | None = None, + ) -> tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]: + file = create_file_of_size(file_size) + if not object_key: + object_key = S3ObjectKey(file.name) + upload_links = await simcore_s3_api.create_multipart_upload_links( + bucket=with_s3_bucket, + object_key=object_key, + file_size=ByteSize(file.stat().st_size), + expiration_secs=default_expiration_time_seconds, + sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()), + ) + assert upload_links + + # check there is no file yet + with pytest.raises(S3KeyNotFoundError, match=f"{object_key}"): + await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=object_key + ) + + # check we have the multipart upload initialized and listed + ongoing_multipart_uploads = await simcore_s3_api.list_ongoing_multipart_uploads( + bucket=with_s3_bucket + ) + assert ongoing_multipart_uploads + assert len(ongoing_multipart_uploads) == 1 + ongoing_upload_id, ongoing_object_key = ongoing_multipart_uploads[0] + assert ongoing_upload_id == upload_links.upload_id + assert ongoing_object_key == object_key + + # upload the file + uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_link( + file, + upload_links, + ) + assert len(uploaded_parts) == len(upload_links.urls) + + # check there is no file yet + with pytest.raises(S3KeyNotFoundError): + await simcore_s3_api.get_object_metadata( + 
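+            # NOTE: boto3/aioboto3's upload_file returns None on success, hence
+            # the assert below; actual success is verified via object_exists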
bucket=with_s3_bucket, object_key=object_key + ) + + # check we have the multipart upload initialized and listed + ongoing_multipart_uploads = await simcore_s3_api.list_ongoing_multipart_uploads( + bucket=with_s3_bucket + ) + assert ongoing_multipart_uploads + assert len(ongoing_multipart_uploads) == 1 + ongoing_upload_id, ongoing_object_key = ongoing_multipart_uploads[0] + assert ongoing_upload_id == upload_links.upload_id + assert ongoing_object_key == object_key + + possibly_updated_files.append(object_key) + + return ( + object_key, + upload_links, + uploaded_parts, + ) + + yield _uploader + + await delete_all_object_versions(s3_client, with_s3_bucket, possibly_updated_files) + + +@dataclass +class _ProgressCallback: + file_size: int + action: str + logger: logging.Logger + _total_bytes_transfered: int = 0 + + def __call__(self, bytes_transferred: int) -> None: + self._total_bytes_transfered += bytes_transferred + self.logger.debug( + "progress: %s", + f"{self.action} {self._total_bytes_transfered} / {self.file_size} bytes", + ) + + +@pytest.fixture +async def upload_file( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + s3_client: S3Client, +) -> AsyncIterator[Callable[[Path], Awaitable[UploadedFile]]]: + uploaded_object_keys = [] + + async def _uploader(file: Path, base_path: Path | None = None) -> UploadedFile: + object_key = file.name + if base_path: + object_key = f"{file.relative_to(base_path)}" + with log_context( + logging.INFO, msg=f"uploading {file} to {with_s3_bucket}/{object_key}" + ) as ctx: + progress_cb = _ProgressCallback( + file_size=file.stat().st_size, action="uploaded", logger=ctx.logger + ) + response = await simcore_s3_api.upload_file( + bucket=with_s3_bucket, + file=file, + object_key=object_key, + bytes_transfered_cb=progress_cb, + ) + # there is no response from aioboto3... 
+ assert not response + + assert ( + await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=object_key + ) + is True + ) + uploaded_object_keys.append(object_key) + return UploadedFile(local_path=file, s3_key=object_key) + + yield _uploader + + with log_context(logging.INFO, msg=f"delete {len(uploaded_object_keys)}"): + await delete_all_object_versions( + s3_client, with_s3_bucket, uploaded_object_keys + ) + + +@pytest.fixture(autouse=True) +def set_log_levels_for_noisy_libraries() -> None: + # Reduce the log level for 'werkzeug' + logging.getLogger("werkzeug").setLevel(logging.WARNING) + + +@pytest.fixture +async def with_uploaded_folder_on_s3( + create_folder_of_size_with_multiple_files: Callable[ + [ByteSize, ByteSize, ByteSize], Path + ], + upload_file: Callable[[Path, Path], Awaitable[UploadedFile]], + directory_size: ByteSize, + min_file_size: ByteSize, + max_file_size: ByteSize, +) -> list[UploadedFile]: + # create random files of random size and upload to S3 + folder = create_folder_of_size_with_multiple_files( + ByteSize(directory_size), ByteSize(min_file_size), ByteSize(max_file_size) + ) + list_uploaded_files = [] + + with log_context(logging.INFO, msg=f"uploading {folder}") as ctx: + list_uploaded_files = [ + await uploaded_file + async for uploaded_file in limited_as_completed( + ( + upload_file(file, folder.parent) + for file in folder.rglob("*") + if file.is_file() + ), + limit=20, + ) + ] + ctx.logger.info("uploaded %s files", len(list_uploaded_files)) + return list_uploaded_files + + +@pytest.fixture +async def copy_file( + simcore_s3_api: SimcoreS3API, with_s3_bucket: S3BucketName, s3_client: S3Client +) -> AsyncIterator[Callable[[S3ObjectKey, S3ObjectKey], Awaitable[S3ObjectKey]]]: + copied_object_keys = [] + + async def _copier(src_key: S3ObjectKey, dst_key: S3ObjectKey) -> S3ObjectKey: + file_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=src_key + ) + with log_context(logging.INFO, msg=f"copying {src_key} to {dst_key}") as ctx: + progress_cb = _ProgressCallback( + file_size=file_metadata.size, action="copied", logger=ctx.logger + ) + await simcore_s3_api.copy_object( + bucket=with_s3_bucket, + src_object_key=src_key, + dst_object_key=dst_key, + bytes_transfered_cb=progress_cb, + ) + copied_object_keys.append(dst_key) + return dst_key + + yield _copier + + # cleanup + await delete_all_object_versions(s3_client, with_s3_bucket, copied_object_keys) + + +@pytest.fixture +async def copy_files_recursively( + simcore_s3_api: SimcoreS3API, with_s3_bucket: S3BucketName, s3_client: S3Client +) -> AsyncIterator[Callable[[str, str], Awaitable[str]]]: + copied_dst_prefixes = [] + + async def _copier(src_prefix: str, dst_prefix: str) -> str: + src_directory_metadata = await simcore_s3_api.get_directory_metadata( + bucket=with_s3_bucket, prefix=src_prefix + ) + with log_context( + logging.INFO, + msg=f"copying {src_prefix} [{ByteSize(src_directory_metadata.size).human_readable()}] to {dst_prefix}", + ) as ctx: + progress_cb = _ProgressCallback( + file_size=src_directory_metadata.size, + action="copied", + logger=ctx.logger, + ) + await simcore_s3_api.copy_objects_recursively( + bucket=with_s3_bucket, + src_prefix=src_prefix, + dst_prefix=dst_prefix, + bytes_transfered_cb=progress_cb, + ) + + dst_directory_metadata = await simcore_s3_api.get_directory_metadata( + bucket=with_s3_bucket, prefix=dst_prefix + ) + assert dst_directory_metadata.size == src_directory_metadata.size + + copied_dst_prefixes.append(dst_prefix) + return 
dst_prefix + + yield _copier + + # cleanup + for dst_prefix in copied_dst_prefixes: + await simcore_s3_api.delete_objects_recursively( + bucket=with_s3_bucket, prefix=dst_prefix + ) + + async def test_aiobotocore_s3_client_when_s3_server_goes_up_and_down( mocked_aws_server: ThreadedMotoServer, mocked_s3_server_envs: EnvVarsDict, @@ -52,82 +443,909 @@ async def test_aiobotocore_s3_client_when_s3_server_goes_up_and_down( await s3_client.list_buckets() -@pytest.fixture -async def create_s3_bucket( - mocked_s3_server_envs: EnvVarsDict, s3_client: S3Client, faker: Faker -) -> AsyncIterator[S3BucketName]: - bucket_name = faker.pystr() - await s3_client.create_bucket(Bucket=bucket_name) - - yield S3BucketName(bucket_name) - - await s3_client.delete_bucket(Bucket=bucket_name) +async def test_bucket_exists( + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + non_existing_s3_bucket: S3BucketName, +): + assert not await simcore_s3_api.bucket_exists(bucket=non_existing_s3_bucket) + assert await simcore_s3_api.bucket_exists(bucket=with_s3_bucket) + assert not await simcore_s3_api.http_check_bucket_connected( + bucket=non_existing_s3_bucket + ) + assert await simcore_s3_api.http_check_bucket_connected(bucket=with_s3_bucket) async def test_http_check_bucket_connected( mocked_aws_server: ThreadedMotoServer, simcore_s3_api: SimcoreS3API, - create_s3_bucket: S3BucketName, + with_s3_bucket: S3BucketName, ): assert ( - await simcore_s3_api.http_check_bucket_connected(bucket=create_s3_bucket) - is True + await simcore_s3_api.http_check_bucket_connected(bucket=with_s3_bucket) is True ) mocked_aws_server.stop() assert ( - await simcore_s3_api.http_check_bucket_connected(bucket=create_s3_bucket) - is False + await simcore_s3_api.http_check_bucket_connected(bucket=with_s3_bucket) is False ) mocked_aws_server.start() assert ( - await simcore_s3_api.http_check_bucket_connected(bucket=create_s3_bucket) - is True + await simcore_s3_api.http_check_bucket_connected(bucket=with_s3_bucket) is True ) -@pytest.fixture -async def create_small_csv_file(tmp_path): - data = [ - ["Name", "Age", "Country"], - ["Alice", 25, "USA"], - ["Bob", 30, "Canada"], - ["Charlie", 22, "UK"], - ] +async def test_get_file_metadata( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, + s3_client: S3Client, +): + s3_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + aioboto_s3_object_response = await s3_client.get_object( + Bucket=with_s3_bucket, Key=with_uploaded_file_on_s3.s3_key + ) + assert s3_metadata.object_key == with_uploaded_file_on_s3.s3_key + assert s3_metadata.last_modified == aioboto_s3_object_response["LastModified"] + assert s3_metadata.e_tag == json.loads(aioboto_s3_object_response["ETag"]) + assert s3_metadata.sha256_checksum is None + assert s3_metadata.size == aioboto_s3_object_response["ContentLength"] - # Create a temporary file in the tmp_path directory - csv_file_path = tmp_path / "small_csv_file.csv" - # Write the data to the CSV file - with open(csv_file_path, mode="w", newline="") as file: - csv_writer = csv.writer(file) - csv_writer.writerows(data) +async def test_get_file_metadata_with_non_existing_bucket_raises( + mocked_s3_server_envs: EnvVarsDict, + non_existing_s3_bucket: S3BucketName, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, +): + with pytest.raises(S3KeyNotFoundError): + await 
simcore_s3_api.get_object_metadata( + bucket=non_existing_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) - # Provide the CSV file path as the fixture value - yield csv_file_path - # Clean up: Remove the temporary CSV file after the test - if csv_file_path.exists(): - os.remove(csv_file_path) +async def test_get_file_metadata_with_non_existing_key_raises( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + faker: Faker, +): + with pytest.raises(S3KeyNotFoundError): + await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=faker.pystr() + ) -@pytest.fixture -async def upload_file_to_bucket( - create_small_csv_file: Path, +async def test_delete_file( mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + with_uploaded_file_on_s3: UploadedFile, +): + # delete the file + await simcore_s3_api.delete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + + # check it is not available + assert not await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + + # calling again does not raise + await simcore_s3_api.delete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + + +async def test_delete_file_non_existing_bucket_raises( + mocked_s3_server_envs: EnvVarsDict, + non_existing_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + faker: Faker, +): + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.delete_object( + bucket=non_existing_s3_bucket, object_key=faker.pystr() + ) + + +async def test_undelete_file( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_versioning_enabled: None, + simcore_s3_api: SimcoreS3API, + with_uploaded_file_on_s3: UploadedFile, + upload_file: Callable[[Path, Path], Awaitable[UploadedFile]], + create_file_of_size: Callable[[ByteSize], Path], s3_client: S3Client, - create_s3_bucket: S3BucketName, ): - await s3_client.upload_file(create_small_csv_file, create_s3_bucket, "test.csv") + # we have a file uploaded + file_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size + + # upload another file on top of the existing one + new_file = create_file_of_size(parse_obj_as(ByteSize, "5Kib")) + await s3_client.upload_file( + Filename=f"{new_file}", + Bucket=with_s3_bucket, + Key=file_metadata.object_key, + ) + + # check that the metadata changed + new_file_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + assert new_file_metadata.size == new_file.stat().st_size + assert file_metadata.e_tag != new_file_metadata.e_tag + + # this deletes the new_file, so it's gone + await simcore_s3_api.delete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + assert not await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + + # undelete the file, the new file is back + await simcore_s3_api.undelete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + assert ( + await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + 
) + == new_file_metadata + ) + # does nothing + await simcore_s3_api.undelete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + + # delete the file again + await simcore_s3_api.delete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + # check it is not available + assert ( + await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + is False + ) + + +async def test_undelete_file_raises_if_file_does_not_exists( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + non_existing_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + faker: Faker, +): + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.undelete_object( + bucket=non_existing_s3_bucket, object_key=faker.pystr() + ) + with pytest.raises(S3KeyNotFoundError): + await simcore_s3_api.undelete_object( + bucket=with_s3_bucket, object_key=faker.pystr() + ) - yield - await s3_client.delete_object(Bucket=create_s3_bucket, Key="test.csv") +async def test_undelete_file_with_no_versioning_raises( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + with_uploaded_file_on_s3: UploadedFile, +): + await simcore_s3_api.delete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) + with pytest.raises(S3KeyNotFoundError): + await simcore_s3_api.undelete_object( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) async def test_create_single_presigned_download_link( - simcore_s3_api: SimcoreS3API, upload_file_to_bucket: None, create_s3_bucket + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, + default_expiration_time_seconds: int, + tmp_path: Path, + faker: Faker, ): + assert await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=with_uploaded_file_on_s3.s3_key + ) download_url = await simcore_s3_api.create_single_presigned_download_link( - create_s3_bucket, "test.csv", 50 + bucket=with_s3_bucket, + object_key=with_uploaded_file_on_s3.s3_key, + expiration_secs=default_expiration_time_seconds, ) assert isinstance(download_url, AnyUrl) + + dest_file = tmp_path / faker.file_name() + async with ClientSession() as session: + response = await session.get(download_url) + response.raise_for_status() + with dest_file.open("wb") as fp: + fp.write(await response.read()) + assert dest_file.exists() + + assert filecmp.cmp(dest_file, with_uploaded_file_on_s3.local_path) is True + + +async def test_create_single_presigned_download_link_of_invalid_object_key_raises( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + default_expiration_time_seconds: int, + faker: Faker, +): + with pytest.raises(S3KeyNotFoundError): + await simcore_s3_api.create_single_presigned_download_link( + bucket=with_s3_bucket, + object_key=faker.file_name(), + expiration_secs=default_expiration_time_seconds, + ) + + +async def test_create_single_presigned_download_link_of_invalid_bucket_raises( + mocked_s3_server_envs: EnvVarsDict, + non_existing_s3_bucket: S3BucketName, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, + default_expiration_time_seconds: int, +): + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.create_single_presigned_download_link( + bucket=non_existing_s3_bucket, + object_key=with_uploaded_file_on_s3.s3_key, + 
expiration_secs=default_expiration_time_seconds, + ) + + +async def test_create_single_presigned_upload_link( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, + create_file_of_size: Callable[[ByteSize], Path], + default_expiration_time_seconds: int, + upload_to_presigned_link: Callable[ + [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None] + ], +): + file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + s3_object_key = file.name + presigned_url = await simcore_s3_api.create_single_presigned_upload_link( + bucket=with_s3_bucket, + object_key=s3_object_key, + expiration_secs=default_expiration_time_seconds, + ) + assert presigned_url + + # upload the file with a fake multipart upload links structure + await upload_to_presigned_link(file, presigned_url, with_s3_bucket, s3_object_key) + + # check it is there + s3_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=s3_object_key + ) + assert s3_metadata.size == file.stat().st_size + assert s3_metadata.last_modified + assert s3_metadata.e_tag + + +async def test_create_single_presigned_upload_link_with_non_existing_bucket_raises( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + non_existing_s3_bucket: S3BucketName, + create_file_of_size: Callable[[ByteSize], Path], + default_expiration_time_seconds: int, +): + file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + s3_object_key = file.name + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.create_single_presigned_upload_link( + bucket=non_existing_s3_bucket, + object_key=s3_object_key, + expiration_secs=default_expiration_time_seconds, + ) + + +@pytest.mark.parametrize( + "file_size", + [ + parametrized_file_size("10Mib"), + parametrized_file_size("100Mib"), + parametrized_file_size("1000Mib"), + ], + ids=byte_size_ids, +) +async def test_create_multipart_presigned_upload_link( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + upload_file_to_multipart_presigned_link_without_completing: Callable[ + ..., Awaitable[tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]] + ], + file_size: ByteSize, +): + ( + file_id, + upload_links, + uploaded_parts, + ) = await upload_file_to_multipart_presigned_link_without_completing(file_size) + + # now complete it + received_e_tag = await simcore_s3_api.complete_multipart_upload( + bucket=with_s3_bucket, + object_key=file_id, + upload_id=upload_links.upload_id, + uploaded_parts=uploaded_parts, + ) + + # check that the multipart upload is not listed anymore + list_ongoing_uploads = await simcore_s3_api.list_ongoing_multipart_uploads( + bucket=with_s3_bucket + ) + assert list_ongoing_uploads == [] + + # check the object is complete + s3_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=file_id + ) + assert s3_metadata.size == file_size + assert s3_metadata.last_modified + assert s3_metadata.e_tag == f"{json.loads(received_e_tag)}" + + # completing again raises + with pytest.raises(S3UploadNotFoundError): + await simcore_s3_api.complete_multipart_upload( + bucket=with_s3_bucket, + object_key=file_id, + upload_id=upload_links.upload_id, + uploaded_parts=uploaded_parts, + ) + + +@pytest.mark.parametrize( + "file_size", + [ + parametrized_file_size(MULTIPART_UPLOADS_MIN_TOTAL_SIZE.human_readable()), + ], + ids=byte_size_ids, +) +async def test_create_multipart_presigned_upload_link_invalid_raises( + mocked_s3_server_envs: 
EnvVarsDict,
+    simcore_s3_api: SimcoreS3API,
+    with_s3_bucket: S3BucketName,
+    non_existing_s3_bucket: S3BucketName,
+    upload_file_to_multipart_presigned_link_without_completing: Callable[
+        ..., Awaitable[tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]]
+    ],
+    file_size: ByteSize,
+    create_file_of_size: Callable[[ByteSize], Path],
+    faker: Faker,
+    default_expiration_time_seconds: int,
+):
+    file = create_file_of_size(file_size)
+    # creating links with invalid bucket
+    with pytest.raises(S3BucketInvalidError):
+        await simcore_s3_api.create_multipart_upload_links(
+            bucket=non_existing_s3_bucket,
+            object_key=faker.pystr(),
+            file_size=ByteSize(file.stat().st_size),
+            expiration_secs=default_expiration_time_seconds,
+            sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()),
+        )
+
+    # completing with invalid bucket
+    (
+        object_key,
+        upload_links,
+        uploaded_parts,
+    ) = await upload_file_to_multipart_presigned_link_without_completing(file_size)
+
+    with pytest.raises(S3BucketInvalidError):
+        await simcore_s3_api.complete_multipart_upload(
+            bucket=non_existing_s3_bucket,
+            object_key=object_key,
+            upload_id=upload_links.upload_id,
+            uploaded_parts=uploaded_parts,
+        )
+
+    # with pytest.raises(S3KeyNotFoundError):
+    # NOTE: this does not raise... and it returns the file_id of the original file...
+    await simcore_s3_api.complete_multipart_upload(
+        bucket=with_s3_bucket,
+        object_key=faker.pystr(),
+        upload_id=upload_links.upload_id,
+        uploaded_parts=uploaded_parts,
+    )
+
+
+@pytest.mark.parametrize("file_size", [parametrized_file_size("1Gib")])
+async def test_break_completion_of_multipart_upload(
+    mocked_s3_server_envs: EnvVarsDict,
+    simcore_s3_api: SimcoreS3API,
+    with_s3_bucket: S3BucketName,
+    upload_file_to_multipart_presigned_link_without_completing: Callable[
+        ..., Awaitable[tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]]
+    ],
+    file_size: ByteSize,
+):
+    (
+        object_key,
+        upload_links,
+        uploaded_parts,
+    ) = await upload_file_to_multipart_presigned_link_without_completing(file_size)
+    # let's break the completion task very quickly and see what happens
+    VERY_SHORT_TIMEOUT = 0.2
+    with pytest.raises(asyncio.TimeoutError):
+        await asyncio.wait_for(
+            simcore_s3_api.complete_multipart_upload(
+                bucket=with_s3_bucket,
+                object_key=object_key,
+                upload_id=upload_links.upload_id,
+                uploaded_parts=uploaded_parts,
+            ),
+            timeout=VERY_SHORT_TIMEOUT,
+        )
+    # check we have the multipart upload initialized and listed
+    ongoing_multipart_uploads = await simcore_s3_api.list_ongoing_multipart_uploads(
+        bucket=with_s3_bucket
+    )
+    assert ongoing_multipart_uploads
+    assert len(ongoing_multipart_uploads) == 1
+    ongoing_upload_id, ongoing_file_id = ongoing_multipart_uploads[0]
+    assert ongoing_upload_id == upload_links.upload_id
+    assert ongoing_file_id == object_key
+
+    # now wait
+    await asyncio.sleep(10)
+
+    # check that the completion of the upload completed...
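+    # NOTE: completion apparently keeps running in the background despite the
+    # cancelled wait above; the assertions below verify that it eventually finished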
+ assert ( + await simcore_s3_api.list_ongoing_multipart_uploads(bucket=with_s3_bucket) == [] + ) + + # check the object is complete + s3_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=object_key + ) + assert s3_metadata.size == file_size + assert s3_metadata.last_modified + assert s3_metadata.e_tag + + +@pytest.mark.parametrize( + "file_size", + [parametrized_file_size(f"{MULTIPART_UPLOADS_MIN_TOTAL_SIZE}")], + ids=byte_size_ids, +) +async def test_abort_multipart_upload( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + non_existing_s3_bucket: S3BucketName, + upload_file_to_multipart_presigned_link_without_completing: Callable[ + ..., Awaitable[tuple[S3ObjectKey, MultiPartUploadLinks, list[UploadedPart]]] + ], + file_size: ByteSize, + faker: Faker, +): + ( + object_key, + upload_links, + _, + ) = await upload_file_to_multipart_presigned_link_without_completing(file_size) + + # first abort with wrong bucket shall raise + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.abort_multipart_upload( + bucket=non_existing_s3_bucket, + object_key=object_key, + upload_id=upload_links.upload_id, + ) + + # now abort it + await simcore_s3_api.abort_multipart_upload( + bucket=with_s3_bucket, + object_key=faker.pystr(), + upload_id=upload_links.upload_id, + ) + # doing it again raises + with pytest.raises(S3UploadNotFoundError): + await simcore_s3_api.abort_multipart_upload( + bucket=with_s3_bucket, + object_key=object_key, + upload_id=upload_links.upload_id, + ) + + # now check that the listing is empty + ongoing_multipart_uploads = await simcore_s3_api.list_ongoing_multipart_uploads( + bucket=with_s3_bucket + ) + assert ongoing_multipart_uploads == [] + + # check it is not available + assert ( + await simcore_s3_api.object_exists(bucket=with_s3_bucket, object_key=object_key) + is False + ) + + +@pytest.mark.parametrize( + "file_size", + [parametrized_file_size("500Mib")], + ids=byte_size_ids, +) +async def test_upload_file( + mocked_s3_server_envs: EnvVarsDict, + upload_file: Callable[[Path], Awaitable[UploadedFile]], + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize], Path], +): + file = create_file_of_size(file_size) + await upload_file(file) + + +async def test_upload_file_invalid_raises( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + non_existing_s3_bucket: S3BucketName, + create_file_of_size: Callable[[ByteSize, str | None], Path], + faker: Faker, +): + file = create_file_of_size(ByteSize(10), None) + with pytest.raises(S3BucketInvalidError): + await simcore_s3_api.upload_file( + bucket=non_existing_s3_bucket, + file=file, + object_key=faker.pystr(), + bytes_transfered_cb=None, + ) + + +@pytest.mark.parametrize( + "file_size", + [parametrized_file_size("500Mib")], + ids=byte_size_ids, +) +async def test_copy_file( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + file_size: ByteSize, + upload_file: Callable[[Path], Awaitable[UploadedFile]], + copy_file: Callable[[S3ObjectKey, S3ObjectKey], Awaitable[S3ObjectKey]], + create_file_of_size: Callable[[ByteSize], Path], + faker: Faker, +): + file = create_file_of_size(file_size) + uploaded_file = await upload_file(file) + dst_object_key = faker.file_name() + await copy_file(uploaded_file.s3_key, dst_object_key) + + # check the object is uploaded + assert ( + await simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=dst_object_key + ) + 
is True + ) + dst_file_metadata = await simcore_s3_api.get_object_metadata( + bucket=with_s3_bucket, object_key=dst_object_key + ) + assert uploaded_file.local_path.stat().st_size == dst_file_metadata.size + + +async def test_copy_file_invalid_raises( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + non_existing_s3_bucket: S3BucketName, + upload_file: Callable[[Path], Awaitable[UploadedFile]], + create_file_of_size: Callable[[ByteSize], Path], + faker: Faker, +): + file = create_file_of_size(parse_obj_as(ByteSize, "1MiB")) + uploaded_file = await upload_file(file) + dst_object_key = faker.file_name() + with pytest.raises(S3BucketInvalidError, match=f"{non_existing_s3_bucket}"): + await simcore_s3_api.copy_object( + bucket=non_existing_s3_bucket, + src_object_key=uploaded_file.s3_key, + dst_object_key=dst_object_key, + bytes_transfered_cb=None, + ) + fake_src_key = faker.file_name() + with pytest.raises(S3KeyNotFoundError, match=rf"{fake_src_key}"): + await simcore_s3_api.copy_object( + bucket=with_s3_bucket, + src_object_key=fake_src_key, + dst_object_key=dst_object_key, + bytes_transfered_cb=None, + ) + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size", + [ + ( + parse_obj_as(ByteSize, "1Mib"), + parse_obj_as(ByteSize, "1B"), + parse_obj_as(ByteSize, "10Kib"), + ) + ], + ids=byte_size_ids, +) +async def test_get_directory_metadata( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], + directory_size: ByteSize, +): + metadata = await simcore_s3_api.get_directory_metadata( + bucket=with_s3_bucket, + prefix=Path(with_uploaded_folder_on_s3[0].s3_key).parts[0], + ) + assert metadata + assert metadata.size == directory_size + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size", + [ + ( + parse_obj_as(ByteSize, "1Mib"), + parse_obj_as(ByteSize, "1B"), + parse_obj_as(ByteSize, "10Kib"), + ) + ], + ids=byte_size_ids, +) +async def test_get_directory_metadata_raises( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + non_existing_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], +): + with pytest.raises(S3BucketInvalidError, match=rf"{non_existing_s3_bucket}"): + await simcore_s3_api.get_directory_metadata( + bucket=non_existing_s3_bucket, + prefix=Path(with_uploaded_folder_on_s3[0].s3_key).parts[0], + ) + + wrong_prefix = "/" + metadata = await simcore_s3_api.get_directory_metadata( + bucket=with_s3_bucket, + prefix=wrong_prefix, + ) + assert metadata.size == 0 + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size", + [ + ( + parse_obj_as(ByteSize, "1Mib"), + parse_obj_as(ByteSize, "1B"), + parse_obj_as(ByteSize, "10Kib"), + ) + ], + ids=byte_size_ids, +) +async def test_delete_file_recursively( + mocked_s3_server_envs: EnvVarsDict, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], +): + # deleting from the root + await simcore_s3_api.delete_objects_recursively( + bucket=with_s3_bucket, + prefix=Path(with_uploaded_folder_on_s3[0].s3_key).parts[0], + ) + files_exists = set( + await asyncio.gather( + *[ + simcore_s3_api.object_exists( + bucket=with_s3_bucket, object_key=file.s3_key + ) + for file in with_uploaded_folder_on_s3 + ] + ) + ) + assert len(files_exists) == 1 + assert next(iter(files_exists)) is False + + 
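Aside: the directory tests in this file derive the S3 "folder" prefix from the first component of
an uploaded object key. A small self-contained illustration of that idiom (the key values here are
made up):

    from pathlib import Path

    uploaded_keys = ["folder-abc/sub/a.bin", "folder-abc/b.bin"]  # hypothetical keys
    prefix = Path(uploaded_keys[0]).parts[0]
    assert prefix == "folder-abc"
    # this is the prefix handed to delete_objects_recursively / copy_objects_recursively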
+@pytest.mark.parametrize(
+    "directory_size, min_file_size, max_file_size",
+    [
+        (
+            parse_obj_as(ByteSize, "1Mib"),
+            parse_obj_as(ByteSize, "1B"),
+            parse_obj_as(ByteSize, "10Kib"),
+        )
+    ],
+    ids=byte_size_ids,
+)
+async def test_delete_file_recursively_raises(
+    mocked_s3_server_envs: EnvVarsDict,
+    simcore_s3_api: SimcoreS3API,
+    non_existing_s3_bucket: S3BucketName,
+    with_s3_bucket: S3BucketName,
+    with_uploaded_folder_on_s3: list[UploadedFile],
+    faker: Faker,
+):
+    with pytest.raises(S3BucketInvalidError, match=rf"{non_existing_s3_bucket}"):
+        await simcore_s3_api.delete_objects_recursively(
+            bucket=non_existing_s3_bucket,
+            prefix=Path(with_uploaded_folder_on_s3[0].s3_key).parts[0],
+        )
+    # this will do nothing
+    await simcore_s3_api.delete_objects_recursively(
+        bucket=with_s3_bucket,
+        prefix=f"{faker.pystr()}",
+    )
+    # and the files still exist
+    some_file = next(
+        iter(filter(lambda f: f.local_path.is_file(), with_uploaded_folder_on_s3))
+    )
+    assert await simcore_s3_api.object_exists(
+        bucket=with_s3_bucket, object_key=some_file.s3_key
+    )
+
+
+@pytest.mark.parametrize(
+    "directory_size, min_file_size, max_file_size",
+    [
+        (
+            parse_obj_as(ByteSize, "1Mib"),
+            parse_obj_as(ByteSize, "1B"),
+            parse_obj_as(ByteSize, "10Kib"),
+        )
+    ],
+    ids=byte_size_ids,
+)
+async def test_copy_files_recursively(
+    mocked_s3_server_envs: EnvVarsDict,
+    with_uploaded_folder_on_s3: list[UploadedFile],
+    copy_files_recursively: Callable[[str, str], Awaitable[str]],
+):
+    src_folder = Path(with_uploaded_folder_on_s3[0].s3_key).parts[0]
+    dst_folder = f"{src_folder}-copy"
+    await copy_files_recursively(src_folder, dst_folder)
+
+    # doing it again shall raise
+    with pytest.raises(S3DestinationNotEmptyError, match=rf"{dst_folder}"):
+        await copy_files_recursively(src_folder, dst_folder)
+
+
+async def test_copy_files_recursively_raises(
+    mocked_s3_server_envs: EnvVarsDict,
+    simcore_s3_api: SimcoreS3API,
+    non_existing_s3_bucket: S3BucketName,
+):
+    with pytest.raises(S3BucketInvalidError, match=rf"{non_existing_s3_bucket}"):
+        await simcore_s3_api.copy_objects_recursively(
+            bucket=non_existing_s3_bucket,
+            src_prefix="",
+            dst_prefix="",
+            bytes_transfered_cb=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "file_size, expected_multipart",
+    [
+        (MULTIPART_UPLOADS_MIN_TOTAL_SIZE - 1, False),
+        (MULTIPART_UPLOADS_MIN_TOTAL_SIZE, True),
+    ],
+)
+def test_is_multipart(file_size: ByteSize, expected_multipart: bool):
+    assert SimcoreS3API.is_multipart(file_size) == expected_multipart
+
+
+@pytest.mark.parametrize(
+    "bucket, object_key, expected_s3_url",
+    [
+        (
+            "some-bucket",
+            "an/object/separate/by/slashes",
+            "s3://some-bucket/an/object/separate/by/slashes",
+        ),
+        (
+            "some-bucket",
+            "an/object/separate/by/slashes-?/3#$",
+            r"s3://some-bucket/an/object/separate/by/slashes-%3F/3%23%24",
+        ),
+    ],
+)
+def test_compute_s3_url(
+    bucket: S3BucketName, object_key: S3ObjectKey, expected_s3_url: AnyUrl
+):
+    assert (
+        SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key)
+        == expected_s3_url
+    )
+
+
+@pytest.mark.parametrize(
+    "file_size",
+    [
+        parametrized_file_size("10Mib"),
+        parametrized_file_size("100Mib"),
+        parametrized_file_size("1000Mib"),
+    ],
+    ids=byte_size_ids,
+)
+def test_upload_file_performance(
+    mocked_s3_server_envs: EnvVarsDict,
+    create_file_of_size: Callable[[ByteSize], Path],
+    file_size: ByteSize,
+    upload_file: Callable[[Path, Path | None], Awaitable[UploadedFile]],
+    benchmark: BenchmarkFixture,
+):
+
+    # create a random file of the parametrized size and upload it to S3
+    file =
create_file_of_size(file_size) + + def run_async_test(*args, **kwargs) -> None: + asyncio.get_event_loop().run_until_complete(upload_file(file, None)) + + benchmark(run_async_test) + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size", + [ + ( + parse_obj_as(ByteSize, "1Mib"), + parse_obj_as(ByteSize, "1B"), + parse_obj_as(ByteSize, "10Kib"), + ), + ( + parse_obj_as(ByteSize, "500Mib"), + parse_obj_as(ByteSize, "10Mib"), + parse_obj_as(ByteSize, "50Mib"), + ), + ], + ids=byte_size_ids, +) +def test_copy_recurively_performance( + mocked_s3_server_envs: EnvVarsDict, + with_uploaded_folder_on_s3: list[UploadedFile], + copy_files_recursively: Callable[[str, str], Awaitable[str]], + benchmark: BenchmarkFixture, +): + src_folder = Path(with_uploaded_folder_on_s3[0].s3_key).parts[0] + + folder_index = 0 + + def dst_folder_setup() -> tuple[tuple[str], dict[str, Any]]: + nonlocal folder_index + dst_folder = f"{src_folder}-copy-{folder_index}" + folder_index += 1 + return (dst_folder,), {} + + def run_async_test(dst_folder: str) -> None: + asyncio.get_event_loop().run_until_complete( + copy_files_recursively(src_folder, dst_folder) + ) + + benchmark.pedantic(run_async_test, setup=dst_folder_setup, rounds=4) diff --git a/services/storage/tests/unit/test_s3_utils.py b/packages/aws-library/tests/test_s3_utils.py similarity index 97% rename from services/storage/tests/unit/test_s3_utils.py rename to packages/aws-library/tests/test_s3_utils.py index ddcb866b383..5354da8bc66 100644 --- a/services/storage/tests/unit/test_s3_utils.py +++ b/packages/aws-library/tests/test_s3_utils.py @@ -5,13 +5,13 @@ import pytest -from pydantic import ByteSize, parse_obj_as -from pytest_simcore.helpers.parametrizations import byte_size_ids -from simcore_service_storage.s3_utils import ( +from aws_library.s3._utils import ( _MULTIPART_MAX_NUMBER_OF_PARTS, _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE, compute_num_file_chunks, ) +from pydantic import ByteSize, parse_obj_as +from pytest_simcore.helpers.parametrizations import byte_size_ids @pytest.mark.parametrize( diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py index 7646473647f..e68cd99d1ee 100644 --- a/packages/models-library/src/models_library/api_schemas_storage.py +++ b/packages/models-library/src/models_library/api_schemas_storage.py @@ -10,7 +10,7 @@ from datetime import datetime from enum import Enum from re import Pattern -from typing import Any, ClassVar +from typing import Any, ClassVar, TypeAlias from uuid import UUID from pydantic import ( @@ -36,7 +36,7 @@ StorageFileID, ) -ETag = str +ETag: TypeAlias = str class S3BucketName(ConstrainedStr): diff --git a/packages/pytest-simcore/src/pytest_simcore/file_extra.py b/packages/pytest-simcore/src/pytest_simcore/file_extra.py index 1d1befc0315..d2d3dd58276 100644 --- a/packages/pytest-simcore/src/pytest_simcore/file_extra.py +++ b/packages/pytest-simcore/src/pytest_simcore/file_extra.py @@ -1,9 +1,11 @@ +import logging +from collections.abc import Callable from pathlib import Path -from typing import Callable import pytest from faker import Faker from pydantic import ByteSize +from pytest_simcore.helpers.logging import log_context @pytest.fixture @@ -22,3 +24,55 @@ def _creator(size: ByteSize, name: str | None = None) -> Path: return file return _creator + + +@pytest.fixture +def create_folder_of_size_with_multiple_files( + tmp_path: Path, faker: Faker +) -> Callable[[ByteSize, ByteSize, ByteSize], 
Path]:
+    def _create_folder_of_size_with_multiple_files(
+        directory_size: ByteSize,
+        file_min_size: ByteSize,
+        file_max_size: ByteSize,
+    ) -> Path:
+        # Helper function to create random files and directories
+        assert file_min_size > 0
+        assert file_min_size <= file_max_size
+
+        def create_random_content(base_dir: Path, remaining_size: ByteSize) -> ByteSize:
+            if remaining_size <= 0:
+                return remaining_size
+
+            # create a file whose size is bounded by the remaining budget and file_max_size
+            file_size = ByteSize(
+                faker.pyint(
+                    min_value=min(file_min_size, remaining_size),
+                    max_value=min(remaining_size, file_max_size),
+                )
+            )
+            file_path = base_dir / f"{faker.file_path(depth=4, absolute=False)}"
+            file_path.parent.mkdir(parents=True, exist_ok=True)
+            assert not file_path.exists()
+            with file_path.open("wb") as fp:
+                fp.write(f"I am a {file_size.human_readable()} file".encode())
+                fp.truncate(file_size)
+            assert file_path.exists()
+
+            return ByteSize(remaining_size - file_size)
+
+        # create content in the temporary directory until the target size is reached
+        remaining_size = directory_size
+        with log_context(
+            logging.INFO,
+            msg=f"creating {directory_size.human_readable()} of random files "
+            f"(up to {file_max_size.human_readable()}) in {tmp_path}",
+        ) as ctx:
+            num_files_created = 0
+            while remaining_size > 0:
+                remaining_size = create_random_content(tmp_path, remaining_size)
+                num_files_created += 1
+            ctx.logger.info("created %s files", num_files_created)
+        return tmp_path
+
+    return _create_folder_of_size_with_multiple_files
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py
index e5ebb125582..ed6381f5611 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py
@@ -1,7 +1,13 @@
-from pydantic import ByteSize
+import pytest
+from _pytest.mark.structures import ParameterSet
+from pydantic import ByteSize, parse_obj_as
 
 
 def byte_size_ids(val) -> str | None:
     if isinstance(val, ByteSize):
         return val.human_readable()
     return None
+
+
+def parametrized_file_size(size_str: str) -> ParameterSet:
+    return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str)
diff --git a/services/storage/tests/helpers/file_utils.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py
similarity index 63%
rename from services/storage/tests/helpers/file_utils.py
rename to packages/pytest-simcore/src/pytest_simcore/helpers/s3.py
index 5dd8b131a8e..810f6caab8b 100644
--- a/services/storage/tests/helpers/file_utils.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py
@@ -1,16 +1,17 @@
-import json
+from collections.abc import Iterable
 from pathlib import Path
 from time import perf_counter
 from typing import Final
 
 import aiofiles
-import pytest
+import orjson
 from aiohttp import ClientSession
-from models_library.api_schemas_storage import FileUploadSchema
+from aws_library.s3 import MultiPartUploadLinks
+from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart
 from pydantic import AnyUrl, ByteSize, parse_obj_as
 from servicelib.aiohttp import status
-from servicelib.utils import logged_gather
-from simcore_service_storage.s3_client import ETag, MultiPartUploadLinks, UploadedPart
+from servicelib.utils import limited_as_completed, logged_gather
+from types_aiobotocore_s3 import S3Client
 
 _SENDER_CHUNK_SIZE: Final[int] = parse_obj_as(ByteSize, "16Mib")
 
 
@@ -29,7 +30,8 @@ async
def _file_sender( num_read_bytes += len(chunk) yield chunk if raise_while_uploading: - raise RuntimeError("we were asked to raise here!") + msg = "we were asked to raise here!" + raise RuntimeError(msg) async def upload_file_part( @@ -40,6 +42,7 @@ async def upload_file_part( this_file_chunk_size: int, num_parts: int, upload_url: AnyUrl, + *, raise_while_uploading: bool = False, ) -> tuple[int, ETag]: print( @@ -62,7 +65,7 @@ async def upload_file_part( assert response.status == status.HTTP_200_OK assert response.headers assert "Etag" in response.headers - received_e_tag = json.loads(response.headers["Etag"]) + received_e_tag = orjson.loads(response.headers["Etag"]) print( f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}], {received_e_tag=}" ) @@ -96,7 +99,7 @@ async def upload_file_to_presigned_link( upload_url, ) ) - results = await logged_gather(*upload_tasks, max_concurrency=2) + results = await logged_gather(*upload_tasks, max_concurrency=0) part_to_etag = [ UploadedPart(number=index + 1, e_tag=e_tag) for index, e_tag in results ] @@ -106,5 +109,41 @@ async def upload_file_to_presigned_link( return part_to_etag -def parametrized_file_size(size_str: str): - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str) +async def delete_all_object_versions( + s3_client: S3Client, bucket: str, keys: Iterable[str] +) -> None: + objects_to_delete = [] + + bucket_versioning = await s3_client.get_bucket_versioning(Bucket=bucket) + if "Status" in bucket_versioning and bucket_versioning["Status"] == "Enabled": + # NOTE: using gather here kills the moto server + all_versions = [ + await v + async for v in limited_as_completed( + ( + s3_client.list_object_versions(Bucket=bucket, Prefix=key) + for key in keys + ), + limit=10, + ) + ] + + for versions in all_versions: + # Collect all version IDs and delete markers + objects_to_delete.extend( + {"Key": version["Key"], "VersionId": version["VersionId"]} + for version in versions.get("Versions", []) + ) + + objects_to_delete.extend( + {"Key": marker["Key"], "VersionId": marker["VersionId"]} + for marker in versions.get("DeleteMarkers", []) + ) + else: + # NOTE: this is way faster + objects_to_delete = [{"Key": key} for key in keys] + # Delete all versions and delete markers + if objects_to_delete: + await s3_client.delete_objects( + Bucket=bucket, Delete={"Objects": objects_to_delete} + ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py index 256b464523b..889b8cfcd1c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py @@ -1,8 +1,7 @@ import logging from typing import cast -from aws_library.s3.client import SimcoreS3API -from aws_library.s3.errors import S3NotConnectedError +from aws_library.s3 import S3NotConnectedError, SimcoreS3API from fastapi import FastAPI from models_library.api_schemas_storage import S3BucketName from settings_library.s3 import S3Settings diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py index 536d7c9423f..be1b73824cf 100644 --- 
a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone import shortuuid -from aws_library.s3.client import SimcoreS3API +from aws_library.s3 import SimcoreS3API from models_library.api_schemas_resource_usage_tracker.service_runs import ( ServiceRunGet, ServiceRunPage, diff --git a/services/storage/src/simcore_service_storage/application.py b/services/storage/src/simcore_service_storage/application.py index 3cda74cd3ab..ce06726bb54 100644 --- a/services/storage/src/simcore_service_storage/application.py +++ b/services/storage/src/simcore_service_storage/application.py @@ -17,7 +17,7 @@ from .db import setup_db from .dsm import setup_dsm from .dsm_cleaner import setup_dsm_cleaner -from .long_running_tasks import setup_long_running_tasks +from .long_running_tasks import setup_rest_api_long_running_tasks from .redis import setup_redis from .rest import setup_rest from .s3 import setup_s3 @@ -61,20 +61,17 @@ def create(settings: Settings) -> web.Application: setup_db(app) setup_s3(app) - setup_long_running_tasks(app) + setup_rest_api_long_running_tasks(app) setup_rest(app) - if settings.STORAGE_REDIS: - setup_redis(app) - setup_dsm(app) if settings.STORAGE_CLEANER_INTERVAL_S: + setup_redis(app) setup_dsm_cleaner(app) app.middlewares.append(dsm_exception_handler) if settings.STORAGE_PROFILING: - app.middlewares.append(profiling_middleware) if settings.LOG_LEVEL == "DEBUG": diff --git a/services/storage/src/simcore_service_storage/constants.py b/services/storage/src/simcore_service_storage/constants.py index ee9ccd3f0ce..f9c6a36f5c2 100644 --- a/services/storage/src/simcore_service_storage/constants.py +++ b/services/storage/src/simcore_service_storage/constants.py @@ -1,7 +1,8 @@ from typing import Final +from aws_library.s3 import PRESIGNED_LINK_MAX_SIZE, S3_MAX_FILE_SIZE from models_library.api_schemas_storage import LinkType -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize from servicelib.aiohttp import application_keys RETRY_WAIT_SECS = 2 @@ -25,13 +26,6 @@ S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID = "UNDEFINED/EXTERNALID" -PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5GiB") -S3_MAX_FILE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5TiB") - -# AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html -MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") -MULTIPART_UPLOADS_MIN_PART_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "10MiB") - MAX_LINK_CHUNK_BYTE_SIZE: Final[dict[LinkType, ByteSize]] = { LinkType.PRESIGNED: PRESIGNED_LINK_MAX_SIZE, diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py index 8ed8228d369..4ae743afef9 100644 --- a/services/storage/src/simcore_service_storage/handlers_health.py +++ b/services/storage/src/simcore_service_storage/handlers_health.py @@ -3,10 +3,11 @@ - Checks connectivity with other services in the backend """ + import logging from aiohttp import web -from aws_library.s3.errors import S3AccessError, S3BucketInvalidError +from aws_library.s3 import S3AccessError from models_library.api_schemas_storage import HealthCheck, S3BucketName from models_library.app_diagnostics import AppStatusCheck from 
models_library.utils.json_serialization import json_dumps @@ -49,12 +50,13 @@ async def get_status(request: web.Request) -> web.Response: s3_state = "disabled" if app_settings.STORAGE_S3: try: - await get_s3_client(request.app).check_bucket_connection( - S3BucketName(app_settings.STORAGE_S3.S3_BUCKET_NAME) + s3_state = ( + "connected" + if await get_s3_client(request.app).bucket_exists( + bucket=S3BucketName(app_settings.STORAGE_S3.S3_BUCKET_NAME) + ) + else "no access to S3 bucket" ) - s3_state = "connected" - except S3BucketInvalidError: - s3_state = "no access to S3 bucket" except S3AccessError: s3_state = "failed" diff --git a/services/storage/src/simcore_service_storage/long_running_tasks.py b/services/storage/src/simcore_service_storage/long_running_tasks.py index 0260728adc9..6f0f43c9a8a 100644 --- a/services/storage/src/simcore_service_storage/long_running_tasks.py +++ b/services/storage/src/simcore_service_storage/long_running_tasks.py @@ -4,7 +4,7 @@ from ._meta import API_VTAG -def setup_long_running_tasks(app: web.Application) -> None: +def setup_rest_api_long_running_tasks(app: web.Application) -> None: setup( app, router_prefix=f"/{API_VTAG}/futures", diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 0f1cf1780eb..b62fce18860 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -4,6 +4,7 @@ from typing import Final, Literal, NamedTuple from uuid import UUID +from aws_library.s3 import UploadID from models_library.api_schemas_storage import ( DatasetMetaDataGet, ETag, @@ -40,8 +41,6 @@ UNDEFINED_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, -1) -UploadID = str - class DatasetMetaData(DatasetMetaDataGet): ... 
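A side note on the health-check change in handlers_health.py above: `bucket_exists` comes from the `aws-library` package, so its implementation is not part of this patch. Conceptually it is a boolean HeadBucket probe. A minimal sketch, assuming a `types-aiobotocore` client; the helper name `_bucket_exists` is illustrative, not the actual aws-library API:

from botocore.exceptions import ClientError
from types_aiobotocore_s3 import S3Client


async def _bucket_exists(s3_client: S3Client, *, bucket: str) -> bool:
    # HeadBucket returns HTTP 200 only when the bucket exists and the caller
    # has access to it; a missing bucket or denied access raises ClientError
    # (404/403), which maps cleanly onto the bool used by get_status() above
    try:
        await s3_client.head_bucket(Bucket=bucket)
    except ClientError:
        return False
    return True

This keeps the handler exception-free for the common "bucket missing" case, while genuine S3 outages still surface as S3AccessError and map to the "failed" state.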
@@ -113,9 +112,9 @@ def from_simcore_node( "object_name": file_id, "file_name": parts[-1], "user_id": user_id, - "project_id": parse_obj_as(ProjectID, parts[0]) - if is_uuid(parts[0]) - else None, + "project_id": ( + parse_obj_as(ProjectID, parts[0]) if is_uuid(parts[0]) else None + ), "node_id": parse_obj_as(NodeID, parts[1]) if is_uuid(parts[1]) else None, "file_id": file_id, "created_at": now, @@ -138,12 +137,6 @@ class UploadLinks: chunk_size: ByteSize -class MultiPartUploadLinks(BaseModel): - upload_id: UploadID - chunk_size: ByteSize - urls: list[AnyUrl] - - class StorageQueryParamsBase(BaseModel): user_id: UserID diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py index 0d7da465b50..bb8e5fc1811 100644 --- a/services/storage/src/simcore_service_storage/s3.py +++ b/services/storage/src/simcore_service_storage/s3.py @@ -3,76 +3,70 @@ """ import logging +from collections.abc import AsyncGenerator from typing import cast from aiohttp import web +from aws_library.s3 import SimcoreS3API from models_library.utils.json_serialization import json_dumps +from servicelib.logging_utils import log_context from tenacity._asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_fixed from .constants import APP_CONFIG_KEY, APP_S3_KEY, RETRY_WAIT_SECS -from .s3_client import StorageS3Client from .settings import Settings log = logging.getLogger(__name__) -async def setup_s3_client(app): - log.debug("setup %s.setup.cleanup_ctx", __name__) - # setup - storage_settings: Settings = app[APP_CONFIG_KEY] - storage_s3_settings = storage_settings.STORAGE_S3 - assert storage_s3_settings # nosec - - client = None - async for attempt in AsyncRetrying( - wait=wait_fixed(RETRY_WAIT_SECS), - before_sleep=before_sleep_log(log, logging.WARNING), - reraise=True, - ): - with attempt: - client = await StorageS3Client.create( - storage_s3_settings, - storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY, - ) - log.info( - "S3 client %s successfully created [%s]", - f"{client=}", - json_dumps(attempt.retry_state.retry_object.statistics), - ) - assert client # nosec - app[APP_S3_KEY] = client - - yield - # tear-down - log.debug("closing %s", f"{client=}") - await client.close() - - log.info("closed s3 client %s", f"{client=}") +async def setup_s3_client(app) -> AsyncGenerator[None, None]: + with log_context(log, logging.DEBUG, msg=f"setup {__name__}.setup.cleanup_ctx"): + storage_settings: Settings = app[APP_CONFIG_KEY] + storage_s3_settings = storage_settings.STORAGE_S3 + assert storage_s3_settings # nosec + + client = None + async for attempt in AsyncRetrying( + wait=wait_fixed(RETRY_WAIT_SECS), + before_sleep=before_sleep_log(log, logging.WARNING), + reraise=True, + ): + with attempt: + client = await SimcoreS3API.create( + storage_s3_settings, + storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY, + ) + log.info( + "S3 client %s successfully created [%s]", + f"{client=}", + json_dumps(attempt.retry_state.retry_object.statistics), + ) + assert client # nosec + app[APP_S3_KEY] = client + + yield + # tear-down + await client.close() async def setup_s3_bucket(app: web.Application): storage_s3_settings = app[APP_CONFIG_KEY].STORAGE_S3 client = get_s3_client(app) await client.create_bucket( - storage_s3_settings.S3_BUCKET_NAME, storage_s3_settings.S3_REGION + bucket=storage_s3_settings.S3_BUCKET_NAME, region=storage_s3_settings.S3_REGION ) yield def setup_s3(app: web.Application): - """minio/s3 
service setup""" - - log.debug("Setting up %s ...", __name__) - if setup_s3_client not in app.cleanup_ctx: app.cleanup_ctx.append(setup_s3_client) if setup_s3_bucket not in app.cleanup_ctx: app.cleanup_ctx.append(setup_s3_bucket) -def get_s3_client(app: web.Application) -> StorageS3Client: +def get_s3_client(app: web.Application) -> SimcoreS3API: assert app[APP_S3_KEY] # nosec - assert isinstance(app[APP_S3_KEY], StorageS3Client) # nosec - return cast(StorageS3Client, app[APP_S3_KEY]) + assert isinstance(app[APP_S3_KEY], SimcoreS3API) # nosec + return cast(SimcoreS3API, app[APP_S3_KEY]) diff --git a/services/storage/src/simcore_service_storage/s3_client.py b/services/storage/src/simcore_service_storage/s3_client.py deleted file mode 100644 index 411fef583b5..00000000000 --- a/services/storage/src/simcore_service_storage/s3_client.py +++ /dev/null @@ -1,441 +0,0 @@ -import datetime -import json -import logging -import urllib.parse -from collections.abc import AsyncGenerator, Callable -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Final, TypeAlias - -from aws_library.s3.client import SimcoreS3API -from aws_library.s3.errors import S3KeyNotFoundError, s3_exception_handler -from boto3.s3.transfer import TransferConfig -from models_library.api_schemas_storage import UploadedPart -from models_library.basic_types import IDStr, SHA256Str -from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from pydantic import AnyUrl, ByteSize, NonNegativeInt, parse_obj_as -from servicelib.logging_utils import log_context -from servicelib.utils import logged_gather -from types_aiobotocore_s3 import S3Client -from types_aiobotocore_s3.type_defs import ( - ListObjectsV2OutputTypeDef, - ObjectTypeDef, - PaginatorConfigTypeDef, -) - -from .constants import EXPAND_DIR_MAX_ITEM_COUNT, MULTIPART_UPLOADS_MIN_TOTAL_SIZE -from .models import ETag, MultiPartUploadLinks, S3BucketName, UploadID -from .s3_utils import compute_num_file_chunks - -_logger = logging.getLogger(__name__) - - -_MAX_ITEMS_PER_PAGE: Final[NonNegativeInt] = 500 - - -NextContinuationToken: TypeAlias = str - - -@dataclass(frozen=True) -class S3MetaData: - file_id: SimcoreS3FileID - last_modified: datetime.datetime - e_tag: ETag - sha256_checksum: SHA256Str | None - size: int - - @staticmethod - def from_botocore_object(obj: ObjectTypeDef) -> "S3MetaData": - assert "Key" in obj # nosec - assert "LastModified" in obj # nosec - assert "ETag" in obj # nosec - assert "Size" in obj # nosec - return S3MetaData( - file_id=SimcoreS3FileID(obj["Key"]), - last_modified=obj["LastModified"], - e_tag=json.loads(obj["ETag"]), - sha256_checksum=( - SHA256Str(obj.get("ChecksumSHA256")) - if obj.get("ChecksumSHA256") - else None - ), - size=obj["Size"], - ) - - -@dataclass(frozen=True) -class S3FolderMetaData: - size: int - - -async def _list_objects_v2_paginated_gen( - client: S3Client, - bucket: S3BucketName, - prefix: str, - *, - items_per_page: int = _MAX_ITEMS_PER_PAGE, -) -> AsyncGenerator[list[ObjectTypeDef], None]: - pagination_config: PaginatorConfigTypeDef = { - "PageSize": items_per_page, - } - - page: ListObjectsV2OutputTypeDef - async for page in client.get_paginator("list_objects_v2").paginate( - Bucket=bucket, Prefix=prefix, PaginationConfig=pagination_config - ): - items_in_page: list[ObjectTypeDef] = page.get("Contents", []) - yield items_in_page - - -_DEFAULT_AWS_REGION: Final[str] = "us-east-1" - - -class StorageS3Client(SimcoreS3API): # pylint: 
disable=too-many-public-methods - @s3_exception_handler(_logger) - async def create_bucket(self, bucket: S3BucketName, region: IDStr) -> None: - _logger.debug("Creating bucket: %s", bucket) - try: - # NOTE: see https://github.com/boto/boto3/issues/125 why this is so... (sic) - # setting it for the us-east-1 creates issue when creating buckets - create_bucket_config = {"Bucket": bucket} - if region != _DEFAULT_AWS_REGION: - create_bucket_config["CreateBucketConfiguration"] = { - "LocationConstraint": region - } - await self.client.create_bucket(**create_bucket_config) - - _logger.info("Bucket %s successfully created", bucket) - except self.client.exceptions.BucketAlreadyOwnedByYou: - _logger.info( - "Bucket %s already exists and is owned by us", - bucket, - ) - - @s3_exception_handler(_logger) - async def check_bucket_connection(self, bucket: S3BucketName) -> None: - """ - :raises: S3BucketInvalidError if not existing, not enough rights - :raises: S3AccessError for any other error - """ - _logger.debug("Head bucket: %s", bucket) - await self.client.head_bucket(Bucket=bucket) - - @s3_exception_handler(_logger) - async def create_single_presigned_download_link( - self, bucket: S3BucketName, file_id: SimcoreS3FileID, expiration_secs: int - ) -> AnyUrl: - # NOTE: ensure the bucket/object exists, this will raise if not - await self.client.head_bucket(Bucket=bucket) - await self.get_file_metadata(bucket, file_id) - generated_link = await self.client.generate_presigned_url( - "get_object", - Params={"Bucket": bucket, "Key": file_id}, - ExpiresIn=expiration_secs, - ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) - return url - - @s3_exception_handler(_logger) - async def create_single_presigned_upload_link( - self, bucket: S3BucketName, file_id: SimcoreS3FileID, expiration_secs: int - ) -> AnyUrl: - # NOTE: ensure the bucket/object exists, this will raise if not - await self.client.head_bucket(Bucket=bucket) - generated_link = await self.client.generate_presigned_url( - "put_object", - Params={"Bucket": bucket, "Key": file_id}, - ExpiresIn=expiration_secs, - ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) - return url - - @s3_exception_handler(_logger) - async def create_multipart_upload_links( - self, - bucket: S3BucketName, - file_id: SimcoreS3FileID, - file_size: ByteSize, - expiration_secs: int, - sha256_checksum: SHA256Str | None, - ) -> MultiPartUploadLinks: - # NOTE: ensure the bucket/object exists, this will raise if not - await self.client.head_bucket(Bucket=bucket) - # first initiate the multipart upload - create_input: dict[str, Any] = {"Bucket": bucket, "Key": file_id} - if sha256_checksum: - create_input["Metadata"] = {"sha256_checksum": sha256_checksum} - response = await self.client.create_multipart_upload(**create_input) - upload_id = response["UploadId"] - # compute the number of links, based on the announced file size - num_upload_links, chunk_size = compute_num_file_chunks(file_size) - # now create the links - upload_links = parse_obj_as( - list[AnyUrl], - await logged_gather( - *[ - self.client.generate_presigned_url( - "upload_part", - Params={ - "Bucket": bucket, - "Key": file_id, - "PartNumber": i + 1, - "UploadId": upload_id, - }, - ExpiresIn=expiration_secs, - ) - for i in range(num_upload_links) - ], - log=_logger, - max_concurrency=20, - ), - ) - return MultiPartUploadLinks( - upload_id=upload_id, chunk_size=chunk_size, urls=upload_links - ) - - @s3_exception_handler(_logger) - async def list_ongoing_multipart_uploads( - self, - bucket: S3BucketName, - ) -> 
list[tuple[UploadID, SimcoreS3FileID]]: - """Returns all the currently ongoing multipart uploads - - NOTE: minio does not implement the same behaviour as AWS here and will - only return the uploads if a prefix or object name is given [minio issue](https://github.com/minio/minio/issues/7632). - - :return: list of AWS uploads see [boto3 documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.list_multipart_uploads) - """ - response = await self.client.list_multipart_uploads( - Bucket=bucket, - ) - - return [ - ( - upload.get("UploadId", "undefined-uploadid"), - SimcoreS3FileID(upload.get("Key", "undefined-key")), - ) - for upload in response.get("Uploads", []) - ] - - @s3_exception_handler(_logger) - async def abort_multipart_upload( - self, bucket: S3BucketName, file_id: SimcoreS3FileID, upload_id: UploadID - ) -> None: - await self.client.abort_multipart_upload( - Bucket=bucket, Key=file_id, UploadId=upload_id - ) - - @s3_exception_handler(_logger) - async def complete_multipart_upload( - self, - bucket: S3BucketName, - file_id: SimcoreS3FileID, - upload_id: UploadID, - uploaded_parts: list[UploadedPart], - ) -> ETag: - inputs: dict[str, Any] = { - "Bucket": bucket, - "Key": file_id, - "UploadId": upload_id, - "MultipartUpload": { - "Parts": [ - {"ETag": part.e_tag, "PartNumber": part.number} - for part in uploaded_parts - ] - }, - } - response = await self.client.complete_multipart_upload(**inputs) - return response["ETag"] - - @s3_exception_handler(_logger) - async def delete_file(self, bucket: S3BucketName, file_id: SimcoreS3FileID) -> None: - await self.client.delete_object(Bucket=bucket, Key=file_id) - - @s3_exception_handler(_logger) - async def undelete_file( - self, bucket: S3BucketName, file_id: SimcoreS3FileID - ) -> None: - with log_context(_logger, logging.DEBUG, msg=f"undeleting {bucket}/{file_id}"): - response = await self.client.list_object_versions( - Bucket=bucket, Prefix=file_id, MaxKeys=1 - ) - _logger.debug("%s", f"{response=}") - - if all(k not in response for k in ["Versions", "DeleteMarkers"]): - # that means there is no such file_id - raise S3KeyNotFoundError(key=file_id, bucket=bucket) - - if "DeleteMarkers" in response: - latest_version = response["DeleteMarkers"][0] - assert "IsLatest" in latest_version # nosec - assert "VersionId" in latest_version # nosec - if latest_version["IsLatest"]: - await self.client.delete_object( - Bucket=bucket, - Key=file_id, - VersionId=latest_version["VersionId"], - ) - _logger.debug("restored %s", f"{bucket}/{file_id}") - - async def list_all_objects_gen( - self, bucket: S3BucketName, *, prefix: str - ) -> AsyncGenerator[list[ObjectTypeDef], None]: - async for s3_objects in _list_objects_v2_paginated_gen( - self.client, bucket=bucket, prefix=prefix - ): - yield s3_objects - - @s3_exception_handler(_logger) - async def delete_files_in_path(self, bucket: S3BucketName, *, prefix: str) -> None: - """Removes one or more files from a given S3 path. 
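A side note on `undelete_file` above: it relies on S3 versioning semantics, where deleting an object in a versioned bucket only stacks a delete marker on top of it. Stripped of the not-found handling, the restore step amounts to the following sketch (assuming the same `types-aiobotocore` client used throughout; this is not a drop-in replacement for the method above):

from types_aiobotocore_s3 import S3Client


async def _undelete(s3_client: S3Client, *, bucket: str, key: str) -> None:
    # in a versioned bucket, "deleting" an object only writes a delete
    # marker; removing the newest marker makes the previous version
    # visible again
    response = await s3_client.list_object_versions(
        Bucket=bucket, Prefix=key, MaxKeys=1
    )
    delete_markers = response.get("DeleteMarkers", [])
    if delete_markers and delete_markers[0].get("IsLatest"):
        await s3_client.delete_object(
            Bucket=bucket,
            Key=key,
            VersionId=delete_markers[0]["VersionId"],
        )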
- - # NOTE: the / at the end of the Prefix is VERY important, - # makes the listing several order of magnitudes faster - """ - - # NOTE: deletion of objects is done in batches of max 1000 elements, - # the maximum accepted by the S3 API - with log_context( - _logger, logging.INFO, f"deleting objects in {prefix=}", log_duration=True - ): - async for s3_objects in self.list_all_objects_gen(bucket, prefix=prefix): - if objects_to_delete := [f["Key"] for f in s3_objects if "Key" in f]: - await self.client.delete_objects( - Bucket=bucket, - Delete={"Objects": [{"Key": key} for key in objects_to_delete]}, - ) - - @s3_exception_handler(_logger) - async def delete_files_in_project_node( - self, - bucket: S3BucketName, - project_id: ProjectID, - node_id: NodeID | None = None, - ) -> None: - await self.delete_files_in_path( - bucket, prefix=f"{project_id}/{node_id}/" if node_id else f"{project_id}/" - ) - - @s3_exception_handler(_logger) - async def get_file_metadata( - self, bucket: S3BucketName, file_id: SimcoreS3FileID - ) -> S3MetaData: - response = await self.client.head_object( - Bucket=bucket, Key=file_id, ChecksumMode="ENABLED" - ) - return S3MetaData( - file_id=file_id, - last_modified=response["LastModified"], - e_tag=json.loads(response["ETag"]), - sha256_checksum=response.get("ChecksumSHA256"), - size=response["ContentLength"], - ) - - async def _list_all_objects( - self, bucket: S3BucketName, *, prefix: str - ) -> AsyncGenerator[ObjectTypeDef, None]: - async for s3_objects in self.list_all_objects_gen(bucket, prefix=prefix): - for obj in s3_objects: - yield obj - - @s3_exception_handler(_logger) - async def get_directory_metadata( - self, bucket: S3BucketName, *, prefix: str - ) -> S3FolderMetaData: - size = 0 - async for s3_object in self._list_all_objects(bucket, prefix=prefix): - assert "Size" in s3_object # nosec - size += s3_object["Size"] - return S3FolderMetaData(size=size) - - @s3_exception_handler(_logger) - async def copy_file( - self, - bucket: S3BucketName, - src_file: SimcoreS3FileID, - dst_file: SimcoreS3FileID, - bytes_transfered_cb: Callable[[int], None] | None, - ) -> None: - """copy a file in S3 using aioboto3 transfer manager (e.g. 
works >5Gb and creates multiple threads) - - :type bucket: S3BucketName - :type src_file: SimcoreS3FileID - :type dst_file: SimcoreS3FileID - """ - copy_options = { - "CopySource": {"Bucket": bucket, "Key": src_file}, - "Bucket": bucket, - "Key": dst_file, - "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), - } - if bytes_transfered_cb: - copy_options |= {"Callback": bytes_transfered_cb} - await self.client.copy(**copy_options) - - @s3_exception_handler(_logger) - async def list_files( - self, - bucket: S3BucketName, - *, - prefix: str, - max_files_to_list: int = EXPAND_DIR_MAX_ITEM_COUNT, - ) -> list[S3MetaData]: - """ - NOTE: adding a / at the end of a folder improves speed by several orders of magnitudes - This endpoint is currently limited to only return EXPAND_DIR_MAX_ITEM_COUNT by default - """ - found_items: list[ObjectTypeDef] = [] - async for s3_objects in _list_objects_v2_paginated_gen( - self.client, bucket, prefix, items_per_page=max_files_to_list - ): - found_items.extend(s3_objects) - # NOTE: stop immediately after listing after `max_files_to_list` - break - - return [ - S3MetaData.from_botocore_object(entry) - for entry in found_items - if all(k in entry for k in ("Key", "LastModified", "ETag", "Size")) - ] - - @s3_exception_handler(_logger) - async def file_exists(self, bucket: S3BucketName, *, s3_object: str) -> bool: - """Checks if an S3 object exists""" - # SEE https://www.peterbe.com/plog/fastest-way-to-find-out-if-a-file-exists-in-s3 - response = await self.client.list_objects_v2(Bucket=bucket, Prefix=s3_object) - return len(response.get("Contents", [])) > 0 - - @s3_exception_handler(_logger) - async def upload_file( - self, - bucket: S3BucketName, - file: Path, - file_id: SimcoreS3FileID, - bytes_transfered_cb: Callable[[int], None] | None, - ) -> None: - """upload a file using aioboto3 transfer manager (e.g. works >5Gb and create multiple threads) - - :type bucket: S3BucketName - :type file: Path - :type file_id: SimcoreS3FileID - """ - upload_options = { - "Bucket": bucket, - "Key": file_id, - "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), - } - if bytes_transfered_cb: - upload_options |= {"Callback": bytes_transfered_cb} - await self.client.upload_file(f"{file}", **upload_options) - - @staticmethod - def compute_s3_url(bucket: S3BucketName, file_id: SimcoreS3FileID) -> AnyUrl: - url: AnyUrl = parse_obj_as( - AnyUrl, f"s3://{bucket}/{urllib.parse.quote(file_id)}" - ) - return url - - @staticmethod - def is_multipart(file_size: ByteSize) -> bool: - return file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE diff --git a/services/storage/src/simcore_service_storage/s3_utils.py b/services/storage/src/simcore_service_storage/s3_utils.py index 0e68973b80e..63f4676956a 100644 --- a/services/storage/src/simcore_service_storage/s3_utils.py +++ b/services/storage/src/simcore_service_storage/s3_utils.py @@ -1,6 +1,5 @@ import logging from dataclasses import dataclass -from typing import Final from pydantic import ByteSize, parse_obj_as from servicelib.aiohttp.long_running_tasks.server import ( @@ -11,37 +10,6 @@ logger = logging.getLogger(__name__) -# this is artifically defined, if possible we keep a maximum number of requests for parallel -# uploading. 
If that is not possible then we create as many upload part as the max part size allows -_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [ - parse_obj_as(ByteSize, x) - for x in [ - "10Mib", - "50Mib", - "100Mib", - "200Mib", - "400Mib", - "600Mib", - "800Mib", - "1Gib", - "2Gib", - "3Gib", - "4Gib", - "5Gib", - ] -] -_MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000 - - -def compute_num_file_chunks(file_size: ByteSize) -> tuple[int, ByteSize]: - for chunk in _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: - num_upload_links = int(file_size / chunk) + (1 if file_size % chunk > 0 else 0) - if num_upload_links < _MULTIPART_MAX_NUMBER_OF_PARTS: - return (num_upload_links, chunk) - raise ValueError( - f"Could not determine number of upload links for {file_size=}", - ) - def update_task_progress( task_progress: TaskProgress | None, diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 301579040c5..d9e3d39242b 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -14,7 +14,7 @@ from aiohttp import web from aiopg.sa import Engine from aiopg.sa.connection import SAConnection -from aws_library.s3.errors import S3KeyNotFoundError +from aws_library.s3 import S3KeyNotFoundError, S3MetaData from models_library.api_schemas_storage import LinkType, S3BucketName, UploadedPart from models_library.basic_types import SHA256Str from models_library.projects import ProjectID @@ -29,8 +29,7 @@ from servicelib.aiohttp.client_session import get_client_session from servicelib.aiohttp.long_running_tasks.server import TaskProgress from servicelib.logging_utils import log_context -from servicelib.sequences_utils import partition_gen -from servicelib.utils import ensure_ends_with, logged_gather +from servicelib.utils import ensure_ends_with, limited_gather from . 
import db_file_meta_data, db_projects, db_tokens from .constants import ( @@ -68,7 +67,6 @@ UserOrProjectFilter, ) from .s3 import get_s3_client -from .s3_client import S3MetaData, StorageS3Client from .s3_utils import S3TransferDataCB, update_task_progress from .settings import Settings from .simcore_s3_dsm_utils import expand_directory, get_directory_file_id @@ -214,14 +212,14 @@ async def list_files( # noqa C901 max_items_to_include = EXPAND_DIR_MAX_ITEM_COUNT - len(data) directory_expands.append( expand_directory( - self.app, + get_s3_client(self.app), self.simcore_bucket_name, metadata, max_items_to_include, ) ) - for files_in_directory in await logged_gather( - *directory_expands, max_concurrency=_MAX_PARALLEL_S3_CALLS + for files_in_directory in await limited_gather( + *directory_expands, limit=_MAX_PARALLEL_S3_CALLS ): data.extend(files_in_directory) @@ -317,9 +315,9 @@ async def create_file_upload_links( multipart_presigned_links = await get_s3_client( self.app ).create_multipart_upload_links( - fmd.bucket_name, - fmd.file_id, - file_size_bytes, + bucket=fmd.bucket_name, + object_key=fmd.file_id, + file_size=file_size_bytes, expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, sha256_checksum=fmd.sha256_checksum, ) @@ -335,8 +333,8 @@ async def create_file_upload_links( single_presigned_link = await get_s3_client( self.app ).create_single_presigned_upload_link( - self.simcore_bucket_name, - fmd.file_id, + bucket=self.simcore_bucket_name, + object_key=fmd.file_id, expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, ) return UploadLinks( @@ -346,7 +344,8 @@ async def create_file_upload_links( # user wants just the s3 link s3_link = get_s3_client(self.app).compute_s3_url( - self.simcore_bucket_name, parse_obj_as(SimcoreS3FileID, file_id) + bucket=self.simcore_bucket_name, + object_key=parse_obj_as(SimcoreS3FileID, file_id), ) return UploadLinks( [s3_link], file_size_bytes or MAX_LINK_CHUNK_BYTE_SIZE[link_type] @@ -371,13 +370,13 @@ async def abort_file_upload( assert fmd.upload_id # nosec await get_s3_client(self.app).abort_multipart_upload( bucket=fmd.bucket_name, - file_id=fmd.file_id, + object_key=fmd.file_id, upload_id=fmd.upload_id, ) # try to recover a file if it existed with contextlib.suppress(S3KeyNotFoundError): - await get_s3_client(self.app).undelete_file( - bucket=fmd.bucket_name, file_id=fmd.file_id + await get_s3_client(self.app).undelete_object( + bucket=fmd.bucket_name, object_key=fmd.file_id ) try: @@ -416,7 +415,7 @@ async def complete_file_upload( assert fmd.upload_id # nosec await get_s3_client(self.app).complete_multipart_upload( bucket=self.simcore_bucket_name, - file_id=fmd.file_id, + object_key=fmd.file_id, upload_id=fmd.upload_id, uploaded_parts=uploaded_parts, ) @@ -471,9 +470,9 @@ async def __get_link( ) if link_type == LinkType.PRESIGNED: link = await get_s3_client(self.app).create_single_presigned_download_link( - self.simcore_bucket_name, - s3_file_id, - self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + bucket=self.simcore_bucket_name, + object_key=s3_file_id, + expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, ) return link @@ -505,8 +504,8 @@ async def _get_link_for_directory_fmd( ) -> AnyUrl: # 2. 
the file_id represents a file inside a directory await self.__ensure_read_access_rights(conn, user_id, directory_file_id) - if not await get_s3_client(self.app).file_exists( - self.simcore_bucket_name, s3_object=f"{file_id}" + if not await get_s3_client(self.app).object_exists( + bucket=self.simcore_bucket_name, object_key=f"{file_id}" ): raise S3KeyNotFoundError(key=file_id, bucket=self.simcore_bucket_name) return await self.__get_link(parse_obj_as(SimcoreS3FileID, file_id), link_type) @@ -542,8 +541,8 @@ async def delete_file( # NOTE: since this lists the files before deleting them # it can be used to filter for just a single file and also # to delete it - await get_s3_client(self.app).delete_files_in_path( - file.bucket_name, + await get_s3_client(self.app).delete_objects_recursively( + bucket=file.bucket_name, prefix=( ensure_ends_with(file.file_id, "/") if file.is_directory @@ -570,8 +569,11 @@ async def delete_project_simcore_s3( else: await db_file_meta_data.delete_all_from_node(conn, node_id) - await get_s3_client(self.app).delete_files_in_project_node( - self.simcore_bucket_name, project_id, node_id + await get_s3_client(self.app).delete_objects_recursively( + bucket=self.simcore_bucket_name, + prefix=ensure_ends_with( + f"{project_id}/{node_id}" if node_id else f"{project_id}", "/" + ), ) async def deep_copy_project_simcore_s3( # noqa: C901 @@ -631,18 +633,10 @@ async def deep_copy_project_simcore_s3( # noqa: C901 ), log_duration=True, ): - sizes_and_num_files: list[tuple[ByteSize, int]] = [] - for src_project_files_slice in partition_gen( - src_project_files, slice_size=_MAX_PARALLEL_S3_CALLS - ): - sizes_and_num_files.extend( - await logged_gather( - *[ - self._get_size_and_num_files(fmd) - for fmd in src_project_files_slice - ] - ) - ) + sizes_and_num_files: list[tuple[ByteSize, int]] = await limited_gather( + *[self._get_size_and_num_files(fmd) for fmd in src_project_files], + limit=_MAX_PARALLEL_S3_CALLS, + ) total_bytes_to_copy = sum(n for n, _ in sizes_and_num_files) total_num_of_files = sum(n for _, n in sizes_and_num_files) @@ -700,10 +694,8 @@ async def deep_copy_project_simcore_s3( # noqa: C901 and (int(output.get("store", self.location_id)) == DATCORE_ID) ] ) - for copy_tasks_slice in partition_gen( - copy_tasks, slice_size=MAX_CONCURRENT_S3_TASKS - ): - await logged_gather(*copy_tasks_slice) + await limited_gather(*copy_tasks, limit=MAX_CONCURRENT_S3_TASKS) + # ensure the full size is reported s3_transfered_data_cb.finalize_transfer() _logger.info( @@ -721,11 +713,15 @@ async def _get_size_and_num_files( # in case of directory list files and return size total_size: int = 0 total_num_s3_objects = 0 - async for s3_objects in get_s3_client(self.app).list_all_objects_gen( - self.simcore_bucket_name, - prefix=f"{fmd.object_name}", + async for s3_objects in get_s3_client(self.app).list_objects_paginated( + bucket=self.simcore_bucket_name, + prefix=( + ensure_ends_with(f"{fmd.object_name}", "/") + if fmd.is_directory + else fmd.object_name + ), ): - total_size += sum(x.get("Size", 0) for x in s3_objects) + total_size += sum(x.size for x in s3_objects) total_num_s3_objects += len(s3_objects) return parse_obj_as(ByteSize, total_size), total_num_s3_objects @@ -795,8 +791,8 @@ async def synchronise_meta_data_table( file_ids_to_remove = [ fmd.file_id async for fmd in db_file_meta_data.list_valid_uploads(conn) - if not await get_s3_client(self.app).file_exists( - self.simcore_bucket_name, s3_object=fmd.object_name + if not await get_s3_client(self.app).object_exists( + 
bucket=self.simcore_bucket_name, object_key=fmd.object_name ) ] @@ -820,7 +816,7 @@ async def _clean_pending_upload( assert fmd.upload_id # nosec await get_s3_client(self.app).abort_multipart_upload( bucket=self.simcore_bucket_name, - file_id=file_id, + object_key=file_id, upload_id=fmd.upload_id, ) @@ -844,14 +840,14 @@ async def _clean_expired_uploads(self) -> None: ) # try first to upload these from S3, they might have finished and the client forgot to tell us (conservative) - updated_fmds = await logged_gather( + updated_fmds = await limited_gather( *( self._update_database_from_storage_no_connection(fmd) for fmd in list_of_expired_uploads ), reraise=False, log=_logger, - max_concurrency=_NO_CONCURRENCY, + limit=_NO_CONCURRENCY, ) list_of_fmds_to_delete = [ expired_fmd @@ -869,20 +865,22 @@ async def _revert_file( assert fmd.upload_id # nosec await s3_client.abort_multipart_upload( bucket=fmd.bucket_name, - file_id=fmd.file_id, + object_key=fmd.file_id, upload_id=fmd.upload_id, ) - await s3_client.undelete_file(fmd.bucket_name, fmd.file_id) + await s3_client.undelete_object( + bucket=fmd.bucket_name, object_key=fmd.file_id + ) return await self._update_database_from_storage(conn, fmd) s3_client = get_s3_client(self.app) async with self.engine.acquire() as conn: # NOTE: no concurrency here as we want to run low resources - reverted_fmds = await logged_gather( + reverted_fmds = await limited_gather( *(_revert_file(conn, fmd) for fmd in list_of_fmds_to_delete), reraise=False, log=_logger, - max_concurrency=_NO_CONCURRENCY, + limit=_NO_CONCURRENCY, ) list_of_fmds_to_delete = [ fmd @@ -915,8 +913,8 @@ async def _update_database_from_storage( ) -> FileMetaDataAtDB: s3_metadata: S3MetaData | None = None if not fmd.is_directory: - s3_metadata = await get_s3_client(self.app).get_file_metadata( - fmd.bucket_name, fmd.object_name + s3_metadata = await get_s3_client(self.app).get_object_metadata( + bucket=fmd.bucket_name, object_key=fmd.object_name ) fmd = await db_file_meta_data.get(conn, fmd.file_id) @@ -926,7 +924,7 @@ async def _update_database_from_storage( fmd.entity_tag = s3_metadata.e_tag elif fmd.is_directory: s3_folder_metadata = await get_s3_client(self.app).get_directory_metadata( - fmd.bucket_name, prefix=fmd.object_name + bucket=fmd.bucket_name, prefix=fmd.object_name ) fmd.file_size = parse_obj_as(ByteSize, s3_folder_metadata.size) fmd.upload_expires_at = None @@ -986,10 +984,10 @@ async def _copy_file_datcore_s3( await transaction.commit() # Uploads local -> S3 await get_s3_client(self.app).upload_file( - self.simcore_bucket_name, - local_file_path, - dst_file_id, - bytes_transfered_cb, + bucket=self.simcore_bucket_name, + file=local_file_path, + object_key=dst_file_id, + bytes_transfered_cb=bytes_transfered_cb, ) updated_fmd = await self._update_database_from_storage(conn, new_fmd) file_storage_link["store"] = self.location_id @@ -1026,34 +1024,20 @@ async def _copy_path_s3_s3( # NOTE: ensure the database is updated so cleaner does not pickup newly created uploads await transaction.commit() - s3_client: StorageS3Client = get_s3_client(self.app) + s3_client = get_s3_client(self.app) if src_fmd.is_directory: - async for s3_objects in s3_client.list_all_objects_gen( - self.simcore_bucket_name, - prefix=src_fmd.object_name, - ): - s3_objects_src_to_new: dict[str, str] = { - x["Key"]: x["Key"].replace( - f"{src_fmd.object_name}", f"{new_fmd.object_name}" - ) - for x in s3_objects - } - - for src, new in s3_objects_src_to_new.items(): - # NOTE: copy_file cannot be called concurrently or 
it will hang.
-                    # test this with copying multiple 1GB files if you do not believe me
-                    await s3_client.copy_file(
-                        self.simcore_bucket_name,
-                        cast(SimcoreS3FileID, src),
-                        cast(SimcoreS3FileID, new),
-                        bytes_transfered_cb=bytes_transfered_cb,
-                    )
+            await s3_client.copy_objects_recursively(
+                bucket=self.simcore_bucket_name,
+                src_prefix=src_fmd.object_name,
+                dst_prefix=new_fmd.object_name,
+                bytes_transfered_cb=bytes_transfered_cb,
+            )
         else:
-            await s3_client.copy_file(
-                self.simcore_bucket_name,
-                src_fmd.object_name,
-                new_fmd.object_name,
+            await s3_client.copy_object(
+                bucket=self.simcore_bucket_name,
+                src_object_key=src_fmd.object_name,
+                dst_object_key=new_fmd.object_name,
                 bytes_transfered_cb=bytes_transfered_cb,
             )
diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py
index 0053b371cb2..94de9fa9643 100644
--- a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py
+++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py
@@ -1,8 +1,9 @@
 from contextlib import suppress
 from pathlib import Path
+from typing import cast
 
-from aiohttp import web
 from aiopg.sa.connection import SAConnection
+from aws_library.s3 import S3MetaData, SimcoreS3API
 from models_library.api_schemas_storage import S3BucketName
 from models_library.projects_nodes_io import (
     SimcoreS3DirectoryID,
@@ -15,13 +16,26 @@
 from . import db_file_meta_data
 from .exceptions import FileMetaDataNotFoundError
 from .models import FileMetaData, FileMetaDataAtDB
-from .s3 import get_s3_client
-from .s3_client import S3MetaData
 from .utils import convert_db_to_model
 
 
+async def _list_all_files_in_folder(
+    *,
+    s3_client: SimcoreS3API,
+    bucket: S3BucketName,
+    prefix: str,
+    max_files_to_list: int,
+) -> list[S3MetaData]:
+    async for s3_objects in s3_client.list_objects_paginated(
+        bucket, prefix, items_per_page=max_files_to_list
+    ):
+        # NOTE: stop immediately after listing the first `max_files_to_list` items
+        return cast(list[S3MetaData], s3_objects)
+    return []
+
+
 async def expand_directory(
-    app: web.Application,
+    s3_client: SimcoreS3API,
     simcore_bucket_name: S3BucketName,
     fmd: FileMetaDataAtDB,
     max_items_to_include: NonNegativeInt,
@@ -30,8 +44,9 @@ async def expand_directory(
     Scans the S3 backend and returns a list of S3MetaData entries which get
     mapped to FileMetaData entries.
     """
-    files_in_folder: list[S3MetaData] = await get_s3_client(app).list_files(
-        simcore_bucket_name,
+    files_in_folder: list[S3MetaData] = await _list_all_files_in_folder(
+        s3_client=s3_client,
+        bucket=simcore_bucket_name,
         prefix=ensure_ends_with(fmd.file_id, "/"),
         max_files_to_list=max_items_to_include,
     )
@@ -41,7 +56,7 @@
             location_id=fmd.location_id,
             location=fmd.location,
             bucket_name=fmd.bucket_name,
-            object_name=x.file_id,
+            object_name=cast(SimcoreS3FileID, x.object_key),
             user_id=fmd.user_id,
             # NOTE: to ensure users have a consistent experience, the
             # `created_at` field is inherited from `last_modified`: even if
             # a file is added 1 month after the creation of the directory,
             # its creation date will not appear to be 1 month in the past.
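            # (S3 itself only exposes `LastModified` on an object; the S3 API
            # has no separate creation timestamp, so `last_modified` is the
            # only timestamp available to inherit from.)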
created_at=x.last_modified, - file_id=x.file_id, + file_id=cast(SimcoreS3FileID, x.object_key), file_size=parse_obj_as(ByteSize, x.size), last_modified=x.last_modified, entity_tag=x.e_tag, diff --git a/services/storage/src/simcore_service_storage/utils_handlers.py b/services/storage/src/simcore_service_storage/utils_handlers.py index b13c89459e1..e0438cc0c92 100644 --- a/services/storage/src/simcore_service_storage/utils_handlers.py +++ b/services/storage/src/simcore_service_storage/utils_handlers.py @@ -3,7 +3,7 @@ from aiohttp import web from aiohttp.typedefs import Handler from aiohttp.web_request import Request -from aws_library.s3.errors import S3AccessError, S3KeyNotFoundError +from aws_library.s3 import S3AccessError, S3KeyNotFoundError from pydantic import ValidationError from servicelib.aiohttp.aiopg_utils import DBAPIError diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index ec6940b1fb2..d4c240a5bf3 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -7,12 +7,12 @@ import asyncio +import logging import sys import urllib.parse -import uuid from collections.abc import AsyncIterator, Awaitable, Callable +from contextlib import AbstractAsyncContextManager, asynccontextmanager from pathlib import Path -from time import perf_counter from typing import cast import aioresponses @@ -21,6 +21,7 @@ import simcore_service_storage from aiohttp.test_utils import TestClient from aiopg.sa import Engine +from aws_library.s3 import SimcoreS3API from faker import Faker from fakeredis.aioredis import FakeRedis from models_library.api_schemas_storage import ( @@ -30,7 +31,6 @@ FileUploadCompleteState, FileUploadCompletionBody, FileUploadSchema, - PresignedLink, UploadedPart, ) from models_library.basic_types import SHA256Str @@ -42,25 +42,29 @@ from pydantic import ByteSize, parse_obj_as from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.s3 import upload_file_to_presigned_link +from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status from simcore_postgres_database.storage_models import file_meta_data, projects, users from simcore_service_storage.application import create from simcore_service_storage.dsm import get_dsm_provider +from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY from simcore_service_storage.models import S3BucketName from simcore_service_storage.s3 import get_s3_client -from simcore_service_storage.s3_client import StorageS3Client from simcore_service_storage.settings import Settings from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from tenacity._asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from tests.helpers.file_utils import upload_file_to_presigned_link from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db +from types_aiobotocore_s3 import S3Client from yarl import URL pytest_plugins = [ "pytest_simcore.aioresponses_mocker", + "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", @@ -86,14 +90,14 @@ def here() -> Path: @pytest.fixture(scope="session") -def package_dir(here) -> Path: +def package_dir(here: Path) -> Path: dirpath = Path(simcore_service_storage.__file__).parent assert 
dirpath.exists() return dirpath @pytest.fixture(scope="session") -def osparc_simcore_root_dir(here) -> Path: +def osparc_simcore_root_dir(here: Path) -> Path: root_dir = here.parent.parent.parent assert root_dir.exists() assert any(root_dir.glob("services")), "Is this service within osparc-simcore repo?" @@ -101,14 +105,7 @@ def osparc_simcore_root_dir(here) -> Path: @pytest.fixture(scope="session") -def osparc_api_specs_dir(osparc_simcore_root_dir) -> Path: - dirpath = osparc_simcore_root_dir / "api" / "specs" - assert dirpath.exists() - return dirpath - - -@pytest.fixture(scope="session") -def project_slug_dir(osparc_simcore_root_dir) -> Path: +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: # uses pytest_simcore.environs.osparc_simcore_root_dir service_folder = osparc_simcore_root_dir / "services" / "storage" assert service_folder.exists() @@ -117,11 +114,10 @@ def project_slug_dir(osparc_simcore_root_dir) -> Path: @pytest.fixture(scope="session") -def project_env_devel_dict(project_slug_dir: Path) -> dict[str, str]: +def project_env_devel_dict(project_slug_dir: Path) -> dict[str, str | None]: env_devel_file = project_slug_dir / ".env-devel" assert env_devel_file.exists() - environ = dotenv.dotenv_values(env_devel_file, verbose=True, interpolate=True) - return environ + return dotenv.dotenv_values(env_devel_file, verbose=True, interpolate=True) @pytest.fixture @@ -135,20 +131,6 @@ def project_env_devel_environment( ## FAKE DATA FIXTURES ---------------------------------------------- -@pytest.fixture -def mock_files_factory(tmpdir_factory) -> Callable[[int], list[Path]]: - def _create_files(count: int) -> list[Path]: - filepaths = [] - for _i in range(count): - filepath = Path(tmpdir_factory.mktemp("data")) / f"{uuid.uuid4()}.txt" - filepath.write_text("Hello world\n") - filepaths.append(filepath) - - return filepaths - - return _create_files - - @pytest.fixture async def cleanup_user_projects_file_metadata(aiopg_engine: Engine): yield @@ -160,7 +142,8 @@ async def cleanup_user_projects_file_metadata(aiopg_engine: Engine): @pytest.fixture -def simcore_s3_dsm(client) -> SimcoreS3DataManager: +def simcore_s3_dsm(client: TestClient) -> SimcoreS3DataManager: + assert client.app return cast( SimcoreS3DataManager, get_dsm_provider(client.app).get(SimcoreS3DataManager.get_location_id()), @@ -170,7 +153,7 @@ def simcore_s3_dsm(client) -> SimcoreS3DataManager: @pytest.fixture async def storage_s3_client( client: TestClient, -) -> StorageS3Client: +) -> SimcoreS3API: assert client.app return get_s3_client(client.app) @@ -182,18 +165,12 @@ async def storage_s3_bucket(app_settings: Settings) -> str: @pytest.fixture -def mock_config( +def app_settings( aiopg_engine: Engine, postgres_host_config: dict[str, str], - mocked_s3_server_envs, + mocked_s3_server_envs: EnvVarsDict, datcore_adapter_service_mock: aioresponses.aioresponses, -) -> None: - # NOTE: this can be overriden in tests that do not need all dependencies up - ... 
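The `mock_config` indirection removed here existed only to pull in dependency fixtures before `Settings.create_from_envs()` runs; folding them directly into `app_settings` removes one level of indirection. A minimal sketch of the resulting pattern, using a hypothetical settings model (not the real `simcore_service_storage.settings.Settings`) and the pydantic v1 API used repo-wide at this point:

import pytest
from pydantic import BaseSettings  # pydantic v1 API


class _FakeSettings(BaseSettings):
    # hypothetical stand-in: the real model has many more fields
    S3_BUCKET_NAME: str = "undefined"


@pytest.fixture
def app_settings(monkeypatch: pytest.MonkeyPatch) -> _FakeSettings:
    # in the real fixture the dependency fixtures (postgres_host_config,
    # mocked_s3_server_envs, ...) inject the environment variables instead
    monkeypatch.setenv("S3_BUCKET_NAME", "pytest-bucket")
    return _FakeSettings()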
- - -@pytest.fixture -def app_settings(mock_config: None) -> Settings: +) -> Settings: test_app_settings = Settings.create_from_envs() print(f"{test_app_settings.json(indent=2)=}") return test_app_settings @@ -240,7 +217,6 @@ def simcore_file_id( ) -# NOTE: this will be enabled at a later timepoint @pytest.fixture( params=[ SimcoreS3DataManager.get_location_id(), @@ -275,59 +251,11 @@ async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet: assert data received_fmd = parse_obj_as(FileMetaDataGet, data) assert received_fmd - print(f"<-- {received_fmd.json(indent=2)=}") return received_fmd return _getter -@pytest.fixture -async def create_upload_file_link_v1( - client: TestClient, user_id: UserID, location_id: LocationID -) -> AsyncIterator[Callable[..., Awaitable[PresignedLink]]]: - file_params: list[tuple[UserID, int, SimcoreS3FileID]] = [] - - async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLink: - assert client.app - url = ( - client.app.router["upload_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(**query_kwargs, user_id=user_id) - ) - assert ( - "file_size" not in url.query - ), "v1 call to upload_file MUST NOT contain file_size field, this is reserved for v2 call" - response = await client.put(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert not error - assert data - received_file_upload_link = parse_obj_as(PresignedLink, data) - assert received_file_upload_link - print(f"--> created link for {file_id=}") - file_params.append((user_id, location_id, file_id)) - return received_file_upload_link - - yield _link_creator - - # cleanup - assert client.app - clean_tasks = [] - for u_id, loc_id, file_id in file_params: - url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{loc_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(user_id=u_id) - ) - clean_tasks.append(client.delete(f"{url}")) - await asyncio.gather(*clean_tasks) - - @pytest.fixture async def create_upload_file_link_v2( client: TestClient, user_id: UserID, location_id: LocationID @@ -355,7 +283,6 @@ async def _link_creator( assert data received_file_upload = parse_obj_as(FileUploadSchema, data) assert received_file_upload - print(f"--> created link for {file_id=}") file_params.append((user_id, location_id, file_id)) return received_file_upload @@ -380,7 +307,7 @@ async def _link_creator( @pytest.fixture def upload_file( aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, client: TestClient, project_id: ProjectID, @@ -395,7 +322,6 @@ async def _uploader( file_size: ByteSize, file_name: str, file_id: SimcoreS3FileID | None = None, - wait_for_completion: bool = True, sha256_checksum: SHA256Str | None = None, project_id: ProjectID = project_id, ) -> tuple[Path, SimcoreS3FileID]: @@ -418,51 +344,45 @@ async def _uploader( ) # complete the upload complete_url = URL(file_upload_link.links.complete_upload).relative() - start = perf_counter() - print(f"--> completing upload of {file=}") - response = await client.post( - f"{complete_url}", - json=jsonable_encoder(FileUploadCompletionBody(parts=part_to_etag)), - ) - response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) - assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) - state_url = 
URL(file_upload_complete_response.links.state).relative() - - if not wait_for_completion: - # we do not want to wait for completion to finish - return file, file_id, state_url - - completion_etag = None - async for attempt in AsyncRetrying( - reraise=True, - wait=wait_fixed(1), - stop=stop_after_delay(60), - retry=retry_if_exception_type(ValueError), - ): - with attempt: - print( - f"--> checking for upload {state_url=}, {attempt.retry_state.attempt_number}..." - ) - response = await client.post(f"{state_url}") - response.raise_for_status() - data, error = await assert_status(response, status.HTTP_200_OK) - assert not error - assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) - if future.state == FileUploadCompleteState.NOK: - msg = f"{data=}" - raise ValueError(msg) - assert future.state == FileUploadCompleteState.OK - assert future.e_tag is not None - completion_etag = future.e_tag - print( - f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]" - ) - - print(f"--> completed upload in {perf_counter() - start}") + with log_context(logging.INFO, f"completing upload of {file=}"): + response = await client.post( + f"{complete_url}", + json=jsonable_encoder(FileUploadCompletionBody(parts=part_to_etag)), + ) + response.raise_for_status() + data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + assert not error + assert data + file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + state_url = URL(file_upload_complete_response.links.state).relative() + + completion_etag = None + async for attempt in AsyncRetrying( + reraise=True, + wait=wait_fixed(1), + stop=stop_after_delay(60), + retry=retry_if_exception_type(ValueError), + ): + with attempt, log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx: + response = await client.post(f"{state_url}") + response.raise_for_status() + data, error = await assert_status(response, status.HTTP_200_OK) + assert not error + assert data + future = FileUploadCompleteFutureResponse.parse_obj(data) + if future.state == FileUploadCompleteState.NOK: + msg = f"{data=}" + raise ValueError(msg) + assert future.state == FileUploadCompleteState.OK + assert future.e_tag is not None + completion_etag = future.e_tag + ctx.logger.info( + "%s", + f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]", + ) # check the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( @@ -475,8 +395,8 @@ async def _uploader( expected_sha256_checksum=sha256_checksum, ) # check the file is in S3 for real - s3_metadata = await storage_s3_client.get_file_metadata( - storage_s3_bucket, file_id + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=file_id ) assert s3_metadata.size == file_size assert s3_metadata.last_modified @@ -503,3 +423,184 @@ def _creator( return SimcoreS3FileID(f"{clean_path}") return _creator + + +@pytest.fixture +async def with_versioning_enabled( + s3_client: S3Client, + storage_s3_bucket: S3BucketName, +) -> None: + await s3_client.put_bucket_versioning( + Bucket=storage_s3_bucket, + VersioningConfiguration={"MFADelete": "Disabled", "Status": "Enabled"}, + ) + + +@pytest.fixture +async def create_empty_directory( + create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], + create_upload_file_link_v2: Callable[..., 
Awaitable[FileUploadSchema]], + client: TestClient, + project_id: ProjectID, + node_id: NodeID, +) -> Callable[..., Awaitable[FileUploadSchema]]: + async def _directory_creator(dir_name: str): + # creating an empty directory goes through the same procedure as uploading a multipart file + # done by using 3 calls: + # 1. create the link as a directory + # 2. call complete_upload link + # 3. call file_upload_complete_response until it replies OK + + directory_file_id = create_simcore_file_id(project_id, node_id, dir_name) + directory_file_upload: FileUploadSchema = await create_upload_file_link_v2( + directory_file_id, link_type="s3", is_directory="true", file_size=-1 + ) + # always returns a v2 link when dealing with directories + assert isinstance(directory_file_upload, FileUploadSchema) + assert len(directory_file_upload.urls) == 1 + + # complete the upload + complete_url = URL(directory_file_upload.links.complete_upload).relative() + response = await client.post( + f"{complete_url}", + json=jsonable_encoder(FileUploadCompletionBody(parts=[])), + ) + response.raise_for_status() + data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + assert not error + assert data + file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + state_url = URL(file_upload_complete_response.links.state).relative() + + # check that it finished updating + assert client.app + client.app[UPLOAD_TASKS_KEY].clear() + # now check for the completion + async for attempt in AsyncRetrying( + reraise=True, + wait=wait_fixed(1), + stop=stop_after_delay(60), + retry=retry_if_exception_type(AssertionError), + ): + with attempt, log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx: + response = await client.post(f"{state_url}") + data, error = await assert_status(response, status.HTTP_200_OK) + assert not error + assert data + future = FileUploadCompleteFutureResponse.parse_obj(data) + assert future.state == FileUploadCompleteState.OK + assert future.e_tag is None + ctx.logger.info( + "%s", + f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]", + ) + + return directory_file_upload + + return _directory_creator + + +@pytest.fixture +async def populate_directory( + create_file_of_size: Callable[[ByteSize, str | None], Path], + storage_s3_client: SimcoreS3API, + storage_s3_bucket: S3BucketName, + project_id: ProjectID, + node_id: NodeID, +) -> Callable[..., Awaitable[None]]: + async def _create_content( + file_size_in_dir: ByteSize, + dir_name: str, + subdir_count: int = 4, + file_count: int = 5, + ) -> None: + file = create_file_of_size(file_size_in_dir, "some_file") + + async def _create_file(s: int, f: int): + file_name = f"{dir_name}/sub-dir-{s}/file-{f}" + clean_path = Path(f"{project_id}/{node_id}/{file_name}") + await storage_s3_client.upload_file( + bucket=storage_s3_bucket, + file=file, + object_key=SimcoreS3FileID(f"{clean_path}"), + bytes_transfered_cb=None, + ) + + tasks = [ + _create_file(s, f) for f in range(file_count) for s in range(subdir_count) + ] + + await asyncio.gather(*tasks) + + file.unlink() + + return _create_content + + +@pytest.fixture +async def delete_directory( + client: TestClient, + storage_s3_client: SimcoreS3API, + storage_s3_bucket: S3BucketName, + user_id: UserID, + location_id: LocationID, +) -> Callable[..., Awaitable[None]]: + async def _dir_remover(directory_file_upload: FileUploadSchema) -> None: + assert 
directory_file_upload.urls[0].path + directory_file_id = directory_file_upload.urls[0].path.strip("/") + assert client.app + delete_url = ( + client.app.router["delete_file"] + .url_for( + location_id=f"{location_id}", + file_id=urllib.parse.quote(directory_file_id, safe=""), + ) + .with_query(user_id=user_id) + ) + response = await client.delete(f"{delete_url}") + await assert_status(response, status.HTTP_204_NO_CONTENT) + + # NOTE: ensures no more files are left in the directory, + # even if one file is left this will detect it + list_files_metadata_url = ( + client.app.router["get_files_metadata"] + .url_for(location_id=f"{location_id}") + .with_query(user_id=user_id, uuid_filter=directory_file_id) + ) + response = await client.get(f"{list_files_metadata_url}") + data, error = await assert_status(response, status.HTTP_200_OK) + assert error is None + assert data == [] + + return _dir_remover + + +@pytest.fixture +async def create_directory_with_files( + create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], + populate_directory: Callable[..., Awaitable[None]], + delete_directory: Callable[..., Awaitable[None]], +) -> Callable[..., AbstractAsyncContextManager[FileUploadSchema]]: + @asynccontextmanager + async def _create_context( + dir_name: str, file_size_in_dir: ByteSize, subdir_count: int, file_count: int + ) -> AsyncIterator[FileUploadSchema]: + directory_file_upload: FileUploadSchema = await create_empty_directory( + dir_name=dir_name + ) + + await populate_directory( + file_size_in_dir=file_size_in_dir, + dir_name=dir_name, + subdir_count=subdir_count, + file_count=file_count, + ) + + yield directory_file_upload + + await delete_directory(directory_file_upload=directory_file_upload) + + return _create_context diff --git a/services/storage/tests/fixtures/data_models.py b/services/storage/tests/fixtures/data_models.py index dc5fbadf9e1..197e877ec7a 100644 --- a/services/storage/tests/fixtures/data_models.py +++ b/services/storage/tests/fixtures/data_models.py @@ -20,14 +20,14 @@ from models_library.users import UserID from pydantic import ByteSize, parse_obj_as from pytest_simcore.helpers.faker_factories import random_project, random_user -from servicelib.utils import logged_gather +from servicelib.utils import limited_gather from simcore_postgres_database.storage_models import projects, users from ..helpers.utils import get_updated_project @asynccontextmanager -async def user_context(aiopg_engine: Engine, *, name: str) -> UserID: +async def user_context(aiopg_engine: Engine, *, name: str) -> AsyncIterator[UserID]: # inject a random user in db # NOTE: Ideally this (and next fixture) should be done via webserver API but at this point @@ -36,7 +36,6 @@ async def user_context(aiopg_engine: Engine, *, name: str) -> UserID: # pylint: disable=no-value-for-parameter stmt = users.insert().values(**random_user(name=name)).returning(users.c.id) - print(str(stmt)) async with aiopg_engine.acquire() as conn: result = await conn.execute(stmt) row = await result.fetchone() @@ -146,7 +145,7 @@ async def _() -> None: @pytest.fixture async def create_project_node( user_id: UserID, aiopg_engine: Engine, faker: Faker -) -> AsyncIterator[Callable[..., Awaitable[NodeID]]]: +) -> Callable[..., Awaitable[NodeID]]: async def _creator( project_id: ProjectID, node_id: NodeID | None = None, **kwargs ) -> NodeID: @@ -159,7 +158,7 @@ async def _creator( row = await result.fetchone() assert row project_workbench: dict[str, Any] = row[projects.c.workbench] - new_node_id = node_id or 
NodeID(faker.uuid4()) + new_node_id = node_id or NodeID(f"{faker.uuid4()}") node_data = { "key": "simcore/services/frontend/file-picker", "version": "1.0.0", @@ -174,7 +173,7 @@ async def _creator( ) return new_node_id - yield _creator + return _creator @pytest.fixture @@ -227,7 +226,7 @@ async def _creator( upload_tasks: deque[Awaitable] = deque() for _node_index in range(num_nodes): # NOTE: we put some more outputs in there to simulate a real case better - new_node_id = NodeID(faker.uuid4()) + new_node_id = NodeID(f"{faker.uuid4()}") output3_file_id = create_simcore_file_id( ProjectID(project["uuid"]), new_node_id, @@ -247,9 +246,9 @@ async def _creator( # upload the output 3 and some random other files at the root of each node src_projects_list[src_node_id] = {} - checksum: SHA256Str = choice(file_checksums) + checksum: SHA256Str = choice(file_checksums) # noqa: S311 src_file, _ = await upload_file( - file_size=choice(file_sizes), + file_size=choice(file_sizes), # noqa: S311 file_name=Path(output3_file_id).name, file_id=output3_file_id, sha256_checksum=checksum, @@ -264,9 +263,9 @@ async def _upload_file_and_update_project(project, src_node_id): src_file_uuid = create_simcore_file_id( ProjectID(project["uuid"]), src_node_id, src_file_name, None ) - checksum: SHA256Str = choice(file_checksums) + checksum: SHA256Str = choice(file_checksums) # noqa: S311 src_file, _ = await upload_file( - file_size=choice(file_sizes), + file_size=choice(file_sizes), # noqa: S311 file_name=src_file_name, file_id=src_file_uuid, sha256_checksum=checksum, @@ -280,10 +279,10 @@ async def _upload_file_and_update_project(project, src_node_id): upload_tasks.extend( [ _upload_file_and_update_project(project, src_node_id) - for _ in range(randint(0, 3)) + for _ in range(randint(0, 3)) # noqa: S311 ] ) - await logged_gather(*upload_tasks, max_concurrency=2) + await limited_gather(*upload_tasks, limit=10) project = await get_updated_project(aiopg_engine, project["uuid"]) return project, src_projects_list diff --git a/services/storage/tests/helpers/utils_file_meta_data.py b/services/storage/tests/helpers/utils_file_meta_data.py index 6809b6dc0d6..f6b133bbdda 100644 --- a/services/storage/tests/helpers/utils_file_meta_data.py +++ b/services/storage/tests/helpers/utils_file_meta_data.py @@ -1,8 +1,8 @@ from aiopg.sa.engine import Engine +from aws_library.s3 import UploadID from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID from simcore_postgres_database.storage_models import file_meta_data -from simcore_service_storage.s3_client import UploadID async def assert_file_meta_data_in_db( @@ -15,7 +15,7 @@ async def assert_file_meta_data_in_db( expected_upload_expiration_date: bool | None, expected_sha256_checksum: SHA256Str | None, ) -> UploadID | None: - if expected_entry_exists and expected_file_size == None: + if expected_entry_exists and expected_file_size is None: assert True, "Invalid usage of assertion, expected_file_size cannot be None" async with aiopg_engine.acquire() as conn: diff --git a/services/storage/tests/helpers/utils_project.py b/services/storage/tests/helpers/utils_project.py index 85044a54b96..b6519c4e82c 100644 --- a/services/storage/tests/helpers/utils_project.py +++ b/services/storage/tests/helpers/utils_project.py @@ -21,7 +21,7 @@ def _create_new_node_uuid(old_uuid: NodeIDStr) -> NodeIDStr: return NodeIDStr(uuidlib.uuid5(project_copy_uuid, old_uuid)) nodes_map = {} - for node_uuid in project.get("workbench", {}).keys(): + for node_uuid in 
project.get("workbench", {}): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) def _replace_uuids(node): diff --git a/services/storage/tests/unit/conftest.py b/services/storage/tests/unit/conftest.py deleted file mode 100644 index 222c9913234..00000000000 --- a/services/storage/tests/unit/conftest.py +++ /dev/null @@ -1,216 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -import asyncio -import urllib.parse -from collections import deque -from collections.abc import AsyncIterator, Awaitable, Callable -from contextlib import AbstractAsyncContextManager, asynccontextmanager -from pathlib import Path - -import openapi_core -import pytest -from aiohttp.test_utils import TestClient -from models_library.api_schemas_storage import ( - FileUploadCompleteFutureResponse, - FileUploadCompleteResponse, - FileUploadCompleteState, - FileUploadCompletionBody, - FileUploadSchema, -) -from models_library.projects import ProjectID -from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import ByteSize -from pytest_simcore.helpers.assert_checks import assert_status -from servicelib.aiohttp import status -from simcore_service_storage._meta import API_VTAG -from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY -from simcore_service_storage.models import S3BucketName -from simcore_service_storage.resources import storage_resources -from simcore_service_storage.s3_client import StorageS3Client -from tenacity._asyncio import AsyncRetrying -from tenacity.retry import retry_if_exception_type -from tenacity.stop import stop_after_delay -from tenacity.wait import wait_fixed -from yarl import URL - - -@pytest.fixture(scope="module") -def openapi_specs() -> openapi_core.Spec: - spec_path: Path = storage_resources.get_path(f"api/{API_VTAG}/openapi.yaml") - return openapi_core.Spec.from_path(spec_path) - - -@pytest.fixture -async def create_empty_directory( - create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], - create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], - client: TestClient, - project_id: ProjectID, - node_id: NodeID, -) -> Callable[..., Awaitable[FileUploadSchema]]: - async def _directory_creator(dir_name: str): - # creating an empty directory goes through the same procedure as uploading a multipart file - # done by using 3 calls: - # 1. create the link as a directory - # 2. call complete_upload link - # 3. 
call file_upload_complete_response until it replies OK - - directory_file_id = create_simcore_file_id(project_id, node_id, dir_name) - directory_file_upload: FileUploadSchema = await create_upload_file_link_v2( - directory_file_id, link_type="s3", is_directory="true", file_size=-1 - ) - # always returns a v2 link when dealing with directories - assert isinstance(directory_file_upload, FileUploadSchema) - assert len(directory_file_upload.urls) == 1 - - # complete the upload - complete_url = URL(directory_file_upload.links.complete_upload).relative() - response = await client.post( - f"{complete_url}", - json=jsonable_encoder(FileUploadCompletionBody(parts=[])), - ) - response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) - assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) - state_url = URL(file_upload_complete_response.links.state).relative() - - # check that it finished updating - assert client.app - client.app[UPLOAD_TASKS_KEY].clear() - # now check for the completion - async for attempt in AsyncRetrying( - reraise=True, - wait=wait_fixed(1), - stop=stop_after_delay(60), - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - print( - f"--> checking for upload {state_url=}, {attempt.retry_state.attempt_number}..." - ) - response = await client.post(f"{state_url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert not error - assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) - assert future.state == FileUploadCompleteState.OK - assert future.e_tag is None - print( - f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]" - ) - - return directory_file_upload - - return _directory_creator - - -@pytest.fixture -async def populate_directory( - create_file_of_size: Callable[[ByteSize, str | None], Path], - storage_s3_client: StorageS3Client, - storage_s3_bucket: S3BucketName, - project_id: ProjectID, - node_id: NodeID, -) -> Callable[..., Awaitable[None]]: - async def _create_content( - file_size_in_dir: ByteSize, - dir_name: str, - subdir_count: int = 4, - file_count: int = 5, - ) -> None: - file = create_file_of_size(file_size_in_dir, "some_file") - - async def _create_file(s: int, f: int): - file_name = f"{dir_name}/sub-dir-{s}/file-{f}" - clean_path = Path(f"{project_id}/{node_id}/{file_name}") - await storage_s3_client.upload_file( - storage_s3_bucket, file, SimcoreS3FileID(f"{clean_path}"), None - ) - - tasks: deque = deque() - for s in range(subdir_count): - for f in range(file_count): - tasks.append(_create_file(s, f)) - - await asyncio.gather(*tasks) - - file.unlink() - - return _create_content - - -@pytest.fixture -async def delete_directory( - client: TestClient, - storage_s3_client: StorageS3Client, - storage_s3_bucket: S3BucketName, - user_id: UserID, - location_id: LocationID, -) -> Callable[..., Awaitable[None]]: - async def _dir_remover(directory_file_upload: FileUploadSchema) -> None: - assert directory_file_upload.urls[0].path - directory_file_id = directory_file_upload.urls[0].path.strip("/") - assert client.app - delete_url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(directory_file_id, safe=""), - ) - .with_query(user_id=user_id) - ) - response = await client.delete(f"{delete_url}") - await assert_status(response, status.HTTP_204_NO_CONTENT) - - # NOTE: ensures no more files are left in the directory, - # 
even if one file is left this will detect it - files = await storage_s3_client.list_files( - bucket=storage_s3_bucket, prefix=directory_file_id - ) - assert len(files) == 0 - - return _dir_remover - - -@pytest.fixture -async def create_directory_with_files( - create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], - populate_directory: Callable[..., Awaitable[None]], - delete_directory: Callable[..., Awaitable[None]], -) -> Callable[..., AbstractAsyncContextManager[FileUploadSchema]]: - @asynccontextmanager - async def _create_context( - dir_name: str, file_size_in_dir: ByteSize, subdir_count: int, file_count: int - ) -> AsyncIterator[FileUploadSchema]: - directory_file_upload: FileUploadSchema = await create_empty_directory( - dir_name=dir_name - ) - - await populate_directory( - file_size_in_dir=file_size_in_dir, - dir_name=dir_name, - subdir_count=subdir_count, - file_count=file_count, - ) - - yield directory_file_upload - - await delete_directory(directory_file_upload=directory_file_upload) - - return _create_context - - -@pytest.fixture -async def with_versioning_enabled( - storage_s3_client: StorageS3Client, storage_s3_bucket: str -) -> None: - await storage_s3_client.client.put_bucket_versioning( - Bucket=storage_s3_bucket, - VersioningConfiguration={"MFADelete": "Disabled", "Status": "Enabled"}, - ) diff --git a/services/storage/tests/unit/test__openapi_specs.py b/services/storage/tests/unit/test__openapi_specs.py index 8e384e5a0d7..5e35ff7a7cf 100644 --- a/services/storage/tests/unit/test__openapi_specs.py +++ b/services/storage/tests/unit/test__openapi_specs.py @@ -3,8 +3,10 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable +from pathlib import Path from typing import NamedTuple +import openapi_core import pytest import simcore_service_storage.application from aiohttp import web @@ -12,6 +14,8 @@ from openapi_core import Spec as OpenApiSpecs from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_storage._meta import API_VTAG +from simcore_service_storage.resources import storage_resources from simcore_service_storage.settings import Settings @@ -43,10 +47,15 @@ def app(app_environment: EnvVarsDict) -> web.Application: # - all plugins are setup but app is NOT started (i.e events are not triggered) # settings = Settings.create_from_envs() - print(settings.json(indent=1)) return simcore_service_storage.application.create(settings) +@pytest.fixture(scope="module") +def openapi_specs() -> openapi_core.Spec: + spec_path: Path = storage_resources.get_path(f"api/{API_VTAG}/openapi.yaml") + return openapi_core.Spec.from_path(spec_path) + + @pytest.fixture def expected_openapi_entrypoints(openapi_specs: OpenApiSpecs) -> set[Entrypoint]: entrypoints: set[Entrypoint] = set() diff --git a/services/storage/tests/unit/test_dsm.py b/services/storage/tests/unit/test_dsm.py index 4d6da5f45f5..ae07ec94c9b 100644 --- a/services/storage/tests/unit/test_dsm.py +++ b/services/storage/tests/unit/test_dsm.py @@ -1,20 +1,20 @@ # pylint: disable=unused-variable # pylint: disable=unused-argument # pylint: disable=redefined-outer-name +# pylint: disable=protected-access -import asyncio +from collections.abc import Awaitable, Callable from pathlib import Path -from typing import Awaitable, Callable, Optional import pytest from faker import Faker from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, 
parse_obj_as -from servicelib.utils import logged_gather +from servicelib.utils import limited_gather from simcore_service_storage.models import FileMetaData, S3BucketName -from simcore_service_storage.s3_client import StorageS3Client from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from types_aiobotocore_s3 import S3Client pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] @@ -25,19 +25,20 @@ async def dsm_mockup_complete_db( simcore_s3_dsm: SimcoreS3DataManager, user_id: UserID, upload_file: Callable[ - [ByteSize, str, Optional[SimcoreS3FileID]], + [ByteSize, str, SimcoreS3FileID | None], Awaitable[tuple[Path, SimcoreS3FileID]], ], cleanup_user_projects_file_metadata: None, faker: Faker, ) -> tuple[FileMetaData, FileMetaData]: file_size = parse_obj_as(ByteSize, "10Mib") - uploaded_files = await logged_gather( + uploaded_files = await limited_gather( *(upload_file(file_size, faker.file_name(), None) for _ in range(2)), - max_concurrency=2, + limit=2, ) - fmds = await asyncio.gather( - *(simcore_s3_dsm.get_file(user_id, file_id) for _, file_id in uploaded_files) + fmds = await limited_gather( + *(simcore_s3_dsm.get_file(user_id, file_id) for _, file_id in uploaded_files), + limit=0, ) assert len(fmds) == 2 @@ -47,8 +48,8 @@ async def dsm_mockup_complete_db( async def test_sync_table_meta_data( simcore_s3_dsm: SimcoreS3DataManager, dsm_mockup_complete_db: tuple[FileMetaData, FileMetaData], - storage_s3_client: StorageS3Client, storage_s3_bucket: S3BucketName, + s3_client: S3Client, ): expected_removed_files = [] # the list should be empty on start @@ -58,9 +59,7 @@ async def test_sync_table_meta_data( # now remove the files for file_entry in dsm_mockup_complete_db: s3_key = f"{file_entry.project_id}/{file_entry.node_id}/{file_entry.file_name}" - await storage_s3_client.client.delete_object( - Bucket=storage_s3_bucket, Key=s3_key - ) + await s3_client.delete_object(Bucket=storage_s3_bucket, Key=s3_key) expected_removed_files.append(s3_key) # the list should now contain the removed entries diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index 40eac9dba06..fd759ca1bf2 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -16,6 +16,7 @@ import arrow import pytest from aiopg.sa.engine import Engine +from aws_library.s3 import MultiPartUploadLinks, SimcoreS3API from faker import Faker from models_library.api_schemas_storage import LinkType from models_library.basic_types import SHA256Str @@ -29,13 +30,7 @@ FileAccessRightError, FileMetaDataNotFoundError, ) -from simcore_service_storage.models import ( - FileMetaData, - MultiPartUploadLinks, - S3BucketName, - UploadID, -) -from simcore_service_storage.s3_client import StorageS3Client +from simcore_service_storage.models import FileMetaData, S3BucketName, UploadID from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager pytest_simcore_core_services_selection = ["postgres"] @@ -81,7 +76,7 @@ async def test_regression_collaborator_creates_file_upload_links( link_type: LinkType, file_size: ByteSize, is_directory: bool, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, checksum: SHA256Str | None, collaborator_id: UserID, @@ -147,7 +142,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( link_type: LinkType, file_size: ByteSize, is_directory: 
bool, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, checksum: SHA256Str | None, ): @@ -172,7 +167,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( # ensure we have now an upload id IF the link was presigned ONLY # NOTE: S3 uploads might create multipart uploads out of storage!! ongoing_uploads = await storage_s3_client.list_ongoing_multipart_uploads( - storage_s3_bucket + bucket=storage_s3_bucket ) if fmd.upload_id and link_type == LinkType.PRESIGNED: assert len(ongoing_uploads) == 1 @@ -185,7 +180,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( fmd_after_clean = await db_file_meta_data.get(conn, file_or_directory_id) assert fmd_after_clean == fmd assert ( - await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket) + await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) == ongoing_uploads ) @@ -204,7 +199,9 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( with pytest.raises(FileMetaDataNotFoundError): await db_file_meta_data.get(conn, simcore_file_id) # since there is no entry in the db, this upload shall be cleaned up - assert not await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket) + assert not await storage_s3_client.list_ongoing_multipart_uploads( + bucket=storage_s3_bucket + ) @pytest.mark.parametrize( @@ -225,7 +222,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi user_id: UserID, link_type: LinkType, file_size: ByteSize, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, with_versioning_enabled: None, checksum: SHA256Str | None, @@ -259,7 +256,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi # ensure we have now an upload id IF the link was presigned ONLY # NOTE: S3 uploads might create multipart uploads out of storage!! 
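    # NOTE: (illustration only, not part of the applied patch) these tests are
    # being ported from the removed `StorageS3Client` to aws_library's
    # `SimcoreS3API`, whose methods are called with keyword arguments, e.g.
    #
    #     before: await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
    #     after:  await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket)
    #
    # the same migration renames file-oriented helpers to object-oriented ones:
    # get_file_metadata -> get_object_metadata, file_exists -> object_exists.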
     ongoing_uploads = await storage_s3_client.list_ongoing_multipart_uploads(
-        storage_s3_bucket
+        bucket=storage_s3_bucket
     )
     if fmd.upload_id and link_type == LinkType.PRESIGNED:
         assert len(ongoing_uploads) == 1
@@ -272,7 +269,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi
         fmd_after_clean = await db_file_meta_data.get(conn, file_id)
         assert fmd_after_clean == fmd
     assert (
-        await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
+        await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket)
         == ongoing_uploads
     )
@@ -293,10 +290,14 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi
         exclude={"created_at"}
     )
     # check the S3 content is the old file
-    s3_meta_data = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
+    s3_meta_data = await storage_s3_client.get_object_metadata(
+        bucket=storage_s3_bucket, object_key=file_id
+    )
     assert s3_meta_data.size == file_size
     # since there is no entry in the db, this upload shall be cleaned up
-    assert not await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
+    assert not await storage_s3_client.list_ongoing_multipart_uploads(
+        bucket=storage_s3_bucket
+    )
@@ -315,7 +316,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation
     user_id: UserID,
     file_size: ByteSize,
     is_directory: bool,
-    storage_s3_client: StorageS3Client,
+    storage_s3_client: SimcoreS3API,
     storage_s3_bucket: S3BucketName,
     checksum: SHA256Str | None,
 ):
@@ -360,9 +361,9 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation
     upload_links_list: list[MultiPartUploadLinks] = [
         await storage_s3_client.create_multipart_upload_links(
-            storage_s3_bucket,
-            file_id,
-            file_size,
+            bucket=storage_s3_bucket,
+            object_key=file_id,
+            file_size=file_size,
             expiration_secs=3600,
             sha256_checksum=parse_obj_as(SHA256Str, _faker.sha256()),
         )
@@ -376,7 +377,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation
     # ensure we have now an upload id
     all_ongoing_uploads: list[
         tuple[UploadID, SimcoreS3FileID]
-    ] = await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
+    ] = await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket)
     assert len(all_ongoing_uploads) == len(file_ids_to_upload)

     for ongoing_upload_id, ongoing_file_id in all_ongoing_uploads:
@@ -388,7 +389,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation
     # ensure we STILL have the same upload id
     all_ongoing_uploads_after_clean = (
-        await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
+        await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket)
     )
     assert len(all_ongoing_uploads_after_clean) == len(file_ids_to_upload)
     assert all_ongoing_uploads == all_ongoing_uploads_after_clean
diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py
index 52e3c3ed348..92408a33136 100644
--- a/services/storage/tests/unit/test_handlers_datasets.py
+++ b/services/storage/tests/unit/test_handlers_datasets.py
@@ -5,8 +5,8 @@
 # pylint:disable=no-name-in-module

+from collections.abc import Awaitable, Callable
 from pathlib import Path
-from typing import Awaitable, Callable

 import pytest
 from aiohttp.test_utils import TestClient
@@ -18,9 +18,11 @@
 from pydantic import ByteSize, parse_obj_as
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
-from pytest_simcore.helpers.parametrizations import byte_size_ids
+from pytest_simcore.helpers.parametrizations import (
+    byte_size_ids,
+    parametrized_file_size,
+)
 from servicelib.aiohttp import status
-from tests.helpers.file_utils import parametrized_file_size

 pytest_simcore_core_services_selection = ["postgres"]
 pytest_simcore_ops_services_selection = ["adminer"]
diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py
index ea966634770..a45df04e0b5 100644
--- a/services/storage/tests/unit/test_handlers_files.py
+++ b/services/storage/tests/unit/test_handlers_files.py
@@ -3,27 +3,27 @@
 # pylint:disable=redefined-outer-name
 # pylint:disable=too-many-arguments
 # pylint:disable=no-name-in-module
-
+# pylint:disable=protected-access
 import asyncio
 import filecmp
 import json
+import logging
 import urllib.parse
 from collections.abc import Awaitable, Callable
 from contextlib import AbstractAsyncContextManager
 from dataclasses import dataclass
-from http import HTTPStatus
 from pathlib import Path
 from random import choice
-from time import perf_counter
-from typing import Any, Literal
+from typing import Any, AsyncIterator, Literal
 from uuid import uuid4

 import pytest
 from aiohttp import ClientSession
 from aiohttp.test_utils import TestClient
 from aiopg.sa import Engine
-from aws_library.s3.errors import S3KeyNotFoundError
+from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API
+from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from faker import Faker
 from models_library.api_schemas_storage import (
     FileMetaDataGet,
@@ -45,21 +45,19 @@
 from pydantic import AnyHttpUrl, ByteSize, HttpUrl, parse_obj_as
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.logging import log_context
 from pytest_simcore.helpers.parametrizations import byte_size_ids
+from pytest_simcore.helpers.s3 import upload_file_part, upload_file_to_presigned_link
 from servicelib.aiohttp import status
-from simcore_service_storage.constants import (
-    MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
-    S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID,
-)
+from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID
 from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY
 from simcore_service_storage.models import S3BucketName, UploadID
-from simcore_service_storage.s3_client import StorageS3Client
 from tenacity._asyncio import AsyncRetrying
 from tenacity.retry import retry_if_exception_type
 from tenacity.stop import stop_after_delay
 from tenacity.wait import wait_fixed
-from tests.helpers.file_utils import upload_file_part, upload_file_to_presigned_link
 from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db
+from types_aiobotocore_s3 import S3Client
 from yarl import URL

 pytest_simcore_core_services_selection = ["postgres"]
@@ -77,15 +75,14 @@
 async def assert_multipart_uploads_in_progress(
-    storage_s3_client: StorageS3Client,
+    storage_s3_client: SimcoreS3API,
     storage_s3_bucket: S3BucketName,
-    file_id: SimcoreS3FileID,
     *,
     expected_upload_ids: list[str] | None,
 ):
     """if None is passed, then it checks that no uploads are in progress"""
     list_uploads: list[
-        tuple[UploadID, SimcoreS3FileID]
+        tuple[UploadID, S3ObjectKey]
     ] = await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket)
     if expected_upload_ids is None:
         assert (
@@ -136,7 +133,7 @@ class SingleLinkParam:
     ],
 )
 async def test_create_upload_file_with_file_size_0_returns_single_link(
-    storage_s3_client: StorageS3Client,
+    storage_s3_client: SimcoreS3API,
     storage_s3_bucket: S3BucketName,
     simcore_file_id: SimcoreS3FileID,
     single_link_param: SingleLinkParam,
@@ -182,11 +179,56 @@ async def test_create_upload_file_with_file_size_0_returns_single_link(
     await assert_multipart_uploads_in_progress(
         storage_s3_client,
         storage_s3_bucket,
-        simcore_file_id,
         expected_upload_ids=None,
     )


+@pytest.fixture
+async def create_upload_file_link_v1(
+    client: TestClient, user_id: UserID, location_id: LocationID
+) -> AsyncIterator[Callable[..., Awaitable[PresignedLink]]]:
+    file_params: list[tuple[UserID, int, SimcoreS3FileID]] = []
+
+    async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLink:
+        assert client.app
+        url = (
+            client.app.router["upload_file"]
+            .url_for(
+                location_id=f"{location_id}",
+                file_id=urllib.parse.quote(file_id, safe=""),
+            )
+            .with_query(**query_kwargs, user_id=user_id)
+        )
+        assert (
+            "file_size" not in url.query
+        ), "v1 call to upload_file MUST NOT contain file_size field, this is reserved for v2 call"
+        response = await client.put(f"{url}")
+        data, error = await assert_status(response, status.HTTP_200_OK)
+        assert not error
+        assert data
+        received_file_upload_link = parse_obj_as(PresignedLink, data)
+        assert received_file_upload_link
+        file_params.append((user_id, location_id, file_id))
+        return received_file_upload_link
+
+    yield _link_creator
+
+    # cleanup
+    assert client.app
+    clean_tasks = []
+    for u_id, loc_id, file_id in file_params:
+        url = (
+            client.app.router["delete_file"]
+            .url_for(
+                location_id=f"{loc_id}",
+                file_id=urllib.parse.quote(file_id, safe=""),
+            )
+            .with_query(user_id=u_id)
+        )
+        clean_tasks.append(client.delete(f"{url}"))
+    await asyncio.gather(*clean_tasks)
+
+
 @pytest.mark.parametrize(
     "single_link_param",
     [
@@ -217,7 +259,7 @@
     ],
 )
 async def test_create_upload_file_with_no_file_size_query_returns_v1_structure(
-    storage_s3_client: StorageS3Client,
+    storage_s3_client: SimcoreS3API,
     storage_s3_bucket: S3BucketName,
     simcore_file_id: SimcoreS3FileID,
     single_link_param: SingleLinkParam,
@@ -252,7 +294,6 @@ async def test_create_upload_file_with_no_file_size_query_returns_v1_structure(
     await assert_multipart_uploads_in_progress(
         storage_s3_client,
         storage_s3_bucket,
-        simcore_file_id,
         expected_upload_ids=None,
     )

@@ -261,7 +302,7 @@
 class MultiPartParam:
     link_type: LinkType
     file_size: ByteSize
-    expected_response: HTTPStatus
+    expected_response: int
     expected_num_links: int
     expected_chunk_size: ByteSize

@@ -322,7 +363,7 @@ class MultiPartParam:
     ],
 )
 async def test_create_upload_file_presigned_with_file_size_returns_multipart_links_if_bigger_than_99MiB(  # noqa: N802
-    storage_s3_client: StorageS3Client,
+    storage_s3_client: SimcoreS3API,
     storage_s3_bucket: S3BucketName,
     simcore_file_id: SimcoreS3FileID,
     test_param: MultiPartParam,
@@ -360,7 +401,6 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin
     await assert_multipart_uploads_in_progress(
         storage_s3_client,
         storage_s3_bucket,
-        simcore_file_id,
         expected_upload_ids=([upload_id] if upload_id else None),
     )

@@ -376,7 +416,7 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin
 async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3(
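    # NOTE: (illustration only, not part of the applied patch) the new
    # `create_upload_file_link_v1` fixture above follows the factory-fixture
    # pattern used throughout this suite: yield a creator, record everything it
    # creates, and tear it down afterwards, i.e. roughly
    #
    #     yield _link_creator                        # the test body runs here
    #     for u_id, loc_id, file_id in file_params:  # teardown
    #         ...  # resolve the "delete_file" route and DELETE each link
    #
    # `create_empty_directory`/`delete_directory` earlier in this patch follow
    # the same create-then-clean-up shape.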
aiopg_engine: Engine, client: TestClient, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, with_versioning_enabled: None, simcore_file_id: SimcoreS3FileID, @@ -405,7 +445,6 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( await assert_multipart_uploads_in_progress( storage_s3_client, storage_s3_bucket, - simcore_file_id, expected_upload_ids=([upload_id] if upload_id else None), ) # delete/abort file upload @@ -427,7 +466,6 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( await assert_multipart_uploads_in_progress( storage_s3_client, storage_s3_bucket, - simcore_file_id, expected_upload_ids=None, ) @@ -445,7 +483,7 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( async def test_upload_same_file_uuid_aborts_previous_upload( aiopg_engine: Engine, client: TestClient, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, simcore_file_id: SimcoreS3FileID, link_type: LinkType, @@ -475,7 +513,6 @@ async def test_upload_same_file_uuid_aborts_previous_upload( await assert_multipart_uploads_in_progress( storage_s3_client, storage_s3_bucket, - simcore_file_id, expected_upload_ids=([upload_id] if upload_id else None), ) @@ -511,7 +548,6 @@ async def test_upload_same_file_uuid_aborts_previous_upload( await assert_multipart_uploads_in_progress( storage_s3_client, storage_s3_bucket, - simcore_file_id, expected_upload_ids=([new_upload_id] if new_upload_id else None), ) @@ -558,7 +594,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w create_file_of_size: Callable[[ByteSize, str | None], Path], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, ): """what does that mean? @@ -599,10 +635,10 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w stop=stop_after_delay(60), retry=retry_if_exception_type(AssertionError), ): - with attempt: - print( - f"--> checking for upload {state_url=}, {attempt.retry_state.attempt_number}..." 
- ) + with attempt, log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx: response = await client.post(f"{state_url}") data, error = await assert_status(response, status.HTTP_200_OK) assert not error @@ -611,8 +647,9 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w assert future.state == FileUploadCompleteState.OK assert future.e_tag is not None completion_etag = future.e_tag - print( - f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]" + ctx.logger.info( + "%s", + f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]", ) # check the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( @@ -625,7 +662,9 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w expected_sha256_checksum=None, ) # check the file is in S3 for real - s3_metadata = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id) + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=file_id + ) assert s3_metadata.size == file_size assert s3_metadata.last_modified assert s3_metadata.e_tag == completion_etag @@ -633,7 +672,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w async def test_upload_of_single_presigned_link_lazily_update_database_on_get( aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, client: TestClient, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], @@ -643,6 +682,7 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( node_id: NodeID, faker: Faker, get_file_meta_data: Callable[..., Awaitable[FileMetaDataGet]], + s3_client: S3Client, ): assert client.app file_size = parse_obj_as(ByteSize, "500Mib") @@ -657,14 +697,14 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( assert file_upload_link # let's use the storage s3 internal client to upload with file.open("rb") as fp: - response = await storage_s3_client.client.put_object( + response = await s3_client.put_object( Bucket=storage_s3_bucket, Key=simcore_file_id, Body=fp ) assert "ETag" in response upload_e_tag = json.loads(response["ETag"]) # check the file is now on S3 - s3_metadata = await storage_s3_client.get_file_metadata( - storage_s3_bucket, simcore_file_id + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=simcore_file_id ) assert s3_metadata.size == file_size assert s3_metadata.last_modified @@ -676,7 +716,7 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( async def test_upload_real_file_with_s3_client( aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, client: TestClient, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], @@ -685,7 +725,7 @@ async def test_upload_real_file_with_s3_client( project_id: ProjectID, node_id: NodeID, faker: Faker, - get_file_meta_data: Callable[..., Awaitable[FileMetaDataGet]], + s3_client: S3Client, ): assert client.app file_size = parse_obj_as(ByteSize, "500Mib") @@ -699,14 +739,14 @@ async def test_upload_real_file_with_s3_client( ) # let's use the storage s3 internal client to upload with file.open("rb") as fp: - response = 
await storage_s3_client.client.put_object( + response = await s3_client.put_object( Bucket=storage_s3_bucket, Key=simcore_file_id, Body=fp ) assert "ETag" in response upload_e_tag = json.loads(response["ETag"]) # check the file is now on S3 - s3_metadata = await storage_s3_client.get_file_metadata( - storage_s3_bucket, simcore_file_id + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=simcore_file_id ) assert s3_metadata.size == file_size assert s3_metadata.last_modified @@ -714,43 +754,41 @@ async def test_upload_real_file_with_s3_client( # complete the upload complete_url = URL(file_upload_link.links.complete_upload).relative() - start = perf_counter() - print(f"--> completing upload of {file=}") - response = await client.post(f"{complete_url}", json={"parts": []}) - response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) - assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) - state_url = URL(file_upload_complete_response.links.state).relative() - completion_etag = None - async for attempt in AsyncRetrying( - reraise=True, - wait=wait_fixed(1), - stop=stop_after_delay(60), - retry=retry_if_exception_type(ValueError), - ): - with attempt: - print( - f"--> checking for upload {state_url=}, {attempt.retry_state.attempt_number}..." - ) - response = await client.post(f"{state_url}") - response.raise_for_status() - data, error = await assert_status(response, status.HTTP_200_OK) - assert not error - assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) - if future.state != FileUploadCompleteState.OK: - msg = f"{data=}" - raise ValueError(msg) - assert future.state == FileUploadCompleteState.OK - assert future.e_tag is not None - completion_etag = future.e_tag - print( - f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]" - ) - - print(f"--> completed upload in {perf_counter() - start}") + with log_context(logging.INFO, f"completing upload of {file=}"): + response = await client.post(f"{complete_url}", json={"parts": []}) + response.raise_for_status() + data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + assert not error + assert data + file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + state_url = URL(file_upload_complete_response.links.state).relative() + completion_etag = None + async for attempt in AsyncRetrying( + reraise=True, + wait=wait_fixed(1), + stop=stop_after_delay(60), + retry=retry_if_exception_type(ValueError), + ): + with attempt, log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx: + response = await client.post(f"{state_url}") + response.raise_for_status() + data, error = await assert_status(response, status.HTTP_200_OK) + assert not error + assert data + future = FileUploadCompleteFutureResponse.parse_obj(data) + if future.state != FileUploadCompleteState.OK: + msg = f"{data=}" + raise ValueError(msg) + assert future.state == FileUploadCompleteState.OK + assert future.e_tag is not None + completion_etag = future.e_tag + ctx.logger.info( + "%s", + f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]", + ) # check the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( @@ -763,8 +801,8 @@ async def test_upload_real_file_with_s3_client( 
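The hunks above all converge on the same completion-polling idiom: tenacity retries the state endpoint while pytest_simcore's log_context replaces the old print calls. A self-contained sketch of that idiom follows (illustration only, not part of the applied patch; `poll_upload_completion` is a name made up for this sketch, `client` is assumed to be the aiohttp TestClient fixture and `state_url` the relative state link returned by the complete-upload call):

import logging

from models_library.api_schemas_storage import (
    FileUploadCompleteFutureResponse,
    FileUploadCompleteState,
)
from pytest_simcore.helpers.logging import log_context
from tenacity._asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_fixed


async def poll_upload_completion(client, state_url) -> str:
    # poll the state endpoint once per second for up to 60 seconds; a NOK state
    # raises ValueError, which tenacity turns into another attempt
    async for attempt in AsyncRetrying(
        reraise=True,
        wait=wait_fixed(1),
        stop=stop_after_delay(60),
        retry=retry_if_exception_type(ValueError),
    ):
        with attempt, log_context(
            logging.INFO,
            f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}",
        ) as ctx:
            response = await client.post(f"{state_url}")
            response.raise_for_status()
            # storage replies with the usual {"data": ..., "error": ...} envelope
            payload = await response.json()
            future = FileUploadCompleteFutureResponse.parse_obj(payload["data"])
            if future.state == FileUploadCompleteState.NOK:
                msg = f"{payload=}"
                raise ValueError(msg)
            assert future.e_tag is not None
            ctx.logger.info(
                "done waiting [%s]", attempt.retry_state.retry_object.statistics
            )
            return future.e_tag
    msg = "unreachable: AsyncRetrying either returns or re-raises"
    raise RuntimeError(msg)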
expected_sha256_checksum=None, ) # check the file is in S3 for real - s3_metadata = await storage_s3_client.get_file_metadata( - storage_s3_bucket, simcore_file_id + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=simcore_file_id ) assert s3_metadata.size == file_size assert s3_metadata.last_modified @@ -779,7 +817,7 @@ async def test_upload_real_file_with_s3_client( async def test_upload_twice_and_fail_second_time_shall_keep_first_version( aiopg_engine: Engine, client: TestClient, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, with_versioning_enabled: None, file_size: ByteSize, @@ -841,8 +879,8 @@ async def test_upload_twice_and_fail_second_time_shall_keep_first_version( expected_sha256_checksum=None, ) # check the file is in S3 for real - s3_metadata = await storage_s3_client.get_file_metadata( - storage_s3_bucket, uploaded_file_id + s3_metadata = await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=uploaded_file_id ) assert s3_metadata.size == file_size @@ -872,14 +910,16 @@ async def test_download_file_no_file_was_uploaded( project_id: ProjectID, node_id: NodeID, user_id: UserID, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, ): assert client.app missing_file = parse_obj_as(SimcoreS3FileID, f"{project_id}/{node_id}/missing.file") assert ( - await storage_s3_client.file_exists(storage_s3_bucket, s3_object=missing_file) + await storage_s3_client.object_exists( + bucket=storage_s3_bucket, object_key=missing_file + ) is False ) @@ -903,7 +943,7 @@ async def test_download_file_1_to_1_with_file_meta_data( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], location_id: int, user_id: UserID, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, tmp_path: Path, faker: Faker, @@ -915,8 +955,8 @@ async def test_download_file_1_to_1_with_file_meta_data( file_size, "meta_data_entry_is_file.file" ) assert ( - await storage_s3_client.file_exists( - storage_s3_bucket, s3_object=uploaded_file_uuid + await storage_s3_client.object_exists( + bucket=storage_s3_bucket, object_key=uploaded_file_uuid ) is True ) @@ -947,7 +987,7 @@ async def test_download_file_from_inside_a_directory( user_id: UserID, create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, tmp_path: Path, faker: Faker, @@ -967,10 +1007,15 @@ async def test_download_file_from_inside_a_directory( s3_file_id = parse_obj_as(SimcoreS3FileID, f"{dir_path_in_s3}/{file_name}") await storage_s3_client.upload_file( - storage_s3_bucket, file_to_upload_in_dir, s3_file_id, None + bucket=storage_s3_bucket, + file=file_to_upload_in_dir, + object_key=s3_file_id, + bytes_transfered_cb=None, ) assert ( - await storage_s3_client.file_exists(storage_s3_bucket, s3_object=s3_file_id) + await storage_s3_client.object_exists( + bucket=storage_s3_bucket, object_key=s3_file_id + ) is True ) @@ -1030,7 +1075,7 @@ async def test_download_file_access_rights( client: TestClient, location_id: int, user_id: UserID, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, faker: Faker, ): @@ -1041,7 +1086,9 @@ async def test_download_file_access_rights( SimcoreS3FileID, 
f"{faker.uuid4()}/{faker.uuid4()}/project_id_is_missing" ) assert ( - await storage_s3_client.file_exists(storage_s3_bucket, s3_object=missing_file) + await storage_s3_client.object_exists( + bucket=storage_s3_bucket, object_key=missing_file + ) is False ) @@ -1068,7 +1115,7 @@ async def test_download_file_access_rights( ) async def test_delete_file( aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, client: TestClient, file_size: ByteSize, @@ -1103,7 +1150,9 @@ async def test_delete_file( ) # check the file is gone from S3 with pytest.raises(S3KeyNotFoundError): - await storage_s3_client.get_file_metadata(storage_s3_bucket, uploaded_file_uuid) + await storage_s3_client.get_object_metadata( + bucket=storage_s3_bucket, object_key=uploaded_file_uuid + ) async def test_copy_as_soft_link( diff --git a/services/storage/tests/unit/test_handlers_health.py b/services/storage/tests/unit/test_handlers_health.py index 3504575c205..d10b882b611 100644 --- a/services/storage/tests/unit/test_handlers_health.py +++ b/services/storage/tests/unit/test_handlers_health.py @@ -1,6 +1,7 @@ # pylint: disable=protected-access # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=protected-access import simcore_service_storage._meta @@ -11,7 +12,7 @@ from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import status from simcore_service_storage.handlers_health import HealthCheck -from simcore_service_storage.s3_client import StorageS3Client +from types_aiobotocore_s3 import S3Client pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] @@ -26,8 +27,10 @@ async def test_health_check(client: TestClient): assert not error app_health = HealthCheck.parse_obj(data) - assert app_health.name == simcore_service_storage._meta.PROJECT_NAME - assert app_health.version == str(simcore_service_storage._meta.VERSION) + assert app_health.name == simcore_service_storage._meta.PROJECT_NAME # noqa: SLF001 + assert app_health.version == str( + simcore_service_storage._meta.VERSION + ) # noqa: SLF001 async def test_health_status(client: TestClient): @@ -39,8 +42,12 @@ async def test_health_status(client: TestClient): assert not error app_status_check = AppStatusCheck.parse_obj(data) - assert app_status_check.app_name == simcore_service_storage._meta.PROJECT_NAME - assert app_status_check.version == str(simcore_service_storage._meta.VERSION) + assert ( + app_status_check.app_name == simcore_service_storage._meta.PROJECT_NAME + ) # noqa: SLF001 + assert app_status_check.version == str( + simcore_service_storage._meta.VERSION + ) # noqa: SLF001 assert len(app_status_check.services) == 2 assert "postgres" in app_status_check.services assert "healthy" in app_status_check.services["postgres"] @@ -52,8 +59,8 @@ async def test_health_status(client: TestClient): async def test_bad_health_status_if_bucket_missing( client: TestClient, - storage_s3_client: StorageS3Client, storage_s3_bucket: S3BucketName, + s3_client: S3Client, ): assert client.app url = client.app.router["get_status"].url_for() @@ -64,7 +71,7 @@ async def test_bad_health_status_if_bucket_missing( app_status_check = AppStatusCheck.parse_obj(data) assert app_status_check.services["s3"]["healthy"] == "connected" # now delete the bucket - await storage_s3_client.client.delete_bucket(Bucket=storage_s3_bucket) + await s3_client.delete_bucket(Bucket=storage_s3_bucket) # check again the 
health response = await client.get(f"{url}") data, error = await assert_status(response, status.HTTP_200_OK) diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index 916f7d3cdba..0d2de438f85 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -5,6 +5,7 @@ # pylint:disable=no-name-in-module # pylint:disable=too-many-nested-blocks +import logging import sys from collections.abc import Awaitable, Callable from copy import deepcopy @@ -16,6 +17,7 @@ from aiohttp import ClientResponseError from aiohttp.test_utils import TestClient from aiopg.sa.engine import Engine +from aws_library.s3 import SimcoreS3API from faker import Faker from models_library.api_schemas_storage import FileMetaDataGet, FoldersBody from models_library.basic_types import SHA256Str @@ -26,13 +28,13 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, parse_file_as, parse_obj_as from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.logging import log_context from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import long_running_task_request -from servicelib.utils import logged_gather +from servicelib.utils import limited_gather from settings_library.s3 import S3Settings from simcore_postgres_database.storage_models import file_meta_data from simcore_service_storage.models import SearchFilesQueryParams -from simcore_service_storage.s3_client import StorageS3Client from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db from tests.helpers.utils_project import clone_project_data @@ -51,8 +53,10 @@ def mock_datcore_download(mocker, client): # Use to mock downloading from DATCore async def _fake_download_to_file_or_raise(session, url, dest_path): - print(f"Faking download: {url} -> {dest_path}") - Path(dest_path).write_text("FAKE: test_create_and_delete_folders_from_project") + with log_context(logging.INFO, f"Faking download: {url} -> {dest_path}"): + Path(dest_path).write_text( + "FAKE: test_create_and_delete_folders_from_project" + ) mocker.patch( "simcore_service_storage.simcore_s3_dsm.download_to_file_or_raise", @@ -93,18 +97,22 @@ async def _request_copy_folders( url = client.make_url( f"{(client.app.router['copy_folders_from_project'].url_for().with_query(user_id=user_id))}" ) - async for lr_task in long_running_task_request( - client.session, - url, - json=jsonable_encoder( - FoldersBody( - source=source_project, destination=dst_project, nodes_map=nodes_map - ) - ), - ): - print(f"<-- current state is {lr_task.progress=}") - if lr_task.done(): - return await lr_task.result() + with log_context( + logging.INFO, + f"Copying folders from {source_project['uuid']} to {dst_project['uuid']}", + ) as ctx: + async for lr_task in long_running_task_request( + client.session, + url, + json=jsonable_encoder( + FoldersBody( + source=source_project, destination=dst_project, nodes_map=nodes_map + ) + ), + ): + ctx.logger.info("%s", f"<-- current state is {lr_task.progress=}") + if lr_task.done(): + return await lr_task.result() pytest.fail(reason="Copy folders failed!") @@ -154,7 +162,7 @@ async def test_copy_folders_from_empty_project( user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], aiopg_engine: Engine, - storage_s3_client: StorageS3Client, + 
storage_s3_client: SimcoreS3API, ): # we will copy from src to dst src_project = await create_project() @@ -189,7 +197,6 @@ async def test_copy_folders_from_valid_project_with_one_large_file( client: TestClient, user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], - create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], aiopg_engine: Engine, random_project_with_files: Callable[ [int, tuple[ByteSize], tuple[SHA256Str]], @@ -419,14 +426,14 @@ async def test_create_and_delete_folders_from_project_burst( project, exclude={"tags", "state", "prj_owner"}, by_alias=False ) await create_project(**project_as_dict) - await logged_gather( + await limited_gather( *[ _create_and_delete_folders_from_project( user_id, project_as_dict, client, create_project, check_list_files=False ) for _ in range(100) ], - max_concurrency=2, + limit=2, ) diff --git a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py b/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py deleted file mode 100644 index 5c755252f7a..00000000000 --- a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py +++ /dev/null @@ -1,421 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument - -import datetime -import json -import sys -import time -from collections.abc import AsyncIterator, Iterable -from contextlib import asynccontextmanager -from itertools import groupby -from pathlib import Path -from typing import Any, TypeAlias, TypedDict -from uuid import uuid4 - -import pytest -from faker import Faker -from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from pydantic import BaseModel, ByteSize, parse_file_as, parse_obj_as -from pytest_mock import MockerFixture -from servicelib.utils import logged_gather -from settings_library.s3 import S3Settings -from simcore_service_storage import s3_client -from simcore_service_storage.models import S3BucketName -from simcore_service_storage.s3_client import StorageS3Client -from simcore_service_storage.settings import Settings - -CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent - - -def _get_benchmark_s3_settings() -> list[S3Settings]: - # NOTE: if this file is defined tests will be activated using said bucket - path_to_file = CURRENT_DIR / "s3_settings_benchmark.ignore.json" - if path_to_file.exists(): - return parse_file_as(list[S3Settings], path_to_file) - - return [] - - -@pytest.fixture(params=_get_benchmark_s3_settings()) -async def benchmark_s3_settings(request: pytest.FixtureRequest) -> S3Settings: - return request.param - - -@pytest.fixture -def settings() -> Settings: - return Settings.create_from_envs() - - -@pytest.fixture -async def benchmark_s3_client( - benchmark_s3_settings: S3Settings, settings: Settings -) -> AsyncIterator[StorageS3Client]: - client = await StorageS3Client.create( - benchmark_s3_settings, - settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY, - ) - bucket = S3BucketName(benchmark_s3_settings.S3_BUCKET_NAME) - - # make sure bucket is empty - await client.delete_files_in_path(bucket, prefix="") - - yield client - - # empty bucket once more when done testing - await client.delete_files_in_path(bucket, prefix="") - await client.close() - - -@asynccontextmanager -async def temp_file(size: ByteSize) -> AsyncIterator[Path]: - file_path = Path(f"/tmp/{uuid4()}") - file_path.write_text("a" * size) - assert file_path.exists() is True - - yield file_path - - 
diff --git a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py b/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py
deleted file mode 100644
index 5c755252f7a..00000000000
--- a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py
+++ /dev/null
@@ -1,421 +0,0 @@
-# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-
-import datetime
-import json
-import sys
-import time
-from collections.abc import AsyncIterator, Iterable
-from contextlib import asynccontextmanager
-from itertools import groupby
-from pathlib import Path
-from typing import Any, TypeAlias, TypedDict
-from uuid import uuid4
-
-import pytest
-from faker import Faker
-from models_library.projects import ProjectID
-from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
-from pydantic import BaseModel, ByteSize, parse_file_as, parse_obj_as
-from pytest_mock import MockerFixture
-from servicelib.utils import logged_gather
-from settings_library.s3 import S3Settings
-from simcore_service_storage import s3_client
-from simcore_service_storage.models import S3BucketName
-from simcore_service_storage.s3_client import StorageS3Client
-from simcore_service_storage.settings import Settings
-
-CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-
-def _get_benchmark_s3_settings() -> list[S3Settings]:
-    # NOTE: if this file is defined tests will be activated using said bucket
-    path_to_file = CURRENT_DIR / "s3_settings_benchmark.ignore.json"
-    if path_to_file.exists():
-        return parse_file_as(list[S3Settings], path_to_file)
-
-    return []
-
-
-@pytest.fixture(params=_get_benchmark_s3_settings())
-async def benchmark_s3_settings(request: pytest.FixtureRequest) -> S3Settings:
-    return request.param
-
-
-@pytest.fixture
-def settings() -> Settings:
-    return Settings.create_from_envs()
-
-
-@pytest.fixture
-async def benchmark_s3_client(
-    benchmark_s3_settings: S3Settings, settings: Settings
-) -> AsyncIterator[StorageS3Client]:
-    client = await StorageS3Client.create(
-        benchmark_s3_settings,
-        settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-    )
-    bucket = S3BucketName(benchmark_s3_settings.S3_BUCKET_NAME)
-
-    # make sure bucket is empty
-    await client.delete_files_in_path(bucket, prefix="")
-
-    yield client
-
-    # empty bucket once more when done testing
-    await client.delete_files_in_path(bucket, prefix="")
-    await client.close()
-
-
-@asynccontextmanager
-async def temp_file(size: ByteSize) -> AsyncIterator[Path]:
-    file_path = Path(f"/tmp/{uuid4()}")
-    file_path.write_text("a" * size)
-    assert file_path.exists() is True
-
-    yield file_path
-
-    file_path.unlink()
-    assert file_path.exists() is False
-
-
-async def _create_file(
-    s3_client: StorageS3Client,
-    bucket: S3BucketName,
-    file_id: SimcoreS3FileID,
-    size: ByteSize = parse_obj_as(ByteSize, "1"),
-) -> None:
-    async with temp_file(size) as file:
-        await s3_client.upload_file(
-            bucket=bucket, file=file, file_id=file_id, bytes_transfered_cb=None
-        )
-
-
-def _create_node_structure(
-    root_node: str, level: int, dirs_per_node: int, files_per_node: int
-) -> set[str]:
-    if level == 0:
-        return set()
-
-    leaves: set[str] = set()
-    for f in range(files_per_node):
-        p = f"{root_node}/f{f}"
-        leaves.add(p)
-
-    for d in range(dirs_per_node):
-        new_leaves = _create_node_structure(
-            root_node + f"/l{level}/d{d}", level - 1, dirs_per_node, files_per_node
-        )
-        leaves.update(new_leaves)
-
-    return leaves
-
-
-async def _create_files(
-    s3_client: StorageS3Client,
-    bucket: S3BucketName,
-    project_id: ProjectID,
-    node_id: NodeID,
-    *,
-    depth: int,
-    dirs_per_dir: int,
-    files_per_dir: int,
-) -> set[SimcoreS3FileID]:
-    elements = _create_node_structure(
-        root_node="",
-        level=depth,
-        dirs_per_node=dirs_per_dir,
-        files_per_node=files_per_dir,
-    )
-    file_ids: set[SimcoreS3FileID] = {
-        parse_obj_as(SimcoreS3FileID, f"{project_id}/{node_id}/{key}")
-        for key in elements
-    }
-
-    await logged_gather(
-        *[_create_file(s3_client, bucket, file_id) for file_id in file_ids],
-        max_concurrency=20,
-    )
-
-    return file_ids
-
-
-@pytest.fixture(scope="session")
-def tests_session_id() -> str:
-    return datetime.datetime.utcnow().isoformat()
-
-
-class MetricsResult(BaseModel):
-    session_id: str
-    duration_ms: float
-    tags: dict[str, str]
-
-
-class MetricsResultList(BaseModel):
-    __root__: list[MetricsResult]
-
-
-_TEST_RESULTS: Path = CURRENT_DIR / "test_results.ignore.json"
-
-
-@asynccontextmanager
-async def metrics(tests_session_id: str, tags: dict[str, str]) -> AsyncIterator[None]:
-    if not _TEST_RESULTS.exists():
-        _TEST_RESULTS.write_text(json.dumps([]))
-
-    start = time.time_ns()
-
-    yield None
-
-    elapsed_ms = (time.time_ns() - start) / 1e6
-
-    metrics_results = parse_file_as(list[MetricsResult], _TEST_RESULTS)
-    metrics_results.append(
-        MetricsResult(session_id=tests_session_id, duration_ms=elapsed_ms, tags=tags)
-    )
-    _TEST_RESULTS.write_text(MetricsResultList.parse_obj(metrics_results).json())
-
-
-@pytest.fixture
-def mock_max_items(mocker: MockerFixture) -> None:
-    # pylint: disable=protected-access
-    mocker.patch.object(
-        s3_client._list_objects_v2_paginated_gen, "__defaults__", (None,)
-    )
-
-
-@pytest.mark.parametrize("total_queries", [3])
-@pytest.mark.parametrize(
-    "depth, dirs_per_dir, files_per_dir, description",
-    [
-        (1, 10, 3, "very few files"),
-        (1, 1, 1092, "1092 files in one dir"),
-        (1, 1, 3279, "3279 files in one dir"),
-        (6, 3, 3, "spread out files and dirs"),
-        (7, 3, 3, "lots of spread files and dirs"),
-    ],
-)
-async def test_benchmark_s3_listing(
-    # pylint: disable=too-many-arguments
-    mock_max_items: None,
-    benchmark_s3_client: StorageS3Client,
-    benchmark_s3_settings: S3Settings,
-    faker: Faker,
-    tests_session_id: str,
-    depth: int,
-    dirs_per_dir: int,
-    files_per_dir: int,
-    total_queries: int,
-    description: str,
-    generate_report: None,
-):
-    project_id = faker.uuid4(cast_to=None)
-    node_id = faker.uuid4(cast_to=None)
-
-    bucket: S3BucketName = S3BucketName(benchmark_s3_settings.S3_BUCKET_NAME)
-
-    created_fils: set[SimcoreS3FileID] = await _create_files(
-        benchmark_s3_client,
-        bucket,
-        project_id,
-        node_id,
-        depth=depth,
-        dirs_per_dir=dirs_per_dir,
-        files_per_dir=files_per_dir,
-    )
-
-    for i in range(total_queries):
-        async with metrics(
-            tests_session_id=tests_session_id,
-            tags={
-                "from": "Z43",
-                "to": benchmark_s3_settings.S3_ENDPOINT,
-                "query": "list_files(prefix='')",
-                "total_queries": f"{total_queries}",
-                "query_number": f"{i +1}",
-                "reason": description,
-                "depth": f"{depth}",
-                "dirs_per_dir": f"{dirs_per_dir}",
-                "files_per_dir": f"{files_per_dir}",
-                "generated_file_count": f"{len(created_fils)}",
-            },
-        ):
-            files = await benchmark_s3_client.list_files(bucket, prefix="")
-            assert len(files) == len(created_fils)
-
-
-#######################
-## REPORT GENERATION ##
-#######################
-
-
-@pytest.fixture(scope="session")
-def generate_report() -> Iterable[None]:
-    # creates report after running benchmark tests
-    yield
-    _render_report()
-    print(f"please open report found at {_REPORT}")
-
-
-_REPORT: Path = CURRENT_DIR / "report.ignore.md"
-
-SessionId: TypeAlias = str
-Reason: TypeAlias = str
-FromTo: TypeAlias = str
-
-
-class GroupMap(TypedDict):
-    session_id: SessionId
-    reason: Reason
-    from_to: FromTo
-    item: MetricsResult
-
-
-def __group_by_keys(
-    data: list[GroupMap], group_keys: tuple[str, ...]
-) -> dict[tuple[str, ...], list[MetricsResult]]:
-    # Sort the data based on the group keys
-    data.sort(key=lambda x: [x[key] for key in group_keys])
-
-    # Group the data by the specified keys
-    grouped_data = groupby(data, key=lambda x: tuple(x[key] for key in group_keys))
-
-    return {key: [x["item"] for x in group] for key, group in grouped_data}
-
-
-def __get_grouping_map(metrics_results: list[MetricsResult]) -> list[GroupMap]:
-    # NOTE: if more fields are required for grouping,
-    # extend the GroupMap model and add them below
-    return [
-        GroupMap(
-            session_id=entry.session_id,
-            reason=entry.tags["reason"],
-            from_to=entry.tags["from"] + " -> " + entry.tags["to"],
-            item=entry,
-        )
-        for entry in metrics_results
-    ]
-
-
-def _group_by_session_id_and_description(
-    metrics_results: list[MetricsResult],
-) -> dict[tuple[SessionId, Reason], list[MetricsResult]]:
-    grouped_results: dict[
-        tuple[SessionId, Reason], list[MetricsResult]
-    ] = __group_by_keys(
-        __get_grouping_map(metrics_results), group_keys=("session_id", "reason")
-    )
-    return grouped_results
-
-
-def _group_by_from_to_key(
-    metrics_results: list[MetricsResult],
-) -> dict[tuple[FromTo], list[MetricsResult]]:
-    grouped_results: dict[
-        tuple[SessionId, Reason], list[MetricsResult]
-    ] = __group_by_keys(__get_grouping_map(metrics_results), group_keys=("from_to",))
-    return grouped_results
-
-
-_TEMPLATE_REPORT_SECTION = """
-### Test Session {session_id}
-
-Reason `{reason}` data query `{query}`
-
-{rendered_table}
-
-"""
-
-
-def _flip_list_matrix(matrix: list[list[Any]]) -> list[list[Any]]:
-    r_count = len(matrix)
-    c_count = len(matrix[0])
-
-    new_matrix = [["" for _ in range(r_count)] for _ in range(c_count)]
-
-    for ir, r in enumerate(matrix):
-        for ic, element in enumerate(r):
-            new_matrix[ic][ir] = element
-
-    return new_matrix
-
-
-def _render_report() -> None:
-    # Data divided in:
-    # group by sessions_id
-    #   -> group by reason
-    #     -> group by subdivide by "[from] -> [to]"
-    #       -> list entries here
-
-    metrics_results = parse_file_as(list[MetricsResult], _TEST_RESULTS)
-
-    def _render_table(table_data: list[list[str]]) -> str:
-        def _render_row(data: Iterable) -> str:
-            return "|" + "|".join(map(str, data)) + "|\n"
-
-        result = ""
-        for k, row in enumerate(table_data):
-            result += _render_row(row)
-            if k == 0:
-                dahs_items = ["-" for _ in range(len(row))]
-                result += _render_row(dahs_items)
-        return result
-
-    file_content = ""
-
-    for (session_id, reason), g1_items in _group_by_session_id_and_description(
-        metrics_results
-    ).items():
-        table_data: list[list[str]] = []
-
-        HEADERS = [
-            "S3 Backend",
-            "Total queried files",
-            "Worst (ms)",
-            "Average (ms)",
-            "Best (ms)",
-        ]
-        table_data.append(HEADERS)
-
-        for (from_to_key,), g2_items in _group_by_from_to_key(g1_items).items():
-            all_times = [x.duration_ms for x in g2_items]
-
-            worst = max(all_times)
-            average = sum(all_times) / len(all_times)
-            best = min(all_times)
-
-            file_count = g2_items[0].tags["generated_file_count"]
-
-            row = [
-                from_to_key,
-                file_count,
-                f"{worst:.2f}",
-                f"{average:.2f}",
-                f"{best:.2f}",
-            ]
-            assert len(row) == len(HEADERS)
-            table_data.append(row)
-
-        rendered_table = _render_table(_flip_list_matrix(table_data))
-
-        query = g1_items[0].tags["query"]
-
-        rendered_report_section = _TEMPLATE_REPORT_SECTION.format(
-            session_id=session_id,
-            reason=reason,
-            query=query,
-            rendered_table=rendered_table,
-        )
-        file_content += rendered_report_section
-
-    _REPORT.write_text(file_content)
-
-
-if __name__ == "__main__":
-    # use this to generate a report after the benchmark tests have been run
-    if not _TEST_RESULTS.exists():
-        print(
-            "First run the following\npytest tests/unit/test_handlers_simcore_s3_benchmark.py"
-        )
-        sys.exit(1)
-    _render_report()
diff --git a/services/storage/tests/unit/test_s3.py b/services/storage/tests/unit/test_s3.py
deleted file mode 100644
index ef2a90354fc..00000000000
--- a/services/storage/tests/unit/test_s3.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
-
-
-from aiohttp.test_utils import TestClient
-from simcore_service_storage.s3 import get_s3_client
-from simcore_service_storage.settings import Settings
-
-pytest_simcore_core_services_selection = ["postgres"]
-
-
-async def test_s3_client(app_settings: Settings, client: TestClient):
-    assert client.app
-    assert app_settings.STORAGE_S3
-    s3_client = get_s3_client(client.app)
-    assert s3_client
-
-    response = await s3_client.client.list_buckets()
-    assert response
-    assert "Buckets" in response
-    assert len(response["Buckets"]) == 1
-    assert "Name" in response["Buckets"][0]
-    assert response["Buckets"][0]["Name"] == app_settings.STORAGE_S3.S3_BUCKET_NAME
diff --git a/services/storage/tests/unit/test_s3_client.py b/services/storage/tests/unit/test_s3_client.py
deleted file mode 100644
index 20dab8581bc..00000000000
--- a/services/storage/tests/unit/test_s3_client.py
+++ /dev/null
@@ -1,988 +0,0 @@
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
-# pylint:disable=too-many-arguments
-# pylint:disable=no-name-in-module
-
-
-import asyncio
-import json
-from collections.abc import AsyncIterator, Awaitable, Callable
-from dataclasses import dataclass
-from pathlib import Path
-from random import choice
-from typing import Final
-from uuid import uuid4
-
-import botocore.exceptions
-import pytest
-from aiohttp import ClientSession
-from aws_library.s3.errors import (
-    S3AccessError,
-    S3BucketInvalidError,
-    S3KeyNotFoundError,
-)
-from faker import Faker
-from models_library.api_schemas_storage import UploadedPart
-from models_library.basic_types import SHA256Str
-from models_library.projects import ProjectID
-from models_library.projects_nodes import NodeID
-from models_library.projects_nodes_io import SimcoreS3FileID
-from pydantic import ByteSize, parse_obj_as
-from pytest_mock import MockFixture
-from pytest_simcore.helpers.parametrizations import byte_size_ids
-from simcore_service_storage.models import MultiPartUploadLinks, S3BucketName
-from simcore_service_storage.s3_client import (
-    StorageS3Client,
-    _list_objects_v2_paginated_gen,
-)
-from simcore_service_storage.settings import Settings
-from tests.helpers.file_utils import (
-    parametrized_file_size,
-    upload_file_to_presigned_link,
-)
-from types_aiobotocore_s3.type_defs import ObjectTypeDef
-
-DEFAULT_EXPIRATION_SECS: Final[int] = 10
-
-pytest_simcore_core_services_selection = ["postgres"]
-
-
-async def test_storage_storage_s3_client_creation(
-    app_settings: Settings,
-):
-    assert app_settings.STORAGE_S3
-    storage_s3_client = await StorageS3Client.create(
-        app_settings.STORAGE_S3,
-        app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-    )
-    assert storage_s3_client
-    response = await storage_s3_client.client.list_buckets()
-    assert not response["Buckets"]
-
-    await storage_s3_client.close()
-    with pytest.raises(botocore.exceptions.HTTPClientError):
-        await storage_s3_client.client.list_buckets()
-
-
-@pytest.fixture
-async def storage_s3_client(
-    app_settings: Settings,
-) -> AsyncIterator[StorageS3Client]:
-    assert app_settings.STORAGE_S3
-    storage_s3_client = await StorageS3Client.create(
-        app_settings.STORAGE_S3,
-        app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-    )
-    # check that no bucket is lying around
-    assert storage_s3_client
-    response = await storage_s3_client.client.list_buckets()
-    assert not response[
-        "Buckets"
-    ], f"for testing purposes, there should be no bucket lying around! {response=}"
-    yield storage_s3_client
-
-
-async def test_create_bucket(storage_s3_client: StorageS3Client, faker: Faker):
-    response = await storage_s3_client.client.list_buckets()
-    assert not response["Buckets"]
-    bucket = faker.pystr()
-    await storage_s3_client.create_bucket(bucket, "us-east-1")
-    response = await storage_s3_client.client.list_buckets()
-    assert response["Buckets"]
-    assert len(response["Buckets"]) == 1
-    assert "Name" in response["Buckets"][0]
-    assert response["Buckets"][0]["Name"] == bucket
-    # now we create the bucket again, it should silently work even if it exists already
-    await storage_s3_client.create_bucket(bucket, "us-east-1")
-    response = await storage_s3_client.client.list_buckets()
-    assert response["Buckets"]
-    assert len(response["Buckets"]) == 1
-    assert "Name" in response["Buckets"][0]
-    assert response["Buckets"][0]["Name"] == bucket
-
-
-@pytest.fixture
-async def storage_s3_bucket(storage_s3_client: StorageS3Client, faker: Faker) -> str:
-    response = await storage_s3_client.client.list_buckets()
-    assert not response["Buckets"]
-    bucket_name = faker.pystr()
-    await storage_s3_client.create_bucket(bucket_name, "us-east-1")
-    response = await storage_s3_client.client.list_buckets()
-    assert response["Buckets"]
-    assert bucket_name in [
-        bucket_struct.get("Name") for bucket_struct in response["Buckets"]
-    ], f"failed creating {bucket_name}"
-
-    return bucket_name
-
-
-async def test_create_single_presigned_upload_link(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_file_of_size: Callable[[ByteSize], Path],
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-):
-    file = create_file_of_size(parse_obj_as(ByteSize, "1Mib"))
-    file_id = create_simcore_file_id(uuid4(), uuid4(), file.name)
-    presigned_url = await storage_s3_client.create_single_presigned_upload_link(
-        storage_s3_bucket, file_id, expiration_secs=DEFAULT_EXPIRATION_SECS
-    )
-    assert presigned_url
-
-    # upload the file with a fake multipart upload links structure
-    await upload_file_to_presigned_link(
-        file,
-        MultiPartUploadLinks(
-            upload_id="fake",
-            chunk_size=parse_obj_as(ByteSize, file.stat().st_size),
-            urls=[presigned_url],
-        ),
-    )
-
-    # check it is there
-    s3_metadata = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-    assert s3_metadata.size == file.stat().st_size
-    assert s3_metadata.last_modified
-    assert s3_metadata.e_tag
-
-
-async def test_create_single_presigned_upload_link_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_file_of_size: Callable[[ByteSize], Path],
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-):
-    file = create_file_of_size(parse_obj_as(ByteSize, "1Mib"))
-    file_id = create_simcore_file_id(uuid4(), uuid4(), file.name)
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.create_single_presigned_upload_link(
-            S3BucketName("pytestinvalidbucket"),
-            file_id,
-            expiration_secs=DEFAULT_EXPIRATION_SECS,
-        )
-
-
-@pytest.mark.parametrize(
-    "file_size",
-    [
-        parametrized_file_size("10Mib"),
-        parametrized_file_size("100Mib"),
-        parametrized_file_size("1000Mib"),
-    ],
-    ids=byte_size_ids,
-)
-async def test_create_multipart_presigned_upload_link(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_multipart_presigned_link_without_completion: Callable[
-        ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-    ],
-    file_size: ByteSize,
-):
-    (
-        file_id,
-        upload_links,
-        uploaded_parts,
-    ) = await upload_file_multipart_presigned_link_without_completion(file_size)
-
-    # now complete it
-    received_e_tag = await storage_s3_client.complete_multipart_upload(
-        storage_s3_bucket, file_id, upload_links.upload_id, uploaded_parts
-    )
-
-    # check that the multipart upload is not listed anymore
-    list_ongoing_uploads = await storage_s3_client.list_ongoing_multipart_uploads(
-        storage_s3_bucket
-    )
-    assert list_ongoing_uploads == []
-
-    # check the object is complete
-    s3_metadata = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-    assert s3_metadata.size == file_size
-    assert s3_metadata.last_modified
-    assert s3_metadata.e_tag == f"{json.loads(received_e_tag)}"
-
-
-@pytest.mark.parametrize(
-    "file_size",
-    [
-        parametrized_file_size("10Mib"),
-    ],
-    ids=byte_size_ids,
-)
-async def test_create_multipart_presigned_upload_link_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_multipart_presigned_link_without_completion: Callable[
-        ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-    ],
-    file_size: ByteSize,
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    (
-        file_id,
-        upload_links,
-        uploaded_parts,
-    ) = await upload_file_multipart_presigned_link_without_completion(file_size)
-
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.complete_multipart_upload(
-            S3BucketName("pytestinvalidbucket"),
-            file_id,
-            upload_links.upload_id,
-            uploaded_parts,
-        )
-
-    wrong_file_id = create_simcore_file_id(uuid4(), uuid4(), faker.file_name())
-    # with pytest.raises(S3KeyNotFoundError):
-    # NOTE: this does not raise... and it returns the file_id of the original file...
-    await storage_s3_client.complete_multipart_upload(
-        storage_s3_bucket, wrong_file_id, upload_links.upload_id, uploaded_parts
-    )
-    # call it again triggers
-    with pytest.raises(S3AccessError):
-        await storage_s3_client.complete_multipart_upload(
-            storage_s3_bucket,
-            wrong_file_id,
-            upload_links.upload_id,
-            uploaded_parts,
-        )
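The NOTE in the test above records an S3 quirk: completing a multipart upload under a key that never had one does not necessarily raise on the first call. A defensive pattern, sketched here against the StorageS3Client API used in this file (complete_and_verify is a hypothetical helper; head_object is the standard botocore call):

async def complete_and_verify(storage_s3_client, bucket, file_id, upload_id, parts):
    # complete the multipart upload, then only trust the returned e-tag
    # if the object is actually retrievable under the expected key
    e_tag = await storage_s3_client.complete_multipart_upload(
        bucket, file_id, upload_id, parts
    )
    await storage_s3_client.client.head_object(Bucket=bucket, Key=file_id)
    return e_tag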
-@pytest.mark.parametrize(
-    "file_size", [parametrized_file_size("100Mib")], ids=byte_size_ids
-)
-async def test_abort_multipart_upload(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_multipart_presigned_link_without_completion: Callable[
-        ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-    ],
-    file_size: ByteSize,
-):
-    (
-        file_id,
-        upload_links,
-        _,
-    ) = await upload_file_multipart_presigned_link_without_completion(file_size)
-
-    # now abort it
-    await storage_s3_client.abort_multipart_upload(
-        storage_s3_bucket, file_id, upload_links.upload_id
-    )
-
-    # now check that the listing is empty
-    ongoing_multipart_uploads = await storage_s3_client.list_ongoing_multipart_uploads(
-        storage_s3_bucket
-    )
-    assert ongoing_multipart_uploads == []
-
-    # check it is not available
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-
-@pytest.mark.parametrize(
-    "file_size", [parametrized_file_size("100Mib")], ids=byte_size_ids
-)
-async def test_multiple_completion_of_multipart_upload(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_multipart_presigned_link_without_completion: Callable[
-        ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-    ],
-    file_size: ByteSize,
-):
-    (
-        file_id,
-        upload_links,
-        uploaded_parts,
-    ) = await upload_file_multipart_presigned_link_without_completion(file_size)
-
-    # first completion
-    await storage_s3_client.complete_multipart_upload(
-        storage_s3_bucket, file_id, upload_links.upload_id, uploaded_parts
-    )
-
-    with pytest.raises(S3AccessError):
-        await storage_s3_client.complete_multipart_upload(
-            storage_s3_bucket, file_id, upload_links.upload_id, uploaded_parts
-        )
-
-
-@pytest.mark.parametrize("file_size", [parametrized_file_size("1Gib")])
-async def test_break_completion_of_multipart_upload(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_multipart_presigned_link_without_completion: Callable[
-        ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-    ],
-    file_size: ByteSize,
-):
-    (
-        file_id,
-        upload_links,
-        uploaded_parts,
-    ) = await upload_file_multipart_presigned_link_without_completion(file_size)
-    # let's break the completion task very quickly and see what happens
-    VERY_SHORT_TIMEOUT = 0.2
-    with pytest.raises(asyncio.TimeoutError):
-        await asyncio.wait_for(
-            storage_s3_client.complete_multipart_upload(
-                storage_s3_bucket, file_id, upload_links.upload_id, uploaded_parts
-            ),
-            timeout=VERY_SHORT_TIMEOUT,
-        )
-    # check we have the multipart upload initialized and listed
-    ongoing_multipart_uploads = await storage_s3_client.list_ongoing_multipart_uploads(
-        storage_s3_bucket
-    )
-    assert ongoing_multipart_uploads
-    assert len(ongoing_multipart_uploads) == 1
-    ongoing_upload_id, ongoing_file_id = ongoing_multipart_uploads[0]
-    assert ongoing_upload_id == upload_links.upload_id
-    assert ongoing_file_id == file_id
-
-    # now wait
-    await asyncio.sleep(10)
-
-    # check that the completion of the upload completed...
-    assert (
-        await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket) == []
-    )
-
-    # check the object is complete
-    s3_metadata = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-    assert s3_metadata.size == file_size
-    assert s3_metadata.last_modified
-    assert s3_metadata.e_tag
-
-
-@pytest.fixture
-def upload_file_single_presigned_link(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_file_of_size: Callable[[ByteSize], Path],
-) -> Callable[..., Awaitable[SimcoreS3FileID]]:
-    async def _uploader(file_id: SimcoreS3FileID | None = None) -> SimcoreS3FileID:
-        file = create_file_of_size(parse_obj_as(ByteSize, "1Mib"))
-        if not file_id:
-            file_id = SimcoreS3FileID(file.name)
-        presigned_url = await storage_s3_client.create_single_presigned_upload_link(
-            storage_s3_bucket, file_id, expiration_secs=DEFAULT_EXPIRATION_SECS
-        )
-        assert presigned_url
-
-        # upload the file with a fake multipart upload links structure
-        await upload_file_to_presigned_link(
-            file,
-            MultiPartUploadLinks(
-                upload_id="fake",
-                chunk_size=parse_obj_as(ByteSize, file.stat().st_size),
-                urls=[presigned_url],
-            ),
-        )
-
-        # check the object is complete
-        s3_metadata = await storage_s3_client.get_file_metadata(
-            storage_s3_bucket, file_id
-        )
-        assert s3_metadata.size == file.stat().st_size
-        return file_id
-
-    return _uploader
-
-
-@pytest.fixture
-def upload_file_multipart_presigned_link_without_completion(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_file_of_size: Callable[[ByteSize], Path],
-    faker: Faker,
-) -> Callable[
-    ..., Awaitable[tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]]
-]:
-    async def _uploader(
-        file_size: ByteSize,
-        file_id: SimcoreS3FileID | None = None,
-    ) -> tuple[SimcoreS3FileID, MultiPartUploadLinks, list[UploadedPart]]:
-        file = create_file_of_size(file_size)
-        if not file_id:
-            file_id = SimcoreS3FileID(file.name)
-        upload_links = await storage_s3_client.create_multipart_upload_links(
-            storage_s3_bucket,
-            file_id,
-            ByteSize(file.stat().st_size),
-            expiration_secs=DEFAULT_EXPIRATION_SECS,
-            sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()),
-        )
-        assert upload_links
-
-        # check there is no file yet
-        with pytest.raises(S3KeyNotFoundError):
-            await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-        # check we have the multipart upload initialized and listed
-        ongoing_multipart_uploads = (
-            await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
-        )
-        assert ongoing_multipart_uploads
-        assert len(ongoing_multipart_uploads) == 1
-        ongoing_upload_id, ongoing_file_id = ongoing_multipart_uploads[0]
-        assert ongoing_upload_id == upload_links.upload_id
-        assert ongoing_file_id == file_id
-
-        # upload the file
-        uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_link(
-            file,
-            upload_links,
-        )
-        assert len(uploaded_parts) == len(upload_links.urls)
-
-        # check there is no file yet
-        with pytest.raises(S3KeyNotFoundError):
-            await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-        # check we have the multipart upload initialized and listed
-        ongoing_multipart_uploads = (
-            await storage_s3_client.list_ongoing_multipart_uploads(storage_s3_bucket)
-        )
-        assert ongoing_multipart_uploads
-        assert len(ongoing_multipart_uploads) == 1
-        ongoing_upload_id, ongoing_file_id = ongoing_multipart_uploads[0]
-        assert ongoing_upload_id == upload_links.upload_id
-        assert ongoing_file_id == file_id
-
-        return (
-            file_id,
-            upload_links,
-            uploaded_parts,
-        )
-
-    return _uploader
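These tests lean on list_ongoing_multipart_uploads to observe uploads that were initiated but neither completed nor aborted. That listing is a single S3 API call; a sketch of what such a wrapper presumably does, assuming an aiobotocore-style client (not the actual implementation):

async def list_ongoing(client, bucket: str) -> list[tuple[str, str]]:
    # every multipart upload that is still pending shows up here until
    # it is either completed or aborted
    response = await client.list_multipart_uploads(Bucket=bucket)
    return [(u["UploadId"], u["Key"]) for u in response.get("Uploads", [])]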
-
-
-async def test_delete_file(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-):
-    file_id = await upload_file_single_presigned_link()
-
-    # delete the file
-    await storage_s3_client.delete_file(storage_s3_bucket, file_id)
-
-    # check it is not available
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-
-async def test_delete_file_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    file_id = create_simcore_file_id(uuid4(), uuid4(), faker.file_name())
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.delete_file(
-            S3BucketName("pytestinvalidbucket"), file_id
-        )
-
-    # this does not raise
-    await storage_s3_client.delete_file(storage_s3_bucket, file_id)
-
-
-async def test_delete_files_in_project_node(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-    faker: Faker,
-):
-    # we upload files in these paths
-    project_1 = uuid4()
-    project_2 = uuid4()
-    node_1 = uuid4()
-    node_2 = uuid4()
-    node_3 = uuid4()
-    upload_paths = (
-        "",
-        f"{project_1}/",
-        f"{project_1}/{node_1}/",
-        f"{project_1}/{node_2}/",
-        f"{project_1}/{node_2}/",
-        f"{project_1}/{node_3}/",
-        f"{project_1}/{node_3}/",
-        f"{project_1}/{node_3}/",
-        f"{project_2}/",
-        f"{project_2}/{node_1}/",
-        f"{project_2}/{node_2}/",
-        f"{project_2}/{node_2}/",
-        f"{project_2}/{node_2}/",
-        f"{project_2}/{node_2}/",
-        f"{project_2}/{node_3}/",
-        f"{project_2}/{node_3}/states/",
-        f"{project_2}/{node_3}/some_folder_of_sort/",
-    )
-
-    uploaded_file_ids = await asyncio.gather(
-        *(
-            upload_file_single_presigned_link(file_id=f"{path}{faker.file_name()}")
-            for path in upload_paths
-        )
-    )
-    assert len(uploaded_file_ids) == len(upload_paths)
-
-    async def _assert_deleted(*, deleted_ids: tuple[str, ...]):
-        for file_id in uploaded_file_ids:
-            if file_id.startswith(deleted_ids):
-                with pytest.raises(S3KeyNotFoundError):
-                    await storage_s3_client.get_file_metadata(
-                        storage_s3_bucket, file_id
-                    )
-            else:
-                s3_metadata = await storage_s3_client.get_file_metadata(
-                    storage_s3_bucket, file_id
-                )
-                assert s3_metadata.e_tag
-
-    # now let's delete some files and check they are correctly deleted
-    await storage_s3_client.delete_files_in_project_node(
-        storage_s3_bucket, project_1, node_3
-    )
-    await _assert_deleted(deleted_ids=(f"{project_1}/{node_3}",))
-
-    # delete some stuff in project 2
-    await storage_s3_client.delete_files_in_project_node(
-        storage_s3_bucket, project_2, node_3
-    )
-    await _assert_deleted(
-        deleted_ids=(
-            f"{project_1}/{node_3}",
-            f"{project_2}/{node_3}",
-        )
-    )
-
-    # completely delete project 2
-    await storage_s3_client.delete_files_in_project_node(
-        storage_s3_bucket, project_2, None
-    )
-    await _assert_deleted(
-        deleted_ids=(
-            f"{project_1}/{node_3}",
-            f"{project_2}",
-        )
-    )
-
-
-async def test_undelete_file_raises_if_file_does_not_exists(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    file_id = create_simcore_file_id(uuid4(), uuid4(), faker.file_name())
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.delete_file(
-            S3BucketName("pytestinvalidbucket"), file_id
-        )
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.undelete_file(storage_s3_bucket, file_id)
-
-
-async def test_undelete_file_with_no_versioning_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-):
-    file_id = await upload_file_single_presigned_link()
-    await storage_s3_client.delete_file(storage_s3_bucket, file_id)
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.undelete_file(storage_s3_bucket, file_id)
-
-
-async def test_undelete_file(
-    storage_s3_client: StorageS3Client,
-    with_versioning_enabled: None,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-):
-    file_id = await upload_file_single_presigned_link()
-
-    # delete the file
-    await storage_s3_client.delete_file(storage_s3_bucket, file_id)
-
-    # check it is not available
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-    # undelete the file
-    await storage_s3_client.undelete_file(storage_s3_bucket, file_id)
-    # check the file is back
-    await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-    # delete the file again
-    await storage_s3_client.delete_file(storage_s3_bucket, file_id)
-    # check it is not available
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-
-
-async def test_delete_files_in_project_node_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-):
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.delete_files_in_project_node(
-            S3BucketName("pytestinvalidbucket"), uuid4(), uuid4()
-        )
-    # this should not raise
-    await storage_s3_client.delete_files_in_project_node(
-        storage_s3_bucket, uuid4(), uuid4()
-    )
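test_undelete_file above only works when the bucket has object versioning switched on (the with_versioning_enabled fixture): with versioning, a DELETE merely adds a delete marker that can be removed again. Enabling it is one bucket-configuration call; a sketch of what such a fixture presumably wraps (put_bucket_versioning is the standard S3 call, the rest is illustrative):

async def enable_versioning(client, bucket: str) -> None:
    # after this call, deletes create removable delete markers instead of
    # destroying the object, which is what makes "undelete" possible
    await client.put_bucket_versioning(
        Bucket=bucket, VersioningConfiguration={"Status": "Enabled"}
    )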
-
-
-async def test_create_single_presigned_download_link(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-    tmp_path: Path,
-    faker: Faker,
-):
-    file_id = await upload_file_single_presigned_link()
-
-    presigned_url = await storage_s3_client.create_single_presigned_download_link(
-        storage_s3_bucket, file_id, expiration_secs=DEFAULT_EXPIRATION_SECS
-    )
-
-    assert presigned_url
-
-    dest_file = tmp_path / faker.file_name()
-    # download the file
-    async with ClientSession() as session:
-        response = await session.get(presigned_url)
-        response.raise_for_status()
-        with dest_file.open("wb") as fp:
-            fp.write(await response.read())
-    assert dest_file.exists()
-
-    s3_metadata = await storage_s3_client.get_file_metadata(storage_s3_bucket, file_id)
-    assert s3_metadata.e_tag
-    assert s3_metadata.last_modified
-    assert dest_file.stat().st_size == s3_metadata.size
-
-
-async def test_create_single_presigned_download_link_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_single_presigned_link: Callable[..., Awaitable[SimcoreS3FileID]],
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    file_id = await upload_file_single_presigned_link()
-
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.create_single_presigned_download_link(
-            S3BucketName("invalidpytestbucket"),
-            file_id,
-            expiration_secs=DEFAULT_EXPIRATION_SECS,
-        )
-    wrong_file_id = create_simcore_file_id(uuid4(), uuid4(), faker.file_name())
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.create_single_presigned_download_link(
-            storage_s3_bucket, wrong_file_id, expiration_secs=DEFAULT_EXPIRATION_SECS
-        )
-
-
-@pytest.fixture
-async def upload_file_with_aioboto3_managed_transfer(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    faker: Faker,
-    create_file_of_size: Callable[[ByteSize, str | None], Path],
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-) -> Callable[[ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]]:
-    async def _uploader(file_size: ByteSize) -> tuple[Path, SimcoreS3FileID]:
-        file_name = faker.file_name()
-        file = create_file_of_size(file_size, file_name)
-        file_id = create_simcore_file_id(uuid4(), uuid4(), file_name)
-        response = await storage_s3_client.upload_file(
-            storage_s3_bucket, file, file_id, bytes_transfered_cb=None
-        )
-        # there is no response from aioboto3...
-        assert not response
-        # check the object is uploaded
-        response = await storage_s3_client.client.list_objects_v2(
-            Bucket=storage_s3_bucket
-        )
-        assert "Contents" in response
-        list_objects = response["Contents"]
-        assert len(list_objects) >= 1
-        # find our object now
-        for s3_obj in list_objects:
-            if s3_obj.get("Key") == file_id:
-                # found it!
-                assert "ETag" in s3_obj
-                assert "Key" in s3_obj
-                assert s3_obj["Key"] == file_id
-                assert "Size" in s3_obj
-                assert s3_obj["Size"] == file.stat().st_size
-                return file, file_id
-        assert False, "Object was not properly uploaded!"
-
-    return _uploader
-
-
-@pytest.mark.parametrize(
-    "file_size",
-    [parametrized_file_size("500Mib")],
-    ids=byte_size_ids,
-)
-async def test_upload_file(
-    file_size: ByteSize,
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-):
-    await upload_file_with_aioboto3_managed_transfer(file_size)
-
-
-async def test_upload_file_invalid_raises(
-    storage_s3_client: StorageS3Client,
-    create_file_of_size: Callable[[ByteSize, str | None], Path],
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    file = create_file_of_size(ByteSize(10), None)
-    file_id = create_simcore_file_id(uuid4(), uuid4(), file.name)
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.upload_file(
-            S3BucketName("pytestinvalidbucket"), file, file_id, bytes_transfered_cb=None
-        )
-
-
-@pytest.mark.parametrize(
-    "file_size",
-    [parametrized_file_size("500Mib")],
-    ids=byte_size_ids,
-)
-async def test_copy_file(
-    file_size: ByteSize,
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    src_file, src_file_uuid = await upload_file_with_aioboto3_managed_transfer(
-        file_size
-    )
-    dst_file_name = faker.file_name()
-    dst_file_uuid = create_simcore_file_id(uuid4(), uuid4(), dst_file_name)
-    await storage_s3_client.copy_file(
-        storage_s3_bucket, src_file_uuid, dst_file_uuid, bytes_transfered_cb=None
-    )
-
-    # check the object is uploaded
-    response = await storage_s3_client.client.list_objects_v2(Bucket=storage_s3_bucket)
-    assert "Contents" in response
-    list_objects = response["Contents"]
-    assert len(list_objects) == 2
-
-    list_file_uuids = [src_file_uuid, dst_file_uuid]
-    for s3_obj in list_objects:
-        assert "ETag" in s3_obj
-        assert "Key" in s3_obj
-        assert s3_obj["Key"] in list_file_uuids
-        list_file_uuids.pop(list_file_uuids.index(s3_obj["Key"]))
-        assert "Size" in s3_obj
-        assert s3_obj["Size"] == src_file.stat().st_size
-
-
-async def test_copy_file_invalid_raises(
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID],
-    faker: Faker,
-):
-    _, src_file_uuid = await upload_file_with_aioboto3_managed_transfer(ByteSize(1024))
-    dst_file_name = faker.file_name()
-    dst_file_uuid = create_simcore_file_id(uuid4(), uuid4(), dst_file_name)
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.copy_file(
-            S3BucketName("pytestinvalidbucket"),
-            src_file_uuid,
-            dst_file_uuid,
-            bytes_transfered_cb=None,
-        )
-    with pytest.raises(S3KeyNotFoundError):
-        await storage_s3_client.copy_file(
-            storage_s3_bucket,
-            SimcoreS3FileID("missing_file_uuid"),
-            dst_file_uuid,
-            bytes_transfered_cb=None,
-        )
-
-
-async def test_list_files(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-):
-    list_files = await storage_s3_client.list_files(storage_s3_bucket, prefix="")
-    assert list_files == []
-
-    NUM_FILES = 12
-    FILE_SIZE = parse_obj_as(ByteSize, "11Mib")
-    uploaded_files: list[tuple[Path, SimcoreS3FileID]] = []
-    for _ in range(NUM_FILES):
-        uploaded_files.append(
-            await upload_file_with_aioboto3_managed_transfer(FILE_SIZE)
-        )
-
-    list_files = await storage_s3_client.list_files(storage_s3_bucket, prefix="")
-    assert len(list_files) == NUM_FILES
-
-    # check with limits
-    list_files = await storage_s3_client.list_files(
-        storage_s3_bucket, prefix="", max_files_to_list=NUM_FILES - 2
-    )
-    assert len(list_files) == NUM_FILES - 2
-
-    # test with prefix
-    file, file_id = choice(uploaded_files)
-    list_files = await storage_s3_client.list_files(storage_s3_bucket, prefix=file_id)
-    assert len(list_files) == 1
-    assert list_files[0].file_id == file_id
-    assert list_files[0].size == file.stat().st_size
-
-
-async def test_list_files_invalid_bucket_raises(
-    storage_s3_client: StorageS3Client,
-):
-    with pytest.raises(S3BucketInvalidError):
-        await storage_s3_client.list_files(
-            S3BucketName("pytestinvalidbucket"), prefix=""
-        )
-
-
-@dataclass
-class PaginationCase:
-    total_files: int
-    items_per_page: int
-    mock_upper_bound: int
-
-
-@pytest.mark.parametrize(
-    "pagination_case",
-    [
-        pytest.param(
-            PaginationCase(
-                total_files=10,
-                items_per_page=2,
-                mock_upper_bound=1000,
-            ),
-            id="normal_query",
-        ),
-        pytest.param(
-            PaginationCase(
-                total_files=10,
-                items_per_page=10,
-                mock_upper_bound=2,
-            ),
-            id="page_too_big",
-        ),
-        pytest.param(
-            PaginationCase(
-                total_files=100,
-                items_per_page=2,
-                mock_upper_bound=2,
-            ),
-            id="regression_more_files_than_limit",
-        ),
-    ],
-)
-async def test_list_objects_v2_paginated_and_list_all_objects_gen(
-    mocker: MockFixture,
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-    pagination_case: PaginationCase,
-):
-    mocker.patch(
-        "simcore_service_storage.s3_client._MAX_ITEMS_PER_PAGE",
-        pagination_case.mock_upper_bound,
-    )
-
-    FILE_SIZE: ByteSize = parse_obj_as(ByteSize, "1")
-
-    # create some files
-    created_files_data: list[tuple[Path, str]] = await asyncio.gather(
-        *[
-            upload_file_with_aioboto3_managed_transfer(FILE_SIZE)
-            for _ in range(pagination_case.total_files)
-        ]
-    )
-
-    # fetch all items using pagination
-    listing_requests: list[ObjectTypeDef] = []
-
-    async for page_items in _list_objects_v2_paginated_gen(
-        client=storage_s3_client.client,
-        bucket=storage_s3_bucket,
-        prefix="",  # all items
-    ):
-        listing_requests.extend(page_items)
-
-    assert len(listing_requests) == pagination_case.total_files
-
-    created_files = [x[1] for x in created_files_data]
-    queried_files = [x["Key"] for x in listing_requests]
-    assert len(created_files) == len(queried_files)
-    assert set(created_files) == set(queried_files)
-
-    # fetch all items using the generator make sure it does not break
-    generator_query = []
-    async for s3_objects in storage_s3_client.list_all_objects_gen(
-        storage_s3_bucket,
-        prefix="",
-    ):
-        generator_query.extend(s3_objects)
-
-    assert len(generator_query) == len(created_files)
-
-
-async def test_file_exists(
-    storage_s3_client: StorageS3Client,
-    storage_s3_bucket: S3BucketName,
-    upload_file_with_aioboto3_managed_transfer: Callable[
-        [ByteSize], Awaitable[tuple[Path, SimcoreS3FileID]]
-    ],
-):
-    FILE_SIZE: ByteSize = parse_obj_as(ByteSize, "1")
-
-    _, simcore_s3_file_id = await upload_file_with_aioboto3_managed_transfer(FILE_SIZE)
-    assert (
-        await storage_s3_client.file_exists(
-            bucket=storage_s3_bucket, s3_object=simcore_s3_file_id
-        )
-        is True
-    )
-
-    assert (
-        await storage_s3_client.file_exists(
-            bucket=storage_s3_bucket, s3_object="fake-missing-object"
-        )
-        is False
-    )
diff --git a/services/storage/tests/unit/test_settings.py b/services/storage/tests/unit/test_settings.py
deleted file mode 100644
index e18d35f977b..00000000000
--- a/services/storage/tests/unit/test_settings.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
-import logging
-
-from simcore_service_storage.settings import Settings
-
-
-def test_loading_env_devel_in_settings(project_env_devel_environment):
-    settings = Settings.create_from_envs()
-    print("captured settings: \n", settings.json(indent=2))
-
-    assert settings.log_level == logging.INFO
-    assert (
-        settings.STORAGE_POSTGRES.dsn
-        == "postgresql://test:secret@localhost:5432/testdb"
-    )
diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py
index c1ca13dac78..56ab5e06407 100644
--- a/services/storage/tests/unit/test_simcore_s3_dsm.py
+++ b/services/storage/tests/unit/test_simcore_s3_dsm.py
@@ -61,10 +61,14 @@ async def _copy_s3_path(s3_file_id_to_copy: SimcoreS3FileID) -> None:
     )
 
     async def _count_files(s3_file_id: SimcoreS3FileID, expected_count: int) -> None:
-        files = await get_s3_client(simcore_s3_dsm.app).list_files(
-            simcore_s3_dsm.simcore_bucket_name, prefix=s3_file_id
-        )
-        assert len(files) == expected_count
+        s3_client = get_s3_client(simcore_s3_dsm.app)
+        counted_files = 0
+        async for s3_objects in s3_client.list_objects_paginated(
+            bucket=simcore_s3_dsm.simcore_bucket_name, prefix=s3_file_id
+        ):
+            counted_files += len(s3_objects)
+
+        assert counted_files == expected_count
 
     # using directory
diff --git a/services/storage/tests/unit/test_utils_handlers.py b/services/storage/tests/unit/test_utils_handlers.py
index f471126ca06..a5f82a6b893 100644
--- a/services/storage/tests/unit/test_utils_handlers.py
+++ b/services/storage/tests/unit/test_utils_handlers.py
@@ -6,7 +6,7 @@
 import pytest
 from aiohttp import web
 from aiohttp.typedefs import Handler
-from aws_library.s3.errors import S3KeyNotFoundError
+from aws_library.s3 import S3KeyNotFoundError
 from pydantic import BaseModel, ValidationError
 from pytest_mock import MockerFixture
 from servicelib.aiohttp.aiopg_utils import DBAPIError
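The test_simcore_s3_dsm.py hunk above swaps a one-shot list_files call for the new list_objects_paginated async generator, so results are consumed page by page. Counting objects then becomes a loop over pages; a minimal sketch of the same pattern (method name as in the hunk, everything else illustrative):

async def count_objects(s3_client, *, bucket: str, prefix: str) -> int:
    # each iteration yields one page, i.e. a list of object records
    total = 0
    async for page in s3_client.list_objects_paginated(bucket=bucket, prefix=prefix):
        total += len(page)
    return total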
From 1a97db7a7a2357c85295ede8caa822c557adb86d Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 28 Jun 2024 14:08:48 +0200
Subject: [PATCH 069/219] =?UTF-8?q?=E2=AC=86=EF=B8=8FMaintenance:=20upgrad?=
 =?UTF-8?q?e=20uv=20to=200.2=20in=20Dockerfiles=20(#6008)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/postgres-database/docker/Dockerfile | 2 +-
 packages/service-integration/Dockerfile      | 2 +-
 requirements/tools/Dockerfile                | 2 +-
 scripts/mypy/Dockerfile                      | 2 +-
 services/agent/Dockerfile                    | 2 +-
 services/api-server/Dockerfile               | 2 +-
 services/autoscaling/Dockerfile              | 2 +-
 services/catalog/Dockerfile                  | 2 +-
 services/clusters-keeper/Dockerfile          | 2 +-
 services/dask-sidecar/Dockerfile             | 2 +-
 services/datcore-adapter/Dockerfile          | 2 +-
 services/director-v2/Dockerfile              | 2 +-
 services/dynamic-scheduler/Dockerfile        | 2 +-
 services/dynamic-sidecar/Dockerfile          | 2 +-
 services/efs-guardian/Dockerfile             | 2 +-
 services/invitations/Dockerfile              | 2 +-
 services/migration/Dockerfile                | 2 +-
 services/osparc-gateway-server/Dockerfile    | 2 +-
 services/payments/Dockerfile                 | 2 +-
 services/resource-usage-tracker/Dockerfile   | 2 +-
 services/storage/Dockerfile                  | 2 +-
 services/web/Dockerfile                      | 2 +-
 22 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/packages/postgres-database/docker/Dockerfile b/packages/postgres-database/docker/Dockerfile
index 9ef018fdc89..c310ad5c53f 100644
--- a/packages/postgres-database/docker/Dockerfile
+++ b/packages/postgres-database/docker/Dockerfile
@@ -24,7 +24,7 @@ RUN apt-get update \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
diff --git a/packages/service-integration/Dockerfile b/packages/service-integration/Dockerfile
index 0a45be3b4c4..7812c48b66c 100644
--- a/packages/service-integration/Dockerfile
+++ b/packages/service-integration/Dockerfile
@@ -57,7 +57,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/requirements/tools/Dockerfile b/requirements/tools/Dockerfile
index 64669258193..8d01e09395c 100644
--- a/requirements/tools/Dockerfile
+++ b/requirements/tools/Dockerfile
@@ -26,7 +26,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 
 RUN uv venv "${VIRTUAL_ENV}"
diff --git a/scripts/mypy/Dockerfile b/scripts/mypy/Dockerfile
index 930ebf7110b..df2e8397f4f 100644
--- a/scripts/mypy/Dockerfile
+++ b/scripts/mypy/Dockerfile
@@ -16,7 +16,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:$PATH"
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 RUN \
   --mount=type=cache,mode=0755,target=/root/.cache/uv \
diff --git a/services/agent/Dockerfile b/services/agent/Dockerfile
index 5d997b24cac..17a597cbdb9 100644
--- a/services/agent/Dockerfile
+++ b/services/agent/Dockerfile
@@ -75,7 +75,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/api-server/Dockerfile b/services/api-server/Dockerfile
index 6468a4fb18a..ccc7b6f89c7 100644
--- a/services/api-server/Dockerfile
+++ b/services/api-server/Dockerfile
@@ -71,7 +71,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/autoscaling/Dockerfile b/services/autoscaling/Dockerfile
index 903e27fdc28..719a7a697aa 100644
--- a/services/autoscaling/Dockerfile
+++ b/services/autoscaling/Dockerfile
@@ -91,7 +91,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile
index ed5270c29b3..63c537aece1 100644
--- a/services/catalog/Dockerfile
+++ b/services/catalog/Dockerfile
@@ -72,7 +72,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/clusters-keeper/Dockerfile b/services/clusters-keeper/Dockerfile
index 8a9fbc27f53..13283e38aca 100644
--- a/services/clusters-keeper/Dockerfile
+++ b/services/clusters-keeper/Dockerfile
@@ -91,7 +91,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/dask-sidecar/Dockerfile b/services/dask-sidecar/Dockerfile
index 3400eaa414e..335819505be 100644
--- a/services/dask-sidecar/Dockerfile
+++ b/services/dask-sidecar/Dockerfile
@@ -82,7 +82,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
diff --git a/services/datcore-adapter/Dockerfile b/services/datcore-adapter/Dockerfile
index f6c693651d0..40394e44d21 100644
--- a/services/datcore-adapter/Dockerfile
+++ b/services/datcore-adapter/Dockerfile
@@ -72,7 +72,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/director-v2/Dockerfile b/services/director-v2/Dockerfile
index 1b74d8031ef..f87fc623868 100644
--- a/services/director-v2/Dockerfile
+++ b/services/director-v2/Dockerfile
@@ -72,7 +72,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/dynamic-scheduler/Dockerfile b/services/dynamic-scheduler/Dockerfile
index f250232d1df..b0bc9e9846b 100644
--- a/services/dynamic-scheduler/Dockerfile
+++ b/services/dynamic-scheduler/Dockerfile
@@ -71,7 +71,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile
index 0a6e223040f..2c2229fe4ad 100644
--- a/services/dynamic-sidecar/Dockerfile
+++ b/services/dynamic-sidecar/Dockerfile
@@ -102,7 +102,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/efs-guardian/Dockerfile b/services/efs-guardian/Dockerfile
index 4ab1ab6e8fa..129abe9457a 100644
--- a/services/efs-guardian/Dockerfile
+++ b/services/efs-guardian/Dockerfile
@@ -91,7 +91,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/invitations/Dockerfile b/services/invitations/Dockerfile
index 5e96ede97e0..d9673f0c1ae 100644
--- a/services/invitations/Dockerfile
+++ b/services/invitations/Dockerfile
@@ -71,7 +71,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/migration/Dockerfile b/services/migration/Dockerfile
index c40a4fd1dff..05f0bb3ce19 100644
--- a/services/migration/Dockerfile
+++ b/services/migration/Dockerfile
@@ -45,7 +45,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
diff --git a/services/osparc-gateway-server/Dockerfile b/services/osparc-gateway-server/Dockerfile
index f5e9ff32d93..c80ccd9a5bf 100644
--- a/services/osparc-gateway-server/Dockerfile
+++ b/services/osparc-gateway-server/Dockerfile
@@ -74,7 +74,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv
 RUN uv venv "${VIRTUAL_ENV}"
diff --git a/services/payments/Dockerfile b/services/payments/Dockerfile
index e4ba13fe8f0..c29f0ca9fe6 100644
--- a/services/payments/Dockerfile
+++ b/services/payments/Dockerfile
@@ -71,7 +71,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/resource-usage-tracker/Dockerfile b/services/resource-usage-tracker/Dockerfile
index 459025da7f6..34d81cf8801 100644
--- a/services/resource-usage-tracker/Dockerfile
+++ b/services/resource-usage-tracker/Dockerfile
@@ -72,7 +72,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile
index b2e26143008..02974a9485b 100644
--- a/services/storage/Dockerfile
+++ b/services/storage/Dockerfile
@@ -80,7 +80,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed
 # packages may be moved to production image easily by copying the venv
diff --git a/services/web/Dockerfile b/services/web/Dockerfile
index 9cefb777a8c..ecb19b9afde 100644
--- a/services/web/Dockerfile
+++ b/services/web/Dockerfile
@@ -82,7 +82,7 @@ RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \
 
 # NOTE: install https://github.com/astral-sh/uv ultra-fast rust-based pip replacement
 RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-  pip install uv~=0.1
+  pip install uv~=0.2
 
 # NOTE: python virtualenv is used here such that installed

From 2607824e3f5e068e000494793945a3f191c414f5 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 28 Jun 2024 15:23:32 +0200
Subject: [PATCH 070/219] =?UTF-8?q?=E2=AC=86=EF=B8=8FMaintenance:=20upgrad?=
 =?UTF-8?q?ed=20aioboto3=20(#6009)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/aws-library/requirements/_base.txt   |  12 +-
 packages/aws-library/requirements/_test.txt   |   4 +-
 packages/aws-library/tests/test_s3_client.py  |   3 +-
 packages/simcore-sdk/requirements/_test.txt   |  19 +-
 .../simcore-sdk/requirements/constraints.txt  |  11 -
 services/agent/requirements/_test.txt         |  15 +-
 services/agent/requirements/constraints.txt   |  15 --
 services/autoscaling/requirements/_base.txt   |   9 +-
 services/autoscaling/requirements/_test.txt   |   4 +-
 .../clusters-keeper/requirements/_base.txt    |   9 +-
 .../clusters-keeper/requirements/_test.txt    |   4 +-
 services/director-v2/requirements/_test.txt   |  12 +-
 .../dynamic-sidecar/requirements/_test.txt    |  12 +-
 services/efs-guardian/requirements/_base.txt  | 247 +++++++++++++++++-
 services/efs-guardian/requirements/_test.txt  | 110 ++++++--
 services/efs-guardian/requirements/_tools.txt |  22 +-
 .../requirements/_base.txt                    |   9 +-
 .../requirements/_test.txt                    |   4 +-
 services/storage/requirements/_base.txt       |   9 +-
 services/storage/requirements/_test.txt       |   4 +-
 20 files changed, 425 insertions(+), 109 deletions(-)

diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt
index 519620748c1..590b802f30a 100644
--- a/packages/aws-library/requirements/_base.txt
+++ b/packages/aws-library/requirements/_base.txt
@@ -1,8 +1,8 @@
 aio-pika==9.4.1
     # via -r requirements/../../../packages/service-library/requirements/_base.in
-aioboto3==12.4.0
+aioboto3==13.1.0
     # via -r requirements/_base.in
-aiobotocore==2.12.3
+aiobotocore==2.13.1
     # via aioboto3
 aiocache==0.12.2
     # via -r requirements/_base.in
@@ -11,7 +11,9 @@ aiodebug==2.3.0
 aiodocker==0.21.0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 aiofiles==23.2.1
-    # via -r requirements/../../../packages/service-library/requirements/_base.in
+    # via
+    #   -r requirements/../../../packages/service-library/requirements/_base.in
+    #   aioboto3
 aiohttp==3.9.5
     # via
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -46,9 +48,9 @@ attrs==23.2.0
     #   aiohttp
     #   jsonschema
    #   referencing
-boto3==1.34.69
+boto3==1.34.131
     # via aiobotocore
-botocore==1.34.69
+botocore==1.34.131
     # via
     #   aiobotocore
     #   boto3
diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt
index c8c50583a28..c3bd483ddea 100644
--- a/packages/aws-library/requirements/_test.txt
+++ b/packages/aws-library/requirements/_test.txt
@@ -13,12 +13,12 @@ aws-xray-sdk==2.13.0
     # via moto
 blinker==1.8.1
     # via flask
-boto3==1.34.69
+boto3==1.34.131
     # via
     #   -c requirements/_base.txt
     #   aws-sam-translator
     #   moto
-botocore==1.34.69
+botocore==1.34.131
     # via
     #   -c requirements/_base.txt
     #   aws-xray-sdk
diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py
index 747a2a9b85c..13cc3575943 100644
--- a/packages/aws-library/tests/test_s3_client.py
+++ b/packages/aws-library/tests/test_s3_client.py
@@ -1062,7 +1062,8 @@ async def test_copy_file_invalid_raises(
     file = create_file_of_size(parse_obj_as(ByteSize, "1MiB"))
     uploaded_file = await upload_file(file)
     dst_object_key = faker.file_name()
-    with pytest.raises(S3BucketInvalidError, match=f"{non_existing_s3_bucket}"):
+    # NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError
+    with pytest.raises(S3KeyNotFoundError, match=f"{non_existing_s3_bucket}"):
         await simcore_s3_api.copy_object(
             bucket=non_existing_s3_bucket,
             src_object_key=uploaded_file.s3_key,
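The test change above pins down a behavioral shift in aioboto3 13.1.0: copying from a non-existing bucket now surfaces as S3KeyNotFoundError rather than S3BucketInvalidError. Callers that branched on the old exception type need updating; a hedged sketch of the adjusted guard (names taken from the test, the copy_object signature is assumed):

from aws_library.s3 import S3KeyNotFoundError

async def copy_if_possible(simcore_s3_api, *, bucket, src_object_key, dst_object_key) -> bool:
    # since aioboto3 13.1.0 a missing bucket is reported like a missing key
    try:
        await simcore_s3_api.copy_object(
            bucket=bucket,
            src_object_key=src_object_key,
            dst_object_key=dst_object_key,
        )
    except S3KeyNotFoundError:
        return False
    return True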
requirements/./constraints.txt - # -r requirements/_test.in -aiobotocore==2.3.0 +aioboto3==12.4.0 + # via -r requirements/_test.in +aiobotocore==2.12.3 # via aioboto3 aiohttp==3.8.5 # via @@ -36,13 +34,12 @@ aws-xray-sdk==2.13.0 # via moto blinker==1.8.1 # via flask -boto3==1.21.21 +boto3==1.34.69 # via - # -c requirements/./constraints.txt # aiobotocore # aws-sam-translator # moto -botocore==1.24.21 +botocore==1.34.69 # via # aiobotocore # aws-xray-sdk @@ -249,7 +246,7 @@ rsa==4.9 # via # -c requirements/../../../requirements/constraints.txt # python-jose -s3transfer==0.5.2 +s3transfer==0.10.2 # via boto3 sarif-om==1.0.4 # via cfn-lint diff --git a/services/agent/requirements/constraints.txt b/services/agent/requirements/constraints.txt index 5c14f8c59bc..b52ac1ee492 100644 --- a/services/agent/requirements/constraints.txt +++ b/services/agent/requirements/constraints.txt @@ -3,21 +3,6 @@ # CONSTRAINTS DUE TO TEST LIBRARIES # -# -# BELOW COSNTRAINTS are required to install moto[server] -# - -# There are incompatible versions in the resolved dependencies: -# boto3==1.21.21 (from -c requirements/./constraints.txt (line 3)) -# boto3<1.24.60,>=1.24.59 (from aiobotocore[boto3]==2.4.0->aioboto3==10.1.0->-r requirements/_test.in (line 13)) -# boto3>=1.9.201 (from moto[server]==4.0.1->-r requirements/_test.in (line 18)) -aioboto3<=9.6.0 -# There are incompatible versions in the resolved dependencies: -# botocore>=1.12.201 (from moto[server]==4.0.1->-r requirements/_test.in (line 18)) -# botocore>=1.11.3 (from aws-xray-sdk==2.10.0->moto[server]==4.0.1->-r requirements/_test.in (line 18)) -# botocore<1.28.0,>=1.27.95 (from boto3==1.24.95->moto[server]==4.0.1->-r requirements/_test.in (line 18)) -# botocore<1.24.22,>=1.24.21 (from aiobotocore[boto3]==2.3.0->aioboto3==9.6.0->-r requirements/_test.in (line 13)) -boto3<=1.21.21 # There are incompatible versions in the resolved dependencies: # jsonschema==3.2.0 (from -c requirements/_base.txt (line 159)) # jsonschema~=3.2 (from -c requirements/./constraints.txt (line 12)) diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index 4ac010d1e2a..f4b6ba53caa 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -3,9 +3,9 @@ aio-pika==9.4.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==12.4.0 +aioboto3==13.1.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -aiobotocore==2.12.3 +aiobotocore==2.13.1 # via aioboto3 aiocache==0.12.2 # via @@ -27,6 +27,7 @@ aiofiles==23.2.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # aioboto3 aiohttp==3.9.5 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -74,9 +75,9 @@ attrs==23.2.0 # aiohttp # jsonschema # referencing -boto3==1.34.69 +boto3==1.34.131 # via aiobotocore -botocore==1.34.69 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index 
e0303b6f9ec..194970642d2 100644 --- a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -23,12 +23,12 @@ aws-xray-sdk==2.13.1 # via moto blinker==1.8.2 # via flask -boto3==1.34.69 +boto3==1.34.131 # via # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.34.69 +botocore==1.34.131 # via # -c requirements/_base.txt # aws-xray-sdk diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index ec17fdb3216..e3f1840ef22 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -3,9 +3,9 @@ aio-pika==9.4.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==12.4.0 +aioboto3==13.1.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -aiobotocore==2.12.3 +aiobotocore==2.13.1 # via aioboto3 aiocache==0.12.2 # via -r requirements/../../../packages/aws-library/requirements/_base.in @@ -24,6 +24,7 @@ aiofiles==23.2.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # aioboto3 aiohttp==3.9.5 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -71,9 +72,9 @@ attrs==23.2.0 # aiohttp # jsonschema # referencing -boto3==1.34.69 +boto3==1.34.131 # via aiobotocore -botocore==1.34.69 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 1b1f5e359e2..79eb749f0a1 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -38,12 +38,12 @@ aws-xray-sdk==2.13.1 # via moto blinker==1.8.2 # via flask -boto3==1.34.69 +boto3==1.34.131 # via # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.34.69 +botocore==1.34.131 # via # -c requirements/_base.txt # aws-xray-sdk diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index e6bc91f7458..d346f7f18fc 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -2,10 +2,14 @@ aio-pika==9.4.1 # via # -c requirements/_base.txt # -r requirements/_test.in -aioboto3==12.4.0 +aioboto3==13.1.0 # via -r requirements/_test.in -aiobotocore==2.12.3 +aiobotocore==2.13.1 # via aioboto3 +aiofiles==23.2.1 + # via + # -c requirements/_base.txt + # aioboto3 aiohttp==3.9.5 # via # -c requirements/../../../requirements/constraints.txt @@ -45,9 +49,9 @@ attrs==23.2.0 # pytest-docker bokeh==3.4.1 # via dask -boto3==1.34.69 +boto3==1.34.131 # via aiobotocore -botocore==1.34.69 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index 4e9c18c15a2..673b9a6da8e 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -1,7 +1,11 @@ -aioboto3==12.4.0 +aioboto3==13.1.0 # via -r requirements/_test.in -aiobotocore==2.12.3 
+aiobotocore==2.13.1 # via aioboto3 +aiofiles==23.2.1 + # via + # -c requirements/_base.txt + # aioboto3 aiohttp==3.9.3 # via # -c requirements/../../../requirements/constraints.txt @@ -25,9 +29,9 @@ attrs==23.2.0 # via # -c requirements/_base.txt # aiohttp -boto3==1.34.69 +boto3==1.34.131 # via aiobotocore -botocore==1.34.69 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt index 8012fdf97a5..c0e83d8b8c4 100644 --- a/services/efs-guardian/requirements/_base.txt +++ b/services/efs-guardian/requirements/_base.txt @@ -1,14 +1,44 @@ aio-pika==9.4.1 -aioboto3==13.0.0 -aiobotocore==2.13.0 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +aioboto3==13.1.0 + # via -r requirements/../../../packages/aws-library/requirements/_base.in +aiobotocore==2.13.1 # via aioboto3 aiocache==0.12.2 + # via -r requirements/../../../packages/aws-library/requirements/_base.in aiodebug==2.3.0 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.21.0 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==23.2.1 - # via aioboto3 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # aioboto3 aiohttp==3.9.5 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # aiobotocore # aiodocker aioitertools==0.11.0 @@ -24,6 +54,15 @@ anyio==4.4.0 # httpx # starlette arrow==1.3.0 + # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via # aiohttp @@ -44,6 +83,18 @@ botocore-stubs==1.34.94 # via types-aiobotocore certifi==2024.2.2 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # httpcore # httpx click==8.1.7 @@ -59,8 +110,27 @@ exceptiongroup==1.2.1 fast-depends==2.4.3 # via faststream fastapi==0.99.1 - # via prometheus-fastapi-instrumentator + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator faststream==0.5.9 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via # aiohttp @@ -72,6 +142,20 @@ h11==0.14.0 httpcore==1.0.5 # via httpx httpx==0.27.0 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in idna==3.7 # via # anyio @@ -83,6 
+167,12 @@ jmespath==1.0.1 # boto3 # botocore jsonschema==4.22.0 + # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2023.7.1 # via jsonschema markdown-it-py==3.0.0 @@ -94,31 +184,123 @@ multidict==6.0.5 # aiohttp # yarl orjson==3.10.3 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in packaging==24.0 + # via -r requirements/_base.in pamqp==3.3.0 # via aiormq prometheus-client==0.20.0 - # via prometheus-fastapi-instrumentator + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in pydantic==1.10.15 # via + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi pygments==2.18.0 # via rich pyinstrument==4.6.2 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via # 
arrow # botocore pyyaml==6.0.1 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in redis==5.0.4 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/./constraints.txt + # -c requirements/../../../packages/service-library/requirements/./constraints.txt # jsonschema # jsonschema-specifications rich==13.7.1 - # via typer + # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer rpds-py==0.18.1 # via # jsonschema @@ -126,6 +308,7 @@ rpds-py==0.18.1 s3transfer==0.10.1 # via boto3 sh==2.0.6 + # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 # via typer six==1.16.0 @@ -135,13 +318,45 @@ sniffio==1.3.1 # anyio # httpx starlette==0.27.0 - # via fastapi + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi tenacity==8.3.0 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in toolz==0.12.1 + # 
via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.66.4 + # via + # -c requirements/../../../packages/service-library/requirements/./_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in typer==0.12.3 - # via faststream + # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # faststream types-aiobotocore==2.13.0 + # via -r requirements/../../../packages/aws-library/requirements/_base.in types-aiobotocore-ec2==2.13.0 # via types-aiobotocore types-aiobotocore-s3==2.13.0 @@ -164,8 +379,22 @@ typing-extensions==4.11.0 # types-aiobotocore-s3 # uvicorn urllib3==2.2.1 - # via botocore + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # botocore uvicorn==0.30.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in wrapt==1.16.0 # via aiobotocore yarl==1.9.4 diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index 0a40d9e8f25..8aa082f28d8 100644 --- a/services/efs-guardian/requirements/_test.txt +++ 
b/services/efs-guardian/requirements/_test.txt @@ -1,19 +1,32 @@ aiodocker==0.21.0 + # via + # -c requirements/_base.txt + # -r requirements/_test.in aiohttp==3.9.5 - # via aiodocker + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # aiodocker aiosignal==1.3.1 - # via aiohttp + # via + # -c requirements/_base.txt + # aiohttp antlr4-python3-runtime==4.13.1 # via moto anyio==4.4.0 - # via httpx + # via + # -c requirements/_base.txt + # httpx asgi-lifespan==2.1.0 + # via -r requirements/_test.in async-timeout==4.0.3 # via + # -c requirements/_base.txt # aiohttp # redis attrs==23.2.0 # via + # -c requirements/_base.txt # aiohttp # jschema-to-python # jsonschema @@ -27,16 +40,20 @@ blinker==1.8.2 # via flask boto3==1.34.106 # via + # -c requirements/_base.txt # aws-sam-translator # moto botocore==1.34.106 # via + # -c requirements/_base.txt # aws-xray-sdk # boto3 # moto # s3transfer certifi==2024.2.2 # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt # httpcore # httpx # requests @@ -47,23 +64,35 @@ cfn-lint==0.87.3 charset-normalizer==3.3.2 # via requests click==8.1.7 - # via flask + # via + # -c requirements/_base.txt + # flask coverage==7.5.3 - # via pytest-cov + # via + # -r requirements/_test.in + # pytest-cov cryptography==42.0.7 # via + # -c requirements/../../../requirements/constraints.txt # joserfc # moto debugpy==1.8.1 + # via -r requirements/_test.in deepdiff==7.0.1 + # via -r requirements/_test.in docker==7.1.0 - # via moto + # via + # -r requirements/_test.in + # moto exceptiongroup==1.2.1 # via + # -c requirements/_base.txt # anyio # pytest faker==25.3.0 + # via -r requirements/_test.in fakeredis==2.23.2 + # via -r requirements/_test.in flask==3.0.3 # via # flask-cors @@ -72,18 +101,28 @@ flask-cors==4.0.1 # via moto frozenlist==1.4.1 # via + # -c requirements/_base.txt # aiohttp # aiosignal graphql-core==3.2.3 # via moto h11==0.14.0 - # via httpcore + # via + # -c requirements/_base.txt + # httpcore httpcore==1.0.5 - # via httpx + # via + # -c requirements/_base.txt + # httpx httpx==0.27.0 - # via respx + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -r requirements/_test.in + # respx idna==3.7 # via + # -c requirements/_base.txt # anyio # httpx # requests @@ -94,10 +133,12 @@ itsdangerous==2.2.0 # via flask jinja2==3.1.4 # via + # -c requirements/../../../requirements/constraints.txt # flask # moto jmespath==1.0.1 # via + # -c requirements/_base.txt # boto3 # botocore joserfc==0.10.0 @@ -116,6 +157,7 @@ jsonpointer==2.4 # via jsonpatch jsonschema==4.22.0 # via + # -c requirements/_base.txt # aws-sam-translator # cfn-lint # openapi-schema-validator @@ -124,6 +166,7 @@ jsonschema-path==0.3.2 # via openapi-spec-validator jsonschema-specifications==2023.7.1 # via + # -c requirements/_base.txt # jsonschema # openapi-schema-validator junit-xml==1.9 @@ -137,10 +180,12 @@ markupsafe==2.1.5 # jinja2 # werkzeug moto==5.0.8 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy multidict==6.0.5 # via + # -c requirements/_base.txt # aiohttp # yarl networkx==3.3 @@ -152,8 +197,11 @@ openapi-spec-validator==0.7.1 ordered-set==4.1.0 # via deepdiff packaging==24.0 - # via pytest + # via + # -c requirements/_base.txt + # pytest parse==1.20.1 + # via -r requirements/_test.in pathable==0.4.3 # via jsonschema-path pbr==6.0.0 @@ -165,39 +213,58 @@ pluggy==1.5.0 ply==3.11 # via jsonpath-ng psutil==5.9.8 + # via -r requirements/_test.in 
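NOTE: the -c requirements/_base.txt markers added throughout this file keep the test resolution (moto 5.x, boto3/botocore 1.34.x) aligned with the runtime pins, which is what let the old aioboto3/boto3 constraint files be dropped. A minimal sketch of the in-process AWS mock these pins support, using only APIs from the versions listed here (bucket and key names illustrative):

    import boto3
    from moto import mock_aws  # moto>=5 replaces the per-service mock_s3 decorators

    @mock_aws
    def test_bucket_roundtrip():
        s3 = boto3.client("s3", region_name="us-east-1")
        s3.create_bucket(Bucket="efs-guardian-tests")
        s3.put_object(Bucket="efs-guardian-tests", Key="probe", Body=b"ok")
        # the whole roundtrip happens against moto's in-memory backend
        assert s3.get_object(Bucket="efs-guardian-tests", Key="probe")["Body"].read() == b"ok"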
py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi pydantic==1.10.15 - # via aws-sam-translator + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # aws-sam-translator pyparsing==3.1.2 # via moto pytest==8.2.1 # via + # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock pytest-asyncio==0.21.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in pytest-cov==5.0.0 + # via -r requirements/_test.in pytest-mock==3.14.0 + # via -r requirements/_test.in pytest-runner==6.0.1 + # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via + # -c requirements/_base.txt # botocore # faker # moto python-dotenv==1.0.1 + # via -r requirements/_test.in pyyaml==6.0.1 # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt # cfn-lint # jsonschema-path # moto # responses redis==5.0.4 - # via fakeredis + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis referencing==0.29.3 # via + # -c requirements/_base.txt # jsonschema # jsonschema-path # jsonschema-specifications @@ -212,25 +279,31 @@ requests==2.32.2 responses==0.25.0 # via moto respx==0.21.1 + # via -r requirements/_test.in rfc3339-validator==0.1.4 # via openapi-schema-validator rpds-py==0.18.1 # via + # -c requirements/_base.txt # jsonschema # referencing s3transfer==0.10.1 - # via boto3 + # via + # -c requirements/_base.txt + # boto3 sarif-om==1.0.4 # via cfn-lint setuptools==70.0.0 # via moto six==1.16.0 # via + # -c requirements/_base.txt # junit-xml # python-dateutil # rfc3339-validator sniffio==1.3.1 # via + # -c requirements/_base.txt # anyio # asgi-lifespan # httpx @@ -244,6 +317,7 @@ tomli==2.0.1 # pytest typing-extensions==4.11.0 # via + # -c requirements/_base.txt # aiodocker # anyio # aws-sam-translator @@ -251,6 +325,8 @@ typing-extensions==4.11.0 # pydantic urllib3==2.2.1 # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt # botocore # docker # requests @@ -260,8 +336,12 @@ werkzeug==3.0.3 # flask # moto wrapt==1.16.0 - # via aws-xray-sdk + # via + # -c requirements/_base.txt + # aws-xray-sdk xmltodict==0.13.0 # via moto yarl==1.9.4 - # via aiohttp + # via + # -c requirements/_base.txt + # aiohttp diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index a141a791764..986b079511d 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -1,13 +1,17 @@ astroid==3.2.2 # via pylint black==24.4.2 + # via -r requirements/../../../requirements/devenv.txt build==1.2.1 # via pip-tools bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit click==8.1.7 # via + # -c requirements/_base.txt + # -c requirements/_test.txt # black # pip-tools dill==0.3.8 @@ -19,7 +23,9 @@ filelock==3.14.0 identify==2.5.36 # via pre-commit isort==5.13.2 - # via pylint + # via + # -r requirements/../../../requirements/devenv.txt + # pylint mccabe==0.7.0 # via pylint mypy-extensions==1.0.0 @@ -28,6 +34,8 @@ nodeenv==1.8.0 # via pre-commit packaging==24.0 # via + # -c requirements/_base.txt + # -c requirements/_test.txt # black # build pathspec==0.12.1 @@ -35,28 +43,37 @@ pathspec==0.12.1 pip==24.0 # via pip-tools pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt platformdirs==4.2.2 # via # black # pylint # virtualenv pre-commit==3.7.1 + 
# via -r requirements/../../../requirements/devenv.txt pylint==3.2.2 + # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via # build # pip-tools pyyaml==6.0.1 # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -c requirements/_test.txt # pre-commit # watchdog ruff==0.4.5 + # via -r requirements/../../../requirements/devenv.txt setuptools==70.0.0 # via + # -c requirements/_test.txt # nodeenv # pip-tools tomli==2.0.1 # via + # -c requirements/_test.txt # black # build # pip-tools @@ -65,10 +82,13 @@ tomlkit==0.12.5 # via pylint typing-extensions==4.11.0 # via + # -c requirements/_base.txt + # -c requirements/_test.txt # astroid # black virtualenv==20.26.2 # via pre-commit watchdog==4.0.1 + # via -r requirements/_tools.in wheel==0.43.0 # via pip-tools diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 9165d4a0d7f..65f5a6accd6 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -3,9 +3,9 @@ aio-pika==9.4.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==12.3.0 +aioboto3==13.1.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -aiobotocore==2.11.2 +aiobotocore==2.13.1 # via aioboto3 aiocache==0.12.2 # via @@ -26,6 +26,7 @@ aiofiles==23.2.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # aioboto3 aiohttp==3.9.3 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -80,9 +81,9 @@ attrs==23.2.0 # aiohttp # jsonschema # referencing -boto3==1.34.34 +boto3==1.34.131 # via aiobotocore -botocore==1.34.34 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index 912d6adf4c0..40ab0a54500 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -27,12 +27,12 @@ aws-xray-sdk==2.13.0 # via moto blinker==1.8.1 # via flask -boto3==1.34.34 +boto3==1.34.131 # via # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.34.34 +botocore==1.34.131 # via # -c requirements/_base.txt # aws-xray-sdk diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index cc8627570ff..0856ab4fda0 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -3,11 +3,11 @@ aio-pika==9.4.1 # -c requirements/../../../packages/service-library/requirements/./_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==12.3.0 +aioboto3==13.1.0 # via # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/_base.in -aiobotocore==2.11.2 +aiobotocore==2.13.1 # via aioboto3 
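NOTE: in every file touched by this upgrade, aiofiles is now annotated "via aioboto3": the 13.x releases declare aiofiles as their own dependency, presumably for the async file-transfer helpers, so the pin no longer comes only from service-library. A minimal usage sketch of those helpers (bucket, key and file names illustrative):

    import asyncio
    import aioboto3

    async def upload(path: str, bucket: str, key: str) -> None:
        session = aioboto3.Session()
        async with session.client("s3") as s3:
            # upload_file streams the local file without blocking the event loop
            await s3.upload_file(path, bucket, key)

    asyncio.run(upload("data.bin", "my-bucket", "data.bin"))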
aiocache==0.12.2 # via -r requirements/../../../packages/aws-library/requirements/_base.in @@ -27,6 +27,7 @@ aiofiles==23.2.1 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in + # aioboto3 aiohttp==3.9.3 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -92,9 +93,9 @@ attrs==23.2.0 # aiohttp # jsonschema # referencing -boto3==1.34.34 +boto3==1.34.131 # via aiobotocore -botocore==1.34.34 +botocore==1.34.131 # via # aiobotocore # boto3 diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 00596fd9e26..e52deb2dba9 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -32,12 +32,12 @@ aws-xray-sdk==2.13.0 # via moto blinker==1.8.1 # via flask -boto3==1.34.34 +boto3==1.34.131 # via # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.34.34 +botocore==1.34.131 # via # -c requirements/_base.txt # aws-xray-sdk From 4c53a1261574de071865e5dfd9d3e12c064c3ace Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 1 Jul 2024 09:57:20 +0200 Subject: [PATCH 071/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20New=20fakes=20in?= =?UTF-8?q?=20dev/catalog/service=20(#6012)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../api_schemas_catalog/services.py | 2 +- .../api_schemas_webserver/catalog.py | 212 ++++++++++++++---- .../catalog/_handlers.py | 10 +- 3 files changed, 177 insertions(+), 47 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index 55493c7223c..116140306e5 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -105,7 +105,7 @@ class Config: } }, "owner": "redpandas@wonderland.com", - } + }, } diff --git a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py index 929165329c2..f69af39fb0b 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py @@ -1,4 +1,3 @@ -from copy import deepcopy from typing import Any, ClassVar, TypeAlias from pydantic import Extra, Field @@ -100,18 +99,137 @@ class Config(_BaseCommonApiExtension.Config): ServiceOutputsGetDict: TypeAlias = dict[ServicePortKey, ServiceOutputGet] -_EXAMPLE: dict[str, Any] = deepcopy( - api_schemas_catalog_services.ServiceGet.Config.schema_extra["example"] -) -_EXAMPLE.update( - { +_EXAMPLE_FILEPICKER: dict[str, Any] = { + **api_schemas_catalog_services.ServiceGet.Config.schema_extra["example"], + **{ "inputs": { f"input{i}": example for i, example in enumerate(ServiceInputGet.Config.schema_extra["examples"]) }, "outputs": {"outFile": ServiceOutputGet.Config.schema_extra["example"]}, - } -) + }, +} + + +_EXAMPLE_SLEEPER: dict[str, Any] = { + "name": "sleeper", + "thumbnail": None, + "description": "A service which awaits for time to pass, two times.", + "classifiers": [], + "quality": {}, + "accessRights": {"1": 
{"execute_access": True, "write_access": False}}, + "key": "simcore/services/comp/itis/sleeper", + "version": "2.2.1", + "version_display": "2 Xtreme", + "integration-version": "1.0.0", + "type": "computational", + "authors": [ + { + "name": "Author Bar", + "email": "author@acme.com", + "affiliation": "ACME", + }, + ], + "contact": "contact@acme.com", + "inputs": { + "input_1": { + "displayOrder": 1, + "label": "File with int number", + "description": "Pick a file containing only one integer", + "type": "data:text/plain", + "fileToKeyMap": {"single_number.txt": "input_1"}, + "keyId": "input_1", + }, + "input_2": { + "unitLong": "second", + "unitShort": "s", + "label": "Sleep interval", + "description": "Choose an amount of time to sleep in range [0:]", + "keyId": "input_2", + "displayOrder": 2, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Sleep interval", + "type": "integer", + "x_unit": "second", + "minimum": 0, + }, + "defaultValue": 2, + }, + "input_3": { + "displayOrder": 3, + "label": "Fail after sleep", + "description": "If set to true will cause service to fail after it sleeps", + "type": "boolean", + "defaultValue": False, + "keyId": "input_3", + }, + "input_4": { + "unitLong": "meter", + "unitShort": "m", + "label": "Distance to bed", + "description": "It will first walk the distance to bed", + "keyId": "input_4", + "displayOrder": 4, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Distance to bed", + "type": "integer", + "x_unit": "meter", + }, + "defaultValue": 0, + }, + "input_5": { + "unitLong": "byte", + "unitShort": "B", + "label": "Dream (or nightmare) of the night", + "description": "Defines the size of the dream that will be generated [0:]", + "keyId": "input_5", + "displayOrder": 5, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Dream of the night", + "type": "integer", + "x_unit": "byte", + "minimum": 0, + }, + "defaultValue": 0, + }, + }, + "outputs": { + "output_1": { + "displayOrder": 1, + "label": "File containing one random integer", + "description": "Integer is generated in range [1-9]", + "type": "data:text/plain", + "fileToKeyMap": {"single_number.txt": "output_1"}, + "keyId": "output_1", + }, + "output_2": { + "unitLong": "second", + "unitShort": "s", + "label": "Random sleep interval", + "description": "Interval is generated in range [1-9]", + "keyId": "output_2", + "displayOrder": 2, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Random sleep interval", + "type": "integer", + "x_unit": "second", + }, + }, + "output_3": { + "displayOrder": 3, + "label": "Dream output", + "description": "Contains some random data representing a dream", + "type": "data:text/plain", + "fileToKeyMap": {"dream.txt": "output_3"}, + "keyId": "output_3", + }, + }, + "owner": "owner@acme.com", +} class ServiceGet(api_schemas_catalog_services.ServiceGet): @@ -124,7 +242,7 @@ class ServiceGet(api_schemas_catalog_services.ServiceGet): ) class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = {"example": _EXAMPLE} + schema_extra: ClassVar[dict[str, Any]] = {"example": _EXAMPLE_FILEPICKER} class ServiceUpdate(api_schemas_catalog_services.ServiceUpdate): @@ -148,39 +266,51 @@ class DEVServiceGet(ServiceGet): class Config(OutputSchema.Config): schema_extra: ClassVar[dict[str, Any]] = { - "example": { - **_EXAMPLE, # 1.0.0 - "history": [ - { - "version": "1.0.5", - "version_display": "Summer Release", - "release_date": "2024-07-20T15:00:00", - }, - { - "version": _EXAMPLE["version"], - "compatibility": { 
- "can_update_to": "1.0.5", + "examples": [ + { + **_EXAMPLE_SLEEPER, # v2.2.1 (latest) + "history": [ + { + "version": _EXAMPLE_SLEEPER["version"], + "version_display": "Summer Release", + "release_date": "2024-07-20T15:00:00", }, - }, - {"version": "0.9.11"}, - {"version": "0.9.10"}, - { - "version": "0.9.8", - "compatibility": { - "can_update_to": "0.9.10", + { + "version": "2.0.0", + "compatibility": { + "can_update_to": _EXAMPLE_SLEEPER["version"], + }, }, - }, - { - "version": "0.9.1", - "version_display": "Matterhorn", - "release_date": "2024-01-20T18:49:17", - "compatibility": { - "can_update_to": "0.9.10", + {"version": "0.9.11"}, + {"version": "0.9.10"}, + { + "version": "0.9.8", + "compatibility": { + "can_update_to": "0.9.11", + }, }, - }, - {"version": "0.9.0"}, - {"version": "0.8.0"}, - {"version": "0.1.0"}, - ], - } + { + "version": "0.9.1", + "version_display": "Matterhorn", + "release_date": "2024-01-20T18:49:17", + "compatibility": { + "can_update_to": "0.9.11", + }, + }, + {"version": "0.9.0"}, + {"version": "0.8.0"}, + {"version": "0.1.0"}, + ], + }, + { + **_EXAMPLE_FILEPICKER, + "history": [ + { + "version": _EXAMPLE_FILEPICKER["version"], + "version_display": "Odei Release", + "release_date": "2025-03-25T00:00:00", + } + ], + }, + ] } diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py index fb55630c048..96c96779591 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py @@ -89,9 +89,9 @@ async def dev_list_services_latest(request: Request): assert query_params # nosec _logger.debug("Moking response for %s...", request) - got = [ - parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"]), - ] + got = parse_obj_as( + list[DEVServiceGet], DEVServiceGet.Config.schema_extra["examples"] + ) return envelope_json_response( got[query_params.offset : query_params.offset + query_params.limit] @@ -113,7 +113,7 @@ async def dev_get_service(request: Request): assert path_params # nosec _logger.debug("Moking response for %s...", request) - got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"]) + got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["examples"][0]) got.version = path_params.service_version got.key = path_params.service_key @@ -137,7 +137,7 @@ async def dev_update_service(request: Request): assert update # nosec _logger.debug("Moking response for %s...", request) - got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["example"]) + got = parse_obj_as(DEVServiceGet, DEVServiceGet.Config.schema_extra["examples"][0]) got.version = path_params.service_version got.key = path_params.service_key updated = got.copy(update=update.dict(exclude_unset=True)) From 7ef7cc57f634d557db18be588f5653e5b2c27908 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 1 Jul 2024 11:18:16 +0200 Subject: [PATCH 072/219] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Bugfix:=20R?= =?UTF-8?q?equired=20inputs=20(#6013)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/dashboard/CardBase.js | 4 ++++ .../source/class/osparc/data/model/Node.js | 20 ++++++++++++------- .../class/osparc/widget/NodeDataManager.js | 1 + 3 files changed, 18 insertions(+), 7 deletions(-) diff --git 
a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index 37d2d79be66..6e394f87e89 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -112,6 +112,10 @@ qx.Class.define("osparc.dashboard.CardBase", { } return false; } + // if we get here, it means that it was shared-with-me via an organization + if (sharedWith === "shared-with-me") { + return false; + } return true; } return false; diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 67c3cd85d83..b04a8febd6b 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -53,6 +53,7 @@ qx.Class.define("osparc.data.model.Node", { this.setOutputs({}); this.__inputNodes = []; + this.__inputsRequired = []; if (study) { this.setStudy(study); @@ -135,12 +136,6 @@ qx.Class.define("osparc.data.model.Node", { event: "changeInputs" }, - inputsRequired: { - check: "Array", - init: [], - event: "changeInputsRequired" - }, - outputs: { check: "Object", nullable: false, @@ -337,6 +332,7 @@ qx.Class.define("osparc.data.model.Node", { members: { __metaData: null, __inputNodes: null, + __inputsRequired: null, __settingsForm: null, __posX: null, __posY: null, @@ -882,6 +878,16 @@ qx.Class.define("osparc.data.model.Node", { }, // !---- Input Nodes ----- + // ----- Inputs Required ----- + getInputsRequired: function() { + return this.__inputsRequired; + }, + + setInputsRequired: function(inputsRequired) { + this.__inputsRequired = inputsRequired; + this.fireEvent("changeInputsRequired"); + }, + toggleInputRequired: function(portId) { const inputsRequired = this.getInputsRequired(); const index = inputsRequired.indexOf(portId); @@ -891,8 +897,8 @@ qx.Class.define("osparc.data.model.Node", { inputsRequired.push(portId); } this.setInputsRequired(inputsRequired); - this.fireEvent("changeInputsRequired"); }, + // !---- Inputs Required ----- canNodeStart: function() { return this.isDynamic() && ["idle", "failed"].includes(this.getStatus().getInteractive()); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js b/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js index 412dbc95d48..eef07a3ac74 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js @@ -173,6 +173,7 @@ qx.Class.define("osparc.widget.NodeDataManager", { __reloadTree: function() { if (this.__filesTree) { + this.__filesTree.resetCache(); if (this.getStudyId()) { this.__filesTree.populateStudyTree(this.getStudyId()); } From 5c3a6bfb52fc85bb6a3666ab3496816dd0e0dcae Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 1 Jul 2024 12:42:54 +0200 Subject: [PATCH 073/219] =?UTF-8?q?=E2=99=BB=EF=B8=8F=F0=9F=90=9B=20[Front?= =?UTF-8?q?end]=20Reuse=20iframe=20starting=20sequence=20for=20guests=20(#?= =?UTF-8?q?6004)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../static-webserver/client/Manifest.json | 2 +- services/static-webserver/client/qx-lock.json | 4 +- .../class/osparc/data/model/IframeHandler.js | 
379 ++++++++++++++++++ .../source/class/osparc/data/model/Node.js | 354 ++-------------- .../osparc/data/model/NodeProgressSequence.js | 10 +- .../source/class/osparc/data/model/Study.js | 6 +- .../class/osparc/data/model/Workbench.js | 4 +- .../source/class/osparc/desktop/MainPage.js | 2 +- .../class/osparc/desktop/WorkbenchView.js | 1 + .../client/source/class/osparc/store/Store.js | 2 + .../source/class/osparc/viewer/MainPage.js | 20 +- .../source/class/osparc/viewer/NodeViewer.js | 220 +++------- 12 files changed, 497 insertions(+), 507 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js diff --git a/services/static-webserver/client/Manifest.json b/services/static-webserver/client/Manifest.json index 21d82cd8cd3..bb5d7f01a15 100644 --- a/services/static-webserver/client/Manifest.json +++ b/services/static-webserver/client/Manifest.json @@ -41,7 +41,7 @@ "requires": { "@qooxdoo/compiler": "^1.0.0-beta", "@qooxdoo/framework": "^6.0.0-beta", - "ITISFoundation/qx-iconfont-fontawesome5": "^1.0.0", + "ITISFoundation/qx-iconfont-fontawesome5": "^0.2.2", "ITISFoundation/qx-osparc-theme": "^0.5.6", "qooxdoo/qxl.testtapper": "^0.4.3", "qooxdoo/qxl.apiviewer": "^1.0.0-beta", diff --git a/services/static-webserver/client/qx-lock.json b/services/static-webserver/client/qx-lock.json index 6b9be87bd53..71b270ebe76 100644 --- a/services/static-webserver/client/qx-lock.json +++ b/services/static-webserver/client/qx-lock.json @@ -3,10 +3,10 @@ { "library_name": "qx-iconfont-fontawesome5", "library_version": "1.0.0", - "path": "qx_packages/ITISFoundation_qx-iconfont-fontawesome5_v1_0_0", + "path": "qx_packages/ITISFoundation_qx-iconfont-fontawesome5_v0_2_2", "uri": "ITISFoundation/qx-iconfont-fontawesome5", "repo_name": "ITISFoundation/qx-iconfont-fontawesome5", - "repo_tag": "v1.0.0" + "repo_tag": "v0.2.2" }, { "library_name": "qx-osparc-theme", diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js new file mode 100644 index 00000000000..2a077b2dff5 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -0,0 +1,379 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.data.model.IframeHandler", { + extend: qx.core.Object, + include: qx.locale.MTranslation, + + construct: function(study, node) { + this.setStudy(study); + this.setNode(node); + + this.__initLoadingPage(); + this.__initIFrame(); + }, + + properties: { + study: { + check: "osparc.data.model.Study", + init: null, + nullable: false, + event: "changeStudy" + }, + + node: { + check: "osparc.data.model.Node", + init: null, + nullable: false + }, + + loadingPage: { + check: "osparc.ui.message.Loading", + init: null, + nullable: true + }, + + iFrame: { + check: "osparc.widget.PersistentIframe", + init: null, + nullable: true + } + }, + + events: { + "iframeChanged": "qx.event.type.Event" + }, + + members: { + __unresponsiveRetries: null, + __stopRequestingStatus: null, + __retriesLeft: null, + + startPolling: function() { + this.getNode().getStatus().getProgressSequence() + 
.resetSequence(); + + this.__unresponsiveRetries = 5; + this.__nodeState(); + }, + + stopIframe: function() { + this.getNode().getStatus().getProgressSequence() + .resetSequence(); + + this.__unresponsiveRetries = 5; + this.__nodeState(false); + + this.getIFrame().resetSource(); + }, + + __initIFrame: function() { + const iframe = new osparc.widget.PersistentIframe(); + osparc.utils.Utils.setIdToWidget(iframe.getIframe(), "iframe_"+this.getNode().getNodeId()); + if (osparc.product.Utils.isProduct("s4llite")) { + iframe.setShowToolbar(false); + } + iframe.addListener("restart", () => this.__restartIFrame(), this); + iframe.getDiskUsageIndicator().setCurrentNode(this.getNode()) + this.setIFrame(iframe); + }, + + __initLoadingPage: function() { + const showZoomMaximizeButton = !osparc.product.Utils.isProduct("s4llite"); + const loadingPage = new osparc.ui.message.Loading(showZoomMaximizeButton); + loadingPage.set({ + header: this.__getLoadingPageHeader() + }); + + const node = this.getNode(); + const thumbnail = node.getMetaData()["thumbnail"]; + if (thumbnail) { + loadingPage.setLogo(thumbnail); + } + node.addListener("changeLabel", () => loadingPage.setHeader(this.__getLoadingPageHeader()), this); + + const nodeStatus = node.getStatus(); + const sequenceWidget = nodeStatus.getProgressSequence().getWidgetForLoadingPage(); + nodeStatus.bind("interactive", sequenceWidget, "visibility", { + converter: state => ["starting", "pulling", "pending", "connecting"].includes(state) ? "visible" : "excluded" + }); + loadingPage.addExtraWidget(sequenceWidget); + + nodeStatus.addListener("changeInteractive", () => { + loadingPage.setHeader(this.__getLoadingPageHeader()); + const status = nodeStatus.getInteractive(); + if (["idle", "failed"].includes(status)) { + const startButton = new qx.ui.form.Button().set({ + label: this.tr("Start"), + icon: "@FontAwesome5Solid/play/18", + font: "text-18", + allowGrowX: false, + height: 32 + }); + startButton.addListener("execute", () => node.requestStartNode()); + loadingPage.addWidgetToMessages(startButton); + } else { + loadingPage.setMessages([]); + } + }, this); + this.setLoadingPage(loadingPage); + }, + + __getLoadingPageHeader: function() { + const node = this.getNode(); + let statusText = this.tr("Starting"); + const status = node.getStatus().getInteractive(); + if (status) { + statusText = status.charAt(0).toUpperCase() + status.slice(1); + } + return statusText + " " + node.getLabel() + " v" + node.getVersion() + ""; + }, + + __nodeState: function(starting=true) { + // Check if study is still there + if (this.getStudy() === null || this.__stopRequestingStatus === true) { + return; + } + // Check if node is still there + if (this.getStudy().getWorkbench().getNode(this.getNode().getNodeId()) === null) { + return; + } + + const node = this.getNode(); + const params = { + url: { + studyId: this.getStudy().getUuid(), + nodeId: node.getNodeId() + } + }; + osparc.data.Resources.fetch("studies", "getNode", params) + .then(data => this.__onNodeState(data, starting)) + .catch(err => { + let errorMsg = `Error retrieving ${node.getLabel()} status: ${err}`; + if ("status" in err && err.status === 406) { + errorMsg = node.getKey() + ":" + node.getVersion() + "is retired"; + node.getStatus().setInteractive("retired"); + osparc.FlashMessenger.getInstance().logAs(node.getLabel() + this.tr(" is retired"), "ERROR"); + } + const errorMsgData = { + nodeId: node.getNodeId(), + msg: errorMsg, + level: "ERROR" + }; + node.fireDataEvent("showInLogger", errorMsgData); + if ("status" 
in err && err.status === 406) { + return; + } + if (this.__unresponsiveRetries > 0) { + const retryMsg = `Retrying (${this.__unresponsiveRetries})`; + const retryMsgData = { + nodeId: node.getNodeId(), + msg: retryMsg, + level: "ERROR" + }; + node.fireDataEvent("showInLogger", retryMsgData); + this.__unresponsiveRetries--; + const interval = Math.floor(Math.random() * 5000) + 3000; + setTimeout(() => this.__nodeState(), interval); + } else { + node.getStatus().setInteractive("failed"); + osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error starting") + " " + node.getLabel(), "ERROR"); + } + }); + }, + + __onNodeState: function(data, starting=true) { + const serviceState = data["service_state"]; + const nodeId = data["service_uuid"]; + const node = this.getNode(); + const status = node.getStatus(); + switch (serviceState) { + case "idle": { + status.setInteractive(serviceState); + if (starting && this.__unresponsiveRetries>0) { + // a bit of a hack. We will get rid of it when the backend pushes the states + this.__unresponsiveRetries--; + const interval = 2000; + qx.event.Timer.once(() => this.__nodeState(starting), this, interval); + } + break; + } + case "pending": { + if (data["service_message"]) { + const serviceName = node.getLabel(); + const serviceMessage = data["service_message"]; + const msg = `The service "${serviceName}" is waiting for available ` + + `resources. Please inform support and provide the following message ` + + `in case this does not resolve in a few minutes: "${nodeId}" ` + + `reported "${serviceMessage}"`; + const msgData = { + nodeId: node.getNodeId(), + msg: msg, + level: "INFO" + }; + node.fireDataEvent("showInLogger", msgData); + } + status.setInteractive(serviceState); + const interval = 10000; + qx.event.Timer.once(() => this.__nodeState(starting), this, interval); + break; + } + case "stopping": + case "unknown": + case "starting": + case "pulling": { + status.setInteractive(serviceState); + const interval = 5000; + qx.event.Timer.once(() => this.__nodeState(starting), this, interval); + break; + } + case "running": { + if (nodeId !== node.getNodeId()) { + return; + } + if (!starting) { + status.setInteractive("stopping"); + const interval = 5000; + qx.event.Timer.once(() => this.__nodeState(starting), this, interval); + break; + } + const { + srvUrl, + isDynamicV2 + } = osparc.utils.Utils.computeServiceUrl(data); + node.setDynamicV2(isDynamicV2); + if (srvUrl) { + this.__retriesLeft = 40; + this.__waitForServiceReady(srvUrl); + } + break; + } + case "complete": + break; + case "failed": { + status.setInteractive(serviceState); + const msg = "Service failed: " + data["service_message"]; + const errorMsgData = { + nodeId: node.getNodeId(), + msg, + level: "ERROR" + }; + node.fireDataEvent("showInLogger", errorMsgData); + return; + } + default: + console.error(serviceState, "service state not supported"); + break; + } + }, + + __waitForServiceReady: function(srvUrl) { + this.getNode().getStatus().setInteractive("connecting"); + + if (this.__retriesLeft === 0) { + return; + } + + const retry = () => { + this.__retriesLeft--; + + // Check if node is still there + if (this.getStudy().getWorkbench().getNode(this.getNode().getNodeId()) === null) { + return; + } + const interval = 5000; + qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); + }; + + // ping for some time until it is really reachable + try { + if (osparc.utils.Utils.isDevelopmentPlatform()) { + console.log("Connecting: about to fetch", srvUrl); + } + 
fetch(srvUrl) + .then(response => { + if (osparc.utils.Utils.isDevelopmentPlatform()) { + console.log("Connecting: fetch's response status", response.status); + } + if (response.status < 400) { + this.__serviceReadyIn(srvUrl); + } else { + console.log(`Connecting: ${srvUrl} is not reachable. Status: ${response.status}`); + retry(); + } + }) + .catch(err => { + console.error("Connecting: Error", err); + retry(); + }); + } catch (error) { + console.error(`Connecting: Error while checking ${srvUrl}:`, error); + retry(); + } + }, + + __serviceReadyIn: function(srvUrl) { + const node = this.getNode(); + node.setServiceUrl(srvUrl); + node.getStatus().setInteractive("ready"); + const msg = "Service ready on " + srvUrl; + const msgData = { + nodeId: node.getNodeId(), + msg, + level: "INFO" + }; + node.fireDataEvent("showInLogger", msgData); + this.__restartIFrame(); + node.callRetrieveInputs(); + }, + + __restartIFrame: function() { + const node = this.getNode(); + if (node.getServiceUrl() !== null) { + // restart button pushed + if (this.getIFrame().getSource().includes(node.getServiceUrl())) { + this.__loadIframe(); + } + + const loadingPage = this.getLoadingPage(); + const bounds = loadingPage.getBounds(); + const domEle = loadingPage.getContentElement().getDomElement(); + const boundsCR = domEle ? domEle.getBoundingClientRect() : null; + if (bounds !== null && boundsCR && boundsCR.width > 0) { + this.__loadIframe(); + } else { + // lazy loading + loadingPage.addListenerOnce("appear", () => this.__loadIframe(), this); + } + } + }, + + __loadIframe: function() { + const node = this.getNode(); + const status = node.getStatus().getInteractive(); + // it might have been stopped + if (status === "ready") { + this.getIFrame().resetSource(); + this.getIFrame().setSource(node.getServiceUrl()); + + // fire event to force switching to iframe's content: + // it is required in those cases where the native 'load' event isn't triggered (voila) + this.fireEvent("iframeChanged"); + } + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index b04a8febd6b..ee64755ec17 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -32,7 +32,6 @@ *
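The polling code above retries a bounded number of times with a randomized delay of 3000 + random*5000 ms, which spreads reconnection attempts out when many clients lose the backend at once. The same loop, sketched in Python with an assumed `get_state` callable:

```python
import asyncio
import random


async def poll_until_up(get_state, *, max_retries: int = 5) -> str:
    # get_state is an assumed async callable that raises ConnectionError while down
    retries = max_retries
    while True:
        try:
            return await get_state()
        except ConnectionError:
            if retries == 0:
                raise  # the equivalent of setInteractive("failed") above
            retries -= 1
            # same jitter window as the JS code: 3-8 seconds between attempts
            await asyncio.sleep(3.0 + random.random() * 5.0)
```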
  *   let node = new osparc.data.model.Node(key, version, uuid);
  *   node.populateNodeData(nodeData);
- *   node.startDynamicService();
  * 
*/ @@ -44,9 +43,9 @@ qx.Class.define("osparc.data.model.Node", { * @param study {osparc.data.model.Study} Study or Serialized Study Object * @param key {String} key of the service represented by the node * @param version {String} version of the service represented by the node - * @param uuid {String} uuid of the service represented by the node (not needed for new Nodes) + * @param nodeId {String} uuid of the service represented by the node (not needed for new Nodes) */ - construct: function(study, key, version, uuid) { + construct: function(study, key, version, nodeId) { this.base(arguments); this.__metaData = osparc.service.Utils.getMetaData(key, version); @@ -59,7 +58,7 @@ qx.Class.define("osparc.data.model.Node", { this.setStudy(study); } this.set({ - nodeId: uuid || osparc.utils.Utils.uuidV4(), + nodeId: nodeId || osparc.utils.Utils.uuidV4(), key, version, status: new osparc.data.model.NodeStatus(this) @@ -196,18 +195,6 @@ qx.Class.define("osparc.data.model.Node", { event: "changeOutputConnected" }, - loadingPage: { - check: "osparc.ui.message.Loading", - init: null, - nullable: true - }, - - iFrame: { - check: "osparc.widget.PersistentIframe", - init: null, - nullable: true - }, - logger: { check: "osparc.widget.logger.LoggerView", init: null, @@ -336,9 +323,7 @@ qx.Class.define("osparc.data.model.Node", { __settingsForm: null, __posX: null, __posY: null, - __unresponsiveRetries: null, - __stopRequestingStatus: null, - __retriesLeft: null, + __iframeHandler: null, getWorkbench: function() { return this.getStudy().getWorkbench(); @@ -487,9 +472,8 @@ qx.Class.define("osparc.data.model.Node", { } this.__initLogger(); - if (this.isDynamic()) { - this.__initIFrame(); - } + + this.initIframeHandler(); if (this.isParameter()) { this.__initParameter(); @@ -534,6 +518,28 @@ qx.Class.define("osparc.data.model.Node", { } }, + initIframeHandler: function() { + if (this.isDynamic()) { + this.__iframeHandler = new osparc.data.model.IframeHandler(this.getStudy(), this); + } + }, + + getIframeHandler: function() { + return this.__iframeHandler; + }, + + getIFrame: function() { + return this.getIframeHandler() ? this.getIframeHandler().getIFrame() : null; + }, + + setIFrame: function(iframe) { + return this.getIframeHandler() ? this.getIframeHandler().setIFrame(iframe) : null; + }, + + getLoadingPage: function() { + return this.getIframeHandler() ? 
this.getIframeHandler().getLoadingPage() : null; + }, + __applyPropsForm: function() { const checkIsPipelineRunning = () => { const isPipelineRunning = this.getStudy().isPipelineRunning(); @@ -915,7 +921,7 @@ qx.Class.define("osparc.data.model.Node", { } }; osparc.data.Resources.fetch("studies", "startNode", params) - .then(() => this.startDynamicService()) + .then(() => this.startPollingState()) .catch(err => { if ("status" in err && (err.status === 409 || err.status === 402)) { osparc.FlashMessenger.getInstance().logAs(err.message, "WARNING"); @@ -972,97 +978,6 @@ qx.Class.define("osparc.data.model.Node", { this.setLogger(new osparc.widget.logger.LoggerView()); }, - __getLoadingPageHeader: function() { - let statusText = this.tr("Starting"); - const status = this.getStatus().getInteractive(); - if (status) { - statusText = status.charAt(0).toUpperCase() + status.slice(1); - } - return statusText + " " + this.getLabel() + " v" + this.getVersion() + ""; - }, - - __initLoadingPage: function() { - const showZoomMaximizeButton = !osparc.product.Utils.isProduct("s4llite"); - const loadingPage = new osparc.ui.message.Loading(showZoomMaximizeButton); - loadingPage.set({ - header: this.__getLoadingPageHeader() - }); - - const thumbnail = this.getMetaData()["thumbnail"]; - if (thumbnail) { - loadingPage.setLogo(thumbnail); - } - this.addListener("changeLabel", () => loadingPage.setHeader(this.__getLoadingPageHeader()), this); - - const nodeStatus = this.getStatus(); - const sequenceWidget = nodeStatus.getProgressSequence().getWidgetForLoadingPage(); - nodeStatus.bind("interactive", sequenceWidget, "visibility", { - converter: state => ["starting", "pulling", "pending", "connecting"].includes(state) ? "visible" : "excluded" - }); - loadingPage.addExtraWidget(sequenceWidget); - - this.getStatus().addListener("changeInteractive", () => { - loadingPage.setHeader(this.__getLoadingPageHeader()); - const status = this.getStatus().getInteractive(); - if (["idle", "failed"].includes(status)) { - const startButton = new qx.ui.form.Button().set({ - label: this.tr("Start"), - icon: "@FontAwesome5Solid/play/18", - font: "text-18", - allowGrowX: false, - height: 32 - }); - startButton.addListener("execute", () => this.requestStartNode()); - loadingPage.addWidgetToMessages(startButton); - } else { - loadingPage.setMessages([]); - } - }, this); - this.setLoadingPage(loadingPage); - }, - - __initIFrame: function() { - this.__initLoadingPage(); - - const iframe = new osparc.widget.PersistentIframe(); - osparc.utils.Utils.setIdToWidget(iframe.getIframe(), "iframe_"+this.getNodeId()); - if (osparc.product.Utils.isProduct("s4llite")) { - iframe.setShowToolbar(false); - } - iframe.addListener("restart", () => this.__restartIFrame(), this); - iframe.getDiskUsageIndicator().setCurrentNode(this) - this.setIFrame(iframe); - }, - - __restartIFrame: function() { - if (this.getServiceUrl() !== null) { - const loadIframe = () => { - const status = this.getStatus().getInteractive(); - // it might have been stopped - if (status === "ready") { - this.getIFrame().resetSource(); - this.getIFrame().setSource(this.getServiceUrl()); - } - }; - - // restart button pushed - if (this.getIFrame().getSource().includes(this.getServiceUrl())) { - loadIframe(); - } - - const loadingPage = this.getLoadingPage(); - const bounds = loadingPage.getBounds(); - const domEle = loadingPage.getContentElement().getDomElement(); - const boundsCR = domEle ? 
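`getIFrame` and `getLoadingPage` keep their old signatures on `Node` but now forward to the handler, returning `null` for computational services that never get one. A compact Python sketch of this null-safe delegation, with hypothetical names:

```python
class IframeHandler:
    # minimal stand-in; the real class also owns the loading page and polling
    def __init__(self) -> None:
        self.iframe = object()


class Node:
    def __init__(self, *, is_dynamic: bool) -> None:
        # only dynamic (interactive) services get an iframe handler
        self._iframe_handler = IframeHandler() if is_dynamic else None

    def get_iframe(self):
        # callers keep the old API; None means "no iframe for this node"
        return self._iframe_handler.iframe if self._iframe_handler else None


assert Node(is_dynamic=False).get_iframe() is None
```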
domEle.getBoundingClientRect() : null; - if (bounds !== null && boundsCR && boundsCR.width > 0) { - loadIframe(); - } else { - // lazy loading - loadingPage.addListenerOnce("appear", () => loadIframe(), this); - } - } - }, - __initParameter: function() { if (this.isParameter() && this.__getOutputData("out_1") === null) { const type = osparc.node.ParameterEditor.getParameterOutputType(this); @@ -1154,10 +1069,8 @@ qx.Class.define("osparc.data.model.Node", { } }, - startDynamicService: function() { + startPollingState: function() { if (this.isDynamic()) { - this.getStatus().getProgressSequence().resetSequence(); - const metaData = this.getMetaData(); const msg = "Starting " + metaData.key + ":" + metaData.version + "..."; const msgData = { @@ -1167,167 +1080,25 @@ qx.Class.define("osparc.data.model.Node", { }; this.fireDataEvent("showInLogger", msgData); - this.__unresponsiveRetries = 5; - this.__nodeState(); + this.getIframeHandler().startPolling(); } }, stopDynamicService: function() { if (this.isDynamic()) { - this.getStatus().getProgressSequence().resetSequence(); - const metaData = this.getMetaData(); const msg = "Stopping " + metaData.key + ":" + metaData.version + "..."; const msgData = { nodeId: this.getNodeId(), - msg: msg, + msg, level: "INFO" }; this.fireDataEvent("showInLogger", msgData); - this.__unresponsiveRetries = 5; - this.__nodeState(false); - - this.getIFrame().resetSource(); + this.getIframeHandler().stopIframe(); } }, - __onNodeState: function(data, starting=true) { - const serviceState = data["service_state"]; - const nodeId = data["service_uuid"]; - const status = this.getStatus(); - switch (serviceState) { - case "idle": { - status.setInteractive(serviceState); - if (starting && this.__unresponsiveRetries>0) { - // a bit of a hack. We will get rid of it when the backend pushes the states - this.__unresponsiveRetries--; - const interval = 2000; - qx.event.Timer.once(() => this.__nodeState(starting), this, interval); - } - break; - } - case "pending": { - if (data["service_message"]) { - const serviceName = this.getLabel(); - const serviceMessage = data["service_message"]; - const msg = `The service "${serviceName}" is waiting for available ` + - `resources. 
Please inform support and provide the following message ` + - `in case this does not resolve in a few minutes: "${nodeId}" ` + - `reported "${serviceMessage}"`; - const msgData = { - nodeId: this.getNodeId(), - msg: msg, - level: "INFO" - }; - this.fireDataEvent("showInLogger", msgData); - } - status.setInteractive(serviceState); - const interval = 10000; - qx.event.Timer.once(() => this.__nodeState(starting), this, interval); - break; - } - case "stopping": - case "unknown": - case "starting": - case "pulling": { - status.setInteractive(serviceState); - const interval = 5000; - qx.event.Timer.once(() => this.__nodeState(starting), this, interval); - break; - } - case "running": { - if (nodeId !== this.getNodeId()) { - return; - } - if (!starting) { - status.setInteractive("stopping"); - const interval = 5000; - qx.event.Timer.once(() => this.__nodeState(starting), this, interval); - break; - } - const { - srvUrl, - isDynamicV2 - } = osparc.utils.Utils.computeServiceUrl(data); - this.setDynamicV2(isDynamicV2); - if (srvUrl) { - this.__retriesLeft = 40; - this.__waitForServiceReady(srvUrl); - } - break; - } - case "complete": - break; - case "failed": { - status.setInteractive(serviceState); - const msg = "Service failed: " + data["service_message"]; - const errorMsgData = { - nodeId: this.getNodeId(), - msg, - level: "ERROR" - }; - this.fireDataEvent("showInLogger", errorMsgData); - return; - } - default: - console.error(serviceState, "service state not supported"); - break; - } - }, - - __nodeState: function(starting=true) { - // Check if study is still there - if (this.getStudy() === null || this.__stopRequestingStatus === true) { - return; - } - // Check if node is still there - if (this.getWorkbench().getNode(this.getNodeId()) === null) { - return; - } - - const params = { - url: { - studyId: this.getStudy().getUuid(), - nodeId: this.getNodeId() - } - }; - osparc.data.Resources.fetch("studies", "getNode", params) - .then(data => this.__onNodeState(data, starting)) - .catch(err => { - let errorMsg = `Error retrieving ${this.getLabel()} status: ${err}`; - if ("status" in err && err.status === 406) { - errorMsg = this.getKey() + ":" + this.getVersion() + "is retired"; - this.getStatus().setInteractive("retired"); - osparc.FlashMessenger.getInstance().logAs(this.getLabel() + this.tr(" is retired"), "ERROR"); - } - const errorMsgData = { - nodeId: this.getNodeId(), - msg: errorMsg, - level: "ERROR" - }; - this.fireDataEvent("showInLogger", errorMsgData); - if ("status" in err && err.status === 406) { - return; - } - if (this.__unresponsiveRetries > 0) { - const retryMsg = `Retrying (${this.__unresponsiveRetries})`; - const retryMsgData = { - nodeId: this.getNodeId(), - msg: retryMsg, - level: "ERROR" - }; - this.fireDataEvent("showInLogger", retryMsgData); - this.__unresponsiveRetries--; - const interval = Math.floor(Math.random() * 5000) + 3000; - setTimeout(() => this.__nodeState(), interval); - } else { - this.getStatus().setInteractive("failed"); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error starting") + " " + this.getLabel(), "ERROR"); - } - }); - }, - setNodeProgressSequence: function(progressType, progressReport) { const nodeStatus = this.getStatus(); if (nodeStatus.getProgressSequence()) { @@ -1335,65 +1106,6 @@ qx.Class.define("osparc.data.model.Node", { } }, - __waitForServiceReady: function(srvUrl) { - this.getStatus().setInteractive("connecting"); - - if (this.__retriesLeft === 0) { - return; - } - - const retry = () => { - this.__retriesLeft--; - - // 
Check if node is still there - if (this.getWorkbench().getNode(this.getNodeId()) === null) { - return; - } - const interval = 5000; - qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); - }; - - // ping for some time until it is really reachable - try { - if (osparc.utils.Utils.isDevelopmentPlatform()) { - console.log("Connecting: about to fetch", srvUrl); - } - fetch(srvUrl) - .then(response => { - if (osparc.utils.Utils.isDevelopmentPlatform()) { - console.log("Connecting: fetch's response status", response.status); - } - if (response.status < 400) { - this.__serviceReadyIn(srvUrl); - } else { - console.log(`Connecting: ${srvUrl} is not reachable. Status: ${response.status}`); - retry(); - } - }) - .catch(err => { - console.error("Connecting: Error", err); - retry(); - }); - } catch (error) { - console.error(`Connecting: Error while checking ${srvUrl}:`, error); - retry(); - } - }, - - __serviceReadyIn: function(srvUrl) { - this.setServiceUrl(srvUrl); - this.getStatus().setInteractive("ready"); - const msg = "Service ready on " + srvUrl; - const msgData = { - nodeId: this.getNodeId(), - msg, - level: "INFO" - }; - this.fireDataEvent("showInLogger", msgData); - this.__restartIFrame(); - this.callRetrieveInputs(); - }, - attachHandlersToStartButton: function(startButton) { this.getStatus().bind("interactive", startButton, "visibility", { converter: state => (state === "ready") ? "excluded" : "visible" diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js index 5b252f573c4..ef38a4a2905 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js @@ -114,6 +114,8 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { HALO: 2, }, + DISCLAIMER_TIME: 50000, + createTaskLayout: function(label) { const layout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ alignY: "middle" @@ -212,6 +214,7 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { __pullingStateLayout: null, __pullingImagesLayout: null, __pullingInputsLayout: null, + __disclaimerTimer: null, __disclaimerText: null, getDefaultStartValues: function() { @@ -233,6 +236,9 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { }, resetSequence: function() { + if (this.__disclaimerTimer) { + clearTimeout(this.__disclaimerTimer); + } const defaultVals = this.getDefaultStartValues(); this.setOverallProgress(0); this.setClusterUpScaling(defaultVals); @@ -355,9 +361,7 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { __applyOverallProgress: function(value) { if (value > 0 && value < 6) { - setTimeout(() => { - this.__disclaimerText.show(); - }, 50000); + this.__disclaimerTimer = setTimeout(() => this.__disclaimerText.show(), this.self().DISCLAIMER_TIME); } else { this.__disclaimerText.exclude(); } diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index f34ce1865ef..80c24e76c2a 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.data.model.Study", { this.setUi(new osparc.data.model.StudyUI(studyData.ui)); - this.__buildWorkbench(); + 
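The `NodeProgressSequence` change fixes a timer leak: the 50 s disclaimer `setTimeout` handle is now kept in `__disclaimerTimer` and cancelled in `resetSequence`, so a restarted sequence cannot pop a stale disclaimer. The same cancel-before-rearm pattern on Python's event loop (names assumed):

```python
import asyncio


class DisclaimerTimer:
    DISCLAIMER_TIME_S = 50.0  # mirrors DISCLAIMER_TIME: 50000 ms above

    def __init__(self) -> None:
        self._handle: asyncio.TimerHandle | None = None

    def arm(self, show_disclaimer) -> None:
        # must run inside an event loop; cancel any pending timer first
        self.reset()
        loop = asyncio.get_running_loop()
        self._handle = loop.call_later(self.DISCLAIMER_TIME_S, show_disclaimer)

    def reset(self) -> None:
        if self._handle is not None:
            self._handle.cancel()  # the clearTimeout equivalent
            self._handle = None
```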
this.getWorkbench().buildWorkbench(); }, properties: { @@ -316,10 +316,6 @@ qx.Class.define("osparc.data.model.Study", { }, members: { - __buildWorkbench: function() { - this.getWorkbench().buildWorkbench(); - }, - initStudy: function() { this.getWorkbench().initWorkbench(); }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js index 01da1153297..a1e4bb5015e 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js @@ -96,7 +96,7 @@ qx.Class.define("osparc.data.model.Workbench", { initWorkbench: function() { const allModels = this.getNodes(); const nodes = Object.values(allModels); - nodes.forEach(node => node.startDynamicService()); + nodes.forEach(node => node.startPollingState()); }, getUpstreamCompNodes: function(node, recursive = true, upstreamNodes = new Set()) { @@ -308,7 +308,7 @@ qx.Class.define("osparc.data.model.Workbench", { node.populateNodeData(); this.giveUniqueNameToNode(node, node.getLabel()); - node.startDynamicService(); + node.startPollingState(); return node; }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js index 7ba78f72356..57c7f91a970 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js @@ -43,7 +43,7 @@ qx.Class.define("osparc.desktop.MainPage", { construct: function() { this.base(arguments); - this._setLayout(new qx.ui.layout.VBox(null, null)); + this._setLayout(new qx.ui.layout.VBox()); this._add(osparc.notification.RibbonNotifications.getInstance()); diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index 1945a9742c9..2b86040f481 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -727,6 +727,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { }); this.__iFrameChanged(node); + node.getIframeHandler().addListener("iframeChanged", () => this.__iFrameChanged(node), this); iFrame.addListener("load", () => this.__iFrameChanged(node), this); } else { // This will keep what comes after at the bottom diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js index 69ab07d4dbb..69bc426bdb5 100644 --- a/services/static-webserver/client/source/class/osparc/store/Store.js +++ b/services/static-webserver/client/source/class/osparc/store/Store.js @@ -454,6 +454,8 @@ qx.Class.define("osparc.store.Store", { }; osparc.data.Resources.getOne("services", params, null, !reload) .then(serviceData => { + osparc.service.Utils.addTSRInfo(serviceData); + osparc.service.Utils.addExtraTypeInfo(serviceData); resolve(serviceData); }); }); diff --git a/services/static-webserver/client/source/class/osparc/viewer/MainPage.js b/services/static-webserver/client/source/class/osparc/viewer/MainPage.js index 0d466f40255..fc9f330153f 100644 --- a/services/static-webserver/client/source/class/osparc/viewer/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/viewer/MainPage.js @@ -21,26 +21,32 @@ 
qx.Class.define("osparc.viewer.MainPage", { construct: function(studyId, viewerNodeId) { this.base(); - this._setLayout(new qx.ui.layout.VBox(null, null, "separator-vertical")); + this._setLayout(new qx.ui.layout.VBox()); this._add(osparc.notification.RibbonNotifications.getInstance()); const navBar = new osparc.viewer.NavigationBar(); + navBar.populateLayout(); this._add(navBar); // Some resources request before building the main stack osparc.WindowSizeTracker.getInstance().startTracker(); osparc.MaintenanceTracker.getInstance().startTracker(); - const nodeViewer = this.__createNodeViewer(studyId, viewerNodeId); - this._add(nodeViewer, { - flex: 1 - }); + const store = osparc.store.Store.getInstance(); + const preloadPromises = []; + preloadPromises.push(store.getAllServices(true)); + Promise.all(preloadPromises) + .then(() => { + const nodeViewer = this.__createNodeViewer(studyId, viewerNodeId); + this._add(nodeViewer, { + flex: 1 + }); + }) + .catch(err => console.error(err)); }, members: { - __iframeLayout: null, - __createNodeViewer: function(studyId, viewerNodeId) { const nodeViewer = new osparc.viewer.NodeViewer(studyId, viewerNodeId); return nodeViewer; diff --git a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js index 19bf8628a14..e5cfd3e015f 100644 --- a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js +++ b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js @@ -25,55 +25,47 @@ qx.Class.define("osparc.viewer.NodeViewer", { this._setLayout(new qx.ui.layout.VBox()); - this.__initIFrame(); - this.__iFrameChanged(); - - this.set({ - studyId, - nodeId - }); - this.self().openStudy(studyId) - .then(() => { - this.__nodeState(); + .then(studyData => { + if (studyData["workbench"] && nodeId in studyData["workbench"]) { + const nodeData = studyData["workbench"][nodeId]; + const key = nodeData["key"]; + const version = nodeData["version"]; + + // create study + const study = new osparc.data.model.Study(studyData); + this.setStudy(study); + + // create node + const node = new osparc.data.model.Node(study, key, version, nodeId); + this.setNode(node); + node.initIframeHandler(); + + const iframeHandler = node.getIframeHandler(); + if (iframeHandler) { + iframeHandler.startPolling(); + iframeHandler.addListener("iframeChanged", () => this.__buildLayout(), this); + iframeHandler.getIFrame().addListener("load", () => this.__buildLayout(), this); + this.__buildLayout(); + + this.__attachSocketEventHandlers(); + } + } }) - .catch(err => { - console.error(err); - }); + .catch(err => console.error(err)); }, properties: { - loadingPage: { - check: "osparc.ui.message.Loading", + study: { + check: "osparc.data.model.Study", init: null, - nullable: true - }, - - iFrame: { - check: "osparc.widget.PersistentIframe", - init: null, - nullable: true - }, - - studyId: { - check: "String", nullable: false }, - nodeId: { - check: "String", + node: { + check: "osparc.data.model.Node", + init: null, nullable: false - }, - - serviceUrl: { - check: "String", - nullable: true - }, - - dynamicV2: { - check: "Boolean", - init: false, - nullable: true } }, @@ -90,144 +82,42 @@ qx.Class.define("osparc.viewer.NodeViewer", { }, members: { - __initLoadingPage: function() { - const loadingPage = new osparc.ui.message.Loading().set({ - header: this.tr("Starting viewer") - }); - this.setLoadingPage(loadingPage); - }, - - __initIFrame: function() { - this.__initLoadingPage(); - - 
const iframe = new osparc.widget.PersistentIframe(); - this.setIFrame(iframe); - }, - - __nodeState: function() { - const params = { - url: { - "studyId": this.getStudyId(), - nodeId: this.getNodeId() - } - }; - osparc.data.Resources.fetch("studies", "getNode", params) - .then(data => this.__onNodeState(data)) - .catch(() => osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error starting the viewer."), "ERROR")); - }, - - __onNodeState: function(data) { - const serviceState = data["service_state"]; - if (serviceState) { - this.getLoadingPage().setHeader(serviceState + " viewer"); - } - switch (serviceState) { - case "idle": { - const interval = 1000; - qx.event.Timer.once(() => this.__nodeState(), this, interval); - break; - } - case "unknown": - case "starting": - case "connecting": - case "pulling": { - const interval = 5000; - qx.event.Timer.once(() => this.__nodeState(), this, interval); - break; - } - case "pending": { - const interval = 10000; - qx.event.Timer.once(() => this.__nodeState(), this, interval); - break; - } - case "running": { - const nodeId = data["service_uuid"]; - if (nodeId !== this.getNodeId()) { - return; - } - - const { - srvUrl, - isDynamicV2 - } = osparc.utils.Utils.computeServiceUrl(data); - this.setDynamicV2(isDynamicV2); - if (srvUrl) { - this.__waitForServiceReady(srvUrl); - } - break; - } - case "complete": - break; - case "deprecated": - case "retired": - case "failed": { - const msg = this.tr("Service failed: ") + data["service_message"]; - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - return; - } - default: - console.error(serviceState, "service state not supported"); - break; - } - }, + __buildLayout: function() { + this._removeAll(); - __waitForServiceReady: function(srvUrl) { - // ping for some time until it is really ready - const pingRequest = new qx.io.request.Xhr(srvUrl); - pingRequest.addListenerOnce("success", () => { - this.__serviceReadyIn(srvUrl); - }, this); - pingRequest.addListenerOnce("fail", () => { - const interval = 2000; - qx.event.Timer.once(() => this.__waitForServiceReady(srvUrl), this, interval); + const iframeHandler = this.getNode().getIframeHandler(); + const loadingPage = iframeHandler.getLoadingPage(); + const iFrame = iframeHandler.getIFrame(); + const src = iFrame.getSource(); + const iFrameView = (src === null || src === "about:blank") ? loadingPage : iFrame; + this._add(iFrameView, { + flex: 1 }); - pingRequest.send(); }, - __serviceReadyIn: function(srvUrl) { - this.setServiceUrl(srvUrl); - this.__retrieveInputs(); + __attachSocketEventHandlers: function() { + this.__listenToNodeUpdated(); + this.__listenToNodeProgress(); }, - __retrieveInputs: function() { - const srvUrl = this.getServiceUrl(); - if (srvUrl) { - const urlRetrieve = this.isDynamicV2() ? 
osparc.utils.Utils.computeServiceV2RetrieveUrl(this.getStudyId(), this.getNodeId()) : osparc.utils.Utils.computeServiceRetrieveUrl(srvUrl); - const updReq = new qx.io.request.Xhr(); - const reqData = { - "port_keys": [] - }; - updReq.set({ - url: urlRetrieve, - method: "POST", - requestData: qx.util.Serializer.toJson(reqData) - }); - updReq.addListener("success", e => { - this.getIFrame().setSource(srvUrl); - this.__iFrameChanged(); + __listenToNodeUpdated: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + + if (!socket.slotExists("nodeUpdated")) { + socket.on("nodeUpdated", data => { + this.getStudy().nodeUpdated(data); }, this); - updReq.send(); } }, - __iFrameChanged: function() { - this._removeAll(); + __listenToNodeProgress: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); - const loadingPage = this.getLoadingPage(); - const iFrame = this.getIFrame(); - const src = iFrame.getSource(); - let iFrameView; - if (src === null || src === "about:blank") { - iFrameView = loadingPage; - } else { - this.getLayoutParent().set({ - zIndex: iFrame.getZIndex()-1 - }); - iFrameView = iFrame; + if (!socket.slotExists("nodeProgress")) { + socket.on("nodeProgress", data => { + this.getStudy().nodeNodeProgressSequence(data); + }, this); } - this._add(iFrameView, { - flex: 1 - }); } } }); From bc7aef8a0ffcea795757e502563651dd9ce148c4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 1 Jul 2024 14:06:28 +0200 Subject: [PATCH 074/219] =?UTF-8?q?=F0=9F=94=92=EF=B8=8F=20Resolves=20secu?= =?UTF-8?q?rity=20scanning=20alerts=20(#6020)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../simcore_service_webserver/login/_sql.py | 10 --------- .../projects/_nodes_utils.py | 17 +++++++++----- .../projects/exceptions.py | 22 ++++++++++++++++++- .../projects/projects_api.py | 15 ++++++------- 4 files changed, 40 insertions(+), 24 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/login/_sql.py b/services/web/server/src/simcore_service_webserver/login/_sql.py index 20a5779acc8..deaeb6d3455 100644 --- a/services/web/server/src/simcore_service_webserver/login/_sql.py +++ b/services/web/server/src/simcore_service_webserver/login/_sql.py @@ -1,12 +1,5 @@ -from logging import getLogger - -log = getLogger(__name__) -LOG_TPL = "%s <--%s" - - def find_one(conn, table, filter_, fields=None): sql, values = find_one_sql(table, filter_, fields) - log.debug(LOG_TPL, sql, values) return conn.fetchrow(sql, *values) @@ -26,7 +19,6 @@ def find_one_sql(table, filter_, fields=None): def insert(conn, table, data, returning="id"): sql, values = insert_sql(table, data, returning) - log.debug(LOG_TPL, sql, values) return conn.fetchval(sql, *values) @@ -53,7 +45,6 @@ def insert_sql(table, data, returning="id"): def update(conn, table, filter_, updates): sql, values = update_sql(table, filter_, updates) - log.debug(LOG_TPL, sql, values) return conn.execute(sql, *values) @@ -72,7 +63,6 @@ def update_sql(table, filter_, updates): def delete(conn, table, filter_): sql, values = delete_sql(table, filter_) - log.debug(LOG_TPL, sql, values) return conn.execute(sql, *values) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_utils.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_utils.py index 00d1f4cdcd3..f03b72c573e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_utils.py +++ 
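The `_sql.py` change above removes the `log.debug(LOG_TPL, sql, values)` calls: the security scanner flags them because `values` can contain credentials (password hashes, tokens) that would land in plain-text logs. The queries themselves stay injection-safe, since the helpers bind values as positional parameters instead of interpolating them into the SQL string. A plausible sketch of what such a builder returns (not the verbatim implementation):

```python
def find_one_sql(table: str, filter_: dict, fields: list[str] | None = None):
    # values never enter the SQL string; they are bound as $1, $2, ... at execution
    keys = list(filter_)
    where = " AND ".join(f"{key}=${i}" for i, key in enumerate(keys, start=1))
    selection = ", ".join(fields) if fields else "*"
    return f"SELECT {selection} FROM {table} WHERE {where}", [filter_[k] for k in keys]


assert find_one_sql("users", {"email": "a@b.c", "status": "ACTIVE"}) == (
    "SELECT * FROM users WHERE email=$1 AND status=$2",
    ["a@b.c", "ACTIVE"],
)
```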
b/services/web/server/src/simcore_service_webserver/projects/_nodes_utils.py @@ -1,6 +1,9 @@ from models_library.services_resources import ServiceResourcesDict -from .exceptions import ProjectNodeResourcesInvalidError +from .exceptions import ( + InvalidContainerInResourcesSpecsError, + InvalidImageInResourcesSpecsError, +) def validate_new_service_resources( @@ -17,12 +20,16 @@ def validate_new_service_resources( # the docker container entries shall be contained in the current resources for container_name, container_resources in new_resources.items(): if container_name not in resources: - msg = f"Incompatible '{container_name=}' cannot be applied on any of {tuple(resources.keys())}!" - raise ProjectNodeResourcesInvalidError(msg) + raise InvalidContainerInResourcesSpecsError( + container_name=container_name, resource_keys=tuple(resources.keys()) + ) # now check the image names fit if container_resources.image != resources[container_name].image: - msg = f"Incompatible '{container_resources.image=}' cannot be applied on {container_name}:{resources[container_name].image}!" - raise ProjectNodeResourcesInvalidError(msg) + raise InvalidImageInResourcesSpecsError( + image_name=container_resources.image, + container_name=container_name, + expected_image=resources[container_name].image, + ) def set_reservation_same_as_limit( diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index ecd60a58c39..de6939f154e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -130,7 +130,27 @@ class PermalinkFactoryError(BaseProjectError): class ProjectNodeResourcesInvalidError(BaseProjectError): - ... + msg_template = "Invalid resource associated to node" + + +class InvalidContainerInResourcesSpecsError(ProjectNodeResourcesInvalidError): + msg_template = ( + "Incompatible '{container_name}' cannot be applied on any of {resource_keys}" + ) + + +class InvalidImageInResourcesSpecsError(ProjectNodeResourcesInvalidError): + msg_template = "Incompatible '{image_name}' cannot be applied on {container_name}:{expected_image}" + + +class InvalidKeysInResourcesSpecsError(ProjectNodeResourcesInvalidError): + msg_template = "Sub service is missing RAM/CPU resource keys ({missing_key})!" + + +class InvalidEC2TypeInResourcesSpecsError(ProjectNodeResourcesInvalidError): + msg_template = ( + "Invalid EC2 type name selected {ec2_types}. 
TIP: adjust product configuration" + ) class ProjectNodeResourcesInsufficientRightsError(BaseProjectError): diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 28296fa4018..cd738b3a35b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -122,13 +122,14 @@ from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, + InvalidEC2TypeInResourcesSpecsError, + InvalidKeysInResourcesSpecsError, NodeNotFoundError, ProjectInvalidRightsError, ProjectLockError, ProjectNodeConnectionsMissingError, ProjectNodeOutputPortMissingValueError, ProjectNodeRequiredInputsNotSetError, - ProjectNodeResourcesInvalidError, ProjectOwnerNotFoundInTheProjectAccessRightsError, ProjectStartsTooManyDynamicNodesError, ProjectTooManyProjectOpenedError, @@ -435,14 +436,12 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: check_update_allowed=False, ) except StopIteration as exc: - msg = ( - f"invalid EC2 type name selected {set(hardware_info.aws_ec2_instances)}." - " TIP: adjust product configuration" - ) - raise ProjectNodeResourcesInvalidError(msg) from exc + raise InvalidEC2TypeInResourcesSpecsError( + ec2_types=set(hardware_info.aws_ec2_instances) + ) from exc + except KeyError as exc: - msg = "Sub service is missing RAM/CPU resource keys!" - raise ProjectNodeResourcesInvalidError(msg) from exc + raise InvalidKeysInResourcesSpecsError(missing_key=f"{exc}") from exc except ( RemoteMethodNotRegisteredError, RPCServerError, From 430f229269d28c68fd4f6b805f814715e7529dc1 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Mon, 1 Jul 2024 16:02:50 +0200 Subject: [PATCH 075/219] =?UTF-8?q?=F0=9F=90=9BStorage:=20fix=20database?= =?UTF-8?q?=20locking=20(#6005)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aws-library/src/aws_library/s3/_client.py | 49 +- .../src/aws_library/s3/_error_handler.py | 34 +- .../src/pytest_simcore/helpers/s3.py | 55 +- .../service-library/src/servicelib/utils.py | 42 +- .../handlers_simcore_s3.py | 24 +- .../simcore_service_storage/simcore_s3_dsm.py | 667 +++++++++--------- services/storage/tests/conftest.py | 26 +- .../tests/data/projects_with_data.json | 163 ----- .../storage/tests/fixtures/data_models.py | 7 +- .../storage/tests/helpers/utils_project.py | 1 + .../tests/unit/test_handlers_simcore_s3.py | 124 ++-- 11 files changed, 545 insertions(+), 647 deletions(-) delete mode 100644 services/storage/tests/data/projects_with_data.json diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 67478f4e204..85280a24cbb 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -22,7 +22,7 @@ from types_aiobotocore_s3.literals import BucketLocationConstraintType from types_aiobotocore_s3.type_defs import ObjectIdentifierTypeDef -from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from ._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE, PRESIGNED_LINK_MAX_SIZE from ._error_handler import s3_exception_handler, s3_exception_handler_async_gen from ._errors import S3DestinationNotEmptyError, S3KeyNotFoundError from ._models import ( @@ -398,7 +398,10 @@ async def copy_object( "CopySource": {"Bucket": bucket, 
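Above, the free-form `ProjectNodeResourcesInvalidError(msg)` calls are replaced by dedicated subclasses whose `msg_template` is filled from keyword arguments, so handlers can match on the class and still get structured context. A self-contained approximation of that pattern (the real base class builds on the repo's error utilities):

```python
class BaseProjectError(Exception):
    msg_template = "project error"

    def __init__(self, **ctx) -> None:
        super().__init__(self.msg_template.format(**ctx))
        self.ctx = ctx  # structured fields survive for logging/handlers


class InvalidKeysInResourcesSpecsError(BaseProjectError):
    msg_template = "Sub service is missing RAM/CPU resource keys ({missing_key})!"


try:
    raise InvalidKeysInResourcesSpecsError(missing_key="'RAM'")
except BaseProjectError as err:
    assert err.ctx["missing_key"] == "'RAM'"
```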
"Key": src_object_key}, "Bucket": bucket, "Key": dst_object_key, - "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), + "Config": TransferConfig( + max_concurrency=self.transfer_max_concurrency, + multipart_threshold=PRESIGNED_LINK_MAX_SIZE, + ), } if bytes_transfered_cb: copy_options |= {"Callback": bytes_transfered_cb} @@ -419,34 +422,20 @@ async def copy_objects_recursively( ) if dst_metadata.size > 0: raise S3DestinationNotEmptyError(dst_prefix=dst_prefix) - try: - - await limited_gather( - *[ - self.copy_object( - bucket=bucket, - src_object_key=s3_object.object_key, - dst_object_key=s3_object.object_key.replace( - src_prefix, dst_prefix - ), - bytes_transfered_cb=bytes_transfered_cb, - ) - async for s3_object in self._list_all_objects( - bucket=bucket, prefix=src_prefix - ) - ], - limit=_MAX_CONCURRENT_COPY, - ) - - except Exception: - # rollback changes - with log_catch(_logger, reraise=False), log_context( - _logger, - logging.ERROR, - msg="Unexpected error while copying files recursively, deleting partially copied files", - ): - await self.delete_objects_recursively(bucket=bucket, prefix=dst_prefix) - raise + await limited_gather( + *[ + self.copy_object( + bucket=bucket, + src_object_key=s3_object.object_key, + dst_object_key=s3_object.object_key.replace(src_prefix, dst_prefix), + bytes_transfered_cb=bytes_transfered_cb, + ) + async for s3_object in self._list_all_objects( + bucket=bucket, prefix=src_prefix + ) + ], + limit=_MAX_CONCURRENT_COPY, + ) @staticmethod def is_multipart(file_size: ByteSize) -> bool: diff --git a/packages/aws-library/src/aws_library/s3/_error_handler.py b/packages/aws-library/src/aws_library/s3/_error_handler.py index 023d1553c27..b0bf38e8d63 100644 --- a/packages/aws-library/src/aws_library/s3/_error_handler.py +++ b/packages/aws-library/src/aws_library/s3/_error_handler.py @@ -2,7 +2,7 @@ import inspect import logging from collections.abc import AsyncGenerator, Callable, Coroutine -from typing import Any, Concatenate, ParamSpec, TypeVar +from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar from botocore import exceptions as botocore_exc @@ -13,6 +13,11 @@ S3UploadNotFoundError, ) +if TYPE_CHECKING: + # NOTE: TYPE_CHECKING is True when static type checkers are running, + # allowing for circular imports only for them (mypy, pylance, ruff) + from ._client import SimcoreS3API + def _map_botocore_client_exception( botocore_error: botocore_exc.ClientError, **kwargs @@ -44,13 +49,14 @@ def _map_botocore_client_exception( P = ParamSpec("P") R = TypeVar("R") T = TypeVar("T") +Self = TypeVar("Self", bound="SimcoreS3API") def s3_exception_handler( logger: logging.Logger, -) -> Callable[ # type: ignore[name-defined] - [Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]]], - Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]], +) -> Callable[ + [Callable[Concatenate[Self, P], Coroutine[Any, Any, R]]], + Callable[Concatenate[Self, P], Coroutine[Any, Any, R]], ]: """ Raises: @@ -62,10 +68,10 @@ def s3_exception_handler( """ def decorator( - func: Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]] # type: ignore[name-defined] # noqa: F821 - ) -> Callable[Concatenate["SimcoreS3API", P], Coroutine[Any, Any, R]]: # type: ignore[name-defined] # noqa: F821 + func: Callable[Concatenate[Self, P], Coroutine[Any, Any, R]] + ) -> Callable[Concatenate[Self, P], Coroutine[Any, Any, R]]: @functools.wraps(func) - async def wrapper(self: "SimcoreS3API", *args: P.args, **kwargs: P.kwargs) -> R: # type: 
ignore[name-defined] # noqa: F821 + async def wrapper(self: Self, *args: P.args, **kwargs: P.kwargs) -> R: try: return await func(self, *args, **kwargs) except ( @@ -91,9 +97,9 @@ async def wrapper(self: "SimcoreS3API", *args: P.args, **kwargs: P.kwargs) -> R: def s3_exception_handler_async_gen( logger: logging.Logger, -) -> Callable[ # type: ignore[name-defined] - [Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]]], # noqa: F821 - Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]], # noqa: F821 +) -> Callable[ + [Callable[Concatenate[Self, P], AsyncGenerator[T, None]]], + Callable[Concatenate[Self, P], AsyncGenerator[T, None]], ]: """ Raises: @@ -105,11 +111,11 @@ def s3_exception_handler_async_gen( """ def decorator( - func: Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]] # type: ignore[name-defined] # noqa: F821 - ) -> Callable[Concatenate["SimcoreS3API", P], AsyncGenerator[T, None]]: # type: ignore[name-defined] # noqa: F821 + func: Callable[Concatenate[Self, P], AsyncGenerator[T, None]] + ) -> Callable[Concatenate[Self, P], AsyncGenerator[T, None]]: @functools.wraps(func) async def async_generator_wrapper( - self: "SimcoreS3API", *args: P.args, **kwargs: P.kwargs # type: ignore[name-defined] # noqa: F821 + self: Self, *args: P.args, **kwargs: P.kwargs ) -> AsyncGenerator[T, None]: try: assert inspect.isasyncgenfunction(func) # nosec @@ -130,7 +136,7 @@ async def async_generator_wrapper( raise S3AccessError from exc async_generator_wrapper.__doc__ = ( - f"{func.__doc__}\n\n{s3_exception_handler.__doc__}" + f"{func.__doc__}\n\n{s3_exception_handler_async_gen.__doc__}" ) return async_generator_wrapper diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 810f6caab8b..d76ff0f9aec 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -1,6 +1,6 @@ +import logging from collections.abc import Iterable from pathlib import Path -from time import perf_counter from typing import Final import aiofiles @@ -13,6 +13,8 @@ from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client +from .logging import log_context + _SENDER_CHUNK_SIZE: Final[int] = parse_obj_as(ByteSize, "16Mib") @@ -77,36 +79,29 @@ async def upload_file_to_presigned_link( ) -> list[UploadedPart]: file_size = file.stat().st_size - start = perf_counter() - print(f"--> uploading {file=}") - async with ClientSession() as session: - file_chunk_size = int(file_upload_link.chunk_size) - num_urls = len(file_upload_link.urls) - last_chunk_size = file_size - file_chunk_size * (num_urls - 1) - upload_tasks = [] - for index, upload_url in enumerate(file_upload_link.urls): - this_file_chunk_size = ( - file_chunk_size if (index + 1) < num_urls else last_chunk_size - ) - upload_tasks.append( - upload_file_part( - session, - file, - index, - index * file_chunk_size, - this_file_chunk_size, - num_urls, - upload_url, + with log_context(logging.INFO, msg=f"uploading {file} via {file_upload_link=}"): + async with ClientSession() as session: + file_chunk_size = int(file_upload_link.chunk_size) + num_urls = len(file_upload_link.urls) + last_chunk_size = file_size - file_chunk_size * (num_urls - 1) + upload_tasks = [] + for index, upload_url in enumerate(file_upload_link.urls): + this_file_chunk_size = ( + file_chunk_size if (index + 1) < num_urls else last_chunk_size ) - ) - results = await 
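The error-handler cleanup replaces the stringly-typed `"SimcoreS3API"` annotations (and their `# type: ignore` noise) with a `Self = TypeVar("Self", bound="SimcoreS3API")` whose bound is only imported under `TYPE_CHECKING`, keeping the decorators fully typed without a runtime circular import. The shape of such a decorator, reduced to its essentials:

```python
import functools
import logging
from collections.abc import Callable, Coroutine
from typing import Any, Concatenate, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")
Self = TypeVar("Self")  # in the real code: bound="SimcoreS3API" under TYPE_CHECKING


def log_failures(
    logger: logging.Logger,
) -> Callable[
    [Callable[Concatenate[Self, P], Coroutine[Any, Any, R]]],
    Callable[Concatenate[Self, P], Coroutine[Any, Any, R]],
]:
    # decorates async *methods*: Concatenate keeps `self` in the typed signature
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(self, *args, **kwargs):
            try:
                return await func(self, *args, **kwargs)
            except Exception:
                logger.exception("%s failed", func.__name__)
                raise

        return wrapper

    return decorator
```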
logged_gather(*upload_tasks, max_concurrency=0) - part_to_etag = [ - UploadedPart(number=index + 1, e_tag=e_tag) for index, e_tag in results - ] - print( - f"--> upload of {file=} of {file_size=} completed in {perf_counter() - start}" - ) - return part_to_etag + upload_tasks.append( + upload_file_part( + session, + file, + index, + index * file_chunk_size, + this_file_chunk_size, + num_urls, + upload_url, + ) + ) + results = await logged_gather(*upload_tasks, max_concurrency=0) + return [UploadedPart(number=index + 1, e_tag=e_tag) for index, e_tag in results] async def delete_all_object_versions( diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index 0f96e7af3a0..da84ba9c1e9 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -11,7 +11,16 @@ import socket from collections.abc import Awaitable, Coroutine, Generator, Iterable from pathlib import Path -from typing import Any, AsyncGenerator, AsyncIterable, Final, TypeVar, cast +from typing import ( + Any, + AsyncGenerator, + AsyncIterable, + Final, + Literal, + TypeVar, + cast, + overload, +) import toolz from pydantic import NonNegativeInt @@ -278,13 +287,35 @@ async def _wrapped( return index, exc +@overload +async def limited_gather( + *awaitables: Awaitable[T], + reraise: Literal[True] = True, + log: logging.Logger = _DEFAULT_LOGGER, + limit: int = _DEFAULT_LIMITED_CONCURRENCY, + tasks_group_prefix: str | None = None, +) -> list[T]: + ... + + +@overload +async def limited_gather( + *awaitables: Awaitable[T], + reraise: Literal[False] = False, + log: logging.Logger = _DEFAULT_LOGGER, + limit: int = _DEFAULT_LIMITED_CONCURRENCY, + tasks_group_prefix: str | None = None, +) -> list[T | BaseException]: + ... + + async def limited_gather( *awaitables: Awaitable[T], reraise: bool = True, log: logging.Logger = _DEFAULT_LOGGER, limit: int = _DEFAULT_LIMITED_CONCURRENCY, tasks_group_prefix: str | None = None, -) -> list[T | BaseException | None]: +) -> list[T] | list[T | BaseException]: """runs all the awaitables using the limited concurrency and returns them in the same order Arguments: @@ -311,13 +342,14 @@ async def limited_gather( for index, awaitable in enumerate(awaitables) ] - results: list[T | BaseException | None] = [None] * len(indexed_awaitables) + interim_results: list[T | BaseException | None] = [None] * len(indexed_awaitables) async for future in limited_as_completed( indexed_awaitables, limit=limit, tasks_group_prefix=tasks_group_prefix or _DEFAULT_GATHER_TASKS_GROUP_PREFIX, ): index, result = await future - results[index] = result + interim_results[index] = result - return results + # NOTE: None is already contained in T + return cast(list[T | BaseException], interim_results) diff --git a/services/storage/src/simcore_service_storage/handlers_simcore_s3.py b/services/storage/src/simcore_service_storage/handlers_simcore_s3.py index dbf463903ce..eefc73bcef1 100644 --- a/services/storage/src/simcore_service_storage/handlers_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/handlers_simcore_s3.py @@ -16,13 +16,13 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) +from servicelib.logging_utils import log_context from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from settings_library.s3 import S3Settings -from simcore_service_storage.dsm import get_dsm_provider -from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from . 
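The `limited_gather` change above adds `@overload` signatures keyed on the `reraise` flag: with `Literal[True]` the declared return type is `list[T]`, with `Literal[False]` it is `list[T | BaseException]`, so type checkers only force exception handling on callers that opted into collecting errors. The mechanism in miniature:

```python
from typing import Literal, overload


@overload
def settle(values: list[int], *, reraise: Literal[True] = True) -> list[int]: ...
@overload
def settle(values: list[int], *, reraise: Literal[False]) -> list[int | BaseException]: ...


def settle(values: list[int], *, reraise: bool = True):
    results: list[int | BaseException] = []
    for value in values:
        try:
            results.append(10 // value)
        except ZeroDivisionError as err:
            if reraise:
                raise
            results.append(err)  # only reachable under the reraise=False overload
    return results


assert settle([5, 2]) == [2, 5]
```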
import sts
 from ._meta import API_VTAG
+from .dsm import get_dsm_provider
 from .models import (
     DeleteFolderQueryParams,
     FileMetaData,
@@ -30,6 +30,7 @@
     SimcoreS3FoldersParams,
     StorageQueryParamsBase,
 )
+from .simcore_s3_dsm import SimcoreS3DataManager

 _logger = logging.getLogger(__name__)

@@ -62,13 +63,18 @@ async def _copy_folders_from_project(
         SimcoreS3DataManager,
         get_dsm_provider(app).get(SimcoreS3DataManager.get_location_id()),
     )
-    await dsm.deep_copy_project_simcore_s3(
-        query_params.user_id,
-        body.source,
-        body.destination,
-        body.nodes_map,
-        task_progress=task_progress,
-    )
+    with log_context(
+        _logger,
+        logging.INFO,
+        msg=f"copying {body.source['uuid']} -> {body.destination['uuid']}",
+    ):
+        await dsm.deep_copy_project_simcore_s3(
+            query_params.user_id,
+            body.source,
+            body.destination,
+            body.nodes_map,
+            task_progress=task_progress,
+        )

     raise web.HTTPCreated(
         text=json_dumps(body.destination), content_type=MIMETYPE_APPLICATION_JSON
diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
index d9e3d39242b..74ca1a08f36 100644
--- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
+++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
@@ -3,8 +3,7 @@
 import logging
 import tempfile
 import urllib.parse
-from collections import deque
-from collections.abc import Awaitable, Callable, Coroutine
+from collections.abc import Callable, Coroutine
 from contextlib import suppress
 from dataclasses import dataclass
 from pathlib import Path
@@ -14,7 +13,7 @@
 from aiohttp import web
 from aiopg.sa import Engine
 from aiopg.sa.connection import SAConnection
-from aws_library.s3 import S3KeyNotFoundError, S3MetaData
+from aws_library.s3 import S3DirectoryMetaData, S3KeyNotFoundError, S3MetaData
 from models_library.api_schemas_storage import LinkType, S3BucketName, UploadedPart
 from models_library.basic_types import SHA256Str
 from models_library.projects import ProjectID
@@ -148,7 +147,7 @@ async def list_files(  # noqa C901
         data: list[FileMetaData] = []
         accessible_projects_ids = []
         uid = UserID | None
-        async with self.engine.acquire() as conn, conn.begin():
+        async with self.engine.acquire() as conn:
             if project_id is not None:
                 project_access_rights = await get_project_access_rights(
                     conn=conn, user_id=user_id, project_id=project_id
@@ -175,23 +174,22 @@ async def list_files(  # noqa C901
                     sha256_checksum=None,
                 )

-            # add all the entries from file_meta_data without
-            for metadata in file_and_directory_meta_data:
-                # below checks ensures that directoris either appear as
-                if metadata.is_directory and expand_dirs:
-                    # avoids directory files and does not add any directory entry to the result
-                    continue
-
-                if is_file_entry_valid(metadata):
-                    data.append(convert_db_to_model(metadata))
-                    continue
-                with suppress(S3KeyNotFoundError):
-                    updated_fmd = await self._update_database_from_storage(
-                        conn, metadata
-                    )
-                    data.append(convert_db_to_model(updated_fmd))
+        # add all the entries from file_meta_data
+        for metadata in file_and_directory_meta_data:
+            # the checks below ensure that directories appear either as a single entry or, when expanded, only through their contents
+            if metadata.is_directory and expand_dirs:
+                # avoids directory files and does not add any directory entry to the result
+                continue

-            # now parse the project to search for node/project names
+            if is_file_entry_valid(metadata):
+                data.append(convert_db_to_model(metadata))
+                continue
+            with suppress(S3KeyNotFoundError):
+                updated_fmd = await 
self._update_database_from_storage(metadata) + data.append(convert_db_to_model(updated_fmd)) + + # now parse the project to search for node/project names + async with self.engine.acquire() as conn: prj_names_mapping: dict[ProjectID | NodeID, str] = {} async for proj_data in db_projects.list_valid_projects_in( conn, accessible_projects_ids @@ -240,21 +238,21 @@ async def list_files( # noqa C901 return data async def get_file(self, user_id: UserID, file_id: StorageFileID) -> FileMetaData: - async with self.engine.acquire() as conn, conn.begin(): + async with self.engine.acquire() as conn: can: AccessRights = await get_file_access_rights( conn, int(user_id), file_id ) - if can.read: - fmd: FileMetaDataAtDB = await db_file_meta_data.get( - conn, parse_obj_as(SimcoreS3FileID, file_id) - ) - if is_file_entry_valid(fmd): - return convert_db_to_model(fmd) - fmd = await self._update_database_from_storage(conn, fmd) - return convert_db_to_model(fmd) + if not can.read: + raise FileAccessRightError(access_right="read", file_id=file_id) - _logger.debug("User %s cannot read file %s", user_id, file_id) - raise FileAccessRightError(access_right="read", file_id=file_id) + fmd = await db_file_meta_data.get( + conn, parse_obj_as(SimcoreS3FileID, file_id) + ) + if is_file_entry_valid(fmd): + return convert_db_to_model(fmd) + # get file from storage if available + fmd = await self._update_database_from_storage(fmd) + return convert_db_to_model(fmd) async def create_file_upload_links( self, @@ -266,7 +264,7 @@ async def create_file_upload_links( sha256_checksum: SHA256Str | None, is_directory: bool, ) -> UploadLinks: - async with self.engine.acquire() as conn, conn.begin() as transaction: + async with self.engine.acquire() as conn: can: AccessRights = await get_file_access_rights(conn, user_id, file_id) if not can.write: raise FileAccessRightError(access_right="write", file_id=file_id) @@ -278,16 +276,16 @@ async def create_file_upload_links( conn, parse_obj_as(SimcoreS3FileID, file_id) ) - # ensure file is deleted first in case it already exists - await self.delete_file( - user_id=user_id, - file_id=file_id, - # NOTE: bypassing check since the project access rights don't play well - # with collaborators - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 - enforce_access_rights=False, - ) - + # ensure file is deleted first in case it already exists + await self.delete_file( + user_id=user_id, + file_id=file_id, + # NOTE: bypassing check since the project access rights don't play well + # with collaborators + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 + enforce_access_rights=False, + ) + async with self.engine.acquire() as conn: # initiate the file meta data table fmd = await self._create_fmd_for_upload( conn, @@ -304,43 +302,42 @@ async def create_file_upload_links( is_directory=is_directory, sha256_checksum=sha256_checksum, ) - # NOTE: ensure the database is updated so cleaner does not pickup newly created uploads - await transaction.commit() - if link_type == LinkType.PRESIGNED and get_s3_client(self.app).is_multipart( - file_size_bytes - ): - # create multipart links - assert file_size_bytes # nosec - multipart_presigned_links = await get_s3_client( - self.app - ).create_multipart_upload_links( - bucket=fmd.bucket_name, - object_key=fmd.file_id, - file_size=file_size_bytes, - expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, - sha256_checksum=fmd.sha256_checksum, - ) - # update the database so we keep the upload id - fmd.upload_id = 
multipart_presigned_links.upload_id + if link_type == LinkType.PRESIGNED and get_s3_client(self.app).is_multipart( + file_size_bytes + ): + # create multipart links + assert file_size_bytes # nosec + multipart_presigned_links = await get_s3_client( + self.app + ).create_multipart_upload_links( + bucket=fmd.bucket_name, + object_key=fmd.file_id, + file_size=file_size_bytes, + expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + sha256_checksum=fmd.sha256_checksum, + ) + # update the database so we keep the upload id + fmd.upload_id = multipart_presigned_links.upload_id + async with self.engine.acquire() as conn: await db_file_meta_data.upsert(conn, fmd) - return UploadLinks( - multipart_presigned_links.urls, - multipart_presigned_links.chunk_size, - ) - if link_type == LinkType.PRESIGNED: - # create single presigned link - single_presigned_link = await get_s3_client( - self.app - ).create_single_presigned_upload_link( - bucket=self.simcore_bucket_name, - object_key=fmd.file_id, - expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, - ) - return UploadLinks( - [single_presigned_link], - file_size_bytes or MAX_LINK_CHUNK_BYTE_SIZE[link_type], - ) + return UploadLinks( + multipart_presigned_links.urls, + multipart_presigned_links.chunk_size, + ) + if link_type == LinkType.PRESIGNED: + # create single presigned link + single_presigned_link = await get_s3_client( + self.app + ).create_single_presigned_upload_link( + bucket=self.simcore_bucket_name, + object_key=fmd.file_id, + expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + ) + return UploadLinks( + [single_presigned_link], + file_size_bytes or MAX_LINK_CHUNK_BYTE_SIZE[link_type], + ) # user wants just the s3 link s3_link = get_s3_client(self.app).compute_s3_url( @@ -356,7 +353,7 @@ async def abort_file_upload( user_id: UserID, file_id: StorageFileID, ) -> None: - async with self.engine.acquire() as conn, conn.begin(): + async with self.engine.acquire() as conn: can: AccessRights = await get_file_access_rights( conn, int(user_id), file_id ) @@ -366,24 +363,25 @@ async def abort_file_upload( fmd: FileMetaDataAtDB = await db_file_meta_data.get( conn, parse_obj_as(SimcoreS3FileID, file_id) ) - if is_valid_managed_multipart_upload(fmd.upload_id): - assert fmd.upload_id # nosec - await get_s3_client(self.app).abort_multipart_upload( - bucket=fmd.bucket_name, - object_key=fmd.file_id, - upload_id=fmd.upload_id, - ) - # try to recover a file if it existed - with contextlib.suppress(S3KeyNotFoundError): - await get_s3_client(self.app).undelete_object( - bucket=fmd.bucket_name, object_key=fmd.file_id - ) + if is_valid_managed_multipart_upload(fmd.upload_id): + assert fmd.upload_id # nosec + await get_s3_client(self.app).abort_multipart_upload( + bucket=fmd.bucket_name, + object_key=fmd.file_id, + upload_id=fmd.upload_id, + ) + # try to recover a file if it existed + with contextlib.suppress(S3KeyNotFoundError): + await get_s3_client(self.app).undelete_object( + bucket=fmd.bucket_name, object_key=fmd.file_id + ) - try: - # try to revert to what we had in storage if any - await self._update_database_from_storage(conn, fmd) - except S3KeyNotFoundError: - # the file does not exist, so we delete the entry in the db + try: + # try to revert to what we had in storage if any + await self._update_database_from_storage(fmd) + except S3KeyNotFoundError: + # the file does not exist, so we delete the entry in the db + async with self.engine.acquire() as conn: await 
db_file_meta_data.delete(conn, [fmd.file_id]) async def complete_file_upload( @@ -419,10 +417,9 @@ async def complete_file_upload( upload_id=fmd.upload_id, uploaded_parts=uploaded_parts, ) - async with self.engine.acquire() as conn: - fmd = await self._update_database_from_storage(conn, fmd) - assert fmd # nosec - return convert_db_to_model(fmd) + fmd = await self._update_database_from_storage(fmd) + assert fmd # nosec + return convert_db_to_model(fmd) async def create_file_download_link( self, user_id: UserID, file_id: StorageFileID, link_type: LinkType @@ -439,15 +436,26 @@ async def create_file_download_link( directory_file_id: SimcoreS3FileID | None = await get_directory_file_id( conn, cast(SimcoreS3FileID, file_id) ) - return ( - await self._get_link_for_directory_fmd( - conn, user_id, directory_file_id, file_id, link_type - ) - if directory_file_id - else await self._get_link_for_file_fmd( - conn, user_id, file_id, link_type - ) + await self.__ensure_read_access_rights( + conn, user_id, directory_file_id if directory_file_id else file_id + ) + if directory_file_id: + if not await get_s3_client(self.app).object_exists( + bucket=self.simcore_bucket_name, object_key=f"{file_id}" + ): + raise S3KeyNotFoundError(key=file_id, bucket=self.simcore_bucket_name) + return await self.__get_link( + parse_obj_as(SimcoreS3FileID, file_id), link_type + ) + # standard file link + async with self.engine.acquire() as conn: + fmd = await db_file_meta_data.get( + conn, parse_obj_as(SimcoreS3FileID, file_id) ) + if not is_file_entry_valid(fmd): + # try lazy update + fmd = await self._update_database_from_storage(fmd) + return await self.__get_link(fmd.object_name, link_type) @staticmethod async def __ensure_read_access_rights( @@ -477,39 +485,6 @@ async def __get_link( return link - async def _get_link_for_file_fmd( - self, - conn: SAConnection, - user_id: UserID, - file_id: StorageFileID, - link_type: LinkType, - ) -> AnyUrl: - # 1. the file_id maps 1:1 to `file_meta_data` - await self.__ensure_read_access_rights(conn, user_id, file_id) - - fmd = await db_file_meta_data.get(conn, parse_obj_as(SimcoreS3FileID, file_id)) - if not is_file_entry_valid(fmd): - # try lazy update - fmd = await self._update_database_from_storage(conn, fmd) - - return await self.__get_link(fmd.object_name, link_type) - - async def _get_link_for_directory_fmd( - self, - conn: SAConnection, - user_id: UserID, - directory_file_id: SimcoreS3FileID, - file_id: StorageFileID, - link_type: LinkType, - ) -> AnyUrl: - # 2. 
the file_id represents a file inside a directory - await self.__ensure_read_access_rights(conn, user_id, directory_file_id) - if not await get_s3_client(self.app).object_exists( - bucket=self.simcore_bucket_name, object_key=f"{file_id}" - ): - raise S3KeyNotFoundError(key=file_id, bucket=self.simcore_bucket_name) - return await self.__get_link(parse_obj_as(SimcoreS3FileID, file_id), link_type) - async def delete_file( self, user_id: UserID, @@ -528,33 +503,33 @@ async def delete_file( # Only use this in those circumstances where a collaborator requires to delete a file (the current # permissions model will not allow him to do so, even though this is a legitimate action) # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 - async with self.engine.acquire() as conn, conn.begin(): + async with self.engine.acquire() as conn: if enforce_access_rights: can: AccessRights = await get_file_access_rights(conn, user_id, file_id) if not can.delete: raise FileAccessRightError(access_right="delete", file_id=file_id) - with suppress(FileMetaDataNotFoundError): + with suppress(FileMetaDataNotFoundError): + # NOTE: deleting might be slow, so better ensure we release the connection + async with self.engine.acquire() as conn: file: FileMetaDataAtDB = await db_file_meta_data.get( conn, parse_obj_as(SimcoreS3FileID, file_id) ) - # NOTE: since this lists the files before deleting them - # it can be used to filter for just a single file and also - # to delete it - await get_s3_client(self.app).delete_objects_recursively( - bucket=file.bucket_name, - prefix=( - ensure_ends_with(file.file_id, "/") - if file.is_directory - else file.file_id - ), - ) + await get_s3_client(self.app).delete_objects_recursively( + bucket=file.bucket_name, + prefix=( + ensure_ends_with(file.file_id, "/") + if file.is_directory + else file.file_id + ), + ) + async with self.engine.acquire() as conn: await db_file_meta_data.delete(conn, [file.file_id]) async def delete_project_simcore_s3( self, user_id: UserID, project_id: ProjectID, node_id: NodeID | None = None ) -> None: - async with self.engine.acquire() as conn, conn.begin(): + async with self.engine.acquire() as conn: can: AccessRights | None = await get_project_access_rights( conn, user_id, project_id ) @@ -569,14 +544,14 @@ async def delete_project_simcore_s3( else: await db_file_meta_data.delete_all_from_node(conn, node_id) - await get_s3_client(self.app).delete_objects_recursively( - bucket=self.simcore_bucket_name, - prefix=ensure_ends_with( - f"{project_id}/{node_id}" if node_id else f"{project_id}", "/" - ), - ) + await get_s3_client(self.app).delete_objects_recursively( + bucket=self.simcore_bucket_name, + prefix=ensure_ends_with( + f"{project_id}/{node_id}" if node_id else f"{project_id}", "/" + ), + ) - async def deep_copy_project_simcore_s3( # noqa: C901 + async def deep_copy_project_simcore_s3( self, user_id: UserID, src_project: dict[str, Any], @@ -586,123 +561,125 @@ async def deep_copy_project_simcore_s3( # noqa: C901 ) -> None: src_project_uuid: ProjectID = ProjectID(src_project["uuid"]) dst_project_uuid: ProjectID = ProjectID(dst_project["uuid"]) - _logger.info( - "%s -> %s: Step 1: check access rights (read of src and write of dst)", - src_project_uuid, - dst_project_uuid, - ) - update_task_progress(task_progress, "Checking study access rights...") - async with self.engine.acquire() as conn: - for prj_uuid in [src_project_uuid, dst_project_uuid]: - if not await db_projects.project_exists(conn, prj_uuid): - raise 
ProjectNotFoundError(project_id=prj_uuid) - source_access_rights = await get_project_access_rights( - conn, user_id, project_id=src_project_uuid - ) - dest_access_rights = await get_project_access_rights( - conn, user_id, project_id=dst_project_uuid - ) - if not source_access_rights.read: - raise ProjectAccessRightError( - access_right="read", project_id=src_project_uuid - ) - if not dest_access_rights.write: - raise ProjectAccessRightError( - access_right="write", project_id=dst_project_uuid - ) - - _logger.info( - "%s -> %s: Step 2: start copying by listing what to copy", - src_project_uuid, - dst_project_uuid, - ) - update_task_progress( - task_progress, f"Collecting files of '{src_project['name']}'..." - ) - async with self.engine.acquire() as conn: - src_project_files: list[ - FileMetaDataAtDB - ] = await db_file_meta_data.list_fmds(conn, project_ids=[src_project_uuid]) + with log_context( + _logger, + logging.INFO, + msg=f"{src_project_uuid} -> {dst_project_uuid}: " + "Step 1: check access rights (read of src and write of dst)", + ): + update_task_progress(task_progress, "Checking study access rights...") + async with self.engine.acquire() as conn: + for prj_uuid in [src_project_uuid, dst_project_uuid]: + if not await db_projects.project_exists(conn, prj_uuid): + raise ProjectNotFoundError(project_id=prj_uuid) + source_access_rights = await get_project_access_rights( + conn, user_id, project_id=src_project_uuid + ) + dest_access_rights = await get_project_access_rights( + conn, user_id, project_id=dst_project_uuid + ) + if not source_access_rights.read: + raise ProjectAccessRightError( + access_right="read", project_id=src_project_uuid + ) + if not dest_access_rights.write: + raise ProjectAccessRightError( + access_right="write", project_id=dst_project_uuid + ) with log_context( _logger, logging.INFO, - ( - f"{src_project_uuid} -> {dst_project_uuid}: getting total file size for " - f"{len(src_project_files)} files" - ), - log_duration=True, + msg=f"{src_project_uuid} -> {dst_project_uuid}:" + " Step 2: collect what to copy", ): - sizes_and_num_files: list[tuple[ByteSize, int]] = await limited_gather( - *[self._get_size_and_num_files(fmd) for fmd in src_project_files], - limit=_MAX_PARALLEL_S3_CALLS, + update_task_progress( + task_progress, f"Collecting files of '{src_project['name']}'..." ) + async with self.engine.acquire() as conn: + src_project_files: list[ + FileMetaDataAtDB + ] = await db_file_meta_data.list_fmds( + conn, project_ids=[src_project_uuid] + ) - total_bytes_to_copy = sum(n for n, _ in sizes_and_num_files) - total_num_of_files = sum(n for _, n in sizes_and_num_files) - src_project_total_data_size: ByteSize = parse_obj_as( - ByteSize, total_bytes_to_copy - ) - _logger.info( - "%s -> %s: Step 3.1: copy: files referenced from file_metadata", - src_project_uuid, - dst_project_uuid, - ) - copy_tasks: deque[Awaitable] = deque() - s3_transfered_data_cb = S3TransferDataCB( - task_progress, - src_project_total_data_size, - task_progress_message_prefix=f"Copying {total_num_of_files} files to '{dst_project['name']}'", - ) - for src_fmd in src_project_files: - if not src_fmd.node_id or (src_fmd.location_id != self.location_id): - msg = ( - "This is not foreseen, stem from old decisions, and needs to " - f"be implemented if needed. 
Faulty metadata: {src_fmd=}"
+        with log_context(
+            _logger,
+            logging.INFO,
+            f"{src_project_uuid} -> {dst_project_uuid}: get total file size for "
+            f"{len(src_project_files)} files",
+            log_duration=True,
+        ):
+            sizes_and_num_files: list[tuple[ByteSize, int]] = await limited_gather(
+                *[self._get_size_and_num_files(fmd) for fmd in src_project_files],
+                limit=_MAX_PARALLEL_S3_CALLS,
                 )
-                raise NotImplementedError(msg)
-
-            if new_node_id := node_mapping.get(src_fmd.node_id):
-                copy_tasks.append(
-                    self._copy_path_s3_s3(
-                        user_id,
-                        src_fmd,
-                        SimcoreS3FileID(
-                            f"{dst_project_uuid}/{new_node_id}/{src_fmd.object_name.split('/', maxsplit=2)[-1]}"
-                        ),
-                        bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb,
+            total_num_of_files = sum(n for _, n in sizes_and_num_files)
+            src_project_total_data_size: ByteSize = parse_obj_as(
+                ByteSize, sum(n for n, _ in sizes_and_num_files)
+            )
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg=f"{src_project_uuid} -> {dst_project_uuid}:"
+            " Step 3.1: prepare copy tasks for files referenced from simcore",
+        ):
+            copy_tasks = []
+            s3_transfered_data_cb = S3TransferDataCB(
+                task_progress,
+                src_project_total_data_size,
+                task_progress_message_prefix=f"Copying {total_num_of_files} files to '{dst_project['name']}'",
+            )
+            for src_fmd in src_project_files:
+                if not src_fmd.node_id or (src_fmd.location_id != self.location_id):
+                    msg = (
+                        "This is not foreseen, stems from old decisions, and needs to "
+                        f"be implemented if needed. Faulty metadata: {src_fmd=}"
                     )
-                )
-        _logger.info(
-            "%s -> %s: Step 3.2: copy files referenced from file-picker from DAT-CORE",
-            src_project_uuid,
-            dst_project_uuid,
-        )
-        for node_id, node in dst_project.get("workbench", {}).items():
-            copy_tasks.extend(
-                [
-                    self._copy_file_datcore_s3(
-                        user_id=user_id,
-                        source_uuid=output["path"],
-                        dest_project_id=dst_project_uuid,
-                        dest_node_id=NodeID(node_id),
-                        file_storage_link=output,
-                        bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb,
+                    raise NotImplementedError(msg)
+
+                if new_node_id := node_mapping.get(src_fmd.node_id):
+                    copy_tasks.append(
+                        self._copy_path_s3_s3(
+                            user_id,
+                            src_fmd=src_fmd,
+                            dst_file_id=SimcoreS3FileID(
+                                f"{dst_project_uuid}/{new_node_id}/{src_fmd.object_name.split('/', maxsplit=2)[-1]}"
+                            ),
+                            bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb,
+                        )
                     )
-                    for output in node.get("outputs", {}).values()
-                    if isinstance(output, dict)
-                    and (int(output.get("store", self.location_id)) == DATCORE_ID)
-                ]
-            )
-        await limited_gather(*copy_tasks, limit=MAX_CONCURRENT_S3_TASKS)
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg=f"{src_project_uuid} -> {dst_project_uuid}:"
+            " Step 3.2: prepare copy tasks for files referenced from DAT-CORE",
+        ):
+            for node_id, node in dst_project.get("workbench", {}).items():
+                copy_tasks.extend(
+                    [
+                        self._copy_file_datcore_s3(
+                            user_id=user_id,
+                            source_uuid=output["path"],
+                            dest_project_id=dst_project_uuid,
+                            dest_node_id=NodeID(node_id),
+                            file_storage_link=output,
+                            bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb,
+                        )
+                        for output in node.get("outputs", {}).values()
+                        if isinstance(output, dict)
+                        and (int(output.get("store", self.location_id)) == DATCORE_ID)
+                    ]
+                )
+        with log_context(
+            _logger,
+            logging.INFO,
+            msg=f"{src_project_uuid} -> {dst_project_uuid}: Step 3.3: copying {len(copy_tasks)} files",
+        ):
+            await limited_gather(*copy_tasks, limit=MAX_CONCURRENT_S3_TASKS)

         # ensure the full size is reported
         s3_transfered_data_cb.finalize_transfer()
-        _logger.info(
-            "%s -> %s: 
completed copy", - src_project_uuid, - dst_project_uuid, - ) async def _get_size_and_num_files( self, fmd: FileMetaDataAtDB @@ -750,15 +727,15 @@ async def search_owned_files( limit=limit, offset=offset, ) - resolved_fmds = [] - for fmd in file_metadatas: - if is_file_entry_valid(fmd): - resolved_fmds.append(convert_db_to_model(fmd)) - continue - with suppress(S3KeyNotFoundError): - updated_fmd = await self._update_database_from_storage(conn, fmd) - resolved_fmds.append(convert_db_to_model(updated_fmd)) - return resolved_fmds + resolved_fmds = [] + for fmd in file_metadatas: + if is_file_entry_valid(fmd): + resolved_fmds.append(convert_db_to_model(fmd)) + continue + with suppress(S3KeyNotFoundError): + updated_fmd = await self._update_database_from_storage(fmd) + resolved_fmds.append(convert_db_to_model(updated_fmd)) + return resolved_fmds async def create_soft_link( self, user_id: int, target_file_id: StorageFileID, link_file_id: StorageFileID @@ -840,15 +817,17 @@ async def _clean_expired_uploads(self) -> None: ) # try first to upload these from S3, they might have finished and the client forgot to tell us (conservative) + # NOTE: no concurrency here as we want to run low resources updated_fmds = await limited_gather( *( - self._update_database_from_storage_no_connection(fmd) + self._update_database_from_storage(fmd) for fmd in list_of_expired_uploads ), reraise=False, log=_logger, limit=_NO_CONCURRENCY, ) + list_of_fmds_to_delete = [ expired_fmd for expired_fmd, updated_fmd in zip( @@ -858,9 +837,7 @@ async def _clean_expired_uploads(self) -> None: ] # try to revert the files if they exist - async def _revert_file( - conn: SAConnection, fmd: FileMetaDataAtDB - ) -> FileMetaDataAtDB: + async def _revert_file(fmd: FileMetaDataAtDB) -> FileMetaDataAtDB: if is_valid_managed_multipart_upload(fmd.upload_id): assert fmd.upload_id # nosec await s3_client.abort_multipart_upload( @@ -871,17 +848,16 @@ async def _revert_file( await s3_client.undelete_object( bucket=fmd.bucket_name, object_key=fmd.file_id ) - return await self._update_database_from_storage(conn, fmd) + return await self._update_database_from_storage(fmd) s3_client = get_s3_client(self.app) - async with self.engine.acquire() as conn: - # NOTE: no concurrency here as we want to run low resources - reverted_fmds = await limited_gather( - *(_revert_file(conn, fmd) for fmd in list_of_fmds_to_delete), - reraise=False, - log=_logger, - limit=_NO_CONCURRENCY, - ) + # NOTE: no concurrency here as we want to run low resources + reverted_fmds = await limited_gather( + *(_revert_file(fmd) for fmd in list_of_fmds_to_delete), + reraise=False, + log=_logger, + limit=_NO_CONCURRENCY, + ) list_of_fmds_to_delete = [ fmd for fmd, reverted_fmd in zip( @@ -908,25 +884,20 @@ async def _revert_file( async def clean_expired_uploads(self) -> None: await self._clean_expired_uploads() - async def _update_database_from_storage( - self, conn: SAConnection, fmd: FileMetaDataAtDB + async def _update_fmd_from_other( + self, conn: SAConnection, *, fmd: FileMetaDataAtDB, copy_from: FileMetaDataAtDB ) -> FileMetaDataAtDB: - s3_metadata: S3MetaData | None = None if not fmd.is_directory: s3_metadata = await get_s3_client(self.app).get_object_metadata( bucket=fmd.bucket_name, object_key=fmd.object_name ) - - fmd = await db_file_meta_data.get(conn, fmd.file_id) - if not fmd.is_directory and s3_metadata: fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size) fmd.last_modified = s3_metadata.last_modified fmd.entity_tag = s3_metadata.e_tag - elif fmd.is_directory: - 
s3_folder_metadata = await get_s3_client(self.app).get_directory_metadata( - bucket=fmd.bucket_name, prefix=fmd.object_name - ) - fmd.file_size = parse_obj_as(ByteSize, s3_folder_metadata.size) + else: + # we spare calling get_directory_metadata as it is not needed now and is costly + fmd.file_size = copy_from.file_size + fmd.upload_expires_at = None fmd.upload_id = None updated_fmd: FileMetaDataAtDB = await db_file_meta_data.upsert( @@ -934,12 +905,40 @@ async def _update_database_from_storage( ) return updated_fmd - async def _update_database_from_storage_no_connection( + async def _get_s3_metadata( + self, fmd: FileMetaDataAtDB + ) -> S3MetaData | S3DirectoryMetaData: + return ( + await get_s3_client(self.app).get_object_metadata( + bucket=fmd.bucket_name, object_key=fmd.object_name + ) + if not fmd.is_directory + else await get_s3_client(self.app).get_directory_metadata( + bucket=fmd.bucket_name, prefix=fmd.object_name + ) + ) + + async def _update_database_from_storage( self, fmd: FileMetaDataAtDB ) -> FileMetaDataAtDB: + """ + Raises: + S3KeyNotFoundError -- if the object key is not found in S3 + """ + s3_metadata = await self._get_s3_metadata(fmd) + if not fmd.is_directory: + assert isinstance(s3_metadata, S3MetaData) # nosec + fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size) + fmd.last_modified = s3_metadata.last_modified + fmd.entity_tag = s3_metadata.e_tag + elif fmd.is_directory: + assert isinstance(s3_metadata, S3DirectoryMetaData) # nosec + fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size) + fmd.upload_expires_at = None + fmd.upload_id = None async with self.engine.acquire() as conn: - updated_fmd: FileMetaDataAtDB = await self._update_database_from_storage( - conn, fmd + updated_fmd: FileMetaDataAtDB = await db_file_meta_data.upsert( + conn, convert_db_to_model(fmd) ) return updated_fmd @@ -971,7 +970,7 @@ async def _copy_file_datcore_s3( await download_to_file_or_raise(session, dc_link, local_file_path) # copying will happen using aioboto3, therefore multipart might happen - async with self.engine.acquire() as conn, conn.begin() as transaction: + async with self.engine.acquire() as conn: new_fmd = await self._create_fmd_for_upload( conn, user_id, @@ -980,16 +979,14 @@ async def _copy_file_datcore_s3( is_directory=False, sha256_checksum=None, ) - # NOTE: ensure the database is updated so cleaner does not pickup newly created uploads - await transaction.commit() - # Uploads local -> S3 - await get_s3_client(self.app).upload_file( - bucket=self.simcore_bucket_name, - file=local_file_path, - object_key=dst_file_id, - bytes_transfered_cb=bytes_transfered_cb, - ) - updated_fmd = await self._update_database_from_storage(conn, new_fmd) + # Uploads local -> S3 + await get_s3_client(self.app).upload_file( + bucket=self.simcore_bucket_name, + file=local_file_path, + object_key=dst_file_id, + bytes_transfered_cb=bytes_transfered_cb, + ) + updated_fmd = await self._update_database_from_storage(fmd=new_fmd) file_storage_link["store"] = self.location_id file_storage_link["path"] = new_fmd.file_id @@ -1000,29 +997,27 @@ async def _copy_file_datcore_s3( async def _copy_path_s3_s3( self, user_id: UserID, + *, src_fmd: FileMetaDataAtDB, dst_file_id: SimcoreS3FileID, bytes_transfered_cb: Callable[[int], None], ) -> FileMetaData: - _logger.debug( - "copying %s to %s, %s", - f"{src_fmd=}", - f"{dst_file_id=}", - f"{src_fmd.is_directory=}", - ) - # copying will happen using aioboto3, therefore multipart might happen - # NOTE: connection must be released to ensure database update - 
async with self.engine.acquire() as conn, conn.begin() as transaction: - new_fmd = await self._create_fmd_for_upload( - conn, - user_id, - dst_file_id, - upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, - is_directory=src_fmd.is_directory, - sha256_checksum=src_fmd.sha256_checksum, - ) - # NOTE: ensure the database is updated so cleaner does not pickup newly created uploads - await transaction.commit() + with log_context( + _logger, + logging.INFO, + f"copying {src_fmd.file_id=} to {dst_file_id=}, {src_fmd.is_directory=}", + ): + # copying will happen using aioboto3, therefore multipart might happen + # NOTE: connection must be released to ensure database update + async with self.engine.acquire() as conn: + new_fmd = await self._create_fmd_for_upload( + conn, + user_id, + dst_file_id, + upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, + is_directory=src_fmd.is_directory, + sha256_checksum=src_fmd.sha256_checksum, + ) s3_client = get_s3_client(self.app) @@ -1040,10 +1035,12 @@ async def _copy_path_s3_s3( dst_object_key=new_fmd.object_name, bytes_transfered_cb=bytes_transfered_cb, ) - - updated_fmd = await self._update_database_from_storage(conn, new_fmd) - _logger.info("copied %s to %s", f"{src_fmd=}", f"{updated_fmd=}") - return convert_db_to_model(updated_fmd) + # we are done, let's update the copy with the src + async with self.engine.acquire() as conn: + updated_fmd = await self._update_fmd_from_other( + conn, fmd=new_fmd, copy_from=src_fmd + ) + return convert_db_to_model(updated_fmd) async def _create_fmd_for_upload( self, diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index d4c240a5bf3..b6d27f0df29 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -43,9 +43,11 @@ from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.s3 import upload_file_to_presigned_link from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status +from settings_library.s3 import S3Settings from simcore_postgres_database.storage_models import file_meta_data, projects, users from simcore_service_storage.application import create from simcore_service_storage.dsm import get_dsm_provider @@ -72,6 +74,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.file_extra", "pytest_simcore.httpbin_service", + "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", @@ -96,14 +99,6 @@ def package_dir(here: Path) -> Path: return dirpath -@pytest.fixture(scope="session") -def osparc_simcore_root_dir(here: Path) -> Path: - root_dir = here.parent.parent.parent - assert root_dir.exists() - assert any(root_dir.glob("services")), "Is this service within osparc-simcore repo?" 
- return root_dir - - @pytest.fixture(scope="session") def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: # uses pytest_simcore.environs.osparc_simcore_root_dir @@ -169,8 +164,23 @@ def app_settings( aiopg_engine: Engine, postgres_host_config: dict[str, str], mocked_s3_server_envs: EnvVarsDict, + external_envfile_dict: EnvVarsDict, datcore_adapter_service_mock: aioresponses.aioresponses, + monkeypatch: pytest.MonkeyPatch, ) -> Settings: + if external_envfile_dict: + s3_settings = S3Settings.create_from_envs(**external_envfile_dict) + if s3_settings.S3_ENDPOINT is None: + monkeypatch.delenv("S3_ENDPOINT") + setenvs_from_dict( + monkeypatch, + s3_settings.dict(exclude={"S3_ENDPOINT"}), + ) + else: + setenvs_from_dict( + monkeypatch, + s3_settings.dict(), + ) test_app_settings = Settings.create_from_envs() print(f"{test_app_settings.json(indent=2)=}") return test_app_settings diff --git a/services/storage/tests/data/projects_with_data.json b/services/storage/tests/data/projects_with_data.json deleted file mode 100644 index 302a5d33481..00000000000 --- a/services/storage/tests/data/projects_with_data.json +++ /dev/null @@ -1,163 +0,0 @@ -[ - { - "uuid": "5a6d7f24-ee9a-4112-bca2-85a8ca49234a", - "name": "ISAN2019: 3D Paraview", - "description": "3D Paraview viewer with two inputs", - "thumbnail": "https://user-images.githubusercontent.com/33152403/60168939-073a5580-9806-11e9-8dad-8a7caa3eb5ab.png", - "prjOwner": "pytest@itis.swiss", - "creationDate": "2019-06-06T14:33:43.065Z", - "lastChangeDate": "2019-06-06T14:33:44.747Z", - "accessRights": {}, - "workbench": { - "de2578c5-431e-5753-af37-e6aec8120bf2": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker 1", - "inputs": {}, - "inputNodes": [], - "outputs": { - "outFile": { - "store": 1, - "path": "Shared Data/HField_Big.vtk" - } - }, - "progress": 100, - "position": { - "x": 100, - "y": 100 - } - }, - "de2578c5-431e-522c-a377-dd8d7cd1265b": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker 2", - "inputs": {}, - "inputNodes": [], - "outputs": { - "outFile": { - "store": 1, - "path": "Shared Data/bunny.vtk" - } - }, - "progress": 100, - "position": { - "x": 100, - "y": 250 - } - }, - "de2578c5-431e-9b0f-a456-67677a20996c": { - "key": "simcore/services/dynamic/3d-viewer", - "version": "2.10.0", - "label": "3D ParaViewer", - "inputs": { - "A": { - "nodeUuid": "de2578c5-431e-5753-af37-e6aec8120bf2", - "output": "outFile" - }, - "B": { - "nodeUuid": "de2578c5-431e-522c-a377-dd8d7cd1265b", - "output": "outFile" - } - }, - "inputNodes": [ - "de2578c5-431e-5753-af37-e6aec8120bf2", - "de2578c5-431e-522c-a377-dd8d7cd1265b" - ], - "outputs": {}, - "progress": 85, - "position": { - "x": 400, - "y": 175 - } - } - } - }, - { - "uuid": "de2578c5-431e-5d82-b08d-d39c436ca738", - "name": "ISAN: UCDavis use case: 0D", - "description": "Colleen Clancy Single Cell solver with a file picker and PostPro viewer", - "thumbnail": "https://user-images.githubusercontent.com/33152403/60168940-073a5580-9806-11e9-9a44-ae5266eeb020.png", - "prjOwner": "pytest@itis.swiss", - "creationDate": "2019-06-06T14:33:51.940Z", - "lastChangeDate": "2019-06-06T14:33:54.329Z", - "accessRights": {}, - "workbench": { - "de2578c5-431e-59d6-b1a5-6e7b2773636b": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker 0D", - "inputs": {}, - "inputNodes": [], - "outputs": { - "outFile": { - "store": 1, - "path": "Shared Data/initial_WStates" - } - }, - 
"progress": 100, - "position": { - "x": 50, - "y": 150 - } - }, - "de2578c5-431e-562f-afd1-cca5105c8844": { - "key": "simcore/services/comp/ucdavis-singlecell-cardiac-model", - "version": "1.0.0", - "label": "DBP-Clancy-Rabbit-Single-Cell solver", - "inputs": { - "Na": 0, - "Kr": 0, - "BCL": 200, - "NBeats": 5, - "Ligand": 0, - "cAMKII": "WT", - "initfile": { - "nodeUuid": "de2578c5-431e-59d6-b1a5-6e7b2773636b", - "output": "outFile" - } - }, - "inputAccess": { - "Na": "ReadAndWrite", - "Kr": "ReadOnly", - "BCL": "ReadAndWrite", - "NBeats": "ReadOnly", - "Ligand": "Invisible", - "cAMKII": "Invisible" - }, - "inputNodes": [ - "de2578c5-431e-59d6-b1a5-6e7b2773636b" - ], - "outputs": {}, - "progress": 0, - "position": { - "x": 300, - "y": 150 - } - }, - "de2578c5-431e-5fdd-9daa-cb03c51d8138": { - "key": "simcore/services/dynamic/cc-0d-viewer", - "version": "2.8.0", - "label": "cc-0d-viewer", - "inputs": { - "vm_1Hz": { - "nodeUuid": "de2578c5-431e-562f-afd1-cca5105c8844", - "output": "out_4" - }, - "all_results_1Hz": { - "nodeUuid": "de2578c5-431e-562f-afd1-cca5105c8844", - "output": "out_1" - } - }, - "inputNodes": [ - "de2578c5-431e-562f-afd1-cca5105c8844" - ], - "outputs": {}, - "progress": 20, - "position": { - "x": 550, - "y": 150 - } - } - } - } -] diff --git a/services/storage/tests/fixtures/data_models.py b/services/storage/tests/fixtures/data_models.py index 197e877ec7a..779a18f2f2c 100644 --- a/services/storage/tests/fixtures/data_models.py +++ b/services/storage/tests/fixtures/data_models.py @@ -4,10 +4,11 @@ from collections import deque +from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import asynccontextmanager from pathlib import Path from random import choice, randint -from typing import Any, AsyncIterator, Awaitable, Callable +from typing import Any import pytest import sqlalchemy as sa @@ -179,7 +180,7 @@ async def _creator( @pytest.fixture async def random_project_with_files( aiopg_engine: Engine, - create_project: Callable[[], Awaitable[dict[str, Any]]], + create_project: Callable[..., Awaitable[dict[str, Any]]], create_project_node: Callable[..., Awaitable[NodeID]], create_simcore_file_id: Callable[ [ProjectID, NodeID, str, Path | None], SimcoreS3FileID @@ -219,7 +220,7 @@ async def _creator( dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]] ]: assert len(file_sizes) == len(file_checksums) - project = await create_project() + project = await create_project(name="random-project") src_projects_list: dict[ NodeID, dict[SimcoreS3FileID, dict[str, Path | str]] ] = {} diff --git a/services/storage/tests/helpers/utils_project.py b/services/storage/tests/helpers/utils_project.py index b6519c4e82c..ad4535c9d70 100644 --- a/services/storage/tests/helpers/utils_project.py +++ b/services/storage/tests/helpers/utils_project.py @@ -15,6 +15,7 @@ def clone_project_data( project_copy_uuid = uuidlib.uuid4() # random project id project_copy["uuid"] = str(project_copy_uuid) project_copy.pop("id", None) + project_copy["name"] = f"{project['name']}-copy" # Workbench nodes shall be unique within the project context def _create_new_node_uuid(old_uuid: NodeIDStr) -> NodeIDStr: diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index 0d2de438f85..11664bbe387 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -5,6 +5,7 @@ # pylint:disable=no-name-in-module # 
pylint:disable=too-many-nested-blocks +import asyncio import logging import sys from collections.abc import Awaitable, Callable @@ -21,17 +22,16 @@ from faker import Faker from models_library.api_schemas_storage import FileMetaDataGet, FoldersBody from models_library.basic_types import SHA256Str -from models_library.projects import Project, ProjectID +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID -from models_library.utils.change_case import camel_to_snake from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import ByteSize, parse_file_as, parse_obj_as +from pydantic import ByteSize, parse_obj_as from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.logging import log_context +from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import long_running_task_request -from servicelib.utils import limited_gather from settings_library.s3 import S3Settings from simcore_postgres_database.storage_models import file_meta_data from simcore_service_storage.models import SearchFilesQueryParams @@ -43,7 +43,7 @@ from ..helpers.utils import get_updated_project pytest_simcore_core_services_selection = ["postgres"] -pytest_simcore_ops_services_selection = ["adminer"] +pytest_simcore_ops_services_selection = ["adminer", "minio"] CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -315,21 +315,13 @@ async def test_copy_folders_from_valid_project( ) -def _get_project_with_data() -> list[Project]: - projects = parse_file_as( - list[Project], CURRENT_DIR / "../data/projects_with_data.json" - ) - assert projects - return projects - - async def _create_and_delete_folders_from_project( user_id: UserID, project: dict[str, Any], client: TestClient, project_db_creator: Callable, check_list_files: bool, -): +) -> None: destination_project, nodes_map = clone_project_data(project) await project_db_creator(**destination_project) @@ -343,15 +335,9 @@ async def _create_and_delete_folders_from_project( ) # data should be equal to the destination project, and all store entries should point to simcore.s3 - for key in data: - if key != "workbench": - assert data[key] == destination_project[key] - else: - for _node_id, node in data[key].items(): - if "outputs" in node: - for _o_id, o in node["outputs"].items(): - if "store" in o: - assert o["store"] == SimcoreS3DataManager.get_location_id() + # NOTE: data is jsonized where destination project is not! 
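+    # (editor's note, added for clarity: jsonable_encoder serializes UUIDs and
+    #  datetimes to plain strings, which is what makes the in-memory project
+    #  comparable with the JSON response body)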
+ assert jsonable_encoder(destination_project) == data + project_id = data["uuid"] # list data to check all is here @@ -388,52 +374,90 @@ async def _create_and_delete_folders_from_project( assert not data -@pytest.mark.parametrize( - "project", - [pytest.param(prj, id=prj.name) for prj in _get_project_with_data()], -) +@pytest.fixture +def set_log_levels_for_noisy_libraries() -> None: + # Reduce the log level for 'werkzeug' + logging.getLogger("werkzeug").setLevel(logging.WARNING) + + +@pytest.fixture +async def with_random_project_with_files( + random_project_with_files: Callable[ + ..., + Awaitable[ + tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], + ] + ], + ], +) -> tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]],]: + return await random_project_with_files( + file_sizes=( + parse_obj_as(ByteSize, "1Mib"), + parse_obj_as(ByteSize, "2Mib"), + parse_obj_as(ByteSize, "5Mib"), + ) + ) + + +async def test_connect_to_external( + set_log_levels_for_noisy_libraries: None, + client: TestClient, + user_id: UserID, + project_id: ProjectID, +): + assert client.app + url = ( + client.app.router["get_files_metadata"] + .url_for(location_id=f"{SimcoreS3DataManager.get_location_id()}") + .with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") + ) + resp = await client.get(f"{url}") + data, error = await assert_status(resp, status.HTTP_200_OK) + print(data) + + async def test_create_and_delete_folders_from_project( + set_log_levels_for_noisy_libraries: None, client: TestClient, user_id: UserID, - project: Project, create_project: Callable[..., Awaitable[dict[str, Any]]], + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], + ], mock_datcore_download, ): - project_as_dict = jsonable_encoder(project, exclude={"tags", "state", "prj_owner"}) - # HACK: some key names must be changed but not all - KEYS = {"creationDate", "lastChangeDate", "accessRights"} - for k in KEYS: - project_as_dict[camel_to_snake(k)] = project_as_dict.pop(k, None) - - await create_project(**project_as_dict) + project_in_db, _ = with_random_project_with_files await _create_and_delete_folders_from_project( - user_id, project_as_dict, client, create_project, check_list_files=True + user_id, project_in_db, client, create_project, check_list_files=True ) -@pytest.mark.parametrize( - "project", - [pytest.param(prj, id=prj.name) for prj in _get_project_with_data()], -) +@pytest.mark.parametrize("num_concurrent_calls", [50]) async def test_create_and_delete_folders_from_project_burst( + set_log_levels_for_noisy_libraries: None, + minio_s3_settings_envs: EnvVarsDict, client: TestClient, user_id: UserID, - project: Project, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], + ], create_project: Callable[..., Awaitable[dict[str, Any]]], mock_datcore_download, + num_concurrent_calls: int, ): - project_as_dict = jsonable_encoder( - project, exclude={"tags", "state", "prj_owner"}, by_alias=False - ) - await create_project(**project_as_dict) - await limited_gather( + project_in_db, _ = with_random_project_with_files + # NOTE: here the point is to NOT have a limit on the number of calls!! 
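+    # (editor's note, added for clarity: asyncio.gather fires all coroutines at
+    #  once; servicelib's limited_gather, used here before, capped concurrency)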
+ await asyncio.gather( *[ _create_and_delete_folders_from_project( - user_id, project_as_dict, client, create_project, check_list_files=False + user_id, project_in_db, client, create_project, check_list_files=False ) - for _ in range(100) - ], - limit=2, + for _ in range(num_concurrent_calls) + ] ) From 71e5513b67b7050ef5f001583723537d13d4f9ad Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Tue, 2 Jul 2024 11:56:14 +0200 Subject: [PATCH 076/219] =?UTF-8?q?=E2=9C=A8=20Limit=20inflight=20requests?= =?UTF-8?q?=20to=20the=20api-server=20(#6007)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env-devel | 1 + services/docker-compose.yml | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.env-devel b/.env-devel index d50e0af58b5..9a62f1511fe 100644 --- a/.env-devel +++ b/.env-devel @@ -21,6 +21,7 @@ AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678 API_SERVER_DEV_FEATURES_ENABLED=0 API_SERVER_LOGLEVEL=WARNING API_SERVER_PROFILING=1 +TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25 AUTOSCALING_DASK=null AUTOSCALING_DRAIN_NODES_WITH_LABELS=False diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 513224158c4..bfcdda2f4c1 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -56,7 +56,7 @@ services: - traefik.http.routers.${SWARM_STACK_NAME}_api-server.rule=hostregexp(`{host:.+}`) && (Path(`/`, `/v0`) || PathPrefix(`/v0/`) || Path(`/api/v0/openapi.json`)) - traefik.http.routers.${SWARM_STACK_NAME}_api-server.entrypoints=simcore_api - traefik.http.routers.${SWARM_STACK_NAME}_api-server.priority=1 - - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@docker,ratelimit-${SWARM_STACK_NAME}_api-server + - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@docker,ratelimit-${SWARM_STACK_NAME}_api-server,inflightreq-${SWARM_STACK_NAME}_api-server networks: - default @@ -1185,6 +1185,11 @@ services: - traefik.http.middlewares.ratelimit-${SWARM_STACK_NAME}_api-server.ratelimit.burst=10 # X-Forwarded-For header extracts second IP from the right, count starts at one - traefik.http.middlewares.ratelimit-${SWARM_STACK_NAME}_api-server.ratelimit.sourcecriterion.ipstrategy.depth=2 + # middleware for limiting total inflight requests the api-server is handling + - traefik.http.middlewares.ensure-group-header-${SWARM_STACK_NAME}_api-server.headers.customrequestheaders.X-Inflight-Limit-Group=all + - traefik.http.middlewares.limit-reqs-${SWARM_STACK_NAME}_api-server.inflightreq.amount=${TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT} + - traefik.http.middlewares.limit-reqs-${SWARM_STACK_NAME}_api-server.inflightreq.sourcecriterion.requestheadername=X-Inflight-Limit-Group + - traefik.http.middlewares.inflightreq-${SWARM_STACK_NAME}_api-server.chain.middlewares=ensure-group-header-${SWARM_STACK_NAME}_api-server,limit-reqs-${SWARM_STACK_NAME}_api-server networks: - default - interactive_services_subnet # for legacy dynamic services From 817d81ca4d952153a68fd455d5395907412594cb Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Tue, 2 Jul 2024 16:21:42 +0200 Subject: [PATCH 077/219] =?UTF-8?q?=E2=9C=A8=20Add=20metamodeling=20load?= =?UTF-8?q?=20tests=20(#6014)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../test_used_docker_compose.py | 4 +- tests/performance/Dockerfile | 5 +- 
tests/performance/Makefile                    |   8 +-
 tests/performance/docker-compose.yml          |  10 +-
 tests/performance/locust_files/.env-devel     |  11 ++
 .../locust_files/metamodeling/.env-devel      |  12 ++
 .../locust_files/metamodeling/passer.py       |  22 +++
 .../metamodeling/study_template.png           | Bin 0 -> 99265 bytes
 .../locust_files/metamodeling/workflow.py     | 163 ++++++++++++++++++
 9 files changed, 224 insertions(+), 11 deletions(-)
 create mode 100644 tests/performance/locust_files/.env-devel
 create mode 100644 tests/performance/locust_files/metamodeling/.env-devel
 create mode 100644 tests/performance/locust_files/metamodeling/passer.py
 create mode 100644 tests/performance/locust_files/metamodeling/study_template.png
 create mode 100644 tests/performance/locust_files/metamodeling/workflow.py

diff --git a/tests/environment-setup/test_used_docker_compose.py b/tests/environment-setup/test_used_docker_compose.py
index 946da61d569..93d07ba9b66 100644
--- a/tests/environment-setup/test_used_docker_compose.py
+++ b/tests/environment-setup/test_used_docker_compose.py
@@ -76,7 +76,9 @@ def ensure_env_file(env_devel_file: Path) -> Iterable[Path]:


 def _skip_not_useful_docker_composes(p) -> bool:
-    return "osparc-gateway-server" not in f"{p}" and "manual" not in f"{p}"
+    result = "osparc-gateway-server" not in f"{p}" and "manual" not in f"{p}"
+    result &= "tests/performance" not in f"{p}"
+    return result


 compose_paths = filter(
diff --git a/tests/performance/Dockerfile b/tests/performance/Dockerfile
index 634e2d6776a..36219fc25ad 100644
--- a/tests/performance/Dockerfile
+++ b/tests/performance/Dockerfile
@@ -6,6 +6,9 @@ FROM locustio/locust:${LOCUST_VERSION}
 RUN pip3 --version && \
     pip3 install \
     faker \
+    locust-plugins==2.1.1 \
+    pydantic \
+    pydantic-settings \
     python-dotenv \
-    locust-plugins==2.1.1 &&\
+    tenacity && \
     pip3 freeze --verbose
diff --git a/tests/performance/Makefile b/tests/performance/Makefile
index 38137391643..dfce104fb36 100644
--- a/tests/performance/Makefile
+++ b/tests/performance/Makefile
@@ -3,7 +3,7 @@
 #
 include ../../scripts/common.Makefile

-LOCUST_VERSION=2.5.1
+LOCUST_VERSION=2.29.1
 export LOCUST_VERSION

 # UTILS
@@ -42,9 +42,9 @@ down: ## stops and removes osparc locust containers
 	docker compose --file docker-compose.yml down

 .PHONY: test
-test: ## runs osparc locust with target=locust_test_file.py in headless mode for a minute. Will fail if 5% more fail requests or average response time is above 50ms, optional host can be set
+test: ## runs osparc locust. Locust and test configuration are specified in the .env file next to the target file
	@$(call check_defined, target, please define target file when calling $@ - e.g. ```make $@ target=MY_LOCUST_FILE.py```)
	@export LOCUST_FILE=$(target); \
-	export TARGET_URL=$(if $(host),$(host),"http://$(get_my_ip):9081"); \
-	export LOCUST_OPTIONS="--headless --print-stats --users=100 --spawn-rate=20 --run-time=1m --check-fail-ratio=0.01 --check-avg-response-time=$(if $(resp_time),$(resp_time),200)"; \
+	export ENV_FILE=$$(dirname $$(realpath locust_files/$${LOCUST_FILE}))/.env; \
+	if [ ! 
-f $${ENV_FILE} ]; then cp $$(dirname $${ENV_FILE})/.env-devel $${ENV_FILE}; fi; \ docker compose --file docker-compose.yml up --scale worker=4 --exit-code-from=master diff --git a/tests/performance/docker-compose.yml b/tests/performance/docker-compose.yml index 2623454210e..b95ad4c691c 100644 --- a/tests/performance/docker-compose.yml +++ b/tests/performance/docker-compose.yml @@ -7,14 +7,14 @@ services: - ./locust_files:/mnt/locust - ./locust_report:/reporting command: > - -f /mnt/locust/${LOCUST_FILE} --host ${TARGET_URL} --html - /reporting/locust_html.html ${LOCUST_OPTIONS} --master + -f /mnt/locust/${LOCUST_FILE} --html /reporting/locust_html.html --master + env_file: + - ${ENV_FILE} worker: image: itisfoundation/locust:${LOCUST_VERSION} volumes: - ./locust_files:/mnt/locust command: -f /mnt/locust/${LOCUST_FILE} --worker --master-host master - environment: - - SC_USER_NAME=${SC_USER_NAME} - - SC_PASSWORD=${SC_PASSWORD} + env_file: + - ${ENV_FILE} diff --git a/tests/performance/locust_files/.env-devel b/tests/performance/locust_files/.env-devel new file mode 100644 index 00000000000..a0828e4312a --- /dev/null +++ b/tests/performance/locust_files/.env-devel @@ -0,0 +1,11 @@ +# user +SC_USER_NAME= +SC_PASSWORD= + +# locust settings +LOCUST_HOST= +LOCUST_USERS=100 +LOCUST_HEADLESS=true +LOCUST_PRINT_STATS=true +LOCUST_SPAWN_RATE=20 +LOCUST_RUN_TIME=1m diff --git a/tests/performance/locust_files/metamodeling/.env-devel b/tests/performance/locust_files/metamodeling/.env-devel new file mode 100644 index 00000000000..84629c235d7 --- /dev/null +++ b/tests/performance/locust_files/metamodeling/.env-devel @@ -0,0 +1,12 @@ +# user +OSPARC_API_KEY= +OSPARC_API_SECRET= +TEMPLATE_UUID=