Commit

fixes and cleanup
sanderegg committed Dec 3, 2024
1 parent e098402 commit 0079662
Showing 4 changed files with 8 additions and 64 deletions.
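The hunks below remove the last traces of the explicit default cluster from the director-v2 unit tests: the Cluster model is replaced by BaseCluster, the cluster_id=DEFAULT_CLUSTER_ID argument (together with the matching ClusterNotFoundError expectation) disappears from the CompRunsRepository.create(...) calls, the obsolete test_export_clusters_to_db test is dropped, and the scheduler tests now expect comp_run.cluster_id to be None. A minimal, hypothetical sketch of the resulting test shape, assuming the fixtures (aiopg_engine, published_project, run_metadata, faker) and the imports already present in these test modules; the test name and the final assertion on the repository result are illustrative, not code from this commit:

async def test_create_run_without_explicit_cluster(
    aiopg_engine,
    published_project: PublishedProject,
    run_metadata,
    faker: Faker,
):
    # cluster_id=DEFAULT_CLUSTER_ID is no longer passed: the run is created
    # without being pinned to a specific cluster
    created = await CompRunsRepository(aiopg_engine).create(
        user_id=published_project.user["id"],
        project_id=published_project.project.uuid,
        iteration=None,
        metadata=run_metadata,
        use_on_demand_clusters=faker.pybool(),
    )
    # assumption mirroring the scheduler-side checks in this commit:
    # a freshly created run carries no cluster id
    assert created.cluster_id is None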
@@ -14,8 +14,7 @@
 from faker import Faker
 from fastapi import FastAPI
 from models_library.clusters import (
-    DEFAULT_CLUSTER_ID,
-    Cluster,
+    BaseCluster,
     ClusterAuthentication,
     ClusterTypeInModel,
     NoAuthentication,
@@ -80,10 +79,10 @@ def test_dask_clients_pool_properly_setup_and_deleted(
 
 
 @pytest.fixture
-def fake_clusters(faker: Faker) -> Callable[[int], list[Cluster]]:
-    def creator(num_clusters: int) -> list[Cluster]:
+def fake_clusters(faker: Faker) -> Callable[[int], list[BaseCluster]]:
+    def creator(num_clusters: int) -> list[BaseCluster]:
         return [
-            Cluster.model_validate(
+            BaseCluster.model_validate(
                 {
                     "id": faker.pyint(),
                     "name": faker.name(),
@@ -139,7 +138,7 @@ async def test_dask_clients_pool_acquisition_creates_client_on_demand(
     minimal_dask_config: None,
     mocker: MockerFixture,
     client: TestClient,
-    fake_clusters: Callable[[int], list[Cluster]],
+    fake_clusters: Callable[[int], list[BaseCluster]],
 ):
     assert client.app
     the_app = cast(FastAPI, client.app)
@@ -184,7 +183,7 @@ async def test_acquiring_wrong_cluster_raises_exception(
     minimal_dask_config: None,
     mocker: MockerFixture,
     client: TestClient,
-    fake_clusters: Callable[[int], list[Cluster]],
+    fake_clusters: Callable[[int], list[BaseCluster]],
 ):
     assert client.app
     the_app = cast(FastAPI, client.app)
@@ -215,7 +214,6 @@ def test_default_cluster_correctly_initialized(
         == dask_scheduler_settings.COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL
     )
 
-    assert default_cluster.id == DEFAULT_CLUSTER_ID
     assert isinstance(default_cluster.authentication, get_args(ClusterAuthentication))
 
 
services/director-v2/tests/unit/test_utils_db.py (30 changes: 0 additions & 30 deletions)
@@ -1,40 +1,10 @@
-from contextlib import suppress
-from typing import Any, cast
-
 import pytest
-from models_library.clusters import BaseCluster, Cluster
 from models_library.projects_state import RunningState
-from pydantic import BaseModel
 from simcore_postgres_database.models.comp_pipeline import StateType
 from simcore_service_director_v2.utils.db import (
     DB_TO_RUNNING_STATE,
     RUNNING_STATE_TO_DB,
-    to_clusters_db,
 )
-
-
-@pytest.mark.parametrize(
-    "model_cls",
-    [Cluster],
-)
-def test_export_clusters_to_db(
-    model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]]
-):
-    for example in model_cls_examples.values():
-        owner_gid = example["owner"]
-        # remove the owner from the access rights if any
-        with suppress(KeyError):
-            example.get("access_rights", {}).pop(owner_gid)
-        instance = cast(BaseCluster, model_cls(**example))
-
-        # for updates
-
-        cluster_db_dict = to_clusters_db(instance, only_update=True)
-        keys_not_in_db = ["id", "access_rights"]
-
-        assert list(cluster_db_dict.keys()) == [
-            x for x in example if x not in keys_not_in_db
-        ]
 
 
 @pytest.mark.parametrize("input_running_state", RunningState)
@@ -15,12 +15,10 @@
 import pytest
 from _helpers import PublishedProject
 from faker import Faker
-from models_library.clusters import DEFAULT_CLUSTER_ID
 from models_library.projects import ProjectID
 from models_library.projects_state import RunningState
 from models_library.users import UserID
 from simcore_service_director_v2.core.errors import (
-    ClusterNotFoundError,
     ComputationalRunNotFoundError,
     ProjectNotFoundError,
     UserNotFoundError,
@@ -89,7 +87,6 @@ async def test_list(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -101,7 +98,6 @@
             CompRunsRepository(aiopg_engine).create(
                 user_id=published_project.user["id"],
                 project_id=published_project.project.uuid,
-                cluster_id=DEFAULT_CLUSTER_ID,
                 iteration=created.iteration + n + 1,
                 metadata=run_metadata,
                 use_on_demand_clusters=faker.pybool(),
@@ -265,7 +261,6 @@ async def test_create(
         await CompRunsRepository(aiopg_engine).create(
             user_id=fake_user_id,
             project_id=fake_project_id,
-            cluster_id=DEFAULT_CLUSTER_ID,
             iteration=None,
             metadata=run_metadata,
             use_on_demand_clusters=faker.pybool(),
@@ -275,7 +270,6 @@
         await CompRunsRepository(aiopg_engine).create(
             user_id=fake_user_id,
             project_id=published_project.project.uuid,
-            cluster_id=DEFAULT_CLUSTER_ID,
             iteration=None,
             metadata=run_metadata,
             use_on_demand_clusters=faker.pybool(),
@@ -284,7 +278,6 @@
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -299,7 +292,6 @@
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -314,16 +306,6 @@
     )
     assert created == got
 
-    with pytest.raises(ClusterNotFoundError):
-        await CompRunsRepository(aiopg_engine).create(
-            user_id=published_project.user["id"],
-            project_id=published_project.project.uuid,
-            cluster_id=faker.pyint(min_value=1),
-            iteration=None,
-            metadata=run_metadata,
-            use_on_demand_clusters=faker.pybool(),
-        )
-
 
 async def test_update(
     aiopg_engine,
@@ -343,7 +325,6 @@ async def test_update(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -377,7 +358,6 @@ async def test_set_run_result(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -425,7 +405,6 @@ async def test_mark_for_cancellation(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -457,7 +436,6 @@ async def test_mark_for_scheduling(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -491,7 +469,6 @@ async def test_mark_scheduling_done(
     created = await CompRunsRepository(aiopg_engine).create(
         user_id=published_project.user["id"],
         project_id=published_project.project.uuid,
-        cluster_id=DEFAULT_CLUSTER_ID,
         iteration=None,
         metadata=run_metadata,
         use_on_demand_clusters=faker.pybool(),
@@ -18,7 +18,6 @@
 import pytest
 from _helpers import PublishedProject, assert_comp_runs, assert_comp_runs_empty
 from fastapi import FastAPI
-from models_library.clusters import DEFAULT_CLUSTER_ID
 from models_library.projects import ProjectAtDB
 from models_library.projects_state import RunningState
 from pytest_mock.plugin import MockerFixture
@@ -173,7 +172,7 @@ async def test_schedule_all_pipelines(
     assert comp_run.user_id == published_project.project.prj_owner
     assert comp_run.iteration == 1
     assert comp_run.cancelled is None
-    assert comp_run.cluster_id == DEFAULT_CLUSTER_ID
+    assert comp_run.cluster_id is None
     assert comp_run.metadata == run_metadata
     assert comp_run.result is RunningState.PUBLISHED
     assert comp_run.scheduled is not None
@@ -276,7 +275,7 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines(
     assert comp_run.user_id == published_project.project.prj_owner
     assert comp_run.iteration == 1
     assert comp_run.cancelled is None
-    assert comp_run.cluster_id == DEFAULT_CLUSTER_ID
+    assert comp_run.cluster_id is None
     assert comp_run.metadata == run_metadata
     assert comp_run.result is RunningState.PUBLISHED
     assert comp_run.scheduled is not None
