diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8099390b2b..53fc2d6a83 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -65,6 +65,7 @@ repos: - capellambse - typer - types-lxml + - types-redis - repo: local hooks: - id: pylint diff --git a/backend/Makefile b/backend/Makefile index 0453d01b8f..41fcd1496c 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -7,6 +7,9 @@ DB_USER = dev DB_NAME = dev VENV = .venv +REDIS_PORT = 6379 +REDIS_INSIGHT_PORT = 8001 + DATABASE_LOAD_FILE ?= ../local/load.sql DATABASE_SAVE_DIR ?= ../local @@ -29,6 +32,14 @@ database: -e POSTGRES_DB=$(DB_NAME) \ postgres +redis: + docker start redis || \ + docker run -d \ + --name redis \ + -p $(REDIS_PORT):6379 \ + -p $(REDIS_INSIGHT_PORT):8001 \ + redis/redis-stack:latest + app: if [ -d "$(VENV)/bin" ]; then source $(VENV)/bin/activate; diff --git a/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py b/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py new file mode 100644 index 0000000000..98310b9b36 --- /dev/null +++ b/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py @@ -0,0 +1,25 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +"""Add repository id to git model and remove unused git model name + +Revision ID: abddaf015966 +Revises: 028c72ddfd20 +Create Date: 2024-08-12 11:43:34.158404 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "abddaf015966" +down_revision = "028c72ddfd20" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + "git_models", sa.Column("repository_id", sa.String(), nullable=True) + ) + op.drop_column("git_models", "name") diff --git a/backend/capellacollab/core/database/__init__.py b/backend/capellacollab/core/database/__init__.py index 5e603bb836..4688885ced 100644 --- a/backend/capellacollab/core/database/__init__.py +++ b/backend/capellacollab/core/database/__init__.py @@ -1,9 +1,11 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 +import functools import typing as t import pydantic +import redis import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -35,6 +37,11 @@ def get_db() -> t.Iterator[orm.Session]: yield session +@functools.lru_cache +def get_redis() -> redis.Redis: + return redis.Redis(host="localhost", port=6379, decode_responses=True) + + def patch_database_with_pydantic_object( database_object: Base, pydantic_object: pydantic.BaseModel ): diff --git a/backend/capellacollab/projects/toolmodels/diagrams/models.py b/backend/capellacollab/projects/toolmodels/diagrams/models.py index 428775d92e..d28a7172a1 100644 --- a/backend/capellacollab/projects/toolmodels/diagrams/models.py +++ b/backend/capellacollab/projects/toolmodels/diagrams/models.py @@ -18,6 +18,7 @@ class DiagramMetadata(core_pydantic.BaseModel): class DiagramCacheMetadata(core_pydantic.BaseModel): diagrams: list[DiagramMetadata] last_updated: datetime.datetime + job_id: str | None = None _validate_last_updated = pydantic.field_serializer("last_updated")( core_pydantic.datetime_serializer diff --git a/backend/capellacollab/projects/toolmodels/diagrams/routes.py b/backend/capellacollab/projects/toolmodels/diagrams/routes.py index 46beaf9a16..3693c712cb 100644 
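Note: the hunks above introduce a process-wide Redis client, `database.get_redis()`, a `functools.lru_cache`-memoized `redis.Redis` pointing at `localhost:6379` (the same port the new `make redis` target exposes via `redis/redis-stack`). A minimal sketch of how that client behaves, assuming the local `make redis` container is running; the hash key and values below are illustrative only, not real project data:

```python
# Sketch of the memoized Redis client added in core/database/__init__.py.
import functools

import redis


@functools.lru_cache
def get_redis() -> redis.Redis:
    # One lazily created client, reused for the whole process.
    return redis.Redis(host="localhost", port=6379, decode_responses=True)


if __name__ == "__main__":
    client = get_redis()
    client.ping()  # raises redis.ConnectionError if `make redis` is not running
    client.hset(
        "demo:key",
        mapping={"last_updated": "2024-08-12T11:43:34", "content": "<svg/>"},
    )
    print(client.hmget("demo:key", ["last_updated", "content"]))
```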
--- a/backend/capellacollab/projects/toolmodels/diagrams/routes.py +++ b/backend/capellacollab/projects/toolmodels/diagrams/routes.py @@ -3,12 +3,14 @@ from __future__ import annotations +import json import logging import pathlib from urllib import parse import fastapi import requests +from aiohttp import web import capellacollab.projects.toolmodels.modelsources.git.injectables as git_injectables from capellacollab.core import logging as log @@ -40,25 +42,43 @@ async def get_diagram_metadata( ), logger: logging.LoggerAdapter = fastapi.Depends(log.get_request_logger), ): + job_id = None try: - ( - last_updated, - diagram_metadata_entries, - ) = await handler.get_file_from_repository_or_artifacts_as_json( - "diagram_cache/index.json", - "update_capella_diagram_cache", - "diagram-cache/" + handler.git_model.revision, + last_updated, diagram_metadata_entries = await handler.get_file( + trusted_file_path="diagram_cache/index.json", + revision=f"diagram-cache/{handler.revision}", ) - except requests.exceptions.HTTPError: - logger.info("Failed fetching diagram metadata", exc_info=True) - raise exceptions.DiagramCacheNotConfiguredProperlyError() + except Exception: + logger.info( + "Failed fetching diagram metadata file for %s on revision %s.", + handler.path, + f"diagram-cache/{handler.revision}", + exc_info=True, + ) + try: + job_id, last_updated, diagram_metadata_entries = ( + await handler.get_artifact( + trusted_file_path="diagram_cache/index.json", + job_name="update_capella_diagram_cache", + ) + ) + except (web.HTTPError, requests.HTTPError): + logger.info( + "Failed fetching diagram metadata artifact for %s on revision %s", + handler.path, + handler.revision, + exc_info=True, + ) + raise exceptions.DiagramCacheNotConfiguredProperlyError() + diagram_metadata_entries = json.loads(diagram_metadata_entries.decode()) return models.DiagramCacheMetadata( diagrams=[ models.DiagramMetadata.model_validate(diagram_metadata) for diagram_metadata in diagram_metadata_entries ], last_updated=last_updated, + job_id=job_id, ) @@ -69,6 +89,7 @@ async def get_diagram_metadata( ) async def get_diagram( diagram_uuid_or_filename: str, + job_id: str | None = None, handler: git_handler.GitHandler = fastapi.Depends( git_injectables.get_git_handler ), @@ -79,16 +100,37 @@ async def get_diagram( raise exceptions.FileExtensionNotSupportedError(fileextension) diagram_uuid = pathlib.PurePosixPath(diagram_uuid_or_filename).stem + file_path = f"diagram_cache/{parse.quote(diagram_uuid, safe='')}.svg" + + if not job_id: + try: + file = await handler.get_file( + trusted_file_path=file_path, + revision=f"diagram-cache/{handler.revision}", + ) + return responses.SVGResponse(content=file[1]) + except Exception: + logger.info( + "Failed fetching diagram file %s for %s on revision %s.", + diagram_uuid, + handler.path, + f"diagram-cache/{handler.revision}", + exc_info=True, + ) + try: - _, diagram = await handler.get_file_from_repository_or_artifacts( - f"diagram_cache/{parse.quote(diagram_uuid, safe='')}.svg", - "update_capella_diagram_cache", - "diagram-cache/" + handler.git_model.revision, + artifact = await handler.get_artifact( + trusted_file_path=file_path, + job_name="update_capella_diagram_cache", + job_id=job_id, + ) + return responses.SVGResponse(content=artifact[2]) + except (web.HTTPError, requests.HTTPError): + logger.info( + "Failed fetching diagram artifact %s for %s on revision %s.", + diagram_uuid, + handler.path, + f"diagram-cache/{handler.revision}", + exc_info=True, ) - except requests.exceptions.HTTPError: - 
logger.info("Failed fetching diagram", exc_info=True) raise exceptions.DiagramCacheNotConfiguredProperlyError() - - return responses.SVGResponse( - content=diagram, - ) diff --git a/backend/capellacollab/projects/toolmodels/modelbadge/routes.py b/backend/capellacollab/projects/toolmodels/modelbadge/routes.py index 5323a98769..cde79e2d69 100644 --- a/backend/capellacollab/projects/toolmodels/modelbadge/routes.py +++ b/backend/capellacollab/projects/toolmodels/modelbadge/routes.py @@ -5,9 +5,9 @@ import logging -import aiohttp.web import fastapi import requests +from aiohttp import web import capellacollab.projects.toolmodels.modelsources.git.injectables as git_injectables from capellacollab.core import logging as log @@ -41,13 +41,26 @@ async def get_model_complexity_badge( logger: logging.LoggerAdapter = fastapi.Depends(log.get_request_logger), ): try: - return responses.SVGResponse( - content=( - await git_handler.get_file_from_repository_or_artifacts( - "model-complexity-badge.svg", "generate-model-badge" - ) - )[1], + file = await git_handler.get_file("model-complexity-badge.svg") + return responses.SVGResponse(content=file[1]) + except Exception: + logger.debug( + "Failed fetching model badge file for %s on revision %s.", + git_handler.path, + git_handler.revision, + exc_info=True, + ) + + try: + artifact = await git_handler.get_artifact( + "model-complexity-badge.svg", "generate-model-badge" + ) + return responses.SVGResponse(content=artifact[2]) + except (web.HTTPError, requests.HTTPError): + logger.debug( + "Failed fetching model badge artifact for %s on revision %s.", + git_handler.path, + git_handler.revision, + exc_info=True, ) - except (aiohttp.web.HTTPException, requests.exceptions.HTTPError): - logger.info("Failed fetching model complexity badge", exc_info=True) raise exceptions.ModelBadgeNotConfiguredProperlyError() diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py b/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py index 3696413876..8eae15bba9 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py @@ -61,26 +61,38 @@ def make_git_model_primary( def update_git_model( db: orm.Session, git_model: models.DatabaseGitModel, - patch_model: models.PatchGitModel, + put_model: models.PutGitModel, ) -> models.DatabaseGitModel: - git_model.path = patch_model.path - git_model.entrypoint = patch_model.entrypoint - git_model.revision = patch_model.revision - - if patch_model.password: - git_model.username = patch_model.username - git_model.password = patch_model.password - elif not patch_model.username: + git_model.entrypoint = put_model.entrypoint + git_model.revision = put_model.revision + + if put_model.path != git_model.path: + git_model.path = put_model.path + git_model.repository_id = None + + if put_model.password: + git_model.username = put_model.username + git_model.password = put_model.password + elif not put_model.username: git_model.username = "" git_model.password = "" - if patch_model.primary and not git_model.primary: + if put_model.primary and not git_model.primary: git_model = make_git_model_primary(db, git_model) db.commit() return git_model +def update_git_model_repository_id( + db: orm.Session, git_model: models.DatabaseGitModel, repository_id: str +) -> models.DatabaseGitModel: + git_model.repository_id = repository_id + + db.commit() + return git_model + + def delete_git_model(db: orm.Session, git_model: 
models.DatabaseGitModel): db.delete(git_model) db.commit() diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py b/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py index 8969cfd6fb..bb1e47fbf0 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py @@ -71,19 +71,6 @@ def __init__(self, filename: str): ) -class GitInstanceAPIEndpointNotFoundError(core_exceptions.BaseError): - def __init__(self): - super().__init__( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - title="Git instance API endpoint not found", - reason=( - "The used Git instance has no API endpoint defined. " - "Please contact your administrator." - ), - err_code="GIT_INSTANCE_NO_API_ENDPOINT_DEFINED", - ) - - class GitPipelineJobNotFoundError(core_exceptions.BaseError): def __init__(self, job_name: str, revision: str): super().__init__( @@ -97,31 +84,18 @@ def __init__(self, job_name: str, revision: str): ) -class GitPipelineJobFailedError(core_exceptions.BaseError): - def __init__(self, job_name: str): - super().__init__( - status_code=status.HTTP_400_BAD_REQUEST, - title="Failed job found", - reason=f"The last job with the name '{job_name}' has failed.", - err_code="FAILED_JOB_FOUND", - ) - - -class GitPipelineJobUnknownStateError(core_exceptions.BaseError): - job_name: str - state: str - +class GitPipelineJobUnsuccessfulError(core_exceptions.BaseError): def __init__(self, job_name: str, state: str): self.job_name = job_name self.state = state super().__init__( status_code=status.HTTP_400_BAD_REQUEST, - title="Unknown job state", + title="Unsuccessful job", reason=( - f"Job '{job_name}' has an unhandled or unknown state: '{state}'. " + f"Job '{job_name}' has an unsuccessful state: {self.state}." "Please contact your administrator." 
), - err_code="UNKNOWN_STATE_ERROR", + err_code="UNSUCCESSFUL_JOB_STATE_ERROR", ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py index a02e4af67a..09408ddda4 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py @@ -4,7 +4,6 @@ import base64 import datetime import io -import json import typing as t import zipfile from urllib import parse @@ -18,63 +17,39 @@ class GithubHandler(handler.GitHandler): - async def get_project_id_by_git_url(self) -> str: + @classmethod + async def get_repository_id_by_git_url(cls, path: str, *_) -> str: # Project ID has the format '{owner}/{repo_name}' - return parse.urlparse(self.git_model.path).path[1:] - - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - if not project_id: - project_id = await self.get_project_id_by_git_url() - jobs = self.get_last_pipeline_runs(project_id) - latest_job = self.__get_latest_successful_job(jobs, job_name) - return (latest_job["id"], latest_job["created_at"]) - - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - return json.loads( - self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, + return parse.urlparse(path).path[1:] + + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + jobs = self.get_last_pipeline_runs() + if latest_job := self.__get_latest_successful_job(jobs, job_name): + created_at = datetime.datetime.fromisoformat( + latest_job["created_at"] ) - ) + return (latest_job["id"], created_at) - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> bytes: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).encode() + raise git_exceptions.GitPipelineJobNotFoundError( + job_name=job_name, revision=self.revision + ) def __get_file_from_repository( self, - project_id: str, trusted_file_path: str, revision: str, headers: dict[str, str] | None = None, ) -> requests.Response: return requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/contents/{parse.quote(trusted_file_path)}?ref={parse.quote(revision, safe='')}", + f"{self.api_url}/repos/{self.repository_id}/contents/{parse.quote(trusted_file_path)}?ref={parse.quote(revision, safe='')}", timeout=config.requests.timeout, headers=headers, ) - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: """ If a repository is public but the permissions are not set correctly, you might be able to download the file without authentication @@ -83,15 +58,14 @@ async def get_file_from_repository( For that purpose first we try to reach it without authentication and only if that fails try to get the file authenticated. 
""" response = self.__get_file_from_repository( - project_id, trusted_file_path, revision or self.git_model.revision + trusted_file_path, revision or self.revision ) - if not response.ok and self.git_model.password: + if not response.ok and self.password: response = self.__get_file_from_repository( - project_id, trusted_file_path, - revision=revision or self.git_model.revision, - headers=self.__get_headers(self.git_model.password), + revision=revision or self.revision, + headers=self.__get_headers(), ) if response.status_code == 404: @@ -102,32 +76,23 @@ async def get_file_from_repository( return base64.b64decode(response.json()["content"]) - def get_last_pipeline_runs( - self, - project_id: str, - ) -> t.Any: - headers = None - if self.git_model.password: - headers = self.__get_headers(self.git_model.password) + def get_last_pipeline_runs(self) -> t.Any: response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/runs?branch={parse.quote(self.git_model.revision, safe='')}&per_page=20", - headers=headers, + f"{self.api_url}/repos/{self.repository_id}/actions/runs?branch={parse.quote(self.revision, safe='')}&per_page=20", + headers=(self.__get_headers() if self.password else None), timeout=config.requests.timeout, ) response.raise_for_status() return response.json()["workflow_runs"] def get_artifact_from_job( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> str: - artifact = self.__get_latest_artifact_metadata(project_id, job_id) + self, job_id: str, trusted_path_to_artifact: str + ) -> bytes: + artifact = self.__get_latest_artifact_metadata(job_id) artifact_id = artifact["id"] artifact_response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/artifacts/{artifact_id}/zip", - headers=self.__get_headers(self.git_model.password), + f"{self.api_url}/repos/{self.repository_id}/actions/artifacts/{artifact_id}/zip", + headers=self.__get_headers(), timeout=config.requests.timeout, ) artifact_response.raise_for_status() @@ -136,16 +101,12 @@ def get_artifact_from_job( artifact_response, trusted_path_to_artifact ) - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/commits?path={file_path}&sha={revision or self.git_model.revision}", - headers=( - self.__get_headers(self.git_model.password) - if self.git_model.password - else None - ), + f"{self.api_url}/repos/{self.repository_id}/commits?path={file_path}&sha={revision or self.revision}", + headers=(self.__get_headers() if self.password else None), timeout=config.requests.timeout, ) response.raise_for_status() @@ -153,41 +114,49 @@ def get_last_updated_for_file_path( raise git_exceptions.GitRepositoryFileNotFoundError( filename=file_path ) - return response.json()[0]["commit"]["author"]["date"] + return datetime.datetime.fromisoformat( + response.json()[0]["commit"]["author"]["date"] + ) + + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + response = requests.get( + f"{self.api_url}/repos/{self.repository_id}/actions/runs/{job_id}", + headers=self.__get_headers(), + timeout=config.requests.timeout, + ) + response.raise_for_status() + return datetime.datetime.fromisoformat(response.json()["created_at"]) def __get_file_content( self, response: requests.Response, trusted_file_path: str - ) 
-> str: + ) -> bytes: with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file: file_list = zip_file.namelist() file_index = file_list.index(trusted_file_path.split("/")[-1]) with zip_file.open(file_list[file_index], "r") as file: - return file.read().decode() + return file.read() - def __get_latest_successful_job(self, jobs: list, job_name: str) -> dict: + def __get_latest_successful_job( + self, jobs: list, job_name: str + ) -> dict | None: matched_jobs = [job for job in jobs if job["name"] == job_name] if not matched_jobs: raise git_exceptions.GitPipelineJobNotFoundError( - job_name=job_name, revision=self.git_model.revision + job_name=job_name, revision=self.revision ) matched_jobs.sort(key=lambda job: job["created_at"], reverse=True) if matched_jobs[0]["conclusion"] == "success": return matched_jobs[0] - elif ( - matched_jobs[0]["conclusion"] == "failure" - or matched_jobs[0]["expired"] == "True" - ): - raise git_exceptions.GitPipelineJobFailedError(job_name) - raise git_exceptions.GitPipelineJobUnknownStateError( + raise git_exceptions.GitPipelineJobUnsuccessfulError( job_name, matched_jobs[0]["conclusion"] ) - def __get_latest_artifact_metadata(self, project_id: str, job_id: str): + def __get_latest_artifact_metadata(self, job_id: str): response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/runs/{job_id}/artifacts", - headers=self.__get_headers(self.git_model.password), + f"{self.api_url}/repos/{self.repository_id}/actions/runs/{job_id}/artifacts", + headers=self.__get_headers(), timeout=config.requests.timeout, ) response.raise_for_status() @@ -196,9 +165,9 @@ def __get_latest_artifact_metadata(self, project_id: str, job_id: str): raise git_exceptions.GithubArtifactExpiredError() return artifact - def __get_headers(self, password: str) -> dict: + def __get_headers(self) -> dict: return { - "Authorization": f"token {password}", + "Authorization": f"token {self.password}", "X-GitHub-Api-Version": "2022-11-28", "Accept": "application/vnd.github+json", } diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py index ae3cfd2308..1defce3aa9 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py @@ -16,17 +16,18 @@ class GitlabHandler(handler.GitHandler): - async def get_project_id_by_git_url(self) -> str: + @classmethod + async def get_repository_id_by_git_url( + cls, path: str, password: str, api_url: str + ) -> str: project_name_encoded = parse.quote( - parse.urlparse(self.git_model.path) - .path.lstrip("/") - .removesuffix(".git"), + parse.urlparse(path).path.lstrip("/").removesuffix(".git"), safe="", ) async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_name_encoded}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{api_url}/projects/{project_name_encoded}", + headers={"PRIVATE-TOKEN": password}, timeout=config.requests.timeout, ) as response: if response.status == 403: @@ -39,29 +40,25 @@ async def get_project_id_by_git_url(self) -> str: response.raise_for_status() return (await response.json())["id"] - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - if not project_id: - project_id = await self.get_project_id_by_git_url() - for pipeline_id in await 
self.__get_last_pipeline_run_ids(project_id): + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + for pipeline_id in await self.__get_last_pipeline_run_ids(): if job := await self.__get_job_id_for_job_name( - project_id, - pipeline_id, - job_name, + pipeline_id, job_name ): return job raise git_exceptions.GitPipelineJobNotFoundError( - job_name=job_name, revision=self.git_model.revision + job_name=job_name, revision=self.revision ) - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/repository/commits?ref_name={revision or self.git_model.revision}&path={file_path}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/repository/commits?ref_name={revision or self.revision}&path={file_path}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) response.raise_for_status() @@ -69,16 +66,24 @@ def get_last_updated_for_file_path( raise git_exceptions.GitRepositoryFileNotFoundError( filename=file_path ) - return response.json()[0]["authored_date"] + return datetime.datetime.fromisoformat( + response.json()[0]["authored_date"] + ) + + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + response = requests.get( + f"{self.api_url}/projects/{self.repository_id}/pipelines/{job_id}", + headers={"PRIVATE-TOKEN": self.password}, + timeout=config.requests.timeout, + ) + response.raise_for_status() + return datetime.datetime.fromisoformat(response.json()["started_at"]) - async def __get_last_pipeline_run_ids( - self, - project_id: str, - ) -> list[str]: + async def __get_last_pipeline_run_ids(self) -> list[str]: async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_id}/pipelines?ref={parse.quote(self.git_model.revision, safe='')}&per_page=20", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/pipelines?ref={parse.quote(self.revision, safe='')}&per_page=20", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -86,16 +91,13 @@ async def __get_last_pipeline_run_ids( return [pipeline["id"] for pipeline in await response.json()] async def __get_job_id_for_job_name( - self, - project_id: str, - pipeline_id: str, - job_name: str, - ) -> tuple[str, str] | None: + self, pipeline_id: str, job_name: str + ) -> tuple[str, datetime.datetime] | None: """Search for a job by name in a pipeline""" async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_id}/pipelines/{pipeline_id}/jobs", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/pipelines/{pipeline_id}/jobs", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -103,62 +105,36 @@ async def __get_job_id_for_job_name( for job in await response.json(): if job["name"] == job_name: if job["status"] == "success": - return job["id"], job["started_at"] + started_at = datetime.datetime.fromisoformat( + job["started_at"] + ) + return job["id"], started_at if job["status"] == "failed": - raise 
git_exceptions.GitPipelineJobFailedError( - job_name + raise git_exceptions.GitPipelineJobUnsuccessfulError( + job_name, "failed" ) return None - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).json() - - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> bytes: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).content - def get_artifact_from_job( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> requests.Response: + self, job_id: str, trusted_path_to_artifact: str + ) -> bytes: response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/jobs/{job_id}/artifacts/{trusted_path_to_artifact}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/jobs/{job_id}/artifacts/{trusted_path_to_artifact}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) response.raise_for_status() - return response - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + return response.content + + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: - branch = revision if revision else self.git_model.revision + branch = revision if revision else self.revision response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/repository/files/{parse.quote(trusted_file_path, safe='')}?ref={parse.quote(branch, safe='')}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/repository/files/{parse.quote(trusted_file_path, safe='')}?ref={parse.quote(branch, safe='')}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py new file mode 100644 index 0000000000..842ba697ce --- /dev/null +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py @@ -0,0 +1,88 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import datetime + +from capellacollab.core import database + + +class GitRedisCache: + def __init__(self, path: str, revision: str) -> None: + self._redis = database.get_redis() + self.path = path + self.revision = revision + super().__init__() + + def get_file_data( + self, file_path: str, revision: str | None = None + ) -> tuple[datetime.datetime, bytes] | None: + revision = revision or self.revision + + file_data = self._redis.hmget( + name=self._get_file_key(file_path, revision), + keys=["last_updated", "content"], + ) + if (last_update := file_data[0]) and (content := file_data[1]): + last_update = datetime.datetime.fromisoformat(last_update) + return last_update, content + + return None + + def get_artifact_data( + self, job_id: str, file_path: str + ) -> tuple[datetime.datetime, bytes] | None: + artifact_data = self._redis.hmget( + name=self._get_artifact_key(job_id, file_path), + keys=["started_at", "content"], + ) + if (started_at := artifact_data[0]) and (content := artifact_data[1]): + started_at = datetime.datetime.fromisoformat(started_at) + return 
started_at, content + + return None + + def put_file_data( + self, + file_path: str, + last_updated: datetime.datetime, + content: bytes, + revision: str | None = None, + ttl: int = 3600, + ) -> None: + revision = revision or self.revision + + self._redis.hset( + name=self._get_file_key(file_path, revision), + mapping={ + "last_updated": last_updated.isoformat(), + "content": content, + }, + ) + self._redis.expire( + name=self._get_file_key(file_path, revision), time=ttl + ) + + def put_artifact_data( + self, + job_id: str, + file_path: str, + started_at: datetime.datetime, + content: bytes, + ttl: int = 3600, + ) -> None: + self._redis.hset( + name=self._get_artifact_key(job_id, file_path), + mapping={"started_at": started_at.isoformat(), "content": content}, + ) + self._redis.expire( + name=self._get_artifact_key(job_id, file_path), time=ttl + ) + + def clear(self) -> None: + self._redis.flushdb() + + def _get_file_key(self, file_path: str, revision: str) -> str: + return f"f:{self.path}:{revision}:{file_path}" + + def _get_artifact_key(self, job_id: str, file_path: str) -> str: + return f"a:{self.path}:{self.revision}:{job_id}:{file_path}" diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py index 6f3a35b140..bdb6db5cf4 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py @@ -27,3 +27,26 @@ def __init__(self): ), err_code="NO_MATCHING_GIT_INSTANCE", ) + + +class GitInstanceAPIEndpointNotFoundError(core_exceptions.BaseError): + def __init__(self): + super().__init__( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + title="Git instance API endpoint not found", + reason=( + "The used Git instance has no API endpoint defined. " + "Please contact your administrator." + ), + err_code="GIT_INSTANCE_NO_API_ENDPOINT_DEFINED", + ) + + +class GitRepositoryIdNotFoundError(core_exceptions.BaseError): + def __init__(self): + super().__init__( + status_code=status.HTTP_404_NOT_FOUND, + title="Git model repository id not found", + reason="The used Git model has no repository id. Please contact your administrator", + err_code="GIT_MODEL_REPOSITORY_ID_NOT_SET", + ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py index 7a161506ca..5d44010638 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py @@ -4,6 +4,7 @@ from sqlalchemy import orm +import capellacollab.projects.toolmodels.modelsources.git.crud as git_crud import capellacollab.projects.toolmodels.modelsources.git.models as git_models import capellacollab.settings.modelsources.git.crud as settings_git_crud import capellacollab.settings.modelsources.git.models as settings_git_models @@ -15,21 +16,41 @@ class GitHandlerFactory: @staticmethod - def create_git_handler( + async def create_git_handler( db: orm.Session, git_model: git_models.DatabaseGitModel ) -> handler.GitHandler: + """ + Create a git handler for the given git model. + + Args: + db (orm.Session): Database session. + git_model (git_models.DatabaseGitModel): The git model instance. + + Returns: + handler.GitHandler: An instance of GitHandler. 
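Note: the new `GitRedisCache` above stores each cached file or artifact as a Redis hash with a one-hour default TTL; keys are prefixed `f:` for repository files and `a:` for job artifacts, scoped by model path and revision. A sketch of what one cached file looks like in Redis, assuming a local Redis instance; the path, revision, and content are placeholders:

```python
# Sketch of the cache layout written by put_file_data() (example values only).
import datetime

import redis

r = redis.Redis(host="localhost", port=6379, decode_responses=True)

path = "https://example.com/test/project.git"
revision = "diagram-cache/main"
file_path = "diagram_cache/index.json"

key = f"f:{path}:{revision}:{file_path}"  # see GitRedisCache._get_file_key()
r.hset(
    key,
    mapping={
        "last_updated": datetime.datetime(
            2024, 8, 12, 11, 43, tzinfo=datetime.timezone.utc
        ).isoformat(),
        "content": "[]",  # the cached file body
    },
)
r.expire(key, 3600)  # default ttl used by put_file_data()

print(r.hmget(key, ["last_updated", "content"]))
```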
+ + Raises: + GitInstanceAPIEndpointNotFoundError: If the git instance API endpoint is not found. + GitInstanceUnsupportedError: If the git instance type is unsupported. + """ git_instance = GitHandlerFactory.get_git_instance_for_git_model( db, git_model ) - match git_instance.type: - case settings_git_models.GitType.GITLAB: - return gitlab_handler.GitlabHandler(git_model, git_instance) - case settings_git_models.GitType.GITHUB: - return github_handler.GithubHandler(git_model, git_instance) - case _: - raise exceptions.GitInstanceUnsupportedError( - instance_name=str(git_instance.type) - ) + + if not git_instance.api_url: + raise exceptions.GitInstanceAPIEndpointNotFoundError() + + if not git_model.repository_id: + repository_id = await GitHandlerFactory._get_repository_id( + git_model, git_instance + ) + git_crud.update_git_model_repository_id( + db, git_model, repository_id + ) + + return GitHandlerFactory._create_specific_git_handler( + git_model, git_instance + ) @staticmethod def get_git_instance_for_git_model( @@ -48,3 +69,57 @@ def get_git_instance_for_git_model( if git_model.path.startswith(instance.url): return instance raise exceptions.NoMatchingGitInstanceError + + @staticmethod + async def _get_repository_id( + git_model: git_models.DatabaseGitModel, + git_instance: settings_git_models.DatabaseGitInstance, + ) -> str: + if not (api_url := git_instance.api_url): + raise exceptions.GitInstanceAPIEndpointNotFoundError() + + match git_instance.type: + case settings_git_models.GitType.GITLAB: + return await gitlab_handler.GitlabHandler.get_repository_id_by_git_url( + git_model.path, git_model.password, api_url + ) + case settings_git_models.GitType.GITHUB: + return await github_handler.GithubHandler.get_repository_id_by_git_url( + git_model.path, git_model.password + ) + case _: + raise exceptions.GitInstanceUnsupportedError( + instance_name=str(git_instance.type) + ) + + @staticmethod + def _create_specific_git_handler( + git_model: git_models.DatabaseGitModel, + git_instance: settings_git_models.DatabaseGitInstance, + ) -> handler.GitHandler: + if not (api_url := git_instance.api_url): + raise exceptions.GitInstanceAPIEndpointNotFoundError() + if not (repository_id := git_model.repository_id): + raise exceptions.GitRepositoryIdNotFoundError() + + match git_instance.type: + case settings_git_models.GitType.GITLAB: + return gitlab_handler.GitlabHandler( + git_model.path, + git_model.revision, + git_model.password, + api_url, + repository_id, + ) + case settings_git_models.GitType.GITHUB: + return github_handler.GithubHandler( + git_model.path, + git_model.revision, + git_model.password, + api_url, + repository_id, + ) + case _: + raise exceptions.GitInstanceUnsupportedError( + instance_name=str(git_instance.type) + ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py index e09a62b5a5..e4979cffb7 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py @@ -5,118 +5,149 @@ import abc import datetime -import json -import typing as t -import requests - -import capellacollab.projects.toolmodels.modelsources.git.models as git_models -import capellacollab.settings.modelsources.git.models as settings_git_models - -from .. import exceptions - -if t.TYPE_CHECKING: - from capellambse import diagram_cache +from . 
import cache class GitHandler: def __init__( self, - git_model: git_models.DatabaseGitModel, - git_instance: settings_git_models.DatabaseGitInstance, + path: str, + revision: str, + password: str, + api_url: str, + repository_id: str, ) -> None: - self.git_model = git_model - self.git_instance = git_instance - self.check_git_instance_has_api_url() - - def check_git_instance_has_api_url(self): - if not self.git_instance.api_url: - raise exceptions.GitInstanceAPIEndpointNotFoundError() - + self.path = path + self.revision = revision + self.password = password + self.api_url = api_url + self.repository_id = repository_id + self.cache = cache.GitRedisCache(path, revision) + + @classmethod @abc.abstractmethod - async def get_project_id_by_git_url(self) -> str: + async def get_repository_id_by_git_url( + cls, path: str, password: str, api_url: str + ) -> str: pass @abc.abstractmethod - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - pass + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + """ + Retrieve the ID and start time of the most recent run for a specified job. - @abc.abstractmethod - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - pass + Args: + job_name (str): The name of the job whose last run information is to be retrieved. + + Returns: + tuple[str, datetime.datetime]: A tuple containing the job ID and the start time (as a datetime object) of the most recent run. + + Raises: + GitPipelineJobNotFoundError: If the job cannot be found in any of the recent pipeline runs. + GitPipelineJobUnsuccessfulError: If the last job state indicates that the job was not successful. + """ @abc.abstractmethod - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, + def get_artifact_from_job( + self, job_id: str, trusted_path_to_artifact: str ) -> bytes: - pass + """ + Retrieve an artifact from a specified job. + + Args: + job_id (str): The unique identifier of the job from which to retrieve the artifact. + trusted_path_to_artifact (str): The path within the job's artifacts where the desired artifact is stored. + + Returns: + bytes: The content of the artifact as a byte stream. + """ @abc.abstractmethod - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: - pass + """ + Retrieve the contents of a specified file from the repository. + + Args: + trusted_file_path (str): The path to the file within the repository. + revision (str | None): The specific revision to use. If None, the handler revision is used. + + Returns: + bytes: The content of the file. + + Raises: + GitRepositoryFileNotFoundError: If the file does not exist in the specified revision. + """ @abc.abstractmethod - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: - pass + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: + """ + Retrieve the last update datetime for the specified file in the repository. 
- async def get_file_from_repository_or_artifacts_as_json( - self, - trusted_file_path: str, - job_name: str, - revision: str | None = None, - ) -> tuple[datetime.datetime, list[diagram_cache.IndexEntry]]: - ( - last_updated, - result, - ) = await self.get_file_from_repository_or_artifacts( - trusted_file_path, job_name, revision - ) - return (last_updated, json.loads(result.decode("utf-8"))) + Args: + file_path (str): The path to the file within the repository. + revision (str | None): The specific revision to use. If None, the handler revision is used. - async def get_file_from_repository_or_artifacts( - self, - trusted_file_path: str, - job_name: str, - revision: str | None = None, - ) -> tuple[t.Any, bytes]: - project_id = await self.get_project_id_by_git_url() - try: - return ( - self.get_last_updated_for_file_path( - project_id, - trusted_file_path, - revision=revision, - ), - await self.get_file_from_repository( - project_id, trusted_file_path, revision - ), - ) - except (requests.HTTPError, exceptions.GitRepositoryFileNotFoundError): - pass + Returns: + datetime.datetime: The datetime of the last update to the specified file. - job_id, last_updated = await self.get_last_job_run_id_for_git_model( - job_name, project_id + Raises: + GitRepositoryFileNotFoundError: If the file does not exist in the revision. + """ + + @abc.abstractmethod + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + """ + Retrieve the start datetime for the specified job in the repository. + + Args: + job_id (str): The unique identifier of the job from which to retrieve the artifact. + + Returns: + datetime.datetime: The datetime of the start time of the specified job. + """ + + async def get_file( + self, trusted_file_path: str, revision: str | None = None + ) -> tuple[datetime.datetime, bytes]: + last_updated = self.get_last_updated_for_file( + trusted_file_path, revision ) - return ( - last_updated, - self.get_artifact_from_job_as_content( - project_id, job_id, trusted_file_path - ), + + if file_data := self.cache.get_file_data(trusted_file_path, revision): + last_updated_cache, content_cache = file_data + + if last_updated == last_updated_cache: + return last_updated_cache, content_cache + + content = self.get_file_from_repository(trusted_file_path, revision) + self.cache.put_file_data( + trusted_file_path, last_updated, content, revision ) + + return last_updated, content + + async def get_artifact( + self, trusted_file_path: str, job_name: str, job_id: str | None = None + ) -> tuple[str, datetime.datetime, bytes]: + if not job_id: + job_id, started_at = await self.get_last_successful_job_run( + job_name + ) + else: + started_at = self.get_started_at_for_job(job_id) + + if artifact_data := self.cache.get_artifact_data( + job_id, trusted_file_path + ): + return job_id, artifact_data[0], artifact_data[1] + + content = self.get_artifact_from_job(job_id, trusted_file_path) + + return job_id, started_at, content diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py b/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py index 97b3b233c9..685e0acf3b 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py @@ -49,10 +49,10 @@ def get_existing_primary_git_model( raise exceptions.NoGitRepositoryAssignedToModelError(tool_model.slug) -def get_git_handler( +async def get_git_handler( git_model: git_models.DatabaseGitModel = fastapi.Depends( 
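Note: the concrete `GitHandler.get_file` below uses the commit timestamp as the invalidation signal: every request still asks the forge for the file's last-modified date, but the file body is only downloaded when that date differs from the cached entry. A condensed restatement of that flow; the `handler` argument stands in for a `GitlabHandler`/`GithubHandler` instance with its Redis cache attached:

```python
# Condensed restatement of GitHandler.get_file from this PR.
import datetime


async def get_file(handler, trusted_file_path: str, revision: str | None = None):
    # One lightweight forge API call per request: the file's last commit date.
    last_updated: datetime.datetime = handler.get_last_updated_for_file(
        trusted_file_path, revision
    )

    if cached := handler.cache.get_file_data(trusted_file_path, revision):
        cached_last_updated, cached_content = cached
        if cached_last_updated == last_updated:
            return cached_last_updated, cached_content  # fresh hit, no download

    # Miss or stale entry: download the body and refresh the cache (1 h TTL).
    content = handler.get_file_from_repository(trusted_file_path, revision)
    handler.cache.put_file_data(trusted_file_path, last_updated, content, revision)
    return last_updated, content
```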
get_existing_primary_git_model ), db: orm.Session = fastapi.Depends(database.get_db), ) -> handler.GitHandler: - return factory.GitHandlerFactory.create_git_handler(db, git_model) + return await factory.GitHandlerFactory.create_git_handler(db, git_model) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/models.py b/backend/capellacollab/projects/toolmodels/modelsources/git/models.py index c5c84ff85e..ade6239691 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/models.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/models.py @@ -24,14 +24,14 @@ class PostGitModel(core_pydantic.BaseModel): password: str -class PatchGitModel(PostGitModel): +class PutGitModel(PostGitModel): primary: bool class GitModel(PostGitModel): id: int - name: str primary: bool + repository_id: str | None @pydantic.field_serializer("password") def transform_password(self, data: str) -> bool: @@ -44,7 +44,6 @@ class DatabaseGitModel(database.Base): id: orm.Mapped[int] = orm.mapped_column( init=False, primary_key=True, index=True, autoincrement=True ) - name: orm.Mapped[str] path: orm.Mapped[str] entrypoint: orm.Mapped[str] revision: orm.Mapped[str] @@ -60,12 +59,13 @@ class DatabaseGitModel(database.Base): username: orm.Mapped[str] password: orm.Mapped[str] + repository_id: orm.Mapped[str | None] = orm.mapped_column(default=None) + @classmethod def from_post_git_model( cls, model: "DatabaseToolModel", primary: bool, new_model: PostGitModel ): return cls( - name="", primary=primary, model=model, **new_model.model_dump(), diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py b/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py index 26ef52b8a7..91bf22ec68 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py @@ -136,14 +136,14 @@ def create_git_model( ], ) def update_git_model_by_id( - patch_git_model: models.PatchGitModel, + put_git_model: models.PutGitModel, db_git_model: models.DatabaseGitModel = fastapi.Depends( injectables.get_existing_git_model ), db: orm.Session = fastapi.Depends(database.get_db), ) -> models.DatabaseGitModel: - git_util.verify_path_prefix(db, patch_git_model.path) - return crud.update_git_model(db, db_git_model, patch_git_model) + git_util.verify_path_prefix(db, put_git_model.path) + return crud.update_git_model(db, db_git_model, put_git_model) @router.delete( diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py b/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py index 9e599cfece..746fdfd9fd 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py @@ -54,10 +54,10 @@ async def check_pipeline_health( return models.ModelArtifactStatus.UNCONFIGURED try: - git_handler = factory.GitHandlerFactory.create_git_handler( + git_handler = await factory.GitHandlerFactory.create_git_handler( db, primary_git_model ) - await git_handler.get_last_job_run_id_for_git_model(job_name) + await git_handler.get_last_successful_job_run(job_name) except exceptions.GitPipelineJobNotFoundError: return models.ModelArtifactStatus.UNCONFIGURED except handler_exceptions.GitInstanceUnsupportedError: diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 730a34f81f..dd15d05c2c 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -26,7 
+26,7 @@ dependencies = [ "alembic==1.13.2", "appdirs", "cachetools", - "fastapi>=0.101.0,<0.112.4", + "fastapi[all]>=0.101.0,<0.112.4", "kubernetes", "psycopg2-binary>2.9.7", "pydantic>=2.0.0", @@ -43,6 +43,8 @@ dependencies = [ "argon2-cffi", "typer", "lxml", + "redis", + "redis[hiredis]", ] [project.urls] @@ -65,6 +67,7 @@ dev = [ "pytest-cov", "aioresponses", "types-lxml", + "types-redis", ] [tool.black] diff --git a/backend/tests/projects/toolmodels/conftest.py b/backend/tests/projects/toolmodels/conftest.py index 9fec519b1a..ec4af74a16 100644 --- a/backend/tests/projects/toolmodels/conftest.py +++ b/backend/tests/projects/toolmodels/conftest.py @@ -1,8 +1,6 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 - -import json import typing as t import pytest @@ -43,7 +41,7 @@ def fixture_git_instance_api_url( def fixture_git_instance( db: orm.Session, git_type: git_models.GitType, git_instance_api_url: str ) -> git_models.DatabaseGitInstance: - git_instance = git_models.DatabaseGitInstance( + git_instance = git_models.PostGitInstance( name="test", url="https://example.com/test/project", api_url=git_instance_api_url, @@ -147,28 +145,6 @@ def fixture_git_query_params(request: pytest.FixtureRequest) -> t.List[dict]: return request.param -def github_commit_api_callback(request): - response_body = [ - { - "sha": "43bf21488c5cc309af0ec635a8698b8509379527", - "commit": { - "author": { - "name": "test-name", - "email": "test-email", - "date": "2050-06-26T13:46:21Z", - }, - "committer": { - "name": "test-name", - "email": "test-email", - "date": "2050-07-03T09:50:57Z", - }, - "message": "test: Test commit message", - }, - } - ] - return (200, {}, json.dumps(response_body)) - - @pytest.fixture(name="mock_git_get_commit_information_api") def fixture_mock_git_get_commit_information_api( request: pytest.FixtureRequest, diff --git a/backend/tests/projects/toolmodels/fixtures.py b/backend/tests/projects/toolmodels/fixtures.py index daacd2e7ad..ebfeeba842 100644 --- a/backend/tests/projects/toolmodels/fixtures.py +++ b/backend/tests/projects/toolmodels/fixtures.py @@ -1,8 +1,6 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 -import uuid - import pytest from sqlalchemy import orm @@ -38,10 +36,7 @@ def fixture_jupyter_model( jupyter_tool: tools_models.DatabaseTool, ) -> toolmodels_models.DatabaseToolModel: jupyter_model = toolmodels_models.PostToolModel( - name="Jupyter test", - description="", - tool_id=jupyter_tool.id, - configuration={"workspace": str(uuid.uuid4())}, + name="Jupyter test", description="", tool_id=jupyter_tool.id ) return toolmodels_crud.create_model( db, diff --git a/backend/tests/projects/toolmodels/test_diagrams.py b/backend/tests/projects/toolmodels/test_diagrams.py index cec25a87ee..2a89061726 100644 --- a/backend/tests/projects/toolmodels/test_diagrams.py +++ b/backend/tests/projects/toolmodels/test_diagrams.py @@ -178,7 +178,7 @@ def test_get_diagram_metadata_from_repository( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) assert response.status_code == 200 - assert len(response.json()) == 2 + assert len(response.json()) == 3 @responses.activate @@ -225,7 +225,7 @@ def test_get_diagram_metadata_from_artifacts( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) assert response.status_code == 200 - assert len(response.json()) == 2 + assert len(response.json()) == 3 @responses.activate @@ -370,9 +370,12 @@ def 
test_get_diagrams_failed_diagram_cache_job_found( response = client.get( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) - + reason = response.json()["detail"]["reason"] assert response.status_code == 400 - assert response.json()["detail"]["err_code"] == "FAILED_JOB_FOUND" + assert ( + response.json()["detail"]["err_code"] == "UNSUCCESSFUL_JOB_STATE_ERROR" + ) + assert "failure" in reason or "failed" in reason @responses.activate diff --git a/backend/tests/projects/toolmodels/test_git_model.py b/backend/tests/projects/toolmodels/test_git_model.py new file mode 100644 index 0000000000..b2e254a7c0 --- /dev/null +++ b/backend/tests/projects/toolmodels/test_git_model.py @@ -0,0 +1,26 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +from sqlalchemy import orm + +import capellacollab.projects.toolmodels.modelsources.git.crud as project_git_crud +import capellacollab.projects.toolmodels.modelsources.git.models as project_git_models + + +def test_reset_repository_id_on_git_model_path_change( + db: orm.Session, + git_model: project_git_models.DatabaseGitModel, +): + assert git_model.repository_id is None + + project_git_crud.update_git_model_repository_id(db, git_model, "1") + + assert git_model.repository_id == "1" + + put_git_model = project_git_models.PutGitModel.model_validate(git_model) + put_git_model.path = "random-new-path" + + project_git_crud.update_git_model(db, git_model, put_git_model) + + assert git_model.path == "random-new-path" + assert git_model.repository_id is None diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts index c1cbe65c76..cadb785581 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts @@ -138,8 +138,14 @@ export class ModelDiagramDialogComponent implements OnInit { lazyLoadDiagram(uuid: string) { if (!this.diagrams[uuid]) { this.diagrams[uuid] = { loading: true, content: undefined }; + this.projectsModelsDiagramsService - .getDiagram(uuid, this.data.project.slug, this.data.model.slug) + .getDiagram( + uuid, + this.data.project.slug, + this.data.model.slug, + this.diagramMetadata?.job_id || undefined, + ) .subscribe({ next: (response: Blob) => { const reader = new FileReader(); diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts index 7191346fb3..ea08008d28 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts @@ -36,11 +36,13 @@ type Story = StoryObj; const emptyDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const loadedDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [{ name: 'fakeDiagram1', uuid: 'fakeUUID-Loaded', success: true }], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const notLoadedDiagramCacheMetadata: DiagramCacheMetadata = { @@ -48,11 +50,13 @@ const notLoadedDiagramCacheMetadata: DiagramCacheMetadata = { { name: 'fakeDiagram2', uuid: 'fakeUUID-Not-Loaded', 
success: true }, ], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const errorDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [{ name: 'fakeDiagram3', uuid: 'fakeUUID-Loaded', success: false }], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const combinedDiagramCacheMetadata: DiagramCacheMetadata = { @@ -62,6 +66,7 @@ const combinedDiagramCacheMetadata: DiagramCacheMetadata = { { name: 'fakeDiagram3', uuid: 'fakeUUID-Loaded', success: false }, ], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; // prettier-ignore diff --git a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts b/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts index 8ddd5bd64e..f65aa9149e 100644 --- a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts +++ b/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts @@ -9,7 +9,7 @@ import { MatSlideToggleModule } from '@angular/material/slide-toggle'; import { UntilDestroy, untilDestroyed } from '@ngneat/until-destroy'; import { NgxSkeletonLoaderModule } from 'ngx-skeleton-loader'; import { filter, map, switchMap } from 'rxjs'; -import { ModelComplexityBadgeService } from 'src/app/projects/project-detail/model-overview/model-complexity-badge/service/model-complexity-badge.service'; +import { ProjectsModelsModelComplexityBadgeService } from 'src/app/openapi'; import { ProjectWrapperService } from 'src/app/projects/service/project.service'; import { environment } from 'src/environments/environment'; @@ -36,7 +36,7 @@ export class ModelComplexityBadgeComponent implements OnChanges { errorCode?: string; constructor( - private modelComplexityBadgeService: ModelComplexityBadgeService, + private modelComplexityBadgeService: ProjectsModelsModelComplexityBadgeService, private projectService: ProjectWrapperService, ) {}
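
Note: end to end, the frontend first loads the diagram metadata (which now carries the `job_id` the backend used, or `null` when the index came straight from the diagram-cache branch) and passes that id back as a query parameter when lazily loading individual SVGs, so every diagram in one dialog is served from the same artifact. A hypothetical walk-through of that API contract from a Python client; the base URL and slugs are placeholders and authentication is omitted:

```python
# Hypothetical client-side walk-through of the new diagrams API contract.
import requests

base = "http://localhost:8000/api/v1/projects/my-project/models/my-model"

meta = requests.get(f"{base}/diagrams", timeout=30).json()
job_id = meta["job_id"]  # None when served from the diagram-cache branch

for diagram in meta["diagrams"]:
    params = {"job_id": job_id} if job_id else {}
    svg = requests.get(
        f"{base}/diagrams/{diagram['uuid']}", params=params, timeout=30
    )
    svg.raise_for_status()
```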