diff --git a/.gitignore b/.gitignore index 7b0c55fb2..f7e7c95fa 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,4 @@ certs/* /helm/charts/* /logs/* .env +autoscaler diff --git a/Makefile b/Makefile index 1f3c17715..def8736bd 100644 --- a/Makefile +++ b/Makefile @@ -152,6 +152,12 @@ create-cluster: registry kubectl cluster-info kubectl config set-context --current --namespace=$(NAMESPACE) +install-vpa: + git clone https://github.com/kubernetes/autoscaler.git + cd autoscaler/vertical-pod-autoscaler + ./hack/vpa-up.sh + kubectl --namespace=kube-system get pods | grep vpa + delete-cluster: k3d cluster list $(CLUSTER_NAME) 2>&- && k3d cluster delete $(CLUSTER_NAME) diff --git a/backend/Makefile b/backend/Makefile index d7be1332d..66288b602 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -5,10 +5,15 @@ DB_PORT = 5432 DB_PASSWORD = dev DB_USER = dev DB_NAME = dev + +VALKEY_PASSWORD ?= password + VENV = .venv HOST ?= 127.0.0.1 +VALKEY_PORT = 6379 + DATABASE_LOAD_FILE ?= ../local/load.sql DATABASE_SAVE_DIR ?= ../local @@ -31,6 +36,18 @@ database: -e POSTGRES_DB=$(DB_NAME) \ postgres +valkey: + TMP_FILE=$$(mktemp) + echo "requirepass $(VALKEY_PASSWORD)" > $$TMP_FILE + docker start capella-collab-valkey || \ + docker run -d \ + --name capella-collab-valkey \ + -p $(VALKEY_PORT):6379 \ + -v $$TMP_FILE:/usr/local/etc/valkey/valkey.conf \ + valkey/valkey:latest \ + valkey-server \ + /usr/local/etc/valkey/valkey.conf + app: if [ -d "$(VENV)/bin" ]; then source $(VENV)/bin/activate; @@ -57,7 +74,7 @@ install: openapi: CAPELLACOLLAB_SKIP_OPENAPI_ERROR_RESPONSES=1 $(VENV)/bin/python -m capellacollab.cli openapi generate /tmp/openapi.json -dev: database app +dev: database valkey app cleanup: docker stop capella-collab-postgres diff --git a/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py b/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py new file mode 100644 index 000000000..98310b9b3 --- /dev/null +++ b/backend/capellacollab/alembic/versions/abddaf015966_add_repository_id_to_git_model_and_remove_unused_name.py @@ -0,0 +1,25 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +"""Add repository id to git model and remove unused git model name + +Revision ID: abddaf015966 +Revises: 028c72ddfd20 +Create Date: 2024-08-12 11:43:34.158404 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "abddaf015966" +down_revision = "028c72ddfd20" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + "git_models", sa.Column("repository_id", sa.String(), nullable=True) + ) + op.drop_column("git_models", "name") diff --git a/backend/capellacollab/config/models.py b/backend/capellacollab/config/models.py index b550e4fcd..7932df03e 100644 --- a/backend/capellacollab/config/models.py +++ b/backend/capellacollab/config/models.py @@ -288,6 +288,12 @@ class DatabaseConfig(BaseConfig): ) +class ValkeyConfig(BaseConfig): + url: str = pydantic.Field( + default="valkey://default:password@localhost:6379/0" + ) + + class InitialConfig(BaseConfig): admin: str = pydantic.Field( default="admin", @@ -367,6 +373,7 @@ class AppConfig(BaseConfig): authentication: AuthenticationConfig = AuthenticationConfig() prometheus: PrometheusConfig = PrometheusConfig() database: DatabaseConfig = DatabaseConfig() + valkey: ValkeyConfig = ValkeyConfig() initial: InitialConfig = InitialConfig() logging: LoggingConfig = LoggingConfig() requests: RequestsConfig = RequestsConfig() diff --git a/backend/capellacollab/core/database/__init__.py b/backend/capellacollab/core/database/__init__.py index 5e603bb83..0e7bc67fe 100644 --- a/backend/capellacollab/core/database/__init__.py +++ b/backend/capellacollab/core/database/__init__.py @@ -1,10 +1,12 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 +import functools import typing as t import pydantic import sqlalchemy as sa +import valkey from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -35,6 +37,11 @@ def get_db() -> t.Iterator[orm.Session]: yield session +@functools.lru_cache +def get_valkey() -> valkey.Valkey: + return valkey.Valkey.from_url(config.valkey.url, decode_responses=True) + + def patch_database_with_pydantic_object( database_object: Base, pydantic_object: pydantic.BaseModel ): diff --git a/backend/capellacollab/projects/toolmodels/diagrams/models.py b/backend/capellacollab/projects/toolmodels/diagrams/models.py index 428775d92..d28a7172a 100644 --- a/backend/capellacollab/projects/toolmodels/diagrams/models.py +++ b/backend/capellacollab/projects/toolmodels/diagrams/models.py @@ -18,6 +18,7 @@ class DiagramMetadata(core_pydantic.BaseModel): class DiagramCacheMetadata(core_pydantic.BaseModel): diagrams: list[DiagramMetadata] last_updated: datetime.datetime + job_id: str | None = None _validate_last_updated = pydantic.field_serializer("last_updated")( core_pydantic.datetime_serializer diff --git a/backend/capellacollab/projects/toolmodels/diagrams/routes.py b/backend/capellacollab/projects/toolmodels/diagrams/routes.py index 46beaf9a1..2269e2f5d 100644 --- a/backend/capellacollab/projects/toolmodels/diagrams/routes.py +++ b/backend/capellacollab/projects/toolmodels/diagrams/routes.py @@ -3,6 +3,7 @@ from __future__ import annotations +import json import logging import pathlib from urllib import parse @@ -41,24 +42,30 @@ async def get_diagram_metadata( logger: logging.LoggerAdapter = fastapi.Depends(log.get_request_logger), ): try: - ( - last_updated, - diagram_metadata_entries, - ) = await handler.get_file_from_repository_or_artifacts_as_json( - "diagram_cache/index.json", - "update_capella_diagram_cache", - "diagram-cache/" + handler.git_model.revision, + job_id, last_updated, diagram_metadata_entries = ( + await handler.get_file_or_artifact( + trusted_file_path="diagram_cache/index.json", + logger=logger, + job_name="update_capella_diagram_cache", + 
file_revision=f"diagram-cache/{handler.revision}", + ) + ) + except requests.HTTPError: + logger.info( + "Failed fetching diagram metadata file or artifact for %s", + handler.path, + exc_info=True, ) - except requests.exceptions.HTTPError: - logger.info("Failed fetching diagram metadata", exc_info=True) raise exceptions.DiagramCacheNotConfiguredProperlyError() + diagram_metadata_entries = json.loads(diagram_metadata_entries) return models.DiagramCacheMetadata( diagrams=[ models.DiagramMetadata.model_validate(diagram_metadata) for diagram_metadata in diagram_metadata_entries ], last_updated=last_updated, + job_id=job_id, ) @@ -69,6 +76,7 @@ async def get_diagram_metadata( ) async def get_diagram( diagram_uuid_or_filename: str, + job_id: str | None = None, handler: git_handler.GitHandler = fastapi.Depends( git_injectables.get_git_handler ), @@ -79,16 +87,23 @@ async def get_diagram( raise exceptions.FileExtensionNotSupportedError(fileextension) diagram_uuid = pathlib.PurePosixPath(diagram_uuid_or_filename).stem + file_path = f"diagram_cache/{parse.quote(diagram_uuid, safe='')}.svg" + try: - _, diagram = await handler.get_file_from_repository_or_artifacts( - f"diagram_cache/{parse.quote(diagram_uuid, safe='')}.svg", - "update_capella_diagram_cache", - "diagram-cache/" + handler.git_model.revision, + file_or_artifact = await handler.get_file_or_artifact( + trusted_file_path=file_path, + logger=logger, + job_name="update_capella_diagram_cache", + job_id=job_id, + file_revision=f"diagram-cache/{handler.revision}", + ) + return responses.SVGResponse(content=file_or_artifact[2]) + except requests.HTTPError: + logger.info( + "Failed fetching diagram file or artifact %s for %s.", + diagram_uuid, + handler.path, + f"diagram-cache/{handler.revision}", + exc_info=True, ) - except requests.exceptions.HTTPError: - logger.info("Failed fetching diagram", exc_info=True) raise exceptions.DiagramCacheNotConfiguredProperlyError() - - return responses.SVGResponse( - content=diagram, - ) diff --git a/backend/capellacollab/projects/toolmodels/modelbadge/routes.py b/backend/capellacollab/projects/toolmodels/modelbadge/routes.py index 5323a9876..479d1f5c0 100644 --- a/backend/capellacollab/projects/toolmodels/modelbadge/routes.py +++ b/backend/capellacollab/projects/toolmodels/modelbadge/routes.py @@ -5,9 +5,7 @@ import logging -import aiohttp.web import fastapi -import requests import capellacollab.projects.toolmodels.modelsources.git.injectables as git_injectables from capellacollab.core import logging as log @@ -41,13 +39,17 @@ async def get_model_complexity_badge( logger: logging.LoggerAdapter = fastapi.Depends(log.get_request_logger), ): try: - return responses.SVGResponse( - content=( - await git_handler.get_file_from_repository_or_artifacts( - "model-complexity-badge.svg", "generate-model-badge" - ) - )[1], + file_or_artifact = await git_handler.get_file_or_artifact( + trusted_file_path="model-complexity-badge.svg", + job_name="generate-model-badge", + logger=logger, + ) + return responses.SVGResponse(content=file_or_artifact[2]) + except Exception: + logger.debug( + "Failed fetching model badge file or artifact for %s on revision %s.", + git_handler.path, + git_handler.revision, + exc_info=True, ) - except (aiohttp.web.HTTPException, requests.exceptions.HTTPError): - logger.info("Failed fetching model complexity badge", exc_info=True) raise exceptions.ModelBadgeNotConfiguredProperlyError() diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py 
b/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py index 369641387..d57b2567a 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/crud.py @@ -61,26 +61,36 @@ def make_git_model_primary( def update_git_model( db: orm.Session, git_model: models.DatabaseGitModel, - patch_model: models.PatchGitModel, + put_model: models.PutGitModel, ) -> models.DatabaseGitModel: - git_model.path = patch_model.path - git_model.entrypoint = patch_model.entrypoint - git_model.revision = patch_model.revision - - if patch_model.password: - git_model.username = patch_model.username - git_model.password = patch_model.password - elif not patch_model.username: + git_model.path = put_model.path + git_model.entrypoint = put_model.entrypoint + git_model.revision = put_model.revision + git_model.repository_id = None + + if put_model.password: + git_model.username = put_model.username + git_model.password = put_model.password + elif not put_model.username: git_model.username = "" git_model.password = "" - if patch_model.primary and not git_model.primary: + if put_model.primary and not git_model.primary: git_model = make_git_model_primary(db, git_model) db.commit() return git_model +def update_git_model_repository_id( + db: orm.Session, git_model: models.DatabaseGitModel, repository_id: str +) -> models.DatabaseGitModel: + git_model.repository_id = repository_id + + db.commit() + return git_model + + def delete_git_model(db: orm.Session, git_model: models.DatabaseGitModel): db.delete(git_model) db.commit() diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py b/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py index 8969cfd6f..bb1e47fbf 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/exceptions.py @@ -71,19 +71,6 @@ def __init__(self, filename: str): ) -class GitInstanceAPIEndpointNotFoundError(core_exceptions.BaseError): - def __init__(self): - super().__init__( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - title="Git instance API endpoint not found", - reason=( - "The used Git instance has no API endpoint defined. " - "Please contact your administrator." - ), - err_code="GIT_INSTANCE_NO_API_ENDPOINT_DEFINED", - ) - - class GitPipelineJobNotFoundError(core_exceptions.BaseError): def __init__(self, job_name: str, revision: str): super().__init__( @@ -97,31 +84,18 @@ def __init__(self, job_name: str, revision: str): ) -class GitPipelineJobFailedError(core_exceptions.BaseError): - def __init__(self, job_name: str): - super().__init__( - status_code=status.HTTP_400_BAD_REQUEST, - title="Failed job found", - reason=f"The last job with the name '{job_name}' has failed.", - err_code="FAILED_JOB_FOUND", - ) - - -class GitPipelineJobUnknownStateError(core_exceptions.BaseError): - job_name: str - state: str - +class GitPipelineJobUnsuccessfulError(core_exceptions.BaseError): def __init__(self, job_name: str, state: str): self.job_name = job_name self.state = state super().__init__( status_code=status.HTTP_400_BAD_REQUEST, - title="Unknown job state", + title="Unsuccessful job", reason=( - f"Job '{job_name}' has an unhandled or unknown state: '{state}'. " + f"Job '{job_name}' has an unsuccessful state: {self.state}." "Please contact your administrator." 
), - err_code="UNKNOWN_STATE_ERROR", + err_code="UNSUCCESSFUL_JOB_STATE_ERROR", ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py index a02e4af67..c9e68fd87 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py @@ -4,7 +4,6 @@ import base64 import datetime import io -import json import typing as t import zipfile from urllib import parse @@ -18,63 +17,39 @@ class GithubHandler(handler.GitHandler): - async def get_project_id_by_git_url(self) -> str: + @classmethod + async def get_repository_id_by_git_url(cls, path: str, *_) -> str: # Project ID has the format '{owner}/{repo_name}' - return parse.urlparse(self.git_model.path).path[1:] - - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - if not project_id: - project_id = await self.get_project_id_by_git_url() - jobs = self.get_last_pipeline_runs(project_id) - latest_job = self.__get_latest_successful_job(jobs, job_name) - return (latest_job["id"], latest_job["created_at"]) - - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - return json.loads( - self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, + return parse.urlparse(path).path[1:] + + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + jobs = self.get_last_pipeline_runs() + if latest_job := self.__get_latest_successful_job(jobs, job_name): + created_at = datetime.datetime.fromisoformat( + latest_job["created_at"] ) - ) + return (str(latest_job["id"]), created_at) - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> bytes: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).encode() + raise git_exceptions.GitPipelineJobNotFoundError( + job_name=job_name, revision=self.revision + ) def __get_file_from_repository( self, - project_id: str, trusted_file_path: str, revision: str, headers: dict[str, str] | None = None, ) -> requests.Response: return requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/contents/{parse.quote(trusted_file_path)}?ref={parse.quote(revision, safe='')}", + f"{self.api_url}/repos/{self.repository_id}/contents/{parse.quote(trusted_file_path)}?ref={parse.quote(revision, safe='')}", timeout=config.requests.timeout, headers=headers, ) - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: """ If a repository is public but the permissions are not set correctly, you might be able to download the file without authentication @@ -83,15 +58,14 @@ async def get_file_from_repository( For that purpose first we try to reach it without authentication and only if that fails try to get the file authenticated. 
""" response = self.__get_file_from_repository( - project_id, trusted_file_path, revision or self.git_model.revision + trusted_file_path, revision or self.revision ) - if not response.ok and self.git_model.password: + if not response.ok and self.password: response = self.__get_file_from_repository( - project_id, trusted_file_path, - revision=revision or self.git_model.revision, - headers=self.__get_headers(self.git_model.password), + revision=revision or self.revision, + headers=self.__get_headers(), ) if response.status_code == 404: @@ -102,32 +76,23 @@ async def get_file_from_repository( return base64.b64decode(response.json()["content"]) - def get_last_pipeline_runs( - self, - project_id: str, - ) -> t.Any: - headers = None - if self.git_model.password: - headers = self.__get_headers(self.git_model.password) + def get_last_pipeline_runs(self) -> t.Any: response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/runs?branch={parse.quote(self.git_model.revision, safe='')}&per_page=20", - headers=headers, + f"{self.api_url}/repos/{self.repository_id}/actions/runs?branch={parse.quote(self.revision, safe='')}&per_page=20", + headers=(self.__get_headers() if self.password else None), timeout=config.requests.timeout, ) response.raise_for_status() return response.json()["workflow_runs"] def get_artifact_from_job( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> str: - artifact = self.__get_latest_artifact_metadata(project_id, job_id) + self, job_id: str, trusted_path_to_artifact: str + ) -> bytes: + artifact = self.__get_latest_artifact_metadata(job_id) artifact_id = artifact["id"] artifact_response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/artifacts/{artifact_id}/zip", - headers=self.__get_headers(self.git_model.password), + f"{self.api_url}/repos/{self.repository_id}/actions/artifacts/{artifact_id}/zip", + headers=self.__get_headers(), timeout=config.requests.timeout, ) artifact_response.raise_for_status() @@ -136,16 +101,12 @@ def get_artifact_from_job( artifact_response, trusted_path_to_artifact ) - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/commits?path={file_path}&sha={revision or self.git_model.revision}", - headers=( - self.__get_headers(self.git_model.password) - if self.git_model.password - else None - ), + f"{self.api_url}/repos/{self.repository_id}/commits?path={file_path}&sha={revision or self.revision}", + headers=(self.__get_headers() if self.password else None), timeout=config.requests.timeout, ) response.raise_for_status() @@ -153,41 +114,49 @@ def get_last_updated_for_file_path( raise git_exceptions.GitRepositoryFileNotFoundError( filename=file_path ) - return response.json()[0]["commit"]["author"]["date"] + return datetime.datetime.fromisoformat( + response.json()[0]["commit"]["author"]["date"] + ) + + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + response = requests.get( + f"{self.api_url}/repos/{self.repository_id}/actions/runs/{job_id}", + headers=self.__get_headers(), + timeout=config.requests.timeout, + ) + response.raise_for_status() + return datetime.datetime.fromisoformat(response.json()["created_at"]) def __get_file_content( self, response: requests.Response, trusted_file_path: str - ) 
-> str: + ) -> bytes: with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file: file_list = zip_file.namelist() file_index = file_list.index(trusted_file_path.split("/")[-1]) with zip_file.open(file_list[file_index], "r") as file: - return file.read().decode() + return file.read() - def __get_latest_successful_job(self, jobs: list, job_name: str) -> dict: + def __get_latest_successful_job( + self, jobs: list, job_name: str + ) -> dict | None: matched_jobs = [job for job in jobs if job["name"] == job_name] if not matched_jobs: raise git_exceptions.GitPipelineJobNotFoundError( - job_name=job_name, revision=self.git_model.revision + job_name=job_name, revision=self.revision ) matched_jobs.sort(key=lambda job: job["created_at"], reverse=True) if matched_jobs[0]["conclusion"] == "success": return matched_jobs[0] - elif ( - matched_jobs[0]["conclusion"] == "failure" - or matched_jobs[0]["expired"] == "True" - ): - raise git_exceptions.GitPipelineJobFailedError(job_name) - raise git_exceptions.GitPipelineJobUnknownStateError( + raise git_exceptions.GitPipelineJobUnsuccessfulError( job_name, matched_jobs[0]["conclusion"] ) - def __get_latest_artifact_metadata(self, project_id: str, job_id: str): + def __get_latest_artifact_metadata(self, job_id: str): response = requests.get( - f"{self.git_instance.api_url}/repos/{project_id}/actions/runs/{job_id}/artifacts", - headers=self.__get_headers(self.git_model.password), + f"{self.api_url}/repos/{self.repository_id}/actions/runs/{job_id}/artifacts", + headers=self.__get_headers(), timeout=config.requests.timeout, ) response.raise_for_status() @@ -196,9 +165,9 @@ def __get_latest_artifact_metadata(self, project_id: str, job_id: str): raise git_exceptions.GithubArtifactExpiredError() return artifact - def __get_headers(self, password: str) -> dict: + def __get_headers(self) -> dict: return { - "Authorization": f"token {password}", + "Authorization": f"token {self.password}", "X-GitHub-Api-Version": "2022-11-28", "Accept": "application/vnd.github+json", } diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py index ae3cfd230..709aab069 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py @@ -16,17 +16,18 @@ class GitlabHandler(handler.GitHandler): - async def get_project_id_by_git_url(self) -> str: + @classmethod + async def get_repository_id_by_git_url( + cls, path: str, password: str, api_url: str + ) -> str: project_name_encoded = parse.quote( - parse.urlparse(self.git_model.path) - .path.lstrip("/") - .removesuffix(".git"), + parse.urlparse(path).path.lstrip("/").removesuffix(".git"), safe="", ) async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_name_encoded}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{api_url}/projects/{project_name_encoded}", + headers={"PRIVATE-TOKEN": password}, timeout=config.requests.timeout, ) as response: if response.status == 403: @@ -39,29 +40,25 @@ async def get_project_id_by_git_url(self) -> str: response.raise_for_status() return (await response.json())["id"] - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - if not project_id: - project_id = await self.get_project_id_by_git_url() - for pipeline_id in await 
self.__get_last_pipeline_run_ids(project_id): + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + for pipeline_id in await self.__get_last_pipeline_run_ids(): if job := await self.__get_job_id_for_job_name( - project_id, - pipeline_id, - job_name, + pipeline_id, job_name ): - return job + return (str(job[0]), job[1]) raise git_exceptions.GitPipelineJobNotFoundError( - job_name=job_name, revision=self.git_model.revision + job_name=job_name, revision=self.revision ) - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/repository/commits?ref_name={revision or self.git_model.revision}&path={file_path}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/repository/commits?ref_name={revision or self.revision}&path={file_path}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) response.raise_for_status() @@ -69,16 +66,24 @@ def get_last_updated_for_file_path( raise git_exceptions.GitRepositoryFileNotFoundError( filename=file_path ) - return response.json()[0]["authored_date"] + return datetime.datetime.fromisoformat( + response.json()[0]["authored_date"] + ) + + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + response = requests.get( + f"{self.api_url}/projects/{self.repository_id}/jobs/{job_id}", + headers={"PRIVATE-TOKEN": self.password}, + timeout=config.requests.timeout, + ) + response.raise_for_status() + return datetime.datetime.fromisoformat(response.json()["started_at"]) - async def __get_last_pipeline_run_ids( - self, - project_id: str, - ) -> list[str]: + async def __get_last_pipeline_run_ids(self) -> list[str]: async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_id}/pipelines?ref={parse.quote(self.git_model.revision, safe='')}&per_page=20", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/pipelines?ref={parse.quote(self.revision, safe='')}&per_page=20", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -86,16 +91,13 @@ async def __get_last_pipeline_run_ids( return [pipeline["id"] for pipeline in await response.json()] async def __get_job_id_for_job_name( - self, - project_id: str, - pipeline_id: str, - job_name: str, - ) -> tuple[str, str] | None: + self, pipeline_id: str, job_name: str + ) -> tuple[str, datetime.datetime] | None: """Search for a job by name in a pipeline""" async with aiohttp.ClientSession() as session: async with session.get( - f"{self.git_instance.api_url}/projects/{project_id}/pipelines/{pipeline_id}/jobs", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/pipelines/{pipeline_id}/jobs", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -103,62 +105,36 @@ async def __get_job_id_for_job_name( for job in await response.json(): if job["name"] == job_name: if job["status"] == "success": - return job["id"], job["started_at"] + started_at = datetime.datetime.fromisoformat( + job["started_at"] + ) + return job["id"], started_at if 
job["status"] == "failed": - raise git_exceptions.GitPipelineJobFailedError( - job_name + raise git_exceptions.GitPipelineJobUnsuccessfulError( + job_name, "failed" ) return None - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).json() - - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> bytes: - return self.get_artifact_from_job( - project_id, - job_id, - trusted_path_to_artifact, - ).content - def get_artifact_from_job( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> requests.Response: + self, job_id: str, trusted_path_to_artifact: str + ) -> bytes: response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/jobs/{job_id}/artifacts/{trusted_path_to_artifact}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/jobs/{job_id}/artifacts/{trusted_path_to_artifact}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) response.raise_for_status() - return response - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + return response.content + + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: - branch = revision if revision else self.git_model.revision + branch = revision if revision else self.revision response = requests.get( - f"{self.git_instance.api_url}/projects/{project_id}/repository/files/{parse.quote(trusted_file_path, safe='')}?ref={parse.quote(branch, safe='')}", - headers={"PRIVATE-TOKEN": self.git_model.password}, + f"{self.api_url}/projects/{self.repository_id}/repository/files/{parse.quote(trusted_file_path, safe='')}?ref={parse.quote(branch, safe='')}", + headers={"PRIVATE-TOKEN": self.password}, timeout=config.requests.timeout, ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py new file mode 100644 index 000000000..2fff0e614 --- /dev/null +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/cache.py @@ -0,0 +1,115 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import datetime +import logging + +import valkey.exceptions + +from capellacollab.core import database + +DEFAULT_TTL = datetime.timedelta(days=90) + + +class GitValkeyCache: + def __init__( + self, + git_model_id: int, + ) -> None: + self._valkey = database.get_valkey() + self.git_model_id = git_model_id + super().__init__() + + def get_file_data( + self, file_path: str, revision: str, logger: logging.LoggerAdapter + ) -> tuple[datetime.datetime, bytes] | None: + try: + file_data = self._valkey.hmget( + name=self._get_file_key(file_path, revision), + keys=["last_updated", "content"], + ) + except valkey.exceptions.ValkeyError: + logger.exception("Failed to load file data from valkey") + return None + if (last_update := file_data[0]) and (content := file_data[1]): + last_update = datetime.datetime.fromisoformat(last_update) + return last_update, content + + return None + + def get_artifact_data( + self, job_id: str, file_path: str, logger: logging.LoggerAdapter + ) -> tuple[datetime.datetime, bytes] | None: + try: + artifact_data = 
self._valkey.hmget( + name=self._get_artifact_key(job_id, file_path), + keys=["started_at", "content"], + ) + except valkey.exceptions.ValkeyError: + logger.exception("Failed to load artifact data from valkey") + return None + if (started_at := artifact_data[0]) and (content := artifact_data[1]): + started_at = datetime.datetime.fromisoformat(started_at) + return started_at, content + + return None + + def put_file_data( + self, + file_path: str, + last_updated: datetime.datetime, + content: bytes, + revision: str, + logger: logging.LoggerAdapter, + ) -> None: + try: + self._valkey.hset( + name=self._get_file_key(file_path, revision), + mapping={ + "last_updated": last_updated.isoformat(), + "content": content, + }, + ) + self._valkey.expire( + name=self._get_file_key(file_path, revision), time=DEFAULT_TTL + ) + except valkey.exceptions.ValkeyError: + logger.exception("Failed to save file data to valkey") + + def put_artifact_data( + self, + job_id: str, + file_path: str, + started_at: datetime.datetime, + content: bytes, + logger: logging.LoggerAdapter, + ) -> None: + try: + self._valkey.hset( + name=self._get_artifact_key(job_id, file_path), + mapping={ + "started_at": started_at.isoformat(), + "content": content, + }, + ) + self._valkey.expire( + name=self._get_artifact_key(job_id, file_path), + time=DEFAULT_TTL, + ) + except valkey.exceptions.ValkeyError: + logger.exception("Failed to save artifact data to valkey") + + def clear(self) -> None: + for key in self._valkey.scan_iter(match=f"{self.git_model_id}:*"): + self._valkey.delete(key) + + def _get_file_key(self, file_path: str, revision: str) -> str: + return f"{self.git_model_id}:f:{self._escape_string(revision)}:{self._escape_string(file_path)}" + + def _get_artifact_key(self, job_id: str, file_path: str) -> str: + return ( + f"{self.git_model_id}:a:{job_id}:{self._escape_string(file_path)}" + ) + + def _escape_string(self, string: str) -> str: + return string.replace(":", "-") diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py index 6f3a35b14..fcabd7fc6 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/exceptions.py @@ -27,3 +27,16 @@ def __init__(self): ), err_code="NO_MATCHING_GIT_INSTANCE", ) + + +class GitInstanceAPIEndpointNotFoundError(core_exceptions.BaseError): + def __init__(self): + super().__init__( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + title="Git instance API endpoint not found", + reason=( + "The used Git instance has no API endpoint defined. " + "Please contact your administrator." 
+ ), + err_code="GIT_INSTANCE_NO_API_ENDPOINT_DEFINED", + ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py index 7a161506c..a71327c74 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/factory.py @@ -4,6 +4,7 @@ from sqlalchemy import orm +import capellacollab.projects.toolmodels.modelsources.git.crud as git_crud import capellacollab.projects.toolmodels.modelsources.git.models as git_models import capellacollab.settings.modelsources.git.crud as settings_git_crud import capellacollab.settings.modelsources.git.models as settings_git_models @@ -15,21 +16,46 @@ class GitHandlerFactory: @staticmethod - def create_git_handler( + async def create_git_handler( db: orm.Session, git_model: git_models.DatabaseGitModel ) -> handler.GitHandler: + """ + Create a git handler for the given git model. + + Args: + db (orm.Session): Database session. + git_model (git_models.DatabaseGitModel): The git model instance. + + Returns: + handler.GitHandler: An instance of GitHandler. + + Raises: + GitInstanceAPIEndpointNotFoundError: If the git instance API endpoint is not found. + GitInstanceUnsupportedError: If the git instance type is unsupported. + """ git_instance = GitHandlerFactory.get_git_instance_for_git_model( db, git_model ) - match git_instance.type: - case settings_git_models.GitType.GITLAB: - return gitlab_handler.GitlabHandler(git_model, git_instance) - case settings_git_models.GitType.GITHUB: - return github_handler.GithubHandler(git_model, git_instance) - case _: - raise exceptions.GitInstanceUnsupportedError( - instance_name=str(git_instance.type) - ) + + if not git_instance.api_url: + raise exceptions.GitInstanceAPIEndpointNotFoundError() + + if not git_model.repository_id: + repository_id = await GitHandlerFactory._get_repository_id( + git_model, git_instance.api_url, git_instance.type + ) + git_crud.update_git_model_repository_id( + db, git_model, repository_id + ) + else: + repository_id = git_model.repository_id + + return GitHandlerFactory._create_specific_git_handler( + git_model, + repository_id, + git_instance.api_url, + git_instance.type, + ) @staticmethod def get_git_instance_for_git_model( @@ -48,3 +74,54 @@ def get_git_instance_for_git_model( if git_model.path.startswith(instance.url): return instance raise exceptions.NoMatchingGitInstanceError + + @staticmethod + async def _get_repository_id( + git_model: git_models.DatabaseGitModel, + git_instance_api_url: str, + git_instance_type: settings_git_models.GitType, + ) -> str: + match git_instance_type: + case settings_git_models.GitType.GITLAB: + return await gitlab_handler.GitlabHandler.get_repository_id_by_git_url( + git_model.path, git_model.password, git_instance_api_url + ) + case settings_git_models.GitType.GITHUB: + return await github_handler.GithubHandler.get_repository_id_by_git_url( + git_model.path, git_model.password + ) + case _: + raise exceptions.GitInstanceUnsupportedError( + instance_name=str(git_instance_type) + ) + + @staticmethod + def _create_specific_git_handler( + git_model: git_models.DatabaseGitModel, + git_model_repository_id: str, + git_instance_api_url: str, + git_instance_type: settings_git_models.GitType, + ) -> handler.GitHandler: + match git_instance_type: + case settings_git_models.GitType.GITLAB: + return gitlab_handler.GitlabHandler( + git_model.id, + git_model.path, + 
git_model.revision, + git_model.password, + git_instance_api_url, + git_model_repository_id, + ) + case settings_git_models.GitType.GITHUB: + return github_handler.GithubHandler( + git_model.id, + git_model.path, + git_model.revision, + git_model.password, + git_instance_api_url, + git_model_repository_id, + ) + case _: + raise exceptions.GitInstanceUnsupportedError( + instance_name=str(git_instance_type) + ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py index e09a62b5a..c13ce3bd8 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/handler/handler.py @@ -5,118 +5,193 @@ import abc import datetime -import json -import typing as t +import logging import requests -import capellacollab.projects.toolmodels.modelsources.git.models as git_models -import capellacollab.settings.modelsources.git.models as settings_git_models - -from .. import exceptions - -if t.TYPE_CHECKING: - from capellambse import diagram_cache +from .. import exceptions as git_exceptions +from . import cache class GitHandler: def __init__( self, - git_model: git_models.DatabaseGitModel, - git_instance: settings_git_models.DatabaseGitInstance, + git_model_id: int, + path: str, + revision: str, + password: str, + api_url: str, + repository_id: str, ) -> None: - self.git_model = git_model - self.git_instance = git_instance - self.check_git_instance_has_api_url() - - def check_git_instance_has_api_url(self): - if not self.git_instance.api_url: - raise exceptions.GitInstanceAPIEndpointNotFoundError() - + self.path = path + self.revision = revision + self.password = password + self.api_url = api_url + self.repository_id = repository_id + self.cache = cache.GitValkeyCache(git_model_id) + + @classmethod @abc.abstractmethod - async def get_project_id_by_git_url(self) -> str: + async def get_repository_id_by_git_url( + cls, path: str, password: str, api_url: str + ) -> str: pass @abc.abstractmethod - async def get_last_job_run_id_for_git_model( - self, job_name: str, project_id: str | None = None - ) -> tuple[str, str]: - pass + async def get_last_successful_job_run( + self, job_name: str + ) -> tuple[str, datetime.datetime]: + """ + Retrieve the ID and start time of the most recent run for a specified job. - @abc.abstractmethod - def get_artifact_from_job_as_json( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, - ) -> dict: - pass + Args: + job_name (str): The name of the job whose last run information is to be retrieved. + + Returns: + tuple[str, datetime.datetime]: A tuple containing the job ID and the start time (as a datetime object) of the most recent run. + + Raises: + GitPipelineJobNotFoundError: If the job cannot be found in any of the recent pipeline runs. + GitPipelineJobUnsuccessfulError: If the last job state indicates that the job was not successful. + """ @abc.abstractmethod - def get_artifact_from_job_as_content( - self, - project_id: str, - job_id: str, - trusted_path_to_artifact: str, + def get_artifact_from_job( + self, job_id: str, trusted_path_to_artifact: str ) -> bytes: - pass + """ + Retrieve an artifact from a specified job. + + Args: + job_id (str): The unique identifier of the job from which to retrieve the artifact. + trusted_path_to_artifact (str): The path within the job's artifacts where the desired artifact is stored. 
+ + Returns: + bytes: The content of the artifact as a byte stream. + """ @abc.abstractmethod - async def get_file_from_repository( - self, - project_id: str, - trusted_file_path: str, - revision: str | None = None, + def get_file_from_repository( + self, trusted_file_path: str, revision: str | None = None ) -> bytes: - pass + """ + Retrieve the contents of a specified file from the repository. + + Args: + trusted_file_path (str): The path to the file within the repository. + revision (str | None): The specific revision to use. If None, the handler revision is used. + + Returns: + bytes: The content of the file. + + Raises: + GitRepositoryFileNotFoundError: If the file does not exist in the specified revision. + """ @abc.abstractmethod - def get_last_updated_for_file_path( - self, project_id: str, file_path: str, revision: str | None - ) -> datetime.datetime | None: - pass + def get_last_updated_for_file( + self, file_path: str, revision: str | None = None + ) -> datetime.datetime: + """ + Retrieve the last update datetime for the specified file in the repository. - async def get_file_from_repository_or_artifacts_as_json( + Args: + file_path (str): The path to the file within the repository. + revision (str | None): The specific revision to use. If None, the handler revision is used. + + Returns: + datetime.datetime: The datetime of the last update to the specified file. + + Raises: + GitRepositoryFileNotFoundError: If the file does not exist in the revision. + """ + + @abc.abstractmethod + def get_started_at_for_job(self, job_id: str) -> datetime.datetime: + """ + Retrieve the start datetime for the specified job in the repository. + + Args: + job_id (str): The unique identifier of the job from which to retrieve the artifact. + + Returns: + datetime.datetime: The datetime of the start time of the specified job. 
+ """ + + async def get_file( self, trusted_file_path: str, - job_name: str, + logger: logging.LoggerAdapter, revision: str | None = None, - ) -> tuple[datetime.datetime, list[diagram_cache.IndexEntry]]: - ( - last_updated, - result, - ) = await self.get_file_from_repository_or_artifacts( - trusted_file_path, job_name, revision + ) -> tuple[datetime.datetime, bytes]: + if not revision: + revision = self.revision + + last_updated = self.get_last_updated_for_file( + trusted_file_path, revision ) - return (last_updated, json.loads(result.decode("utf-8"))) - async def get_file_from_repository_or_artifacts( + if file_data := self.cache.get_file_data( + trusted_file_path, revision, logger + ): + last_updated_cache, content_cache = file_data + + if last_updated == last_updated_cache: + return last_updated_cache, content_cache + + content = self.get_file_from_repository(trusted_file_path, revision) + self.cache.put_file_data( + trusted_file_path, last_updated, content, revision, logger + ) + + return last_updated, content + + async def get_artifact( self, trusted_file_path: str, job_name: str, - revision: str | None = None, - ) -> tuple[t.Any, bytes]: - project_id = await self.get_project_id_by_git_url() - try: - return ( - self.get_last_updated_for_file_path( - project_id, - trusted_file_path, - revision=revision, - ), - await self.get_file_from_repository( - project_id, trusted_file_path, revision - ), + logger: logging.LoggerAdapter, + job_id: str | None = None, + ) -> tuple[str, datetime.datetime, bytes]: + if not job_id: + job_id, started_at = await self.get_last_successful_job_run( + job_name ) - except (requests.HTTPError, exceptions.GitRepositoryFileNotFoundError): - pass + else: + started_at = self.get_started_at_for_job(job_id) + + if artifact_data := self.cache.get_artifact_data( + job_id, trusted_file_path, logger + ): + return job_id, artifact_data[0], artifact_data[1] - job_id, last_updated = await self.get_last_job_run_id_for_git_model( - job_name, project_id + content = self.get_artifact_from_job(job_id, trusted_file_path) + self.cache.put_artifact_data( + job_id, trusted_file_path, started_at, content, logger ) - return ( - last_updated, - self.get_artifact_from_job_as_content( - project_id, job_id, trusted_file_path - ), + + return job_id, started_at, content + + async def get_file_or_artifact( + self, + trusted_file_path: str, + job_name: str, + logger: logging.LoggerAdapter, + job_id: str | None = None, + file_revision: str | None = None, + ) -> tuple[str | None, datetime.datetime, bytes]: + if not job_id: + try: + file = await self.get_file( + trusted_file_path, logger, file_revision + ) + return (None, file[0], file[1]) + except ( + requests.HTTPError, + git_exceptions.GitRepositoryFileNotFoundError, + ): + pass + + return await self.get_artifact( + trusted_file_path, job_name, logger, job_id ) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py b/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py index 97b3b233c..685e0acf3 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/injectables.py @@ -49,10 +49,10 @@ def get_existing_primary_git_model( raise exceptions.NoGitRepositoryAssignedToModelError(tool_model.slug) -def get_git_handler( +async def get_git_handler( git_model: git_models.DatabaseGitModel = fastapi.Depends( get_existing_primary_git_model ), db: orm.Session = fastapi.Depends(database.get_db), ) -> 
handler.GitHandler: - return factory.GitHandlerFactory.create_git_handler(db, git_model) + return await factory.GitHandlerFactory.create_git_handler(db, git_model) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/models.py b/backend/capellacollab/projects/toolmodels/modelsources/git/models.py index c5c84ff85..ade623969 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/models.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/models.py @@ -24,14 +24,14 @@ class PostGitModel(core_pydantic.BaseModel): password: str -class PatchGitModel(PostGitModel): +class PutGitModel(PostGitModel): primary: bool class GitModel(PostGitModel): id: int - name: str primary: bool + repository_id: str | None @pydantic.field_serializer("password") def transform_password(self, data: str) -> bool: @@ -44,7 +44,6 @@ class DatabaseGitModel(database.Base): id: orm.Mapped[int] = orm.mapped_column( init=False, primary_key=True, index=True, autoincrement=True ) - name: orm.Mapped[str] path: orm.Mapped[str] entrypoint: orm.Mapped[str] revision: orm.Mapped[str] @@ -60,12 +59,13 @@ class DatabaseGitModel(database.Base): username: orm.Mapped[str] password: orm.Mapped[str] + repository_id: orm.Mapped[str | None] = orm.mapped_column(default=None) + @classmethod def from_post_git_model( cls, model: "DatabaseToolModel", primary: bool, new_model: PostGitModel ): return cls( - name="", primary=primary, model=model, **new_model.model_dump(), diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py b/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py index cb6077ab7..42339c215 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/routes.py @@ -19,6 +19,7 @@ from capellacollab.settings.modelsources.git import util as git_util from . 
import crud, exceptions, injectables, models +from .handler import cache router = fastapi.APIRouter() log = logging.getLogger(__name__) @@ -136,14 +137,24 @@ def create_git_model( ], ) def update_git_model_by_id( - patch_git_model: models.PatchGitModel, + put_git_model: models.PutGitModel, db_git_model: models.DatabaseGitModel = fastapi.Depends( injectables.get_existing_git_model ), db: orm.Session = fastapi.Depends(database.get_db), ) -> models.DatabaseGitModel: - git_util.verify_path_prefix(db, patch_git_model.path) - return crud.update_git_model(db, db_git_model, patch_git_model) + git_util.verify_path_prefix(db, put_git_model.path) + cache.GitValkeyCache(git_model_id=db_git_model.id).clear() + return crud.update_git_model(db, db_git_model, put_git_model) + + +@router.delete("/cache") +def empty_cache( + git_model: models.DatabaseGitModel = fastapi.Depends( + injectables.get_existing_primary_git_model + ), +): + cache.GitValkeyCache(git_model_id=git_model.id).clear() @router.delete( @@ -165,5 +176,5 @@ def delete_git_model_by_id( ): if backups_crud.get_pipelines_for_git_model(db, db_git_model): raise exceptions.GitRepositoryUsedInPipelines(db_git_model.id) - + cache.GitValkeyCache(git_model_id=db_git_model.id).clear() crud.delete_git_model(db, db_git_model) diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py b/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py index 9e599cfec..746fdfd9f 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/validation.py @@ -54,10 +54,10 @@ async def check_pipeline_health( return models.ModelArtifactStatus.UNCONFIGURED try: - git_handler = factory.GitHandlerFactory.create_git_handler( + git_handler = await factory.GitHandlerFactory.create_git_handler( db, primary_git_model ) - await git_handler.get_last_job_run_id_for_git_model(job_name) + await git_handler.get_last_successful_job_run(job_name) except exceptions.GitPipelineJobNotFoundError: return models.ModelArtifactStatus.UNCONFIGURED except handler_exceptions.GitInstanceUnsupportedError: diff --git a/backend/pyproject.toml b/backend/pyproject.toml index c5b0f5845..787a8a39c 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -44,6 +44,7 @@ dependencies = [ "argon2-cffi", "typer", "lxml", + "valkey[libvalkey]", ] [project.urls] @@ -126,6 +127,7 @@ module = [ "argon2.*", "websocket.*", "testcontainers.*", + "valkey.*", ] ignore_missing_imports = true @@ -168,6 +170,7 @@ disable = [ "too-few-public-methods", "too-many-ancestors", "too-many-arguments", + "too-many-positional-arguments", "too-many-boolean-expressions", "too-many-branches", "too-many-instance-attributes", diff --git a/backend/tests/projects/toolmodels/conftest.py b/backend/tests/projects/toolmodels/conftest.py index ea35572c9..14f8433e6 100644 --- a/backend/tests/projects/toolmodels/conftest.py +++ b/backend/tests/projects/toolmodels/conftest.py @@ -1,9 +1,8 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 - -import json -import typing as t +import datetime +import logging import pytest import responses @@ -17,10 +16,62 @@ from capellacollab.core import credentials -@pytest.fixture( - name="git_type", - params=[git_models.GitType.GITLAB], -) +@pytest.fixture(name="mock_git_valkey_cache") +def fixture_mock_git_valkey_cache(monkeypatch: pytest.MonkeyPatch): + class MockGitValkeyCache: + cache: dict[str, 
tuple[datetime.datetime, bytes]] = {} + + def __init__(self, *args, **kwargs) -> None: + super().__init__() + + def get_file_data( + self, file_path: str, revision: str, logger: logging.LoggerAdapter + ) -> tuple[datetime.datetime, bytes] | None: + return MockGitValkeyCache.cache.get(f"f:{file_path}", None) + + def get_artifact_data( + self, job_id: str, file_path: str, logger: logging.LoggerAdapter + ) -> tuple[datetime.datetime, bytes] | None: + return MockGitValkeyCache.cache.get(f"a:{file_path}:{job_id}") + + def put_file_data( + self, + file_path: str, + last_updated: datetime.datetime, + content: bytes, + revision: str, + logger: logging.LoggerAdapter, + ) -> None: + MockGitValkeyCache.cache[f"f:{file_path}"] = ( + last_updated, + content, + ) + + def put_artifact_data( + self, + job_id: str, + file_path: str, + started_at: datetime.datetime, + content: bytes, + logger: logging.LoggerAdapter, + ) -> None: + MockGitValkeyCache.cache[f"a:{file_path}:{job_id}"] = ( + started_at, + content, + ) + + def clear(self) -> None: + MockGitValkeyCache.cache.clear() + + monkeypatch.setattr( + "capellacollab.projects.toolmodels.modelsources.git.handler.cache.GitValkeyCache", + MockGitValkeyCache, + ) + + return MockGitValkeyCache() + + +@pytest.fixture(name="git_type", params=[git_models.GitType.GITLAB]) def fixture_git_type(request: pytest.FixtureRequest) -> git_models.GitType: return request.param @@ -43,7 +94,7 @@ def fixture_git_instance_api_url( def fixture_git_instance( db: orm.Session, git_type: git_models.GitType, git_instance_api_url: str ) -> git_models.DatabaseGitInstance: - git_instance = git_models.DatabaseGitInstance( + git_instance = git_models.PostGitInstance( name="test", url="https://example.com/test/project", api_url=git_instance_api_url, @@ -147,28 +198,6 @@ def fixture_git_query_params(request: pytest.FixtureRequest) -> list[dict]: return request.param -def github_commit_api_callback(request): - response_body = [ - { - "sha": "43bf21488c5cc309af0ec635a8698b8509379527", - "commit": { - "author": { - "name": "test-name", - "email": "test-email", - "date": "2050-06-26T13:46:21Z", - }, - "committer": { - "name": "test-name", - "email": "test-email", - "date": "2050-07-03T09:50:57Z", - }, - "message": "test: Test commit message", - }, - } - ] - return (200, {}, json.dumps(response_body)) - - @pytest.fixture(name="mock_git_get_commit_information_api") def fixture_mock_git_get_commit_information_api( request: pytest.FixtureRequest, diff --git a/backend/tests/projects/toolmodels/fixtures.py b/backend/tests/projects/toolmodels/fixtures.py index daacd2e7a..ebfeeba84 100644 --- a/backend/tests/projects/toolmodels/fixtures.py +++ b/backend/tests/projects/toolmodels/fixtures.py @@ -1,8 +1,6 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 -import uuid - import pytest from sqlalchemy import orm @@ -38,10 +36,7 @@ def fixture_jupyter_model( jupyter_tool: tools_models.DatabaseTool, ) -> toolmodels_models.DatabaseToolModel: jupyter_model = toolmodels_models.PostToolModel( - name="Jupyter test", - description="", - tool_id=jupyter_tool.id, - configuration={"workspace": str(uuid.uuid4())}, + name="Jupyter test", description="", tool_id=jupyter_tool.id ) return toolmodels_crud.create_model( db, diff --git a/backend/tests/projects/toolmodels/test_diagrams.py b/backend/tests/projects/toolmodels/test_diagrams.py index cec25a87e..3510df572 100644 --- a/backend/tests/projects/toolmodels/test_diagrams.py +++ 
b/backend/tests/projects/toolmodels/test_diagrams.py @@ -168,6 +168,7 @@ def fixture_mock_gitlab_diagram_cache_svg(git_type: git_models.GitType): "mock_git_rest_api_for_artifacts", "mock_git_diagram_cache_index_api", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) def test_get_diagram_metadata_from_repository( project: project_models.DatabaseProject, @@ -178,7 +179,7 @@ def test_get_diagram_metadata_from_repository( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) assert response.status_code == 200 - assert len(response.json()) == 2 + assert len(response.json()) == 3 @responses.activate @@ -215,6 +216,7 @@ def test_get_diagram_metadata_from_repository( "mock_git_rest_api_for_artifacts", "mock_git_diagram_cache_index_api", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) def test_get_diagram_metadata_from_artifacts( project: project_models.DatabaseProject, @@ -225,7 +227,7 @@ def test_get_diagram_metadata_from_artifacts( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) assert response.status_code == 200 - assert len(response.json()) == 2 + assert len(response.json()) == 3 @responses.activate @@ -370,9 +372,12 @@ def test_get_diagrams_failed_diagram_cache_job_found( response = client.get( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams", ) - + reason = response.json()["detail"]["reason"] assert response.status_code == 400 - assert response.json()["detail"]["err_code"] == "FAILED_JOB_FOUND" + assert ( + response.json()["detail"]["err_code"] == "UNSUCCESSFUL_JOB_STATE_ERROR" + ) + assert "failure" in reason or "failed" in reason @responses.activate @@ -418,8 +423,8 @@ def test_get_diagrams_failed_diagram_cache_job_found( "mock_git_diagram_cache_index_api", "mock_git_diagram_cache_svg", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) -@pytest.mark.usefixtures("project_user", "git_instance", "git_model") def test_get_single_diagram_from_artifacts( project: project_models.DatabaseProject, capella_model: toolmodels_models.ToolModel, @@ -476,8 +481,8 @@ def test_get_single_diagram_from_artifacts( "mock_git_diagram_cache_index_api", "mock_git_diagram_cache_svg", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) -@pytest.mark.usefixtures("project_user", "git_instance", "git_model") def test_get_single_diagram_from_artifacts_with_file_ending( project: project_models.DatabaseProject, capella_model: toolmodels_models.ToolModel, @@ -535,7 +540,6 @@ def test_get_single_diagram_from_artifacts_with_file_ending( "mock_git_diagram_cache_svg", "mock_git_get_commit_information_api", ) -@pytest.mark.usefixtures("project_user", "git_instance", "git_model") def test_get_single_diagram_from_artifacts_with_wrong_file_ending( project: project_models.DatabaseProject, capella_model: toolmodels_models.ToolModel, @@ -589,12 +593,13 @@ def test_get_single_diagram_from_artifacts_with_wrong_file_ending( "mock_git_diagram_cache_index_api", "mock_git_diagram_cache_svg", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) -@pytest.mark.usefixtures("project_user", "git_instance", "git_model") -def test_get_single_diagram_from_repository( +def test_get_single_diagram_from_file_and_cache( project: project_models.DatabaseProject, capella_model: toolmodels_models.ToolModel, client: testclient.TestClient, + mock_git_valkey_cache, ): response = client.get( f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams/_c90e4Hdf2d2UosmJBo0GTw", @@ -602,3 +607,9 
@@ def test_get_single_diagram_from_repository( assert response.status_code == 200 assert response.content == EXAMPLE_SVG + + client.get( + f"/api/v1/projects/{project.slug}/models/{capella_model.slug}/diagrams/_c90e4Hdf2d2UosmJBo0GTw" + ) + + assert len(mock_git_valkey_cache.cache) == 1 diff --git a/backend/tests/projects/toolmodels/test_git_model.py b/backend/tests/projects/toolmodels/test_git_model.py new file mode 100644 index 000000000..220572d62 --- /dev/null +++ b/backend/tests/projects/toolmodels/test_git_model.py @@ -0,0 +1,103 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import pytest +from fastapi import testclient +from sqlalchemy import orm + +import capellacollab.projects.toolmodels.modelsources.git.crud as project_git_crud +import capellacollab.projects.toolmodels.modelsources.git.models as project_git_models +from capellacollab.projects.toolmodels.modelsources.git.handler import ( + cache as git_handler_cache, +) + + +@pytest.fixture(name="mock_valkey_clear") +def fixture_mock_valkey_clear(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr( + git_handler_cache.GitValkeyCache, "clear", lambda self: None + ) + + +@pytest.mark.usefixtures( + "git_instance", "project_manager", "capella_model", "mock_valkey_clear" +) +def test_reset_repository_id_on_git_model_path_change( + db: orm.Session, + git_model: project_git_models.DatabaseGitModel, + client: testclient.TestClient, +): + assert git_model.repository_id is None + + project_git_crud.update_git_model_repository_id(db, git_model, "1") + + assert git_model.repository_id == "1" + + new_path = "https://example.com/test/project/anything" + response = client.put( + f"/api/v1/projects/{git_model.model.project.slug}/models/{git_model.model.slug}/modelsources/git/{git_model.id}", + json={ + "path": new_path, + "entrypoint": git_model.entrypoint, + "revision": git_model.revision, + "username": git_model.username, + "password": "", + "primary": git_model.primary, + }, + ) + + assert response.status_code == 200 + + assert git_model.repository_id is None + assert git_model.path == new_path + + assert response.json()["repository_id"] is None + assert response.json()["path"] == new_path + + +@pytest.mark.usefixtures( + "project_manager", "capella_model", "mock_valkey_clear" +) +def test_delete_git_model( + git_model: project_git_models.DatabaseGitModel, + client: testclient.TestClient, +): + project_slug = git_model.model.project.slug + model_slug = git_model.model.slug + + del_response = client.delete( + f"/api/v1/projects/{project_slug}/models/{model_slug}/modelsources/git/{git_model.id}" + ) + + assert del_response.status_code == 204 + + get_response = client.get( + f"/api/v1/projects/{project_slug}/models/{model_slug}/modelsources/git" + ) + + assert get_response.status_code == 200 + assert len(get_response.json()) == 0 + + +@pytest.mark.usefixtures("git_instance", "project_manager", "capella_model") +def test_raise_instance_prefix_unmatched_error_on_model_update( + git_model: project_git_models.DatabaseGitModel, + client: testclient.TestClient, +): + response = client.put( + f"/api/v1/projects/{git_model.model.project.slug}/models/{git_model.model.slug}/modelsources/git/{git_model.id}", + json={ + "path": "https://unknown.com/test/project", + "entrypoint": git_model.entrypoint, + "revision": git_model.revision, + "username": git_model.username, + "password": "", + "primary": git_model.primary, + }, + ) + + assert response.status_code == 404 + assert ( + 
response.json()["detail"]["err_code"] + == "NO_GIT_INSTANCE_WITH_PREFIX_FOUND" + ) diff --git a/backend/tests/projects/toolmodels/test_model_badge.py b/backend/tests/projects/toolmodels/test_model_badge.py index d30a9c62e..852d666d1 100644 --- a/backend/tests/projects/toolmodels/test_model_badge.py +++ b/backend/tests/projects/toolmodels/test_model_badge.py @@ -194,6 +194,7 @@ def test_get_model_badge_fails_without_api_endpoint( "mock_git_rest_api", "mock_git_model_badge_file_api", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) def test_get_model_badge( project: project_models.DatabaseProject, @@ -232,6 +233,7 @@ def test_get_model_badge( "mock_git_model_badge_file_api_not_found", "mock_get_model_badge_from_artifacts_api", "mock_git_get_commit_information_api", + "mock_git_valkey_cache", ) def test_get_model_badge_from_artifacts( project: project_models.DatabaseProject, diff --git a/frontend/src/app/openapi/.openapi-generator/FILES b/frontend/src/app/openapi/.openapi-generator/FILES index 929e1a2d8..e2b8c8111 100644 --- a/frontend/src/app/openapi/.openapi-generator/FILES +++ b/frontend/src/app/openapi/.openapi-generator/FILES @@ -98,7 +98,6 @@ model/notice-level.ts model/notice-response.ts model/page-history-event.ts model/page-pipeline-run.ts -model/patch-git-model.ts model/patch-project-user.ts model/patch-project.ts model/patch-t4-c-instance.ts @@ -132,6 +131,7 @@ model/prometheus-configuration-output.ts model/protocol.ts model/pure-variants-licenses-input.ts model/pure-variants-licenses-output.ts +model/put-git-model.ts model/rdp-ports-input.ts model/rdp-ports-output.ts model/resources-input.ts diff --git a/frontend/src/app/openapi/api/projects-models-diagrams.service.ts b/frontend/src/app/openapi/api/projects-models-diagrams.service.ts index ac18cd4c8..d5fcb8520 100644 --- a/frontend/src/app/openapi/api/projects-models-diagrams.service.ts +++ b/frontend/src/app/openapi/api/projects-models-diagrams.service.ts @@ -99,13 +99,14 @@ export class ProjectsModelsDiagramsService { * @param diagramUuidOrFilename * @param projectSlug * @param modelSlug + * @param jobId * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body. * @param reportProgress flag to report request and response progress. 
*/ - public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<Blob>; - public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpResponse<Blob>>; - public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpEvent<Blob>>; - public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<any> { + public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, jobId?: string, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<Blob>; + public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, jobId?: string, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpResponse<Blob>>; + public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, jobId?: string, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpEvent<Blob>>; + public getDiagram(diagramUuidOrFilename: string, projectSlug: string, modelSlug: string, jobId?: string, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'image/svg+xml' | 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<any> { if (diagramUuidOrFilename === null || diagramUuidOrFilename === undefined) { throw new Error('Required parameter diagramUuidOrFilename was null or undefined when calling getDiagram.'); } @@ -116,6 +117,12 @@ export class ProjectsModelsDiagramsService { throw new Error('Required parameter modelSlug was null or undefined when calling getDiagram.'); } + let localVarQueryParameters = new HttpParams({encoder: this.encoder}); + if (jobId !== undefined && jobId !== null) { + localVarQueryParameters = this.addToHttpParams(localVarQueryParameters, + jobId, 'job_id'); + } + let localVarHeaders = this.defaultHeaders; let localVarCredential: string | undefined; @@ -153,6 +160,7 @@ export class ProjectsModelsDiagramsService { return this.httpClient.request('get', `${this.configuration.basePath}${localVarPath}`, { context: localVarHttpContext, + params: localVarQueryParameters, responseType: "blob", withCredentials: this.configuration.withCredentials, headers: localVarHeaders, diff --git a/frontend/src/app/openapi/api/projects-models-git.service.ts b/frontend/src/app/openapi/api/projects-models-git.service.ts index 0e60953f6..20d6a66f4 100644 --- a/frontend/src/app/openapi/api/projects-models-git.service.ts +++ b/frontend/src/app/openapi/api/projects-models-git.service.ts @@ -25,9 +25,9 @@ import { GitModel } from
'../model/git-model'; // @ts-ignore import { HTTPValidationError } from '../model/http-validation-error'; // @ts-ignore -import { PatchGitModel } from '../model/patch-git-model'; -// @ts-ignore import { PostGitModel } from '../model/post-git-model'; +// @ts-ignore +import { PutGitModel } from '../model/put-git-model'; // @ts-ignore import { BASE_PATH, COLLECTION_FORMATS } from '../variables'; @@ -268,6 +268,81 @@ export class ProjectsModelsGitService { ); } + /** + * Empty Cache + * @param projectSlug + * @param modelSlug + * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body. + * @param reportProgress flag to report request and response progress. + */ + public emptyCache(projectSlug: string, modelSlug: string, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable; + public emptyCache(projectSlug: string, modelSlug: string, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable>; + public emptyCache(projectSlug: string, modelSlug: string, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable>; + public emptyCache(projectSlug: string, modelSlug: string, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable { + if (projectSlug === null || projectSlug === undefined) { + throw new Error('Required parameter projectSlug was null or undefined when calling emptyCache.'); + } + if (modelSlug === null || modelSlug === undefined) { + throw new Error('Required parameter modelSlug was null or undefined when calling emptyCache.'); + } + + let localVarHeaders = this.defaultHeaders; + + let localVarCredential: string | undefined; + // authentication (PersonalAccessToken) required + localVarCredential = this.configuration.lookupCredential('PersonalAccessToken'); + if (localVarCredential) { + localVarHeaders = localVarHeaders.set('Authorization', 'Basic ' + localVarCredential); + } + + let localVarHttpHeaderAcceptSelected: string | undefined = options && options.httpHeaderAccept; + if (localVarHttpHeaderAcceptSelected === undefined) { + // to determine the Accept header + const httpHeaderAccepts: string[] = [ + 'application/json' + ]; + localVarHttpHeaderAcceptSelected = this.configuration.selectHeaderAccept(httpHeaderAccepts); + } + if (localVarHttpHeaderAcceptSelected !== undefined) { + localVarHeaders = localVarHeaders.set('Accept', localVarHttpHeaderAcceptSelected); + } + + let localVarHttpContext: HttpContext | undefined = options && options.context; + if (localVarHttpContext === undefined) { + localVarHttpContext = new HttpContext(); + } + + let localVarTransferCache: boolean | undefined = options && options.transferCache; + if (localVarTransferCache === undefined) { + localVarTransferCache = true; + } + + + let responseType_: 'text' | 'json' | 'blob' = 'json'; + if (localVarHttpHeaderAcceptSelected) { + if (localVarHttpHeaderAcceptSelected.startsWith('text')) { + responseType_ = 'text'; + } else if (this.configuration.isJsonMime(localVarHttpHeaderAcceptSelected)) { + responseType_ = 'json'; + } else { + responseType_ = 'blob'; + } + } + + let localVarPath = 
`/api/v1/projects/${this.configuration.encodeParam({name: "projectSlug", value: projectSlug, in: "path", style: "simple", explode: false, dataType: "string", dataFormat: undefined})}/models/${this.configuration.encodeParam({name: "modelSlug", value: modelSlug, in: "path", style: "simple", explode: false, dataType: "string", dataFormat: undefined})}/modelsources/git/cache`; + return this.httpClient.request('delete', `${this.configuration.basePath}${localVarPath}`, + { + context: localVarHttpContext, + responseType: responseType_, + withCredentials: this.configuration.withCredentials, + headers: localVarHeaders, + observe: observe, + transferCache: localVarTransferCache, + reportProgress: reportProgress + } + ); + } + /** * Get Git Model By Id * @param projectSlug @@ -595,14 +670,14 @@ export class ProjectsModelsGitService { * @param projectSlug * @param gitModelId * @param modelSlug - * @param patchGitModel + * @param putGitModel * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body. * @param reportProgress flag to report request and response progress. */ - public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, patchGitModel: PatchGitModel, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<GitModel>; - public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, patchGitModel: PatchGitModel, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpResponse<GitModel>>; - public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, patchGitModel: PatchGitModel, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpEvent<GitModel>>; - public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, patchGitModel: PatchGitModel, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<any> { + public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, putGitModel: PutGitModel, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<GitModel>; + public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, putGitModel: PutGitModel, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpResponse<GitModel>>; + public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, putGitModel: PutGitModel, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<HttpEvent<GitModel>>; + public updateGitModelById(projectSlug: string, gitModelId: number, modelSlug: string, putGitModel: PutGitModel, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json', context?: HttpContext, transferCache?: boolean}): Observable<any> { if (projectSlug === null || projectSlug === undefined) { throw new Error('Required parameter projectSlug was null or undefined when calling updateGitModelById.'); } @@ -612,8 +687,8 @@
export class ProjectsModelsGitService { if (modelSlug === null || modelSlug === undefined) { throw new Error('Required parameter modelSlug was null or undefined when calling updateGitModelById.'); } - if (patchGitModel === null || patchGitModel === undefined) { - throw new Error('Required parameter patchGitModel was null or undefined when calling updateGitModelById.'); + if (putGitModel === null || putGitModel === undefined) { + throw new Error('Required parameter putGitModel was null or undefined when calling updateGitModelById.'); } let localVarHeaders = this.defaultHeaders; @@ -672,7 +747,7 @@ export class ProjectsModelsGitService { return this.httpClient.request('put', `${this.configuration.basePath}${localVarPath}`, { context: localVarHttpContext, - body: patchGitModel, + body: putGitModel, responseType: responseType_, withCredentials: this.configuration.withCredentials, headers: localVarHeaders, diff --git a/frontend/src/app/openapi/model/diagram-cache-metadata.ts b/frontend/src/app/openapi/model/diagram-cache-metadata.ts index 1e3894ed5..8b4893bc3 100644 --- a/frontend/src/app/openapi/model/diagram-cache-metadata.ts +++ b/frontend/src/app/openapi/model/diagram-cache-metadata.ts @@ -15,5 +15,6 @@ import { DiagramMetadata } from './diagram-metadata'; export interface DiagramCacheMetadata { diagrams: Array<DiagramMetadata>; last_updated: string; + job_id: string | null; } diff --git a/frontend/src/app/openapi/model/git-model.ts b/frontend/src/app/openapi/model/git-model.ts index 8d3b1307c..c5a49c868 100644 --- a/frontend/src/app/openapi/model/git-model.ts +++ b/frontend/src/app/openapi/model/git-model.ts @@ -18,7 +18,7 @@ export interface GitModel { username: string; password: boolean; id: number; - name: string; primary: boolean; + repository_id: string | null; } diff --git a/frontend/src/app/openapi/model/models.ts b/frontend/src/app/openapi/model/models.ts index a0c996257..547f2b1aa 100644 --- a/frontend/src/app/openapi/model/models.ts +++ b/frontend/src/app/openapi/model/models.ts @@ -77,7 +77,6 @@ export * from './notice-level'; export * from './notice-response'; export * from './page-history-event'; export * from './page-pipeline-run'; -export * from './patch-git-model'; export * from './patch-project'; export * from './patch-project-user'; export * from './patch-t4-c-instance'; @@ -111,6 +110,7 @@ export * from './prometheus-configuration-output'; export * from './protocol'; export * from './pure-variants-licenses-input'; export * from './pure-variants-licenses-output'; +export * from './put-git-model'; export * from './rdp-ports-input'; export * from './rdp-ports-output'; export * from './resources-input'; diff --git a/frontend/src/app/openapi/model/patch-git-model.ts b/frontend/src/app/openapi/model/put-git-model.ts similarity index 93% rename from frontend/src/app/openapi/model/patch-git-model.ts rename to frontend/src/app/openapi/model/put-git-model.ts index 743aed476..777c1004e 100644 --- a/frontend/src/app/openapi/model/patch-git-model.ts +++ b/frontend/src/app/openapi/model/put-git-model.ts @@ -11,7 +11,7 @@ -export interface PatchGitModel { +export interface PutGitModel { path: string; entrypoint: string; revision: string; diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.component.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.component.ts index da44ccb1e..76cbf4adb 100644 ---
a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.component.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.component.ts @@ -70,6 +70,9 @@ export class ModelDiagramCodeBlockComponent implements OnInit, AfterViewInit { @Input({ required: true }) project!: Project; + @Input() + jobId: string | undefined; + @Input() expanded = false; @@ -103,7 +106,7 @@ model = capellambse.MelodyModel( diagram_cache={ "path": "${basePath}/api/v1/projects/${this.project!.slug}/models/${ this.model.slug - }/diagrams/%s", + }/diagrams/%s${this.jobId ? '?job_id=' + this.jobId : ''}", "username": "${this.userService.user?.name}", "password": "${this.passwordValue ? this.passwordValue : '**************'}", } diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.stories.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.stories.ts index 490cbde3c..8b7ceaf58 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.stories.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-code-block/model-diagram-code-block.stories.ts @@ -22,3 +22,21 @@ export const CodeBlock: Story = { expanded: true, }, }; + +export const CodeBlockWithToken: Story = { + args: { + model: mockModel, + project: mockProject, + expanded: true, + passwordValue: 'verysecretpassword', + }, +}; + +export const CodeBlockWithJobID: Story = { + args: { + model: mockModel, + project: mockProject, + expanded: true, + jobId: '1234', + }, +}; diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.html b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.html index ff5619b6a..10a21be6b 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.html +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.html @@ -15,18 +15,31 @@

View diagrams

- - Search - - search - +
+ + Search + + search + + +
+ @if (this.diagramMetadata?.diagrams?.length === 0) { Your model doesn't seem to contain diagrams. diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts index 610c69c38..10e3b7e72 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.component.ts @@ -30,11 +30,13 @@ import { MatInput } from '@angular/material/input'; import { MatTooltip } from '@angular/material/tooltip'; import { saveAs } from 'file-saver'; import { NgxSkeletonLoaderModule } from 'ngx-skeleton-loader'; +import { Observable, switchMap, tap } from 'rxjs'; import { DiagramCacheMetadata, DiagramMetadata, Project, ProjectsModelsDiagramsService, + ProjectsModelsGitService, ToolModel, } from 'src/app/openapi'; import { @@ -89,23 +91,39 @@ export class ModelDiagramDialogComponent implements OnInit { constructor( private dialog: MatDialog, private dialogRef: MatDialogRef<ModelDiagramDialogComponent>, + private projectsModelsGitService: ProjectsModelsGitService, private projectsModelsDiagramsService: ProjectsModelsDiagramsService, @Inject(MAT_DIALOG_DATA) public data: { model: ToolModel; project: Project }, ) {} ngOnInit(): void { - this.projectsModelsDiagramsService + this.loadDiagramCacheMetadata().subscribe(); + } + + loadDiagramCacheMetadata(): Observable<DiagramCacheMetadata> { + return this.projectsModelsDiagramsService .getDiagramMetadata(this.data.project.slug, this.data.model.slug) - .subscribe({ - next: (diagramMetadata) => { - this.diagramMetadata = diagramMetadata; - this.observeVisibleDiagrams(); - }, - error: () => { - this.dialogRef.close(); - }, - }); + .pipe( + tap({ + next: (diagramMetadata) => { + this.diagramMetadata = diagramMetadata; + this.observeVisibleDiagrams(); + }, + error: () => { + this.dialogRef.close(); + }, + }), + ); + } + + clearCache() { + this.diagramMetadata = undefined; + this.diagrams = {}; + this.projectsModelsGitService + .emptyCache(this.data.project.slug, this.data.model.slug) + .pipe(switchMap(() => this.loadDiagramCacheMetadata())) + .subscribe(); + } observeVisibleDiagrams() { @@ -137,8 +155,14 @@ export class ModelDiagramDialogComponent implements OnInit { lazyLoadDiagram(uuid: string) { if (!this.diagrams[uuid]) { this.diagrams[uuid] = { loading: true, content: undefined }; + this.projectsModelsDiagramsService - .getDiagram(uuid, this.data.project.slug, this.data.model.slug) + .getDiagram( + uuid, + this.data.project.slug, + this.data.model.slug, + this.diagramMetadata?.job_id ??
undefined, + ) .subscribe({ next: (response: Blob) => { const reader = new FileReader(); diff --git a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts index 12c20c4a4..6d6819972 100644 --- a/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts +++ b/frontend/src/app/projects/models/diagrams/model-diagram-dialog/model-diagram-dialog.stories.ts @@ -36,11 +36,13 @@ type Story = StoryObj; const emptyDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const loadedDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [{ name: 'fakeDiagram1', uuid: 'fakeUUID-Loaded', success: true }], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const notLoadedDiagramCacheMetadata: DiagramCacheMetadata = { @@ -48,11 +50,13 @@ const notLoadedDiagramCacheMetadata: DiagramCacheMetadata = { { name: 'fakeDiagram2', uuid: 'fakeUUID-Not-Loaded', success: true }, ], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const errorDiagramCacheMetadata: DiagramCacheMetadata = { diagrams: [{ name: 'fakeDiagram3', uuid: 'fakeUUID-Loaded', success: false }], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; const combinedDiagramCacheMetadata: DiagramCacheMetadata = { @@ -62,6 +66,7 @@ const combinedDiagramCacheMetadata: DiagramCacheMetadata = { { name: 'fakeDiagram3', uuid: 'fakeUUID-Loaded', success: false }, ], last_updated: '2024-04-29T14:00:00Z', + job_id: null, }; // prettier-ignore diff --git a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts b/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts index cb83c985e..6c5ea3f14 100644 --- a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts +++ b/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/model-complexity-badge.component.ts @@ -3,13 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ import { CommonModule } from '@angular/common'; +import { HttpContext } from '@angular/common/http'; import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; import { MatIconModule } from '@angular/material/icon'; import { MatSlideToggleModule } from '@angular/material/slide-toggle'; import { UntilDestroy, untilDestroyed } from '@ngneat/until-destroy'; import { NgxSkeletonLoaderModule } from 'ngx-skeleton-loader'; import { filter, map, switchMap } from 'rxjs'; -import { ModelComplexityBadgeService } from 'src/app/projects/project-detail/model-overview/model-complexity-badge/service/model-complexity-badge.service'; +import { SKIP_ERROR_HANDLING } from 'src/app/general/error-handling/error-handling.interceptor'; +import { ProjectsModelsModelComplexityBadgeService } from 'src/app/openapi'; import { ProjectWrapperService } from 'src/app/projects/service/project.service'; import { environment } from 'src/environments/environment'; @@ -35,7 +37,7 @@ export class ModelComplexityBadgeComponent implements OnChanges { errorCode?: string; constructor( - private modelComplexityBadgeService: ModelComplexityBadgeService, + private modelComplexityBadgeService: ProjectsModelsModelComplexityBadgeService, private projectService: ProjectWrapperService, ) {} @@ -55,6 +57,11 @@ export class 
ModelComplexityBadgeComponent implements OnChanges { return this.modelComplexityBadgeService.getModelComplexityBadge( projectSlug, this.modelSlug!, + undefined, + undefined, + { + context: new HttpContext().set(SKIP_ERROR_HANDLING, true), + }, ); }), ) diff --git a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/service/model-complexity-badge.service.ts b/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/service/model-complexity-badge.service.ts deleted file mode 100644 index 2c78313f8..000000000 --- a/frontend/src/app/projects/project-detail/model-overview/model-complexity-badge/service/model-complexity-badge.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/* - * SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors - * SPDX-License-Identifier: Apache-2.0 - */ -import { HttpContext } from '@angular/common/http'; -import { Injectable } from '@angular/core'; -import { Observable } from 'rxjs'; -import { SKIP_ERROR_HANDLING } from 'src/app/general/error-handling/error-handling.interceptor'; -import { ProjectsModelsModelComplexityBadgeService } from 'src/app/openapi'; - -@Injectable({ - providedIn: 'root', -}) -export class ModelComplexityBadgeService { - constructor( - private complexityBadgeService: ProjectsModelsModelComplexityBadgeService, - ) {} - - getModelComplexityBadge( - projectSlug: string, - modelSlug: string, - ): Observable { - return this.complexityBadgeService.getModelComplexityBadge( - projectSlug, - modelSlug, - undefined, - undefined, - { - context: new HttpContext().set(SKIP_ERROR_HANDLING, true), - }, - ); - } -} diff --git a/frontend/src/storybook/git.ts b/frontend/src/storybook/git.ts index 7dcc26b3f..5c20edbb2 100644 --- a/frontend/src/storybook/git.ts +++ b/frontend/src/storybook/git.ts @@ -11,13 +11,13 @@ import { export const mockPrimaryGitModel: Readonly = { id: 1, - name: 'fakeGitModelName', primary: true, path: 'fakePath', revision: 'fakeRevision', entrypoint: 'fakeEntrypoint', password: false, username: 'fakeUsername', + repository_id: null, }; export class MockGitModelService implements Partial { diff --git a/helm/config/backend.yaml b/helm/config/backend.yaml index b032f5f2f..35137d6cc 100644 --- a/helm/config/backend.yaml +++ b/helm/config/backend.yaml @@ -66,6 +66,9 @@ database: url: "{{ .Values.database.backend.external.uri }}" {{ end }} +valkey: + url: "valkey://default:{{ .Values.valkey.password }}@{{ .Release.Name }}-valkey:6379/0" + pipelines: timeout: {{ .Values.pipelines.timeout }} diff --git a/helm/templates/valkey/valkey.deployment.yaml b/helm/templates/valkey/valkey.deployment.yaml new file mode 100644 index 000000000..23b18edcd --- /dev/null +++ b/helm/templates/valkey/valkey.deployment.yaml @@ -0,0 +1,64 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-valkey + labels: + id: {{ .Release.Name }}-deployment-valkey +spec: + replicas: {{ .Values.replicaCount.valkey | default 1 }} + selector: + matchLabels: + id: {{ .Release.Name }}-deployment-valkey + strategy: + type: Recreate + template: + metadata: + labels: + id: {{ .Release.Name }}-deployment-valkey + spec: + automountServiceAccountToken: false + volumes: + - name: data + emptyDir: {} + - name: config + secret: + secretName: {{ .Release.Name }}-valkey + containers: + - name: {{ .Release.Name }}-valkey + {{ if .Values.docker.images.valkey }} + image: {{ .Values.docker.images.valkey }} + {{ 
else }} + image: {{ .Values.docker.registry.external }}/valkey/valkey:7.2.6 + {{ end }} + args: ["valkey-server", "/usr/local/etc/valkey/valkey.conf"] + ports: + - name: valkey + containerPort: 6379 + protocol: TCP + resources: + {{ if .Values.development }} + requests: + cpu: "0.005" + memory: 20Mi + limits: + cpu: "0.1" + memory: 250Mi + ephemeral-storage: "2Gi" + {{ else }} + requests: + cpu: "0.01" + memory: "500Mi" + limits: + cpu: "0.2" + memory: "3Gi" + ephemeral-storage: "5Gi" + {{ end }} + volumeMounts: + - mountPath: /data + name: data + - mountPath: /usr/local/etc/valkey/valkey.conf + name: config + subPath: valkey.conf diff --git a/helm/templates/valkey/valkey.disruptionsbudget.yaml b/helm/templates/valkey/valkey.disruptionsbudget.yaml new file mode 100644 index 000000000..7d13c6714 --- /dev/null +++ b/helm/templates/valkey/valkey.disruptionsbudget.yaml @@ -0,0 +1,12 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: {{ .Release.Name }}-valkey +spec: + minAvailable: 1 + selector: + matchLabels: + id: {{ .Release.Name }}-deployment-valkey diff --git a/helm/templates/valkey/valkey.secret.yaml b/helm/templates/valkey/valkey.secret.yaml new file mode 100644 index 000000000..ae4b1bf42 --- /dev/null +++ b/helm/templates/valkey/valkey.secret.yaml @@ -0,0 +1,13 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +apiVersion: v1 +kind: Secret +metadata: + name: {{ .Release.Name }}-valkey + labels: + id: {{ .Release.Name }}-secret-valkey +type: Opaque +stringData: + valkey.conf: | + requirepass {{ .Values.valkey.password }} diff --git a/helm/templates/valkey/valkey.service.yaml b/helm/templates/valkey/valkey.service.yaml new file mode 100644 index 000000000..42dc7cb17 --- /dev/null +++ b/helm/templates/valkey/valkey.service.yaml @@ -0,0 +1,18 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +apiVersion: v1 +kind: Service +metadata: + name: {{ .Release.Name }}-valkey + labels: + id: {{ .Release.Name }}-service-valkey +spec: + type: ClusterIP + selector: + id: {{ .Release.Name }}-deployment-valkey + ports: + - port: 6379 + targetPort: valkey + protocol: TCP + name: valkey diff --git a/helm/templates/valkey/valkey.vpa.yaml b/helm/templates/valkey/valkey.vpa.yaml new file mode 100644 index 000000000..8eb800ba5 --- /dev/null +++ b/helm/templates/valkey/valkey.vpa.yaml @@ -0,0 +1,17 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +{{ if .Values.cluster.vpa.enabled }} +apiVersion: autoscaling.k8s.io/v1 +kind: VerticalPodAutoscaler +metadata: + name: {{ .Release.Name }}-valkey + namespace: {{ .Release.Namespace }} +spec: + targetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ .Release.Name }}-valkey + updatePolicy: + updateMode: 'Off' +{{ end }} diff --git a/helm/values.yaml b/helm/values.yaml index 1e8ff448e..d3fed5a4e 100644 --- a/helm/values.yaml +++ b/helm/values.yaml @@ -9,6 +9,7 @@ replicaCount: backend: 1 backendPostgres: 1 + valkey: 1 docs: 1 frontend: 1 grafana: 1 @@ -54,6 +55,7 @@ docker: promtail: null postgres: null + valkey: null grafana: null nginxUnprivileged: null mockOauth2Server: null @@ -141,6 +143,9 @@ database: # Provide URI to the datebase in the format: postgresql://user:password@url:port/db_name uri: postgresql://user:password@url:port/db_name +valkey: 
+ password: secret + backend: authentication: endpoints: @@ -238,6 +243,12 @@ cluster: service: kube-dns # dns-default for OpenShift namespace: kube-system # openshift-dns for OpenShift + # Enable Vertical Pod Autoscaler for resource recommendations # If you want to enable it locally, install VPA first: # `make install-vpa` vpa: + enabled: false + # Specify the NO_PROXY environment for the backend # Leave it empty if not needed proxy:
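Note on the new `mock_git_valkey_cache` fixture referenced throughout the diagram and model-badge tests above: the fixture itself is not part of this diff, so the sketch below only illustrates the shape the tests rely on (an object exposing a `cache` dict and a `clear()` method). It assumes the git handler instantiates `GitValkeyCache` from the `handler.cache` module imported in test_git_model.py; the put/get method names are hypothetical.

    import pytest

    from capellacollab.projects.toolmodels.modelsources.git.handler import (
        cache as git_handler_cache,
    )


    class InMemoryGitCache:
        """Stand-in that records cached file contents in a plain dict."""

        def __init__(self) -> None:
            self.cache: dict[str, bytes] = {}

        def put_file(self, key: str, data: bytes) -> None:  # hypothetical name
            self.cache[key] = data

        def get_file(self, key: str) -> bytes | None:  # hypothetical name
            return self.cache.get(key)

        def clear(self) -> None:
            self.cache.clear()


    @pytest.fixture(name="mock_git_valkey_cache")
    def fixture_mock_git_valkey_cache(monkeypatch: pytest.MonkeyPatch):
        # Assumption: the handler looks up GitValkeyCache on this module,
        # so replacing the class routes all cache access to the dict above.
        in_memory_cache = InMemoryGitCache()
        monkeypatch.setattr(
            git_handler_cache,
            "GitValkeyCache",
            lambda *args, **kwargs: in_memory_cache,
        )
        return in_memory_cache

A fixture of this shape is consistent with the assertions in test_diagrams.py: the first diagram request populates the cache, the second is served from it, and `len(mock_git_valkey_cache.cache) == 1` holds afterwards.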
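For manual testing, the request flow mirrors what the Angular dialog does after this change: clear the per-model git cache, reload the diagram metadata (which now carries the `job_id` of the diagram-cache job, or null when the index was read from the repository), and pass that `job_id` back when fetching individual diagrams. A rough sketch using the `requests` library; the base URL, slugs, and basic-auth credentials (username plus personal access token, as in the generated client) are placeholders.

    import requests

    BASE = "https://capella-collab.example.com"  # placeholder instance URL
    AUTH = ("username", "personal-access-token")  # placeholder credentials
    PROJECT, MODEL = "my-project", "my-model"  # placeholder slugs

    # Drop the cached data for this model's git repository.
    requests.delete(
        f"{BASE}/api/v1/projects/{PROJECT}/models/{MODEL}/modelsources/git/cache",
        auth=AUTH,
        timeout=30,
    ).raise_for_status()

    # Reload the diagram metadata; the payload now includes job_id.
    metadata = requests.get(
        f"{BASE}/api/v1/projects/{PROJECT}/models/{MODEL}/diagrams",
        auth=AUTH,
        timeout=30,
    ).json()

    # Fetch a single diagram, pinned to the same job via the job_id query
    # parameter introduced in this change (omitted when job_id is null).
    if metadata["diagrams"]:
        uuid = metadata["diagrams"][0]["uuid"]
        params = {"job_id": metadata["job_id"]} if metadata["job_id"] else None
        svg = requests.get(
            f"{BASE}/api/v1/projects/{PROJECT}/models/{MODEL}/diagrams/{uuid}",
            params=params,
            auth=AUTH,
            timeout=30,
        ).content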