From 66728d2b8a4b745851a2774040005fe264663502 Mon Sep 17 00:00:00 2001 From: romeonicholas Date: Thu, 22 Feb 2024 13:22:41 +0100 Subject: [PATCH] feat: Convert config template and schema to Pydantic model Co-authored-by: MoritzWeber0 --- backend/.gitignore | 1 - backend/capellacollab/__init__.py | 5 - backend/capellacollab/__main__.py | 6 +- backend/capellacollab/alembic/env.py | 4 +- backend/capellacollab/config/__init__.py | 21 +- .../capellacollab/config/config_schema.yaml | 304 --------------- backend/capellacollab/config/diff.py | 62 --- backend/capellacollab/config/generate.py | 37 ++ backend/capellacollab/config/loader.py | 41 +- backend/capellacollab/config/models.py | 364 ++++++++++++++++++ .../core/authentication/__init__.py | 5 +- .../core/authentication/jwt_bearer.py | 4 +- .../authentication/provider/azure/__main__.py | 9 +- .../authentication/provider/azure/keystore.py | 14 +- .../authentication/provider/azure/routes.py | 14 +- .../authentication/provider/oauth/__main__.py | 8 +- .../authentication/provider/oauth/flow.py | 35 +- .../authentication/provider/oauth/keystore.py | 14 +- .../capellacollab/core/database/__init__.py | 2 +- .../capellacollab/core/database/migration.py | 6 +- .../capellacollab/core/logging/__init__.py | 2 +- backend/capellacollab/core/logging/loki.py | 17 +- backend/capellacollab/core/metadata.py | 16 +- .../toolmodels/backups/runs/interface.py | 2 +- .../modelsources/git/github/handler.py | 10 +- .../modelsources/git/gitlab/handler.py | 12 +- .../capellacollab/sessions/hooks/guacamole.py | 23 +- backend/capellacollab/sessions/hooks/http.py | 12 - .../capellacollab/sessions/hooks/jupyter.py | 8 - backend/capellacollab/sessions/idletimeout.py | 6 +- backend/capellacollab/sessions/injection.py | 4 +- .../capellacollab/sessions/operators/k8s.py | 63 +-- backend/capellacollab/sessions/util.py | 13 +- .../settings/modelsources/t4c/interface.py | 2 +- .../t4c/repositories/interface.py | 2 +- backend/config/config_template.yaml | 99 
----- backend/pyproject.toml | 2 - .../tests/config/test_app_configuration.py | 107 +++++ .../sessions/test_session_environment.py | 11 +- .../sessions/test_session_idletimeout.py | 9 +- ...ration.py => test_global_configuration.py} | 6 + backend/tests/test_event_creation.py | 2 +- docs/docs/development/index.md | 7 +- helm/config/backend.yaml | 5 +- helm/values.yaml | 6 - 45 files changed, 694 insertions(+), 708 deletions(-) delete mode 100644 backend/capellacollab/config/config_schema.yaml delete mode 100644 backend/capellacollab/config/diff.py create mode 100644 backend/capellacollab/config/generate.py create mode 100644 backend/capellacollab/config/models.py delete mode 100644 backend/config/config_template.yaml create mode 100644 backend/tests/config/test_app_configuration.py rename backend/tests/settings/{test_configuration.py => test_global_configuration.py} (96%) diff --git a/backend/.gitignore b/backend/.gitignore index 7e8560735..d55f338df 100644 --- a/backend/.gitignore +++ b/backend/.gitignore @@ -3,7 +3,6 @@ .idea config/* -!config/config_template.yaml .history **/__pycache__/ .vscode diff --git a/backend/capellacollab/__init__.py b/backend/capellacollab/__init__.py index b05402fb2..3d9638f6f 100644 --- a/backend/capellacollab/__init__.py +++ b/backend/capellacollab/__init__.py @@ -3,13 +3,8 @@ from importlib import metadata -from capellacollab import config as config_module - try: __version__ = metadata.version("capellacollab-backend") except metadata.PackageNotFoundError: __version__ = "0.0.0+unknown" del metadata - - -config_module.validate_schema() diff --git a/backend/capellacollab/__main__.py b/backend/capellacollab/__main__.py index c5a0a099a..d23b0fe0f 100644 --- a/backend/capellacollab/__main__.py +++ b/backend/capellacollab/__main__.py @@ -52,20 +52,20 @@ stream_handler.setFormatter(core_logging.CustomFormatter()) timed_rotating_file_handler = core_logging.CustomTimedRotatingFileHandler( - str(config["logging"]["logPath"]) + "backend.log" + 
str(config.logging.log_path) + "backend.log" ) timed_rotating_file_handler.setFormatter( core_logging.CustomFormatter(colored_output=False) ) logging.basicConfig( - level=config["logging"]["level"], + level=config.logging.level, handlers=[stream_handler, timed_rotating_file_handler], ) async def startup(): - migration.migrate_db(engine, config["database"]["url"]) + migration.migrate_db(engine, config.database.url) logging.info("Migrations done - Server is running") # This is needed to load the Kubernetes configuration at startup diff --git a/backend/capellacollab/alembic/env.py b/backend/capellacollab/alembic/env.py index 342c12421..4852e1352 100644 --- a/backend/capellacollab/alembic/env.py +++ b/backend/capellacollab/alembic/env.py @@ -16,7 +16,7 @@ # access to the values within the .ini file in use. config = context.config -logging.basicConfig(level=cfg["logging"]["level"]) +logging.basicConfig(level=cfg.logging.level) if os.getenv("ALEMBIC_CONFIGURE_LOGGER", "true") != "false": logging.getLogger("capellacollab").setLevel("WARNING") @@ -25,7 +25,7 @@ # this will overwrite the ini-file sqlalchemy.url path # with the path given in the config of the main code if not config.get_main_option("sqlalchemy.url"): - config.set_main_option("sqlalchemy.url", cfg["database"]["url"]) + config.set_main_option("sqlalchemy.url", cfg.database.url) # Import models diff --git a/backend/capellacollab/config/__init__.py b/backend/capellacollab/config/__init__.py index d8d703145..556783168 100644 --- a/backend/capellacollab/config/__init__.py +++ b/backend/capellacollab/config/__init__.py @@ -1,23 +1,10 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 +from . import generate, loader, models -import logging +if not loader.does_config_exist(): + generate.write_config() -import jsonschema -import jsonschema.exceptions -from . 
import exceptions, loader - -log = logging.getLogger(__name__) -config = loader.load_yaml() - - -def validate_schema(): - config_schema = loader.load_config_schema() - try: - jsonschema.validate(config, config_schema) - except jsonschema.exceptions.ValidationError as error: - raise exceptions.InvalidConfigurationError( - f"{error.__class__.__name__}: {error.message}", - ) from None +config = models.AppConfig(**loader.load_yaml()) diff --git a/backend/capellacollab/config/config_schema.yaml b/backend/capellacollab/config/config_schema.yaml deleted file mode 100644 index d6b5e333b..000000000 --- a/backend/capellacollab/config/config_schema.yaml +++ /dev/null @@ -1,304 +0,0 @@ -# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors -# SPDX-License-Identifier: Apache-2.0 - -type: object -additionalProperties: false -required: - - docker - - k8s - - extensions - - authentication - - prometheus - - database - - initial - - logging - - requests -properties: - docker: - type: object - additionalProperties: false - required: - - registry - - externalRegistry - properties: - registry: - type: string - externalRegistry: - type: string - k8s: - type: object - additionalProperties: false - required: - - storageClassName - - storageAccessMode - - promtail - - namespace - properties: - storageClassName: - type: string - storageAccessMode: - type: string - enum: - - ReadWriteOnce - - ReadOnlyMany - - ReadWriteMany - - ReadWriteOncePod - cluster: - type: object - additionalProperties: false - properties: - imagePullPolicy: - type: string - enum: - - Always - - IfNotPresent - - Never - podSecurityContext: - type: object - additionalProperties: false - properties: - runAsUser: - type: number - runAsGroup: - type: number - fsGroup: - type: number - runAsNonRoot: - type: boolean - promtail: - type: object - additionalProperties: false - required: - - lokiEnabled - properties: - lokiEnabled: - type: boolean - lokiUrl: - type: - - string - lokiUsername: - type: - - string - 
lokiPassword: - type: - - string - serverPort: - type: - - number - anyOf: - - properties: - lokiEnabled: - const: false - - required: - - lokiUrl - - lokiUsername - - lokiPassword - - serverPort - namespace: - type: string - context: - type: string - ingressClassName: - type: string - general: - type: object - additionalProperties: false - required: - - host - - port - - scheme - properties: - host: - type: string - port: - type: ['number', 'string'] - scheme: - type: string - wildcardHost: - type: boolean - extensions: - type: object - additionalProperties: false - required: - - guacamole - properties: - guacamole: - type: object - additionalProperties: false - required: - - baseURI - - publicURI - - username - - password - properties: - baseURI: - type: string - publicURI: - type: string - username: - type: string - password: - type: string - authentication: - type: object - required: - - provider - oneOf: - - type: object - additionalProperties: false - required: - - oauth - - jwt - properties: - provider: - const: oauth - oauth: - type: object - additionalProperties: false - required: - - endpoints - - audience - - scopes - - client - - redirectURI - properties: - endpoints: - type: object - additionalProperties: false - properties: - tokenIssuance: - type: - - string - - 'null' - authorization: - type: - - string - - 'null' - wellKnown: - type: - - string - - 'null' - audience: - type: string - scopes: - type: - - array - - 'null' - items: - type: string - enum: - - openid - client: - type: object - additionalProperties: false - required: - - id - - secret - properties: - id: - type: string - secret: - type: - - string - - 'null' - redirectURI: - type: string - jwt: - type: object - additionalProperties: false - properties: - usernameClaim: - type: string - - type: object - additionalProperties: false - required: - - azure - - jwt - properties: - provider: - const: azure - azure: - type: object - additionalProperties: false - required: - - 
authorizationEndpoint - - client - - redirectURI - properties: - authorizationEndpoint: - type: string - client: - type: object - additionalProperties: false - required: - - id - - secret - properties: - id: - type: string - secret: - type: string - redirectURI: - type: string - jwt: - type: object - additionalProperties: false - properties: - usernameClaim: - type: string - prometheus: - type: object - additionalProperties: false - required: - - url - properties: - url: - type: string - database: - type: object - additionalProperties: false - required: - - url - properties: - url: - type: string - initial: - type: object - additionalProperties: false - required: - - admin - properties: - admin: - type: string - logging: - type: object - additionalProperties: false - required: - - level - - logPath - properties: - logPath: - type: string - level: - type: string - enum: - - CRITICAL - - ERROR - - WARNING - - INFO - - DEBUG - - NOTSET - requests: - type: object - additionalProperties: false - required: - - timeout - properties: - timeout: - type: number - pipelines: - type: object - additionalProperties: false - properties: - timeout: - type: number diff --git a/backend/capellacollab/config/diff.py b/backend/capellacollab/config/diff.py deleted file mode 100644 index ee400663b..000000000 --- a/backend/capellacollab/config/diff.py +++ /dev/null @@ -1,62 +0,0 @@ -# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors -# SPDX-License-Identifier: Apache-2.0 - -# pylint: disable=bad-builtin - -import pathlib - -import deepdiff -import yaml - -from . 
import loader - - -class bcolors: - HEADER = "\033[95m" - OKBLUE = "\033[94m" - OKGREEN = "\033[92m" - WARNING = "\033[93m" - FAIL = "\033[91m" - ENDC = "\033[0m" - - -print("Start comparison of configuration files") - -config_template = yaml.safe_load( - ( - pathlib.Path(__file__).parents[2] / "config" / "config_template.yaml" - ).open() -) - -config = loader.load_yaml() - -diff = deepdiff.DeepDiff( - config, config_template, ignore_order=True, report_repetition=True -) - -for key, value in ( - diff.get("type_changes", {}) | diff.get("values_changed", {}) -).items(): - new_value = value["new_value"] - print( - f"{bcolors.OKBLUE}" - f"Your configuration differs to the template. Key {key} has a different value. Expected value '{new_value}'. If you changed the value on purpose, you can ignore this message." - f"{bcolors.ENDC}" - ) - -for key in diff.get("dictionary_item_removed", {}): - print( - f"{bcolors.WARNING}" - f"Found unknown configuration option {key}!" - f"{bcolors.ENDC}" - ) - -for key in diff.get("dictionary_item_added", ""): - print( - f"{bcolors.FAIL}" - f"Missing configuration option {key}. Please add the key to your configuration file." - f"{bcolors.ENDC}" - ) - -if not diff: - print("Your configuration file is up to date!") diff --git a/backend/capellacollab/config/generate.py b/backend/capellacollab/config/generate.py new file mode 100644 index 000000000..be3f4e6fc --- /dev/null +++ b/backend/capellacollab/config/generate.py @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import logging +import pathlib + +import yaml + +from . 
import models + +log = logging.getLogger(__name__) + + +def write_config() -> None: + """Write the default configuration to a config file.""" + + config_path = ( + pathlib.Path(__file__).parent.parent.parent / "config" / "config.yaml" + ) + + config_path.parent.mkdir(parents=True, exist_ok=True) + + _ansi_red = "\x1b[31;40m" + _ansi_reset = "\x1b[0m" + log.warning( + "%sNo configuration file found. Generating default configuration at %s%s", + _ansi_red, + str(config_path.absolute()), + _ansi_reset, + ) + + with config_path.open("w") as yaml_file: + yaml.dump( + models.AppConfig().model_dump(by_alias=True), + yaml_file, + sort_keys=False, + ) diff --git a/backend/capellacollab/config/loader.py b/backend/capellacollab/config/loader.py index f71d9dca2..712a8a5e1 100644 --- a/backend/capellacollab/config/loader.py +++ b/backend/capellacollab/config/loader.py @@ -11,16 +11,14 @@ from . import exceptions log = logging.getLogger(__name__) +CONFIG_FILE_NAME = "config.yaml" config_locations: list[pathlib.Path] = [ - pathlib.Path(__file__).parents[2] / "config" / "config.yaml", + pathlib.Path(__file__).parents[0] / CONFIG_FILE_NAME, + pathlib.Path(__file__).parents[2] / "config" / CONFIG_FILE_NAME, pathlib.Path(appdirs.user_config_dir("capellacollab", "db")) - / "config.yaml", - pathlib.Path("/etc/capellacollab") / "config.yaml", -] - -config_fallback_locations: list[pathlib.Path] = [ - pathlib.Path(__file__).parents[2] / "config" / "config_template.yaml", + / CONFIG_FILE_NAME, + pathlib.Path("/etc/capellacollab") / CONFIG_FILE_NAME, ] @@ -38,28 +36,27 @@ def construct_mapping(self, node, deep=False): return super().construct_mapping(node, deep) +def does_config_exist() -> bool: + for loc in config_locations: + if loc.exists(): + return True + + return False + + def load_yaml() -> dict: log.debug("Searching for configuration files...") for loc in config_locations: if loc.exists(): - log.info("Loading configuration file at location %s", str(loc)) - return 
yaml.load(loc.open(), UniqueKeyLoader) + log.info( + "Loading configuration file at location %s", + str(loc.absolute()), + ) + with loc.open(encoding="utf-8") as f: + return yaml.load(f, UniqueKeyLoader) else: log.debug( "Didn't find a configuration file at location %s", str(loc) ) - for loc in config_fallback_locations: - if loc.exists(): - log.warning( - "Loading fallback configuration file at location %s", str(loc) - ) - return yaml.safe_load(loc.open()) - raise FileNotFoundError("config.yaml") - - -def load_config_schema() -> dict: - return yaml.safe_load( - (pathlib.Path(__file__).parents[0] / "config_schema.yaml").read_bytes() - ) diff --git a/backend/capellacollab/config/models.py b/backend/capellacollab/config/models.py new file mode 100644 index 000000000..bcdde4d60 --- /dev/null +++ b/backend/capellacollab/config/models.py @@ -0,0 +1,364 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import typing as t + +import pydantic + + +def from_snake_to_camel(name: str) -> str: + components = name.split("_") + return components[0] + "".join( + component.title() for component in components[1:] + ) + + +class BaseConfig(pydantic.BaseModel): + model_config = pydantic.ConfigDict( + extra="forbid", + alias_generator=from_snake_to_camel, + ) + + +class DockerConfig(BaseConfig): + registry: str = pydantic.Field( + default="k3d-myregistry.localhost:12345", + description=( + "The default registry from which to pull Docker images for session containers. " + "This is only used during initial tool creation, you can change the registry for each tool individually." + ), + examples=["ghcr.io/dsd-dbs/capella-dockerimages"], + ) + external_registry: str = pydantic.Field( + default="docker.io", + description=( + "The external registry from which to pull Docker images from Docker Hub, " + "used to enable Loki monitoring." 
+ ), + examples=["docker.io"], + ) + + +class K8sPodSecurityContext(BaseConfig): + run_as_user: int = pydantic.Field( + default=1004370000, + description="The UID under which the Pod's containers will run as non-root.", + examples=[1004370000], + ) + run_as_group: int = pydantic.Field( + default=1004370000, + description=( + "The GID under which the Pod's containers will run, " + "sets all processes of the container to run as this group." + ), + examples=[1004370000], + ) + fs_group: int = pydantic.Field( + default=1004370000, + description=( + "The GID for the volumes that support ownership management, " + "used when setting the ownership of volume filesystems, " + "when determining access for volume filesystems, and for other purposes." + ), + examples=[1004370000], + ) + run_as_non_root: bool = True + + +class K8sClusterConfig(BaseConfig): + image_pull_policy: t.Literal["Always", "IfNotPresent", "Never"] = ( + pydantic.Field( + default="Always", + description=( + "Determines whether an image should be pulled, " + "must match one of the examples." 
+ ), + examples=["Always", "IfNotPresent", "Never"], + ) + ) + pod_security_context: K8sPodSecurityContext | None = ( + K8sPodSecurityContext() + ) + + +class K8sPromtailConfig(BaseConfig): + loki_enabled: bool = pydantic.Field( + default=True, + description="Whether to enable Loki monitoring.", + examples=[True], + ) + loki_url: str = pydantic.Field( + default="http://localhost:30001/loki/api/v1/push", + alias="lokiURL", + description="The URL of the Loki instance to which to push logs.", + examples=["http://localhost:30001/loki/api/v1/push"], + ) + loki_username: str = pydantic.Field( + default="localLokiUser", + description="The username for the Loki instance.", + examples=["localLokiUser"], + ) + loki_password: str = pydantic.Field( + default="localLokiPassword", + description="The password for the Loki instance.", + examples=["localLokiPassword"], + ) + server_port: int = pydantic.Field( + default=3101, + description="The port of the promtail server.", + examples=[3101], + ) + + +class K8sConfig(BaseConfig): + storage_class_name: str = pydantic.Field( + default="local-path", + description="The name of the StorageClass used for persistent volumes.", + examples=["local-path"], + ) + storage_access_mode: t.Literal[ + "ReadWriteOnce", "ReadOnlyMany", "ReadWriteMany", "ReadWriteOncePod" + ] = pydantic.Field( + default="ReadWriteOnce", + description=( + "The access mode of the StorageClass used for persistent volumes," + "must match one of the examples." 
+ ), + examples=[ + "ReadWriteOnce", + "ReadOnlyMany", + "ReadWriteMany", + "ReadWriteOncePod", + ], + ) + promtail: K8sPromtailConfig = K8sPromtailConfig() + namespace: str = pydantic.Field( + default="collab-sessions", + description="The namespace in which to deploy the session containers.", + examples=["collab-sessions"], + ) + cluster: K8sClusterConfig = K8sClusterConfig() + context: str | None = pydantic.Field( + default=None, + description="The name of the Kubernetes context to use.", + examples=["k3d-collab-cluster"], + ) + ingress_class_name: str = pydantic.Field( + default="traefik", + description="The name of the IngressClass to use.", + examples=["traefik", "nginx"], + ) + + +class GeneralConfig(BaseConfig): + host: str = pydantic.Field( + default="localhost", + description="The host name of the application.", + examples=["localhost", "capella.example.com"], + ) + port: int | str = pydantic.Field( + default=8000, + description="The port the application should run on.", + examples=[8000, 443, 8080], + ) + scheme: t.Literal["http", "https"] = pydantic.Field( + default="http", + description='The identifier for the protocol to be used, must be "http" or "https"', + examples=["http", "https"], + ) + + +class ExtensionGuacamoleConfig(BaseConfig): + base_uri: str = pydantic.Field( + default="http://localhost:8080/guacamole", + alias="baseURI", + description="The base URI of the Guacamole instance.", + examples=["http://localhost:8080/guacamole"], + ) + public_uri: str = pydantic.Field( + default="http://localhost:8080/guacamole", + alias="publicURI", + description="The public URI of the Guacamole instance.", + examples=["http://localhost:8080/guacamole"], + ) + username: str = pydantic.Field( + default="guacadmin", + description="The username for the Guacamole instance.", + examples=["guacadmin"], + ) + password: str = pydantic.Field( + default="guacadmin", + description=( + "The password for the Guacamole instance," + "the default should be changed 
immediately." + ), + examples=["guacadmin"], + ) + + +class ExtensionsConfig(BaseConfig): + guacamole: ExtensionGuacamoleConfig = ExtensionGuacamoleConfig() + + +class AuthOauthClientConfig(BaseConfig): + id: str = pydantic.Field( + default="default", description="The authentication provider client ID." + ) + secret: str | None = pydantic.Field( + default=None, description="The authentication provider client secret." + ) + + +class AuthOauthEndpointsConfig(BaseConfig): + token_issuance: str | None = pydantic.Field( + default=None, + description=( + "The URL of the token issuance endpoint. " + "If not set, the URL is read from the well-known endpoint." + ), + ) + authorization: str | None = pydantic.Field( + default=None, + description=( + "The URL of the authorization endpoint. " + "If not set, the URL is read from the well-known endpoint." + ), + ) + well_known: str | None = pydantic.Field( + default="http://localhost:8083/default/.well-known/openid-configuration", + description="The URL of the OpenID Connect discovery document.", + examples=[ + "http://localhost:8083/default/.well-known/openid-configuration" + ], + ) + + +class AuthOauthConfig(BaseConfig): + endpoints: AuthOauthEndpointsConfig = AuthOauthEndpointsConfig() + audience: str = pydantic.Field(default="default") + scopes: list[str] | None = pydantic.Field( + default=["openid"], + description="List of scopes that application needs to access the required attributes.", + ) + client: AuthOauthClientConfig = AuthOauthClientConfig() + redirect_uri: str = pydantic.Field( + default="http://localhost:4200/oauth2/callback", + description="The URI to which the user is redirected after authentication.", + examples=["http://localhost:4200/oauth2/callback"], + alias="redirectURI", + ) + + +class JWTConfig(BaseConfig): + username_claim: str = pydantic.Field( + default="sub", + description="Specifies the key in the JWT payload where the username is stored.", + examples=["sub", "aud", "preferred_username"], + ) + + 
+class AzureClientConfig(BaseConfig): + id: str + secret: str | None = None + + +class AzureConfig(BaseConfig): + authorization_endpoint: str + client: AzureClientConfig + + +class GeneralAuthenticationConfig(BaseConfig): + jwt: JWTConfig = JWTConfig() + + +class OAuthAuthenticationConfig(GeneralAuthenticationConfig): + provider: t.Literal["oauth"] = pydantic.Field( + default="oauth", + description="Indicates the use of OAuth for authentication, not to be changed.", + ) + oauth: AuthOauthConfig = AuthOauthConfig() + + +class AzureAuthenticationConfig(GeneralAuthenticationConfig): + provider: t.Literal["azure"] = pydantic.Field( + default="azure", + description="Indicates the use of Azure AD for authentication, not to be changed.", + ) + azure: AzureConfig + + +class PipelineConfig(BaseConfig): + timeout: int = pydantic.Field( + default=60, + description="The timeout (in minutes) for pipeline runs.", + examples=[60, 90], + ) + + +class DatabaseConfig(BaseConfig): + url: str = pydantic.Field( + default="postgresql://dev:dev@localhost:5432/dev", + description=( + "The URL of the database. 
" + "The format is described here: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING-URIS" + ), + examples=["postgresql://dev:dev@localhost:5432/dev"], + ) + + +class InitialConfig(BaseConfig): + admin: str = pydantic.Field( + default="admin", + description="The username given to the admin user at database initialization and for testing.", + examples=["admin"], + ) + + +class LoggingConfig(BaseConfig): + level: t.Literal[ + "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET" + ] = pydantic.Field( + default="DEBUG", + description="The logging level to use across the entire application.", + examples=["DEBUG", "ERROR"], + ) + log_path: str = pydantic.Field( + default="logs/", + description="The path to the log file (saved as 'backend.log').", + examples=["logs/"], + ) + + +class RequestsConfig(BaseConfig): + timeout: int = pydantic.Field( + default=2, + description=( + "The number (in seconds) to wait for a response from external services. " + "External services are TeamForCapella, Guacamole, Grafana Loki and Prometheus." 
+ ), + examples=[2, 5], + ) + + +class PrometheusConfig(BaseConfig): + url: str = pydantic.Field( + default="http://localhost:8080/prometheus/", + description="The base URL of the Prometheus instance.", + examples=["http://localhost:8080/prometheus/"], + ) + + +class AppConfig(BaseConfig): + docker: DockerConfig = DockerConfig() + k8s: K8sConfig = K8sConfig(context="k3d-collab-cluster") + general: GeneralConfig = GeneralConfig() + extensions: ExtensionsConfig = ExtensionsConfig() + authentication: OAuthAuthenticationConfig | AzureAuthenticationConfig = ( + OAuthAuthenticationConfig() + ) + prometheus: PrometheusConfig = PrometheusConfig() + database: DatabaseConfig = DatabaseConfig() + initial: InitialConfig = InitialConfig() + logging: LoggingConfig = LoggingConfig() + requests: RequestsConfig = RequestsConfig() + pipelines: PipelineConfig = PipelineConfig() diff --git a/backend/capellacollab/core/authentication/__init__.py b/backend/capellacollab/core/authentication/__init__.py index f05e811ad..1fe88f71e 100644 --- a/backend/capellacollab/core/authentication/__init__.py +++ b/backend/capellacollab/core/authentication/__init__.py @@ -14,11 +14,10 @@ def get_authentication_entrypoint(): for i in metadata.entry_points().select( group="capellacollab.authentication.providers" ) - if i.name == config["authentication"]["provider"] + if i.name == config.authentication.provider ) return ep except StopIteration: raise ValueError( - "Unknown authentication provider " - + config["authentication"]["provider"] + "Unknown authentication provider " + config.authentication.provider ) from None diff --git a/backend/capellacollab/core/authentication/jwt_bearer.py b/backend/capellacollab/core/authentication/jwt_bearer.py index 60ba2a5f1..9b60b439a 100644 --- a/backend/capellacollab/core/authentication/jwt_bearer.py +++ b/backend/capellacollab/core/authentication/jwt_bearer.py @@ -52,9 +52,7 @@ async def __call__( # type: ignore return None def get_username(self, token_decoded: 
dict[str, str]) -> str: - return token_decoded[ - config["authentication"]["jwt"]["usernameClaim"] - ].strip() + return token_decoded[config.authentication.jwt.username_claim].strip() def initialize_user(self, token_decoded: dict[str, str]): with database.SessionLocal() as session: diff --git a/backend/capellacollab/core/authentication/provider/azure/__main__.py b/backend/capellacollab/core/authentication/provider/azure/__main__.py index ac9822ef5..36967a8c8 100644 --- a/backend/capellacollab/core/authentication/provider/azure/__main__.py +++ b/backend/capellacollab/core/authentication/provider/azure/__main__.py @@ -4,18 +4,21 @@ import typing as t from capellacollab.config import config +from capellacollab.config import models as config_models from . import keystore # Our "singleton" key store: KeyStore = keystore._KeyStore(jwks_uri=keystore.get_jwks_uri_for_azure_ad()) - -cfg = config["authentication"]["azure"] +assert isinstance( + config.authentication, config_models.AzureAuthenticationConfig +) +cfg = config.authentication.azure def get_jwk_cfg(token: str) -> dict[str, t.Any]: return { - "audience": cfg["client"]["id"], + "audience": cfg.client.id if cfg else None, "key": KeyStore.key_for_token(token).model_dump(), } diff --git a/backend/capellacollab/core/authentication/provider/azure/keystore.py b/backend/capellacollab/core/authentication/provider/azure/keystore.py index f72a8c24b..36bbac429 100644 --- a/backend/capellacollab/core/authentication/provider/azure/keystore.py +++ b/backend/capellacollab/core/authentication/provider/azure/keystore.py @@ -12,11 +12,15 @@ from jose import jwt from capellacollab.config import config +from capellacollab.config import models as config_models from .. 
import models as provider_models log = logging.getLogger(__name__) -cfg = config["authentication"]["azure"] +assert isinstance( + config.authentication, config_models.AzureAuthenticationConfig +) +cfg = config.authentication.azure # Copied and adapted from https://github.com/marpaia/jwks/blob/master/jwks/jwks.py: @@ -47,9 +51,7 @@ def keys_need_refresh(self) -> bool: def refresh_keys(self) -> None: try: - resp = requests.get( - self.jwks_uri, timeout=config["requests"]["timeout"] - ) + resp = requests.get(self.jwks_uri, timeout=config.requests.timeout) except Exception: log.error("Could not retrieve JWKS data from %s", self.jwks_uri) return @@ -90,7 +92,7 @@ def key_for_token( def get_jwks_uri_for_azure_ad( - authorization_endpoint=cfg["authorizationEndpoint"], + authorization_endpoint=cfg.authorization_endpoint, ): discoveryEndpoint = ( f"{authorization_endpoint}/v2.0/.well-known/openid-configuration" @@ -98,7 +100,7 @@ def get_jwks_uri_for_azure_ad( openid_config = requests.get( discoveryEndpoint, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ).json() return openid_config["jwks_uri"] diff --git a/backend/capellacollab/core/authentication/provider/azure/routes.py b/backend/capellacollab/core/authentication/provider/azure/routes.py index e734f2c6a..90888934a 100644 --- a/backend/capellacollab/core/authentication/provider/azure/routes.py +++ b/backend/capellacollab/core/authentication/provider/azure/routes.py @@ -12,6 +12,7 @@ import capellacollab.users.crud as users_crud from capellacollab.config import config +from capellacollab.config import models as config_models from capellacollab.core import database from capellacollab.core.authentication import injectables as auth_injectables from capellacollab.core.authentication import jwt_bearer @@ -22,15 +23,20 @@ from capellacollab.users.models import Role router = fastapi.APIRouter() -cfg = config["authentication"]["azure"] + +assert isinstance( + config.authentication, 
config_models.AzureAuthenticationConfig +) + +cfg = config.authentication.azure @lru_cache def ad_session(): return ConfidentialClientApplication( - cfg["client"]["id"], - client_credential=cfg["client"]["secret"], - authority=cfg["authorizationEndpoint"], + cfg.client.id, + client_credential=cfg.client.secret, + authority=cfg.authorization_endpoint, ) diff --git a/backend/capellacollab/core/authentication/provider/oauth/__main__.py b/backend/capellacollab/core/authentication/provider/oauth/__main__.py index bbc0d66d4..4f4d7272d 100644 --- a/backend/capellacollab/core/authentication/provider/oauth/__main__.py +++ b/backend/capellacollab/core/authentication/provider/oauth/__main__.py @@ -5,15 +5,19 @@ import typing as t from capellacollab.config import config +from capellacollab.config import models as config_models from .keystore import KeyStore -cfg = config["authentication"]["oauth"] +assert isinstance( + config.authentication, config_models.OAuthAuthenticationConfig +) +cfg = config.authentication.oauth def get_jwk_cfg(token: str) -> dict[str, t.Any]: return { "algorithms": ["RS256"], - "audience": cfg["audience"] or cfg["client"]["id"], + "audience": cfg.audience or cfg.client.id, "key": KeyStore.key_for_token(token).model_dump(), } diff --git a/backend/capellacollab/core/authentication/provider/oauth/flow.py b/backend/capellacollab/core/authentication/provider/oauth/flow.py index d51b0a6b3..e0a3d09eb 100644 --- a/backend/capellacollab/core/authentication/provider/oauth/flow.py +++ b/backend/capellacollab/core/authentication/provider/oauth/flow.py @@ -11,17 +11,22 @@ from requests_oauthlib import OAuth2Session from capellacollab.config import config +from capellacollab.config import models as config_models -cfg = config["authentication"]["oauth"] +assert isinstance( + config.authentication, config_models.OAuthAuthenticationConfig +) +cfg = config.authentication.oauth logger = logging.getLogger(__name__) auth_args = {} -if cfg["scopes"]: - auth_args["scope"] = 
cfg["scopes"] +if cfg.scopes: + auth_args["scope"] = cfg.scopes + auth_session = OAuth2Session( - cfg["client"]["id"], redirect_uri=cfg["redirectURI"], **auth_args + cfg.client.id, redirect_uri=cfg.redirect_uri, **auth_args ) @@ -38,8 +43,8 @@ def get_token(code: str) -> dict[str, t.Any]: return auth_session.fetch_token( read_well_known()["token_endpoint"], code=code, - client_id=cfg["client"]["id"], - client_secret=cfg["client"]["secret"], + client_id=cfg.client.id, + client_secret=cfg.client.secret, ) @@ -48,8 +53,8 @@ def refresh_token(_refresh_token: str) -> dict[str, t.Any]: return auth_session.refresh_token( read_well_known()["token_endpoint"], refresh_token=_refresh_token, - client_id=cfg["client"]["id"], - client_secret=cfg["client"]["secret"], + client_id=cfg.client.id, + client_secret=cfg.client.secret, ) except Exception as e: logger.debug("Could not refresh token because of exception %s", str(e)) @@ -63,10 +68,10 @@ def refresh_token(_refresh_token: str) -> dict[str, t.Any]: def read_well_known() -> dict[str, t.Any]: - if cfg["endpoints"]["wellKnown"]: + if cfg.endpoints.well_known: r = requests.get( - cfg["endpoints"]["wellKnown"], - timeout=config["requests"]["timeout"], + cfg.endpoints.well_known, + timeout=config.requests.timeout, ) r.raise_for_status() @@ -75,11 +80,11 @@ def read_well_known() -> dict[str, t.Any]: authorization_endpoint = resp["authorization_endpoint"] token_endpoint = resp["token_endpoint"] - if cfg["endpoints"]["authorization"]: - authorization_endpoint = cfg["endpoints"]["authorization"] + if cfg.endpoints.authorization: + authorization_endpoint = cfg.endpoints.authorization - if cfg["endpoints"]["tokenIssuance"]: - token_endpoint = cfg["endpoints"]["tokenIssuance"] + if cfg.endpoints.token_issuance: + token_endpoint = cfg.endpoints.token_issuance return { "authorization_endpoint": authorization_endpoint, diff --git a/backend/capellacollab/core/authentication/provider/oauth/keystore.py 
b/backend/capellacollab/core/authentication/provider/oauth/keystore.py index 35ef7f8a7..3b266c1e1 100644 --- a/backend/capellacollab/core/authentication/provider/oauth/keystore.py +++ b/backend/capellacollab/core/authentication/provider/oauth/keystore.py @@ -12,12 +12,16 @@ import requests from capellacollab.config import config +from capellacollab.config import models as config_models from capellacollab.core.authentication.provider import models from .. import models as provider_models log = logging.getLogger(__name__) -cfg = config["authentication"]["oauth"] +assert isinstance( + config.authentication, config_models.OAuthAuthenticationConfig +) +cfg = config.authentication.oauth # Copied and adapted from https://github.com/marpaia/jwks/blob/master/jwks/jwks.py: @@ -50,9 +54,7 @@ def refresh_keys(self) -> None: if not self.jwks_uri: self.jwks_uri = self.get_jwks_uri() try: - resp = requests.get( - self.jwks_uri, timeout=config["requests"]["timeout"] - ) + resp = requests.get(self.jwks_uri, timeout=config.requests.timeout) except Exception: log.error("Could not retrieve JWKS data from %s", self.jwks_uri) return @@ -90,10 +92,10 @@ def key_for_token( return self.key_for_token(token, in_retry=1) -def _get_jwks_uri(wellknown_endpoint=cfg["endpoints"]["wellKnown"]): +def _get_jwks_uri(wellknown_endpoint=cfg.endpoints.well_known): openid_config = requests.get( wellknown_endpoint, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ).json() return openid_config["jwks_uri"] diff --git a/backend/capellacollab/core/database/__init__.py b/backend/capellacollab/core/database/__init__.py index 557ac4e91..5e603bb83 100644 --- a/backend/capellacollab/core/database/__init__.py +++ b/backend/capellacollab/core/database/__init__.py @@ -11,7 +11,7 @@ from capellacollab.config import config engine = sa.create_engine( - config["database"]["url"], + config.database.url, connect_args={"connect_timeout": 5, "options": "-c timezone=utc"}, ) SessionLocal = 
orm.sessionmaker(autocommit=False, autoflush=False, bind=engine) diff --git a/backend/capellacollab/core/database/migration.py b/backend/capellacollab/core/database/migration.py index 7f6411e16..9dc4605b7 100644 --- a/backend/capellacollab/core/database/migration.py +++ b/backend/capellacollab/core/database/migration.py @@ -84,10 +84,10 @@ def migrate_db(engine, database_url: str): def initialize_admin_user(db: orm.Session): - LOGGER.info("Initialized adminuser %s", config["initial"]["admin"]) + LOGGER.info("Initialized adminuser %s", config.initial.admin) admin_user = users_crud.create_user( db=db, - username=config["initial"]["admin"], + username=config.initial.admin, role=users_models.Role.ADMIN, ) events_crud.create_user_creation_event(db, admin_user) @@ -328,7 +328,7 @@ def create_jupyter_tool( def create_tools(db: orm.Session): if core.DEVELOPMENT_MODE: - registry = config["docker"]["registry"] + registry = config.docker.registry else: registry = "ghcr.io/dsd-dbs/capella-dockerimages" diff --git a/backend/capellacollab/core/logging/__init__.py b/backend/capellacollab/core/logging/__init__.py index a5474d0d5..f4ddcbcb1 100644 --- a/backend/capellacollab/core/logging/__init__.py +++ b/backend/capellacollab/core/logging/__init__.py @@ -16,7 +16,7 @@ from capellacollab import config from capellacollab.core.authentication import injectables as auth_injectables -LOGGING_LEVEL = config.config["logging"]["level"] +LOGGING_LEVEL = config.config.logging.level class CustomFormatter(logging.Formatter): diff --git a/backend/capellacollab/core/logging/loki.py b/backend/capellacollab/core/logging/loki.py index bef66ed35..370fafbed 100644 --- a/backend/capellacollab/core/logging/loki.py +++ b/backend/capellacollab/core/logging/loki.py @@ -12,11 +12,12 @@ from requests import auth from capellacollab.config import config +from capellacollab.config import models as config_models from . 
import exceptions -LOGGING_LEVEL = config["logging"]["level"] -PROMTAIL_CONFIGURATION: dict[str, str] = config["k8s"]["promtail"] +LOGGING_LEVEL = config.logging.level +PROMTAIL_CONFIGURATION: config_models.K8sPromtailConfig = config.k8s.promtail class LogEntry(t.TypedDict): @@ -46,12 +47,12 @@ def push_logs_to_loki(entries: list[LogEntry], labels): # Send the log data to Loki try: response = requests.post( - PROMTAIL_CONFIGURATION["lokiUrl"] + "/push", + PROMTAIL_CONFIGURATION.loki_url + "/push", data=log_data, headers={"Content-Type": "application/json"}, auth=auth.HTTPBasicAuth( - PROMTAIL_CONFIGURATION["lokiUsername"], - PROMTAIL_CONFIGURATION["lokiPassword"], + PROMTAIL_CONFIGURATION.loki_username, + PROMTAIL_CONFIGURATION.loki_password, ), timeout=10, ) @@ -78,12 +79,12 @@ def fetch_logs_from_loki( # Send the query request to Loki try: response = requests.get( - PROMTAIL_CONFIGURATION["lokiUrl"] + "/query_range", + PROMTAIL_CONFIGURATION.loki_url + "/query_range", params=params, headers={"Content-Type": "application/json"}, auth=auth.HTTPBasicAuth( - PROMTAIL_CONFIGURATION["lokiUsername"], - PROMTAIL_CONFIGURATION["lokiPassword"], + PROMTAIL_CONFIGURATION.loki_username, + PROMTAIL_CONFIGURATION.loki_password, ), timeout=5, ) diff --git a/backend/capellacollab/core/metadata.py b/backend/capellacollab/core/metadata.py index a61cef903..bbfd04096 100644 --- a/backend/capellacollab/core/metadata.py +++ b/backend/capellacollab/core/metadata.py @@ -1,8 +1,6 @@ # SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 -import typing as t - import fastapi import pydantic from sqlalchemy import orm @@ -11,7 +9,9 @@ from capellacollab.config import config from capellacollab.core import database from capellacollab.settings.configuration import core as config_core -from capellacollab.settings.configuration import models as config_models +from capellacollab.settings.configuration import ( + models as settings_config_models, +) 
class Metadata(pydantic.BaseModel): @@ -31,8 +31,6 @@ class Metadata(pydantic.BaseModel): router = fastapi.APIRouter() -general_cfg: dict[str, t.Any] = config["general"] - @router.get( "/metadata", @@ -40,14 +38,14 @@ class Metadata(pydantic.BaseModel): ) def get_metadata(db: orm.Session = fastapi.Depends(database.get_db)): cfg = config_core.get_config(db, "global") - assert isinstance(cfg, config_models.GlobalConfiguration) + assert isinstance(cfg, settings_config_models.GlobalConfiguration) return Metadata.model_validate( cfg.metadata.model_dump() | { "version": capellacollab.__version__, - "host": general_cfg.get("host"), - "port": str(general_cfg.get("port")), - "protocol": general_cfg.get("scheme"), + "host": config.general.host, + "port": str(config.general.port), + "protocol": config.general.scheme, } ) diff --git a/backend/capellacollab/projects/toolmodels/backups/runs/interface.py b/backend/capellacollab/projects/toolmodels/backups/runs/interface.py index 2e4ccde14..2d8dea86a 100644 --- a/backend/capellacollab/projects/toolmodels/backups/runs/interface.py +++ b/backend/capellacollab/projects/toolmodels/backups/runs/interface.py @@ -24,7 +24,7 @@ log = logging.getLogger(__name__) -PIPELINES_TIMEOUT = config.get("pipelines", {}).get("timeout", 60) +PIPELINES_TIMEOUT = config.pipelines.timeout async def schedule_refresh_and_trigger_pipeline_jobs(interval=5): diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py index 898940bd3..1a557137f 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/github/handler.py @@ -66,7 +66,7 @@ def __get_file_from_repository( ) -> requests.Response: return requests.get( f"{self.git_instance.api_url}/repos/{project_id}/contents/{parse.quote(trusted_file_path)}?ref={parse.quote(revision, safe='')}", - 
timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, headers=headers, ) @@ -112,7 +112,7 @@ def get_last_pipeline_runs( response = requests.get( f"{self.git_instance.api_url}/repos/{project_id}/actions/runs?branch={parse.quote(self.git_model.revision, safe='')}&per_page=20", headers=headers, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() return response.json()["workflow_runs"] @@ -128,7 +128,7 @@ def get_artifact_from_job( artifact_response = requests.get( f"{self.git_instance.api_url}/repos/{project_id}/actions/artifacts/{artifact_id}/zip", headers=self.__get_headers(self.git_model.password), - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) artifact_response.raise_for_status() @@ -146,7 +146,7 @@ def get_last_updated_for_file_path( if self.git_model.password else None ), - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() if len(response.json()) == 0: @@ -186,7 +186,7 @@ def __get_latest_artifact_metadata(self, project_id: str, job_id: str): response = requests.get( f"{self.git_instance.api_url}/repos/{project_id}/actions/runs/{job_id}/artifacts", headers=self.__get_headers(self.git_model.password), - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() artifact = response.json()["artifacts"][0] diff --git a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py index a8c62b82d..f36b586b5 100644 --- a/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py +++ b/backend/capellacollab/projects/toolmodels/modelsources/git/gitlab/handler.py @@ -27,7 +27,7 @@ async def get_project_id_by_git_url(self) -> str: async with session.get( f"{self.git_instance.api_url}/projects/{project_name_encoded}", headers={"PRIVATE-TOKEN": 
self.git_model.password}, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) as response: if response.status == 403: raise exceptions.GitlabAccessDeniedError @@ -60,7 +60,7 @@ def get_last_updated_for_file_path( response = requests.get( f"{self.git_instance.api_url}/projects/{project_id}/repository/commits?ref_name={revision or self.git_model.revision}&path={file_path}", headers={"PRIVATE-TOKEN": self.git_model.password}, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() if len(response.json()) == 0: @@ -77,7 +77,7 @@ async def __get_last_pipeline_run_ids( async with session.get( f"{self.git_instance.api_url}/projects/{project_id}/pipelines?ref={parse.quote(self.git_model.revision, safe='')}&per_page=20", headers={"PRIVATE-TOKEN": self.git_model.password}, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -94,7 +94,7 @@ async def __get_job_id_for_job_name( async with session.get( f"{self.git_instance.api_url}/projects/{project_id}/pipelines/{pipeline_id}/jobs", headers={"PRIVATE-TOKEN": self.git_model.password}, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) as response: response.raise_for_status() @@ -142,7 +142,7 @@ def get_artifact_from_job( response = requests.get( f"{self.git_instance.api_url}/projects/{project_id}/jobs/{job_id}/artifacts/{trusted_path_to_artifact}", headers={"PRIVATE-TOKEN": self.git_model.password}, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() return response @@ -157,7 +157,7 @@ async def get_file_from_repository( response = requests.get( f"{self.git_instance.api_url}/projects/{project_id}/repository/files/{parse.quote(trusted_file_path, safe='')}?ref={parse.quote(branch, safe='')}", headers={"PRIVATE-TOKEN": self.git_model.password}, - timeout=config["requests"]["timeout"], + 
timeout=config.requests.timeout, ) if response.status_code == 404: diff --git a/backend/capellacollab/sessions/hooks/guacamole.py b/backend/capellacollab/sessions/hooks/guacamole.py index cb2204618..dce8f001c 100644 --- a/backend/capellacollab/sessions/hooks/guacamole.py +++ b/backend/capellacollab/sessions/hooks/guacamole.py @@ -32,7 +32,7 @@ class GuacamoleConfig(t.TypedDict): class GuacamoleIntegration(interface.HookRegistration): - _baseURI = config["extensions"]["guacamole"]["baseURI"] + _baseURI = config.extensions.guacamole.base_uri _prefix = f"{_baseURI}/api/session/data/postgresql" _headers = {"Content-Type": "application/x-www-form-urlencoded"} _proxies = { @@ -99,8 +99,7 @@ def session_connection_hook( # type: ignore[override] ) return interface.SessionConnectionHookResult( local_storage={"GUAC_AUTH": json.dumps(token)}, - redirect_url=config["extensions"]["guacamole"]["publicURI"] - + "/#/", + redirect_url=config.extensions.guacamole.public_uri + "/#/", ) def pre_session_termination_hook( # type: ignore[override] @@ -129,11 +128,11 @@ def _get_admin_token(cls) -> str: r = requests.post( f"{cls._baseURI}/api/tokens", auth=requests_auth.HTTPBasicAuth( - config["extensions"]["guacamole"]["username"], - config["extensions"]["guacamole"]["password"], + config.extensions.guacamole.username, + config.extensions.guacamole.password, ), headers=cls._headers, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) try: @@ -158,7 +157,7 @@ def _get_token(cls, username: str, password: str) -> str: f"{cls._baseURI}/api/tokens", auth=requests_auth.HTTPBasicAuth(username, password), headers=cls._headers, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) r.raise_for_status() @@ -178,7 +177,7 @@ def _create_user( "password": password, "attributes": {}, }, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) r.raise_for_status() @@ 
-197,7 +196,7 @@ def _assign_user_to_connection( "value": "READ", } ], - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) r.raise_for_status() @@ -206,7 +205,7 @@ def _assign_user_to_connection( def _delete_user(cls, token: str, username: str): r = requests.delete( f"{cls._prefix}/users/{username}?token={token}", - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) try: @@ -224,7 +223,7 @@ def _delete_user(cls, token: str, username: str): def _delete_connection(cls, token: str, connection_name: str): r = requests.delete( f"{cls._prefix}/connections/{connection_name}?token={token}", - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) try: @@ -262,7 +261,7 @@ def _create_connection( }, "attributes": {}, }, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, proxies=cls._proxies, ) diff --git a/backend/capellacollab/sessions/hooks/http.py b/backend/capellacollab/sessions/hooks/http.py index e8d5bdcaf..c43e83ed1 100644 --- a/backend/capellacollab/sessions/hooks/http.py +++ b/backend/capellacollab/sessions/hooks/http.py @@ -2,9 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 import logging -import typing as t -from capellacollab.config import config from capellacollab.core import models as core_models from capellacollab.tools import models as tools_models @@ -13,17 +11,7 @@ from . 
import interface -class GeneralConfigEnvironment(t.TypedDict): - scheme: str - host: str - port: str - wildcardHost: t.NotRequired[bool | None] - - class HTTPIntegration(interface.HookRegistration): - def __init__(self): - self._general_conf: GeneralConfigEnvironment = config["general"] - def session_connection_hook( # type: ignore[override] self, db_session: sessions_models.DatabaseSession, diff --git a/backend/capellacollab/sessions/hooks/jupyter.py b/backend/capellacollab/sessions/hooks/jupyter.py index ebc5097e1..27597ae20 100644 --- a/backend/capellacollab/sessions/hooks/jupyter.py +++ b/backend/capellacollab/sessions/hooks/jupyter.py @@ -3,7 +3,6 @@ import logging import pathlib -import typing as t from sqlalchemy import orm @@ -20,13 +19,6 @@ log = logging.getLogger(__name__) -class GeneralConfigEnvironment(t.TypedDict): - scheme: str - host: str - port: str - wildcardHost: t.NotRequired[bool | None] - - class JupyterIntegration(interface.HookRegistration): def configuration_hook( # type: ignore[override] self, diff --git a/backend/capellacollab/sessions/idletimeout.py b/backend/capellacollab/sessions/idletimeout.py index 697840122..164b0f9e1 100644 --- a/backend/capellacollab/sessions/idletimeout.py +++ b/backend/capellacollab/sessions/idletimeout.py @@ -18,11 +18,11 @@ def terminate_idle_session(): - url = config["prometheus"]["url"] + url = config.prometheus.url url += "/".join(("api", "v1", 'query?query=ALERTS{alertstate="firing"}')) response = requests.get( url, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) log.debug("Requested alerts %d", response.status_code) if response.status_code != 200: @@ -60,7 +60,7 @@ async def loop(): def run(): - logging.basicConfig(level=config["logging"]["level"]) + logging.basicConfig(level=config.logging.level) terminate_idle_session() diff --git a/backend/capellacollab/sessions/injection.py b/backend/capellacollab/sessions/injection.py index 1e3c34fa0..4fe4c42b6 100644 --- 
a/backend/capellacollab/sessions/injection.py +++ b/backend/capellacollab/sessions/injection.py @@ -19,12 +19,12 @@ def get_last_seen(sid: str) -> str: if core.DEVELOPMENT_MODE: return "Disabled in development mode" - url = config["prometheus"]["url"] + url = config.prometheus.url url += "/".join(("api", "v1", "query?query=idletime_minutes")) try: response = requests.get( url, - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, ) response.raise_for_status() for session in response.json()["data"]["result"]: diff --git a/backend/capellacollab/sessions/operators/k8s.py b/backend/capellacollab/sessions/operators/k8s.py index 3281f1f37..ad1adbdd3 100644 --- a/backend/capellacollab/sessions/operators/k8s.py +++ b/backend/capellacollab/sessions/operators/k8s.py @@ -24,6 +24,7 @@ from kubernetes.client import exceptions from capellacollab.config import config +from capellacollab.config import models as config_models from capellacollab.sessions import models as sessions_models from capellacollab.tools import models as tools_models @@ -38,37 +39,17 @@ "backend_sessions_killed", "Sessions killed, either by user or timeout" ) -external_registry: str = config["docker"]["externalRegistry"] +cfg: config_models.K8sConfig = config.k8s -cfg: dict[str, t.Any] = config["k8s"] +namespace: str = cfg.namespace +loki_enabled: bool = cfg.promtail.loki_enabled -namespace: str = cfg["namespace"] -storage_access_mode: str = cfg["storageAccessMode"] -storage_class_name: str = cfg["storageClassName"] - -loki_enabled: bool = cfg["promtail"]["lokiEnabled"] - - -def deserialize_kubernetes_resource(content: t.Any, resource: str): - # This is needed as "workaround" for the deserialize function - class FakeKubeResponse: - def __init__(self, obj): - self.data = json.dumps(obj) - - return client.ApiClient().deserialize(FakeKubeResponse(content), resource) - - -# Resolve securityContext and pullPolicy -image_pull_policy: str = cfg.get("cluster", {}).get( - "imagePullPolicy", 
"Always" -) +image_pull_policy: str = cfg.cluster.image_pull_policy pod_security_context = None -if _pod_security_context := cfg.get("cluster", {}).get( - "podSecurityContext", None -): - pod_security_context = deserialize_kubernetes_resource( - _pod_security_context, client.V1PodSecurityContext.__name__ +if _pod_security_context := cfg.cluster.pod_security_context: + pod_security_context = client.V1PodSecurityContext( + **_pod_security_context.__dict__ ) @@ -91,9 +72,9 @@ def __init__(self) -> None: def load_config(self) -> None: self.kubectl_arguments = [] - if cfg.get("context", None): - self.kubectl_arguments += ["--context", cfg["context"]] - kubernetes.config.load_config(context=cfg["context"]) + if cfg.context: + self.kubectl_arguments += ["--context", cfg.context] + kubernetes.config.load_config(context=cfg.context) else: kubernetes.config.load_incluster_config() @@ -521,7 +502,7 @@ def _create_deployment( containers.append( client.V1Container( name="promtail", - image=f"{external_registry}/grafana/promtail", + image=f"{config.docker.external_registry}/grafana/promtail", args=[ "--config.file=/etc/promtail/promtail.yaml", "-log-config-reverse-order", @@ -581,7 +562,7 @@ def create_secret( if overwrite: self.delete_secret(name) - return self.v1_core.create_namespaced_secret(cfg["namespace"], secret) + return self.v1_core.create_namespaced_secret(cfg.namespace, secret) def _create_disruption_budget( self, @@ -662,8 +643,8 @@ def create_persistent_volume( api_version="v1", metadata=client.V1ObjectMeta(name=name, labels=labels), spec=client.V1PersistentVolumeClaimSpec( - access_modes=[storage_access_mode], - storage_class_name=storage_class_name, + access_modes=[cfg.storage_access_mode], + storage_class_name=cfg.storage_class_name, resources=client.V1ResourceRequirements( requests={"storage": size} ), @@ -756,18 +737,14 @@ def _create_promtail_configmap( "promtail.yaml": yaml.dump( { "server": { - "http_listen_port": cfg["promtail"]["serverPort"], + 
"http_listen_port": cfg.promtail.server_port, }, "clients": [ { - "url": cfg["promtail"]["lokiUrl"] + "/push", + "url": cfg.promtail.loki_url + "/push", "basic_auth": { - "username": cfg["promtail"][ - "lokiUsername" - ], - "password": cfg["promtail"][ - "lokiPassword" - ], + "username": cfg.promtail.loki_username, + "password": cfg.promtail.loki_password, }, } ], @@ -995,7 +972,7 @@ def download_file(self, _id: str, filename: str) -> t.Iterable[bytes]: self.v1_core.connect_get_namespaced_pod_exec, pod_name, container=_id, - namespace=cfg["namespace"], + namespace=cfg.namespace, command=exec_command, stderr=True, stdin=False, diff --git a/backend/capellacollab/sessions/util.py b/backend/capellacollab/sessions/util.py index 9466afe1b..f92ebc6a3 100644 --- a/backend/capellacollab/sessions/util.py +++ b/backend/capellacollab/sessions/util.py @@ -4,11 +4,10 @@ import logging import random import string -import typing as t from sqlalchemy import orm -from capellacollab import config +from capellacollab.config import config from capellacollab.core import credentials from capellacollab.core import models as core_models from capellacollab.sessions import hooks @@ -52,8 +51,6 @@ def get_environment( connection_method: tools_models.ToolSessionConnectionMethod, session_id: str, ) -> models.SessionEnvironment: - general_cfg: dict[str, t.Any] = config.config["general"] - if isinstance(connection_method, tools_models.HTTPConnectionMethod): container_port = connection_method.ports.http elif isinstance(connection_method, tools_models.GuacamoleConnectionMethod): @@ -69,10 +66,10 @@ def get_environment( "CAPELLACOLLAB_SESSION_REQUESTER_USERNAME": user.name, "CAPELLACOLLAB_SESSIONS_BASE_PATH": f"/session/{session_id}", "CAPELLACOLLAB_SESSION_CONNECTION_METHOD_TYPE": connection_method.type, - "CAPELLACOLLAB_ORIGIN_BASE_URL": f"{general_cfg.get('scheme')}://{general_cfg.get('host')}:{general_cfg.get('port')}", - "CAPELLACOLLAB_SESSIONS_SCHEME": general_cfg["scheme"], - 
"CAPELLACOLLAB_SESSIONS_HOST": general_cfg["host"], - "CAPELLACOLLAB_SESSIONS_PORT": str(general_cfg["port"]), + "CAPELLACOLLAB_ORIGIN_BASE_URL": f"{config.general.scheme}://{config.general.host}:{config.general.port}", + "CAPELLACOLLAB_SESSIONS_SCHEME": config.general.scheme, + "CAPELLACOLLAB_SESSIONS_HOST": config.general.host, + "CAPELLACOLLAB_SESSIONS_PORT": str(config.general.port), "CAPELLACOLLAB_SESSION_CONTAINER_PORT": str(container_port), } diff --git a/backend/capellacollab/settings/modelsources/t4c/interface.py b/backend/capellacollab/settings/modelsources/t4c/interface.py index 2b4e7b849..23b991b66 100644 --- a/backend/capellacollab/settings/modelsources/t4c/interface.py +++ b/backend/capellacollab/settings/modelsources/t4c/interface.py @@ -23,7 +23,7 @@ def get_t4c_status( auth=requests_auth.HTTPBasicAuth( instance.username, instance.password ), - timeout=config.config["requests"]["timeout"], + timeout=config.config.requests.timeout, ) except requests.Timeout: raise fastapi.HTTPException( diff --git a/backend/capellacollab/settings/modelsources/t4c/repositories/interface.py b/backend/capellacollab/settings/modelsources/t4c/repositories/interface.py index 0cd5bdade..e93074a62 100644 --- a/backend/capellacollab/settings/modelsources/t4c/repositories/interface.py +++ b/backend/capellacollab/settings/modelsources/t4c/repositories/interface.py @@ -128,7 +128,7 @@ def make_request( method, url, auth=auth.HTTPBasicAuth(instance.username, instance.password), - timeout=config["requests"]["timeout"], + timeout=config.requests.timeout, **kwargs, ) diff --git a/backend/config/config_template.yaml b/backend/config/config_template.yaml deleted file mode 100644 index 2a4cc042a..000000000 --- a/backend/config/config_template.yaml +++ /dev/null @@ -1,99 +0,0 @@ -# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors -# SPDX-License-Identifier: Apache-2.0 - -docker: - registry: k3d-myregistry.localhost:12345 - externalRegistry: docker.io - -k8s: - # Only 
required when using operator k8s - context: k3d-collab-cluster # Only required, if you'd like to use a local k3d environment - namespace: collab-sessions - - # apiURL: dummy # Only required when no kubectl context is available - # token: dummy # Only required when no kubectl context is available - - storageClassName: local-path - storageAccessMode: ReadWriteOnce - - cluster: - imagePullPolicy: Always - podSecurityContext: - runAsUser: 1004370000 - runAsGroup: 1004370000 - fsGroup: 1004370000 - runAsNonRoot: true - - promtail: - lokiEnabled: True - lokiUrl: http://localhost:30001/loki/api/v1/push - lokiUsername: localLokiUser - lokiPassword: localLokiPassword - serverPort: 3101 - -general: - host: localhost - port: 8000 - scheme: http - wildcardHost: False - -extensions: - guacamole: - baseURI: http://localhost:8080/guacamole - publicURI: http://localhost:8080/guacamole - - username: guacadmin - password: guacadmin - -authentication: - provider: oauth # oauth | azure - jwt: - usernameClaim: sub # preferred_username - - oauth: - # Only required when using provider oauth - endpoints: - wellKnown: http://localhost:8083/default/.well-known/openid-configuration - tokenIssuance: - authorization: - - audience: default - - scopes: - - openid - - client: - id: default - secret: - - redirectURI: http://localhost:4200/oauth2/callback - - # azure: - # # Only required when using provider azure - # authorizationEndpoint: http://tbd - - # client: - # id: tbd - # secret: tbd - - # audience: tbd - # redirectURI: http://localhost:4200/oauth2/callback - -pipelines: - timeout: 60 - -database: - url: postgresql://dev:dev@localhost:5432/dev - -initial: - admin: admin - -logging: - level: DEBUG - logPath: logs/ - -requests: - timeout: 2 - -prometheus: - url: http://localhost:8080/prometheus/ diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 441d4fa46..75d4ad1bd 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -38,7 +38,6 @@ dependencies = [ 
"sqlalchemy>=2.0.0", "uvicorn[standard]", "python-slugify[unidecode]", - "jsonschema", "starlette-prometheus", "fastapi-pagination>=0.12.19", "aiohttp", @@ -115,7 +114,6 @@ module = [ "prometheus_client.*", "deepdiff.*", "appdirs.*", - "jsonschema.*", "requests.*", "jose.*", "slugify.*", diff --git a/backend/tests/config/test_app_configuration.py b/backend/tests/config/test_app_configuration.py new file mode 100644 index 000000000..7137de9f2 --- /dev/null +++ b/backend/tests/config/test_app_configuration.py @@ -0,0 +1,107 @@ +# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors +# SPDX-License-Identifier: Apache-2.0 + + +import io + +import pytest +import yaml + +from capellacollab.config import exceptions as config_exceptions +from capellacollab.config import loader + + +def test_loader_unique_key_loader_succeeds(): + """Test that loading a YAML file with unique keys succeeds.""" + yaml_str = """ + key1: value1 + key2: value2 + """ + result = yaml.load(yaml_str, Loader=loader.UniqueKeyLoader) + assert result == {"key1": "value1", "key2": "value2"} + + +def test_loader_unique_key_loader_fails(): + """Test that attempting to load a YAML file with duplicate keys raises an exception.""" + yaml_str = """ + key1: value1 + key1: value2 + """ + with pytest.raises(config_exceptions.InvalidConfigurationError) as excinfo: + yaml.load(yaml_str, Loader=loader.UniqueKeyLoader) + assert "Duplicate key 'key1' found in configuration." 
in str(excinfo.value) + + +class MockLocation: + _exists: bool = False + content: str | None = "key: value" + + def exists(self): + return self._exists + + def open(self, encoding: str = "utf-8"): # pylint: disable=unused-argument + return io.StringIO(self.content) + + def absolute(self): + return "mocked_location" + + +@pytest.fixture(name="mock_locations") +def fixture_mock_locations() -> tuple[MockLocation, MockLocation]: + + mock_location_1 = MockLocation() + mock_location_2 = MockLocation() + return mock_location_1, mock_location_2 + + +def test_loader_does_config_exist_true( + mock_locations: tuple[MockLocation, MockLocation] +): + """Test that does_config_exist returns True when a config file exists in one of the provided locations.""" + + mock_location_1, mock_location_2 = mock_locations + mock_location_1._exists = False + mock_location_2._exists = True + loader.config_locations = [mock_location_1, mock_location_2] + + assert loader.does_config_exist() is True + + +def test_loader_does_config_exist_false( + mock_locations: tuple[MockLocation, MockLocation] +): + """Test that does_config_exist returns False when a config file does not exist in one of the provided locations.""" + + mock_location_1, mock_location_2 = mock_locations + mock_location_1._exists = False + mock_location_2._exists = False + loader.config_locations = [mock_location_1, mock_location_2] + + assert loader.does_config_exist() is False + + +def test_load_yaml_exists(mock_locations: tuple[MockLocation, MockLocation]): + """Test that load_yaml successfully loads when a config file is in one of the provided locations.""" + + mock_location_1, mock_location_2 = mock_locations + mock_location_1._exists = False + mock_location_2.content = None + mock_location_2._exists = True + mock_location_2.content = "key: value" + loader.config_locations = [mock_location_1, mock_location_2] + + assert loader.load_yaml() == {"key": "value"} + + +def test_load_yaml_not_exists( + mock_locations: 
tuple[MockLocation, MockLocation] +): + """Test that load_yaml raises an exception when no config file is found in provided locations.""" + + mock_location_1, mock_location_2 = mock_locations + mock_location_1._exists = False + mock_location_2._exists = False + loader.config_locations = [mock_location_1, mock_location_2] + + with pytest.raises(FileNotFoundError): + loader.load_yaml() diff --git a/backend/tests/sessions/test_session_environment.py b/backend/tests/sessions/test_session_environment.py index 5d7df2bc6..c58ddf01f 100644 --- a/backend/tests/sessions/test_session_environment.py +++ b/backend/tests/sessions/test_session_environment.py @@ -6,6 +6,7 @@ import pytest from capellacollab import config +from capellacollab.config import models as config_models from capellacollab.sessions import crud as sessions_crud from capellacollab.sessions import hooks as sessions_hooks from capellacollab.sessions import models as sessions_models @@ -102,14 +103,12 @@ def fixture_patch_irrelevant_request_session_calls( sessions_crud, "update_session_config", lambda *args, **kwargs: None ) - monkeypatch.setitem( + monkeypatch.setattr( config.config, "general", - { - "host": "localhost", - "port": 8080, - "scheme": "http", - }, + config_models.GeneralConfig( + host="localhost", port=8080, scheme="http" + ), ) diff --git a/backend/tests/sessions/test_session_idletimeout.py b/backend/tests/sessions/test_session_idletimeout.py index 9c255dfbc..611cd9465 100644 --- a/backend/tests/sessions/test_session_idletimeout.py +++ b/backend/tests/sessions/test_session_idletimeout.py @@ -5,15 +5,18 @@ import requests import capellacollab.sessions.idletimeout +from capellacollab.config import models as config_models from capellacollab.sessions.idletimeout import terminate_idle_session @pytest.fixture(autouse=True) def mock_config(monkeypatch): + mocked_config = config_models.AppConfig( + prometheus=config_models.PrometheusConfig(url=""), + requests=config_models.RequestsConfig(timeout=60), + ) 
monkeypatch.setattr( - capellacollab.sessions.idletimeout, - "config", - {"prometheus": {"url": ""}, "requests": {"timeout": 60}}, + capellacollab.sessions.idletimeout, "config", mocked_config ) diff --git a/backend/tests/settings/test_configuration.py b/backend/tests/settings/test_global_configuration.py similarity index 96% rename from backend/tests/settings/test_configuration.py rename to backend/tests/settings/test_global_configuration.py index 7bb04767c..aa6b5022c 100644 --- a/backend/tests/settings/test_configuration.py +++ b/backend/tests/settings/test_global_configuration.py @@ -89,6 +89,12 @@ def test_update_general_configuration(client: testclient.TestClient): }, ) + assert response.status_code == 200 + assert ( + response.json()["metadata"]["provider"] + == "Still the best team in the world!" + ) + @pytest.mark.usefixtures("admin") def test_update_general_configuration_additional_properties_fails( diff --git a/backend/tests/test_event_creation.py b/backend/tests/test_event_creation.py index c07c9e3da..739800eb2 100644 --- a/backend/tests/test_event_creation.py +++ b/backend/tests/test_event_creation.py @@ -17,7 +17,7 @@ def test_create_admin_user_by_system(db): user: users_models.DatabaseUser = users_crud.get_user_by_name( - db, config.config["initial"]["admin"] + db, config.config.initial.admin ) events: list[events_models.DatabaseUserHistoryEvent] = ( diff --git a/docs/docs/development/index.md b/docs/docs/development/index.md index 3f318785c..d029cb1ee 100644 --- a/docs/docs/development/index.md +++ b/docs/docs/development/index.md @@ -39,15 +39,12 @@ reloading of the frontend and backend. ### Backend Configuration The backend uses various configuration settings. You can find them in the -`config` directory. Please copy the file `config_template.yaml` to -`config.yaml` and adjust the values. +`backend/config` directory. A `config.yaml` with default values will be +generated the first time you run the application. 
_Hint_: If you already have the k3d cluster running and if you have the
application deployed, then no configuration values need to be adjusted.

-_Hint_: You can run `python -m capellacollab.config.diff` after each update to
-check if your config is up to date.
-
 ### Getting Started

 To get started, run the following command in the root of the repository for the
diff --git a/helm/config/backend.yaml b/helm/config/backend.yaml
index aa34a42d9..ba9b0ee45 100644
--- a/helm/config/backend.yaml
+++ b/helm/config/backend.yaml
@@ -20,7 +20,7 @@ k8s:

 promtail:
   lokiEnabled: {{ .Values.loki.enabled }}
-  lokiUrl: http://loki-gateway.{{- .Release.Namespace -}}.svc.cluster.local/loki/api/v1
+  lokiURL: http://loki-gateway.{{- .Release.Namespace -}}.svc.cluster.local/loki/api/v1
   lokiUsername: {{ .Values.definitions.loki.username }}
   lokiPassword: {{ .Values.definitions.loki.password }}
   serverPort: 3101
@@ -29,7 +29,6 @@ general:
   host: "{{ .Values.general.host }}"
   port: "{{ .Values.general.port }}"
   scheme: "{{ .Values.general.scheme }}"
-  wildcardHost: {{ .Values.general.wildcardHost }}

 extensions:
   guacamole:
@@ -70,8 +69,6 @@ authentication:
     client:
       id: "{{ .Values.backend.authentication.azure.client.id }}"
       secret: "{{ .Values.backend.authentication.azure.client.secret }}"
-
-    redirectURI: "{{ .Values.backend.authentication.azure.redirectURI }}"
   {{ end }}

 prometheus:
diff --git a/helm/values.yaml b/helm/values.yaml
index 53cc32cca..85798e1ee 100644
--- a/helm/values.yaml
+++ b/helm/values.yaml
@@ -145,10 +145,6 @@ backend:

       audience: default

-      publicKey: |
-        -----BEGIN PUBLIC KEY-----
-        -----END PUBLIC KEY-----
-
       scopes:
         - openid

@@ -171,8 +167,6 @@ backend:
         id: tbd
         secret: tbd

-      redirectURI: tbd
-
   # Namespace in which the Session Deployments / Pods should be spawned.
   # IMPORTANT: The namespace has to exist already
   k8sSessionNamespace: collab-sessions