Skip to content

Commit

Permalink
feat: Set tool configuration with YAML
Browse files Browse the repository at this point in the history
  • Loading branch information
MoritzWeber0 committed Feb 22, 2024
1 parent 2c014d1 commit 4bf51b3
Show file tree
Hide file tree
Showing 52 changed files with 1,635 additions and 1,296 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,184 @@
# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: Apache-2.0

"""Migrate tools to JSON configuration
Revision ID: c973be2e2ac7
Revises: 86ab7d4d1684
Create Date: 2024-01-31 17:40:31.743565
"""
import typing as t

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "c973be2e2ac7"
down_revision = "86ab7d4d1684"
branch_labels = None
depends_on = None

# Lightweight table handles used for the data migration below. Each one gets
# its own MetaData so the migration stays independent of the application's
# ORM models (which may already reflect the *new* schema).

# "tools" as it exists BEFORE this revision: one Docker image template per
# session/backup flavor, stored as plain string columns.
t_tools = sa.Table(
    "tools",
    sa.MetaData(),
    sa.Column("id", sa.Integer()),
    sa.Column("docker_image_backup_template", sa.String()),
    sa.Column("docker_image_template", sa.String()),
    sa.Column("readonly_docker_image_template", sa.String()),
)

# "tools" AFTER this revision: the templates are folded into a single
# JSONB "integrations" column (added in drop_table_and_columns).
t_tools_new = sa.Table(
    "tools",
    sa.MetaData(),
    sa.Column("id", sa.Integer()),
    sa.Column("integrations", postgresql.JSONB(astext_type=sa.Text())),
)

# Legacy "tool_integrations" table: one boolean flag per integration,
# linked to a tool. Dropped by this revision.
t_integration = sa.Table(
    "tool_integrations",
    sa.MetaData(),
    sa.Column("t4c", sa.Boolean()),
    sa.Column("pure_variants", sa.Boolean()),
    sa.Column("jupyter", sa.Boolean()),
    sa.Column("tool_id", sa.Integer()),
)

# "versions" BEFORE this revision: flags stored as boolean columns.
t_tool_versions = sa.Table(
    "versions",
    sa.MetaData(),
    sa.Column("id", sa.Integer()),
    sa.Column("is_deprecated", sa.Boolean()),
    sa.Column("is_recommended", sa.Boolean()),
    sa.Column("tool_id", sa.Integer()),
)

# "versions" AFTER this revision: flags and image templates move into a
# JSONB "config" column.
t_tool_versions_new = sa.Table(
    "versions",
    sa.MetaData(),
    sa.Column("id", sa.Integer()),
    sa.Column("config", postgresql.JSONB(astext_type=sa.Text())),
)


def upgrade():
    """Move per-tool/per-version configuration into JSONB columns.

    Reads the legacy column values into memory first, then rewrites the
    schema, then writes the values back as JSON payloads.
    """
    connection = op.get_bind()
    tool_rows = connection.execute(sa.select(t_tools)).mappings().all()

    # Both mappings must be built BEFORE the legacy columns are dropped,
    # because they read from those columns.
    version_configs = get_mapping_version_id_to_config(tool_rows)
    tool_integrations = get_mapping_tool_id_to_integrations(tool_rows)

    drop_table_and_columns()

    for version_id, version_config in version_configs.items():
        connection.execute(
            sa.update(t_tool_versions_new)
            .where(t_tool_versions_new.c.id == version_id)
            .values(config=version_config)
        )

    for tool_pk, integration_payload in tool_integrations.items():
        connection.execute(
            sa.update(t_tools_new)
            .where(t_tools_new.c.id == tool_pk)
            .values(integrations=integration_payload)
        )


def drop_table_and_columns():
    """Apply the destructive schema changes for this revision.

    Drops the ``tool_integrations`` table and the legacy template/flag
    columns, and adds the JSONB replacement columns. The data is preserved
    only because ``upgrade`` reads it into memory before calling this.
    """
    # The separate integrations table is replaced by a JSONB column on tools.
    op.drop_table("tool_integrations")
    op.add_column(
        "tools",
        sa.Column(
            "integrations",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            # Empty JSON object default so existing rows satisfy NOT NULL.
            server_default=sa.text("'{}'::jsonb"),
        ),
    )

    # The Docker image templates move into the per-version "config" JSON.
    op.drop_column("tools", "docker_image_backup_template")
    op.drop_column("tools", "docker_image_template")
    op.drop_column("tools", "readonly_docker_image_template")
    op.add_column(
        "versions",
        sa.Column(
            "config",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=False,
            # Empty JSON object default so existing rows satisfy NOT NULL.
            server_default=sa.text("'{}'::jsonb"),
        ),
    )
    # The boolean flags also move into the "config" JSON.
    op.drop_column("versions", "is_deprecated")
    op.drop_column("versions", "is_recommended")


def get_mapping_version_id_to_config(tools: t.Sequence[sa.RowMapping]):
    """Build a mapping of tool-version id to its new JSON ``config`` payload.

    Must run before the legacy columns are dropped, since it reads the
    Docker image templates and the deprecation/recommendation flags.
    """
    connection = op.get_bind()
    mapping = {}
    for tool_row in tools:
        version_rows = (
            connection.execute(
                sa.select(t_tool_versions).where(
                    t_tool_versions.c.tool_id == tool_row["id"]
                )
            )
            .mappings()
            .all()
        )

        # The image templates are per-tool, so compute them once per tool.
        persistent_image = replace_dollar_with_format_syntax(
            tool_row["docker_image_template"]
        )
        read_only_image = replace_dollar_with_format_syntax(
            tool_row["readonly_docker_image_template"]
        )
        backup_image = replace_dollar_with_format_syntax(
            tool_row["docker_image_backup_template"]
        )

        for version_row in version_rows:
            mapping[version_row["id"]] = {
                "is_recommended": version_row["is_recommended"],
                "is_deprecated": version_row["is_deprecated"],
                "sessions": {
                    "persistent": {"image": persistent_image},
                    "read_only": {"image": read_only_image},
                },
                "backups": {"image": backup_image},
            }

    return mapping


def get_mapping_tool_id_to_integrations(tools: t.Sequence[sa.RowMapping]):
    """Build a mapping of tool id to its new ``integrations`` JSON payload.

    Tools without a row in the legacy ``tool_integrations`` table are
    skipped and keep the empty-object column default.
    """
    connection = op.get_bind()
    mapping = {}
    for tool_row in tools:
        integration_row = (
            connection.execute(
                sa.select(t_integration).where(
                    t_integration.c.tool_id == tool_row["id"]
                )
            )
            .mappings()
            .first()
        )

        if integration_row is None:
            continue

        mapping[tool_row["id"]] = {
            flag: integration_row[flag]
            for flag in ("t4c", "pure_variants", "jupyter")
        }

    return mapping


def replace_dollar_with_format_syntax(template: str | None):
return template.replace("$version", "{version}") if template else None
1 change: 1 addition & 0 deletions backend/capellacollab/core/database/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class Base(orm.MappedAsDataclass, orm.DeclarativeBase):
type_annotation_map = {
dict[str, str]: postgresql.JSONB,
dict[str, t.Any]: postgresql.JSONB,
dict[str, bool]: postgresql.JSONB,
}


Expand Down
70 changes: 70 additions & 0 deletions backend/capellacollab/core/database/decorator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: Apache-2.0

import typing as t

import pydantic
from sqlalchemy import types
from sqlalchemy.dialects import postgresql


class PydanticDecorator(types.TypeDecorator):
    """Map a pydantic object to a JSONB column and back.

    Use in database models like this:

    ```py
    json_column: orm.Mapped[pydantic.BaseModel] = orm.mapped_column(PydanticDecorator(pydantic.BaseModel))
    ```

    Replace:
    - `json_column` with the name of the column in the database
    - `pydantic.BaseModel` with the pydantic model you want to use
    """

    impl = postgresql.JSONB
    python_type = pydantic.BaseModel

    cache_ok = True

    def __init__(self, pydantic_model: t.Type[pydantic.BaseModel]):
        super().__init__()
        # The concrete model class used to re-hydrate values read from the DB.
        self.pydantic_model = pydantic_model

    def process_bind_param(
        self, value, dialect  # pylint: disable=unused-argument
    ):
        """Serialize a pydantic object into a JSONB-compatible dict."""
        return None if value is None else value.model_dump()

    def process_literal_param(
        self, value, dialect  # pylint: disable=unused-argument
    ):
        """Serialize a literal pydantic object into a JSONB-compatible dict."""
        return None if value is None else value.model_dump()

    def process_result_value(
        self, value, dialect  # pylint: disable=unused-argument
    ):
        """Re-hydrate the configured pydantic model from stored JSONB."""
        return None if value is None else self.pydantic_model.model_validate(value)


class PydanticDatabaseModel(pydantic.BaseModel):
    """Base class for database models with an ID.

    Use it to extend pydantic models with the database ID field:

    ```py
    class PydanticModel(PydanticSuperModel, decorator.PydanticDatabaseModel):
        pass
    ```
    """

    # Database primary key; constrained to be >= 1.
    id: int = pydantic.Field(
        description="Unique identifier of the resource.", ge=1
    )
Loading

0 comments on commit 4bf51b3

Please sign in to comment.