diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 1142725..f3920ce 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -18,9 +18,9 @@ jobs:
python-version: ['3.8', '3.9', '3.10']
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
@@ -82,6 +82,11 @@ jobs:
poetry run pip install --upgrade pip
poetry install
+ - name: Test root command
+ run: |
+ cd example
+ poetry run vertex-deployer --version
+
- name: Test list command
run: |
cd example
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 4e2167b..a3c0be4 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -1,3 +1,15 @@
+# This workflow triggers the CI, updates the version, and uploads the release to GitHub and Google Cloud Storage when a push is made to either the 'main' or 'develop' branch.
+#
+# Workflow Steps:
+#
+# 1. CI is triggered using the CI workflow defined in .github/workflows/ci.yaml
+# 2. If it succeeds, the version is updated using Python Semantic Release
+# 3. The release is uploaded to GitHub (same step, same GitHub Action)
+# 4. Authentication to Google Cloud is achieved using Workload Identity Federation
+# 5. The release is uploaded to Google Cloud Storage
+#
+# For more details on setting up Workload Identity Federation for GitHub, visit https://github.com/google-github-actions/auth#setting-up-workload-identity-federation
+
name: CI and Release on main
on:
@@ -26,3 +38,20 @@ jobs:
uses: python-semantic-release/python-semantic-release@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: 'Authenticate to Google Cloud'
+ uses: 'google-github-actions/auth@v1'
+ with:
+ token_format: 'access_token'
+ workload_identity_provider: '${{ secrets.WIF_PROVIDER }}' # e.g. - projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider
+ service_account: '${{ secrets.WIF_SERVICE_ACCOUNT }}' # e.g. - my-service-account@my-project.iam.gserviceaccount.com
+
+ - name: Copy release to root
+ run: cp -r dist/*.tar.gz .
+
+ - name: 'Upload Release to Google Cloud Storage'
+ uses: 'google-github-actions/upload-cloud-storage@v1'
+ with:
+ path: '.'
+ destination: vertex-pipelines-deployer
+ glob: '*.tar.gz'
diff --git a/README.md b/README.md
index 8f21c44..bea4293 100644
--- a/README.md
+++ b/README.md
@@ -23,14 +23,12 @@
-> **Warning**
-> This is a work in progress and is not ready for production use.
-
-
## Table of Contents
- [Why this tool?](#why-this-tool)
- [Prerequisites](#prerequisites)
- [Installation](#installation)
+ - [From git repo](#from-git-repo)
+ - [From GCS (not available in PyPI yet)](#from-gcs-not-available-in-pypi-yet)
- [Usage](#usage)
- [Setup](#setup)
- [Folder Structure](#folder-structure)
@@ -52,6 +50,8 @@ Two uses cases:
Commands:
- `check`: check your pipelines (imports, compile, check configs validity against pipeline definition).
- `deploy`: compile, upload to Artifact Registry, run and schedule your pipelines.
+- `create`: create a new pipeline and config files.
+- `list`: list all pipelines in the `vertex/pipelines` folder (see the example below).
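+
+For example, from a folder that contains the `vertex/` directory (such as `example/` in this repo), you can list pipelines together with their config files. This is an illustrative invocation; the `--with-configs` flag name is inferred from the CLI options:
+
+```bash
+vertex-deployer list --with-configs
+```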
## Prerequisites
@@ -62,6 +62,15 @@ Commands:
## Installation
+
+### From git repo
+
+Stable version:
+```bash
+pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@main
+```
+
+Develop version:
```bash
pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@develop
```
@@ -73,6 +82,20 @@ poetry install
cd example
```
+### From GCS (not available in PyPI yet)
+
+Install a specific version:
+```bash
+export VERSION=0.0.1
+wget https://storage.cloud.google.com/vertex-pipelines-deployer/vertex_deployer-$VERSION.tar.gz
+pip install ./vertex_deployer-$VERSION.tar.gz
+```
+
+List available versions:
+```bash
+gsutil ls gs://vertex-pipelines-deployer
+```
+
## Usage
### Setup
@@ -150,13 +173,17 @@ You must respect the following folder structure. If you already follow the
```
vertex
-├─ config/
+├─ configs/
│ └─ {pipeline_name}
│ └─ {config_name}.json
└─ pipelines/
└─ {pipeline_name}.py
```
+> [!NOTE]
+> You must have at least these files. If you need to share some config elements between pipelines,
+> you can have a `shared` folder in `configs` and import its contents in your pipeline configs.
+
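+For example, a `.py` config could reuse shared elements as sketched below. Names and import paths are illustrative only; adapt them to your project and check which variables `load_config` expects:
+
+```python
+# vertex/configs/{pipeline_name}/config_dev.py (hypothetical example)
+from vertex.configs.shared.common import COMMON_PARAMS  # hypothetical shared module
+
+# parameter values passed to the pipeline at run/schedule time
+parameter_values = {**COMMON_PARAMS, "learning_rate": 0.01}
+```
+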
#### Pipelines
You file `{pipeline_name}.py` must contain a function called `pipeline` decorated using `kfp.dsl.pipeline`.
@@ -201,7 +228,7 @@ VERTEX_SERVICE_ACCOUNT=YOUR_VERTEX_SERVICE_ACCOUNT # Vertex Pipelines Service A
> **Note**
> We're using env files and dotenv to load the environment variables.
> No default value for `--env-file` argument is provided to ensure that you don't accidentally deploy to the wrong project.
-> An [`example.env`](example/example.env) file is provided in this repo.
+> An [`example.env`](./example/example.env) file is provided in this repo.
> This also allows you to work with multiple environments thanks to env files (`test.env`, `dev.env`, `prod.env`, etc)
### CLI: Deploying a Pipeline
diff --git a/deployer/cli.py b/deployer/cli.py
index bb0e525..e37b265 100644
--- a/deployer/cli.py
+++ b/deployer/cli.py
@@ -4,6 +4,7 @@
import typer
from loguru import logger
+from pydantic import ValidationError
from typing_extensions import Annotated
from deployer.constants import (
@@ -22,18 +23,41 @@
load_config,
load_vertex_settings,
)
-from deployer.utils.logging import LoguruLevel
+from deployer.utils.logging import LoguruLevel, console
from deployer.utils.utils import (
import_pipeline_from_dir,
make_enum_from_python_package_dir,
+ print_check_results_table,
+ print_pipelines_list,
)
+
+def display_version_and_exit(value: bool):
+ if value:
+ from deployer import __version__
+
+ typer.echo(f"version: {__version__}")
+ raise typer.Exit()
+
+
app = typer.Typer(no_args_is_help=True, rich_help_panel="rich", rich_markup_mode="markdown")
@app.callback(name="set_logger")
def cli_set_logger(
- log_level: Annotated[LoguruLevel, typer.Option("--log-level", "-log")] = LoguruLevel.INFO
+ ctx: typer.Context,
+ log_level: Annotated[
+ LoguruLevel, typer.Option("--log-level", "-log", help="Set the logging level.")
+ ] = LoguruLevel.INFO,
+ version: Annotated[
+ bool,
+ typer.Option(
+ "--version",
+ "-v",
+ callback=display_version_and_exit,
+ help="Display the version number and exit.",
+ ),
+ ] = False,
):
logger.configure(handlers=[{"sink": sys.stderr, "level": log_level}])
@@ -42,7 +66,7 @@ def cli_set_logger(
@app.command(no_args_is_help=True)
-def deploy(
+def deploy( # noqa: C901
pipeline_name: Annotated[
PipelineName, typer.Argument(..., help="The name of the pipeline to run.")
],
@@ -79,7 +103,13 @@ def deploy(
help="Whether to create a schedule for the pipeline.",
),
] = False,
- cron: Annotated[str, typer.Option(help="Cron expression for scheduling the pipeline.")] = None,
+ cron: Annotated[
+ str,
+ typer.Option(
+ help="Cron expression for scheduling the pipeline."
+ " To pass it to the CLI, use hyphens e.g. '0-10-*-*-*'."
+ ),
+ ] = None,
delete_last_schedule: Annotated[
bool,
typer.Option(
@@ -97,12 +127,22 @@ def deploy(
"--config-filepath",
"-cfp",
help="Path to the json/py file with parameter values and input artifacts"
- "to use when running the pipeline.",
+ " to use when running the pipeline.",
exists=True,
dir_okay=False,
file_okay=True,
),
] = None,
+ config_name: Annotated[
+ str,
+ typer.Option(
+ "--config-name",
+ "-cn",
+ help="Name of the json/py file with parameter values and input artifacts"
+ " to use when running the pipeline. It must be in the pipeline config dir."
+ " e.g. `config_dev.json` for `./vertex/configs/{pipeline-name}/config_dev.json`.",
+ ),
+ ] = None,
enable_caching: Annotated[
bool,
typer.Option(
@@ -133,6 +173,21 @@ def deploy(
"""Compile, upload, run and schedule pipelines."""
vertex_settings = load_vertex_settings(env_file=env_file)
+ if schedule:
+ if cron is None or cron == "":
+ raise typer.BadParameter("--cron must be specified to schedule a pipeline")
+ if run or schedule:
+ if config_filepath is None and config_name is None:
+ raise typer.BadParameter(
+ "Both --config-filepath and --config-name are missing."
+ " Please specify at least one to run or schedule a pipeline."
+ )
+ if config_filepath is not None and config_name is not None:
+ raise typer.BadParameter(
+ "Both --config-filepath and --config-name are provided."
+ " Please specify only one to run or schedule a pipeline."
+ )
+
pipeline_func = import_pipeline_from_dir(PIPELINE_ROOT_PATH, pipeline_name.value)
deployer = VertexPipelineDeployer(
@@ -148,34 +203,38 @@ def deploy(
)
if run or schedule:
+ if config_name is not None:
+ config_filepath = Path(CONFIG_ROOT_PATH) / pipeline_name.value / config_name
parameter_values, input_artifacts = load_config(config_filepath)
if compile:
- deployer.compile()
+ with console.status("Compiling pipeline..."):
+ deployer.compile()
if upload:
- deployer.upload_to_registry(tags=tags)
+ with console.status("Uploading pipeline..."):
+ deployer.upload_to_registry(tags=tags)
if run:
- deployer.run(
- enable_caching=enable_caching,
- parameter_values=parameter_values,
- experiment_name=experiment_name,
- input_artifacts=input_artifacts,
- tag=tags[0] if tags else None,
- )
+ with console.status("Running pipeline..."):
+ deployer.run(
+ enable_caching=enable_caching,
+ parameter_values=parameter_values,
+ experiment_name=experiment_name,
+ input_artifacts=input_artifacts,
+ tag=tags[0] if tags else None,
+ )
if schedule:
- if cron is None:
- raise ValueError("`cron` must be specified when scheduling the pipeline")
- cron = cron.replace("-", " ") # ugly fix to allow cron expression as env variable
- deployer.schedule(
- cron=cron,
- enable_caching=enable_caching,
- parameter_values=parameter_values,
- tag=tags[0] if tags else None,
- delete_last_schedule=delete_last_schedule,
- )
+ with console.status("Scheduling pipeline..."):
+ cron = cron.replace("-", " ") # ugly fix to allow cron expression as env variable
+ deployer.schedule(
+ cron=cron,
+ enable_caching=enable_caching,
+ parameter_values=parameter_values,
+ tag=tags[0] if tags else None,
+ delete_last_schedule=delete_last_schedule,
+ )
@app.command(no_args_is_help=True)
@@ -199,6 +258,14 @@ def check(
file_okay=True,
),
] = None,
+ raise_error: Annotated[
+ bool,
+ typer.Option(
+ "--raise-error / --no-raise-error",
+ "-re / -nre",
+ help="Whether to raise an error if the pipeline is not valid.",
+ ),
+ ] = False,
):
"""Check that pipelines are valid.
@@ -231,24 +298,30 @@ def check(
else:
raise ValueError("Please specify either --all or a pipeline name")
- config_filepaths = [config_filepath] if config_filepath is not None else None
- pipelines = Pipelines.model_validate(
- {
- "pipelines": {
- p.value: {"pipeline_name": p.value, "config_paths": config_filepaths}
- for p in pipelines_to_check
- }
+ if config_filepath is None:
+ to_check = {
+ p.value: list_config_filepaths(CONFIG_ROOT_PATH, p.value) for p in pipelines_to_check
}
- )
-
- log_message = "Checked pipelines and config paths:\n"
- for pipeline in pipelines.pipelines.values():
- log_message += f"- {pipeline.pipeline_name.value}:\n"
- if len(pipeline.config_paths) == 0:
- log_message += " - No config path found\n"
- for config_filepath in pipeline.config_paths:
- log_message += f" - {config_filepath.name}\n"
- logger.opt(ansi=True).success(log_message)
+ else:
+ to_check = {p.value: [config_filepath] for p in pipelines_to_check}
+
+ try:
+ with console.status("Checking pipelines..."):
+ Pipelines.model_validate(
+ {
+ "pipelines": {
+ p: {"pipeline_name": p, "config_paths": config_filepaths}
+ for p, config_filepaths in to_check.items()
+ }
+ }
+ )
+ except ValidationError as e:
+ if raise_error:
+ raise e
+ print_check_results_table(to_check, e)
+ sys.exit(1)
+ else:
+ print_check_results_table(to_check)
@app.command()
@@ -261,24 +334,21 @@ def list(
] = False
):
"""List all pipelines."""
- log_msg = "Available pipelines:\n"
if len(PipelineName.__members__) == 0:
- log_msg += (
- "No pipeline found. Please check that the pipeline root path is"
- f" correct ('{PIPELINE_ROOT_PATH}')"
+ logger.warning(
+ "No pipeline found. Please check that the pipeline root path is"
+ f" correct (current: '{PIPELINE_ROOT_PATH}')"
)
- else:
- for pipeline_name in PipelineName.__members__.values():
- log_msg += f"- {pipeline_name.value}\n"
- if with_configs:
- config_filepaths = list_config_filepaths(CONFIG_ROOT_PATH, pipeline_name.value)
- if len(config_filepaths) == 0:
- log_msg += " - No config file found\n"
- for config_filepath in config_filepaths:
- log_msg += f" - {config_filepath.name}\n"
+ if with_configs:
+ pipelines_dict = {
+ p.name: list_config_filepaths(CONFIG_ROOT_PATH, p.name)
+ for p in PipelineName.__members__.values()
+ }
+ else:
+ pipelines_dict = {p.name: [] for p in PipelineName.__members__.values()}
- logger.opt(ansi=True).info(log_msg)
+ print_pipelines_list(pipelines_dict, with_configs)
@app.command(no_args_is_help=True)
diff --git a/deployer/pipeline_checks.py b/deployer/pipeline_checks.py
index b98749c..b90da97 100644
--- a/deployer/pipeline_checks.py
+++ b/deployer/pipeline_checks.py
@@ -48,7 +48,7 @@ def populate_config_names(cls, data: Any) -> Any:
@computed_field
def pipeline(self) -> Any:
"""Import pipeline"""
- with disable_logger("deployer.utils"):
+ with disable_logger("deployer.utils.utils"):
return import_pipeline_from_dir(PIPELINE_ROOT_PATH, self.pipeline_name.value)
@computed_field()
@@ -73,7 +73,7 @@ def compile_pipeline(self):
"""Validate that the pipeline can be compiled"""
logger.debug(f"Compiling pipeline {self.pipeline_name.value}")
try:
- with disable_logger("deployer.pipelines_deployer"):
+ with disable_logger("deployer.pipeline_deployer"):
VertexPipelineDeployer(
pipeline_name=self.pipeline_name.value,
pipeline_func=self.pipeline,
diff --git a/deployer/pipeline_deployer.py b/deployer/pipeline_deployer.py
index 5754dfb..2ab3ce6 100644
--- a/deployer/pipeline_deployer.py
+++ b/deployer/pipeline_deployer.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import os
from pathlib import Path
from typing import Callable, List, Optional
@@ -77,17 +79,17 @@ def _get_template_path(self, tag: Optional[str] = None) -> str:
"""
if self.gar_host is not None:
if tag:
- return f"{self.gar_host}/{self.pipeline_name.replace('_', '-')}/{tag}"
+ return os.path.join(self.gar_host, self.pipeline_name.replace("_", "-"), tag)
if self.template_name is not None and self.version_name is not None:
- return f"{self.gar_host}/{self.template_name}/{self.version_name}"
+ return os.path.join(self.gar_host, self.template_name, self.version_name)
logger.warning(
"tag or template_name and version_name not provided."
" Falling back to local package."
)
- return str(self.local_package_path / f"{self.pipeline_name}.yaml")
+ return os.path.join(str(self.local_package_path), f"{self.pipeline_name}.yaml")
def _check_gar_host(self) -> None:
if self.gar_host is None:
@@ -98,10 +100,10 @@ def _check_gar_host(self) -> None:
def _check_experiment_name(self, experiment_name: Optional[str] = None) -> str:
if experiment_name is None:
- experiment_name = f"{self.pipeline_name}-experiment"
+ experiment_name = f"{self.pipeline_name}-experiment".replace("_", "-")
logger.info(f"Experiment name not provided, using {experiment_name}")
-
- experiment_name = experiment_name.replace("_", "-")
+ else:
+ experiment_name = experiment_name.replace("_", "-")
return experiment_name
@@ -123,7 +125,7 @@ def _create_pipeline_job(
)
return job
- def compile(self) -> "VertexPipelineDeployer":
+ def compile(self) -> VertexPipelineDeployer:
"""Compile pipeline and save it to the local package path using kfp compiler"""
self.local_package_path.mkdir(parents=True, exist_ok=True)
pipeline_filepath = self.local_package_path / f"{self.pipeline_name}.yaml"
@@ -139,7 +141,7 @@ def compile(self) -> "VertexPipelineDeployer":
def upload_to_registry(
self,
tags: List[str] = ["latest"], # noqa: B006
- ) -> "VertexPipelineDeployer":
+ ) -> VertexPipelineDeployer:
"""Upload pipeline to Artifact Registry"""
self._check_gar_host()
client = RegistryClient(host=self.gar_host)
@@ -159,7 +161,7 @@ def run(
input_artifacts: Optional[dict] = None,
experiment_name: Optional[str] = None,
tag: Optional[str] = None,
- ) -> "VertexPipelineDeployer":
+ ) -> VertexPipelineDeployer:
"""Run pipeline on Vertex AI Pipelines
If the experiment name is not provided, use the pipeline name with the suffix
@@ -197,7 +199,7 @@ def compile_upload_run(
parameter_values: Optional[dict] = None,
experiment_name: Optional[str] = None,
tags: List[str] = ["latest"], # noqa: B006
- ) -> "VertexPipelineDeployer":
+ ) -> VertexPipelineDeployer:
"""Compile, upload and run pipeline on Vertex AI Pipelines"""
self.compile()
@@ -219,7 +221,7 @@ def schedule(
parameter_values: Optional[dict] = None,
tag: Optional[str] = None,
delete_last_schedule: bool = False,
- ) -> "VertexPipelineDeployer":
+ ) -> VertexPipelineDeployer:
"""Create pipeline schedule on Vertex AI Pipelines
Compiled pipeline file is the one uploaded on artifact registry if the host is provided,
@@ -254,13 +256,14 @@ def schedule(
if tag:
client = RegistryClient(host=self.gar_host)
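+            # Artifact Registry package names use hyphens; keep this consistent with _get_template_path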
+ package_name = self.pipeline_name.replace("_", "-")
try:
- tag_metadata = client.get_tag(package_name=self.pipeline_name, tag=tag)
+ tag_metadata = client.get_tag(package_name=package_name, tag=tag)
except HTTPError as e:
- tags_list = client.list_tags(self.pipeline_name)
+ tags_list = client.list_tags(package_name)
tags_list_parsed = [x["name"].split("/")[-1] for x in tags_list]
raise TagNotFoundError(
- f"Tag {tag} not found for package {self.gar_host}/{self.pipeline_name}.\
+ f"Tag {tag} not found for package {self.gar_host}/{package_name}.\
Available tags: {tags_list_parsed}"
) from e
diff --git a/deployer/utils/logging.py b/deployer/utils/logging.py
index ba51d8f..fe0a80d 100644
--- a/deployer/utils/logging.py
+++ b/deployer/utils/logging.py
@@ -1,6 +1,7 @@
from enum import Enum
from loguru import logger
+from rich.console import Console
class LoguruLevel(str, Enum): # noqa: D101
@@ -24,3 +25,6 @@ def __enter__(self) -> None: # noqa: D105
def __exit__(self, exc_type, exc_val, exc_tb) -> None: # noqa: D105
logger.enable(self.name)
+
+
+console = Console()
diff --git a/deployer/utils/models.py b/deployer/utils/models.py
index 9c9c298..c69f4dc 100644
--- a/deployer/utils/models.py
+++ b/deployer/utils/models.py
@@ -1,4 +1,5 @@
from inspect import signature
+from typing import Literal
import kfp.components.graph_component
from pydantic import BaseModel, ConfigDict, create_model
@@ -43,3 +44,15 @@ def create_model_from_pipeline(
)
return pipeline_model
+
+
+class ChecksTableRow(CustomBaseModel):
+ """A class to represent a row of the check results table."""
+
+ status: Literal["✅", "⚠️", "❌"]
+ pipeline: str
+ pipeline_error_message: str = None
+ config_file: str
+ attribute: str = None
+ config_error_type: str = None
+ config_error_message: str = None
diff --git a/deployer/utils/utils.py b/deployer/utils/utils.py
index e76a65a..1a8ae6f 100644
--- a/deployer/utils/utils.py
+++ b/deployer/utils/utils.py
@@ -1,10 +1,15 @@
import importlib
from enum import Enum
from pathlib import Path
-from typing import Optional
+from typing import Dict, Optional
from kfp.components import graph_component
from loguru import logger
+from pydantic import ValidationError
+from rich.table import Table
+
+from deployer.utils.logging import console
+from deployer.utils.models import ChecksTableRow
def make_enum_from_python_package_dir(dir_path: Path, raise_if_not_found: bool = False) -> Enum:
@@ -45,3 +50,120 @@ def import_pipeline_from_dir(dirpath: Path, pipeline_name: str) -> graph_compone
logger.debug(f"Pipeline {module_path} imported successfully.")
return pipeline
+
+
+def print_pipelines_list(pipelines_dict: Dict[str, list], with_configs: bool = False) -> None:
+ """This function prints a table of pipelines to the console.
+
+ Args:
+ pipelines_dict (dict[str, list]): A dictionary containing the pipelines as keys
+ and the config filepaths as values.
+ with_configs (bool, optional): Whether to print the config filepaths or not.
+ Defaults to False.
+ """
+ table = Table(show_header=True, header_style="bold", show_lines=True)
+
+ table.add_column("Pipeline")
+ table.add_column("Config Files")
+
+ for pipeline_name, config_filepaths in pipelines_dict.items():
+ config_paths_str = "\n".join([c.name for c in config_filepaths])
+ style = None
+
+ if len(config_filepaths) == 0 and with_configs:
+ config_paths_str = "No config files found"
+ style = "yellow"
+
+ table.add_row(pipeline_name, config_paths_str, style=style)
+
+ if not with_configs:
+ table.columns = table.columns[:1]
+
+ console.print(table)
+
+
+def print_check_results_table(
+ to_check: Dict[str, list], validation_error: Optional[ValidationError] = None
+) -> None:
+ """This function prints a table of check results to the console.
+
+ Args:
+ to_check (dict[str, list]): A dictionary containing the pipelines to check
+ as keys and the config filepaths as values.
+ validation_error (ValidationError): The validation error if any occurred during the check.
+ """
+ val_error_dict = validation_error.errors() if validation_error else {}
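+    # Group errors by pipeline: for this model, each error loc is ('pipelines', <pipeline_name>, ...),
+    # so loc[1] identifies the pipeline the error belongs to.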
+ parsed_val_error_dict = {
+ p: [v for v in val_error_dict if v["loc"][1] == p] for p in to_check.keys()
+ }
+
+ table = Table(show_header=True, header_style="bold", show_lines=True)
+
+ table.add_column("Status", justify="center")
+ table.add_column("Pipeline")
+ table.add_column("Pipeline Error Message")
+ table.add_column("Config File")
+ table.add_column("Attribute")
+ table.add_column("Config Error Type")
+ table.add_column("Config Error Message")
+
+ for pipeline_name, config_filepaths in to_check.items():
+ errors = parsed_val_error_dict[pipeline_name]
+ if len(errors) == 0:
+ for config_filepath in config_filepaths:
+ row = ChecksTableRow(
+ status="✅",
+ pipeline=pipeline_name,
+ config_file=config_filepath.name,
+ )
+ table.add_row(*row.model_dump().values(), style="green")
+ if len(config_filepaths) == 0:
+ row = ChecksTableRow(
+ status="⚠️",
+ pipeline=pipeline_name,
+ config_file="No configs found",
+ )
+ table.add_row(*row.model_dump().values(), style="bold yellow")
+
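+        # A single error whose loc has length 2 ('pipelines', <pipeline_name>) means the pipeline
+        # entry itself failed (e.g. an import or compile error), so its config files could not be checked.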
+ elif len(errors) == 1 and len(errors[0]["loc"]) == 2:
+ row = ChecksTableRow(
+ status="❌",
+ pipeline=pipeline_name,
+ pipeline_error_message=errors[0]["msg"],
+ config_file="Could not check config files due to pipeline error.",
+ )
+ table.add_row(*row.model_dump().values(), style="red")
+
+ else:
+ for config_filepath in config_filepaths:
+ error_rows = []
+ for error in errors:
+ if error["loc"][3] == config_filepath.name:
+ error_row = {
+ "type": error["type"],
+ "attribute": error["loc"][4],
+ "msg": error["msg"],
+ }
+ error_rows.append(error_row)
+ if error_rows:
+ row = ChecksTableRow(
+ status="❌",
+ pipeline=pipeline_name,
+ config_file=config_filepath.name,
+ config_error_type="\n".join([er["type"] for er in error_rows]),
+ attribute="\n".join([er["attribute"] for er in error_rows]),
+ config_error_message="\n".join([er["msg"] for er in error_rows]),
+ )
+ table.add_row(*row.model_dump().values(), style="red")
+ else:
+ row = ChecksTableRow(
+ status="✅",
+ pipeline=pipeline_name,
+ config_file=config_filepath.name,
+ )
+ table.add_row(*row.model_dump().values(), style="green")
+
+ table.columns = [c for c in table.columns if "".join(c._cells) != ""]
+
+ console.print(table)
diff --git a/pyproject.toml b/pyproject.toml
index 6cc63c9..859a543 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,7 +15,7 @@ kfp = ">=2.0.1, <2.1.0"
google-cloud-aiplatform = "^1.26.1"
requests = "^2.31.0"
typer = "^0.9.0"
-rich = {version = "^13.5.3", optional = true}
+rich = "^13.5.3"
loguru = "^0.7.2"
pydantic-settings = "^2.0.3"
pydantic = "^2.3.0"
@@ -30,9 +30,6 @@ nbstripout = "^0.6.1"
ruff = "^0.0.289"
pytest-cov = "^4.1.0"
-[tool.poetry.extras]
-rich = ["rich"]
-
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
@@ -72,11 +69,14 @@ convention = "google"
"tests/*" = ["D"]
[tool.semantic_release]
-version_variable = [
- "deployer/__init__.py:__version__",
- "pyproject.toml:version"
-]
+version_variables = ["deployer/__init__.py:__version__"]
+version_toml = ["pyproject.toml:tool.poetry.version"]
branch = "main"
upload_to_pypi = false
upload_to_release = true
build_command = "pip install poetry && poetry build"
+commit_message = "chore(release): {version}\n\nAutomatically generated by semantic-release"
+tag_format = "{version}"
+
+[tool.semantic_release.changelog]
+exclude_commit_patterns = ["chore(release)", "Merge pull request"]