diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..e69de29
diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml
new file mode 100644
index 0000000..e69de29
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..e69de29
diff --git a/paaws/app.py b/paaws/app.py
index c35ec5d..39e9da2 100644
--- a/paaws/app.py
+++ b/paaws/app.py
@@ -23,6 +23,21 @@ def wrapper(self, *args, **kwargs):
     return wrapper
 
 
+def merge(source: dict, destination: dict) -> dict:
+    """
+    Perform a "deep" merge of the two dictionaries
+    """
+    for key, value in source.items():
+        if isinstance(value, dict):
+            # get node or create one
+            node = destination.setdefault(key, {})
+            merge(value, node)
+        else:
+            destination[key] = value
+
+    return destination
+
+
 class Application:
     name: str
     cluster: str
@@ -32,9 +47,9 @@ class Application:
     shell_service: str
     tags: List[dict]
 
-    def _load_config(self) -> dict:
+    def _load_config(self, name: str) -> dict:
         """Load any configuration for app from parameter store"""
-        config_parameter_name = f"/paaws/apps/{self.name}"
+        config_parameter_name = f"/paaws/apps/{self.name}/{name}"
 
         ssm = boto3.client("ssm")
         try:
@@ -48,29 +63,50 @@ def _load_config(self) -> dict:
             pass
         return {}
 
-    def _define_resources(self) -> None:
+    def _initialize_settings(self) -> None:
         """Set attributes for resources on this app"""
         if not self.name:
             raise NoApplicationDefined()
-        defaults = {
-            "cluster": self.name,
-            "log_group": self.name,
-            "shell_service": f"{self.name}-debug",
-            "shell_command": "bash -l",
-            "parameter_prefix": f"/{self.name}",
-            "codebuild_project": self.name,
-            "db_utils_bucket": f"{self.name}-dbutils",
-            "chamber_compatible_config": False,
+        default_settings = {
+            "cluster": {"name": self.name},
+            "log_group": {"name": self.name},
+            "parameter_store": {"prefix": f"/{self.name}", "chamber_compatible": False},
+            "codebuild_project": {"name": self.name},
+            "shell": {"task_family": f"{self.name}-shell", "command": "bash -l",},
+            "db_utils": {
+                "shell_task_family": f"{self.name}-dbutils-shell",
+                "dumpload_task_family": f"{self.name}-dbutils-dumpload",
+                "s3_bucket": f"{self.name}-dbutils",
+            },
             "tags": [],
         }
-        defaults.update(self._load_config())
-        for k, v in defaults.items():
-            setattr(self, k, v)
+
+        self.settings = merge(self._load_config("settings"), default_settings)
 
     def setup(self, name: str) -> None:
         """Update resources when name is set"""
         self.name = name
-        self._define_resources()
+        self._initialize_settings()
+
+    @property
+    def cluster(self) -> str:
+        return self.settings["cluster"]["name"]
+
+    @property
+    def tags(self) -> List[dict]:
+        return self.settings["tags"]
+
+    @property
+    def log_group(self) -> str:
+        return self.settings["log_group"]["name"]
+
+    @property
+    def parameter_prefix(self) -> str:
+        return self.settings["parameter_store"]["prefix"]
+
+    @property
+    def chamber_compatible_config(self) -> bool:
+        return self.settings["parameter_store"]["chamber_compatible"]
 
     @requires_appname
     def get_tasks(self) -> List[dict]:
@@ -89,33 +125,22 @@ def get_tasks(self) -> List[dict]:
     def get_services(self) -> List[dict]:
         """List of service descriptions for app"""
         ecs = boto3.client("ecs")
-        service_arns = ecs.list_services(cluster=app.cluster)["serviceArns"]
+        service_arns = ecs.list_services(cluster=self.cluster)["serviceArns"]
         return [
             s
             for s in ecs.describe_services(
-                cluster=app.cluster, services=service_arns, include=["TAGS"]
+                cluster=self.cluster, services=service_arns, include=["TAGS"]
             )["services"]
-            if tags_match(s.get("tags", []), app.tags)
+            if tags_match(s.get("tags", []), self.tags)
         ]
 
-    @requires_appname
-    def get_shell_task_definition(self) -> dict:
-        """Get task definition from shell service"""
-        try:
-            service = [
-                s for s in self.get_services() if s["serviceName"] == self.shell_service
-            ][0]
-        except IndexError:
-            raise Exception(f"Shell service '{app.shell_service}' does not exist")
-        return service["taskDefinition"]
-
     @requires_appname
     def get_builds(self, limit=20):
         codebuild = boto3.client("codebuild")
         return codebuild.batch_get_builds(
-            ids=codebuild.list_builds_for_project(projectName=self.codebuild_project)[
-                "ids"
-            ][:limit]
+            ids=codebuild.list_builds_for_project(
+                projectName=self.settings["codebuild_project"]["name"]
+            )["ids"][:limit]
         )["builds"]
 
 
diff --git a/paaws/cli/builds.py b/paaws/cli/builds.py
index 1d7db07..44f959c 100644
--- a/paaws/cli/builds.py
+++ b/paaws/cli/builds.py
@@ -1,4 +1,4 @@
-from pydoc import pager
+import logging
 from textwrap import indent
 
 import boto3
@@ -9,8 +9,13 @@
 from ..app import app
 from ..utils import formatted_time_ago
 
+log = logging.getLogger(__name__)
+
 
 def get_artifact(build: dict, name: str) -> str:
+    if not build["artifacts"]["location"]:
+        log.debug("No artifacts stored by Codebuild. Skipping download of %s", name)
+        return ""
     artifact_arn = build["artifacts"]["location"]
     parts = ":".join(artifact_arn.split(":")[5:])
     bucket, key_prefix = parts.split("/", 1)
diff --git a/paaws/cli/config.py b/paaws/cli/config.py
index a0c4ffd..8f17fcd 100644
--- a/paaws/cli/config.py
+++ b/paaws/cli/config.py
@@ -44,7 +44,7 @@ def list_() -> None:
         colored(f"{app.name} Config Vars", "white", attrs=["bold"]),
     )
     parameters = load_parameters(app.parameter_prefix)
-    cell_width = max([len(k) for k in parameters.keys()])
+    cell_width = max([len(k) for k in parameters.keys()]) + 1
     for k in sorted(load_parameters(f"/{app.name}").keys()):
         print(
             colored(
diff --git a/paaws/cli/db.py b/paaws/cli/db.py
index 5e5a5b9..c6fcf01 100644
--- a/paaws/cli/db.py
+++ b/paaws/cli/db.py
@@ -16,15 +16,20 @@ def s3_location(app_name: str, prefix: str) -> (str, str):
     timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
     object_name = f"{prefix}{timestamp}-{getpass.getuser()}.dump"
 
-    return app.db_utils_bucket, object_name
+    return app.settings["db_utils"]["s3_bucket"], object_name
 
 
 def run_task(app_name: str, definition: str, command: List[str]) -> str:
     # Fetch the default runTask arguments from parameter store
-    ssm = boto3.client("ssm")
-    run_task_kwargs = json.loads(
-        ssm.get_parameter(Name=f"/paaws/ecs/{app_name}")["Parameter"]["Value"]
-    )["run_task_args"]
+    try:
+        ssm = boto3.client("ssm")
+        run_task_kwargs = json.loads(
+            ssm.get_parameter(Name=f"/paaws/apps/{app_name}/ecs-config")["Parameter"][
+                "Value"
+            ]
+        )["run_task_args"]
+    except ssm.exceptions.ParameterNotFound:
+        run_task_kwargs = {"cluster": app.cluster}
 
     run_task_kwargs["overrides"] = {
         "containerOverrides": [{"name": "app", "command": command}]
@@ -63,9 +68,11 @@ def dump():
     Dump database to local file
     """
     bucket, object_name = s3_location(app.name, "dumps/")
+    print(json.dumps(app.settings, indent=2))
+    print(app.settings["db_utils"]["dumpload_task_family"])
     task_arn = run_task(
         app.name,
-        f"{app.name}-dbutils-dump",
+        app.settings["db_utils"]["dumpload_task_family"],
         ["dump-to-s3.sh", f"s3://{bucket}/{object_name}"],
     )
     wait_for_task(app.cluster, task_arn, "dumping database")
@@ -80,7 +87,7 @@ def load(local_file: str):
     upload_file(local_file, bucket, object_name)
     task_arn = run_task(
         app.name,
-        f"{app.name}-dbutils-load",
+        app.settings["db_utils"]["dumpload_task_family"],
         ["load-from-s3.sh", f"s3://{bucket}/{object_name}"],
     )
     wait_for_task(app.name, task_arn, "loading database")
@@ -93,7 +100,7 @@ def shell():
     """
     ecs = boto3.client("ecs")
     task = run_task_until_disconnect(
-        cluster=app.cluster, task_defn=f"{app.name}-dbutils-shell"
+        cluster=app.cluster, task_defn=app.settings["db_utils"]["shell_task_family"]
     )
     task_arn = task["taskArn"]
     Halo(text=f"starting task {task_arn}").info()
diff --git a/paaws/cli/ps.py b/paaws/cli/ps.py
index f6223de..2dc4e12 100644
--- a/paaws/cli/ps.py
+++ b/paaws/cli/ps.py
@@ -14,7 +14,7 @@ def task_id(task_detail: dict) -> str:
     try:
         return tags["paaws:buildNumber"]
     except KeyError:
-        return task_detail["arn"].split("/")[-1]
+        return task_detail["taskArn"].split("/")[-1]
 
 
 @click.command()
diff --git a/paaws/cli/shell.py b/paaws/cli/shell.py
index 1ac662e..5855320 100644
--- a/paaws/cli/shell.py
+++ b/paaws/cli/shell.py
@@ -48,8 +48,8 @@ def shell():
             ),
         )
         exit(1)
-    task = run_task_until_disconnect(app.cluster, app.get_shell_task_definition())
+    task = run_task_until_disconnect(app.cluster, app.settings["shell"]["task_family"])
     task_arn = task["taskArn"]
     Halo(text=f"starting task {task_arn}").info()
     wait_for_task(app.cluster, task_arn, "running container", status="tasks_running")
-    shell_to_task(task, app.cluster, app.shell_command)
+    shell_to_task(task, app.cluster, app.settings["shell"]["command"])