From a7c99af4bb56c884ef0e8ba3bba3091eeb2298ae Mon Sep 17 00:00:00 2001 From: shayancanonical <99665202+shayancanonical@users.noreply.github.com> Date: Tue, 19 Mar 2024 04:50:16 -0400 Subject: [PATCH] DPE-1794 Implement COS integration (#93) ## Issue We need to introduce COS support for MySQLRouter ## Solution 1. Introduce COS support for MySQLRouter 2. Introduce the scaffolding for secrets to be able to store the monitoring password in the juju secrets backend ## Prerequisites 1. We need to merge the [snap PR](https://github.com/canonical/charmed-mysql-snap/pull/41) that adds the COS machinery to the snap. After, we need to update the snap revision in the `snap.py` file 2. We need to import the secrets lib after the [following secrets PR in data-platform-libs](https://github.com/canonical/data-platform-libs/pull/117) is merged ## TODO 1. ~~Determine why the `All Connections Information` and `Route byte to/from server` sections on the dashboard are not populated when more than 1 routers are deployed~~ (see https://github.com/canonical/mysql-router-operator/pull/93#issuecomment-1988408876) 2. 
~~Test router log collection (via Loki) when [the following issue is resolved](https://github.com/canonical/grafana-agent-operator/issues/56)~~ (see https://github.com/canonical/mysql-router-operator/pull/93#issuecomment-1988408876) ## Demo ![image](https://github.com/canonical/mysql-router-operator/assets/99665202/e2173939-c2e8-4de2-a007-bb0dbb2269d4) --- .../data_platform_libs/v0/data_interfaces.py | 1434 ++++++++++++----- lib/charms/grafana_agent/v0/cos_agent.py | 806 +++++++++ lib/charms/operator_libs_linux/v2/snap.py | 68 +- metadata.yaml | 8 +- poetry.lock | 92 +- pyproject.toml | 3 + src/abstract_charm.py | 56 +- src/container.py | 72 +- .../mysql-router-metrics.json | 1286 +++++++++++++++ src/machine_charm.py | 11 +- src/mysql_shell/__init__.py | 12 +- src/relations/cos.py | 132 ++ src/relations/database_providers_wrapper.py | 8 +- src/relations/secrets.py | 92 ++ src/snap.py | 39 +- src/utils.py | 13 + src/workload.py | 167 +- tests/integration/test_database.py | 60 +- tests/integration/test_exporter.py | 148 ++ tests/unit/conftest.py | 28 +- 20 files changed, 3984 insertions(+), 551 deletions(-) create mode 100644 lib/charms/grafana_agent/v0/cos_agent.py create mode 100644 src/grafana_dashboards/mysql-router-metrics.json create mode 100644 src/relations/cos.py create mode 100644 src/relations/secrets.py create mode 100644 src/utils.py create mode 100644 tests/integration/test_exporter.py diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index 714eace4..d24aa6ff 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -300,7 +300,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): from enum import Enum from typing import Callable, Dict, List, Optional, Set, Tuple, Union -from ops import JujuVersion, Secret, SecretInfo, SecretNotFoundError +from ops import JujuVersion, Model, Secret, SecretInfo, 
SecretNotFoundError from ops.charm import ( CharmBase, CharmEvents, @@ -320,7 +320,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 24 +LIBPATCH = 29 PYDEPS = ["ops>=2.0.0"] @@ -347,16 +347,6 @@ class SecretGroup(Enum): EXTRA = "extra" -# Local map to associate mappings with secrets potentially as a group -SECRET_LABEL_MAP = { - "username": SecretGroup.USER, - "password": SecretGroup.USER, - "uris": SecretGroup.USER, - "tls": SecretGroup.TLS, - "tls-ca": SecretGroup.TLS, -} - - class DataInterfacesError(Exception): """Common ancestor for DataInterfaces related exceptions.""" @@ -407,7 +397,7 @@ def set_encoded_field( relation.data[member].update({field: json.dumps(value)}) -def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff: +def diff(event: RelationChangedEvent, bucket: Optional[Union[Unit, Application]]) -> Diff: """Retrieves the diff of the data in the relation changed databag. Args: @@ -419,6 +409,9 @@ def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff: keys from the event relation databag. """ # Retrieve the old data from the data key in the application relation databag. + if not bucket: + return Diff([], [], []) + old_data = get_encoded_dict(event.relation, bucket, "data") if not old_data: @@ -432,15 +425,15 @@ def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff: ) # These are the keys that were added to the databag and triggered this event. - added = new_data.keys() - old_data.keys() # pyright: ignore [reportGeneralTypeIssues] + added = new_data.keys() - old_data.keys() # pyright: ignore [reportAssignmentType] # These are the keys that were removed from the databag and triggered this event. 
- deleted = old_data.keys() - new_data.keys() # pyright: ignore [reportGeneralTypeIssues] + deleted = old_data.keys() - new_data.keys() # pyright: ignore [reportAssignmentType] # These are the keys that already existed in the databag, # but had their values changed. changed = { key - for key in old_data.keys() & new_data.keys() # pyright: ignore [reportGeneralTypeIssues] - if old_data[key] != new_data[key] # pyright: ignore [reportGeneralTypeIssues] + for key in old_data.keys() & new_data.keys() # pyright: ignore [reportAssignmentType] + if old_data[key] != new_data[key] # pyright: ignore [reportAssignmentType] } # Convert the new_data to a serializable format and save it for a next diff check. set_encoded_field(event.relation, bucket, "data", new_data) @@ -453,7 +446,7 @@ def leader_only(f): """Decorator to ensure that only leader can perform given operation.""" def wrapper(self, *args, **kwargs): - if not self.local_unit.is_leader(): + if self.component == self.local_app and not self.local_unit.is_leader(): logger.error( "This operation (%s()) can only be performed by the leader unit", f.__name__ ) @@ -487,12 +480,19 @@ class CachedSecret: The data structure is precisely re-using/simulating as in the actual Secret Storage """ - def __init__(self, charm: CharmBase, label: str, secret_uri: Optional[str] = None): + def __init__( + self, + model: Model, + component: Union[Application, Unit], + label: str, + secret_uri: Optional[str] = None, + ): self._secret_meta = None self._secret_content = {} self._secret_uri = secret_uri self.label = label - self.charm = charm + self._model = model + self.component = component def add_secret(self, content: Dict[str, str], relation: Relation) -> Secret: """Create a new secret.""" @@ -501,8 +501,10 @@ def add_secret(self, content: Dict[str, str], relation: Relation) -> Secret: "Secret is already defined with uri %s", self._secret_uri ) - secret = self.charm.app.add_secret(content, label=self.label) - secret.grant(relation) + secret 
= self.component.add_secret(content, label=self.label) + if relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) self._secret_uri = secret.id self._secret_meta = secret return self._secret_meta @@ -514,10 +516,10 @@ def meta(self) -> Optional[Secret]: if not (self._secret_uri or self.label): return try: - self._secret_meta = self.charm.model.get_secret(label=self.label) + self._secret_meta = self._model.get_secret(label=self.label) except SecretNotFoundError: if self._secret_uri: - self._secret_meta = self.charm.model.get_secret( + self._secret_meta = self._model.get_secret( id=self._secret_uri, label=self.label ) return self._secret_meta @@ -531,8 +533,13 @@ def get_content(self) -> Dict[str, str]: except (ValueError, ModelError) as err: # https://bugs.launchpad.net/juju/+bug/2042596 # Only triggered when 'refresh' is set - msg = "ERROR either URI or label should be used for getting an owned secret but not both" - if isinstance(err, ModelError) and msg not in str(err): + known_model_errors = [ + "ERROR either URI or label should be used for getting an owned secret but not both", + "ERROR secret owner cannot use --refresh", + ] + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in known_model_errors + ): raise # Due to: ValueError: Secret owner cannot use refresh=True self._secret_content = self.meta.get_content() @@ -554,18 +561,31 @@ def get_info(self) -> Optional[SecretInfo]: if self.meta: return self.meta.get_info() + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + class SecretCache: """A data structure storing CachedSecret objects.""" - def __init__(self, charm): - self.charm = charm + def __init__(self, model: Model, 
component: Union[Application, Unit]): + self._model = model + self.component = component self._secrets: Dict[str, CachedSecret] = {} def get(self, label: str, uri: Optional[str] = None) -> Optional[CachedSecret]: """Getting a secret from Juju Secret store or cache.""" if not self._secrets.get(label): - secret = CachedSecret(self.charm, label, uri) + secret = CachedSecret(self._model, self.component, label, uri) if secret.meta: self._secrets[label] = secret return self._secrets.get(label) @@ -575,37 +595,57 @@ def add(self, label: str, content: Dict[str, str], relation: Relation) -> Cached if self._secrets.get(label): raise SecretAlreadyExistsError(f"Secret {label} already exists") - secret = CachedSecret(self.charm, label) + secret = CachedSecret(self._model, self.component, label) secret.add_secret(content, relation) self._secrets[label] = secret return self._secrets[label] + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := self.get(label): + secret.remove() + self._secrets.pop(label) + else: + logging.error("Non-existing Juju Secret was attempted to be removed %s", label) + -# Base DataRelation +# Base Data -class DataRelation(Object, ABC): +class Data(ABC): """Base relation data mainpulation (abstract) class.""" - def __init__(self, charm: CharmBase, relation_name: str) -> None: - super().__init__(charm, relation_name) - self.charm = charm - self.local_app = self.charm.model.app - self.local_unit = self.charm.unit + SCOPE = Scope.APP + + # Local map to associate mappings with secrets potentially as a group + SECRET_LABEL_MAP = { + "username": SecretGroup.USER, + "password": SecretGroup.USER, + "uris": SecretGroup.USER, + "tls": SecretGroup.TLS, + "tls-ca": SecretGroup.TLS, + } + + def __init__( + self, + model: Model, + relation_name: str, + ) -> None: + self._model = model + self.local_app = self._model.app + self.local_unit = self._model.unit self.relation_name = relation_name - self.framework.observe( - 
charm.on[relation_name].relation_changed, - self._on_relation_changed_event, - ) self._jujuversion = None - self.secrets = SecretCache(self.charm) + self.component = self.local_app if self.SCOPE == Scope.APP else self.local_unit + self.secrets = SecretCache(self._model, self.component) + self.data_component = None @property def relations(self) -> List[Relation]: """The list of Relation instances associated with this relation_name.""" return [ relation - for relation in self.charm.model.relations[self.relation_name] + for relation in self._model.relations[self.relation_name] if self._is_relation_active(relation) ] @@ -618,11 +658,6 @@ def secrets_enabled(self): # Mandatory overrides for internal/helper methods - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - @abstractmethod def _get_relation_secret( self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None @@ -677,8 +712,7 @@ def _generate_secret_label( """Generate unique group_mappings for secrets within a relation context.""" return f"{relation_name}.{relation_id}.{group_mapping.value}.secret" - @staticmethod - def _generate_secret_field_name(group_mapping: SecretGroup) -> str: + def _generate_secret_field_name(self, group_mapping: SecretGroup) -> str: """Generate unique group_mappings for secrets within a relation context.""" return f"{PROV_SECRET_PREFIX}{group_mapping.value}" @@ -705,8 +739,8 @@ def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: except ModelError: return - @staticmethod - def _group_secret_fields(secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + @classmethod + def _group_secret_fields(cls, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: """Helper function to arrange secret mappings under their group. NOTE: All unrecognized items end up in the 'extra' secret bucket. 
@@ -714,7 +748,7 @@ def _group_secret_fields(secret_fields: List[str]) -> Dict[SecretGroup, List[str """ secret_fieldnames_grouped = {} for key in secret_fields: - if group := SECRET_LABEL_MAP.get(key): + if group := cls.SECRET_LABEL_MAP.get(key): secret_fieldnames_grouped.setdefault(group, []).append(key) else: secret_fieldnames_grouped.setdefault(SecretGroup.EXTRA, []).append(key) @@ -736,22 +770,22 @@ def _get_group_secret_contents( return {k: v for k, v in secret_data.items() if k in secret_fields} return {} - @staticmethod + @classmethod def _content_for_secret_group( - content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + cls, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup ) -> Dict[str, str]: """Select : pairs from input, that belong to this particular Secret group.""" if group_mapping == SecretGroup.EXTRA: return { k: v for k, v in content.items() - if k in secret_fields and k not in SECRET_LABEL_MAP.keys() + if k in secret_fields and k not in cls.SECRET_LABEL_MAP.keys() } return { k: v for k, v in content.items() - if k in secret_fields and SECRET_LABEL_MAP.get(k) == group_mapping + if k in secret_fields and cls.SECRET_LABEL_MAP.get(k) == group_mapping } @juju_secrets_only @@ -780,11 +814,11 @@ def _process_secret_fields( # If the relation started on a databag, we just stay on the databag # (Rolling upgrades may result in a relation starting on databag, getting secrets enabled on-the-fly) - # self.local_app is sufficient to check (ignored if Requires, never has secrets -- works if Provides) + # self.local_app is sufficient to check (ignored if Requires, never has secrets -- works if Provider) fallback_to_databag = ( req_secret_fields - and self.local_unit.is_leader() - and set(req_secret_fields) & set(relation.data[self.local_app]) + and (self.local_unit == self._model.unit and self.local_unit.is_leader()) + and set(req_secret_fields) & set(relation.data[self.component]) ) normal_fields = 
set(impacted_rel_fields) @@ -807,26 +841,28 @@ def _process_secret_fields( return (result, normal_fields) def _fetch_relation_data_without_secrets( - self, app: Application, relation: Relation, fields: Optional[List[str]] + self, component: Union[Application, Unit], relation: Relation, fields: Optional[List[str]] ) -> Dict[str, str]: """Fetching databag contents when no secrets are involved. Since the Provider's databag is the only one holding secrest, we can apply a simplified workflow to read the Require's side's databag. - This is used typically when the Provides side wants to read the Requires side's data, + This is used typically when the Provider side wants to read the Requires side's data, or when the Requires side may want to read its own data. """ - if app not in relation.data or not relation.data[app]: + if component not in relation.data or not relation.data[component]: return {} if fields: - return {k: relation.data[app][k] for k in fields if k in relation.data[app]} + return { + k: relation.data[component][k] for k in fields if k in relation.data[component] + } else: - return dict(relation.data[app]) + return dict(relation.data[component]) def _fetch_relation_data_with_secrets( self, - app: Application, + component: Union[Application, Unit], req_secret_fields: Optional[List[str]], relation: Relation, fields: Optional[List[str]] = None, @@ -835,23 +871,19 @@ def _fetch_relation_data_with_secrets( This function has internal logic to resolve if a requested field may be "hidden" within a Relation Secret, or directly available as a databag field. Typically - used to read the Provides side's databag (eigher by the Requires side, or by - Provides side itself). + used to read the Provider side's databag (eigher by the Requires side, or by + Provider side itself). 
""" result = {} normal_fields = [] if not fields: - if app not in relation.data or not relation.data[app]: + if component not in relation.data: return {} - all_fields = list(relation.data[app].keys()) + all_fields = list(relation.data[component].keys()) normal_fields = [field for field in all_fields if not self._is_secret_field(field)] - - # There must have been secrets there - if all_fields != normal_fields and req_secret_fields: - # So we assemble the full fields list (without 'secret-' fields) - fields = normal_fields + req_secret_fields + fields = normal_fields + req_secret_fields if req_secret_fields else normal_fields if fields: result, normal_fields = self._process_secret_fields( @@ -859,41 +891,38 @@ def _fetch_relation_data_with_secrets( ) # Processing "normal" fields. May include leftover from what we couldn't retrieve as a secret. - # (Typically when Juju3 Requires meets Juju2 Provides) + # (Typically when Juju3 Requires meets Juju2 Provider) if normal_fields: result.update( - self._fetch_relation_data_without_secrets(app, relation, list(normal_fields)) + self._fetch_relation_data_without_secrets(component, relation, list(normal_fields)) ) return result def _update_relation_data_without_secrets( - self, app: Application, relation: Relation, data: Dict[str, str] + self, component: Union[Application, Unit], relation: Relation, data: Dict[str, str] ) -> None: """Updating databag contents when no secrets are involved.""" - if app not in relation.data or relation.data[app] is None: + if component not in relation.data or relation.data[component] is None: return - if any(self._is_secret_field(key) for key in data.keys()): - raise SecretsIllegalUpdateError("Can't update secret {key}.") - if relation: - relation.data[app].update(data) + relation.data[component].update(data) def _delete_relation_data_without_secrets( - self, app: Application, relation: Relation, fields: List[str] + self, component: Union[Application, Unit], relation: Relation, fields: List[str] ) 
-> None: """Remove databag fields 'fields' from Relation.""" - if app not in relation.data or not relation.data[app]: + if component not in relation.data or relation.data[component] is None: return for field in fields: try: - relation.data[app].pop(field) + relation.data[component].pop(field) except KeyError: - logger.debug( - "Non-existing field was attempted to be removed from the databag %s, %s", - str(relation.id), + logger.error( + "Non-existing field '%s' was attempted to be removed from the databag (relation ID: %s)", str(field), + str(relation.id), ) pass @@ -902,7 +931,7 @@ def _delete_relation_data_without_secrets( def get_relation(self, relation_name, relation_id) -> Relation: """Safe way of retrieving a relation.""" - relation = self.charm.model.get_relation(relation_name, relation_id) + relation = self._model.get_relation(relation_name, relation_id) if not relation: raise DataInterfacesError( @@ -954,7 +983,6 @@ def fetch_relation_field( .get(field) ) - @leader_only def fetch_my_relation_data( self, relation_ids: Optional[List[int]] = None, @@ -983,7 +1011,6 @@ def fetch_my_relation_data( data[relation.id] = self._fetch_my_specific_relation_data(relation, fields) return data - @leader_only def fetch_my_relation_field( self, relation_id: int, field: str, relation_name: Optional[str] = None ) -> Optional[str]: @@ -1010,14 +1037,22 @@ def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: return self._delete_relation_data(relation, fields) -# Base DataProvides and DataRequires +class EventHandlers(Object): + """Requires-side of the relation.""" + def __init__(self, charm: CharmBase, relation_data: Data, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_data.relation_name + super().__init__(charm, unique_key) -class DataProvides(DataRelation): - """Base provides-side of the data products relation.""" + self.charm = charm + self.relation_data = relation_data - def 
__init__(self, charm: CharmBase, relation_name: str) -> None: - super().__init__(charm, relation_name) + self.framework.observe( + charm.on[self.relation_data.relation_name].relation_changed, + self._on_relation_changed_event, + ) def _diff(self, event: RelationChangedEvent) -> Diff: """Retrieves the diff of the data in the relation changed databag. @@ -1029,33 +1064,64 @@ def _diff(self, event: RelationChangedEvent) -> Diff: a Diff instance containing the added, deleted and changed keys from the event relation databag. """ - return diff(event, self.local_app) + return diff(event, self.relation_data.data_component) + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + +# Base ProviderData and RequiresData + + +class ProviderData(Data): + """Base provides-side of the data products relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + ) -> None: + super().__init__(model, relation_name) + self.data_component = self.local_app # Private methods handling secrets @juju_secrets_only def _add_relation_secret( - self, relation: Relation, content: Dict[str, str], group_mapping: SecretGroup + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + uri_to_databag=True, ) -> bool: """Add a new Juju Secret that will be registered in the relation databag.""" secret_field = self._generate_secret_field_name(group_mapping) - if relation.data[self.local_app].get(secret_field): + if uri_to_databag and relation.data[self.component].get(secret_field): logging.error("Secret for relation %s already exists, not adding again", relation.id) return False + content = self._content_for_secret_group(data, secret_fields, group_mapping) + label = self._generate_secret_label(self.relation_name, relation.id, group_mapping) secret = self.secrets.add(label, content, relation) # According to 
lint we may not have a Secret ID - if secret.meta and secret.meta.id: - relation.data[self.local_app][secret_field] = secret.meta.id + if uri_to_databag and secret.meta and secret.meta.id: + relation.data[self.component][secret_field] = secret.meta.id # Return the content that was added return True @juju_secrets_only def _update_relation_secret( - self, relation: Relation, content: Dict[str, str], group_mapping: SecretGroup + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], ) -> bool: """Update the contents of an existing Juju Secret, referred in the relation databag.""" secret = self._get_relation_secret(relation.id, group_mapping) @@ -1064,6 +1130,8 @@ def _update_relation_secret( logging.error("Can't update secret for relation %s", relation.id) return False + content = self._content_for_secret_group(data, secret_fields, group_mapping) + old_content = secret.get_content() full_content = copy.deepcopy(old_content) full_content.update(content) @@ -1078,13 +1146,13 @@ def _add_or_update_relation_secrets( group: SecretGroup, secret_fields: Set[str], data: Dict[str, str], + uri_to_databag=True, ) -> bool: """Update contents for Secret group. 
If the Secret doesn't exist, create it.""" - secret_content = self._content_for_secret_group(data, secret_fields, group) if self._get_relation_secret(relation.id, group): - return self._update_relation_secret(relation, secret_content, group) + return self._update_relation_secret(relation, group, secret_fields, data) else: - return self._add_relation_secret(relation, secret_content, group) + return self._add_relation_secret(relation, group, secret_fields, data, uri_to_databag) @juju_secrets_only def _delete_relation_secret( @@ -1110,15 +1178,17 @@ def _delete_relation_secret( ) return False - secret.set_content(new_content) - # Remove secret from the relation if it's fully gone if not new_content: field = self._generate_secret_field_name(group) try: - relation.data[self.local_app].pop(field) + relation.data[self.component].pop(field) except KeyError: pass + label = self._generate_secret_label(self.relation_name, relation.id, group) + self.secrets.remove(label) + else: + secret.set_content(new_content) # Return the content that was removed return True @@ -1137,7 +1207,7 @@ def _get_relation_secret( if secret := self.secrets.get(label): return secret - relation = self.charm.model.get_relation(relation_name, relation_id) + relation = self._model.get_relation(relation_name, relation_id) if not relation: return @@ -1148,9 +1218,9 @@ def _get_relation_secret( def _fetch_specific_relation_data( self, relation: Relation, fields: Optional[List[str]] ) -> Dict[str, str]: - """Fetching relation data for Provides. + """Fetching relation data for Provider. 
- NOTE: Since all secret fields are in the Provides side of the databag, we don't need to worry about that + NOTE: Since all secret fields are in the Provider side of the databag, we don't need to worry about that """ if not relation.app: return {} @@ -1233,33 +1303,31 @@ def set_tls_ca(self, relation_id: int, tls_ca: str) -> None: """ self.update_relation_data(relation_id, {"tls-ca": tls_ca}) + # Public functions -- inherited -class DataRequires(DataRelation): - """Requires-side of the relation.""" + fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) + fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + + +class RequirerData(Data): + """Requirer-side of the relation.""" SECRET_FIELDS = ["username", "password", "tls", "tls-ca", "uris"] def __init__( self, - charm, + model, relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], ): """Manager of base client relations.""" - super().__init__(charm, relation_name) + super().__init__(model, relation_name) self.extra_user_roles = extra_user_roles self._secret_fields = list(self.SECRET_FIELDS) if additional_secret_fields: self._secret_fields += additional_secret_fields - - self.framework.observe( - self.charm.on[relation_name].relation_created, self._on_relation_created_event - ) - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) + self.data_component = self.local_unit @property def secret_fields(self) -> Optional[List[str]]: @@ -1267,18 +1335,6 @@ def secret_fields(self) -> Optional[List[str]]: if self.secrets_enabled: return self._secret_fields - def _diff(self, event: RelationChangedEvent) -> Diff: - """Retrieves the diff of the data in the relation changed databag. - - Args: - event: relation changed event. - - Returns: - a Diff instance containing the added, deleted and changed - keys from the event relation databag. 
- """ - return diff(event, self.local_unit) - # Internal helper functions def _register_secret_to_relation( @@ -1291,13 +1347,13 @@ def _register_secret_to_relation( then will be "stuck" on the Secret object, whenever it may appear (i.e. as an event attribute, or fetched manually) on future occasions. - This will allow us to uniquely identify the secret on Provides side (typically on + This will allow us to uniquely identify the secret on Provider side (typically on 'secret-changed' events), and map it to the corresponding relation. """ label = self._generate_secret_label(relation_name, relation_id, group) # Fetchin the Secret's meta information ensuring that it's locally getting registered with - CachedSecret(self.charm, label, secret_id).meta + CachedSecret(self._model, self.component, label, secret_id).meta def _register_secrets_to_relation(self, relation: Relation, params_name_list: List[str]): """Make sure that secrets of the provided list are locally 'registered' from the databag. 
@@ -1357,23 +1413,6 @@ def is_resource_created(self, relation_id: Optional[int] = None) -> bool: else False ) - # Event handlers - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the relation is created.""" - if not self.local_unit.is_leader(): - return - - if self.secret_fields: - set_encoded_field( - event.relation, self.charm.app, REQ_SECRET_FIELDS, self.secret_fields - ) - - @abstractmethod - def _on_secret_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - # Mandatory internal overrides @juju_secrets_only @@ -1390,7 +1429,7 @@ def _get_relation_secret( def _fetch_specific_relation_data( self, relation, fields: Optional[List[str]] = None ) -> Dict[str, str]: - """Fetching Requires data -- that may include secrets.""" + """Fetching Requirer data -- that may include secrets.""" if not relation.app: return {} return self._fetch_relation_data_with_secrets( @@ -1426,139 +1465,499 @@ def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """ return self._delete_relation_data_without_secrets(self.local_app, relation, fields) + # Public functions -- inherited -# General events - - -class ExtraRoleEvent(RelationEvent): - """Base class for data events.""" + fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) + fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) - @property - def extra_user_roles(self) -> Optional[str]: - """Returns the extra user roles that were requested.""" - if not self.relation.app: - return None - return self.relation.data[self.relation.app].get("extra-user-roles") +class RequirerEventHandlers(EventHandlers): + """Requires-side of the relation.""" + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) -class 
AuthenticationEvent(RelationEvent): - """Base class for authentication fields for events. + self.framework.observe( + self.charm.on[relation_data.relation_name].relation_created, + self._on_relation_created_event, + ) + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) - The amount of logic added here is not ideal -- but this was the only way to preserve - the interface when moving to Juju Secrets - """ + # Event handlers - @property - def _secrets(self) -> dict: - """Caching secrets to avoid fetching them each time a field is referrd. + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + if not self.relation_data.local_unit.is_leader(): + return - DON'T USE the encapsulated helper variable outside of this function - """ - if not hasattr(self, "_cached_secrets"): - self._cached_secrets = {} - return self._cached_secrets + if self.relation_data.secret_fields: # pyright: ignore [reportAttributeAccessIssue] + set_encoded_field( + event.relation, + self.charm.app, + REQ_SECRET_FIELDS, + self.relation_data.secret_fields, # pyright: ignore [reportAttributeAccessIssue] + ) - @property - def _jujuversion(self) -> JujuVersion: - """Caching jujuversion to avoid a Juju call on each field evaluation. 
+ @abstractmethod + def _on_secret_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError - DON'T USE the encapsulated helper variable outside of this function - """ - if not hasattr(self, "_cached_jujuversion"): - self._cached_jujuversion = None - if not self._cached_jujuversion: - self._cached_jujuversion = JujuVersion.from_environ() - return self._cached_jujuversion - def _get_secret(self, group) -> Optional[Dict[str, str]]: - """Retrieveing secrets.""" - if not self.app: - return - if not self._secrets.get(group): - self._secrets[group] = None - secret_field = f"{PROV_SECRET_PREFIX}{group}" - if secret_uri := self.relation.data[self.app].get(secret_field): - secret = self.framework.model.get_secret(id=secret_uri) - self._secrets[group] = secret.get_content() - return self._secrets[group] +# Base DataPeer - @property - def secrets_enabled(self): - """Is this Juju version allowing for Secrets usage?""" - return self._jujuversion.has_secrets - @property - def username(self) -> Optional[str]: - """Returns the created username.""" - if not self.relation.app: - return None +class DataPeerData(RequirerData, ProviderData): + """Represents peer relations data.""" - if self.secrets_enabled: - secret = self._get_secret("user") - if secret: - return secret.get("username") + SECRET_FIELDS = ["operator-password"] + SECRET_FIELD_NAME = "internal_secret" + SECRET_LABEL_MAP = {} - return self.relation.data[self.relation.app].get("username") + def __init__( + self, + model, + relation_name: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + ): + """Manager of base client relations.""" + RequirerData.__init__( + self, + model, + relation_name, + extra_user_roles, + additional_secret_fields, + ) + self.secret_field_name = secret_field_name if secret_field_name 
else self.SECRET_FIELD_NAME + self.deleted_label = deleted_label @property - def password(self) -> Optional[str]: - """Returns the password for the created user.""" - if not self.relation.app: - return None - - if self.secrets_enabled: - secret = self._get_secret("user") - if secret: - return secret.get("password") - - return self.relation.data[self.relation.app].get("password") + def scope(self) -> Optional[Scope]: + """Turn component information into Scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT - @property - def tls(self) -> Optional[str]: - """Returns whether TLS is configured.""" - if not self.relation.app: - return None + def _generate_secret_label( + self, relation_name: str, relation_id: int, group_mapping: SecretGroup + ) -> str: + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + return f"{'.'.join(members)}" - if self.secrets_enabled: - secret = self._get_secret("tls") - if secret: - return secret.get("tls") + def _generate_secret_field_name(self, group_mapping: SecretGroup = SecretGroup.EXTRA) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + return f"{self.secret_field_name}" - return self.relation.data[self.relation.app].get("tls") + @juju_secrets_only + def _get_relation_secret( + self, + relation_id: int, + group_mapping: SecretGroup = SecretGroup.EXTRA, + relation_name: Optional[str] = None, + ) -> Optional[CachedSecret]: + """Retrieve a Juju Secret specifically for peer relations. - @property - def tls_ca(self) -> Optional[str]: - """Returns TLS CA.""" - if not self.relation.app: - return None + In case this code may be executed within a rolling upgrade, and we may need to + migrate secrets from the databag to labels, we make sure to stick the correct + label on the secret, and clean up the local databag. 
+ """ + if not relation_name: + relation_name = self.relation_name - if self.secrets_enabled: - secret = self._get_secret("tls") - if secret: - return secret.get("tls-ca") + relation = self._model.get_relation(relation_name, relation_id) + if not relation: + return - return self.relation.data[self.relation.app].get("tls-ca") + label = self._generate_secret_label(relation_name, relation_id, group_mapping) + secret_uri = relation.data[self.component].get(self._generate_secret_field_name(), None) + # Fetching the secret with fallback to URI (in case label is not yet known) + # Label would we "stuck" on the secret in case it is found + secret = self.secrets.get(label, secret_uri) -# Database related events and fields + # Either app scope secret with leader executing, or unit scope secret + leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() + if secret_uri and secret and leader_or_unit_scope: + # Databag reference to the secret URI can be removed, now that it's labelled + relation.data[self.component].pop(self._generate_secret_field_name(), None) + return secret + def _get_group_secret_contents( + self, + relation: Relation, + group: SecretGroup, + secret_fields: Optional[Union[Set[str], List[str]]] = None, + ) -> Dict[str, str]: + """Helper function to retrieve collective, requested contents of a secret.""" + result = super()._get_group_secret_contents(relation, group, secret_fields) + if not self.deleted_label: + return result + return {key: result[key] for key in result if result[key] != self.deleted_label} -class DatabaseProvidesEvent(RelationEvent): - """Base class for database events.""" + def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: + """For Rolling Upgrades -- when moving from databag to secrets usage. 
- @property - def database(self) -> Optional[str]: - """Returns the database that was requested.""" - if not self.relation.app: - return None + Practically what happens here is to remove stuff from the databag that is + to be stored in secrets. + """ + if not self.secret_fields: + return - return self.relation.data[self.relation.app].get("database") + secret_fields_passed = set(self.secret_fields) & set(fields) + for field in secret_fields_passed: + if self._fetch_relation_data_without_secrets(self.component, relation, [field]): + self._delete_relation_data_without_secrets(self.component, relation, [field]) + + def _fetch_specific_relation_data( + self, relation: Relation, fields: Optional[List[str]] + ) -> Dict[str, str]: + """Fetch data available (directily or indirectly -- i.e. secrets) from the relation.""" + return self._fetch_relation_data_with_secrets( + self.component, self.secret_fields, relation, fields + ) + + def _fetch_my_specific_relation_data( + self, relation: Relation, fields: Optional[List[str]] + ) -> Dict[str, str]: + """Fetch data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + return self._fetch_relation_data_with_secrets( + self.component, self.secret_fields, relation, fields + ) + + def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: + """Update data available (directily or indirectly -- i.e. 
secrets) from the relation for owner/this_app.""" + self._remove_secret_from_databag(relation, list(data.keys())) + _, normal_fields = self._process_secret_fields( + relation, + self.secret_fields, + list(data), + self._add_or_update_relation_secrets, + data=data, + uri_to_databag=False, + ) + + normal_content = {k: v for k, v in data.items() if k in normal_fields} + self._update_relation_data_without_secrets(self.component, relation, normal_content) + + def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: + """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + if self.secret_fields and self.deleted_label: + current_data = self.fetch_my_relation_data([relation.id], fields) + if current_data is not None: + # Check if the secret we wanna delete actually exists + # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found') + if non_existent := (set(fields) & set(self.secret_fields)) - set( + current_data.get(relation.id, []) + ): + logger.error( + "Non-existing secret %s was attempted to be removed.", + ", ".join(non_existent), + ) + + _, normal_fields = self._process_secret_fields( + relation, + self.secret_fields, + fields, + self._update_relation_secret, + data={field: self.deleted_label for field in fields}, + ) + else: + _, normal_fields = self._process_secret_fields( + relation, self.secret_fields, fields, self._delete_relation_secret, fields=fields + ) + self._delete_relation_data_without_secrets(self.component, relation, list(normal_fields)) + + def fetch_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Dict[int, Dict[str, str]]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + def 
fetch_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + # Public functions -- inherited + + fetch_my_relation_data = Data.fetch_my_relation_data + fetch_my_relation_field = Data.fetch_my_relation_field + + +class DataPeerEventHandlers(EventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + +class DataPeer(DataPeerData, DataPeerEventHandlers): + """Represents peer relations.""" + + def __init__( + self, + charm, + relation_name: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + extra_user_roles, + additional_secret_fields, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +class DataPeerUnitData(DataPeerData): + """Unit data abstraction representation.""" + + SCOPE = Scope.UNIT + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class DataPeerUnit(DataPeerUnitData, DataPeerEventHandlers): + """Unit databag representation.""" + + def __init__( + self, + charm, + relation_name: str, + extra_user_roles: Optional[str] = None, + 
additional_secret_fields: Optional[List[str]] = [], + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + extra_user_roles, + additional_secret_fields, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +class DataPeerOtherUnitData(DataPeerUnitData): + """Unit data abstraction representation.""" + + def __init__(self, unit: Unit, *args, **kwargs): + super().__init__(*args, **kwargs) + self.local_unit = unit + self.component = unit + + +class DataPeerOtherUnitEventHandlers(DataPeerEventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: DataPeerUnitData): + """Manager of base client relations.""" + unique_key = f"{relation_data.relation_name}-{relation_data.local_unit.name}" + super().__init__(charm, relation_data, unique_key=unique_key) + + +class DataPeerOtherUnit(DataPeerOtherUnitData, DataPeerOtherUnitEventHandlers): + """Unit databag representation for another unit than the executor.""" + + def __init__( + self, + unit: Unit, + charm: CharmBase, + relation_name: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + extra_user_roles, + additional_secret_fields, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +# General events + + +class ExtraRoleEvent(RelationEvent): + """Base class for data events.""" + + @property + def extra_user_roles(self) -> Optional[str]: + """Returns the extra user roles that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("extra-user-roles") + + +class 
RelationEventWithSecret(RelationEvent): + """Base class for Relation Events that need to handle secrets.""" + + @property + def _secrets(self) -> dict: + """Caching secrets to avoid fetching them each time a field is referrd. + + DON'T USE the encapsulated helper variable outside of this function + """ + if not hasattr(self, "_cached_secrets"): + self._cached_secrets = {} + return self._cached_secrets + + def _get_secret(self, group) -> Optional[Dict[str, str]]: + """Retrieveing secrets.""" + if not self.app: + return + if not self._secrets.get(group): + self._secrets[group] = None + secret_field = f"{PROV_SECRET_PREFIX}{group}" + if secret_uri := self.relation.data[self.app].get(secret_field): + secret = self.framework.model.get_secret(id=secret_uri) + self._secrets[group] = secret.get_content() + return self._secrets[group] + + @property + def secrets_enabled(self): + """Is this Juju version allowing for Secrets usage?""" + return JujuVersion.from_environ().has_secrets + + +class AuthenticationEvent(RelationEventWithSecret): + """Base class for authentication fields for events. 
+ + The amount of logic added here is not ideal -- but this was the only way to preserve + the interface when moving to Juju Secrets + """ + + @property + def username(self) -> Optional[str]: + """Returns the created username.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("username") + + return self.relation.data[self.relation.app].get("username") + + @property + def password(self) -> Optional[str]: + """Returns the password for the created user.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("password") + + return self.relation.data[self.relation.app].get("password") + + @property + def tls(self) -> Optional[str]: + """Returns whether TLS is configured.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("tls") + if secret: + return secret.get("tls") + + return self.relation.data[self.relation.app].get("tls") + + @property + def tls_ca(self) -> Optional[str]: + """Returns TLS CA.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("tls") + if secret: + return secret.get("tls-ca") + + return self.relation.data[self.relation.app].get("tls-ca") + + +# Database related events and fields + + +class DatabaseProvidesEvent(RelationEvent): + """Base class for database events.""" + + @property + def database(self) -> Optional[str]: + """Returns the database that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("database") class DatabaseRequestedEvent(DatabaseProvidesEvent, ExtraRoleEvent): """Event emitted when a new database is requested for use on this relation.""" + @property + def external_node_connectivity(self) -> bool: + """Returns the requested external_node_connectivity field.""" + if not self.relation.app: + 
return False + + return ( + self.relation.data[self.relation.app].get("external-node-connectivity", "false") + == "true" + ) + class DatabaseProvidesEvents(CharmEvents): """Database events. @@ -1569,7 +1968,7 @@ class DatabaseProvidesEvents(CharmEvents): database_requested = EventSource(DatabaseRequestedEvent) -class DatabaseRequiresEvent(RelationEvent): +class DatabaseRequiresEvent(RelationEventWithSecret): """Base class for database events.""" @property @@ -1624,6 +2023,11 @@ def uris(self) -> Optional[str]: if not self.relation.app: return None + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("uris") + return self.relation.data[self.relation.app].get("uris") @property @@ -1664,28 +2068,11 @@ class DatabaseRequiresEvents(CharmEvents): # Database Provider and Requires -class DatabaseProvides(DataProvides): - """Provider-side of the database relations.""" - - on = DatabaseProvidesEvents() # pyright: ignore [reportGeneralTypeIssues] +class DatabaseProviderData(ProviderData): + """Provider-side data of the database relations.""" - def __init__(self, charm: CharmBase, relation_name: str) -> None: - super().__init__(charm, relation_name) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation has changed.""" - # Leader only - if not self.local_unit.is_leader(): - return - # Check which data has changed to emit customs events. - diff = self._diff(event) - - # Emit a database requested event if the setup key (database name and optional - # extra user roles) was added to the relation databag by the application. - if "database" in diff.added: - getattr(self.on, "database_requested").emit( - event.relation, app=event.app, unit=event.unit - ) + def __init__(self, model: Model, relation_name: str) -> None: + super().__init__(model, relation_name) def set_database(self, relation_id: int, database_name: str) -> None: """Set database name. 
@@ -1752,44 +2139,147 @@ def set_uris(self, relation_id: int, uris: str) -> None: def set_version(self, relation_id: int, version: str) -> None: """Set the database version in the application relation databag. - Args: - relation_id: the identifier for a particular relation. - version: database version. - """ - self.update_relation_data(relation_id, {"version": version}) + Args: + relation_id: the identifier for a particular relation. + version: database version. + """ + self.update_relation_data(relation_id, {"version": version}) + + +class DatabaseProviderEventHandlers(EventHandlers): + """Provider-side of the database relation handlers.""" + + on = DatabaseProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, charm: CharmBase, relation_data: DatabaseProviderData, unique_key: str = "" + ): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + # Just to calm down pyright, it can't parse that the same type is being used in the super() call above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Emit a database requested event if the setup key (database name and optional + # extra user roles) was added to the relation databag by the application. 
+ if "database" in diff.added: + getattr(self.on, "database_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + +class DatabaseProvides(DatabaseProviderData, DatabaseProviderEventHandlers): + """Provider-side of the database relations.""" + def __init__(self, charm: CharmBase, relation_name: str) -> None: + DatabaseProviderData.__init__(self, charm.model, relation_name) + DatabaseProviderEventHandlers.__init__(self, charm, self) -class DatabaseRequires(DataRequires): - """Requires-side of the database relation.""" - on = DatabaseRequiresEvents() # pyright: ignore [reportGeneralTypeIssues] +class DatabaseRequirerData(RequirerData): + """Requirer-side of the database relation.""" def __init__( self, - charm, + model: Model, relation_name: str, database_name: str, extra_user_roles: Optional[str] = None, relations_aliases: Optional[List[str]] = None, additional_secret_fields: Optional[List[str]] = [], + external_node_connectivity: bool = False, ): """Manager of database client relations.""" - super().__init__(charm, relation_name, extra_user_roles, additional_secret_fields) + super().__init__(model, relation_name, extra_user_roles, additional_secret_fields) self.database = database_name self.relations_aliases = relations_aliases + self.external_node_connectivity = external_node_connectivity + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: optional relation index to check the database + (default: 0 - first relation). + + PostgreSQL only. + """ + # Psycopg 3 is imported locally to avoid the need of its package installation + # when relating to a database charm other than PostgreSQL. + import psycopg + + # Return False if no relation is established. 
+ if len(self.relations) == 0: + return False + + relation_id = self.relations[relation_index].id + host = self.fetch_relation_field(relation_id, "endpoints") + + # Return False if there is no endpoint available. + if host is None: + return False + + host = host.split(":")[0] + + content = self.fetch_relation_data([relation_id], ["username", "password"]).get( + relation_id, {} + ) + user = content.get("username") + password = content.get("password") + + connection_string = ( + f"host='{host}' dbname='{self.database}' user='{user}' password='{password}'" + ) + try: + with psycopg.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", str(e) + ) + return False + + +class DatabaseRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the relation.""" + + on = DatabaseRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, charm: CharmBase, relation_data: DatabaseRequirerData, unique_key: str = "" + ): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data # Define custom event names for each alias. - if relations_aliases: + if self.relation_data.relations_aliases: # Ensure the number of aliases does not exceed the maximum # of connections allowed in the specific relation. 
- relation_connection_limit = self.charm.meta.requires[relation_name].limit - if len(relations_aliases) != relation_connection_limit: + relation_connection_limit = self.charm.meta.requires[ + self.relation_data.relation_name + ].limit + if len(self.relation_data.relations_aliases) != relation_connection_limit: raise ValueError( f"The number of aliases must match the maximum number of connections allowed in the relation. " - f"Expected {relation_connection_limit}, got {len(relations_aliases)}" + f"Expected {relation_connection_limit}, got {len(self.relation_data.relations_aliases)}" ) - for relation_alias in relations_aliases: + if self.relation_data.relations_aliases: + for relation_alias in self.relation_data.relations_aliases: self.on.define_event(f"{relation_alias}_database_created", DatabaseCreatedEvent) self.on.define_event( f"{relation_alias}_endpoints_changed", DatabaseEndpointsChangedEvent @@ -1812,32 +2302,32 @@ def _assign_relation_alias(self, relation_id: int) -> None: relation_id: the identifier for a particular relation. """ # If no aliases were provided, return immediately. - if not self.relations_aliases: + if not self.relation_data.relations_aliases: return # Return if an alias was already assigned to this relation # (like when there are more than one unit joining the relation). - relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation and relation.data[self.local_unit].get("alias"): + relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id) + if relation and relation.data[self.relation_data.local_unit].get("alias"): return # Retrieve the available aliases (the ones that weren't assigned to any relation). 
- available_aliases = self.relations_aliases[:] - for relation in self.charm.model.relations[self.relation_name]: - alias = relation.data[self.local_unit].get("alias") + available_aliases = self.relation_data.relations_aliases[:] + for relation in self.charm.model.relations[self.relation_data.relation_name]: + alias = relation.data[self.relation_data.local_unit].get("alias") if alias: logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) available_aliases.remove(alias) # Set the alias in the unit relation databag of the specific relation. - relation = self.charm.model.get_relation(self.relation_name, relation_id) + relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id) if relation: - relation.data[self.local_unit].update({"alias": available_aliases[0]}) + relation.data[self.relation_data.local_unit].update({"alias": available_aliases[0]}) # We need to set relation alias also on the application level so, # it will be accessible in show-unit juju command, executed for a consumer application unit - if self.local_unit.is_leader(): - self.update_relation_data(relation_id, {"alias": available_aliases[0]}) + if self.relation_data.local_unit.is_leader(): + self.relation_data.update_relation_data(relation_id, {"alias": available_aliases[0]}) def _emit_aliased_event(self, event: RelationChangedEvent, event_name: str) -> None: """Emit an aliased event to a particular relation if it has an alias. @@ -1861,60 +2351,11 @@ def _get_relation_alias(self, relation_id: int) -> Optional[str]: Returns: the relation alias or None if the relation was not found. 
""" - for relation in self.charm.model.relations[self.relation_name]: + for relation in self.charm.model.relations[self.relation_data.relation_name]: if relation.id == relation_id: - return relation.data[self.local_unit].get("alias") + return relation.data[self.relation_data.local_unit].get("alias") return None - def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: - """Returns whether a plugin is enabled in the database. - - Args: - plugin: name of the plugin to check. - relation_index: optional relation index to check the database - (default: 0 - first relation). - - PostgreSQL only. - """ - # Psycopg 3 is imported locally to avoid the need of its package installation - # when relating to a database charm other than PostgreSQL. - import psycopg - - # Return False if no relation is established. - if len(self.relations) == 0: - return False - - relation_id = self.relations[relation_index].id - host = self.fetch_relation_field(relation_id, "endpoints") - - # Return False if there is no endpoint available. 
- if host is None: - return False - - host = host.split(":")[0] - - content = self.fetch_relation_data([relation_id], ["username", "password"]).get( - relation_id, {} - ) - user = content.get("username") - password = content.get("password") - - connection_string = ( - f"host='{host}' dbname='{self.database}' user='{user}' password='{password}'" - ) - try: - with psycopg.connect(connection_string) as connection: - with connection.cursor() as cursor: - cursor.execute( - "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) - ) - return cursor.fetchone() is not None - except psycopg.Error as e: - logger.exception( - f"failed to check whether {plugin} plugin is enabled in the database: %s", str(e) - ) - return False - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: """Event emitted when the database relation is created.""" super()._on_relation_created_event(event) @@ -1924,19 +2365,19 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: # Sets both database and extra user roles in the relation # if the roles are provided. Otherwise, sets only the database. 
- if not self.local_unit.is_leader(): + if not self.relation_data.local_unit.is_leader(): return - if self.extra_user_roles: - self.update_relation_data( - event.relation.id, - { - "database": self.database, - "extra-user-roles": self.extra_user_roles, - }, - ) - else: - self.update_relation_data(event.relation.id, {"database": self.database}) + event_data = {"database": self.relation_data.database} + + if self.relation_data.extra_user_roles: + event_data["extra-user-roles"] = self.relation_data.extra_user_roles + + # set external-node-connectivity field + if self.relation_data.external_node_connectivity: + event_data["external-node-connectivity"] = "true" + + self.relation_data.update_relation_data(event.relation.id, event_data) def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the database relation has changed.""" @@ -1944,12 +2385,12 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: diff = self._diff(event) # Register all new secrets with their labels - if any(newval for newval in diff.added if self._is_secret_field(newval)): - self._register_secrets_to_relation(event.relation, diff.added) + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) # Check if the database is created # (the database charm shared the credentials). 
- secret_field_user = self._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -1995,6 +2436,32 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: self._emit_aliased_event(event, "read_only_endpoints_changed") +class DatabaseRequires(DatabaseRequirerData, DatabaseRequirerEventHandlers): + """Provider-side of the database relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + database_name: str, + extra_user_roles: Optional[str] = None, + relations_aliases: Optional[List[str]] = None, + additional_secret_fields: Optional[List[str]] = [], + external_node_connectivity: bool = False, + ): + DatabaseRequirerData.__init__( + self, + charm.model, + relation_name, + database_name, + extra_user_roles, + relations_aliases, + additional_secret_fields, + external_node_connectivity, + ) + DatabaseRequirerEventHandlers.__init__(self, charm, self) + + # Kafka related events @@ -2088,29 +2555,11 @@ class KafkaRequiresEvents(CharmEvents): # Kafka Provides and Requires -class KafkaProvides(DataProvides): +class KafkaProvidesData(ProviderData): """Provider-side of the Kafka relation.""" - on = KafkaProvidesEvents() # pyright: ignore [reportGeneralTypeIssues] - - def __init__(self, charm: CharmBase, relation_name: str) -> None: - super().__init__(charm, relation_name) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation has changed.""" - # Leader only - if not self.local_unit.is_leader(): - return - - # Check which data has changed to emit customs events. - diff = self._diff(event) - - # Emit a topic requested event if the setup key (topic name and optional - # extra user roles) was added to the relation databag by the application. 
- if "topic" in diff.added: - getattr(self.on, "topic_requested").emit( - event.relation, app=event.app, unit=event.unit - ) + def __init__(self, model: Model, relation_name: str) -> None: + super().__init__(model, relation_name) def set_topic(self, relation_id: int, topic: str) -> None: """Set topic name in the application relation databag. @@ -2149,14 +2598,47 @@ def set_zookeeper_uris(self, relation_id: int, zookeeper_uris: str) -> None: self.update_relation_data(relation_id, {"zookeeper-uris": zookeeper_uris}) -class KafkaRequires(DataRequires): - """Requires-side of the Kafka relation.""" +class KafkaProvidesEventHandlers(EventHandlers): + """Provider-side of the Kafka relation.""" + + on = KafkaProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaProvidesData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Leader only + if not self.relation_data.local_unit.is_leader(): + return - on = KafkaRequiresEvents() # pyright: ignore [reportGeneralTypeIssues] + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Emit a topic requested event if the setup key (topic name and optional + # extra user roles) was added to the relation databag by the application. 
+ if "topic" in diff.added: + getattr(self.on, "topic_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + +class KafkaProvides(KafkaProvidesData, KafkaProvidesEventHandlers): + """Provider-side of the Kafka relation.""" + + def __init__(self, charm: CharmBase, relation_name: str) -> None: + KafkaProvidesData.__init__(self, charm.model, relation_name) + KafkaProvidesEventHandlers.__init__(self, charm, self) + + +class KafkaRequiresData(RequirerData): + """Requirer-side of the Kafka relation.""" def __init__( self, - charm, + model: Model, relation_name: str, topic: str, extra_user_roles: Optional[str] = None, @@ -2164,9 +2646,7 @@ def __init__( additional_secret_fields: Optional[List[str]] = [], ): """Manager of Kafka client relations.""" - # super().__init__(charm, relation_name) - super().__init__(charm, relation_name, extra_user_roles, additional_secret_fields) - self.charm = charm + super().__init__(model, relation_name, extra_user_roles, additional_secret_fields) self.topic = topic self.consumer_group_prefix = consumer_group_prefix or "" @@ -2182,11 +2662,22 @@ def topic(self, value): raise ValueError(f"Error on topic '{value}', cannot be a wildcard.") self._topic = value + +class KafkaRequiresEventHandlers(RequirerEventHandlers): + """Requires-side of the Kafka relation.""" + + on = KafkaRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaRequiresData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: """Event emitted when the Kafka relation is created.""" super()._on_relation_created_event(event) - if not self.local_unit.is_leader(): + if not self.relation_data.local_unit.is_leader(): return # Sets topic, extra user roles, and "consumer-group-prefix" in the relation @@ -2195,7 +2686,7 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: for f in ["consumer-group-prefix", "extra-user-roles", "topic"] } - self.update_relation_data(event.relation.id, relation_data) + self.relation_data.update_relation_data(event.relation.id, relation_data) def _on_secret_changed_event(self, event: SecretChangedEvent): """Event notifying about a new value of a secret.""" @@ -2210,10 +2701,10 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: # (the Kafka charm shared the credentials). # Register all new secrets with their labels - if any(newval for newval in diff.added if self._is_secret_field(newval)): - self._register_secrets_to_relation(event.relation, diff.added) + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -2236,6 +2727,30 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: return +class KafkaRequires(KafkaRequiresData, KafkaRequiresEventHandlers): + """Provider-side of the Kafka relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + topic: str, + extra_user_roles: Optional[str] = None, + consumer_group_prefix: Optional[str] = None, + 
additional_secret_fields: Optional[List[str]] = [], + ) -> None: + KafkaRequiresData.__init__( + self, + charm.model, + relation_name, + topic, + extra_user_roles, + consumer_group_prefix, + additional_secret_fields, + ) + KafkaRequiresEventHandlers.__init__(self, charm, self) + + # Opensearch related events @@ -2286,28 +2801,11 @@ class OpenSearchRequiresEvents(CharmEvents): # OpenSearch Provides and Requires Objects -class OpenSearchProvides(DataProvides): +class OpenSearchProvidesData(ProviderData): """Provider-side of the OpenSearch relation.""" - on = OpenSearchProvidesEvents() # pyright: ignore[reportGeneralTypeIssues] - - def __init__(self, charm: CharmBase, relation_name: str) -> None: - super().__init__(charm, relation_name) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation has changed.""" - # Leader only - if not self.local_unit.is_leader(): - return - # Check which data has changed to emit customs events. - diff = self._diff(event) - - # Emit an index requested event if the setup key (index name and optional extra user roles) - # have been added to the relation databag by the application. - if "index" in diff.added: - getattr(self.on, "index_requested").emit( - event.relation, app=event.app, unit=event.unit - ) + def __init__(self, model: Model, relation_name: str) -> None: + super().__init__(model, relation_name) def set_index(self, relation_id: int, index: str) -> None: """Set the index in the application relation databag. 
@@ -2339,45 +2837,87 @@ def set_version(self, relation_id: int, version: str) -> None: self.update_relation_data(relation_id, {"version": version}) -class OpenSearchRequires(DataRequires): - """Requires-side of the OpenSearch relation.""" +class OpenSearchProvidesEventHandlers(EventHandlers): + """Provider-side of the OpenSearch relation.""" + + on = OpenSearchProvidesEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchProvidesData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Emit an index requested event if the setup key (index name and optional extra user roles) + # have been added to the relation databag by the application. 
+ if "index" in diff.added: + getattr(self.on, "index_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + +class OpenSearchProvides(OpenSearchProvidesData, OpenSearchProvidesEventHandlers): + """Provider-side of the OpenSearch relation.""" + + def __init__(self, charm: CharmBase, relation_name: str) -> None: + OpenSearchProvidesData.__init__(self, charm.model, relation_name) + OpenSearchProvidesEventHandlers.__init__(self, charm, self) - on = OpenSearchRequiresEvents() # pyright: ignore[reportGeneralTypeIssues] + +class OpenSearchRequiresData(RequirerData): + """Requires data side of the OpenSearch relation.""" def __init__( self, - charm, + model: Model, relation_name: str, index: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], ): """Manager of OpenSearch client relations.""" - super().__init__(charm, relation_name, extra_user_roles, additional_secret_fields) - self.charm = charm + super().__init__(model, relation_name, extra_user_roles, additional_secret_fields) self.index = index + +class OpenSearchRequiresEventHandlers(RequirerEventHandlers): + """Requires events side of the OpenSearch relation.""" + + on = OpenSearchRequiresEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchRequiresData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: """Event emitted when the OpenSearch relation is created.""" super()._on_relation_created_event(event) - if not self.local_unit.is_leader(): + if not self.relation_data.local_unit.is_leader(): return # Sets both index and extra user roles in the relation if the roles are provided. # Otherwise, sets only the index. 
- data = {"index": self.index} - if self.extra_user_roles: - data["extra-user-roles"] = self.extra_user_roles + data = {"index": self.relation_data.index} + if self.relation_data.extra_user_roles: + data["extra-user-roles"] = self.relation_data.extra_user_roles - self.update_relation_data(event.relation.id, data) + self.relation_data.update_relation_data(event.relation.id, data) def _on_secret_changed_event(self, event: SecretChangedEvent): """Event notifying about a new value of a secret.""" if not event.secret.label: return - relation = self._relation_from_secret_label(event.secret.label) + relation = self.relation_data._relation_from_secret_label(event.secret.label) if not relation: logging.info( f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" @@ -2406,11 +2946,11 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: diff = self._diff(event) # Register all new secrets with their labels - if any(newval for newval in diff.added if self._is_secret_field(newval)): - self._register_secrets_to_relation(event.relation, diff.added) + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self._generate_secret_field_name(SecretGroup.USER) - secret_field_tls = self._generate_secret_field_name(SecretGroup.TLS) + secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SecretGroup.TLS) updates = {"username", "password", "tls", "tls-ca", secret_field_user, secret_field_tls} if len(set(diff._asdict().keys()) - updates) < len(diff): logger.info("authentication updated at: %s", datetime.now()) @@ -2440,3 +2980,25 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: event.relation, app=event.app, unit=event.unit ) # here check if this is the right design return + + +class 
OpenSearchRequires(OpenSearchRequiresData, OpenSearchRequiresEventHandlers): + """Requires-side of the OpenSearch relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + index: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ) -> None: + OpenSearchRequiresData.__init__( + self, + charm.model, + relation_name, + index, + extra_user_roles, + additional_secret_fields, + ) + OpenSearchRequiresEventHandlers.__init__(self, charm, self) diff --git a/lib/charms/grafana_agent/v0/cos_agent.py b/lib/charms/grafana_agent/v0/cos_agent.py new file mode 100644 index 00000000..870ba62a --- /dev/null +++ b/lib/charms/grafana_agent/v0/cos_agent.py @@ -0,0 +1,806 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +r"""## Overview. + +This library can be used to manage the cos_agent relation interface: + +- `COSAgentProvider`: Use in machine charms that need to have a workload's metrics + or logs scraped, or forward rule files or dashboards to Prometheus, Loki or Grafana through + the Grafana Agent machine charm. + +- `COSAgentConsumer`: Used in the Grafana Agent machine charm to manage the requirer side of + the `cos_agent` interface. + + +## COSAgentProvider Library Usage + +Grafana Agent machine Charmed Operator interacts with its clients using the cos_agent library. +Charms seeking to send telemetry, must do so using the `COSAgentProvider` object from +this charm library. + +Using the `COSAgentProvider` object only requires instantiating it, +typically in the `__init__` method of your charm (the one which sends telemetry). 
+ +The constructor of `COSAgentProvider` has only one required and nine optional parameters: + +```python + def __init__( + self, + charm: CharmType, + relation_name: str = DEFAULT_RELATION_NAME, + metrics_endpoints: Optional[List[_MetricsEndpointDict]] = None, + metrics_rules_dir: str = "./src/prometheus_alert_rules", + logs_rules_dir: str = "./src/loki_alert_rules", + recurse_rules_dirs: bool = False, + log_slots: Optional[List[str]] = None, + dashboard_dirs: Optional[List[str]] = None, + refresh_events: Optional[List] = None, + scrape_configs: Optional[Union[List[Dict], Callable]] = None, + ): +``` + +### Parameters + +- `charm`: The instance of the charm that instantiates `COSAgentProvider`, typically `self`. + +- `relation_name`: If your charmed operator uses a relation name other than `cos-agent` to use + the `cos_agent` interface, this is where you have to specify that. + +- `metrics_endpoints`: In this parameter you can specify the metrics endpoints that Grafana Agent + machine Charmed Operator will scrape. The configs of this list will be merged with the configs + from `scrape_configs`. + +- `metrics_rules_dir`: The directory in which the Charmed Operator stores its metrics alert rules + files. + +- `logs_rules_dir`: The directory in which the Charmed Operator stores its logs alert rules files. + +- `recurse_rules_dirs`: This parameters set whether Grafana Agent machine Charmed Operator has to + search alert rules files recursively in the previous two directories or not. + +- `log_slots`: Snap slots to connect to for scraping logs in the form ["snap-name:slot", ...]. + +- `dashboard_dirs`: List of directories where the dashboards are stored in the Charmed Operator. + +- `refresh_events`: List of events on which to refresh relation data. + +- `scrape_configs`: List of standard scrape_configs dicts or a callable that returns the list in + case the configs need to be generated dynamically. 
The contents of this list will be merged + with the configs from `metrics_endpoints`. + + +### Example 1 - Minimal instrumentation: + +In order to use this object the following should be in the `charm.py` file. + +```python +from charms.grafana_agent.v0.cos_agent import COSAgentProvider +... +class TelemetryProviderCharm(CharmBase): + def __init__(self, *args): + ... + self._grafana_agent = COSAgentProvider(self) +``` + +### Example 2 - Full instrumentation: + +In order to use this object the following should be in the `charm.py` file. + +```python +from charms.grafana_agent.v0.cos_agent import COSAgentProvider +... +class TelemetryProviderCharm(CharmBase): + def __init__(self, *args): + ... + self._grafana_agent = COSAgentProvider( + self, + relation_name="custom-cos-agent", + metrics_endpoints=[ + # specify "path" and "port" to scrape from localhost + {"path": "/metrics", "port": 9000}, + {"path": "/metrics", "port": 9001}, + {"path": "/metrics", "port": 9002}, + ], + metrics_rules_dir="./src/alert_rules/prometheus", + logs_rules_dir="./src/alert_rules/loki", + recursive_rules_dir=True, + log_slots=["my-app:slot"], + dashboard_dirs=["./src/dashboards_1", "./src/dashboards_2"], + refresh_events=["update-status", "upgrade-charm"], + scrape_configs=[ + { + "job_name": "custom_job", + "metrics_path": "/metrics", + "authorization": {"credentials": "bearer-token"}, + "static_configs": [ + { + "targets": ["localhost:9003"]}, + "labels": {"key": "value"}, + }, + ], + }, + ] + ) +``` + +### Example 3 - Dynamic scrape configs generation: + +Pass a function to the `scrape_configs` to decouple the generation of the configs +from the instantiation of the COSAgentProvider object. + +```python +from charms.grafana_agent.v0.cos_agent import COSAgentProvider +... 
+ +class TelemetryProviderCharm(CharmBase): + def generate_scrape_configs(self): + return [ + { + "job_name": "custom", + "metrics_path": "/metrics", + "static_configs": [{"targets": ["localhost:9000"]}], + }, + ] + + def __init__(self, *args): + ... + self._grafana_agent = COSAgentProvider( + self, + scrape_configs=self.generate_scrape_configs, + ) +``` + +## COSAgentConsumer Library Usage + +This object may be used by any Charmed Operator which gathers telemetry data by +implementing the consumer side of the `cos_agent` interface. +For instance Grafana Agent machine Charmed Operator. + +For this purpose the charm needs to instantiate the `COSAgentConsumer` object with one mandatory +and two optional arguments. + +### Parameters + +- `charm`: A reference to the parent (Grafana Agent machine) charm. + +- `relation_name`: The name of the relation that the charm uses to interact + with its clients that provides telemetry data using the `COSAgentProvider` object. + + If provided, this relation name must match a provided relation in metadata.yaml with the + `cos_agent` interface. + The default value of this argument is "cos-agent". + +- `refresh_events`: List of events on which to refresh relation data. + + +### Example 1 - Minimal instrumentation: + +In order to use this object the following should be in the `charm.py` file. + +```python +from charms.grafana_agent.v0.cos_agent import COSAgentConsumer +... +class GrafanaAgentMachineCharm(GrafanaAgentCharm) + def __init__(self, *args): + ... + self._cos = COSAgentRequirer(self) +``` + + +### Example 2 - Full instrumentation: + +In order to use this object the following should be in the `charm.py` file. + +```python +from charms.grafana_agent.v0.cos_agent import COSAgentConsumer +... +class GrafanaAgentMachineCharm(GrafanaAgentCharm) + def __init__(self, *args): + ... 
+ self._cos = COSAgentRequirer( + self, + relation_name="cos-agent-consumer", + refresh_events=["update-status", "upgrade-charm"], + ) +``` +""" + +import json +import logging +from collections import namedtuple +from itertools import chain +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, List, Optional, Set, Tuple, Union + +import pydantic +from cosl import GrafanaDashboard, JujuTopology +from cosl.rules import AlertRules +from ops.charm import RelationChangedEvent +from ops.framework import EventBase, EventSource, Object, ObjectEvents +from ops.model import Relation +from ops.testing import CharmType + +if TYPE_CHECKING: + try: + from typing import TypedDict + + class _MetricsEndpointDict(TypedDict): + path: str + port: int + + except ModuleNotFoundError: + _MetricsEndpointDict = Dict # pyright: ignore + +LIBID = "dc15fa84cef84ce58155fb84f6c6213a" +LIBAPI = 0 +LIBPATCH = 8 + +PYDEPS = ["cosl", "pydantic < 2"] + +DEFAULT_RELATION_NAME = "cos-agent" +DEFAULT_PEER_RELATION_NAME = "peers" +DEFAULT_SCRAPE_CONFIG = { + "static_configs": [{"targets": ["localhost:80"]}], + "metrics_path": "/metrics", +} + +logger = logging.getLogger(__name__) +SnapEndpoint = namedtuple("SnapEndpoint", "owner, name") + + +class CosAgentProviderUnitData(pydantic.BaseModel): + """Unit databag model for `cos-agent` relation.""" + + # The following entries are the same for all units of the same principal. + # Note that the same grafana agent subordinate may be related to several apps. + # this needs to make its way to the gagent leader + metrics_alert_rules: dict + log_alert_rules: dict + dashboards: List[GrafanaDashboard] + # subordinate is no longer used but we should keep it until we bump the library to ensure + # we don't break compatibility. + subordinate: Optional[bool] = None + + # The following entries may vary across units of the same principal app. 
+ # this data does not need to be forwarded to the gagent leader + metrics_scrape_jobs: List[Dict] + log_slots: List[str] + + # when this whole datastructure is dumped into a databag, it will be nested under this key. + # while not strictly necessary (we could have it 'flattened out' into the databag), + # this simplifies working with the model. + KEY: ClassVar[str] = "config" + + +class CosAgentPeersUnitData(pydantic.BaseModel): + """Unit databag model for `peers` cos-agent machine charm peer relation.""" + + # We need the principal unit name and relation metadata to be able to render identifiers + # (e.g. topology) on the leader side, after all the data moves into peer data (the grafana + # agent leader can only see its own principal, because it is a subordinate charm). + unit_name: str + relation_id: str + relation_name: str + + # The only data that is forwarded to the leader is data that needs to go into the app databags + # of the outgoing o11y relations. + metrics_alert_rules: Optional[dict] + log_alert_rules: Optional[dict] + dashboards: Optional[List[GrafanaDashboard]] + + # when this whole datastructure is dumped into a databag, it will be nested under this key. + # while not strictly necessary (we could have it 'flattened out' into the databag), + # this simplifies working with the model. + KEY: ClassVar[str] = "config" + + @property + def app_name(self) -> str: + """Parse out the app name from the unit name. + + TODO: Switch to using `model_post_init` when pydantic v2 is released? 
+ https://github.com/pydantic/pydantic/issues/1729#issuecomment-1300576214 + """ + return self.unit_name.split("/")[0] + + +class COSAgentProvider(Object): + """Integration endpoint wrapper for the provider side of the cos_agent interface.""" + + def __init__( + self, + charm: CharmType, + relation_name: str = DEFAULT_RELATION_NAME, + metrics_endpoints: Optional[List["_MetricsEndpointDict"]] = None, + metrics_rules_dir: str = "./src/prometheus_alert_rules", + logs_rules_dir: str = "./src/loki_alert_rules", + recurse_rules_dirs: bool = False, + log_slots: Optional[List[str]] = None, + dashboard_dirs: Optional[List[str]] = None, + refresh_events: Optional[List] = None, + *, + scrape_configs: Optional[Union[List[dict], Callable]] = None, + ): + """Create a COSAgentProvider instance. + + Args: + charm: The `CharmBase` instance that is instantiating this object. + relation_name: The name of the relation to communicate over. + metrics_endpoints: List of endpoints in the form [{"path": path, "port": port}, ...]. + This argument is a simplified form of the `scrape_configs`. + The contents of this list will be merged with the contents of `scrape_configs`. + metrics_rules_dir: Directory where the metrics rules are stored. + logs_rules_dir: Directory where the logs rules are stored. + recurse_rules_dirs: Whether to recurse into rule paths. + log_slots: Snap slots to connect to for scraping logs + in the form ["snap-name:slot", ...]. + dashboard_dirs: Directory where the dashboards are stored. + refresh_events: List of events on which to refresh relation data. + scrape_configs: List of standard scrape_configs dicts or a callable + that returns the list in case the configs need to be generated dynamically. + The contents of this list will be merged with the contents of `metrics_endpoints`. 
+ """ + super().__init__(charm, relation_name) + dashboard_dirs = dashboard_dirs or ["./src/grafana_dashboards"] + + self._charm = charm + self._relation_name = relation_name + self._metrics_endpoints = metrics_endpoints or [] + self._scrape_configs = scrape_configs or [] + self._metrics_rules = metrics_rules_dir + self._logs_rules = logs_rules_dir + self._recursive = recurse_rules_dirs + self._log_slots = log_slots or [] + self._dashboard_dirs = dashboard_dirs + self._refresh_events = refresh_events or [self._charm.on.config_changed] + + events = self._charm.on[relation_name] + self.framework.observe(events.relation_joined, self._on_refresh) + self.framework.observe(events.relation_changed, self._on_refresh) + for event in self._refresh_events: + self.framework.observe(event, self._on_refresh) + + def _on_refresh(self, event): + """Trigger the class to update relation data.""" + relations = self._charm.model.relations[self._relation_name] + + for relation in relations: + # Before a principal is related to the grafana-agent subordinate, we'd get + # ModelError: ERROR cannot read relation settings: unit "zk/2": settings not found + # Add a guard to make sure it doesn't happen. + if relation.data and self._charm.unit in relation.data: + # Subordinate relations can communicate only over unit data. + try: + data = CosAgentProviderUnitData( + metrics_alert_rules=self._metrics_alert_rules, + log_alert_rules=self._log_alert_rules, + dashboards=self._dashboards, + metrics_scrape_jobs=self._scrape_jobs, + log_slots=self._log_slots, + ) + relation.data[self._charm.unit][data.KEY] = data.json() + except ( + pydantic.ValidationError, + json.decoder.JSONDecodeError, + ) as e: + logger.error("Invalid relation data provided: %s", e) + + @property + def _scrape_jobs(self) -> List[Dict]: + """Return a prometheus_scrape-like data structure for jobs. 
+ + https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config + """ + if callable(self._scrape_configs): + scrape_configs = self._scrape_configs() + else: + # Create a copy of the user scrape_configs, since we will mutate this object + scrape_configs = self._scrape_configs.copy() + + # Convert "metrics_endpoints" to standard scrape_configs, and add them in + for endpoint in self._metrics_endpoints: + scrape_configs.append( + { + "metrics_path": endpoint["path"], + "static_configs": [{"targets": [f"localhost:{endpoint['port']}"]}], + } + ) + + scrape_configs = scrape_configs or [DEFAULT_SCRAPE_CONFIG] + + # Augment job name to include the app name and a unique id (index) + for idx, scrape_config in enumerate(scrape_configs): + scrape_config["job_name"] = "_".join( + [self._charm.app.name, str(idx), scrape_config.get("job_name", "default")] + ) + + return scrape_configs + + @property + def _metrics_alert_rules(self) -> Dict: + """Use (for now) the prometheus_scrape AlertRules to initialize this.""" + alert_rules = AlertRules( + query_type="promql", topology=JujuTopology.from_charm(self._charm) + ) + alert_rules.add_path(self._metrics_rules, recursive=self._recursive) + return alert_rules.as_dict() + + @property + def _log_alert_rules(self) -> Dict: + """Use (for now) the loki_push_api AlertRules to initialize this.""" + alert_rules = AlertRules(query_type="logql", topology=JujuTopology.from_charm(self._charm)) + alert_rules.add_path(self._logs_rules, recursive=self._recursive) + return alert_rules.as_dict() + + @property + def _dashboards(self) -> List[GrafanaDashboard]: + dashboards: List[GrafanaDashboard] = [] + for d in self._dashboard_dirs: + for path in Path(d).glob("*"): + dashboard = GrafanaDashboard._serialize(path.read_bytes()) + dashboards.append(dashboard) + return dashboards + + +class COSAgentDataChanged(EventBase): + """Event emitted by `COSAgentRequirer` when relation data changes.""" + + +class 
COSAgentValidationError(EventBase): + """Event emitted by `COSAgentRequirer` when there is an error in the relation data.""" + + def __init__(self, handle, message: str = ""): + super().__init__(handle) + self.message = message + + def snapshot(self) -> Dict: + """Save COSAgentValidationError source information.""" + return {"message": self.message} + + def restore(self, snapshot): + """Restore COSAgentValidationError source information.""" + self.message = snapshot["message"] + + +class COSAgentRequirerEvents(ObjectEvents): + """`COSAgentRequirer` events.""" + + data_changed = EventSource(COSAgentDataChanged) + validation_error = EventSource(COSAgentValidationError) + + +class COSAgentRequirer(Object): + """Integration endpoint wrapper for the Requirer side of the cos_agent interface.""" + + on = COSAgentRequirerEvents() # pyright: ignore + + def __init__( + self, + charm: CharmType, + *, + relation_name: str = DEFAULT_RELATION_NAME, + peer_relation_name: str = DEFAULT_PEER_RELATION_NAME, + refresh_events: Optional[List[str]] = None, + ): + """Create a COSAgentRequirer instance. + + Args: + charm: The `CharmBase` instance that is instantiating this object. + relation_name: The name of the relation to communicate over. + peer_relation_name: The name of the peer relation to communicate over. + refresh_events: List of events on which to refresh relation data. + """ + super().__init__(charm, relation_name) + self._charm = charm + self._relation_name = relation_name + self._peer_relation_name = peer_relation_name + self._refresh_events = refresh_events or [self._charm.on.config_changed] + + events = self._charm.on[relation_name] + self.framework.observe( + events.relation_joined, self._on_relation_data_changed + ) # TODO: do we need this? 
+ self.framework.observe(events.relation_changed, self._on_relation_data_changed) + for event in self._refresh_events: + self.framework.observe(event, self.trigger_refresh) # pyright: ignore + + # Peer relation events + # A peer relation is needed as it is the only mechanism for exchanging data across + # subordinate units. + # self.framework.observe( + # self.on[self._peer_relation_name].relation_joined, self._on_peer_relation_joined + # ) + peer_events = self._charm.on[peer_relation_name] + self.framework.observe(peer_events.relation_changed, self._on_peer_relation_changed) + + @property + def peer_relation(self) -> Optional["Relation"]: + """Helper function for obtaining the peer relation object. + + Returns: peer relation object + (NOTE: would return None if called too early, e.g. during install). + """ + return self.model.get_relation(self._peer_relation_name) + + def _on_peer_relation_changed(self, _): + # Peer data is used for forwarding data from principal units to the grafana agent + # subordinate leader, for updating the app data of the outgoing o11y relations. + if self._charm.unit.is_leader(): + self.on.data_changed.emit() # pyright: ignore + + def _on_relation_data_changed(self, event: RelationChangedEvent): + # Peer data is the only means of communication between subordinate units. 
+ if not self.peer_relation: + event.defer() + return + + cos_agent_relation = event.relation + if not event.unit or not cos_agent_relation.data.get(event.unit): + return + principal_unit = event.unit + + # Coherence check + units = cos_agent_relation.units + if len(units) > 1: + # should never happen + raise ValueError( + f"unexpected error: subordinate relation {cos_agent_relation} " + f"should have exactly one unit" + ) + + if not (raw := cos_agent_relation.data[principal_unit].get(CosAgentProviderUnitData.KEY)): + return + + if not (provider_data := self._validated_provider_data(raw)): + return + + # Copy data from the cos_agent relation to the peer relation, so the leader could + # follow up. + # Save the originating unit name, so it could be used for topology later on by the leader. + data = CosAgentPeersUnitData( # peer relation databag model + unit_name=event.unit.name, + relation_id=str(event.relation.id), + relation_name=event.relation.name, + metrics_alert_rules=provider_data.metrics_alert_rules, + log_alert_rules=provider_data.log_alert_rules, + dashboards=provider_data.dashboards, + ) + self.peer_relation.data[self._charm.unit][ + f"{CosAgentPeersUnitData.KEY}-{event.unit.name}" + ] = data.json() + + # We can't easily tell if the data that was changed is limited to only the data + # that goes into peer relation (in which case, if this is not a leader unit, we wouldn't + # need to emit `on.data_changed`), so we're emitting `on.data_changed` either way. 
+ self.on.data_changed.emit() # pyright: ignore + + def _validated_provider_data(self, raw) -> Optional[CosAgentProviderUnitData]: + try: + return CosAgentProviderUnitData(**json.loads(raw)) + except (pydantic.ValidationError, json.decoder.JSONDecodeError) as e: + self.on.validation_error.emit(message=str(e)) # pyright: ignore + return None + + def trigger_refresh(self, _): + """Trigger a refresh of relation data.""" + # FIXME: Figure out what we should do here + self.on.data_changed.emit() # pyright: ignore + + @property + def _remote_data(self) -> List[Tuple[CosAgentProviderUnitData, JujuTopology]]: + """Return a list of remote data from each of the related units. + + Assumes that the relation is of type subordinate. + Relies on the fact that, for subordinate relations, the only remote unit visible to + *this unit* is the principal unit that this unit is attached to. + """ + all_data = [] + + for relation in self._charm.model.relations[self._relation_name]: + if not relation.units: + continue + unit = next(iter(relation.units)) + if not (raw := relation.data[unit].get(CosAgentProviderUnitData.KEY)): + continue + if not (provider_data := self._validated_provider_data(raw)): + continue + + topology = JujuTopology( + model=self._charm.model.name, + model_uuid=self._charm.model.uuid, + application=unit.app.name, + unit=unit.name, + ) + + all_data.append((provider_data, topology)) + + return all_data + + def _gather_peer_data(self) -> List[CosAgentPeersUnitData]: + """Collect data from the peers. + + Returns a trimmed-down list of CosAgentPeersUnitData. + """ + relation = self.peer_relation + + # Ensure that whatever context we're running this in, we take the necessary precautions: + if not relation or not relation.data or not relation.app: + return [] + + # Iterate over all peer unit data and only collect every principal once. 
+ peer_data: List[CosAgentPeersUnitData] = [] + app_names: Set[str] = set() + + for unit in chain((self._charm.unit,), relation.units): + if not relation.data.get(unit): + continue + + for unit_name in relation.data.get(unit): # pyright: ignore + if not unit_name.startswith(CosAgentPeersUnitData.KEY): + continue + raw = relation.data[unit].get(unit_name) + if raw is None: + continue + data = CosAgentPeersUnitData(**json.loads(raw)) + # Have we already seen this principal app? + if (app_name := data.app_name) in app_names: + continue + peer_data.append(data) + app_names.add(app_name) + + return peer_data + + @property + def metrics_alerts(self) -> Dict[str, Any]: + """Fetch metrics alerts.""" + alert_rules = {} + + seen_apps: List[str] = [] + for data in self._gather_peer_data(): + if rules := data.metrics_alert_rules: + app_name = data.app_name + if app_name in seen_apps: + continue # dedup! + seen_apps.append(app_name) + # This is only used for naming the file, so be as specific as we can be + identifier = JujuTopology( + model=self._charm.model.name, + model_uuid=self._charm.model.uuid, + application=app_name, + # For the topology unit, we could use `data.principal_unit_name`, but that unit + # name may not be very stable: `_gather_peer_data` de-duplicates by app name so + # the exact unit name that turns up first in the iterator may vary from time to + # time. So using the grafana-agent unit name instead. + unit=self._charm.unit.name, + ).identifier + + alert_rules[identifier] = rules + + return alert_rules + + @property + def metrics_jobs(self) -> List[Dict]: + """Parse the relation data contents and extract the metrics jobs.""" + scrape_jobs = [] + for data, topology in self._remote_data: + for job in data.metrics_scrape_jobs: + # In #220, relation schema changed from a simplified dict to the standard + # `scrape_configs`. + # This is to ensure backwards compatibility with Providers older than v0.5. 
+ if "path" in job and "port" in job and "job_name" in job: + job = { + "job_name": job["job_name"], + "metrics_path": job["path"], + "static_configs": [{"targets": [f"localhost:{job['port']}"]}], + # We include insecure_skip_verify because we are always scraping localhost. + # Even if we have the certs for the scrape targets, we'd rather specify the scrape + # jobs with localhost rather than the SAN DNS the cert was issued for. + "tls_config": {"insecure_skip_verify": True}, + } + + # Apply labels to the scrape jobs + for static_config in job.get("static_configs", []): + topo_as_dict = topology.as_dict(excluded_keys=["charm_name"]) + static_config["labels"] = { + # Be sure to keep labels from static_config + **static_config.get("labels", {}), + # TODO: We should add a new method in juju_topology.py + # that like `as_dict` method, returns the keys with juju_ prefix + # https://github.com/canonical/cos-lib/issues/18 + **{ + "juju_{}".format(key): value + for key, value in topo_as_dict.items() + if value + }, + } + + scrape_jobs.append(job) + + return scrape_jobs + + @property + def snap_log_endpoints(self) -> List[SnapEndpoint]: + """Fetch logging endpoints exposed by related snaps.""" + plugs = [] + for data, _ in self._remote_data: + targets = data.log_slots + if targets: + for target in targets: + if target in plugs: + logger.warning( + f"plug {target} already listed. " + "The same snap is being passed from multiple " + "endpoints; this should not happen." + ) + else: + plugs.append(target) + + endpoints = [] + for plug in plugs: + if ":" not in plug: + logger.error(f"invalid plug definition received: {plug}. 
Ignoring...") + else: + endpoint = SnapEndpoint(*plug.split(":")) + endpoints.append(endpoint) + return endpoints + + @property + def logs_alerts(self) -> Dict[str, Any]: + """Fetch log alerts.""" + alert_rules = {} + seen_apps: List[str] = [] + + for data in self._gather_peer_data(): + if rules := data.log_alert_rules: + # This is only used for naming the file, so be as specific as we can be + app_name = data.app_name + if app_name in seen_apps: + continue # dedup! + seen_apps.append(app_name) + + identifier = JujuTopology( + model=self._charm.model.name, + model_uuid=self._charm.model.uuid, + application=app_name, + # For the topology unit, we could use `data.unit_name`, but that unit + # name may not be very stable: `_gather_peer_data` de-duplicates by app name so + # the exact unit name that turns up first in the iterator may vary from time to + # time. So using the grafana-agent unit name instead. + unit=self._charm.unit.name, + ).identifier + + alert_rules[identifier] = rules + + return alert_rules + + @property + def dashboards(self) -> List[Dict[str, str]]: + """Fetch dashboards as encoded content. + + Dashboards are assumed not to vary across units of the same primary. + """ + dashboards: List[Dict[str, Any]] = [] + + seen_apps: List[str] = [] + for data in self._gather_peer_data(): + app_name = data.app_name + if app_name in seen_apps: + continue # dedup! 
+ seen_apps.append(app_name) + + for encoded_dashboard in data.dashboards or (): + content = GrafanaDashboard(encoded_dashboard)._deserialize() + + title = content.get("title", "no_title") + + dashboards.append( + { + "relation_id": data.relation_id, + # We have the remote charm name - use it for the identifier + "charm": f"{data.relation_name}-{app_name}", + "content": content, + "title": title, + } + ) + + return dashboards diff --git a/lib/charms/operator_libs_linux/v2/snap.py b/lib/charms/operator_libs_linux/v2/snap.py index 38c88cf0..871ff5de 100644 --- a/lib/charms/operator_libs_linux/v2/snap.py +++ b/lib/charms/operator_libs_linux/v2/snap.py @@ -83,7 +83,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 3 +LIBPATCH = 4 # Regex to locate 7-bit C1 ANSI sequences @@ -214,7 +214,7 @@ class Snap(object): - state: a `SnapState` representation of its install status - channel: "stable", "candidate", "beta", and "edge" are common - revision: a string representing the snap's revision - - confinement: "classic" or "strict" + - confinement: "classic", "strict", or "devmode" """ def __init__( @@ -475,6 +475,8 @@ def _install( args = [] if self.confinement == "classic": args.append("--classic") + if self.confinement == "devmode": + args.append("--devmode") if channel: args.append('--channel="{}"'.format(channel)) if revision: @@ -489,6 +491,7 @@ def _refresh( channel: Optional[str] = "", cohort: Optional[str] = "", revision: Optional[str] = None, + devmode: bool = False, leave_cohort: Optional[bool] = False, ) -> None: """Refresh a snap. @@ -497,6 +500,7 @@ def _refresh( channel: the channel to install from cohort: optionally, specify a cohort. revision: optionally, specify the revision of the snap to refresh + devmode: optionally, specify devmode confinement leave_cohort: leave the current cohort. 
""" args = [] @@ -506,6 +510,9 @@ def _refresh( if revision: args.append('--revision="{}"'.format(revision)) + if devmode: + args.append("--devmode") + if not cohort: cohort = self._cohort @@ -530,6 +537,7 @@ def ensure( self, state: SnapState, classic: Optional[bool] = False, + devmode: bool = False, channel: Optional[str] = "", cohort: Optional[str] = "", revision: Optional[str] = None, @@ -539,6 +547,7 @@ def ensure( Args: state: a `SnapState` to reconcile to. classic: an (Optional) boolean indicating whether classic confinement should be used + devmode: an (Optional) boolean indicating whether devmode confinement should be used channel: the channel to install from cohort: optional. Specify the key of a snap cohort. revision: optional. the revision of the snap to install/refresh @@ -549,7 +558,15 @@ def ensure( Raises: SnapError if an error is encountered """ - self._confinement = "classic" if classic or self._confinement == "classic" else "" + if classic and devmode: + raise ValueError("Cannot set both classic and devmode confinement") + + if classic or self._confinement == "classic": + self._confinement = "classic" + elif devmode or self._confinement == "devmode": + self._confinement = "devmode" + else: + self._confinement = "" if state not in (SnapState.Present, SnapState.Latest): # We are attempting to remove this snap. @@ -566,7 +583,7 @@ def ensure( self._install(channel, cohort, revision) else: # The snap is installed, but we are changing it (e.g., switching channels). 
- self._refresh(channel, cohort, revision) + self._refresh(channel=channel, cohort=cohort, revision=revision, devmode=devmode) self._update_snap_apps() self._state = state @@ -892,6 +909,7 @@ def add( state: Union[str, SnapState] = SnapState.Latest, channel: Optional[str] = "", classic: Optional[bool] = False, + devmode: bool = False, cohort: Optional[str] = "", revision: Optional[str] = None, ) -> Union[Snap, List[Snap]]: @@ -904,6 +922,8 @@ def add( channel: an (Optional) channel as a string. Defaults to 'latest' classic: an (Optional) boolean specifying whether it should be added with classic confinement. Default `False` + devmode: an (Optional) boolean specifying whether it should be added with devmode + confinement. Default `False` cohort: an (Optional) string specifying the snap cohort to use revision: an (Optional) string specifying the snap revision to use @@ -920,7 +940,7 @@ def add( if isinstance(state, str): state = SnapState(state) - return _wrap_snap_operations(snap_names, state, channel, classic, cohort, revision) + return _wrap_snap_operations(snap_names, state, channel, classic, devmode, cohort, revision) @_cache_init @@ -936,8 +956,13 @@ def remove(snap_names: Union[str, List[str]]) -> Union[Snap, List[Snap]]: snap_names = [snap_names] if isinstance(snap_names, str) else snap_names if not snap_names: raise TypeError("Expected at least one snap to add, received zero!") - - return _wrap_snap_operations(snap_names, SnapState.Absent, "", False) + return _wrap_snap_operations( + snap_names=snap_names, + state=SnapState.Absent, + channel="", + classic=False, + devmode=False, + ) @_cache_init @@ -946,6 +971,7 @@ def ensure( state: str, channel: Optional[str] = "", classic: Optional[bool] = False, + devmode: bool = False, cohort: Optional[str] = "", revision: Optional[int] = None, ) -> Union[Snap, List[Snap]]: @@ -957,6 +983,8 @@ def ensure( channel: an (Optional) channel as a string. 
Defaults to 'latest' classic: an (Optional) boolean specifying whether it should be added with classic confinement. Default `False` + devmode: an (Optional) boolean specifying whether it should be added with devmode + confinement. Default `False` cohort: an (Optional) string specifying the snap cohort to use revision: an (Optional) integer specifying the snap revision to use @@ -970,7 +998,15 @@ def ensure( channel = "latest" if state in ("present", "latest") or revision: - return add(snap_names, SnapState(state), channel, classic, cohort, revision) + return add( + snap_names=snap_names, + state=SnapState(state), + channel=channel, + classic=classic, + devmode=devmode, + cohort=cohort, + revision=revision, + ) else: return remove(snap_names) @@ -980,6 +1016,7 @@ def _wrap_snap_operations( state: SnapState, channel: str, classic: bool, + devmode: bool, cohort: Optional[str] = "", revision: Optional[str] = None, ) -> Union[Snap, List[Snap]]: @@ -995,7 +1032,12 @@ def _wrap_snap_operations( snap.ensure(state=SnapState.Absent) else: snap.ensure( - state=state, classic=classic, channel=channel, cohort=cohort, revision=revision + state=state, + classic=classic, + devmode=devmode, + channel=channel, + cohort=cohort, + revision=revision, ) snaps["success"].append(snap) except SnapError as e: @@ -1014,13 +1056,17 @@ def _wrap_snap_operations( def install_local( - filename: str, classic: Optional[bool] = False, dangerous: Optional[bool] = False + filename: str, + classic: Optional[bool] = False, + devmode: Optional[bool] = False, + dangerous: Optional[bool] = False, ) -> Snap: """Perform a snap operation. 
Args: filename: the path to a local .snap file to install classic: whether to use classic confinement + devmode: whether to use devmode confinement dangerous: whether --dangerous should be passed to install snaps without a signature Raises: @@ -1033,6 +1079,8 @@ def install_local( ] if classic: args.append("--classic") + if devmode: + args.append("--devmode") if dangerous: args.append("--dangerous") try: diff --git a/metadata.yaml b/metadata.yaml index 1f8b8049..f42fbdff 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -26,6 +26,9 @@ provides: shared-db: interface: mysql-shared scope: container + cos-agent: + interface: cos_agent + limit: 1 requires: backend-database: interface: mysql_client @@ -36,14 +39,13 @@ requires: interface: juju-info scope: container peers: + cos: + interface: cos upgrade-version-a: # Relation versioning scheme: # DA056 - Upgrading in-place upgrade protocol # https://docs.google.com/document/d/1H7qy5SAwLiCOKO9xMQJbbQP5_-jGV6Lhi-mJOk4gZ08/edit interface: upgrade -# TODO TLS VM: re-enable peer relation -# mysql-router-peers: -# interface: mysql_router_peers # DEPRECATED shared-db: Workaround for legacy "mysql-shared" interface using unit databags instead of app databag deprecated-shared-db-credentials: interface: _deprecated_shared_db_peers diff --git a/poetry.lock b/poetry.lock index 7d93e0b9..0b1d62b0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "appnope" @@ -343,6 +343,22 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "cosl" +version = "0.0.7" +description = "Utils for COS Lite charms" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cosl-0.0.7-py3-none-any.whl", hash = "sha256:ed7cf980b47f4faa0e65066d65e5b4274f1972fb6cd3533441a90edae360b4a7"}, + {file = "cosl-0.0.7.tar.gz", hash = "sha256:edf07a81d152720c3ee909a1201063e5b1a35c49f574a7ec1deb989a8bc6fada"}, +] + +[package.dependencies] +ops = "*" +pyyaml = "*" +typing-extensions = "*" + [[package]] name = "coverage" version = "7.3.0" @@ -835,6 +851,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1249,6 +1275,58 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "1.10.13" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = 
"pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pydocstyle" version = "6.3.0" @@ -1550,6 +1628,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1557,8 +1636,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1575,6 +1661,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1582,6 +1669,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1995,4 +2083,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "24ab891f1773af8d539b731209ecc6dce6e0259e331d2f521b64f4d116cbdc60" +content-hash = "41c5b76947360248ec47b3b253c41d5c99b06199e6f5c349ec6241f86a59ff9b" diff --git a/pyproject.toml b/pyproject.toml index 58a49dc4..ac9e4f32 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,9 @@ jinja2 = "^3.1.2" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py ops = ">=2.0.0" +# grafana_agent/v0/cos_agent.py +pydantic = "<2" +cosl = "*" [tool.poetry.group.format] optional = true diff --git a/src/abstract_charm.py b/src/abstract_charm.py index f4c9e27d..9923f330 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -15,6 +15,7 @@ import lifecycle import logrotate import machine_upgrade +import relations.cos import 
relations.database_provides import relations.database_requires import server_exceptions @@ -67,12 +68,6 @@ def _subordinate_relation_endpoint_names(self) -> typing.Optional[typing.Iterabl Does NOT include relations where charm is principal """ - @property - def _tls_certificate_saved(self) -> bool: - """Whether a TLS certificate is available to use""" - # TODO VM TLS: Remove property after implementing TLS on machine charm - return False - @property @abc.abstractmethod def _container(self) -> container.Container: @@ -88,6 +83,11 @@ def _upgrade(self) -> typing.Optional[upgrade.Upgrade]: def _logrotate(self) -> logrotate.LogRotate: """logrotate""" + @property + @abc.abstractmethod + def _cos(self) -> relations.cos.COSRelation: + """COS""" + @property @abc.abstractmethod def _read_write_endpoint(self) -> str: @@ -98,6 +98,31 @@ def _read_write_endpoint(self) -> str: def _read_only_endpoint(self) -> str: """MySQL Router read-only endpoint""" + @property + def _tls_certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + # TODO VM TLS: Update property after implementing TLS on machine_charm + return False + + @property + def _tls_key(self) -> str: + """Custom TLS key""" + # TODO VM TLS: Update property after implementing TLS on machine_charm + return None + + @property + def _tls_certificate(self) -> str: + """Custom TLS certificate""" + # TODO VM TLS: Update property after implementing TLS on machine_charm + return None + + def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: + """Returns the exporter config for MySQLRouter exporter if cos relation exists""" + cos_relation_exists = self._cos.relation_exists and not self._cos.is_relation_breaking( + event + ) + return self._cos.exporter_user_config if cos_relation_exists else None + def get_workload(self, *, event): """MySQL Router workload""" if connection_info := self._database_requires.get_connection_info(event=event): @@ -105,9 +130,12 @@ def 
get_workload(self, *, event): container_=self._container, logrotate_=self._logrotate, connection_info=connection_info, + cos=self._cos, charm_=self, ) - return self._workload_type(container_=self._container, logrotate_=self._logrotate) + return self._workload_type( + container_=self._container, logrotate_=self._logrotate, cos=self._cos + ) @staticmethod # TODO python3.10 min version: Use `list` instead of `typing.List` @@ -232,7 +260,9 @@ def reconcile(self, event=None) -> None: # noqa: C901 f"{workload_.container_ready=}, " f"{self._database_requires.is_relation_breaking(event)=}, " f"{self._upgrade.in_progress=}" + f"{self._cos.is_relation_breaking(event)=}" ) + try: if self._unit_lifecycle.authorized_leader: if self._database_requires.is_relation_breaking(event): @@ -252,10 +282,14 @@ def reconcile(self, event=None) -> None: # noqa: C901 router_read_only_endpoint=self._read_only_endpoint, shell=workload_.shell, ) - if isinstance(workload_, workload.AuthenticatedWorkload) and workload_.container_ready: - workload_.enable(tls=self._tls_certificate_saved, unit_name=self.unit.name) - elif workload_.container_ready: - workload_.disable() + if workload_.container_ready: + workload_.reconcile( + tls=self._tls_certificate_saved, + unit_name=self.unit.name, + exporter_config=self._cos_exporter_config(event), + key=self._tls_key, + certificate=self._tls_certificate, + ) # Empty waiting status means we're waiting for database requires relation before # starting workload if not workload_.status or workload_.status == ops.WaitingStatus(): diff --git a/src/container.py b/src/container.py index 5538fcf8..8ca1881d 100644 --- a/src/container.py +++ b/src/container.py @@ -10,6 +10,9 @@ import ops +if typing.TYPE_CHECKING: + import relations.cos + class Path(pathlib.PurePosixPath, abc.ABC): """Workload container (snap or ROCK) filesystem path""" @@ -74,14 +77,31 @@ def router_config_file(self) -> Path: """ return self.router_config_directory / "mysqlrouter.conf" + @property + def 
rest_api_credentials_file(self) -> Path: + """Credentials file for MySQL Router's REST API""" + return self.router_config_directory / "rest_api_credentials" + + @property + def rest_api_config_file(self) -> Path: + """Configuration file for the REST API for MySQLRouter""" + return self.router_config_directory / "router_rest_api.conf" + @property def tls_config_file(self) -> Path: """Extra MySQL Router configuration file to enable TLS""" return self.router_config_directory / "tls.conf" - def __init__(self, *, mysql_router_command: str, mysql_shell_command: str) -> None: + def __init__( + self, + *, + mysql_router_command: str, + mysql_shell_command: str, + mysql_router_password_command: str, + ) -> None: self._mysql_router_command = mysql_router_command self._mysql_shell_command = mysql_shell_command + self._mysql_router_password_command = mysql_router_password_command @property @abc.abstractmethod @@ -96,6 +116,11 @@ def ready(self) -> bool: def mysql_router_service_enabled(self) -> bool: """MySQL Router service status""" + @property + @abc.abstractmethod + def mysql_router_exporter_service_enabled(self) -> bool: + """MySQL Router exporter service status""" + @abc.abstractmethod def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> None: """Update and restart MySQL Router service. @@ -107,6 +132,19 @@ def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> Non if enabled: assert tls is not None, "`tls` argument required when enabled=True" + @abc.abstractmethod + def update_mysql_router_exporter_service( + self, *, enabled: bool, config: "relations.cos.ExporterConfig" = None + ) -> None: + """Update and restart the MySQL Router exporter service. 
+ + Args: + enabled: Whether MySQL Router exporter service is enabled + config: The configuration for MySQL Router exporter + """ + if enabled and not config: + raise ValueError("Missing MySQL Router exporter config") + @abc.abstractmethod def upgrade(self, unit: ops.Unit) -> None: """Upgrade container version @@ -116,7 +154,13 @@ def upgrade(self, unit: ops.Unit) -> None: @abc.abstractmethod # TODO python3.10 min version: Use `list` instead of `typing.List` - def _run_command(self, command: typing.List[str], *, timeout: typing.Optional[int]) -> str: + def _run_command( + self, + command: typing.List[str], + *, + timeout: typing.Optional[int], + input: str = None, + ) -> str: """Run command in container. Raises: @@ -146,3 +190,27 @@ def run_mysql_shell(self, args: typing.List[str], *, timeout: int = None) -> str @abc.abstractmethod def path(self, *args) -> Path: """Container filesystem path""" + + def create_router_rest_api_credentials_file(self) -> None: + """Creates a credentials file for the router rest api if it does not exist.""" + if not self.rest_api_credentials_file.exists(): + # create empty credentials file + self.rest_api_credentials_file.write_text("") + + def set_mysql_router_rest_api_password( + self, *, user: str, password: typing.Optional[str] + ) -> None: + """Set REST API credentials using the mysqlrouter_password command.""" + self.create_router_rest_api_credentials_file() + + action = "set" if password else "delete" + self._run_command( + [ + self._mysql_router_password_command, + action, + str(self.rest_api_credentials_file), + user, + ], + input=password, + timeout=30, + ) diff --git a/src/grafana_dashboards/mysql-router-metrics.json b/src/grafana_dashboards/mysql-router-metrics.json new file mode 100644 index 00000000..10583066 --- /dev/null +++ b/src/grafana_dashboards/mysql-router-metrics.json @@ -0,0 +1,1286 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + 
"enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "description": "https://github.com/rluisr/mysqlrouter_exporter", + "editable": true, + "fiscalYearStartMonth": 0, + "gnetId": 10741, + "graphTooltip": 0, + "id": 7, + "links": [], + "liveNow": false, + "panels": [ + { + "columns": [], + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "fontSize": "100%", + "gridPos": { + "h": 2, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 11, + "links": [], + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "align": "auto", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "hidden" + }, + { + "alias": "", + "align": "auto", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "__name__", + "thresholds": [], + "type": "hidden", + "unit": "short" + }, + { + "alias": "", + "align": "auto", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "instance", + "thresholds": [], + "type": "hidden", + "unit": "short" + }, + { + "alias": "", + "align": "auto", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "job", + "thresholds": [], + "type": "hidden", + "unit": "short" + }, + { + "alias": "", + "align": "auto", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + 
"mappingType": 1, + "pattern": "Value", + "thresholds": [], + "type": "hidden", + "unit": "short" + } + ], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "expr": "mysqlrouter_router_status{hostname=\"$host\"}", + "format": "table", + "instant": true, + "legendFormat": "", + "refId": "A" + } + ], + "transform": "table", + "transparent": true, + "type": "table-old" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "description": "Nodes of cluster", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 6, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "last" + ], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "9.2.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "mysqlrouter_metadata_config_nodes", + "format": "table", + "instant": true, + "interval": "", + "intervalFactor": 1, + "legendFormat": "__auto", + "refId": "A" + } + ], + "title": "Nodes of cluster", + "transparent": true, + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "description": "Each routing has its own list of blocked hosts. Blocked clients receive the MySQL Server error 1129 code with a slightly different error message: \"1129: Too many connection errors from fail.example.com\". The Router logs contain extra information for blocked clients, such as: INFO [...] 1 authentication errors for fail.example.com (max 100) WARNING [...] 
blocking client host fail.example.com", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 0, + "displayName": "", + "mappings": [], + "max": 100, + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 1 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 9, + "options": { + "displayMode": "lcd", + "minVizHeight": 10, + "minVizWidth": 0, + "orientation": "vertical", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showUnfilled": true, + "valueMode": "color" + }, + "pluginVersion": "9.2.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "expr": "mysqlrouter_route_blocked_hosts{name=~\"$cluster.*\", router_hostname=\"$host\"}", + "legendFormat": "{{name}}", + "refId": "A" + } + ], + "title": "$host - Route Blocked Hosts", + "transparent": true, + "type": "bargauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "from": "", + "id": 1, + "operator": "", + "text": "UP", + "to": "", + "type": 1, + "value": "1" + }, + { + "from": "", + "id": 2, + "operator": "", + "text": "DOWN", + "to": "", + "type": 1, + "value": "0" + } + ], + "max": 1, + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "green", + "value": 1 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 12, + "x": 0, + "y": 2 + }, + "id": 4, + "links": [], + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "last" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": false + }, + "pluginVersion": "9.2.1", + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "mysqlrouter_route_health{router_hostname=\"$host\"}", + "format": "time_series", + "instant": true, + "legendFormat": "{{name}}", + "refId": "A" + } + ], + "title": "$host - Route Health", + "transparent": true, + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 2, + "links": [], + "options": { + "legend": { + "calcs": [ + "mean", + "lastNotNull", + "max" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.1.4", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "expr": "mysqlrouter_route_active_connections{name=~\"$cluster.*\", router_hostname=\"$host\"}", + "legendFormat": "{{name}}", + "refId": "A" + } + ], + "title": "$host - Route Active Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + 
"uid": "${prometheusds}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 16, + "links": [], + "options": { + "legend": { + "calcs": [ + "mean", + "lastNotNull", + "max" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.1.4", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "mysqlrouter_route_total_connections{name=~\"$cluster.*\", router_hostname=\"$host\"}", + "legendFormat": "{{name}}", + "range": true, + "refId": "A" + } + ], + "title": "$host - Route Total Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "description": "Click table column", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "displayMode": "auto", + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + 
"color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Time" + }, + "properties": [ + { + "id": "displayName", + "value": "Time" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "__name__" + }, + "properties": [ + { + "id": "displayName", + "value": "Metric" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "destination_address" + }, + "properties": [ + { + "id": "displayName", + "value": "Destination Address" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "instance" + }, + "properties": [ + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "job" + }, + "properties": [ + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "name" + }, + "properties": [ + { + "id": "displayName", + "value": "Route Name" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "source_address" + }, + "properties": [ + { + "id": "displayName", + "value": "Source Address" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Value #A" + }, + "properties": [ + { + "id": "displayName", + "value": "Bytes From Server" + }, + { + "id": "unit", + "value": "decbytes" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": 
"byName", + "options": "Value #B" + }, + "properties": [ + { + "id": "displayName", + "value": "Bytes To Server" + }, + { + "id": "unit", + "value": "decbytes" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Value #C" + }, + "properties": [ + { + "id": "displayName", + "value": "Time Connected To Server" + }, + { + "id": "unit", + "value": "locale" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "unit", + "value": "time: YYYY-MM-DD HH:mm:ss" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Value #D" + }, + "properties": [ + { + "id": "displayName", + "value": "Time Last Received From Server" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "unit", + "value": "time: YYYY-MM-DD HH:mm:ss" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Value #E" + }, + "properties": [ + { + "id": "displayName", + "value": "Time Last Sent To Server" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "unit", + "value": "time: YYYY-MM-DD HH:mm:ss" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Value #F" + }, + "properties": [ + { + "id": "displayName", + "value": "Time Started" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": 2 + }, + { + "id": "unit", + "value": "time: YYYY-MM-DD HH:mm:ss" + }, + { + "id": "custom.align" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Metric" + }, + "properties": [ + { + "id": "custom.width", + "value": 341 + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 15, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "enablePagination": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "frameIndex": 2, + "showHeader": 
true, + "sortBy": [] + }, + "pluginVersion": "9.2.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_byte_to_server{router_hostname=\"$host\"}", + "format": "table", + "hide": false, + "instant": true, + "intervalFactor": 1, + "legendFormat": "", + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_byte_from_server{router_hostname=\"$host\"}", + "format": "table", + "hide": false, + "instant": true, + "intervalFactor": 1, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_time_connected_to_server{router_hostname=\"$host\"}", + "format": "table", + "instant": true, + "intervalFactor": 1, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_time_last_received_from_server{router_hostname=\"$host\"}", + "format": "table", + "instant": true, + "refId": "D" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_time_last_sent_to_server{router_hostname=\"$host\"}", + "format": "table", + "instant": true, + "refId": "E" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_time_started{router_hostname=\"$host\"}", + "format": "table", + "instant": true, + "refId": "F" + } + ], + "title": "$host - All Connections Information", + "transformations": [ + { + "id": "seriesToRows", + "options": { + "reducers": [] + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "description": "If route have no connections, There is no data.", + "fieldConfig": { + "defaults": { + 
"color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "decbytes" + }, + "overrides": [ + { + "matcher": { + "id": "byRegexp", + "options": "/from_server/" + }, + "properties": [ + { + "id": "custom.transform", + "value": "negative-Y" + } + ] + }, + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "from_server bootstrap_rw - /var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock -> 10.173.238.22:3306" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 13, + "options": { + "legend": { + "calcs": [ + "mean", + "lastNotNull", + "max" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.1.4", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_byte_from_server{router_hostname=\"$host\"}", + "instant": false, + "intervalFactor": 3, + 
"legendFormat": "from_server {{name}} - {{source_address}} -> {{destination_address}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "editorMode": "code", + "expr": "route_connections_byte_to_server{outer_hostname=\"$host\"}", + "instant": false, + "intervalFactor": 3, + "legendFormat": "to_server {{name}} - {{destination_address}} -> {{source_address}}", + "refId": "B" + } + ], + "title": "$host - Route byte from/to server", + "type": "timeseries" + } + ], + "refresh": "5m", + "schemaVersion": 37, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "current": { + "isNone": true, + "selected": false, + "text": "None", + "value": "" + }, + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "definition": "label_values(mysqlrouter_router_status,service)", + "hide": 0, + "includeAll": false, + "label": "", + "multi": false, + "name": "service", + "options": [], + "query": { + "query": "label_values(mysqlrouter_router_status,service)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "tagValuesQuery": "", + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": { + "selected": false, + "text": "$host", + "value": "$host" + }, + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "definition": "label_values(mysqlrouter_router_status{service=\"$service\"},hostname)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "host", + "options": [], + "query": { + "query": "label_values(mysqlrouter_router_status{service=\"$service\"},hostname)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "tagValuesQuery": "", + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": { + "isNone": true, + "selected": false, + "text": "None", + "value": "" + 
}, + "datasource": { + "type": "prometheus", + "uid": "${prometheusds}" + }, + "definition": "label_values(mysqlrouter_metadata{name=\"$cluster\"},name)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "cluster", + "options": [], + "query": { + "query": "label_values(mysqlrouter_metadata{name=\"$cluster\"},name)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "tagValuesQuery": "", + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-12h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ] + }, + "timezone": "", + "title": "MySQLRouter Exporter", + "uid": "8_sRZYdZk", + "version": 10, + "weekStart": "" + } diff --git a/src/machine_charm.py b/src/machine_charm.py index 90a9f748..02d8acc7 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -14,6 +14,7 @@ import abstract_charm import machine_logrotate import machine_upgrade +import relations.cos import relations.database_providers_wrapper import snap import socket_workload @@ -29,8 +30,10 @@ class MachineSubordinateRouterCharm(abstract_charm.MySQLRouterCharm): def __init__(self, *args) -> None: super().__init__(*args) # DEPRECATED shared-db: Enable legacy "mysql-shared" interface - del self._database_provides - self._database_provides = relations.database_providers_wrapper.RelationEndpoint(self) + self._database_provides = relations.database_providers_wrapper.RelationEndpoint( + self, self._database_provides + ) + self._cos_relation = relations.cos.COSRelation(self, self._container) self._authenticated_workload_type = socket_workload.AuthenticatedSocketWorkload self.framework.observe(self.on.install, self._on_install) @@ -60,6 +63,10 @@ def _upgrade(self) -> typing.Optional[machine_upgrade.Upgrade]: def _logrotate(self) -> machine_logrotate.LogRotate: return 
machine_logrotate.LogRotate(container_=self._container) + @property + def _cos(self) -> relations.cos.COSRelation: + return self._cos_relation + @property def _read_write_endpoint(self) -> str: return f'file://{self._container.path("/run/mysqlrouter/mysql.sock")}' diff --git a/src/mysql_shell/__init__.py b/src/mysql_shell/__init__.py index 29808a82..82fa2639 100644 --- a/src/mysql_shell/__init__.py +++ b/src/mysql_shell/__init__.py @@ -10,22 +10,19 @@ import json import logging import pathlib -import secrets -import string import typing import jinja2 import container import server_exceptions +import utils if typing.TYPE_CHECKING: import relations.database_requires logger = logging.getLogger(__name__) -_PASSWORD_LENGTH = 24 - # TODO python3.10 min version: Add `(kw_only=True)` @dataclasses.dataclass @@ -123,11 +120,6 @@ def _run_sql(self, sql_statements: typing.List[str]) -> None: _jinja_env.get_template("run_sql.py.jinja").render(statements=sql_statements) ) - @staticmethod - def _generate_password() -> str: - choices = string.ascii_letters + string.digits - return "".join(secrets.choice(choices) for _ in range(_PASSWORD_LENGTH)) - def _get_attributes(self, additional_attributes: dict = None) -> str: """Attributes for (MySQL) users created by this charm @@ -143,7 +135,7 @@ def create_application_database_and_user(self, *, username: str, database: str) """Create database and user for related database_provides application.""" attributes = self._get_attributes() logger.debug(f"Creating {database=} and {username=} with {attributes=}") - password = self._generate_password() + password = utils.generate_password() self._run_sql( [ f"CREATE DATABASE IF NOT EXISTS `{database}`", diff --git a/src/relations/cos.py b/src/relations/cos.py new file mode 100644 index 00000000..bcd0761b --- /dev/null +++ b/src/relations/cos.py @@ -0,0 +1,132 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Relation to the cos charms.""" + +import logging +import typing +from dataclasses import dataclass + +import ops +from charms.grafana_agent.v0.cos_agent import COSAgentProvider + +import container +import relations.secrets +import utils +from snap import _SNAP_NAME + +if typing.TYPE_CHECKING: + import abstract_charm + +logger = logging.getLogger(__name__) + + +@dataclass +class ExporterConfig: + """Configuration for the MySQL Router exporter""" + + url: str + username: str + password: str + + +class COSRelation: + """Relation with the cos bundle.""" + + _EXPORTER_PORT = "49152" + _HTTP_SERVER_PORT = "8443" + _NAME = "cos-agent" + _PEER_RELATION_NAME = "cos" + + _MONITORING_USERNAME = "monitoring" + _MONITORING_PASSWORD_KEY = "monitoring-password" + + def __init__(self, charm_: "abstract_charm.MySQLRouterCharm", container_: container.Container): + self._interface = COSAgentProvider( + charm_, + metrics_endpoints=[ + { + "path": "/metrics", + "port": self._EXPORTER_PORT, + } + ], + log_slots=[f"{_SNAP_NAME}:logs"], + ) + self.charm = charm_ + self._container = container_ + + charm_.framework.observe( + charm_.on[self._NAME].relation_created, + charm_.reconcile, + ) + charm_.framework.observe( + charm_.on[self._NAME].relation_broken, + charm_.reconcile, + ) + + self._secrets = relations.secrets.RelationSecrets( + charm_, + self._PEER_RELATION_NAME, + unit_secret_fields=[self._MONITORING_PASSWORD_KEY], + ) + + @property + def exporter_user_config(self) -> ExporterConfig: + """Returns user config needed for the router exporter service.""" + return ExporterConfig( + url=f"https://127.0.0.1:{self._HTTP_SERVER_PORT}", + username=self._MONITORING_USERNAME, + password=self._get_monitoring_password(), + ) + + @property + def relation_exists(self) -> bool: + """Whether relation with cos exists.""" + return len(self.charm.model.relations.get(self._NAME, [])) == 1 + + def _get_monitoring_password(self) -> str: + """Gets the monitoring password from unit peer data, or 
generate and cache it.""" + monitoring_password = self._secrets.get_secret( + relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY + ) + if monitoring_password: + return monitoring_password + + monitoring_password = utils.generate_password() + self._secrets.set_secret( + relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, monitoring_password + ) + return monitoring_password + + def _reset_monitoring_password(self) -> None: + """Reset the monitoring password from unit peer data.""" + self._secrets.set_secret(relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, None) + + def is_relation_breaking(self, event) -> bool: + """Whether relation will be broken after the current event is handled.""" + if not self.relation_exists: + return False + + return ( + isinstance(event, ops.RelationBrokenEvent) + and event.relation.id == self.charm.model.relations[self._NAME][0].id + ) + + def setup_monitoring_user(self) -> None: + """Set up a router REST API user for mysqlrouter exporter.""" + logger.debug("Setting up router REST API user for mysqlrouter exporter") + self._container.set_mysql_router_rest_api_password( + user=self._MONITORING_USERNAME, + password=self._get_monitoring_password(), + ) + logger.debug("Set up router REST API user for mysqlrouter exporter") + + def cleanup_monitoring_user(self) -> None: + """Clean up router REST API user for mysqlrouter exporter.""" + logger.debug("Cleaning router REST API user for mysqlrouter exporter") + self._container.set_mysql_router_rest_api_password( + user=self._MONITORING_USERNAME, + password=None, + ) + self._reset_monitoring_password() + logger.debug("Cleaned router REST API user for mysqlrouter exporter") diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 22dd875d..382e2554 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -28,8 +28,12 @@ class RelationEndpoint:
(deprecated_shared_db_database_provides.py) endpoint """ - def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: - self._database_provides = relations.database_provides.RelationEndpoint(charm_) + def __init__( + self, + charm_: "abstract_charm.MySQLRouterCharm", + database_provides: relations.database_provides.RelationEndpoint, + ) -> None: + self._database_provides = database_provides self._deprecated_shared_db = deprecated_shared_db_database_provides.RelationEndpoint( charm_ ) diff --git a/src/relations/secrets.py b/src/relations/secrets.py new file mode 100644 index 00000000..ffab9d27 --- /dev/null +++ b/src/relations/secrets.py @@ -0,0 +1,92 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Secrets for MySQLRouter""" + +import logging +import typing + +import charms.data_platform_libs.v0.data_interfaces as data_interfaces + +if typing.TYPE_CHECKING: + import abstract_charm + +logger = logging.getLogger(__name__) + +APP_SCOPE = "app" +UNIT_SCOPE = "unit" +Scopes = typing.Literal[APP_SCOPE, UNIT_SCOPE] + + +class RelationSecrets: + """MySQLRouter secrets on a specific peer relation""" + + _SECRET_INTERNAL_LABEL = "internal-secret" + _SECRET_DELETED_LABEL = "None" + + def __init__( + self, + charm: "abstract_charm.MySQLRouterCharm", + peer_relation_name: str, + app_secret_fields: typing.List[str] = [], + unit_secret_fields: typing.List[str] = [], + ) -> None: + self.charm = charm + self.peer_relation_name = peer_relation_name + + self.peer_relation_app = data_interfaces.DataPeer( + charm, + relation_name=peer_relation_name, + additional_secret_fields=app_secret_fields, + secret_field_name=self._SECRET_INTERNAL_LABEL, + deleted_label=self._SECRET_DELETED_LABEL, + ) + self.peer_relation_unit = data_interfaces.DataPeerUnit( + charm, + relation_name=peer_relation_name, + additional_secret_fields=unit_secret_fields, + secret_field_name=self._SECRET_INTERNAL_LABEL, + deleted_label=self._SECRET_DELETED_LABEL, + ) + + 
def _scope_obj(self, scope: Scopes): + if scope == APP_SCOPE: + return self.app + if scope == UNIT_SCOPE: + return self.unit + + def peer_relation_data(self, scope: Scopes) -> data_interfaces.DataPeer: + """Returns the peer relation data per scope.""" + if scope == APP_SCOPE: + return self.peer_relation_app + elif scope == UNIT_SCOPE: + return self.peer_relation_unit + + def get_secret(self, scope: Scopes, key: str) -> typing.Optional[str]: + """Get secret from the secret storage.""" + if scope not in typing.get_args(Scopes): + raise ValueError("Unknown secret scope") + + peers = self.charm.model.get_relation(self.peer_relation_name) + return self.peer_relation_data(scope).fetch_my_relation_field(peers.id, key) + + def set_secret( + self, scope: Scopes, key: str, value: typing.Optional[str] + ) -> typing.Optional[str]: + """Set secret from the secret storage.""" + if scope not in typing.get_args(Scopes): + raise ValueError("Unknown secret scope") + + if not value: + return self.remove_secret(scope, key) + + peers = self.charm.model.get_relation(self.peer_relation_name) + self.peer_relation_data(scope).update_relation_data(peers.id, {key: value}) + + def remove_secret(self, scope: Scopes, key: str) -> None: + """Removing a secret.""" + if scope not in typing.get_args(Scopes): + raise ValueError("Unknown secret scope") + + peers = self.charm.model.get_relation(self.peer_relation_name) + self.peer_relation_data(scope).delete_relation_data(peers.id, [key]) diff --git a/src/snap.py b/src/snap.py index e45f38b7..44621d27 100644 --- a/src/snap.py +++ b/src/snap.py @@ -16,6 +16,9 @@ import container +if typing.TYPE_CHECKING: + import relations.cos + logger = logging.getLogger(__name__) _SNAP_NAME = "charmed-mysql" @@ -149,11 +152,13 @@ class Snap(container.Container): """Workload snap container""" _SERVICE_NAME = "mysqlrouter-service" + _EXPORTER_SERVICE_NAME = "mysqlrouter-exporter" def __init__(self) -> None: super().__init__( 
mysql_router_command=f"{_SNAP_NAME}.mysqlrouter", mysql_shell_command=f"{_SNAP_NAME}.mysqlsh", + mysql_router_password_command=f"{_SNAP_NAME}.mysqlrouter-passwd", ) @property @@ -164,24 +169,56 @@ def ready(self) -> bool: def mysql_router_service_enabled(self) -> bool: return _snap.services[self._SERVICE_NAME]["active"] + @property + def mysql_router_exporter_service_enabled(self) -> bool: + return _snap.services[self._EXPORTER_SERVICE_NAME]["active"] + def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> None: super().update_mysql_router_service(enabled=enabled, tls=tls) if tls: raise NotImplementedError # TODO VM TLS + if enabled: _snap.start([self._SERVICE_NAME], enable=True) else: _snap.stop([self._SERVICE_NAME], disable=True) + def update_mysql_router_exporter_service( + self, *, enabled: bool, config: "relations.cos.ExporterConfig" = None + ) -> None: + super().update_mysql_router_exporter_service(enabled=enabled, config=config) + + if enabled: + _snap.set( + { + "mysqlrouter-exporter.user": config.username, + "mysqlrouter-exporter.password": config.password, + "mysqlrouter-exporter.url": config.url, + } + ) + _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) + else: + _snap.unset("mysqlrouter-exporter.user") + _snap.unset("mysqlrouter-exporter.password") + _snap.unset("mysqlrouter-exporter.url") + _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) + def upgrade(self, unit: ops.Unit) -> None: """Upgrade snap.""" _refresh(unit=unit, verb=_RefreshVerb.UPGRADE) # TODO python3.10 min version: Use `list` instead of `typing.List` - def _run_command(self, command: typing.List[str], *, timeout: typing.Optional[int]) -> str: + def _run_command( + self, + command: typing.List[str], + *, + timeout: typing.Optional[int], + input: typing.Optional[str] = None, + ) -> str: try: output = subprocess.run( command, + input=input, capture_output=True, timeout=timeout, check=True, diff --git a/src/utils.py b/src/utils.py new file mode 100644 
index 00000000..c56b7fde --- /dev/null +++ b/src/utils.py @@ -0,0 +1,13 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Utility functions.""" + +import secrets +import string + + +def generate_password() -> str: + """Generate a random password.""" + choices = string.ascii_letters + string.digits + return "".join(secrets.choice(choices) for _ in range(24)) diff --git a/src/workload.py b/src/workload.py index 623c9251..06d6e54a 100644 --- a/src/workload.py +++ b/src/workload.py @@ -14,12 +14,13 @@ import ops import container -import logrotate import mysql_shell import server_exceptions if typing.TYPE_CHECKING: import abstract_charm + import logrotate + import relations.cos import relations.database_requires logger = logging.getLogger(__name__) @@ -38,10 +39,15 @@ class Workload: """MySQL Router workload""" def __init__( - self, *, container_: container.Container, logrotate_: logrotate.LogRotate + self, + *, + container_: container.Container, + logrotate_: "logrotate.LogRotate", + cos: "relations.cos.COSRelation", ) -> None: self._container = container_ self._logrotate = logrotate_ + self._cos = cos self._router_data_directory = self._container.path("/var/lib/mysqlrouter") self._tls_key_file = self._container.router_config_directory / "custom-key.pem" self._tls_certificate_file = ( @@ -65,19 +71,6 @@ def version(self) -> str: return component return "" - def disable(self) -> None: - """Stop and disable MySQL Router service.""" - if not self._container.mysql_router_service_enabled: - return - logger.debug("Disabling MySQL Router service") - self._container.update_mysql_router_service(enabled=False) - self._logrotate.disable() - self._container.router_config_directory.rmtree() - self._container.router_config_directory.mkdir() - self._router_data_directory.rmtree() - self._router_data_directory.mkdir() - logger.debug("Disabled MySQL Router service") - def upgrade(self, *, unit: ops.Unit, tls: bool) -> None: """Upgrade MySQL Router. 
@@ -100,24 +93,68 @@ def _tls_config_file_data(self) -> str: ) return config_string - def enable_tls(self, *, key: str, certificate: str): + @property + def _custom_tls_enabled(self) -> bool: + """Whether custom TLS certs are enabled for MySQL Router""" + return self._tls_key_file.exists() and self._tls_certificate_file.exists() + + def _disable_exporter(self) -> None: + """Stop and disable MySQL Router exporter service, keeping router enabled.""" + if not self._container.mysql_router_exporter_service_enabled: + return + logger.debug("Disabling MySQL Router exporter service") + self._cos.cleanup_monitoring_user() + self._container.update_mysql_router_exporter_service(enabled=False) + logger.debug("Disabled MySQL Router exporter service") + + def _enable_tls(self, *, key: str, certificate: str) -> None: """Enable TLS.""" - logger.debug("Enabling TLS") + logger.debug("Creating TLS files") self._container.tls_config_file.write_text(self._tls_config_file_data) self._tls_key_file.write_text(key) self._tls_certificate_file.write_text(certificate) - logger.debug("Enabled TLS") + logger.debug("Created TLS files") - def disable_tls(self) -> None: + def _disable_tls(self) -> None: """Disable TLS.""" - logger.debug("Disabling TLS") + logger.debug("Deleting TLS files") for file in ( self._container.tls_config_file, self._tls_key_file, self._tls_certificate_file, ): file.unlink(missing_ok=True) - logger.debug("Disabled TLS") + logger.debug("Deleting TLS files") + + def reconcile( + self, + *, + tls: bool, + unit_name: str, + exporter_config: "relations.cos.ExporterConfig", + key: str = None, + certificate: str = None, + ) -> None: + """Reconcile all workloads (router, exporter, tls).""" + if tls and not (key and certificate): + raise ValueError("`key` and `certificate` arguments required when tls=True") + + if self._container.mysql_router_service_enabled: + logger.debug("Disabling MySQL Router service") + self._container.update_mysql_router_service(enabled=False) + 
self._logrotate.disable() + self._container.router_config_directory.rmtree() + self._container.router_config_directory.mkdir() + self._router_data_directory.rmtree() + self._router_data_directory.mkdir() + logger.debug("Disabled MySQL Router service") + + self._disable_exporter() + + if tls: + self._enable_tls(key=key, certificate=certificate) + else: + self._disable_tls() @property def status(self) -> typing.Optional[ops.StatusBase]: @@ -135,12 +172,14 @@ def __init__( self, *, container_: container.Container, - logrotate_: logrotate.LogRotate, + logrotate_: "logrotate.LogRotate", connection_info: "relations.database_requires.CompleteConnectionInformation", + cos: "relations.cos.COSRelation", charm_: "abstract_charm.MySQLRouterCharm", ) -> None: - super().__init__(container_=container_, logrotate_=logrotate_) + super().__init__(container_=container_, logrotate_=logrotate_, cos=cos) self._connection_info = connection_info + self._cos = cos self._charm = charm_ @property @@ -187,6 +226,10 @@ def _get_bootstrap_command( "--force", "--conf-set-option", "http_server.bind_address=127.0.0.1", + "--conf-set-option", + "http_auth_backend:default_auth_backend.backend=file", + "--conf-set-option", + f"http_auth_backend:default_auth_backend.filename={self._container.path(self._container.rest_api_credentials_file).relative_to_container}", "--conf-use-gr-notifications", ] @@ -248,24 +291,6 @@ def _router_username(self) -> str: """ return self._parse_username_from_config(self._container.router_config_file.read_text()) - def enable(self, *, tls: bool, unit_name: str) -> None: - """Start and enable MySQL Router service.""" - if self._container.mysql_router_service_enabled: - # If the host or port changes, MySQL Router will receive topology change - # notifications from MySQL. - # Therefore, if the host or port changes, we do not need to restart MySQL Router. 
- return - logger.debug("Enabling MySQL Router service") - self._cleanup_after_upgrade_or_potential_container_restart() - self._bootstrap_router(tls=tls) - self.shell.add_attributes_to_mysql_router_user( - username=self._router_username, router_id=self._router_id, unit_name=unit_name - ) - self._container.update_mysql_router_service(enabled=True, tls=tls) - self._logrotate.enable() - logger.debug("Enabled MySQL Router service") - self._charm.wait_until_mysql_router_ready() - def _restart(self, *, tls: bool) -> None: """Restart MySQL Router to enable or disable TLS.""" logger.debug("Restarting MySQL Router") @@ -277,17 +302,55 @@ def _restart(self, *, tls: bool) -> None: # status self._charm.set_status(event=None) - def enable_tls(self, *, key: str, certificate: str): - """Enable TLS and restart MySQL Router.""" - super().enable_tls(key=key, certificate=certificate) - if self._container.mysql_router_service_enabled: - self._restart(tls=True) - - def disable_tls(self) -> None: - """Disable TLS and restart MySQL Router.""" - super().disable_tls() - if self._container.mysql_router_service_enabled: - self._restart(tls=False) + def reconcile( + self, + *, + tls: bool, + unit_name: str, + exporter_config: "relations.cos.ExporterConfig", + key: str = None, + certificate: str = None, + ) -> None: + """Reconcile all workloads (router, exporter, tls).""" + if tls and not (key and certificate): + raise ValueError("`key` and `certificate` arguments required when tls=True") + + # value changes based on whether tls is enabled or disabled + tls_was_enabled = self._custom_tls_enabled + if tls: + self._enable_tls(key, certificate) + if not tls_was_enabled and self._container.mysql_router_service_enabled: + self._restart(tls=tls) + else: + self._disable_tls() + if tls_was_enabled and self._container.mysql_router_service_enabled: + self._restart(tls=tls) + + # If the host or port changes, MySQL Router will receive topology change + # notifications from MySQL. 
+ # Therefore, if the host or port changes, we do not need to restart MySQL Router. + if not self._container.mysql_router_service_enabled: + logger.debug("Enabling MySQL Router service") + self._cleanup_after_upgrade_or_potential_container_restart() + self._container.create_router_rest_api_credentials_file() # create an empty credentials file + self._bootstrap_router(tls=self._custom_tls_enabled) + self.shell.add_attributes_to_mysql_router_user( + username=self._router_username, router_id=self._router_id, unit_name=unit_name + ) + self._container.update_mysql_router_service(enabled=True, tls=self._custom_tls_enabled) + self._logrotate.enable() + logger.debug("Enabled MySQL Router service") + self._charm.wait_until_mysql_router_ready() + + if not self._container.mysql_router_exporter_service_enabled and exporter_config: + logger.debug("Enabling MySQL Router exporter service") + self._cos.setup_monitoring_user() + self._container.update_mysql_router_exporter_service( + enabled=True, config=exporter_config + ) + logger.debug("Enabled MySQL Router exporter service") + elif self._container.mysql_router_exporter_service_enabled and not exporter_config: + self._disable_exporter() @property def status(self) -> typing.Optional[ops.StatusBase]: diff --git a/tests/integration/test_database.py b/tests/integration/test_database.py index 8e160b73..86f34a8a 100644 --- a/tests/integration/test_database.py +++ b/tests/integration/test_database.py @@ -56,28 +56,25 @@ async def test_database_relation(ops_test: OpsTest, mysql_router_charm_series: s ), ) - mysql_app, application_app = applications[0], applications[2] + [mysql_app, mysql_router_app, application_app] = applications await ops_test.model.relate( f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" ) # the mysqlrouter application will be in unknown state since it is a subordinate charm - async with ops_test.fast_forward(): + async with ops_test.fast_forward("60s"): await asyncio.gather( - 
ops_test.model.wait_for_idle( - apps=[MYSQL_APP_NAME], - status="active", - raise_on_blocked=True, + ops_test.model.block_until( + lambda: mysql_app.status in ("active", "error", "blocked"), timeout=SLOW_TIMEOUT, ), - ops_test.model.wait_for_idle( - apps=[APPLICATION_APP_NAME], - status="waiting", - raise_on_blocked=True, + ops_test.model.block_until( + lambda: application_app.status in ("waiting", "error", "blocked"), timeout=SLOW_TIMEOUT, ), ) + assert mysql_app.status == "active" and application_app.status == "waiting" await ops_test.model.relate( f"{MYSQL_ROUTER_APP_NAME}:database", f"{APPLICATION_APP_NAME}:database" @@ -90,6 +87,26 @@ async def test_database_relation(ops_test: OpsTest, mysql_router_charm_series: s timeout=SLOW_TIMEOUT, ) + await asyncio.gather( + ops_test.model.block_until( + lambda: mysql_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + ops_test.model.block_until( + lambda: mysql_router_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + ops_test.model.block_until( + lambda: application_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + ) + assert ( + mysql_app.status == "active" + and mysql_router_app.status == "active" + and application_app.status == "active" + ) + # Ensure that the data inserted by sample application is present in the database application_unit = application_app.units[0] inserted_data = await get_inserted_data_by_application(application_unit) @@ -117,9 +134,22 @@ async def test_database_relation(ops_test: OpsTest, mysql_router_charm_series: s await ops_test.model.block_until(lambda: len(application_app.units) == 2) - await ops_test.model.wait_for_idle( - apps=[MYSQL_APP_NAME, MYSQL_ROUTER_APP_NAME, APPLICATION_APP_NAME], - status="active", - raise_on_blocked=True, - timeout=SLOW_TIMEOUT, + await asyncio.gather( + ops_test.model.block_until( + lambda: mysql_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + 
ops_test.model.block_until( + lambda: mysql_router_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + ops_test.model.block_until( + lambda: application_app.status in ("active", "error", "blocked"), + timeout=SLOW_TIMEOUT, + ), + ) + assert ( + mysql_app.status == "active" + and mysql_router_app.status == "active" + and application_app.status == "active" ) diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py new file mode 100644 index 00000000..7051abb9 --- /dev/null +++ b/tests/integration/test_exporter.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +import asyncio +import logging +import time + +import pytest +import urllib3 +from pytest_operator.plugin import OpsTest + +logger = logging.getLogger(__name__) + +MYSQL_APP_NAME = "mysql" +MYSQL_ROUTER_APP_NAME = "mysql-router" +APPLICATION_APP_NAME = "mysql-test-app" +GRAFANA_AGENT_APP_NAME = "grafana-agent" +SLOW_TIMEOUT = 25 * 60 + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: str) -> None: + """Test that exporter endpoint is functional.""" + http = urllib3.PoolManager() + + # Build and deploy applications + mysqlrouter_charm = await ops_test.build_charm(".") + + logger.info("Deploying all the applications") + + # deploy mysqlrouter with num_units=None since it's a subordinate charm + # and will be installed with the related consumer application + applications = await asyncio.gather( + ops_test.model.deploy( + MYSQL_APP_NAME, + channel="8.0/edge", + application_name=MYSQL_APP_NAME, + config={"profile": "testing"}, + num_units=1, + ), + ops_test.model.deploy( + mysqlrouter_charm, + application_name=MYSQL_ROUTER_APP_NAME, + num_units=0, + series=mysql_router_charm_series, + ), + ops_test.model.deploy( + APPLICATION_APP_NAME, + application_name=APPLICATION_APP_NAME, + num_units=1, + # MySQL Router and 
Grafana agent are subordinate - + # they will use the series of the principal charm + series=mysql_router_charm_series, + channel="latest/edge", + ), + ops_test.model.deploy( + GRAFANA_AGENT_APP_NAME, + application_name=GRAFANA_AGENT_APP_NAME, + num_units=0, + channel="latest/stable", + series=mysql_router_charm_series, + ), + ) + + mysql_app, mysql_router_app, mysql_test_app, grafana_agent_app = applications + + logger.info("Relating mysqlrouter and grafana-agent with mysql-test-app") + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:database", f"{APPLICATION_APP_NAME}:database" + ) + + await ops_test.model.relate( + f"{APPLICATION_APP_NAME}:juju-info", f"{GRAFANA_AGENT_APP_NAME}:juju-info" + ) + + async with ops_test.fast_forward(): + await asyncio.gather( + ops_test.model.block_until(lambda: mysql_app.status == "active", timeout=SLOW_TIMEOUT), + ops_test.model.block_until( + lambda: mysql_router_app.status == "blocked", timeout=SLOW_TIMEOUT + ), + ops_test.model.block_until( + lambda: mysql_test_app.status == "waiting", timeout=SLOW_TIMEOUT + ), + ops_test.model.block_until( + lambda: grafana_agent_app.status == "blocked", timeout=SLOW_TIMEOUT + ), + ) + + logger.info("Relating mysqlrouter with mysql") + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" + ) + + await asyncio.gather( + ops_test.model.block_until(lambda: mysql_app.status == "active", timeout=SLOW_TIMEOUT), + ops_test.model.block_until( + lambda: mysql_router_app.status == "active", timeout=SLOW_TIMEOUT + ), + ops_test.model.block_until( + lambda: mysql_test_app.status == "active", timeout=SLOW_TIMEOUT + ), + ops_test.model.block_until( + lambda: grafana_agent_app.status == "blocked", timeout=SLOW_TIMEOUT + ), + ) + + unit = mysql_test_app.units[0] + unit_address = await unit.get_public_address() + + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + 
"[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" + + await ops_test.model.relate( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + time.sleep(30) + + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" + + await mysql_router_app.remove_relation( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + time.sleep(30) + + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + assert False, "❌ can connect to metrics endpoint without relation with cos" + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index a6df9dcb..e0b10f49 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -47,6 +47,7 @@ def patch(monkeypatch): monkeypatch.setattr("upgrade.Upgrade.is_compatible", True) +# flake8: noqa: C901 @pytest.fixture(autouse=True) def machine_patch(monkeypatch): monkeypatch.setattr("lifecycle.Unit._on_subordinate_relation_broken", lambda *args: None) @@ -55,21 +56,38 @@ class Snap: present = False def __init__(self): - self.services = {"mysqlrouter-service": {"active": False}} + self.services = { + "mysqlrouter-service": {"active": False}, + "mysqlrouter-exporter": {"active": False}, + } def ensure(self, *_, **__): return + def set(self, *_, **__): + return + + def unset(self, *_, **__): + return + def hold(self, *_, **__): return def start(self, services: list[str] = None, *_, **__): - assert services == ["mysqlrouter-service"] - 
self.services["mysqlrouter-service"]["active"] = True + for service in services: + assert service in ("mysqlrouter-service", "mysqlrouter-exporter") + + self.services["mysqlrouter-service"]["active"] = "mysqlrouter-service" in services + self.services["mysqlrouter-exporter"]["active"] = "mysqlrouter-exporter" in services def stop(self, services: list[str] = None, *_, **__): - assert services == ["mysqlrouter-service"] - self.services["mysqlrouter-service"]["active"] = False + for service in services: + assert service in ("mysqlrouter-service", "mysqlrouter-exporter") + + if "mysqlrouter-service" in services: + self.services["mysqlrouter-service"]["active"] = False + if "mysqlrouter-exporter" in services: + self.services["mysqlrouter-exporter"]["active"] = False monkeypatch.setattr(snap, "_snap", Snap())