diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 421eee2c..9c3915d7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -116,6 +116,9 @@ jobs: - juju-snap-channel: "2.9/stable" agent-version: "2.9.45" libjuju-version: "2.9.44.1" + exclude: + - groups: {path_to_test_file: tests/integration/test_data_integrator.py} + ubuntu-versions: {series: focal} name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint diff --git a/actions.yaml b/actions.yaml index 18633ed2..be08699e 100644 --- a/actions.yaml +++ b/actions.yaml @@ -12,3 +12,12 @@ force-upgrade: Use to - force incompatible upgrade and/or - continue upgrade if 1+ upgraded units have non-active status +set-tls-private-key: + description: + Set the private key, which will be used for certificate signing requests (CSR). Run + for each unit separately. + params: + internal-key: + type: string + description: The content of private key for internal communications with + clients. Content will be auto-generated if this option is not specified. diff --git a/charmcraft.yaml b/charmcraft.yaml index 33dd2864..47fab5e7 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,6 +27,12 @@ parts: echo 'ERROR: Use "tox run -e build" instead of calling "charmcraft pack" directly' >&2 exit 1 fi + build-packages: + - libffi-dev + - libssl-dev + - pkg-config + - rustc + - cargo charm-entrypoint: src/machine_charm.py prime: - charm_version diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index d24aa6ff..3ce69e15 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -295,10 +295,21 @@ def _on_topic_requested(self, event: TopicRequestedEvent): import json import logging from abc import ABC, abstractmethod -from collections import namedtuple +from collections import UserDict, namedtuple from datetime import datetime from enum import Enum -from typing import Callable, Dict, List, Optional, Set, Tuple, Union +from typing import ( + Callable, + Dict, + ItemsView, + KeysView, + List, + Optional, + Set, + Tuple, + Union, + ValuesView, +) from ops import JujuVersion, Model, Secret, SecretInfo, SecretNotFoundError from ops.charm import ( @@ -320,7 +331,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 29 +LIBPATCH = 34 PYDEPS = ["ops>=2.0.0"] @@ -337,21 +348,46 @@ def _on_topic_requested(self, event: TopicRequestedEvent): PROV_SECRET_PREFIX = "secret-" REQ_SECRET_FIELDS = "requested-secrets" +GROUP_MAPPING_FIELD = "secret_group_mapping" +GROUP_SEPARATOR = "@" + + +class SecretGroup(str): + """Secret groups specific type.""" + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.EXTRA = SecretGroup("extra") + + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) -class SecretGroup(Enum): - """Secret groups as constants.""" + def get_group(self, group: str) 
-> Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None - USER = "user" - TLS = "tls" - EXTRA = "extra" + +SECRET_GROUPS = SecretGroupsAggregate() class DataInterfacesError(Exception): """Common ancestor for DataInterfaces related exceptions.""" -class SecretError(Exception): +class SecretError(DataInterfacesError): """Common ancestor for Secrets related exceptions.""" @@ -367,6 +403,10 @@ class SecretsIllegalUpdateError(SecretError): """Secrets aren't yet available for Juju version used.""" +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + def get_encoded_dict( relation: Relation, member: Union[Unit, Application], field: str ) -> Optional[Dict[str, str]]: @@ -453,6 +493,7 @@ def wrapper(self, *args, **kwargs): return return f(self, *args, **kwargs) + wrapper.leader_only = True return wrapper @@ -467,6 +508,34 @@ def wrapper(self, *args, **kwargs): return wrapper +def dynamic_secrets_only(f): + """Decorator to ensure that certain operations would be only executed when NO static secrets are defined.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields: + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + +def either_static_or_dynamic_secrets(f): + """Decorator to ensure that static and dynamic secrets won't be used in parallel.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields and set(self.current_secret_fields) - set( + self.static_secret_fields + ): + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + class Scope(Enum): """Peer relations scope.""" @@ -474,6 +543,11 @@ class Scope(Enum): UNIT = "unit" +################################################################################ +# Secrets internal caching +################################################################################ + + class CachedSecret: """Locally cache a secret. 
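A short, hedged illustration of the new `SECRET_GROUPS` aggregate introduced above. Unlike the removed `SecretGroup` enum, it is a registry of string-typed constants that can be extended at runtime, which is what later enables `additional_secret_group_mapping` on the peer-relation classes. The `mygroup` name below is a hypothetical extra group, not something this diff registers:

```python
from charms.data_platform_libs.v0.data_interfaces import SECRET_GROUPS, SecretGroup

# Built-in groups are constants of the str subclass SecretGroup.
assert SECRET_GROUPS.USER == SecretGroup("user")
assert SECRET_GROUPS.get_group("user") == "user"    # known group -> SecretGroup
assert SECRET_GROUPS.get_group("unknown") is None   # unknown group -> None

# New groups can be registered dynamically; existing constants are write-protected.
setattr(SECRET_GROUPS, "mygroup", "mygroup")        # hypothetical extra group
assert "mygroup" in SECRET_GROUPS.groups()
# SECRET_GROUPS.USER = "other"  # would raise RuntimeError("Can't set constant!")
```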
@@ -486,6 +560,7 @@ def __init__( component: Union[Application, Unit], label: str, secret_uri: Optional[str] = None, + legacy_labels: List[str] = [], ): self._secret_meta = None self._secret_content = {} @@ -493,16 +568,25 @@ def __init__( self.label = label self._model = model self.component = component + self.legacy_labels = legacy_labels + self.current_label = None - def add_secret(self, content: Dict[str, str], relation: Relation) -> Secret: + def add_secret( + self, + content: Dict[str, str], + relation: Optional[Relation] = None, + label: Optional[str] = None, + ) -> Secret: """Create a new secret.""" if self._secret_uri: raise SecretAlreadyExistsError( "Secret is already defined with uri %s", self._secret_uri ) - secret = self.component.add_secret(content, label=self.label) - if relation.app != self._model.app: + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if relation and relation.app != self._model.app: # If it's not a peer relation, grant is to be applied secret.grant(relation) self._secret_uri = secret.id @@ -515,13 +599,20 @@ def meta(self) -> Optional[Secret]: if not self._secret_meta: if not (self._secret_uri or self.label): return - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - if self._secret_uri: - self._secret_meta = self._model.get_secret( - id=self._secret_uri, label=self.label - ) + + for label in [self.label] + self.legacy_labels: + try: + self._secret_meta = self._model.get_secret(label=label) + except SecretNotFoundError: + pass + else: + if label != self.label: + self.current_label = label + break + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) return self._secret_meta def get_content(self) -> Dict[str, str]: @@ -545,12 +636,30 @@ def get_content(self) -> Dict[str, str]: self._secret_content = self.meta.get_content() return self._secret_content + def _move_to_new_label_if_needed(self): + """Helper function to re-create the secret with a different label.""" + if not self.current_label or not (self.meta and self._secret_meta): + return + + # Create a new secret with the new label + old_meta = self._secret_meta + content = self._secret_meta.get_content() + + # I wish we could just check if we are the owners of the secret... 
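+        # Juju does not let us query secret ownership directly, so we re-create the
+        # secret under the proposed label and drop the old revisions afterwards.
+        # On non-leader units, add_secret() for an app-owned secret fails with a
+        # "this unit is not the leader" ModelError, the only error tolerated below.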
+ try: + self._secret_meta = self.add_secret(content, label=self.label) + except ModelError as err: + if "this unit is not the leader" not in str(err): + raise + old_meta.remove_all_revisions() + def set_content(self, content: Dict[str, str]) -> None: """Setting cached secret content.""" if not self.meta: return if content: + self._move_to_new_label_if_needed() self.meta.set_content(content) self._secret_content = content else: @@ -582,10 +691,14 @@ def __init__(self, model: Model, component: Union[Application, Unit]): self.component = component self._secrets: Dict[str, CachedSecret] = {} - def get(self, label: str, uri: Optional[str] = None) -> Optional[CachedSecret]: + def get( + self, label: str, uri: Optional[str] = None, legacy_labels: List[str] = [] + ) -> Optional[CachedSecret]: """Getting a secret from Juju Secret store or cache.""" if not self._secrets.get(label): - secret = CachedSecret(self._model, self.component, label, uri) + secret = CachedSecret( + self._model, self.component, label, uri, legacy_labels=legacy_labels + ) if secret.meta: self._secrets[label] = secret return self._secrets.get(label) @@ -603,15 +716,130 @@ def add(self, label: str, content: Dict[str, str], relation: Relation) -> Cached def remove(self, label: str) -> None: """Remove a secret from the cache.""" if secret := self.get(label): - secret.remove() - self._secrets.pop(label) - else: - logging.error("Non-existing Juju Secret was attempted to be removed %s", label) + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) + + +################################################################################ +# Relation Data base/abstract ancestors (i.e. 
parent classes) +################################################################################ # Base Data +class DataDict(UserDict): + """Python Standard Library 'dict' - like representation of Relation Data.""" + + def __init__(self, relation_data: "Data", relation_id: int): + self.relation_data = relation_data + self.relation_id = relation_id + + @property + def data(self) -> Dict[str, str]: + """Return the full content of the Abstract Relation Data dictionary.""" + result = self.relation_data.fetch_my_relation_data([self.relation_id]) + try: + result_remote = self.relation_data.fetch_relation_data([self.relation_id]) + except NotImplementedError: + result_remote = {self.relation_id: {}} + if result: + result_remote[self.relation_id].update(result[self.relation_id]) + return result_remote.get(self.relation_id, {}) + + def __setitem__(self, key: str, item: str) -> None: + """Set an item of the Abstract Relation Data dictionary.""" + self.relation_data.update_relation_data(self.relation_id, {key: item}) + + def __getitem__(self, key: str) -> str: + """Get an item of the Abstract Relation Data dictionary.""" + result = None + + # Avoiding "leader_only" error when cross-charm non-leader unit, not to report useless error + if ( + not hasattr(self.relation_data.fetch_my_relation_field, "leader_only") + or self.relation_data.component != self.relation_data.local_app + or self.relation_data.local_unit.is_leader() + ): + result = self.relation_data.fetch_my_relation_field(self.relation_id, key) + + if not result: + try: + result = self.relation_data.fetch_relation_field(self.relation_id, key) + except NotImplementedError: + pass + + if not result: + raise KeyError + return result + + def __eq__(self, d: dict) -> bool: + """Equality.""" + return self.data == d + + def __repr__(self) -> str: + """String representation Abstract Relation Data dictionary.""" + return repr(self.data) + + def __len__(self) -> int: + """Length of the Abstract Relation Data dictionary.""" + return len(self.data) + + def __delitem__(self, key: str) -> None: + """Delete an item of the Abstract Relation Data dictionary.""" + self.relation_data.delete_relation_data(self.relation_id, [key]) + + def has_key(self, key: str) -> bool: + """Does the key exist in the Abstract Relation Data dictionary?""" + return key in self.data + + def update(self, items: Dict[str, str]): + """Update the Abstract Relation Data dictionary.""" + self.relation_data.update_relation_data(self.relation_id, items) + + def keys(self) -> KeysView[str]: + """Keys of the Abstract Relation Data dictionary.""" + return self.data.keys() + + def values(self) -> ValuesView[str]: + """Values of the Abstract Relation Data dictionary.""" + return self.data.values() + + def items(self) -> ItemsView[str, str]: + """Items of the Abstract Relation Data dictionary.""" + return self.data.items() + + def pop(self, item: str) -> str: + """Pop an item of the Abstract Relation Data dictionary.""" + result = self.relation_data.fetch_my_relation_field(self.relation_id, item) + if not result: + raise KeyError(f"Item {item} doesn't exist.") + self.relation_data.delete_relation_data(self.relation_id, [item]) + return result + + def __contains__(self, item: str) -> bool: + """Does the Abstract Relation Data dictionary contain item?""" + return item in self.data.values() + + def __iter__(self): + """Iterate through the Abstract Relation Data dictionary.""" + return iter(self.data) + + def get(self, key: str, default: Optional[str] = None) -> Optional[str]: + """Safely get an 
item of the Abstract Relation Data dictionary.""" + try: + if result := self[key]: + return result + except KeyError: + return default + + class Data(ABC): """Base relation data mainpulation (abstract) class.""" @@ -619,11 +847,11 @@ class Data(ABC): # Local map to associate mappings with secrets potentially as a group SECRET_LABEL_MAP = { - "username": SecretGroup.USER, - "password": SecretGroup.USER, - "uris": SecretGroup.USER, - "tls": SecretGroup.TLS, - "tls-ca": SecretGroup.TLS, + "username": SECRET_GROUPS.USER, + "password": SECRET_GROUPS.USER, + "uris": SECRET_GROUPS.USER, + "tls": SECRET_GROUPS.TLS, + "tls-ca": SECRET_GROUPS.TLS, } def __init__( @@ -656,6 +884,11 @@ def secrets_enabled(self): self._jujuversion = JujuVersion.from_environ() return self._jujuversion.has_secrets + @property + def secret_label_map(self): + """Exposing secret-label map via a property -- could be overridden in descendants!""" + return self.SECRET_LABEL_MAP + # Mandatory overrides for internal/helper methods @abstractmethod @@ -710,11 +943,11 @@ def _generate_secret_label( relation_name: str, relation_id: int, group_mapping: SecretGroup ) -> str: """Generate unique group_mappings for secrets within a relation context.""" - return f"{relation_name}.{relation_id}.{group_mapping.value}.secret" + return f"{relation_name}.{relation_id}.{group_mapping}.secret" def _generate_secret_field_name(self, group_mapping: SecretGroup) -> str: """Generate unique group_mappings for secrets within a relation context.""" - return f"{PROV_SECRET_PREFIX}{group_mapping.value}" + return f"{PROV_SECRET_PREFIX}{group_mapping}" def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: """Retrieve the relation that belongs to a secret label.""" @@ -739,8 +972,7 @@ def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: except ModelError: return - @classmethod - def _group_secret_fields(cls, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: """Helper function to arrange secret mappings under their group. NOTE: All unrecognized items end up in the 'extra' secret bucket. 
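For context, a hedged sketch of how the `DataDict` wrapper added above is meant to be consumed from a charm through the new `Data.as_dict()` method. The `cluster-peers` relation name and the `cluster-state` key are hypothetical, and writing app-scoped peer data still requires leadership as usual:

```python
from charms.data_platform_libs.v0.data_interfaces import DataPeer
from ops.charm import CharmBase


class ExampleCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # "cluster-peers" is a hypothetical peer relation declared by the charm.
        self.peer_data = DataPeer(self, relation_name="cluster-peers")

    def _on_some_event(self, event) -> None:
        relation = self.model.get_relation("cluster-peers")
        if not relation:
            return
        data = self.peer_data.as_dict(relation.id)      # DataDict view

        data["cluster-state"] = "initialised"           # -> update_relation_data()
        state = data.get("cluster-state", "unknown")    # -> fetch_*_relation_field()
        del data["cluster-state"]                       # -> delete_relation_data()
```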
@@ -748,44 +980,42 @@ def _group_secret_fields(cls, secret_fields: List[str]) -> Dict[SecretGroup, Lis """ secret_fieldnames_grouped = {} for key in secret_fields: - if group := cls.SECRET_LABEL_MAP.get(key): + if group := self.secret_label_map.get(key): secret_fieldnames_grouped.setdefault(group, []).append(key) else: - secret_fieldnames_grouped.setdefault(SecretGroup.EXTRA, []).append(key) + secret_fieldnames_grouped.setdefault(SECRET_GROUPS.EXTRA, []).append(key) return secret_fieldnames_grouped def _get_group_secret_contents( self, relation: Relation, group: SecretGroup, - secret_fields: Optional[Union[Set[str], List[str]]] = None, + secret_fields: Union[Set[str], List[str]] = [], ) -> Dict[str, str]: """Helper function to retrieve collective, requested contents of a secret.""" - if not secret_fields: - secret_fields = [] - if (secret := self._get_relation_secret(relation.id, group)) and ( secret_data := secret.get_content() ): - return {k: v for k, v in secret_data.items() if k in secret_fields} + return { + k: v for k, v in secret_data.items() if not secret_fields or k in secret_fields + } return {} - @classmethod def _content_for_secret_group( - cls, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup ) -> Dict[str, str]: """Select : pairs from input, that belong to this particular Secret group.""" - if group_mapping == SecretGroup.EXTRA: + if group_mapping == SECRET_GROUPS.EXTRA: return { k: v for k, v in content.items() - if k in secret_fields and k not in cls.SECRET_LABEL_MAP.keys() + if k in secret_fields and k not in self.secret_label_map.keys() } return { k: v for k, v in content.items() - if k in secret_fields and cls.SECRET_LABEL_MAP.get(k) == group_mapping + if k in secret_fields and self.secret_label_map.get(k) == group_mapping } @juju_secrets_only @@ -919,7 +1149,7 @@ def _delete_relation_data_without_secrets( try: relation.data[component].pop(field) except KeyError: - logger.error( + logger.debug( "Non-existing field '%s' was attempted to be removed from the databag (relation ID: %s)", str(field), str(relation.id), @@ -929,6 +1159,10 @@ def _delete_relation_data_without_secrets( # Public interface methods # Handling Relation Fields seamlessly, regardless if in databag or a Juju Secret + def as_dict(self, relation_id: int) -> UserDict: + """Dict behavior representation of the Abstract Data.""" + return DataDict(self, relation_id) + def get_relation(self, relation_name, relation_id) -> Relation: """Safe way of retrieving a relation.""" relation = self._model.get_relation(relation_name, relation_id) @@ -1171,7 +1405,7 @@ def _delete_relation_secret( try: new_content.pop(field) except KeyError: - logging.error( + logging.debug( "Non-existing secret was attempted to be removed %s, %s", str(relation.id), str(field), @@ -1363,7 +1597,7 @@ def _register_secrets_to_relation(self, relation: Relation, params_name_list: Li if not relation.app: return - for group in SecretGroup: + for group in SECRET_GROUPS.groups(): secret_field = self._generate_secret_field_name(group) if secret_field in params_name_list: if secret_uri := relation.data[relation.app].get(secret_field): @@ -1497,7 +1731,7 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: if self.relation_data.secret_fields: # pyright: ignore [reportAttributeAccessIssue] set_encoded_field( event.relation, - self.charm.app, + self.relation_data.component, REQ_SECRET_FIELDS, 
self.relation_data.secret_fields, # pyright: ignore [reportAttributeAccessIssue] ) @@ -1508,13 +1742,15 @@ def _on_secret_changed_event(self, event: RelationChangedEvent) -> None: raise NotImplementedError -# Base DataPeer +################################################################################ +# Peer Relation Data +################################################################################ class DataPeerData(RequirerData, ProviderData): """Represents peer relations data.""" - SECRET_FIELDS = ["operator-password"] + SECRET_FIELDS = [] SECRET_FIELD_NAME = "internal_secret" SECRET_LABEL_MAP = {} @@ -1524,6 +1760,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, ): @@ -1537,6 +1774,19 @@ def __init__( ) self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME self.deleted_label = deleted_label + self._secret_label_map = {} + # Secrets that are being dynamically added within the scope of this event handler run + self._new_secrets = [] + self._additional_secret_group_mapping = additional_secret_group_mapping + + for group, fields in additional_secret_group_mapping.items(): + if group not in SECRET_GROUPS.groups(): + setattr(SECRET_GROUPS, group, group) + for field in fields: + secret_group = SECRET_GROUPS.get_group(group) + internal_field = self._field_to_internal_name(field, secret_group) + self._secret_label_map.setdefault(group, []).append(internal_field) + self._secret_fields.append(internal_field) @property def scope(self) -> Optional[Scope]: @@ -1546,15 +1796,232 @@ def scope(self) -> Optional[Scope]: if isinstance(self.component, Unit): return Scope.UNIT + @property + def secret_label_map(self) -> Dict[str, str]: + """Property storing secret mappings.""" + return self._secret_label_map + + @property + def static_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return self._secret_fields + + @property + def secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return ( + self.static_secret_fields if self.static_secret_fields else self.current_secret_fields + ) + + @property + def current_secret_fields(self) -> List[str]: + """Helper method to get all currently existing secret fields (added statically or dynamically).""" + if not self.secrets_enabled: + return [] + + if len(self._model.relations[self.relation_name]) > 1: + raise ValueError(f"More than one peer relation on {self.relation_name}") + + relation = self._model.relations[self.relation_name][0] + fields = [] + + ignores = [SECRET_GROUPS.get_group("user"), SECRET_GROUPS.get_group("tls")] + for group in SECRET_GROUPS.groups(): + if group in ignores: + continue + if content := self._get_group_secret_contents(relation, group): + fields += list(content.keys()) + return list(set(fields) | set(self._new_secrets)) + + @dynamic_secrets_only + def set_secret( + self, + relation_id: int, + field: str, + value: str, + group_mapping: Optional[SecretGroup] = None, + ) -> None: + """Public interface method to add a Relation Data field specifically as a Juju Secret. 
+ + Args: + relation_id: ID of the relation + field: The secret field that is to be added + value: The string value of the secret + group_mapping: The name of the "secret group", in case the field is to be added to an existing secret + """ + full_field = self._field_to_internal_name(field, group_mapping) + if self.secrets_enabled and full_field not in self.current_secret_fields: + self._new_secrets.append(full_field) + if self._no_group_with_databag(field, full_field): + self.update_relation_data(relation_id, {full_field: value}) + + # Unlike for set_secret(), there's no harm using this operation with static secrets + # The restricion is only added to keep the concept clear + @dynamic_secrets_only + def get_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to fetch secrets only.""" + full_field = self._field_to_internal_name(field, group_mapping) + if ( + self.secrets_enabled + and full_field not in self.current_secret_fields + and field not in self.current_secret_fields + ): + return + if self._no_group_with_databag(field, full_field): + return self.fetch_my_relation_field(relation_id, full_field) + + @dynamic_secrets_only + def delete_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to delete secrets only.""" + full_field = self._field_to_internal_name(field, group_mapping) + if self.secrets_enabled and full_field not in self.current_secret_fields: + logger.warning(f"Secret {field} from group {group_mapping} was not found") + return + if self._no_group_with_databag(field, full_field): + self.delete_relation_data(relation_id, [full_field]) + + # Helpers + + @staticmethod + def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str: + if not group or group == SECRET_GROUPS.EXTRA: + return field + return f"{field}{GROUP_SEPARATOR}{group}" + + @staticmethod + def _internal_name_to_field(name: str) -> Tuple[str, SecretGroup]: + parts = name.split(GROUP_SEPARATOR) + if not len(parts) > 1: + return (parts[0], SECRET_GROUPS.EXTRA) + secret_group = SECRET_GROUPS.get_group(parts[1]) + if not secret_group: + raise ValueError(f"Invalid secret field {name}") + return (parts[0], secret_group) + + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + """Helper function to arrange secret mappings under their group. + + NOTE: All unrecognized items end up in the 'extra' secret bucket. + Make sure only secret fields are passed! 
+ """ + secret_fieldnames_grouped = {} + for key in secret_fields: + field, group = self._internal_name_to_field(key) + secret_fieldnames_grouped.setdefault(group, []).append(field) + return secret_fieldnames_grouped + + def _content_for_secret_group( + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + ) -> Dict[str, str]: + """Select : pairs from input, that belong to this particular Secret group.""" + if group_mapping == SECRET_GROUPS.EXTRA: + return {k: v for k, v in content.items() if k in self.secret_fields} + return { + self._internal_name_to_field(k)[0]: v + for k, v in content.items() + if k in self.secret_fields + } + + # Backwards compatibility + + def _check_deleted_label(self, relation, fields) -> None: + """Helper function for legacy behavior.""" + current_data = self.fetch_my_relation_data([relation.id], fields) + if current_data is not None: + # Check if the secret we wanna delete actually exists + # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found') + if non_existent := (set(fields) & set(self.secret_fields)) - set( + current_data.get(relation.id, []) + ): + logger.debug( + "Non-existing secret %s was attempted to be removed.", + ", ".join(non_existent), + ) + + def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: + """For Rolling Upgrades -- when moving from databag to secrets usage. + + Practically what happens here is to remove stuff from the databag that is + to be stored in secrets. + """ + if not self.secret_fields: + return + + secret_fields_passed = set(self.secret_fields) & set(fields) + for field in secret_fields_passed: + if self._fetch_relation_data_without_secrets(self.component, relation, [field]): + self._delete_relation_data_without_secrets(self.component, relation, [field]) + + def _remove_secret_field_name_from_databag(self, relation) -> None: + """Making sure that the old databag URI is gone. + + This action should not be executed more than once. + """ + # Nothing to do if 'internal-secret' is not in the databag + if not (relation.data[self.component].get(self._generate_secret_field_name())): + return + + # Making sure that the secret receives its label + # (This should have happened by the time we get here, rather an extra security measure.) + secret = self._get_relation_secret(relation.id) + + # Either app scope secret with leader executing, or unit scope secret + leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() + if secret and leader_or_unit_scope: + # Databag reference to the secret URI can be removed, now that it's labelled + relation.data[self.component].pop(self._generate_secret_field_name(), None) + + def _previous_labels(self) -> List[str]: + """Generator for legacy secret label names, for backwards compatibility.""" + result = [] + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + result.append(f"{'.'.join(members)}") + return result + + def _no_group_with_databag(self, field: str, full_field: str) -> bool: + """Check that no secret group is attempted to be used together with databag.""" + if not self.secrets_enabled and full_field != field: + logger.error( + f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
+ ) + return False + return True + + # Event handlers + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + # Overrides of Relation Data handling functions + def _generate_secret_label( self, relation_name: str, relation_id: int, group_mapping: SecretGroup ) -> str: - members = [self._model.app.name] + members = [relation_name, self._model.app.name] if self.scope: members.append(self.scope.value) + if group_mapping != SECRET_GROUPS.EXTRA: + members.append(group_mapping) return f"{'.'.join(members)}" - def _generate_secret_field_name(self, group_mapping: SecretGroup = SecretGroup.EXTRA) -> str: + def _generate_secret_field_name(self, group_mapping: SecretGroup = SECRET_GROUPS.EXTRA) -> str: """Generate unique group_mappings for secrets within a relation context.""" return f"{self.secret_field_name}" @@ -1562,7 +2029,7 @@ def _generate_secret_field_name(self, group_mapping: SecretGroup = SecretGroup.E def _get_relation_secret( self, relation_id: int, - group_mapping: SecretGroup = SecretGroup.EXTRA, + group_mapping: SecretGroup = SECRET_GROUPS.EXTRA, relation_name: Optional[str] = None, ) -> Optional[CachedSecret]: """Retrieve a Juju Secret specifically for peer relations. @@ -1581,51 +2048,29 @@ def _get_relation_secret( label = self._generate_secret_label(relation_name, relation_id, group_mapping) secret_uri = relation.data[self.component].get(self._generate_secret_field_name(), None) - # Fetching the secret with fallback to URI (in case label is not yet known) - # Label would we "stuck" on the secret in case it is found - secret = self.secrets.get(label, secret_uri) - - # Either app scope secret with leader executing, or unit scope secret - leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() - if secret_uri and secret and leader_or_unit_scope: - # Databag reference to the secret URI can be removed, now that it's labelled - relation.data[self.component].pop(self._generate_secret_field_name(), None) - return secret + # URI or legacy label is only to applied when moving single legacy secret to a (new) label + if group_mapping == SECRET_GROUPS.EXTRA: + # Fetching the secret with fallback to URI (in case label is not yet known) + # Label would we "stuck" on the secret in case it is found + return self.secrets.get(label, secret_uri, legacy_labels=self._previous_labels()) + return self.secrets.get(label) def _get_group_secret_contents( self, relation: Relation, group: SecretGroup, - secret_fields: Optional[Union[Set[str], List[str]]] = None, + secret_fields: Union[Set[str], List[str]] = [], ) -> Dict[str, str]: """Helper function to retrieve collective, requested contents of a secret.""" + secret_fields = [self._internal_name_to_field(k)[0] for k in secret_fields] result = super()._get_group_secret_contents(relation, group, secret_fields) - if not self.deleted_label: - return result - return {key: result[key] for key in result if result[key] != self.deleted_label} - - def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: - """For Rolling Upgrades -- when moving from databag to secrets usage. - - Practically what happens here is to remove stuff from the databag that is - to be stored in secrets. 
- """ - if not self.secret_fields: - return - - secret_fields_passed = set(self.secret_fields) & set(fields) - for field in secret_fields_passed: - if self._fetch_relation_data_without_secrets(self.component, relation, [field]): - self._delete_relation_data_without_secrets(self.component, relation, [field]) - - def _fetch_specific_relation_data( - self, relation: Relation, fields: Optional[List[str]] - ) -> Dict[str, str]: - """Fetch data available (directily or indirectly -- i.e. secrets) from the relation.""" - return self._fetch_relation_data_with_secrets( - self.component, self.secret_fields, relation, fields - ) + if self.deleted_label: + result = {key: result[key] for key in result if result[key] != self.deleted_label} + if self._additional_secret_group_mapping: + return {self._field_to_internal_name(key, group): result[key] for key in result} + return result + @either_static_or_dynamic_secrets def _fetch_my_specific_relation_data( self, relation: Relation, fields: Optional[List[str]] ) -> Dict[str, str]: @@ -1634,6 +2079,7 @@ def _fetch_my_specific_relation_data( self.component, self.secret_fields, relation, fields ) + @either_static_or_dynamic_secrets def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" self._remove_secret_from_databag(relation, list(data.keys())) @@ -1645,24 +2091,17 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non data=data, uri_to_databag=False, ) + self._remove_secret_field_name_from_databag(relation) normal_content = {k: v for k, v in data.items() if k in normal_fields} self._update_relation_data_without_secrets(self.component, relation, normal_content) + @either_static_or_dynamic_secrets def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" if self.secret_fields and self.deleted_label: - current_data = self.fetch_my_relation_data([relation.id], fields) - if current_data is not None: - # Check if the secret we wanna delete actually exists - # Given the "deleted label", here we can't rely on the default mechanism (i.e. 
'key not found') - if non_existent := (set(fields) & set(self.secret_fields)) - set( - current_data.get(relation.id, []) - ): - logger.error( - "Non-existing secret %s was attempted to be removed.", - ", ".join(non_existent), - ) + # Legacy, backwards compatibility + self._check_deleted_label(relation, fields) _, normal_fields = self._process_secret_fields( relation, @@ -1704,7 +2143,7 @@ def fetch_relation_field( fetch_my_relation_field = Data.fetch_my_relation_field -class DataPeerEventHandlers(EventHandlers): +class DataPeerEventHandlers(RequirerEventHandlers): """Requires-side of the relation.""" def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): @@ -1729,6 +2168,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, unique_key: str = "", @@ -1739,6 +2179,7 @@ def __init__( relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) @@ -1763,6 +2204,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, unique_key: str = "", @@ -1773,6 +2215,7 @@ def __init__( relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) @@ -1787,6 +2230,14 @@ def __init__(self, unit: Unit, *args, **kwargs): self.local_unit = unit self.component = unit + def update_relation_data(self, relation_id: int, data: dict) -> None: + """This method makes no sense for a Other Peer Relation.""" + raise NotImplementedError("It's not possible to update data of another unit.") + + def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: + """This method makes no sense for a Other Peer Relation.""" + raise NotImplementedError("It's not possible to delete data of another unit.") + class DataPeerOtherUnitEventHandlers(DataPeerEventHandlers): """Requires-side of the relation.""" @@ -1807,23 +2258,29 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, - unique_key: str = "", ): - DataPeerData.__init__( + DataPeerOtherUnitData.__init__( self, + unit, charm.model, relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) - DataPeerEventHandlers.__init__(self, charm, self, unique_key) + DataPeerOtherUnitEventHandlers.__init__(self, charm, self) + +################################################################################ +# Cross-charm Relatoins Data Handling and Evenets +################################################################################ -# General events +# Generic events class ExtraRoleEvent(RelationEvent): @@ -2390,7 +2847,7 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: # Check if the database is created # (the database charm shared the credentials). 
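        # "secret-user" (PROV_SECRET_PREFIX + the group name) is the databag field under
        # which the provider publishes the URI of the Juju secret holding the
        # username/password, so its appearance equally signals that credentials exist.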
- secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -2462,7 +2919,11 @@ def __init__( DatabaseRequirerEventHandlers.__init__(self, charm, self) -# Kafka related events +################################################################################ +# Charm-specific Relations Data and Events +################################################################################ + +# Kafka Events class KafkaProvidesEvent(RelationEvent): @@ -2704,7 +3165,7 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -2949,8 +3410,8 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) - secret_field_tls = self.relation_data._generate_secret_field_name(SecretGroup.TLS) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) updates = {"username", "password", "tls", "tls-ca", secret_field_user, secret_field_tls} if len(set(diff._asdict().keys()) - updates) < len(diff): logger.info("authentication updated at: %s", datetime.now()) diff --git a/lib/charms/tls_certificates_interface/v2/tls_certificates.py b/lib/charms/tls_certificates_interface/v2/tls_certificates.py new file mode 100644 index 00000000..9f67833b --- /dev/null +++ b/lib/charms/tls_certificates_interface/v2/tls_certificates.py @@ -0,0 +1,1959 @@ +# Copyright 2021 Canonical Ltd. +# See LICENSE file for licensing details. + + +"""Library for the tls-certificates relation. + +This library contains the Requires and Provides classes for handling the tls-certificates +interface. + +## Getting Started +From a charm directory, fetch the library using `charmcraft`: + +```shell +charmcraft fetch-lib charms.tls_certificates_interface.v2.tls_certificates +``` + +Add the following libraries to the charm's `requirements.txt` file: +- jsonschema +- cryptography + +Add the following section to the charm's `charmcraft.yaml` file: +```yaml +parts: + charm: + build-packages: + - libffi-dev + - libssl-dev + - rustc + - cargo +``` + +### Provider charm +The provider charm is the charm providing certificates to another charm that requires them. In +this example, the provider charm is storing its private key using a peer relation interface called +`replicas`. 
+ +Example: +```python +from charms.tls_certificates_interface.v2.tls_certificates import ( + CertificateCreationRequestEvent, + CertificateRevocationRequestEvent, + TLSCertificatesProvidesV2, + generate_private_key, +) +from ops.charm import CharmBase, InstallEvent +from ops.main import main +from ops.model import ActiveStatus, WaitingStatus + + +def generate_ca(private_key: bytes, subject: str) -> str: + return "whatever ca content" + + +def generate_certificate(ca: str, private_key: str, csr: str) -> str: + return "Whatever certificate" + + +class ExampleProviderCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.certificates = TLSCertificatesProvidesV2(self, "certificates") + self.framework.observe( + self.certificates.on.certificate_request, + self._on_certificate_request + ) + self.framework.observe( + self.certificates.on.certificate_revocation_request, + self._on_certificate_revocation_request + ) + self.framework.observe(self.on.install, self._on_install) + + def _on_install(self, event: InstallEvent) -> None: + private_key_password = b"banana" + private_key = generate_private_key(password=private_key_password) + ca_certificate = generate_ca(private_key=private_key, subject="whatever") + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update( + { + "private_key_password": "banana", + "private_key": private_key, + "ca_certificate": ca_certificate, + } + ) + self.unit.status = ActiveStatus() + + def _on_certificate_request(self, event: CertificateCreationRequestEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + ca_certificate = replicas_relation.data[self.app].get("ca_certificate") + private_key = replicas_relation.data[self.app].get("private_key") + certificate = generate_certificate( + ca=ca_certificate, + private_key=private_key, + csr=event.certificate_signing_request, + ) + + self.certificates.set_relation_certificate( + certificate=certificate, + certificate_signing_request=event.certificate_signing_request, + ca=ca_certificate, + chain=[ca_certificate, certificate], + relation_id=event.relation_id, + ) + + def _on_certificate_revocation_request(self, event: CertificateRevocationRequestEvent) -> None: + # Do what you want to do with this information + pass + + +if __name__ == "__main__": + main(ExampleProviderCharm) +``` + +### Requirer charm +The requirer charm is the charm requiring certificates from another charm that provides them. In +this example, the requirer charm is storing its certificates using a peer relation interface called +`replicas`. 
+ +Example: +```python +from charms.tls_certificates_interface.v2.tls_certificates import ( + CertificateAvailableEvent, + CertificateExpiringEvent, + CertificateRevokedEvent, + TLSCertificatesRequiresV2, + generate_csr, + generate_private_key, +) +from ops.charm import CharmBase, RelationJoinedEvent +from ops.main import main +from ops.model import ActiveStatus, WaitingStatus +from typing import Union + + +class ExampleRequirerCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.cert_subject = "whatever" + self.certificates = TLSCertificatesRequiresV2(self, "certificates") + self.framework.observe(self.on.install, self._on_install) + self.framework.observe( + self.on.certificates_relation_joined, self._on_certificates_relation_joined + ) + self.framework.observe( + self.certificates.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self.certificates.on.certificate_expiring, self._on_certificate_expiring + ) + self.framework.observe( + self.certificates.on.certificate_invalidated, self._on_certificate_invalidated + ) + self.framework.observe( + self.certificates.on.all_certificates_invalidated, + self._on_all_certificates_invalidated + ) + + def _on_install(self, event) -> None: + private_key_password = b"banana" + private_key = generate_private_key(password=private_key_password) + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update( + {"private_key_password": "banana", "private_key": private_key.decode()} + ) + + def _on_certificates_relation_joined(self, event: RelationJoinedEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + subject=self.cert_subject, + ) + replicas_relation.data[self.app].update({"csr": csr.decode()}) + self.certificates.request_certificate_creation(certificate_signing_request=csr) + + def _on_certificate_available(self, event: CertificateAvailableEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update({"certificate": event.certificate}) + replicas_relation.data[self.app].update({"ca": event.ca}) + replicas_relation.data[self.app].update({"chain": event.chain}) + self.unit.status = ActiveStatus() + + def _on_certificate_expiring( + self, event: Union[CertificateExpiringEvent, CertificateInvalidatedEvent] + ) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + old_csr = replicas_relation.data[self.app].get("csr") + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + new_csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + 
subject=self.cert_subject, + ) + self.certificates.request_certificate_renewal( + old_certificate_signing_request=old_csr, + new_certificate_signing_request=new_csr, + ) + replicas_relation.data[self.app].update({"csr": new_csr.decode()}) + + def _certificate_revoked(self) -> None: + old_csr = replicas_relation.data[self.app].get("csr") + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + new_csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + subject=self.cert_subject, + ) + self.certificates.request_certificate_renewal( + old_certificate_signing_request=old_csr, + new_certificate_signing_request=new_csr, + ) + replicas_relation.data[self.app].update({"csr": new_csr.decode()}) + replicas_relation.data[self.app].pop("certificate") + replicas_relation.data[self.app].pop("ca") + replicas_relation.data[self.app].pop("chain") + self.unit.status = WaitingStatus("Waiting for new certificate") + + def _on_certificate_invalidated(self, event: CertificateInvalidatedEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + if event.reason == "revoked": + self._certificate_revoked() + if event.reason == "expired": + self._on_certificate_expiring(event) + + def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEvent) -> None: + # Do what you want with this information, probably remove all certificates. + pass + + +if __name__ == "__main__": + main(ExampleRequirerCharm) +``` + +You can relate both charms by running: + +```bash +juju relate +``` + +""" # noqa: D405, D410, D411, D214, D416 + +import copy +import json +import logging +import uuid +from contextlib import suppress +from datetime import datetime, timedelta, timezone +from ipaddress import IPv4Address +from typing import Any, Dict, List, Literal, Optional, Union + +from cryptography import x509 +from cryptography.hazmat._oid import ExtensionOID +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.serialization import pkcs12 +from jsonschema import exceptions, validate +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationChangedEvent, + SecretExpiredEvent, + UpdateStatusEvent, +) +from ops.framework import EventBase, EventSource, Handle, Object +from ops.jujuversion import JujuVersion +from ops.model import ModelError, Relation, RelationDataContent, SecretNotFoundError + +# The unique Charmhub library identifier, never change it +LIBID = "afd8c2bccf834997afce12c2706d2ede" + +# Increment this major API version when introducing breaking changes +LIBAPI = 2 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 28 + +PYDEPS = ["cryptography", "jsonschema"] + +REQUIRER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/tls_certificates/v1/schemas/requirer.json", + "type": "object", + "title": "`tls_certificates` requirer root schema", + "description": "The `tls_certificates` root schema comprises the entire requirer databag for this interface.", # noqa: E501 + "examples": [ + { + 
"certificate_signing_requests": [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\\n-----END CERTIFICATE REQUEST-----\\n" # noqa: E501 + }, + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\\nAQEBBQADggEPADCCAQoCggEBAMk3raaX803cHvzlBF9LC7KORT46z4VjyU5PIaMb\\nQLIDgYKFYI0n5hf2Ra4FAHvOvEmW7bjNlHORFEmvnpcU5kPMNUyKFMTaC8LGmN8z\\nUBH3aK+0+FRvY4afn9tgj5435WqOG9QdoDJ0TJkjJbJI9M70UOgL711oU7ql6HxU\\n4d2ydFK9xAHrBwziNHgNZ72L95s4gLTXf0fAHYf15mDA9U5yc+YDubCKgTXzVySQ\\nUx73VCJLfC/XkZIh559IrnRv5G9fu6BMLEuBwAz6QAO4+/XidbKWN4r2XSq5qX4n\\n6EPQQWP8/nd4myq1kbg6Q8w68L/0YdfjCmbyf2TuoWeImdUCAwEAAaAAMA0GCSqG\\nSIb3DQEBCwUAA4IBAQBIdwraBvpYo/rl5MH1+1Um6HRg4gOdQPY5WcJy9B9tgzJz\\nittRSlRGTnhyIo6fHgq9KHrmUthNe8mMTDailKFeaqkVNVvk7l0d1/B90Kz6OfmD\\nxN0qjW53oP7y3QB5FFBM8DjqjmUnz5UePKoX4AKkDyrKWxMwGX5RoET8c/y0y9jp\\nvSq3Wh5UpaZdWbe1oVY8CqMVUEVQL2DPjtopxXFz2qACwsXkQZxWmjvZnRiP8nP8\\nbdFaEuh9Q6rZ2QdZDEtrU4AodPU3NaukFr5KlTUQt3w/cl+5//zils6G5zUWJ2pN\\ng7+t9PTvXHRkH+LnwaVnmsBFU2e05qADQbfIn7JA\\n-----END CERTIFICATE REQUEST-----\\n" # noqa: E501 + }, + ] + } + ], + "properties": { + "certificate_signing_requests": { + "type": "array", + "items": { + "type": "object", + "properties": { + "certificate_signing_request": {"type": "string"}, + "ca": {"type": "boolean"}, + }, + "required": ["certificate_signing_request"], + }, + } + }, + "required": ["certificate_signing_requests"], + "additionalProperties": True, +} + +PROVIDER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/tls_certificates/v1/schemas/provider.json", + "type": "object", + "title": "`tls_certificates` provider root schema", + "description": "The `tls_certificates` root schema comprises the entire provider databag for this interface.", # noqa: E501 + "examples": [ + { + "certificates": [ + { + "ca": "-----BEGIN 
CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n", # noqa: E501 + "chain": [ + "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n" # noqa: E501, W505 + ], + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\n-----END CERTIFICATE REQUEST-----\n", # noqa: E501 + "certificate": "-----BEGIN 
CERTIFICATE-----\nMIICvDCCAaQCFFPAOD7utDTsgFrm0vS4We18OcnKMA0GCSqGSIb3DQEBCwUAMCAx\nCzAJBgNVBAYTAlVTMREwDwYDVQQDDAh3aGF0ZXZlcjAeFw0yMjA3MjkyMTE5Mzha\nFw0yMzA3MjkyMTE5MzhaMBUxEzARBgNVBAMMCmJhbmFuYS5jb20wggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVpcfcBOnFuyZG+A2WQzmaBI5NXgwTCfvE\neKciqRQXhzJdUkEg7eqwFrK3y9yjhoiB6q0WNAeR+nOdS/Cw7layRtGz5skOq7Aa\nN4FZHg0or30i7Rrx7afJcGJyLpxfK/OfLmJm5QEdLXV0DZp0L5vuhhEb1EUOrMaY\nGe4iwqTyg6D7fuBili9dBVn9IvNhYMVgtiqkWVLTW4ChE0LgES4oO3rQZgp4dtM5\nsp6KwHGO766UzwGnkKRizaqmLylfVusllWNPFfp6gEaxa45N70oqGUrvGSVHWeHf\nfvkhpWx+wOnu+2A5F/Yv3UNz2v4g7Vjt7V0tjL4KMV9YklpRjTh3AgMBAAEwDQYJ\nKoZIhvcNAQELBQADggEBAChjRzuba8zjQ7NYBVas89Oy7u++MlS8xWxh++yiUsV6\nWMk3ZemsPtXc1YmXorIQohtxLxzUPm2JhyzFzU/sOLmJQ1E/l+gtZHyRCwsb20fX\nmphuJsMVd7qv/GwEk9PBsk2uDqg4/Wix0Rx5lf95juJP7CPXQJl5FQauf3+LSz0y\nwF/j+4GqvrwsWr9hKOLmPdkyKkR6bHKtzzsxL9PM8GnElk2OpaPMMnzbL/vt2IAt\nxK01ZzPxCQCzVwHo5IJO5NR/fIyFbEPhxzG17QsRDOBR9fl9cOIvDeSO04vyZ+nz\n+kA2c3fNrZFAtpIlOOmFh8Q12rVL4sAjI5mVWnNEgvI=\n-----END CERTIFICATE-----\n", # noqa: E501 + } + ] + }, + { + "certificates": [ + { + "ca": "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n", # noqa: E501 + "chain": [ + "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n" # noqa: E501, W505 + ], + "certificate_signing_request": 
"-----BEGIN CERTIFICATE REQUEST-----\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\n-----END CERTIFICATE REQUEST-----\n", # noqa: E501 + "certificate": "-----BEGIN CERTIFICATE-----\nMIICvDCCAaQCFFPAOD7utDTsgFrm0vS4We18OcnKMA0GCSqGSIb3DQEBCwUAMCAx\nCzAJBgNVBAYTAlVTMREwDwYDVQQDDAh3aGF0ZXZlcjAeFw0yMjA3MjkyMTE5Mzha\nFw0yMzA3MjkyMTE5MzhaMBUxEzARBgNVBAMMCmJhbmFuYS5jb20wggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVpcfcBOnFuyZG+A2WQzmaBI5NXgwTCfvE\neKciqRQXhzJdUkEg7eqwFrK3y9yjhoiB6q0WNAeR+nOdS/Cw7layRtGz5skOq7Aa\nN4FZHg0or30i7Rrx7afJcGJyLpxfK/OfLmJm5QEdLXV0DZp0L5vuhhEb1EUOrMaY\nGe4iwqTyg6D7fuBili9dBVn9IvNhYMVgtiqkWVLTW4ChE0LgES4oO3rQZgp4dtM5\nsp6KwHGO766UzwGnkKRizaqmLylfVusllWNPFfp6gEaxa45N70oqGUrvGSVHWeHf\nfvkhpWx+wOnu+2A5F/Yv3UNz2v4g7Vjt7V0tjL4KMV9YklpRjTh3AgMBAAEwDQYJ\nKoZIhvcNAQELBQADggEBAChjRzuba8zjQ7NYBVas89Oy7u++MlS8xWxh++yiUsV6\nWMk3ZemsPtXc1YmXorIQohtxLxzUPm2JhyzFzU/sOLmJQ1E/l+gtZHyRCwsb20fX\nmphuJsMVd7qv/GwEk9PBsk2uDqg4/Wix0Rx5lf95juJP7CPXQJl5FQauf3+LSz0y\nwF/j+4GqvrwsWr9hKOLmPdkyKkR6bHKtzzsxL9PM8GnElk2OpaPMMnzbL/vt2IAt\nxK01ZzPxCQCzVwHo5IJO5NR/fIyFbEPhxzG17QsRDOBR9fl9cOIvDeSO04vyZ+nz\n+kA2c3fNrZFAtpIlOOmFh8Q12rVL4sAjI5mVWnNEgvI=\n-----END CERTIFICATE-----\n", # noqa: E501 + "revoked": True, + } + ] + }, + ], + "properties": { + "certificates": { + "$id": "#/properties/certificates", + "type": "array", + "items": { + "$id": "#/properties/certificates/items", + "type": "object", + "required": ["certificate_signing_request", "certificate", "ca", "chain"], + "properties": { + "certificate_signing_request": { + "$id": "#/properties/certificates/items/certificate_signing_request", + "type": "string", + }, + "certificate": { + "$id": "#/properties/certificates/items/certificate", + "type": "string", + }, + "ca": {"$id": "#/properties/certificates/items/ca", "type": "string"}, + "chain": { + "$id": "#/properties/certificates/items/chain", + "type": "array", + "items": { + "type": "string", + "$id": "#/properties/certificates/items/chain/items", + }, + }, + "revoked": { + "$id": "#/properties/certificates/items/revoked", + "type": "boolean", + }, + }, + "additionalProperties": True, + }, + } + }, + "required": ["certificates"], + "additionalProperties": True, +} + + +logger = logging.getLogger(__name__) + + +class CertificateAvailableEvent(EventBase): + """Charm Event triggered when a TLS certificate is available.""" + + def __init__( + self, + handle: Handle, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ): + super().__init__(handle) + self.certificate = certificate + self.certificate_signing_request = certificate_signing_request + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate": self.certificate, + 
"certificate_signing_request": self.certificate_signing_request, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +class CertificateExpiringEvent(EventBase): + """Charm Event triggered when a TLS certificate is almost expired.""" + + def __init__(self, handle, certificate: str, expiry: str): + """CertificateExpiringEvent. + + Args: + handle (Handle): Juju framework handle + certificate (str): TLS Certificate + expiry (str): Datetime string representing the time at which the certificate + won't be valid anymore. + """ + super().__init__(handle) + self.certificate = certificate + self.expiry = expiry + + def snapshot(self) -> dict: + """Return snapshot.""" + return {"certificate": self.certificate, "expiry": self.expiry} + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.expiry = snapshot["expiry"] + + +class CertificateInvalidatedEvent(EventBase): + """Charm Event triggered when a TLS certificate is invalidated.""" + + def __init__( + self, + handle: Handle, + reason: Literal["expired", "revoked"], + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ): + super().__init__(handle) + self.reason = reason + self.certificate_signing_request = certificate_signing_request + self.certificate = certificate + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "reason": self.reason, + "certificate_signing_request": self.certificate_signing_request, + "certificate": self.certificate, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.reason = snapshot["reason"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.certificate = snapshot["certificate"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +class AllCertificatesInvalidatedEvent(EventBase): + """Charm Event triggered when all TLS certificates are invalidated.""" + + def __init__(self, handle: Handle): + super().__init__(handle) + + def snapshot(self) -> dict: + """Return snapshot.""" + return {} + + def restore(self, snapshot: dict): + """Restore snapshot.""" + pass + + +class CertificateCreationRequestEvent(EventBase): + """Charm Event triggered when a TLS certificate is required.""" + + def __init__( + self, + handle: Handle, + certificate_signing_request: str, + relation_id: int, + is_ca: bool = False, + ): + super().__init__(handle) + self.certificate_signing_request = certificate_signing_request + self.relation_id = relation_id + self.is_ca = is_ca + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate_signing_request": self.certificate_signing_request, + "relation_id": self.relation_id, + "is_ca": self.is_ca, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.relation_id = snapshot["relation_id"] + self.is_ca = snapshot["is_ca"] + + +class CertificateRevocationRequestEvent(EventBase): + """Charm Event triggered when a TLS certificate needs to be revoked.""" + + def __init__( + self, + handle: Handle, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: str, + ): + 
super().__init__(handle) + self.certificate = certificate + self.certificate_signing_request = certificate_signing_request + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate": self.certificate, + "certificate_signing_request": self.certificate_signing_request, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +def _load_relation_data(relation_data_content: RelationDataContent) -> dict: + """Load relation data from the relation data bag. + + Json loads all data. + + Args: + relation_data_content: Relation data from the databag + + Returns: + dict: Relation data in dict format. + """ + certificate_data = {} + try: + for key in relation_data_content: + try: + certificate_data[key] = json.loads(relation_data_content[key]) + except (json.decoder.JSONDecodeError, TypeError): + certificate_data[key] = relation_data_content[key] + except ModelError: + pass + return certificate_data + + +def _get_closest_future_time( + expiry_notification_time: datetime, expiry_time: datetime +) -> datetime: + """Return expiry_notification_time if not in the past, otherwise return expiry_time. + + Args: + expiry_notification_time (datetime): Notification time of impending expiration + expiry_time (datetime): Expiration time + + Returns: + datetime: expiry_notification_time if not in the past, expiry_time otherwise + """ + return ( + expiry_notification_time + if datetime.now(timezone.utc) < expiry_notification_time + else expiry_time + ) + + +def _get_certificate_expiry_time(certificate: str) -> Optional[datetime]: + """Extract expiry time from a certificate string. + + Args: + certificate (str): x509 certificate as a string + + Returns: + Optional[datetime]: Expiry datetime or None + """ + try: + certificate_object = x509.load_pem_x509_certificate(data=certificate.encode()) + return certificate_object.not_valid_after_utc + except ValueError: + logger.warning("Could not load certificate.") + return None + + +def generate_ca( + private_key: bytes, + subject: str, + private_key_password: Optional[bytes] = None, + validity: int = 365, + country: str = "US", +) -> bytes: + """Generate a CA Certificate. + + Args: + private_key (bytes): Private key + subject (str): Common Name that can be an IP or a Full Qualified Domain Name (FQDN). + private_key_password (bytes): Private key password + validity (int): Certificate validity time (in days) + country (str): Certificate Issuing country + + Returns: + bytes: CA Certificate. 
+ """ + private_key_object = serialization.load_pem_private_key( + private_key, password=private_key_password + ) + subject_name = x509.Name( + [ + x509.NameAttribute(x509.NameOID.COUNTRY_NAME, country), + x509.NameAttribute(x509.NameOID.COMMON_NAME, subject), + ] + ) + subject_identifier_object = x509.SubjectKeyIdentifier.from_public_key( + private_key_object.public_key() # type: ignore[arg-type] + ) + subject_identifier = key_identifier = subject_identifier_object.public_bytes() + key_usage = x509.KeyUsage( + digital_signature=True, + key_encipherment=True, + key_cert_sign=True, + key_agreement=False, + content_commitment=False, + data_encipherment=False, + crl_sign=False, + encipher_only=False, + decipher_only=False, + ) + cert = ( + x509.CertificateBuilder() + .subject_name(subject_name) + .issuer_name(subject_name) + .public_key(private_key_object.public_key()) # type: ignore[arg-type] + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity)) + .add_extension(x509.SubjectKeyIdentifier(digest=subject_identifier), critical=False) + .add_extension( + x509.AuthorityKeyIdentifier( + key_identifier=key_identifier, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ), + critical=False, + ) + .add_extension(key_usage, critical=True) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(private_key_object, hashes.SHA256()) # type: ignore[arg-type] + ) + return cert.public_bytes(serialization.Encoding.PEM) + + +def get_certificate_extensions( + authority_key_identifier: bytes, + csr: x509.CertificateSigningRequest, + alt_names: Optional[List[str]], + is_ca: bool, +) -> List[x509.Extension]: + """Generate a list of certificate extensions from a CSR and other known information. 
+ + Args: + authority_key_identifier (bytes): Authority key identifier + csr (x509.CertificateSigningRequest): CSR + alt_names (list): List of alt names to put on cert - prefer putting SANs in CSR + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + List[x509.Extension]: List of extensions + """ + cert_extensions_list: List[x509.Extension] = [ + x509.Extension( + oid=ExtensionOID.AUTHORITY_KEY_IDENTIFIER, + value=x509.AuthorityKeyIdentifier( + key_identifier=authority_key_identifier, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ), + critical=False, + ), + x509.Extension( + oid=ExtensionOID.SUBJECT_KEY_IDENTIFIER, + value=x509.SubjectKeyIdentifier.from_public_key(csr.public_key()), + critical=False, + ), + x509.Extension( + oid=ExtensionOID.BASIC_CONSTRAINTS, + critical=True, + value=x509.BasicConstraints(ca=is_ca, path_length=None), + ), + ] + + sans: List[x509.GeneralName] = [] + san_alt_names = [x509.DNSName(name) for name in alt_names] if alt_names else [] + sans.extend(san_alt_names) + try: + loaded_san_ext = csr.extensions.get_extension_for_class(x509.SubjectAlternativeName) + sans.extend( + [x509.DNSName(name) for name in loaded_san_ext.value.get_values_for_type(x509.DNSName)] + ) + sans.extend( + [x509.IPAddress(ip) for ip in loaded_san_ext.value.get_values_for_type(x509.IPAddress)] + ) + sans.extend( + [ + x509.RegisteredID(oid) + for oid in loaded_san_ext.value.get_values_for_type(x509.RegisteredID) + ] + ) + except x509.ExtensionNotFound: + pass + + if sans: + cert_extensions_list.append( + x509.Extension( + oid=ExtensionOID.SUBJECT_ALTERNATIVE_NAME, + critical=False, + value=x509.SubjectAlternativeName(sans), + ) + ) + + if is_ca: + cert_extensions_list.append( + x509.Extension( + ExtensionOID.KEY_USAGE, + critical=True, + value=x509.KeyUsage( + digital_signature=False, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=True, + crl_sign=True, + encipher_only=False, + decipher_only=False, + ), + ) + ) + + existing_oids = {ext.oid for ext in cert_extensions_list} + for extension in csr.extensions: + if extension.oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME: + continue + if extension.oid in existing_oids: + logger.warning("Extension %s is managed by the TLS provider, ignoring.", extension.oid) + continue + cert_extensions_list.append(extension) + + return cert_extensions_list + + +def generate_certificate( + csr: bytes, + ca: bytes, + ca_key: bytes, + ca_key_password: Optional[bytes] = None, + validity: int = 365, + alt_names: Optional[List[str]] = None, + is_ca: bool = False, +) -> bytes: + """Generate a TLS certificate based on a CSR. 
+ + Args: + csr (bytes): CSR + ca (bytes): CA Certificate + ca_key (bytes): CA private key + ca_key_password: CA private key password + validity (int): Certificate validity (in days) + alt_names (list): List of alt names to put on cert - prefer putting SANs in CSR + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + bytes: Certificate + """ + csr_object = x509.load_pem_x509_csr(csr) + subject = csr_object.subject + ca_pem = x509.load_pem_x509_certificate(ca) + issuer = ca_pem.issuer + private_key = serialization.load_pem_private_key(ca_key, password=ca_key_password) + + certificate_builder = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(csr_object.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity)) + ) + extensions = get_certificate_extensions( + authority_key_identifier=ca_pem.extensions.get_extension_for_class( + x509.SubjectKeyIdentifier + ).value.key_identifier, + csr=csr_object, + alt_names=alt_names, + is_ca=is_ca, + ) + for extension in extensions: + try: + certificate_builder = certificate_builder.add_extension( + extval=extension.value, + critical=extension.critical, + ) + except ValueError as e: + logger.warning("Failed to add extension %s: %s", extension.oid, e) + + cert = certificate_builder.sign(private_key, hashes.SHA256()) # type: ignore[arg-type] + return cert.public_bytes(serialization.Encoding.PEM) + + +def generate_pfx_package( + certificate: bytes, + private_key: bytes, + package_password: str, + private_key_password: Optional[bytes] = None, +) -> bytes: + """Generate a PFX package to contain the TLS certificate and private key. + + Args: + certificate (bytes): TLS certificate + private_key (bytes): Private key + package_password (str): Password to open the PFX package + private_key_password (bytes): Private key password + + Returns: + bytes: + """ + private_key_object = serialization.load_pem_private_key( + private_key, password=private_key_password + ) + certificate_object = x509.load_pem_x509_certificate(certificate) + name = certificate_object.subject.rfc4514_string() + pfx_bytes = pkcs12.serialize_key_and_certificates( + name=name.encode(), + cert=certificate_object, + key=private_key_object, # type: ignore[arg-type] + cas=None, + encryption_algorithm=serialization.BestAvailableEncryption(package_password.encode()), + ) + return pfx_bytes + + +def generate_private_key( + password: Optional[bytes] = None, + key_size: int = 2048, + public_exponent: int = 65537, +) -> bytes: + """Generate a private key. + + Args: + password (bytes): Password for decrypting the private key + key_size (int): Key size in bytes + public_exponent: Public exponent. 
+ + Returns: + bytes: Private Key + """ + private_key = rsa.generate_private_key( + public_exponent=public_exponent, + key_size=key_size, + ) + key_bytes = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=( + serialization.BestAvailableEncryption(password) + if password + else serialization.NoEncryption() + ), + ) + return key_bytes + + +def generate_csr( # noqa: C901 + private_key: bytes, + subject: str, + add_unique_id_to_subject_name: bool = True, + organization: Optional[str] = None, + email_address: Optional[str] = None, + country_name: Optional[str] = None, + private_key_password: Optional[bytes] = None, + sans: Optional[List[str]] = None, + sans_oid: Optional[List[str]] = None, + sans_ip: Optional[List[str]] = None, + sans_dns: Optional[List[str]] = None, + additional_critical_extensions: Optional[List] = None, +) -> bytes: + """Generate a CSR using private key and subject. + + Args: + private_key (bytes): Private key + subject (str): CSR Common Name that can be an IP or a Full Qualified Domain Name (FQDN). + add_unique_id_to_subject_name (bool): Whether a unique ID must be added to the CSR's + subject name. Always leave to "True" when the CSR is used to request certificates + using the tls-certificates relation. + organization (str): Name of organization. + email_address (str): Email address. + country_name (str): Country Name. + private_key_password (bytes): Private key password + sans (list): Use sans_dns - this will be deprecated in a future release + List of DNS subject alternative names (keeping it for now for backward compatibility) + sans_oid (list): List of registered ID SANs + sans_dns (list): List of DNS subject alternative names (similar to the arg: sans) + sans_ip (list): List of IP subject alternative names + additional_critical_extensions (list): List of critical additional extension objects. + Object must be a x509 ExtensionType. 
+ + Returns: + bytes: CSR + """ + signing_key = serialization.load_pem_private_key(private_key, password=private_key_password) + subject_name = [x509.NameAttribute(x509.NameOID.COMMON_NAME, subject)] + if add_unique_id_to_subject_name: + unique_identifier = uuid.uuid4() + subject_name.append( + x509.NameAttribute(x509.NameOID.X500_UNIQUE_IDENTIFIER, str(unique_identifier)) + ) + if organization: + subject_name.append(x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, organization)) + if email_address: + subject_name.append(x509.NameAttribute(x509.NameOID.EMAIL_ADDRESS, email_address)) + if country_name: + subject_name.append(x509.NameAttribute(x509.NameOID.COUNTRY_NAME, country_name)) + csr = x509.CertificateSigningRequestBuilder(subject_name=x509.Name(subject_name)) + + _sans: List[x509.GeneralName] = [] + if sans_oid: + _sans.extend([x509.RegisteredID(x509.ObjectIdentifier(san)) for san in sans_oid]) + if sans_ip: + _sans.extend([x509.IPAddress(IPv4Address(san)) for san in sans_ip]) + if sans: + _sans.extend([x509.DNSName(san) for san in sans]) + if sans_dns: + _sans.extend([x509.DNSName(san) for san in sans_dns]) + if _sans: + csr = csr.add_extension(x509.SubjectAlternativeName(set(_sans)), critical=False) + + if additional_critical_extensions: + for extension in additional_critical_extensions: + csr = csr.add_extension(extension, critical=True) + + signed_certificate = csr.sign(signing_key, hashes.SHA256()) # type: ignore[arg-type] + return signed_certificate.public_bytes(serialization.Encoding.PEM) + + +def csr_matches_certificate(csr: str, cert: str) -> bool: + """Check if a CSR matches a certificate. + + Args: + csr (str): Certificate Signing Request as a string + cert (str): Certificate as a string + Returns: + bool: True/False depending on whether the CSR matches the certificate. 
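# A minimal usage sketch for the module-level helpers above (generate_private_key,
# generate_ca, generate_csr, generate_certificate, csr_matches_certificate): build a
# throwaway CA, sign a CSR with it, and verify the pair. Illustrative only, not part of
# the vendored library; the import path below is an assumption about where the charm
# lib is vendored and may differ in this repository.
from charms.tls_certificates_interface.v2.tls_certificates import (  # assumed path
    csr_matches_certificate,
    generate_ca,
    generate_certificate,
    generate_csr,
    generate_private_key,
)

# 1. CA key and self-signed CA certificate (both returned as PEM bytes).
ca_key = generate_private_key()
ca_cert = generate_ca(private_key=ca_key, subject="example-internal-ca")

# 2. Server key and a CSR carrying DNS subject alternative names.
server_key = generate_private_key(key_size=4096)
server_csr = generate_csr(
    private_key=server_key,
    subject="banana.com",
    sans_dns=["banana.com", "*.banana.com"],
)

# 3. Sign the CSR with the CA; SANs and other CSR extensions are copied onto the certificate.
server_cert = generate_certificate(csr=server_csr, ca=ca_cert, ca_key=ca_key, validity=90)

# 4. The issued certificate carries the public key from the request.
assert csr_matches_certificate(server_csr.decode(), server_cert.decode())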
+ """ + try: + csr_object = x509.load_pem_x509_csr(csr.encode("utf-8")) + cert_object = x509.load_pem_x509_certificate(cert.encode("utf-8")) + + if csr_object.public_key().public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) != cert_object.public_key().public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ): + return False + if ( + csr_object.public_key().public_numbers().n # type: ignore[union-attr] + != cert_object.public_key().public_numbers().n # type: ignore[union-attr] + ): + return False + except ValueError: + logger.warning("Could not load certificate or CSR.") + return False + return True + + +class CertificatesProviderCharmEvents(CharmEvents): + """List of events that the TLS Certificates provider charm can leverage.""" + + certificate_creation_request = EventSource(CertificateCreationRequestEvent) + certificate_revocation_request = EventSource(CertificateRevocationRequestEvent) + + +class CertificatesRequirerCharmEvents(CharmEvents): + """List of events that the TLS Certificates requirer charm can leverage.""" + + certificate_available = EventSource(CertificateAvailableEvent) + certificate_expiring = EventSource(CertificateExpiringEvent) + certificate_invalidated = EventSource(CertificateInvalidatedEvent) + all_certificates_invalidated = EventSource(AllCertificatesInvalidatedEvent) + + +class TLSCertificatesProvidesV2(Object): + """TLS certificates provider class to be instantiated by TLS certificates providers.""" + + on = CertificatesProviderCharmEvents() # type: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relationship_name: str): + super().__init__(charm, relationship_name) + self.framework.observe( + charm.on[relationship_name].relation_changed, self._on_relation_changed + ) + self.charm = charm + self.relationship_name = relationship_name + + def _load_app_relation_data(self, relation: Relation) -> dict: + """Load relation data from the application relation data bag. + + Json loads all data. + + Args: + relation: Relation data from the application databag + + Returns: + dict: Relation data in dict format. + """ + # If unit is not leader, it does not try to reach relation data. + if not self.model.unit.is_leader(): + return {} + return _load_relation_data(relation.data[self.charm.app]) + + def _add_certificate( + self, + relation_id: int, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ) -> None: + """Add certificate to relation data. 
+ + Args: + relation_id (int): Relation id + certificate (str): Certificate + certificate_signing_request (str): Certificate Signing Request + ca (str): CA Certificate + chain (list): CA Chain + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + new_certificate = { + "certificate": certificate, + "certificate_signing_request": certificate_signing_request, + "ca": ca, + "chain": chain, + } + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + certificates = copy.deepcopy(provider_certificates) + if new_certificate in certificates: + logger.info("Certificate already in relation data - Doing nothing") + return + certificates.append(new_certificate) + relation.data[self.model.app]["certificates"] = json.dumps(certificates) + + def _remove_certificate( + self, + relation_id: int, + certificate: Optional[str] = None, + certificate_signing_request: Optional[str] = None, + ) -> None: + """Remove certificate from a given relation based on user provided certificate or csr. + + Args: + relation_id (int): Relation id + certificate (str): Certificate (optional) + certificate_signing_request: Certificate signing request (optional) + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, + relation_id=relation_id, + ) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} with relation id {relation_id} does not exist" + ) + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + certificates = copy.deepcopy(provider_certificates) + for certificate_dict in certificates: + if certificate and certificate_dict["certificate"] == certificate: + certificates.remove(certificate_dict) + if ( + certificate_signing_request + and certificate_dict["certificate_signing_request"] == certificate_signing_request + ): + certificates.remove(certificate_dict) + relation.data[self.model.app]["certificates"] = json.dumps(certificates) + + @staticmethod + def _relation_data_is_valid(certificates_data: dict) -> bool: + """Use JSON schema validator to validate relation data content. + + Args: + certificates_data (dict): Certificate data dictionary as retrieved from relation data. + + Returns: + bool: True/False depending on whether the relation data follows the json schema. + """ + try: + validate(instance=certificates_data, schema=REQUIRER_JSON_SCHEMA) + return True + except exceptions.ValidationError: + return False + + def revoke_all_certificates(self) -> None: + """Revoke all certificates of this provider. + + This method is meant to be used when the Root CA has changed. + """ + for relation in self.model.relations[self.relationship_name]: + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = copy.deepcopy(provider_relation_data.get("certificates", [])) + for certificate in provider_certificates: + certificate["revoked"] = True + relation.data[self.model.app]["certificates"] = json.dumps(provider_certificates) + + def set_relation_certificate( + self, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + relation_id: int, + ) -> None: + """Add certificates to relation data. 
+ + Args: + certificate (str): Certificate + certificate_signing_request (str): Certificate signing request + ca (str): CA Certificate + chain (list): CA Chain + relation_id (int): Juju relation ID + + Returns: + None + """ + if not self.model.unit.is_leader(): + return + certificates_relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + self._remove_certificate( + certificate_signing_request=certificate_signing_request.strip(), + relation_id=relation_id, + ) + self._add_certificate( + relation_id=relation_id, + certificate=certificate.strip(), + certificate_signing_request=certificate_signing_request.strip(), + ca=ca.strip(), + chain=[cert.strip() for cert in chain], + ) + + def remove_certificate(self, certificate: str) -> None: + """Remove a given certificate from relation data. + + Args: + certificate (str): TLS Certificate + + Returns: + None + """ + certificates_relation = self.model.relations[self.relationship_name] + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + for certificate_relation in certificates_relation: + self._remove_certificate(certificate=certificate, relation_id=certificate_relation.id) + + def get_issued_certificates( + self, relation_id: Optional[int] = None + ) -> Dict[str, List[Dict[str, str]]]: + """Return a dictionary of issued certificates. + + It returns certificates from all relations if relation_id is not specified. + Certificates are returned per application name and CSR. + + Returns: + dict: Certificates per application name. + """ + certificates: Dict[str, List[Dict[str, str]]] = {} + relations = ( + [ + relation + for relation in self.model.relations[self.relationship_name] + if relation.id == relation_id + ] + if relation_id is not None + else self.model.relations.get(self.relationship_name, []) + ) + for relation in relations: + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + + certificates[relation.app.name] = [] # type: ignore[union-attr] + for certificate in provider_certificates: + if not certificate.get("revoked", False): + certificates[relation.app.name].append( # type: ignore[union-attr] + { + "csr": certificate["certificate_signing_request"], + "certificate": certificate["certificate"], + } + ) + + return certificates + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle relation changed event. + + Looks at the relation data and either emits: + - certificate request event: If the unit relation data contains a CSR for which + a certificate does not exist in the provider relation data. + - certificate revocation event: If the provider relation data contains a CSR for which + a csr does not exist in the requirer relation data. 
+ + Args: + event: Juju event + + Returns: + None + """ + if event.unit is None: + logger.error("Relation_changed event does not have a unit.") + return + if not self.model.unit.is_leader(): + return + requirer_relation_data = _load_relation_data(event.relation.data[event.unit]) + provider_relation_data = self._load_app_relation_data(event.relation) + if not self._relation_data_is_valid(requirer_relation_data): + logger.debug("Relation data did not pass JSON Schema validation") + return + provider_certificates = provider_relation_data.get("certificates", []) + requirer_csrs = requirer_relation_data.get("certificate_signing_requests", []) + provider_csrs = [ + certificate_creation_request["certificate_signing_request"] + for certificate_creation_request in provider_certificates + ] + requirer_unit_certificate_requests = [ + { + "csr": certificate_creation_request["certificate_signing_request"], + "is_ca": certificate_creation_request.get("ca", False), + } + for certificate_creation_request in requirer_csrs + ] + for certificate_request in requirer_unit_certificate_requests: + if certificate_request["csr"] not in provider_csrs: + self.on.certificate_creation_request.emit( + certificate_signing_request=certificate_request["csr"], + relation_id=event.relation.id, + is_ca=certificate_request["is_ca"], + ) + self._revoke_certificates_for_which_no_csr_exists(relation_id=event.relation.id) + + def _revoke_certificates_for_which_no_csr_exists(self, relation_id: int) -> None: + """Revoke certificates for which no unit has a CSR. + + Goes through all generated certificates and compare against the list of CSRs for all units + of a given relationship. + + Args: + relation_id (int): Relation id + + Returns: + None + """ + certificates_relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + provider_relation_data = self._load_app_relation_data(certificates_relation) + list_of_csrs: List[str] = [] + for unit in certificates_relation.units: + requirer_relation_data = _load_relation_data(certificates_relation.data[unit]) + requirer_csrs = requirer_relation_data.get("certificate_signing_requests", []) + list_of_csrs.extend(csr["certificate_signing_request"] for csr in requirer_csrs) + provider_certificates = provider_relation_data.get("certificates", []) + for certificate in provider_certificates: + if certificate["certificate_signing_request"] not in list_of_csrs: + self.on.certificate_revocation_request.emit( + certificate=certificate["certificate"], + certificate_signing_request=certificate["certificate_signing_request"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + self.remove_certificate(certificate=certificate["certificate"]) + + def get_outstanding_certificate_requests( + self, relation_id: Optional[int] = None + ) -> List[Dict[str, Union[int, str, List[Dict[str, str]]]]]: + """Return CSR's for which no certificate has been issued. + + Example return: [ + { + "relation_id": 0, + "application_name": "tls-certificates-requirer", + "unit_name": "tls-certificates-requirer/0", + "unit_csrs": [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "is_ca": false + } + ] + } + ] + + Args: + relation_id (int): Relation id + + Returns: + list: List of dictionaries that contain the unit's csrs + that don't have a certificate issued. 
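# A minimal provider-side sketch for TLSCertificatesProvidesV2 above: answer each
# certificate_creation_request by signing the CSR and publishing the result with
# set_relation_certificate. Illustrative only; the import path, the "certificates"
# endpoint name and the in-memory CA are assumptions made for the example.
from ops.charm import CharmBase
from ops.main import main

from charms.tls_certificates_interface.v2.tls_certificates import (  # assumed path
    CertificateCreationRequestEvent,
    TLSCertificatesProvidesV2,
    generate_ca,
    generate_certificate,
    generate_private_key,
)


class ExampleProviderCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        self.certificates = TLSCertificatesProvidesV2(self, "certificates")
        # Sketch only: a real provider would persist its CA instead of
        # regenerating it on every dispatch.
        self._ca_key = generate_private_key()
        self._ca_cert = generate_ca(private_key=self._ca_key, subject="example-provider-ca")
        self.framework.observe(
            self.certificates.on.certificate_creation_request,
            self._on_certificate_creation_request,
        )

    def _on_certificate_creation_request(self, event: CertificateCreationRequestEvent) -> None:
        # Sign the requested CSR and push the result into the relation data
        # for the requesting relation id (leader-only inside the library).
        certificate = generate_certificate(
            csr=event.certificate_signing_request.encode(),
            ca=self._ca_cert,
            ca_key=self._ca_key,
            is_ca=event.is_ca,
        )
        self.certificates.set_relation_certificate(
            certificate=certificate.decode(),
            certificate_signing_request=event.certificate_signing_request,
            ca=self._ca_cert.decode(),
            chain=[self._ca_cert.decode()],
            relation_id=event.relation_id,
        )


if __name__ == "__main__":
    main(ExampleProviderCharm)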
+ """ + all_unit_csr_mappings = copy.deepcopy(self.get_requirer_csrs(relation_id=relation_id)) + filtered_all_unit_csr_mappings: List[Dict[str, Union[int, str, List[Dict[str, str]]]]] = [] + for unit_csr_mapping in all_unit_csr_mappings: + csrs_without_certs = [] + for csr in unit_csr_mapping["unit_csrs"]: # type: ignore[union-attr] + if not self.certificate_issued_for_csr( + app_name=unit_csr_mapping["application_name"], # type: ignore[arg-type] + csr=csr["certificate_signing_request"], # type: ignore[index] + relation_id=relation_id, + ): + csrs_without_certs.append(csr) + if csrs_without_certs: + unit_csr_mapping["unit_csrs"] = csrs_without_certs # type: ignore[assignment] + filtered_all_unit_csr_mappings.append(unit_csr_mapping) + return filtered_all_unit_csr_mappings + + def get_requirer_csrs( + self, relation_id: Optional[int] = None + ) -> List[Dict[str, Union[int, str, List[Dict[str, str]]]]]: + """Return a list of requirers' CSRs grouped by unit. + + It returns CSRs from all relations if relation_id is not specified. + CSRs are returned per relation id, application name and unit name. + + Returns: + list: List of dictionaries that contain the unit's csrs + with the following information + relation_id, application_name and unit_name. + """ + unit_csr_mappings: List[Dict[str, Union[int, str, List[Dict[str, str]]]]] = [] + + relations = ( + [ + relation + for relation in self.model.relations[self.relationship_name] + if relation.id == relation_id + ] + if relation_id is not None + else self.model.relations.get(self.relationship_name, []) + ) + + for relation in relations: + for unit in relation.units: + requirer_relation_data = _load_relation_data(relation.data[unit]) + unit_csrs_list = requirer_relation_data.get("certificate_signing_requests", []) + unit_csr_mappings.append( + { + "relation_id": relation.id, + "application_name": relation.app.name, # type: ignore[union-attr] + "unit_name": unit.name, + "unit_csrs": unit_csrs_list, + } + ) + return unit_csr_mappings + + def certificate_issued_for_csr( + self, app_name: str, csr: str, relation_id: Optional[int] + ) -> bool: + """Check whether a certificate has been issued for a given CSR. + + Args: + app_name (str): Application name that the CSR belongs to. + csr (str): Certificate Signing Request. + relation_id (Optional[int]): Relation ID + Returns: + bool: True/False depending on whether a certificate has been issued for the given CSR. + """ + issued_certificates_per_csr = self.get_issued_certificates(relation_id=relation_id)[ + app_name + ] + for issued_pair in issued_certificates_per_csr: + if "csr" in issued_pair and issued_pair["csr"] == csr: + return csr_matches_certificate(csr, issued_pair["certificate"]) + return False + + +class TLSCertificatesRequiresV2(Object): + """TLS certificates requirer class to be instantiated by TLS certificates requirers.""" + + on = CertificatesRequirerCharmEvents() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relationship_name: str, + expiry_notification_time: int = 168, + ): + """Generate/use private key and observes relation changed event. + + Args: + charm: Charm object + relationship_name: Juju relation name + expiry_notification_time (int): Time difference between now and expiry (in hours). + Used to trigger the CertificateExpiring event. Default: 7 days. 
+ """ + super().__init__(charm, relationship_name) + self.relationship_name = relationship_name + self.charm = charm + self.expiry_notification_time = expiry_notification_time + self.framework.observe( + charm.on[relationship_name].relation_changed, self._on_relation_changed + ) + self.framework.observe( + charm.on[relationship_name].relation_broken, self._on_relation_broken + ) + if JujuVersion.from_environ().has_secrets: + self.framework.observe(charm.on.secret_expired, self._on_secret_expired) + else: + self.framework.observe(charm.on.update_status, self._on_update_status) + + @property + def _requirer_csrs(self) -> List[Dict[str, Union[bool, str]]]: + """Return list of requirer's CSRs from relation unit data. + + Example: + [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "ca": false + } + ] + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + requirer_relation_data = _load_relation_data(relation.data[self.model.unit]) + return requirer_relation_data.get("certificate_signing_requests", []) + + @property + def _provider_certificates(self) -> List[Dict[str, str]]: + """Return list of certificates from the provider's relation data.""" + relation = self.model.get_relation(self.relationship_name) + if not relation: + logger.debug("No relation: %s", self.relationship_name) + return [] + if not relation.app: + logger.debug("No remote app in relation: %s", self.relationship_name) + return [] + provider_relation_data = _load_relation_data(relation.data[relation.app]) + if not self._relation_data_is_valid(provider_relation_data): + logger.warning("Provider relation data did not pass JSON Schema validation") + return [] + return provider_relation_data.get("certificates", []) + + def _add_requirer_csr(self, csr: str, is_ca: bool) -> None: + """Add CSR to relation data. + + Args: + csr (str): Certificate Signing Request + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + new_csr_dict: Dict[str, Union[bool, str]] = { + "certificate_signing_request": csr, + "ca": is_ca, + } + if new_csr_dict in self._requirer_csrs: + logger.info("CSR already in relation data - Doing nothing") + return + requirer_csrs = copy.deepcopy(self._requirer_csrs) + requirer_csrs.append(new_csr_dict) + relation.data[self.model.unit]["certificate_signing_requests"] = json.dumps(requirer_csrs) + + def _remove_requirer_csr(self, csr: str) -> None: + """Remove CSR from relation data. 
+ + Args: + csr (str): Certificate signing request + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + requirer_csrs = copy.deepcopy(self._requirer_csrs) + if not requirer_csrs: + logger.info("No CSRs in relation data - Doing nothing") + return + for requirer_csr in requirer_csrs: + if requirer_csr["certificate_signing_request"] == csr: + requirer_csrs.remove(requirer_csr) + relation.data[self.model.unit]["certificate_signing_requests"] = json.dumps(requirer_csrs) + + def request_certificate_creation( + self, certificate_signing_request: bytes, is_ca: bool = False + ) -> None: + """Request TLS certificate to provider charm. + + Args: + certificate_signing_request (bytes): Certificate Signing Request + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + self._add_requirer_csr(certificate_signing_request.decode().strip(), is_ca=is_ca) + logger.info("Certificate request sent to provider") + + def request_certificate_revocation(self, certificate_signing_request: bytes) -> None: + """Remove CSR from relation data. + + The provider of this relation is then expected to remove certificates associated to this + CSR from the relation data as well and emit a request_certificate_revocation event for the + provider charm to interpret. + + Args: + certificate_signing_request (bytes): Certificate Signing Request + + Returns: + None + """ + self._remove_requirer_csr(certificate_signing_request.decode().strip()) + logger.info("Certificate revocation sent to provider") + + def request_certificate_renewal( + self, old_certificate_signing_request: bytes, new_certificate_signing_request: bytes + ) -> None: + """Renew certificate. + + Removes old CSR from relation data and adds new one. + + Args: + old_certificate_signing_request: Old CSR + new_certificate_signing_request: New CSR + + Returns: + None + """ + try: + self.request_certificate_revocation( + certificate_signing_request=old_certificate_signing_request + ) + except RuntimeError: + logger.warning("Certificate revocation failed.") + self.request_certificate_creation( + certificate_signing_request=new_certificate_signing_request + ) + logger.info("Certificate renewal request completed.") + + def get_assigned_certificates(self) -> List[Dict[str, str]]: + """Get a list of certificates that were assigned to this unit. + + Returns: + List of certificates. For example: + [ + { + "ca": "-----BEGIN CERTIFICATE-----...", + "chain": [ + "-----BEGIN CERTIFICATE-----..." + ], + "certificate": "-----BEGIN CERTIFICATE-----...", + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + } + ] + """ + final_list = [] + for csr in self.get_certificate_signing_requests(fulfilled_only=True): + assert isinstance(csr["certificate_signing_request"], str) + if cert := self._find_certificate_in_relation_data(csr["certificate_signing_request"]): + final_list.append(cert) + return final_list + + def get_expiring_certificates(self) -> List[Dict[str, str]]: + """Get a list of certificates that were assigned to this unit that are expiring or expired. + + Returns: + List of certificates. 
For example: + [ + { + "ca": "-----BEGIN CERTIFICATE-----...", + "chain": [ + "-----BEGIN CERTIFICATE-----..." + ], + "certificate": "-----BEGIN CERTIFICATE-----...", + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + } + ] + """ + final_list = [] + for csr in self.get_certificate_signing_requests(fulfilled_only=True): + assert isinstance(csr["certificate_signing_request"], str) + if cert := self._find_certificate_in_relation_data(csr["certificate_signing_request"]): + expiry_time = _get_certificate_expiry_time(cert["certificate"]) + if not expiry_time: + continue + expiry_notification_time = expiry_time - timedelta( + hours=self.expiry_notification_time + ) + if datetime.now(timezone.utc) > expiry_notification_time: + final_list.append(cert) + return final_list + + def get_certificate_signing_requests( + self, + fulfilled_only: bool = False, + unfulfilled_only: bool = False, + ) -> List[Dict[str, Union[bool, str]]]: + """Get the list of CSR's that were sent to the provider. + + You can choose to get only the CSR's that have a certificate assigned or only the CSR's + that don't. + + Args: + fulfilled_only (bool): This option will discard CSRs that don't have certificates yet. + unfulfilled_only (bool): This option will discard CSRs that have certificates signed. + + Returns: + List of CSR dictionaries. For example: + [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "ca": false + } + ] + """ + final_list = [] + for csr in self._requirer_csrs: + assert isinstance(csr["certificate_signing_request"], str) + cert = self._find_certificate_in_relation_data(csr["certificate_signing_request"]) + if (unfulfilled_only and cert) or (fulfilled_only and not cert): + continue + final_list.append(csr) + + return final_list + + @staticmethod + def _relation_data_is_valid(certificates_data: dict) -> bool: + """Check whether relation data is valid based on json schema. + + Args: + certificates_data: Certificate data in dict format. + + Returns: + bool: Whether relation data is valid. + """ + try: + validate(instance=certificates_data, schema=PROVIDER_JSON_SCHEMA) + return True + except exceptions.ValidationError: + return False + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle relation changed event. + + Goes through all providers certificates that match a requested CSR. + + If the provider certificate is revoked, emit a CertificateInvalidateEvent, + otherwise emit a CertificateAvailableEvent. + + When Juju secrets are available, remove the secret for revoked certificate, + or add a secret with the correct expiry time for new certificates. 
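# A minimal requirer-side sketch for TLSCertificatesRequiresV2 above: request a
# certificate when the relation is joined, react to certificate_available, and renew
# on certificate_expiring. Illustrative only; the import path and the "certificates"
# endpoint name are assumptions, and the key/CSR are kept in memory for brevity where
# a real charm would persist them.
import logging

from ops.charm import CharmBase, RelationJoinedEvent
from ops.main import main

from charms.tls_certificates_interface.v2.tls_certificates import (  # assumed path
    CertificateAvailableEvent,
    CertificateExpiringEvent,
    TLSCertificatesRequiresV2,
    generate_csr,
    generate_private_key,
)

logger = logging.getLogger(__name__)


class ExampleRequirerCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # Notify 168 hours (7 days) before expiry, matching the library default.
        self.certificates = TLSCertificatesRequiresV2(
            self, "certificates", expiry_notification_time=168
        )
        self._private_key = generate_private_key()
        self._csr = None
        self.framework.observe(
            self.on.certificates_relation_joined, self._on_certificates_relation_joined
        )
        self.framework.observe(
            self.certificates.on.certificate_available, self._on_certificate_available
        )
        self.framework.observe(
            self.certificates.on.certificate_expiring, self._on_certificate_expiring
        )

    def _on_certificates_relation_joined(self, event: RelationJoinedEvent) -> None:
        # Publish a CSR in the unit relation data; the provider answers it.
        self._csr = generate_csr(private_key=self._private_key, subject=self.app.name)
        self.certificates.request_certificate_creation(certificate_signing_request=self._csr)

    def _on_certificate_available(self, event: CertificateAvailableEvent) -> None:
        # The event carries certificate, certificate_signing_request, ca and chain.
        logger.info("Certificate received for CSR starting %s", event.certificate_signing_request[:40])

    def _on_certificate_expiring(self, event: CertificateExpiringEvent) -> None:
        if self._csr is None:
            return
        new_csr = generate_csr(private_key=self._private_key, subject=self.app.name)
        self.certificates.request_certificate_renewal(
            old_certificate_signing_request=self._csr,
            new_certificate_signing_request=new_csr,
        )
        self._csr = new_csr


if __name__ == "__main__":
    main(ExampleRequirerCharm)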
+ + + Args: + event: Juju event + + Returns: + None + """ + requirer_csrs = [ + certificate_creation_request["certificate_signing_request"] + for certificate_creation_request in self._requirer_csrs + ] + for certificate in self._provider_certificates: + if certificate["certificate_signing_request"] in requirer_csrs: + if certificate.get("revoked", False): + if JujuVersion.from_environ().has_secrets: + with suppress(SecretNotFoundError): + secret = self.model.get_secret( + label=f"{LIBID}-{certificate['certificate_signing_request']}" + ) + secret.remove_all_revisions() + self.on.certificate_invalidated.emit( + reason="revoked", + certificate=certificate["certificate"], + certificate_signing_request=certificate["certificate_signing_request"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + else: + if JujuVersion.from_environ().has_secrets: + try: + secret = self.model.get_secret( + label=f"{LIBID}-{certificate['certificate_signing_request']}" + ) + secret.set_content({"certificate": certificate["certificate"]}) + secret.set_info( + expire=self._get_next_secret_expiry_time( + certificate["certificate"] + ), + ) + except SecretNotFoundError: + secret = self.charm.unit.add_secret( + {"certificate": certificate["certificate"]}, + label=f"{LIBID}-{certificate['certificate_signing_request']}", + expire=self._get_next_secret_expiry_time( + certificate["certificate"] + ), + ) + self.on.certificate_available.emit( + certificate_signing_request=certificate["certificate_signing_request"], + certificate=certificate["certificate"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + + def _get_next_secret_expiry_time(self, certificate: str) -> Optional[datetime]: + """Return the expiry time or expiry notification time. + + Extracts the expiry time from the provided certificate, calculates the + expiry notification time and return the closest of the two, that is in + the future. + + Args: + certificate: x509 certificate + + Returns: + Optional[datetime]: None if the certificate expiry time cannot be read, + next expiry time otherwise. + """ + expiry_time = _get_certificate_expiry_time(certificate) + if not expiry_time: + return None + expiry_notification_time = expiry_time - timedelta(hours=self.expiry_notification_time) + return _get_closest_future_time(expiry_notification_time, expiry_time) + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Handle relation broken event. + + Emitting `all_certificates_invalidated` from `relation-broken` rather + than `relation-departed` since certs are stored in app data. + + Args: + event: Juju event + + Returns: + None + """ + self.on.all_certificates_invalidated.emit() + + def _on_secret_expired(self, event: SecretExpiredEvent) -> None: + """Handle secret expired event. + + Loads the certificate from the secret, and will emit 1 of 2 + events. + + If the certificate is not yet expired, emits CertificateExpiringEvent + and updates the expiry time of the secret to the exact expiry time on + the certificate. + + If the certificate is expired, emits CertificateInvalidedEvent and + deletes the secret. + + Args: + event (SecretExpiredEvent): Juju event + """ + if not event.secret.label or not event.secret.label.startswith(f"{LIBID}-"): + return + csr = event.secret.label[len(f"{LIBID}-") :] + certificate_dict = self._find_certificate_in_relation_data(csr) + if not certificate_dict: + # A secret expired but we did not find matching certificate. 
Cleaning up + event.secret.remove_all_revisions() + return + + expiry_time = _get_certificate_expiry_time(certificate_dict["certificate"]) + if not expiry_time: + # A secret expired but matching certificate is invalid. Cleaning up + event.secret.remove_all_revisions() + return + + if datetime.now(timezone.utc) < expiry_time: + logger.warning("Certificate almost expired") + self.on.certificate_expiring.emit( + certificate=certificate_dict["certificate"], + expiry=expiry_time.isoformat(), + ) + event.secret.set_info( + expire=_get_certificate_expiry_time(certificate_dict["certificate"]), + ) + else: + logger.warning("Certificate is expired") + self.on.certificate_invalidated.emit( + reason="expired", + certificate=certificate_dict["certificate"], + certificate_signing_request=certificate_dict["certificate_signing_request"], + ca=certificate_dict["ca"], + chain=certificate_dict["chain"], + ) + self.request_certificate_revocation(certificate_dict["certificate"].encode()) + event.secret.remove_all_revisions() + + def _find_certificate_in_relation_data(self, csr: str) -> Optional[Dict[str, Any]]: + """Return the certificate that match the given CSR.""" + for certificate_dict in self._provider_certificates: + if certificate_dict["certificate_signing_request"] != csr: + continue + return certificate_dict + return None + + def _on_update_status(self, event: UpdateStatusEvent) -> None: + """Handle update status event. + + Goes through each certificate in the "certificates" relation and checks their expiry date. + If they are close to expire (<7 days), emits a CertificateExpiringEvent event and if + they are expired, emits a CertificateExpiredEvent. + + Args: + event (UpdateStatusEvent): Juju event + + Returns: + None + """ + for certificate_dict in self._provider_certificates: + expiry_time = _get_certificate_expiry_time(certificate_dict["certificate"]) + if not expiry_time: + continue + time_difference = expiry_time - datetime.now(timezone.utc) + if time_difference.total_seconds() < 0: + logger.warning("Certificate is expired") + self.on.certificate_invalidated.emit( + reason="expired", + certificate=certificate_dict["certificate"], + certificate_signing_request=certificate_dict["certificate_signing_request"], + ca=certificate_dict["ca"], + chain=certificate_dict["chain"], + ) + self.request_certificate_revocation(certificate_dict["certificate"].encode()) + continue + if time_difference.total_seconds() < (self.expiry_notification_time * 60 * 60): + logger.warning("Certificate almost expired") + self.on.certificate_expiring.emit( + certificate=certificate_dict["certificate"], + expiry=expiry_time.isoformat(), + ) diff --git a/metadata.yaml b/metadata.yaml index f42fbdff..215c7406 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -38,7 +38,13 @@ requires: juju-info: interface: juju-info scope: container + certificates: + interface: tls-certificates + optional: true + limit: 1 peers: + tls: + interface: tls cos: interface: cos upgrade-version-a: diff --git a/poetry.lock b/poetry.lock index 029e14a9..4025fb0e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -28,6 +28,25 @@ six = "*" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = 
"sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "backcall" version = "0.2.0" @@ -428,47 +447,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.3" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = 
"cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = 
"cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -622,6 +650,24 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = 
"sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -741,6 +787,44 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsonschema" +version = "4.21.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.31.0" + [[package]] name = "juju" version = "3.2.0.1" @@ -989,13 +1073,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "ops" -version = "2.6.0" +version = "2.9.0" description = "The Python library behind great charms" optional = false python-versions = ">=3.8" files = [ - {file = "ops-2.6.0-py3-none-any.whl", hash = "sha256:b8ce352df4d17de483173dd2f8caf397e89585212d73b4d466b541b1ad3b8966"}, - {file = "ops-2.6.0.tar.gz", hash = "sha256:387d5f5bd004b865f575ea4e25ca298bdca05b2b2beb2c51f203d5856c460cf6"}, + {file = "ops-2.9.0-py3-none-any.whl", hash = "sha256:1d443e4d45e0c2443b8334d37a177287f22a12ee0cb02a30cf7c3159316cb643"}, + {file = "ops-2.9.0.tar.gz", hash = "sha256:d3c541659eded56f42f9c18270408cc6313895968f1360b3f1de75c99cc99ada"}, ] [package.dependencies] @@ -1117,6 +1201,17 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + [[package]] name = "platformdirs" version = "3.10.0" @@ -1691,6 +1786,21 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "referencing" +version = "0.34.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "requests" version = "2.31.0" @@ -1730,6 +1840,114 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = 
"rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + [[package]] name = "rsa" version = "4.9" @@ -2096,7 +2314,22 @@ files = [ {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, ] +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "623db8d07c7bcc8ba1a0d7d8322b2c749895686892fb43ce83ed0b27925675a3" +content-hash = "4efb0ad3c8e6d914741f689cd67e03785fb32ce25157cde71cdd0c9affa64a56" diff --git a/pyproject.toml b/pyproject.toml index ca1499c7..793912ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,8 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 -ops = "^2.6.0" +# there is a breaking change in ops 2.10.0: https://github.com/canonical/operator/pull/1091#issuecomment-1888644075 +ops = "^2.6.0,<2.10.0" tenacity = "^8.2.3" poetry-core = "^1.7.0" jinja2 = "^3.1.2" @@ -19,6 +20,10 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py ops = ">=2.0.0" +# tls_certificates_interface/v2/tls_certificates.py +# tls_certificates lib v2 uses a feature only available in cryptography >=42.0.5 +cryptography = ">=42.0.5" +jsonschema = "*" # grafana_agent/v0/cos_agent.py pydantic = "<2" cosl = "*" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 3f769c93..b395d87e 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -5,11 +5,9 @@ import abc import logging -import socket import typing import ops -import tenacity import container import lifecycle @@ -18,6 +16,7 @@ import relations.cos import relations.database_provides import relations.database_requires +import relations.tls import server_exceptions import upgrade import workload @@ -28,6 +27,11 @@ class MySQLRouterCharm(ops.CharmBase, abc.ABC): """MySQL Router charm""" + _READ_WRITE_PORT = 6446 + _READ_ONLY_PORT = 6447 + _READ_WRITE_X_PORT = 6448 + _READ_ONLY_X_PORT = 6449 + def __init__(self, *args) -> None: super().__init__(*args) # Instantiate before registering other event observers @@ -60,6 +64,7 @@ def __init__(self, *args) -> None: self.on[upgrade.PEER_RELATION_ENDPOINT_NAME].relation_created, self._upgrade_relation_created, ) + self.tls = relations.tls.RelationEndpoint(self) @property @abc.abstractmethod @@ -94,28 +99,42 @@ def _read_write_endpoint(self) -> str: def _read_only_endpoint(self) -> str: """MySQL Router read-only endpoint""" + @property + @abc.abstractmethod + def _exposed_read_write_endpoint(self) -> str: + """The exposed read-write endpoint""" + + @property + @abc.abstractmethod + def _exposed_read_only_endpoint(self) -> str: + """The exposed read-only endpoint""" + + @abc.abstractmethod + def is_externally_accessible(self, *, event) -> typing.Optional[bool]: + """Whether endpoints should be externally accessible. + + Only defined in vm charm to return True/False. In k8s charm, returns None. 
+ """ + @property def _tls_certificate_saved(self) -> bool: """Whether a TLS certificate is available to use""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return False + return self.tls.certificate_saved @property def _tls_key(self) -> typing.Optional[str]: """Custom TLS key""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return None + return self.tls.key @property - def _tls_certificate(self) -> typing.Optional[str]: - """Custom TLS certificate""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return None + def _tls_certificate_authority(self) -> typing.Optional[str]: + """Custom TLS certificate authority""" + return self.tls.certificate_authority @property - def _tls_certificate_authority(self) -> typing.Optional[str]: - # TODO VM TLS: Update property after implementing TLS on machine charm - return None + def _tls_certificate(self) -> typing.Optional[str]: + """Custom TLS certificate""" + return self.tls.certificate def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: """Returns the exporter config for MySQLRouter exporter if cos relation exists""" @@ -191,28 +210,26 @@ def set_status(self, *, event, app=True, unit=True) -> None: self.unit.status = self._determine_unit_status(event=event) logger.debug(f"Set unit status to {self.unit.status}") - def wait_until_mysql_router_ready(self) -> None: + @abc.abstractmethod + def wait_until_mysql_router_ready(self, *, event) -> None: """Wait until a connection to MySQL Router is possible. Retry every 5 seconds for up to 30 seconds. """ - logger.debug("Waiting until MySQL Router is ready") - self.unit.status = ops.MaintenanceStatus("MySQL Router starting") - try: - for attempt in tenacity.Retrying( - reraise=True, - stop=tenacity.stop_after_delay(30), - wait=tenacity.wait_fixed(5), - ): - with attempt: - for port in (6446, 6447): - with socket.socket() as s: - assert s.connect_ex(("localhost", port)) == 0 - except AssertionError: - logger.exception("Unable to connect to MySQL Router") - raise - else: - logger.debug("MySQL Router is ready") + + @abc.abstractmethod + def _reconcile_node_port(self, *, event) -> None: + """Reconcile node port. + + Only applies to Kubernetes charm + """ + + @abc.abstractmethod + def _reconcile_ports(self, *, event) -> None: + """Reconcile exposed ports. 
+ + Only applies to Machine charm + """ # ======================= # Handlers @@ -254,7 +271,10 @@ def reconcile(self, event=None) -> None: # noqa: C901 if self._upgrade.unit_state == "outdated": if self._upgrade.authorized: self._upgrade.upgrade_unit( - workload_=workload_, tls=self._tls_certificate_saved + event=event, + workload_=workload_, + tls=self._tls_certificate_saved, + exporter_config=self._cos_exporter_config(event), ) else: self.set_status(event=event) @@ -283,14 +303,18 @@ def reconcile(self, event=None) -> None: # noqa: C901 and isinstance(workload_, workload.AuthenticatedWorkload) and workload_.container_ready ): + self._reconcile_node_port(event=event) self._database_provides.reconcile_users( event=event, router_read_write_endpoint=self._read_write_endpoint, router_read_only_endpoint=self._read_only_endpoint, + exposed_read_write_endpoint=self._exposed_read_write_endpoint, + exposed_read_only_endpoint=self._exposed_read_only_endpoint, shell=workload_.shell, ) if workload_.container_ready: workload_.reconcile( + event=event, tls=self._tls_certificate_saved, unit_name=self.unit.name, exporter_config=self._cos_exporter_config(event), @@ -298,6 +322,11 @@ def reconcile(self, event=None) -> None: # noqa: C901 certificate=self._tls_certificate, certificate_authority=self._tls_certificate_authority, ) + if not self._upgrade.in_progress and isinstance( + workload_, workload.AuthenticatedWorkload + ): + self._reconcile_ports(event=event) + # Empty waiting status means we're waiting for database requires relation before # starting workload if not workload_.status or workload_.status == ops.WaitingStatus(): diff --git a/src/container.py b/src/container.py index ed8774dc..2a42e574 100644 --- a/src/container.py +++ b/src/container.py @@ -98,10 +98,12 @@ def __init__( mysql_router_command: str, mysql_shell_command: str, mysql_router_password_command: str, + unit_name: str, ) -> None: self._mysql_router_command = mysql_router_command self._mysql_shell_command = mysql_shell_command self._mysql_router_password_command = mysql_router_password_command + self._unit_name = unit_name @property @abc.abstractmethod diff --git a/src/machine_charm.py b/src/machine_charm.py index b507c37e..fb14c9a5 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -7,16 +7,18 @@ """MySQL Router machine charm""" import logging +import socket import typing import ops +import tenacity import abstract_charm import machine_logrotate import machine_upgrade +import machine_workload import relations.database_providers_wrapper import snap -import socket_workload import upgrade logger = logging.getLogger(__name__) @@ -32,7 +34,7 @@ def __init__(self, *args) -> None: self._database_provides = relations.database_providers_wrapper.RelationEndpoint( self, self._database_provides ) - self._authenticated_workload_type = socket_workload.AuthenticatedSocketWorkload + self._authenticated_workload_type = machine_workload.AuthenticatedMachineWorkload self.framework.observe(self.on.install, self._on_install) self.framework.observe(self.on.remove, self._on_remove) self.framework.observe(self.on.upgrade_charm, self._on_upgrade_charm) @@ -47,7 +49,7 @@ def _subordinate_relation_endpoint_names(self) -> typing.Optional[typing.Iterabl @property def _container(self) -> snap.Snap: - return snap.Snap() + return snap.Snap(unit_name=self.unit.name) @property def _upgrade(self) -> typing.Optional[machine_upgrade.Upgrade]: @@ -60,6 +62,11 @@ def _upgrade(self) -> typing.Optional[machine_upgrade.Upgrade]: def _logrotate(self) -> 
machine_logrotate.LogRotate: return machine_logrotate.LogRotate(container_=self._container) + @property + def host_address(self) -> str: + """The host address for the machine.""" + return str(self.model.get_binding("juju-info").network.bind_address) + @property def _read_write_endpoint(self) -> str: return f'file://{self._container.path("/run/mysqlrouter/mysql.sock")}' @@ -68,6 +75,61 @@ def _read_write_endpoint(self) -> str: def _read_only_endpoint(self) -> str: return f'file://{self._container.path("/run/mysqlrouter/mysqlro.sock")}' + @property + def _exposed_read_write_endpoint(self) -> str: + return f"{self.host_address}:{self._READ_WRITE_PORT}" + + @property + def _exposed_read_only_endpoint(self) -> str: + return f"{self.host_address}:{self._READ_ONLY_PORT}" + + def is_externally_accessible(self, *, event) -> typing.Optional[bool]: + return self._database_provides.external_connectivity(event) + + def _reconcile_node_port(self, *, event) -> None: + """Only applies to Kubernetes charm, so no-op.""" + pass + + def _reconcile_ports(self, *, event) -> None: + if self.is_externally_accessible(event=event): + ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] + else: + ports = [] + self.unit.set_ports(*ports) + + def wait_until_mysql_router_ready(self, *, event) -> None: + logger.debug("Waiting until MySQL Router is ready") + self.unit.status = ops.MaintenanceStatus("MySQL Router starting") + try: + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(30), + wait=tenacity.wait_fixed(5), + ): + with attempt: + if self.is_externally_accessible(event=event): + for port in ( + self._READ_WRITE_PORT, + self._READ_ONLY_PORT, + self._READ_WRITE_X_PORT, + self._READ_ONLY_X_PORT, + ): + with socket.socket() as s: + assert s.connect_ex(("localhost", port)) == 0 + else: + for socket_file in ( + "/run/mysqlrouter/mysql.sock", + "/run/mysqlrouter/mysqlro.sock", + ): + assert self._container.path(socket_file).exists() + with socket.socket(socket.AF_UNIX) as s: + assert s.connect_ex(str(self._container.path(socket_file))) == 0 + except AssertionError: + logger.exception("Unable to connect to MySQL Router") + raise + else: + logger.debug("MySQL Router is ready") + # ======================= # Handlers # ======================= @@ -111,7 +173,7 @@ def _on_force_upgrade_action(self, event: ops.ActionEvent) -> None: logger.debug("Forcing upgrade") event.log(f"Forcefully upgrading {self.unit.name}") self._upgrade.upgrade_unit( - workload_=self.get_workload(event=None), tls=self._tls_certificate_saved + event=event, workload_=self.get_workload(event=None), tls=self._tls_certificate_saved ) self.reconcile() event.set_results({"result": f"Forcefully upgraded {self.unit.name}"}) diff --git a/src/machine_logrotate.py b/src/machine_logrotate.py index 00b148e3..58a47e80 100644 --- a/src/machine_logrotate.py +++ b/src/machine_logrotate.py @@ -51,6 +51,6 @@ def enable(self) -> None: def disable(self) -> None: logger.debug("Removing cron job for log rotation of mysqlrouter") - self._logrotate_config.unlink() - self._cron_file.unlink() + self._logrotate_config.unlink(missing_ok=True) + self._cron_file.unlink(missing_ok=True) logger.debug("Removed cron job for log rotation of mysqlrouter") diff --git a/src/machine_upgrade.py b/src/machine_upgrade.py index 6a0d7197..58366218 100644 --- a/src/machine_upgrade.py +++ b/src/machine_upgrade.py @@ -17,6 +17,9 @@ import upgrade import workload +if typing.TYPE_CHECKING: + import relations.cos + logger = logging.getLogger(__name__) @@ -152,10 +155,17 
@@ def authorized(self) -> bool: return False return False - def upgrade_unit(self, *, workload_: workload.Workload, tls: bool) -> None: + def upgrade_unit( + self, + *, + event, + workload_: workload.Workload, + tls: bool, + exporter_config: "relations.cos.ExporterConfig", + ) -> None: logger.debug(f"Upgrading {self.authorized=}") self.unit_state = "upgrading" - workload_.upgrade(unit=self._unit, tls=tls) + workload_.upgrade(event=event, unit=self._unit, tls=tls, exporter_config=exporter_config) self._unit_workload_container_version = snap.REVISION self._unit_workload_version = self._current_versions["workload"] logger.debug( diff --git a/src/socket_workload.py b/src/machine_workload.py similarity index 56% rename from src/socket_workload.py rename to src/machine_workload.py index 411fd8fe..62fee82e 100644 --- a/src/socket_workload.py +++ b/src/machine_workload.py @@ -11,26 +11,38 @@ import workload +if typing.TYPE_CHECKING: + import relations.database_requires + logger = logging.getLogger(__name__) -class AuthenticatedSocketWorkload(workload.AuthenticatedWorkload): +class AuthenticatedMachineWorkload(workload.AuthenticatedWorkload): """Workload with connection to MySQL cluster and with Unix sockets enabled""" # TODO python3.10 min version: Use `list` instead of `typing.List` - def _get_bootstrap_command(self, password: str) -> typing.List[str]: - command = super()._get_bootstrap_command(password) - command.extend( - [ - "--conf-bind-address", - "127.0.0.1", - "--conf-use-sockets", - # For unix sockets, authentication fails on first connection if this option is not - # set. Workaround for https://bugs.mysql.com/bug.php?id=107291 - "--conf-set-option", - "DEFAULT.server_ssl_mode=PREFERRED", - ] - ) + def _get_bootstrap_command( + self, *, event, connection_info: "relations.database_requires.ConnectionInformation" + ) -> typing.List[str]: + command = super()._get_bootstrap_command(connection_info) + if self._charm.is_externally_accessible(event=event): + command.extend( + [ + "--conf-bind-address", + "0.0.0.0", + ] + ) + else: + command.extend( + [ + "--conf-use-sockets", + # For unix sockets, authentication fails on first connection if this option is not + # set. 
Workaround for https://bugs.mysql.com/bug.php?id=107291 + "--conf-set-option", + "DEFAULT.server_ssl_mode=PREFERRED", + "--conf-skip-tcp", + ] + ) return command def _update_configured_socket_file_locations(self) -> None: @@ -58,6 +70,7 @@ def _update_configured_socket_file_locations(self) -> None: self._container.router_config_file.write_text(output.getvalue()) logger.debug("Updated configured socket file locations") - def _bootstrap_router(self, *, tls: bool) -> None: - super()._bootstrap_router(tls=tls) - self._update_configured_socket_file_locations() + def _bootstrap_router(self, *, event, tls: bool) -> None: + super()._bootstrap_router(event=event, tls=tls) + if not self._charm.is_externally_accessible(event=event): + self._update_configured_socket_file_locations() diff --git a/src/relations/cos.py b/src/relations/cos.py index 93919d67..2041fe7d 100644 --- a/src/relations/cos.py +++ b/src/relations/cos.py @@ -86,21 +86,21 @@ def relation_exists(self) -> bool: def get_monitoring_password(self) -> str: """Gets the monitoring password from unit peer data, or generate and cache it.""" - monitoring_password = self._secrets.get_secret( + monitoring_password = self._secrets.get_value( relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY ) if monitoring_password: return monitoring_password monitoring_password = utils.generate_password() - self._secrets.set_secret( + self._secrets.set_value( relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, monitoring_password ) return monitoring_password def _reset_monitoring_password(self) -> None: """Reset the monitoring password from unit peer data.""" - self._secrets.set_secret(relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, None) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, None) def is_relation_breaking(self, event) -> bool: """Whether relation will be broken after the current event is handled.""" diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 382e2554..3c21cec1 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -38,12 +38,18 @@ def __init__( charm_ ) + def external_connectivity(self, event) -> bool: + """Whether any of the relations are marked as external.""" + return self._database_provides.external_connectivity(event) + def reconcile_users( self, *, event, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create requested users and delete inactive users. @@ -56,6 +62,8 @@ def reconcile_users( event=event, router_read_write_endpoint=router_read_write_endpoint, router_read_only_endpoint=router_read_only_endpoint, + exposed_read_write_endpoint=exposed_read_write_endpoint, + exposed_read_only_endpoint=exposed_read_only_endpoint, shell=shell, ) self._deprecated_shared_db.reconcile_users(event=event, shell=shell) diff --git a/src/relations/database_provides.py b/src/relations/database_provides.py index e97483aa..08bf6dc5 100644 --- a/src/relations/database_provides.py +++ b/src/relations/database_provides.py @@ -70,6 +70,11 @@ def __init__( # Application charm databag databag = remote_databag.RemoteDatabag(interface=interface, relation=relation) self._database: str = databag["database"] + # Whether endpoints should be externally accessible + # (e.g. 
when related to `data-integrator` charm) + # Implements DA073 - Add Expose Flag to the Database Interface + # https://docs.google.com/document/d/1Y7OZWwMdvF8eEMuVKrqEfuFV3JOjpqLHL7_GPqJpRHU + self.external_connectivity = databag.get("external-node-connectivity") == "true" if databag.get("extra-user-roles"): raise _UnsupportedExtraUserRole( app_name=relation.app.name, endpoint_name=relation.name @@ -100,6 +105,8 @@ def create_database_and_user( *, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create database & user and update databag.""" @@ -115,11 +122,21 @@ def create_database_and_user( password = shell.create_application_database_and_user( username=username, database=self._database ) + + rw_endpoint = ( + exposed_read_write_endpoint + if self.external_connectivity + else router_read_write_endpoint + ) + ro_endpoint = ( + exposed_read_only_endpoint if self.external_connectivity else router_read_only_endpoint + ) + self._set_databag( username=username, password=password, - router_read_write_endpoint=router_read_write_endpoint, - router_read_only_endpoint=router_read_only_endpoint, + router_read_write_endpoint=rw_endpoint, + router_read_only_endpoint=ro_endpoint, ) @@ -162,6 +179,7 @@ class RelationEndpoint: def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: self._interface = data_interfaces.DatabaseProvides(charm_, relation_name=self._NAME) + charm_.framework.observe(charm_.on[self._NAME].relation_created, charm_.reconcile) charm_.framework.observe(self._interface.on.database_requested, charm_.reconcile) charm_.framework.observe(charm_.on[self._NAME].relation_broken, charm_.reconcile) @@ -179,12 +197,32 @@ def _shared_users(self) -> typing.List[_RelationWithSharedUser]: pass return shared_users + def external_connectivity(self, event) -> bool: + """Whether any of the relations are marked as external.""" + requested_users = [] + for relation in self._interface.relations: + try: + requested_users.append( + _RelationThatRequestedUser( + relation=relation, interface=self._interface, event=event + ) + ) + except ( + _RelationBreaking, + remote_databag.IncompleteDatabag, + _UnsupportedExtraUserRole, + ): + pass + return any(relation.external_connectivity for relation in requested_users) + def reconcile_users( self, *, event, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create requested users and delete inactive users. 
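A minimal sketch of the endpoint-selection rule introduced above in src/relations/database_provides.py, assuming only what the diff shows: a requirer such as the data-integrator charm marks its databag with "external-node-connectivity": "true", and the provider then publishes the exposed host:port endpoints (ports 6446/6447 on machines) instead of the local Unix-socket endpoints. The helper name below is illustrative and is not part of the charm code.

    import typing

    def select_endpoints(
        requirer_databag: typing.Dict[str, str],
        *,
        socket_read_write: str,
        socket_read_only: str,
        exposed_read_write: str,
        exposed_read_only: str,
    ) -> typing.Tuple[str, str]:
        """Return the (read-write, read-only) endpoints to publish to the requirer."""
        external = requirer_databag.get("external-node-connectivity") == "true"
        if external:
            # e.g. "10.0.0.5:6446", "10.0.0.5:6447" (host address + _READ_WRITE_PORT/_READ_ONLY_PORT)
            return exposed_read_write, exposed_read_only
        # e.g. "file:///run/mysqlrouter/mysql.sock", "file:///run/mysqlrouter/mysqlro.sock"
        return socket_read_write, socket_read_only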
@@ -216,6 +254,8 @@ def reconcile_users( relation.create_database_and_user( router_read_write_endpoint=router_read_write_endpoint, router_read_only_endpoint=router_read_only_endpoint, + exposed_read_write_endpoint=exposed_read_write_endpoint, + exposed_read_only_endpoint=exposed_read_only_endpoint, shell=shell, ) for relation in self._shared_users: diff --git a/src/relations/secrets.py b/src/relations/secrets.py index dfad79a7..e6d7b20d 100644 --- a/src/relations/secrets.py +++ b/src/relations/secrets.py @@ -27,44 +27,42 @@ class RelationSecrets: def __init__( self, charm: "abstract_charm.MySQLRouterCharm", - peer_relation_name: str, + relation_name: str, app_secret_fields: typing.List[str] = [], unit_secret_fields: typing.List[str] = [], ) -> None: self._charm = charm - self._peer_relation_name = peer_relation_name + self._relation_name = relation_name self._peer_relation_app = data_interfaces.DataPeer( charm, - relation_name=peer_relation_name, + relation_name=relation_name, additional_secret_fields=app_secret_fields, - secret_field_name=self._SECRET_INTERNAL_LABEL, deleted_label=self._SECRET_DELETED_LABEL, ) self._peer_relation_unit = data_interfaces.DataPeerUnit( charm, - relation_name=peer_relation_name, + relation_name=relation_name, additional_secret_fields=unit_secret_fields, - secret_field_name=self._SECRET_INTERNAL_LABEL, deleted_label=self._SECRET_DELETED_LABEL, ) - def peer_relation_data(self, scope: Scopes) -> data_interfaces.DataPeer: + def _peer_relation_data(self, scope: Scopes) -> data_interfaces.DataPeer: """Returns the peer relation data per scope.""" if scope == APP_SCOPE: return self._peer_relation_app elif scope == UNIT_SCOPE: return self._peer_relation_unit - def get_secret(self, scope: Scopes, key: str) -> typing.Optional[str]: + def get_value(self, scope: Scopes, key: str) -> typing.Optional[str]: """Get secret from the secret storage.""" if scope not in typing.get_args(Scopes): raise ValueError("Unknown secret scope") - peers = self._charm.model.get_relation(self._peer_relation_name) - return self.peer_relation_data(scope).fetch_my_relation_field(peers.id, key) + peers = self._charm.model.get_relation(self._relation_name) + return self._peer_relation_data(scope).fetch_my_relation_field(peers.id, key) - def set_secret( + def set_value( self, scope: Scopes, key: str, value: typing.Optional[str] ) -> typing.Optional[str]: """Set secret from the secret storage.""" @@ -72,15 +70,15 @@ def set_secret( raise ValueError("Unknown secret scope") if not value: - return self.remove_secret(scope, key) + return self._remove_value(scope, key) - peers = self._charm.model.get_relation(self._peer_relation_name) - self.peer_relation_data(scope).update_relation_data(peers.id, {key: value}) + peers = self._charm.model.get_relation(self._relation_name) + self._peer_relation_data(scope).update_relation_data(peers.id, {key: value}) - def remove_secret(self, scope: Scopes, key: str) -> None: + def _remove_value(self, scope: Scopes, key: str) -> None: """Removing a secret.""" if scope not in typing.get_args(Scopes): raise ValueError("Unknown secret scope") - peers = self._charm.model.get_relation(self._peer_relation_name) - self.peer_relation_data(scope).delete_relation_data(peers.id, [key]) + peers = self._charm.model.get_relation(self._relation_name) + self._peer_relation_data(scope).delete_relation_data(peers.id, [key]) diff --git a/src/relations/tls.py b/src/relations/tls.py new file mode 100644 index 00000000..f94c4c1b --- /dev/null +++ b/src/relations/tls.py @@ -0,0 +1,286 @@ +# 
Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Relation to TLS certificate provider""" + +import base64 +import dataclasses +import json +import logging +import re +import socket +import typing + +import charms.tls_certificates_interface.v2.tls_certificates as tls_certificates +import ops + +import relations.secrets + +if typing.TYPE_CHECKING: + import abstract_charm + +logger = logging.getLogger(__name__) + +_PEER_RELATION_ENDPOINT_NAME = "tls" + +_TLS_REQUESTED_CSR = "tls-requested-csr" +_TLS_ACTIVE_CSR = "tls-active-csr" +_TLS_CERTIFICATE = "tls-certificate" +_TLS_CA = "tls-ca" +_TLS_CHAIN = "tls-chain" +_TLS_PRIVATE_KEY = "tls-private-key" +_TLS_FIELDS = [ + _TLS_REQUESTED_CSR, + _TLS_ACTIVE_CSR, + _TLS_CERTIFICATE, + _TLS_CA, + _TLS_CHAIN, + _TLS_PRIVATE_KEY, +] + + +def _generate_private_key() -> str: + """Generate TLS private key.""" + return tls_certificates.generate_private_key().decode("utf-8") + + +# TODO python3.10 min version: Add `(kw_only=True)` +@dataclasses.dataclass +class _Relation: + """Relation to TLS certificate provider""" + + _charm: "abstract_charm.MySQLRouterCharm" + _interface: tls_certificates.TLSCertificatesRequiresV2 + _secrets: relations.secrets.RelationSecrets + + @property + def certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + for value in ( + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE), + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CA), + ): + if not value: + return False + return True + + @property + def key(self) -> str: + """The TLS private key""" + private_key = self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY) + if not private_key: + private_key = _generate_private_key() + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY, private_key) + return private_key + + @property + def certificate(self) -> str: + """The TLS certificate""" + return self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE) + + @property + def certificate_authority(self) -> str: + """The TLS certificate authority""" + return self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CA) + + def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> None: + """Save TLS certificate in peer relation unit databag.""" + if ( + event.certificate_signing_request.strip() + != self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR).strip() + ): + logger.warning("Unknown certificate received. 
Ignoring.") + return + if ( + self.certificate_saved + and event.certificate_signing_request.strip() + == self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_ACTIVE_CSR) + ): + # Workaround for https://github.com/canonical/tls-certificates-operator/issues/34 + logger.debug("TLS certificate already saved.") + return + logger.debug(f"Saving TLS certificate {event=}") + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE, event.certificate) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CA, event.ca) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CHAIN, json.dumps(event.chain)) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, + _TLS_ACTIVE_CSR, + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR), + ) + logger.debug(f"Saved TLS certificate {event=}") + self._charm.reconcile(event=None) + + def _generate_csr(self, *, event, key: bytes) -> bytes: + """Generate certificate signing request (CSR).""" + sans_ip = ["127.0.0.1"] # needed for the HTTP server when related with COS + if self._charm.is_externally_accessible(event=event): + sans_ip.append(self._charm.host_address) + + return tls_certificates.generate_csr( + private_key=key, + subject=socket.getfqdn(), + organization=self._charm.app.name, + sans_ip=sans_ip, + ) + + def request_certificate_creation(self, *, event): + """Request new TLS certificate from related provider charm.""" + logger.debug("Requesting TLS certificate creation") + csr = self._generate_csr(event=event, key=self.key.encode("utf-8")) + self._interface.request_certificate_creation(certificate_signing_request=csr) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR, csr.decode("utf-8") + ) + logger.debug("Requested TLS certificate creation") + + def request_certificate_renewal(self, *, event): + """Request TLS certificate renewal from related provider charm.""" + logger.debug("Requesting TLS certificate renewal") + old_csr = self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_ACTIVE_CSR).encode( + "utf-8" + ) + new_csr = self._generate_csr(event=event, key=self.key.encode("utf-8")) + self._interface.request_certificate_renewal( + old_certificate_signing_request=old_csr, new_certificate_signing_request=new_csr + ) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR, new_csr.decode("utf-8") + ) + logger.debug("Requested TLS certificate renewal") + + +class RelationEndpoint(ops.Object): + """Relation endpoint and handlers for TLS certificate provider""" + + NAME = "certificates" + + def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: + super().__init__(charm_, self.NAME) + self._charm = charm_ + self._interface = tls_certificates.TLSCertificatesRequiresV2(self._charm, self.NAME) + + self._secrets = relations.secrets.RelationSecrets( + charm_, + _PEER_RELATION_ENDPOINT_NAME, + unit_secret_fields=[_TLS_PRIVATE_KEY], + ) + + self.framework.observe( + self._charm.on["set-tls-private-key"].action, + self._on_set_tls_private_key, + ) + self.framework.observe( + self._charm.on[self.NAME].relation_created, self._on_tls_relation_created + ) + self.framework.observe( + self._charm.on[self.NAME].relation_broken, self._on_tls_relation_broken + ) + + self.framework.observe( + self._interface.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self._interface.on.certificate_expiring, self._on_certificate_expiring + ) + + @property + def _relation(self) -> typing.Optional[_Relation]: + if not 
self._charm.model.get_relation(self.NAME): + return + return _Relation( + _charm=self._charm, + _interface=self._interface, + _secrets=self._secrets, + ) + + @property + def certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + if self._relation is None: + return False + return self._relation.certificate_saved + + @property + def key(self) -> typing.Optional[str]: + """The TLS private key""" + if self._relation is None: + return None + return self._relation.key + + @property + def certificate(self) -> typing.Optional[str]: + """The TLS certificate""" + if self._relation is None: + return None + return self._relation.certificate + + @property + def certificate_authority(self) -> typing.Optional[str]: + """The TLS certificate authority""" + if self._relation is None: + return None + return self._relation.certificate_authority + + @staticmethod + def _parse_tls_key(raw_content: str) -> str: + """Parse TLS key from plain text or base64 format.""" + if re.match(r"(-+(BEGIN|END) [A-Z ]+-+)", raw_content): + return re.sub( + r"(-+(BEGIN|END) [A-Z ]+-+)", + "\n\\1\n", + raw_content, + ) + return base64.b64decode(raw_content).decode("utf-8") + + def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: + """Handle action to set unit TLS private key.""" + logger.debug("Handling set TLS private key action") + if key := event.params.get("internal-key"): + key = self._parse_tls_key(key) + else: + key = _generate_private_key() + event.log("No key provided. Generated new key.") + logger.debug("No TLS key provided via action. Generated new key.") + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY, key) + event.log("Saved TLS private key") + logger.debug("Saved TLS private key") + if self._relation is None: + event.log( + "No TLS certificate relation active. Relate a certificate provider charm to enable TLS." + ) + logger.debug("No TLS certificate relation active. 
Skipped certificate request") + else: + try: + self._relation.request_certificate_creation(event=event) + except Exception as e: + event.fail(f"Failed to request certificate: {e}") + logger.exception( + "Failed to request certificate after TLS private key set via action" + ) + raise + logger.debug("Handled set TLS private key action") + + def _on_tls_relation_created(self, event) -> None: + """Request certificate when TLS relation created.""" + self._relation.request_certificate_creation(event=event) + + def _on_tls_relation_broken(self, _) -> None: + """Delete TLS certificate.""" + logger.debug("Deleting TLS certificate") + for field in _TLS_FIELDS: + self._secrets.set_value(relations.secrets.UNIT_SCOPE, field, None) + self._charm.reconcile(event=None) + logger.debug("Deleted TLS certificate") + + def _on_certificate_available(self, event: tls_certificates.CertificateAvailableEvent) -> None: + """Save TLS certificate.""" + self._relation.save_certificate(event) + + def _on_certificate_expiring(self, event: tls_certificates.CertificateExpiringEvent) -> None: + """Request the new certificate when old certificate is expiring.""" + if event.certificate != self.certificate: + logger.warning("Unknown certificate expiring") + return + + self._relation.request_certificate_renewal(event=event) diff --git a/src/snap.py b/src/snap.py index 8886bfd1..e924fb6d 100644 --- a/src/snap.py +++ b/src/snap.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) _SNAP_NAME = "charmed-mysql" -REVISION = "98" # Keep in sync with `workload_version` file +REVISION = "102" # Keep in sync with `workload_version` file _snap = snap_lib.SnapCache()[_SNAP_NAME] _UNIX_USERNAME = "snap_daemon" @@ -154,11 +154,12 @@ class Snap(container.Container): _SERVICE_NAME = "mysqlrouter-service" _EXPORTER_SERVICE_NAME = "mysqlrouter-exporter" - def __init__(self) -> None: + def __init__(self, *, unit_name: str) -> None: super().__init__( mysql_router_command=f"{_SNAP_NAME}.mysqlrouter", mysql_shell_command=f"{_SNAP_NAME}.mysqlsh", mysql_router_password_command=f"{_SNAP_NAME}.mysqlrouter-passwd", + unit_name=unit_name, ) @property @@ -175,11 +176,19 @@ def mysql_router_exporter_service_enabled(self) -> bool: def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> None: super().update_mysql_router_service(enabled=enabled, tls=tls) + if tls: - raise NotImplementedError # TODO VM TLS + _snap.set({"mysqlrouter.extra-options": f"--extra-config {self.tls_config_file}"}) + else: + _snap.unset("mysqlrouter.extra-options") + + router_is_running = _snap.services[self._SERVICE_NAME]["active"] if enabled: - _snap.start([self._SERVICE_NAME], enable=True) + if router_is_running: + _snap.restart([self._SERVICE_NAME]) + else: + _snap.start([self._SERVICE_NAME], enable=True) else: _snap.stop([self._SERVICE_NAME], disable=True) @@ -193,9 +202,6 @@ def update_mysql_router_exporter_service( certificate_filename: str = None, certificate_authority_filename: str = None, ) -> None: - if tls: - raise NotImplementedError - super().update_mysql_router_exporter_service( enabled=enabled, config=config, @@ -211,14 +217,31 @@ def update_mysql_router_exporter_service( "mysqlrouter-exporter.user": config.username, "mysqlrouter-exporter.password": config.password, "mysqlrouter-exporter.url": config.url, + "mysqlrouter-exporter.service-name": self._unit_name.replace("/", "-"), } ) + if tls: + _snap.set( + { + "mysqlrouter.tls-cacert-path": certificate_authority_filename, + "mysqlrouter.tls-cert-path": certificate_filename, + 
"mysqlrouter.tls-key-path": key_filename, + } + ) + else: + _snap.unset("mysqlrouter.tls-cacert-path") + _snap.unset("mysqlrouter.tls-cert-path") + _snap.unset("mysqlrouter.tls-key-path") _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) else: + _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) _snap.unset("mysqlrouter-exporter.user") _snap.unset("mysqlrouter-exporter.password") _snap.unset("mysqlrouter-exporter.url") - _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) + _snap.unset("mysqlrouter-exporter.service-name") + _snap.unset("mysqlrouter.tls-cacert-path") + _snap.unset("mysqlrouter.tls-cert-path") + _snap.unset("mysqlrouter.tls-key-path") def upgrade(self, unit: ops.Unit) -> None: """Upgrade snap.""" diff --git a/src/workload.py b/src/workload.py index 920c3da4..01818b3e 100644 --- a/src/workload.py +++ b/src/workload.py @@ -76,7 +76,9 @@ def version(self) -> str: return component return "" - def upgrade(self, *, unit: ops.Unit, tls: bool) -> None: + def upgrade( + self, *, event, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: """Upgrade MySQL Router. Only applies to machine charm @@ -99,9 +101,10 @@ def _tls_config_file_data(self) -> str: return config_string @property - def _custom_tls_enabled(self) -> bool: + def _custom_certificate(self) -> typing.Optional[str]: """Whether custom TLS certs are enabled for MySQL Router""" - return self._tls_key_file.exists() and self._tls_certificate_file.exists() + if self._tls_key_file.exists() and self._tls_certificate_file.exists(): + return self._tls_certificate_file.read_text() def cleanup_monitoring_user(self) -> None: """Clean up router REST API user for mysqlrouter exporter.""" @@ -115,8 +118,6 @@ def cleanup_monitoring_user(self) -> None: def _disable_exporter(self) -> None: """Stop and disable MySQL Router exporter service, keeping router enabled.""" - if not self._container.mysql_router_exporter_service_enabled: - return logger.debug("Disabling MySQL Router exporter service") self._container.update_mysql_router_exporter_service(enabled=False) self.cleanup_monitoring_user() @@ -142,9 +143,21 @@ def _disable_tls(self) -> None: file.unlink(missing_ok=True) logger.debug("Deleted TLS files") + def _disable_router(self) -> None: + """Disable router and clean up corresponding router files.""" + logger.debug("Disabling MySQL Router service") + self._container.update_mysql_router_service(enabled=False) + self._logrotate.disable() + self._container.router_config_directory.rmtree() + self._container.router_config_directory.mkdir() + self._router_data_directory.rmtree() + self._router_data_directory.mkdir() + logger.debug("Disabled MySQL Router service") + def reconcile( self, *, + event, tls: bool, unit_name: str, exporter_config: "relations.cos.ExporterConfig", @@ -157,16 +170,11 @@ def reconcile( raise ValueError("`key` and `certificate` arguments required when tls=True") if self._container.mysql_router_service_enabled: - logger.debug("Disabling MySQL Router service") - self._container.update_mysql_router_service(enabled=False) - self._logrotate.disable() - self._container.router_config_directory.rmtree() - self._container.router_config_directory.mkdir() - self._router_data_directory.rmtree() - self._router_data_directory.mkdir() - logger.debug("Disabled MySQL Router service") - - self._disable_exporter() + self._disable_router() + + if self._container.mysql_router_exporter_service_enabled: + self._disable_exporter() + self._disable_tls() @property @@ -246,15 +254,17 @@ def 
_get_bootstrap_command( "--conf-use-gr-notifications", ] - def _bootstrap_router(self, *, tls: bool) -> None: + def _bootstrap_router(self, *, event, tls: bool) -> None: """Bootstrap MySQL Router.""" logger.debug( f"Bootstrapping router {tls=}, {self._connection_info.host=}, {self._connection_info.port=}" ) # Redact password from log - logged_command = self._get_bootstrap_command(self._connection_info.redacted) + logged_command = self._get_bootstrap_command( + event=event, connection_info=self._connection_info.redacted + ) - command = self._get_bootstrap_command(self._connection_info) + command = self._get_bootstrap_command(event=event, connection_info=self._connection_info) try: self._container.run_mysql_router(command, timeout=30) except container.CalledProcessError as e: @@ -304,20 +314,52 @@ def _router_username(self) -> str: """ return self._parse_username_from_config(self._container.router_config_file.read_text()) - def _restart(self, *, tls: bool) -> None: + def _restart(self, *, event, tls: bool) -> None: """Restart MySQL Router to enable or disable TLS.""" logger.debug("Restarting MySQL Router") assert self._container.mysql_router_service_enabled is True self._container.update_mysql_router_service(enabled=True, tls=tls) logger.debug("Restarted MySQL Router") - self._charm.wait_until_mysql_router_ready() + self._charm.wait_until_mysql_router_ready(event=event) # wait_until_mysql_router_ready will set WaitingStatus—override it with current charm # status self._charm.set_status(event=None) + def _enable_router(self, *, event, tls: bool, unit_name: str) -> None: + """Enable router after setting up all the necessary prerequisites.""" + logger.debug("Enabling MySQL Router service") + self._cleanup_after_upgrade_or_potential_container_restart() + # create an empty credentials file, if the file does not exist + self._container.create_router_rest_api_credentials_file() + self._bootstrap_router(event=event, tls=tls) + self.shell.add_attributes_to_mysql_router_user( + username=self._router_username, router_id=self._router_id, unit_name=unit_name + ) + self._container.update_mysql_router_service(enabled=True, tls=tls) + self._logrotate.enable() + logger.debug("Enabled MySQL Router service") + self._charm.wait_until_mysql_router_ready(event=event) + + def _enable_exporter( + self, *, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: + """Enable the mysqlrouter exporter.""" + logger.debug("Enabling MySQL Router exporter service") + self.setup_monitoring_user() + self._container.update_mysql_router_exporter_service( + enabled=True, + config=exporter_config, + tls=tls, + key_filename=str(self._tls_key_file), + certificate_filename=str(self._tls_certificate_file), + certificate_authority_filename=str(self._tls_certificate_authority_file), + ) + logger.debug("Enabled MySQL Router exporter service") + def reconcile( self, *, + event, tls: bool, unit_name: str, exporter_config: "relations.cos.ExporterConfig", @@ -331,49 +373,36 @@ def reconcile( "`key`, `certificate`, and `certificate_authority` arguments required when tls=True" ) - # self._custom_tls_enabled` will change after we enable or disable TLS - tls_was_enabled = self._custom_tls_enabled + # `self._custom_certificate` will change after we enable/disable TLS + custom_certificate = self._custom_certificate if tls: self._enable_tls( key=key, certificate=certificate, certificate_authority=certificate_authority ) - if not tls_was_enabled and self._container.mysql_router_service_enabled: - self._restart(tls=tls) + if 
custom_certificate != certificate and self._container.mysql_router_service_enabled: + self._restart(event=event, tls=tls) else: self._disable_tls() - if tls_was_enabled and self._container.mysql_router_service_enabled: - self._restart(tls=tls) + if custom_certificate and self._container.mysql_router_service_enabled: + self._restart(event=event, tls=tls) # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. # Therefore, if the host or port changes, we do not need to restart MySQL Router. + is_charm_exposed = self._charm.is_externally_accessible(event=event) + socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() + require_rebootstrap = is_charm_exposed == socket_file_exists + if require_rebootstrap: + self._disable_router() + if not self._container.mysql_router_service_enabled: - logger.debug("Enabling MySQL Router service") - self._cleanup_after_upgrade_or_potential_container_restart() - self._container.create_router_rest_api_credentials_file() # create an empty credentials file - self._bootstrap_router(tls=tls) - self.shell.add_attributes_to_mysql_router_user( - username=self._router_username, router_id=self._router_id, unit_name=unit_name - ) - self._container.update_mysql_router_service(enabled=True, tls=tls) - self._logrotate.enable() - logger.debug("Enabled MySQL Router service") - self._charm.wait_until_mysql_router_ready() + self._enable_router(event=event, tls=tls, unit_name=unit_name) if (not self._container.mysql_router_exporter_service_enabled and exporter_config) or ( - self._container.mysql_router_exporter_service_enabled and tls_was_enabled != tls + self._container.mysql_router_exporter_service_enabled + and custom_certificate != certificate ): - logger.debug("Enabling MySQL Router exporter service") - self.setup_monitoring_user() - self._container.update_mysql_router_exporter_service( - enabled=True, - config=exporter_config, - tls=tls, - key_filename=str(self._tls_key_file), - certificate_filename=str(self._tls_certificate_file), - certificate_authority_filename=str(self._tls_certificate_authority_file), - ) - logger.debug("Enabled MySQL Router exporter service") + self._enable_exporter(tls=tls, exporter_config=exporter_config) elif self._container.mysql_router_exporter_service_enabled and not exporter_config: self._disable_exporter() @@ -391,15 +420,22 @@ def status(self) -> typing.Optional[ops.StatusBase]: "Router was manually removed from MySQL ClusterSet. 
Remove & re-deploy unit" ) - def upgrade(self, *, unit: ops.Unit, tls: bool) -> None: + def upgrade( + self, *, event, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: enabled = self._container.mysql_router_service_enabled + exporter_enabled = self._container.mysql_router_exporter_service_enabled + if exporter_enabled: + self._disable_exporter() if enabled: logger.debug("Disabling MySQL Router service before upgrade") - self.disable() - super().upgrade(unit=unit, tls=tls) + self._disable_router() + super().upgrade(unit=unit, tls=tls, exporter_config=exporter_config) if enabled: logger.debug("Re-enabling MySQL Router service after upgrade") - self.enable(tls=tls, unit_name=unit.name) + self._enable_router(event=event, tls=tls, unit_name=unit.name) + if exporter_enabled: + self._enable_exporter(tls=tls, exporter_config=exporter_config) def _wait_until_http_server_authenticates(self) -> None: """Wait until active connection with router HTTP server using monitoring credentials.""" diff --git a/templates/tls.cnf b/templates/tls.cnf new file mode 100644 index 00000000..1f9331a8 --- /dev/null +++ b/templates/tls.cnf @@ -0,0 +1,8 @@ +[DEFAULT] +client_ssl_mode=REQUIRED +client_ssl_key=$tls_ssl_key_file +client_ssl_cert=$tls_ssl_cert_file + +[http_server] +ssl_key=$tls_ssl_key_file +ssl_cert=$tls_ssl_cert_file diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 595ddf87..c2dec5d8 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -3,7 +3,7 @@ import itertools import tempfile -from typing import Dict, List +from typing import Dict, List, Optional from juju.unit import Unit from pytest_operator.plugin import OpsTest @@ -44,11 +44,12 @@ async def get_inserted_data_by_application(unit: Unit) -> str: return result.results.get("data") -async def execute_queries_on_unit( +async def execute_queries_against_unit( unit_address: str, username: str, password: str, queries: List[str], + port: int = 3306, commit: bool = False, ) -> List: """Execute given MySQL queries on a unit. @@ -67,6 +68,7 @@ async def execute_queries_on_unit( "user": username, "password": password, "host": unit_address, + "port": port, "raise_on_warnings": False, } @@ -222,3 +224,21 @@ async def stop_running_flush_mysqlrouter_cronjobs(ops_test: OpsTest, unit_name: with attempt: if await get_process_pid(ops_test, unit_name, "logrotate"): raise Exception("Failed to stop the flush_mysql_logs logrotate process") + + +async def get_tls_certificate_issuer( + ops_test: OpsTest, + unit_name: str, + socket: Optional[str] = None, + host: Optional[str] = None, + port: Optional[int] = None, +) -> str: + connect_args = f"-unix {socket}" if socket else f"-connect {host}:{port}" + get_tls_certificate_issuer_commands = [ + "ssh", + unit_name, + f"openssl s_client -showcerts -starttls mysql {connect_args} < /dev/null | openssl x509 -text | grep Issuer", + ] + return_code, issuer, _ = await ops_test.juju(*get_tls_certificate_issuer_commands) + assert return_code == 0, f"failed to get TLS certificate issuer on {unit_name=}" + return issuer diff --git a/tests/integration/juju_.py b/tests/integration/juju_.py new file mode 100644 index 00000000..261ad920 --- /dev/null +++ b/tests/integration/juju_.py @@ -0,0 +1,9 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +import importlib.metadata + +# libjuju version != juju agent version, but the major version should be identical—which is good +# enough to check for secrets +_libjuju_version = importlib.metadata.version("juju") +is_3_or_higher = int(_libjuju_version.split(".")[0]) >= 3 diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py new file mode 100644 index 00000000..4a3192b1 --- /dev/null +++ b/tests/integration/test_data_integrator.py @@ -0,0 +1,217 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +import asyncio +import logging +import typing + +import pytest +import tenacity +from pytest_operator.plugin import OpsTest + +from . import juju_ +from .helpers import execute_queries_against_unit, get_tls_certificate_issuer + +logger = logging.getLogger(__name__) + +MYSQL_APP_NAME = "mysql" +MYSQL_ROUTER_APP_NAME = "mysqlrouter" +DATA_INTEGRATOR_APP_NAME = "data-integrator" +SLOW_TIMEOUT = 15 * 60 +RETRY_TIMEOUT = 60 +TEST_DATABASE = "testdatabase" +TEST_TABLE = "testtable" + +if juju_.is_3_or_higher: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} + + +async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: + """Helper to get the credentials from the deployed data integrator""" + data_integrator_unit = ops_test.model.applications[DATA_INTEGRATOR_APP_NAME].units[0] + action = await data_integrator_unit.run_action(action_name="get-credentials") + result = await action.wait() + if juju_.is_3_or_higher: + assert result.results["return-code"] == 0 + else: + assert result.results["Code"] == "0" + assert result.results["ok"] == "True" + return result.results["mysql"] + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_external_connectivity_with_data_integrator( + ops_test: OpsTest, mysql_router_charm_series: str +) -> None: + """Test encryption when backend database is using TLS.""" + logger.info("Deploy and relate all applications") + async with ops_test.fast_forward(): + # deploy mysql first + await ops_test.model.deploy( + MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=1 + ) + data_integrator_config = {"database-name": TEST_DATABASE} + + # ROUTER + mysqlrouter_charm = await ops_test.build_charm(".") + + # tls, data-integrator and router + await asyncio.gather( + ops_test.model.deploy( + mysqlrouter_charm, + application_name=MYSQL_ROUTER_APP_NAME, + num_units=None, + series=mysql_router_charm_series, + ), + ops_test.model.deploy( + TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=TLS_CONFIG + ), + ops_test.model.deploy( + DATA_INTEGRATOR_APP_NAME, + application_name=DATA_INTEGRATOR_APP_NAME, + channel="latest/stable", + series=mysql_router_charm_series, + config=data_integrator_config, + ), + ) + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" + ) + await ops_test.model.relate( + f"{DATA_INTEGRATOR_APP_NAME}:mysql", f"{MYSQL_ROUTER_APP_NAME}:database" + ) + + logger.info("Waiting for applications to become active") + # We can safely wait only for test application to be ready, given that it will + # only become active once all the other applications are ready. 
+ await ops_test.model.wait_for_idle( + [DATA_INTEGRATOR_APP_NAME], status="active", timeout=SLOW_TIMEOUT + ) + + credentials = await get_data_integrator_credentials(ops_test) + databases = await execute_queries_against_unit( + credentials["endpoints"].split(",")[0].split(":")[0], + credentials["username"], + credentials["password"], + ["SHOW DATABASES;"], + port=credentials["endpoints"].split(",")[0].split(":")[1], + ) + assert TEST_DATABASE in databases + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsTest) -> None: + """Test data integrator along with TLS operator""" + logger.info("Ensuring no data exists in the test database") + + credentials = await get_data_integrator_credentials(ops_test) + [database_host, database_port] = credentials["endpoints"].split(",")[0].split(":") + mysqlrouter_unit = ops_test.model.applications[MYSQL_ROUTER_APP_NAME].units[0] + + show_tables_sql = [ + f"SHOW TABLES IN {TEST_DATABASE};", + ] + tables = await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + show_tables_sql, + port=database_port, + ) + assert len(tables) == 0, f"Unexpected tables in the {TEST_DATABASE} database" + + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + logger.info(f"Relating mysqlrouter with {TLS_APP_NAME}") + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "CN = Test CA" in issuer + ), f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + + create_table_and_insert_data_sql = [ + f"CREATE TABLE {TEST_DATABASE}.{TEST_TABLE} (id int, primary key(id));", + f"INSERT INTO {TEST_DATABASE}.{TEST_TABLE} VALUES (1), (2);", + ] + await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + create_table_and_insert_data_sql, + port=database_port, + commit=True, + ) + + select_data_sql = [ + f"SELECT * FROM {TEST_DATABASE}.{TEST_TABLE};", + ] + data = await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + select_data_sql, + port=database_port, + ) + assert data == [1, 2], f"Unexpected data in table {TEST_DATABASE}.{TEST_TABLE}" + + logger.info(f"Removing relation between mysqlrouter and {TLS_APP_NAME}") + await ops_test.model.applications[MYSQL_ROUTER_APP_NAME].remove_relation( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + select_data_sql = [ + f"SELECT * FROM {TEST_DATABASE}.{TEST_TABLE};", + ] + data = await execute_queries_against_unit( 
+ database_host, + credentials["username"], + credentials["password"], + select_data_sql, + port=database_port, + ) + assert data == [1, 2], f"Unexpected data in table {TEST_DATABASE}.{TEST_TABLE}" diff --git a/tests/integration/test_database.py b/tests/integration/test_database.py index 86f34a8a..b073a62b 100644 --- a/tests/integration/test_database.py +++ b/tests/integration/test_database.py @@ -9,7 +9,7 @@ from pytest_operator.plugin import OpsTest from .helpers import ( - execute_queries_on_unit, + execute_queries_against_unit, get_inserted_data_by_application, get_server_config_credentials, ) @@ -118,7 +118,7 @@ async def test_database_relation(ops_test: OpsTest, mysql_router_charm_series: s select_inserted_data_sql = ( f"SELECT data FROM `{TEST_DATABASE}`.{TEST_TABLE} WHERE data = '{inserted_data}'", ) - selected_data = await execute_queries_on_unit( + selected_data = await execute_queries_against_unit( mysql_unit_address, server_config_credentials["username"], server_config_credentials["password"], diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 5ab1f672..63d6739c 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -4,12 +4,15 @@ import asyncio import logging -import time import pytest +import tenacity import urllib3 from pytest_operator.plugin import OpsTest +from . import juju_ +from .helpers import get_tls_certificate_issuer + logger = logging.getLogger(__name__) MYSQL_APP_NAME = "mysql" @@ -17,6 +20,14 @@ APPLICATION_APP_NAME = "mysql-test-app" GRAFANA_AGENT_APP_NAME = "grafana-agent" SLOW_TIMEOUT = 25 * 60 +RETRY_TIMEOUT = 3 * 60 + +if juju_.is_3_or_higher: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} @pytest.mark.group(1) @@ -126,26 +137,155 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert ( + jmx_resp.status == 200 + ), "❌ cannot connect to metrics endpoint with relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" + + logger.info("Removing relation between mysqlrouter and grafana agent") + await mysql_router_app.remove_relation( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: + """Test that the exporter endpoint works when related with TLS""" + http = urllib3.PoolManager() + + mysql_router_app = ops_test.model.applications[MYSQL_ROUTER_APP_NAME] + 
mysql_router_unit = mysql_router_app.units[0] + + issuer = await get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + logger.info(f"Deploying {TLS_APP_NAME}") + await ops_test.model.deploy( + TLS_APP_NAME, + application_name=TLS_APP_NAME, + channel="stable", + config=TLS_CONFIG, + series="jammy", + ) + await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) + + logger.info(f"Relating mysqlrouter with {TLS_APP_NAME}") + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + mysql_test_app = ops_test.model.applications[APPLICATION_APP_NAME] + unit_address = await mysql_test_app.units[0].get_public_address() + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" + + logger.info("Relating mysqlrouter with grafana agent") + await ops_test.model.relate( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert ( + jmx_resp.status == 200 + ), "❌ cannot connect to metrics endpoint with relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" - jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") - assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" - assert "mysqlrouter_route_health" in str( - jmx_resp.data - ), "❌ did not find expected metric in response" + issuer = await get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" logger.info("Removing relation between mysqlrouter and grafana agent") await mysql_router_app.remove_relation( f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" - try: - http.request("GET", f"http://{unit_address}:49152/metrics") - except urllib3.exceptions.MaxRetryError as e: - assert ( - "[Errno 111] Connection refused" in e.reason.args[0] - ), "❌ expected connection refused error" - else: - assert False, "❌ can connect to metrics endpoint without relation with cos" + logger.info(f"Removing 
relation between mysqlrouter and {TLS_APP_NAME}") + await mysql_router_app.remove_relation( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index c871f970..27a533af 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -1,89 +1,142 @@ -# Copyright 2023 Canonical Ltd. +# Copyright 2024 Canonical Ltd. # See LICENSE file for licensing details. -# flake8: noqa -# TODO: enable & remove noqa -# import asyncio -# import logging -# -# import pytest -# from pytest_operator.plugin import OpsTest -# -# logger = logging.getLogger(__name__) -# -# MYSQL_APP_NAME = "mysql" -# MYSQL_ROUTER_APP_NAME = "mysqlrouter" -# TEST_APP_NAME = "mysql-test-app" -# TLS_APP_NAME = "tls-certificates-operator" -# SLOW_TIMEOUT = 15 * 60 -# MODEL_CONFIG = {"logging-config": "=INFO;unit=DEBUG"} -# -# -# @pytest.mark.group(1) -# @pytest.mark.abort_on_fail -# async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_series: str) -> None: -# """Test encryption when backend database is using TLS.""" -# # Deploy TLS Certificates operator. -# await ops_test.model.set_config(MODEL_CONFIG) -# logger.info("Deploy and relate all applications") -# async with ops_test.fast_forward(): -# # deploy mysql first -# await ops_test.model.deploy( -# MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=3 -# ) -# tls_config = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} -# -# # ROUTER -# mysqlrouter_charm = await ops_test.build_charm(".") -# -# # tls, test app and router -# await asyncio.gather( -# ops_test.model.deploy( -# mysqlrouter_charm, -# application_name=MYSQL_ROUTER_APP_NAME, -# num_units=None, -# series=mysql_router_charm_series, -# ), -# ops_test.model.deploy( -# TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=tls_config -# ), -# ops_test.model.deploy( -# TEST_APP_NAME, application_name=TEST_APP_NAME, channel="latest/edge" -# ), -# ) -# -# await ops_test.model.relate( -# f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" -# ) -# await ops_test.model.relate( -# f"{TEST_APP_NAME}:database", f"{MYSQL_ROUTER_APP_NAME}:database" -# ) -# -# logger.info("Waiting for applications to become active") -# # We can safely wait only for test application to be ready, given that it will -# # only become active once all the other applications are ready. 
-# await ops_test.model.wait_for_idle([TEST_APP_NAME], status="active", timeout=15 * 60) -# -# -# @pytest.mark.group(1) -# async def test_connected_encryption(ops_test: OpsTest) -> None: -# """Test encryption when backend database is using TLS.""" -# test_app_unit = ops_test.model.applications[TEST_APP_NAME].units[0] -# -# logger.info("Relating TLS with backend database") -# await ops_test.model.relate(TLS_APP_NAME, MYSQL_APP_NAME) -# -# # Wait for hooks start reconfiguring app -# await ops_test.model.block_until( -# lambda: ops_test.model.applications[MYSQL_APP_NAME].status != "active", timeout=4 * 60 -# ) -# await ops_test.model.wait_for_idle(status="active", timeout=15 * 60) -# -# logger.info("Get cipher when TLS is enforced") -# action = await test_app_unit.run_action("get-session-ssl-cipher") -# result = await action.wait() -# -# cipher = result.results["cipher"] -# # this assertion should be true even when TLS is not related to the backend database -# # because by default mysqlrouter will use TLS, unless explicitly disabled, which we never do -# assert cipher == "TLS_AES_256_GCM_SHA384", "Cipher not set" +import asyncio +import logging + +import pytest +import tenacity +from pytest_operator.plugin import OpsTest + +from . import juju_ +from .helpers import get_tls_certificate_issuer + +logger = logging.getLogger(__name__) + +MYSQL_APP_NAME = "mysql" +MYSQL_ROUTER_APP_NAME = "mysqlrouter" +TEST_APP_NAME = "mysql-test-app" +SLOW_TIMEOUT = 15 * 60 +RETRY_TIMEOUT = 60 + +if juju_.is_3_or_higher: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_series: str) -> None: + """Test encryption when backend database is using TLS.""" + logger.info("Deploy and relate all applications") + async with ops_test.fast_forward(): + # deploy mysql first + await ops_test.model.deploy( + MYSQL_APP_NAME, + channel="8.0/edge", + application_name=MYSQL_APP_NAME, + config={"profile": "testing"}, + num_units=1, + ) + + # ROUTER + mysqlrouter_charm = await ops_test.build_charm(".") + + # tls, test app and router + await asyncio.gather( + ops_test.model.deploy( + mysqlrouter_charm, + application_name=MYSQL_ROUTER_APP_NAME, + num_units=None, + series=mysql_router_charm_series, + ), + ops_test.model.deploy( + TLS_APP_NAME, + application_name=TLS_APP_NAME, + channel="stable", + config=TLS_CONFIG, + series="jammy", + ), + ops_test.model.deploy( + TEST_APP_NAME, + application_name=TEST_APP_NAME, + channel="latest/edge", + series=mysql_router_charm_series, + ), + ) + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" + ) + await ops_test.model.relate( + f"{TEST_APP_NAME}:database", f"{MYSQL_ROUTER_APP_NAME}:database" + ) + + logger.info("Waiting for applications to become active") + # We can safely wait only for test application to be ready, given that it will + # only become active once all the other applications are ready. 
+ await ops_test.model.wait_for_idle([TEST_APP_NAME], status="active", timeout=SLOW_TIMEOUT) + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_connected_encryption(ops_test: OpsTest) -> None: + """Test encryption when backend database is using TLS.""" + mysqlrouter_unit = ops_test.model.applications[MYSQL_ROUTER_APP_NAME].units[0] + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + logger.info("Relating TLS with mysqlrouter") + await ops_test.model.relate(TLS_APP_NAME, MYSQL_ROUTER_APP_NAME) + + logger.info("Getting certificate issuer after relating with tls operator") + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "CN = Test CA" in issuer + ), f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + + logger.info("Removing relation TLS with mysqlrouter") + await ops_test.model.applications[MYSQL_ROUTER_APP_NAME].remove_relation( + f"{TLS_APP_NAME}:certificates", f"{MYSQL_ROUTER_APP_NAME}:certificates" + ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated CA certificate" diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index e0b10f49..8da3e362 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -3,8 +3,8 @@ from unittest.mock import PropertyMock +import ops import pytest -from ops import JujuVersion from pytest_mock import MockerFixture import snap @@ -33,7 +33,7 @@ def disable_tenacity_retry(monkeypatch): @pytest.fixture(autouse=True) def patch(monkeypatch): monkeypatch.setattr( - "abstract_charm.MySQLRouterCharm.wait_until_mysql_router_ready", + "machine_charm.MachineSubordinateRouterCharm.wait_until_mysql_router_ready", lambda *args, **kwargs: None, ) monkeypatch.setattr("workload.AuthenticatedWorkload._router_username", "") @@ -77,8 +77,10 @@ def start(self, services: list[str] = None, *_, **__): for service in services: assert service in ("mysqlrouter-service", "mysqlrouter-exporter") - self.services["mysqlrouter-service"]["active"] = "mysqlrouter-service" in services - self.services["mysqlrouter-exporter"]["active"] = "mysqlrouter-exporter" in services + if "mysqlrouter-service" in services: + self.services["mysqlrouter-service"]["active"] = True + if "mysqlrouter-exporter" in services: + self.services["mysqlrouter-exporter"]["active"] = True def stop(self, services: list[str] = None, *_, **__): for service in services: @@ -89,6 +91,12 @@ def stop(self, services: list[str] = None, *_, **__): if "mysqlrouter-exporter" in services: self.services["mysqlrouter-exporter"]["active"] = False + def 
restart(self, services: list[str] = []): + if "mysqlrouter-service" in services: + self.services["mysqlrouter-service"]["active"] = True + if "mysqlrouter-exporter" in services: + self.services["mysqlrouter-exporter"]["active"] = True + monkeypatch.setattr(snap, "_snap", Snap()) monkeypatch.setattr( @@ -100,6 +108,30 @@ def stop(self, services: list[str] = None, *_, **__): monkeypatch.setattr("snap._Path.mkdir", lambda *args, **kwargs: None) monkeypatch.setattr("snap._Path.rmtree", lambda *args, **kwargs: None) + def _network_get(*args, **kwargs) -> dict: + """Patch for the not-yet-implemented testing backend needed for `bind_address`. + + This can be used for cases such as: + self.model.get_binding(event.relation).network.bind_address + Will always return '10.1.157.116' + """ + return ops.model.Network( + { + "bind-addresses": [ + { + "mac-address": "", + "interface-name": "", + "addresses": [{"hostname": "", "value": "10.1.157.116", "cidr": ""}], + } + ], + "bind-address": "10.1.157.116", + "egress-subnets": ["10.152.183.65/32"], + "ingress-addresses": ["10.152.183.65"], + } + ) + + monkeypatch.setattr("ops.model.Binding._network_get", _network_get) + @pytest.fixture(autouse=True, params=["juju2", "juju3"]) def juju_has_secrets(mocker: MockerFixture, request): @@ -110,11 +142,11 @@ def juju_has_secrets(mocker: MockerFixture, request): """ if request.param == "juju3": mocker.patch.object( - JujuVersion, "has_secrets", new_callable=PropertyMock + ops.JujuVersion, "has_secrets", new_callable=PropertyMock ).return_value = False return False else: mocker.patch.object( - JujuVersion, "has_secrets", new_callable=PropertyMock + ops.JujuVersion, "has_secrets", new_callable=PropertyMock ).return_value = True return True
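A minimal sketch of the `internal-key` handling added above: the new `set-tls-private-key` action accepts the private key either as PEM text (possibly collapsed onto a single line) or as a base64-encoded PEM blob, and `RelationEndpoint._parse_tls_key` normalizes both forms before the key is stored in the unit secret. The standalone Python below is an illustration only, not part of this patch; the function name and the sample key material under `__main__` are made up for the demo.

# Sketch only: approximates the normalization done by RelationEndpoint._parse_tls_key
# in the new TLS relation module (this snippet is not part of the patch).
import base64
import re


def parse_tls_key(raw_content: str) -> str:
    """Return PEM text from plain-text or base64-encoded input."""
    if re.match(r"(-+(BEGIN|END) [A-Z ]+-+)", raw_content):
        # Key was supplied as PEM text; re-insert newlines around the
        # BEGIN/END markers in case it was passed on a single line.
        return re.sub(r"(-+(BEGIN|END) [A-Z ]+-+)", "\n\\1\n", raw_content)
    # Otherwise assume the whole payload is base64-encoded PEM.
    return base64.b64decode(raw_content).decode("utf-8")


if __name__ == "__main__":
    pem_one_line = "-----BEGIN RSA PRIVATE KEY-----MIIB...-----END RSA PRIVATE KEY-----"
    assert parse_tls_key(pem_one_line).startswith("\n-----BEGIN RSA PRIVATE KEY-----\n")
    encoded = base64.b64encode(pem_one_line.encode("utf-8")).decode("utf-8")
    assert "BEGIN RSA PRIVATE KEY" in parse_tls_key(encoded)

Either input form works when running the action; base64 is the safer choice for multi-line keys, since it sidesteps shell quoting of newlines.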