From 0e93ef557607dde3a7061b97a20f7989ceb1e65c Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 29 Mar 2024 09:14:48 -0400 Subject: [PATCH 01/31] WIP: Expose read-write and read-only endpoints when related to data-integrator --- charmcraft.yaml | 6 + .../v2/tls_certificates.py | 1959 +++++++++++++++++ metadata.yaml | 4 + poetry.lock | 314 ++- pyproject.toml | 3 + src/abstract_charm.py | 3 + src/machine_charm.py | 16 + src/relations/database_providers_wrapper.py | 8 + src/relations/database_provides.py | 17 + src/socket_workload.py | 3 +- 10 files changed, 2302 insertions(+), 31 deletions(-) create mode 100644 lib/charms/tls_certificates_interface/v2/tls_certificates.py diff --git a/charmcraft.yaml b/charmcraft.yaml index 33dd2864..47fab5e7 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,6 +27,12 @@ parts: echo 'ERROR: Use "tox run -e build" instead of calling "charmcraft pack" directly' >&2 exit 1 fi + build-packages: + - libffi-dev + - libssl-dev + - pkg-config + - rustc + - cargo charm-entrypoint: src/machine_charm.py prime: - charm_version diff --git a/lib/charms/tls_certificates_interface/v2/tls_certificates.py b/lib/charms/tls_certificates_interface/v2/tls_certificates.py new file mode 100644 index 00000000..9f67833b --- /dev/null +++ b/lib/charms/tls_certificates_interface/v2/tls_certificates.py @@ -0,0 +1,1959 @@ +# Copyright 2021 Canonical Ltd. +# See LICENSE file for licensing details. + + +"""Library for the tls-certificates relation. + +This library contains the Requires and Provides classes for handling the tls-certificates +interface. + +## Getting Started +From a charm directory, fetch the library using `charmcraft`: + +```shell +charmcraft fetch-lib charms.tls_certificates_interface.v2.tls_certificates +``` + +Add the following libraries to the charm's `requirements.txt` file: +- jsonschema +- cryptography + +Add the following section to the charm's `charmcraft.yaml` file: +```yaml +parts: + charm: + build-packages: + - libffi-dev + - libssl-dev + - rustc + - cargo +``` + +### Provider charm +The provider charm is the charm providing certificates to another charm that requires them. In +this example, the provider charm is storing its private key using a peer relation interface called +`replicas`. 
+ +Example: +```python +from charms.tls_certificates_interface.v2.tls_certificates import ( + CertificateCreationRequestEvent, + CertificateRevocationRequestEvent, + TLSCertificatesProvidesV2, + generate_private_key, +) +from ops.charm import CharmBase, InstallEvent +from ops.main import main +from ops.model import ActiveStatus, WaitingStatus + + +def generate_ca(private_key: bytes, subject: str) -> str: + return "whatever ca content" + + +def generate_certificate(ca: str, private_key: str, csr: str) -> str: + return "Whatever certificate" + + +class ExampleProviderCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.certificates = TLSCertificatesProvidesV2(self, "certificates") + self.framework.observe( + self.certificates.on.certificate_request, + self._on_certificate_request + ) + self.framework.observe( + self.certificates.on.certificate_revocation_request, + self._on_certificate_revocation_request + ) + self.framework.observe(self.on.install, self._on_install) + + def _on_install(self, event: InstallEvent) -> None: + private_key_password = b"banana" + private_key = generate_private_key(password=private_key_password) + ca_certificate = generate_ca(private_key=private_key, subject="whatever") + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update( + { + "private_key_password": "banana", + "private_key": private_key, + "ca_certificate": ca_certificate, + } + ) + self.unit.status = ActiveStatus() + + def _on_certificate_request(self, event: CertificateCreationRequestEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + ca_certificate = replicas_relation.data[self.app].get("ca_certificate") + private_key = replicas_relation.data[self.app].get("private_key") + certificate = generate_certificate( + ca=ca_certificate, + private_key=private_key, + csr=event.certificate_signing_request, + ) + + self.certificates.set_relation_certificate( + certificate=certificate, + certificate_signing_request=event.certificate_signing_request, + ca=ca_certificate, + chain=[ca_certificate, certificate], + relation_id=event.relation_id, + ) + + def _on_certificate_revocation_request(self, event: CertificateRevocationRequestEvent) -> None: + # Do what you want to do with this information + pass + + +if __name__ == "__main__": + main(ExampleProviderCharm) +``` + +### Requirer charm +The requirer charm is the charm requiring certificates from another charm that provides them. In +this example, the requirer charm is storing its certificates using a peer relation interface called +`replicas`. 
+ +Example: +```python +from charms.tls_certificates_interface.v2.tls_certificates import ( + CertificateAvailableEvent, + CertificateExpiringEvent, + CertificateRevokedEvent, + TLSCertificatesRequiresV2, + generate_csr, + generate_private_key, +) +from ops.charm import CharmBase, RelationJoinedEvent +from ops.main import main +from ops.model import ActiveStatus, WaitingStatus +from typing import Union + + +class ExampleRequirerCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.cert_subject = "whatever" + self.certificates = TLSCertificatesRequiresV2(self, "certificates") + self.framework.observe(self.on.install, self._on_install) + self.framework.observe( + self.on.certificates_relation_joined, self._on_certificates_relation_joined + ) + self.framework.observe( + self.certificates.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self.certificates.on.certificate_expiring, self._on_certificate_expiring + ) + self.framework.observe( + self.certificates.on.certificate_invalidated, self._on_certificate_invalidated + ) + self.framework.observe( + self.certificates.on.all_certificates_invalidated, + self._on_all_certificates_invalidated + ) + + def _on_install(self, event) -> None: + private_key_password = b"banana" + private_key = generate_private_key(password=private_key_password) + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update( + {"private_key_password": "banana", "private_key": private_key.decode()} + ) + + def _on_certificates_relation_joined(self, event: RelationJoinedEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + subject=self.cert_subject, + ) + replicas_relation.data[self.app].update({"csr": csr.decode()}) + self.certificates.request_certificate_creation(certificate_signing_request=csr) + + def _on_certificate_available(self, event: CertificateAvailableEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + replicas_relation.data[self.app].update({"certificate": event.certificate}) + replicas_relation.data[self.app].update({"ca": event.ca}) + replicas_relation.data[self.app].update({"chain": event.chain}) + self.unit.status = ActiveStatus() + + def _on_certificate_expiring( + self, event: Union[CertificateExpiringEvent, CertificateInvalidatedEvent] + ) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + old_csr = replicas_relation.data[self.app].get("csr") + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + new_csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + 
subject=self.cert_subject, + ) + self.certificates.request_certificate_renewal( + old_certificate_signing_request=old_csr, + new_certificate_signing_request=new_csr, + ) + replicas_relation.data[self.app].update({"csr": new_csr.decode()}) + + def _certificate_revoked(self) -> None: + old_csr = replicas_relation.data[self.app].get("csr") + private_key_password = replicas_relation.data[self.app].get("private_key_password") + private_key = replicas_relation.data[self.app].get("private_key") + new_csr = generate_csr( + private_key=private_key.encode(), + private_key_password=private_key_password.encode(), + subject=self.cert_subject, + ) + self.certificates.request_certificate_renewal( + old_certificate_signing_request=old_csr, + new_certificate_signing_request=new_csr, + ) + replicas_relation.data[self.app].update({"csr": new_csr.decode()}) + replicas_relation.data[self.app].pop("certificate") + replicas_relation.data[self.app].pop("ca") + replicas_relation.data[self.app].pop("chain") + self.unit.status = WaitingStatus("Waiting for new certificate") + + def _on_certificate_invalidated(self, event: CertificateInvalidatedEvent) -> None: + replicas_relation = self.model.get_relation("replicas") + if not replicas_relation: + self.unit.status = WaitingStatus("Waiting for peer relation to be created") + event.defer() + return + if event.reason == "revoked": + self._certificate_revoked() + if event.reason == "expired": + self._on_certificate_expiring(event) + + def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEvent) -> None: + # Do what you want with this information, probably remove all certificates. + pass + + +if __name__ == "__main__": + main(ExampleRequirerCharm) +``` + +You can relate both charms by running: + +```bash +juju relate +``` + +""" # noqa: D405, D410, D411, D214, D416 + +import copy +import json +import logging +import uuid +from contextlib import suppress +from datetime import datetime, timedelta, timezone +from ipaddress import IPv4Address +from typing import Any, Dict, List, Literal, Optional, Union + +from cryptography import x509 +from cryptography.hazmat._oid import ExtensionOID +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.serialization import pkcs12 +from jsonschema import exceptions, validate +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationChangedEvent, + SecretExpiredEvent, + UpdateStatusEvent, +) +from ops.framework import EventBase, EventSource, Handle, Object +from ops.jujuversion import JujuVersion +from ops.model import ModelError, Relation, RelationDataContent, SecretNotFoundError + +# The unique Charmhub library identifier, never change it +LIBID = "afd8c2bccf834997afce12c2706d2ede" + +# Increment this major API version when introducing breaking changes +LIBAPI = 2 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 28 + +PYDEPS = ["cryptography", "jsonschema"] + +REQUIRER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/tls_certificates/v1/schemas/requirer.json", + "type": "object", + "title": "`tls_certificates` requirer root schema", + "description": "The `tls_certificates` root schema comprises the entire requirer databag for this interface.", # noqa: E501 + "examples": [ + { + 
"certificate_signing_requests": [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\\n-----END CERTIFICATE REQUEST-----\\n" # noqa: E501 + }, + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\\nAQEBBQADggEPADCCAQoCggEBAMk3raaX803cHvzlBF9LC7KORT46z4VjyU5PIaMb\\nQLIDgYKFYI0n5hf2Ra4FAHvOvEmW7bjNlHORFEmvnpcU5kPMNUyKFMTaC8LGmN8z\\nUBH3aK+0+FRvY4afn9tgj5435WqOG9QdoDJ0TJkjJbJI9M70UOgL711oU7ql6HxU\\n4d2ydFK9xAHrBwziNHgNZ72L95s4gLTXf0fAHYf15mDA9U5yc+YDubCKgTXzVySQ\\nUx73VCJLfC/XkZIh559IrnRv5G9fu6BMLEuBwAz6QAO4+/XidbKWN4r2XSq5qX4n\\n6EPQQWP8/nd4myq1kbg6Q8w68L/0YdfjCmbyf2TuoWeImdUCAwEAAaAAMA0GCSqG\\nSIb3DQEBCwUAA4IBAQBIdwraBvpYo/rl5MH1+1Um6HRg4gOdQPY5WcJy9B9tgzJz\\nittRSlRGTnhyIo6fHgq9KHrmUthNe8mMTDailKFeaqkVNVvk7l0d1/B90Kz6OfmD\\nxN0qjW53oP7y3QB5FFBM8DjqjmUnz5UePKoX4AKkDyrKWxMwGX5RoET8c/y0y9jp\\nvSq3Wh5UpaZdWbe1oVY8CqMVUEVQL2DPjtopxXFz2qACwsXkQZxWmjvZnRiP8nP8\\nbdFaEuh9Q6rZ2QdZDEtrU4AodPU3NaukFr5KlTUQt3w/cl+5//zils6G5zUWJ2pN\\ng7+t9PTvXHRkH+LnwaVnmsBFU2e05qADQbfIn7JA\\n-----END CERTIFICATE REQUEST-----\\n" # noqa: E501 + }, + ] + } + ], + "properties": { + "certificate_signing_requests": { + "type": "array", + "items": { + "type": "object", + "properties": { + "certificate_signing_request": {"type": "string"}, + "ca": {"type": "boolean"}, + }, + "required": ["certificate_signing_request"], + }, + } + }, + "required": ["certificate_signing_requests"], + "additionalProperties": True, +} + +PROVIDER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/tls_certificates/v1/schemas/provider.json", + "type": "object", + "title": "`tls_certificates` provider root schema", + "description": "The `tls_certificates` root schema comprises the entire provider databag for this interface.", # noqa: E501 + "examples": [ + { + "certificates": [ + { + "ca": "-----BEGIN 
CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n", # noqa: E501 + "chain": [ + "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n" # noqa: E501, W505 + ], + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\n-----END CERTIFICATE REQUEST-----\n", # noqa: E501 + "certificate": "-----BEGIN 
CERTIFICATE-----\nMIICvDCCAaQCFFPAOD7utDTsgFrm0vS4We18OcnKMA0GCSqGSIb3DQEBCwUAMCAx\nCzAJBgNVBAYTAlVTMREwDwYDVQQDDAh3aGF0ZXZlcjAeFw0yMjA3MjkyMTE5Mzha\nFw0yMzA3MjkyMTE5MzhaMBUxEzARBgNVBAMMCmJhbmFuYS5jb20wggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVpcfcBOnFuyZG+A2WQzmaBI5NXgwTCfvE\neKciqRQXhzJdUkEg7eqwFrK3y9yjhoiB6q0WNAeR+nOdS/Cw7layRtGz5skOq7Aa\nN4FZHg0or30i7Rrx7afJcGJyLpxfK/OfLmJm5QEdLXV0DZp0L5vuhhEb1EUOrMaY\nGe4iwqTyg6D7fuBili9dBVn9IvNhYMVgtiqkWVLTW4ChE0LgES4oO3rQZgp4dtM5\nsp6KwHGO766UzwGnkKRizaqmLylfVusllWNPFfp6gEaxa45N70oqGUrvGSVHWeHf\nfvkhpWx+wOnu+2A5F/Yv3UNz2v4g7Vjt7V0tjL4KMV9YklpRjTh3AgMBAAEwDQYJ\nKoZIhvcNAQELBQADggEBAChjRzuba8zjQ7NYBVas89Oy7u++MlS8xWxh++yiUsV6\nWMk3ZemsPtXc1YmXorIQohtxLxzUPm2JhyzFzU/sOLmJQ1E/l+gtZHyRCwsb20fX\nmphuJsMVd7qv/GwEk9PBsk2uDqg4/Wix0Rx5lf95juJP7CPXQJl5FQauf3+LSz0y\nwF/j+4GqvrwsWr9hKOLmPdkyKkR6bHKtzzsxL9PM8GnElk2OpaPMMnzbL/vt2IAt\nxK01ZzPxCQCzVwHo5IJO5NR/fIyFbEPhxzG17QsRDOBR9fl9cOIvDeSO04vyZ+nz\n+kA2c3fNrZFAtpIlOOmFh8Q12rVL4sAjI5mVWnNEgvI=\n-----END CERTIFICATE-----\n", # noqa: E501 + } + ] + }, + { + "certificates": [ + { + "ca": "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n", # noqa: E501 + "chain": [ + "-----BEGIN CERTIFICATE-----\\nMIIDJTCCAg2gAwIBAgIUMsSK+4FGCjW6sL/EXMSxColmKw8wDQYJKoZIhvcNAQEL\\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIyMDcyOTIx\\nMTgyN1oXDTIzMDcyOTIxMTgyN1owIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdo\\nYXRldmVyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA55N9DkgFWbJ/\\naqcdQhso7n1kFvt6j/fL1tJBvRubkiFMQJnZFtekfalN6FfRtA3jq+nx8o49e+7t\\nLCKT0xQ+wufXfOnxv6/if6HMhHTiCNPOCeztUgQ2+dfNwRhYYgB1P93wkUVjwudK\\n13qHTTZ6NtEF6EzOqhOCe6zxq6wrr422+ZqCvcggeQ5tW9xSd/8O1vNID/0MTKpy\\nET3drDtBfHmiUEIBR3T3tcy6QsIe4Rz/2sDinAcM3j7sG8uY6drh8jY3PWar9til\\nv2l4qDYSU8Qm5856AB1FVZRLRJkLxZYZNgreShAIYgEd0mcyI2EO/UvKxsIcxsXc\\nd45GhGpKkwIDAQABo1cwVTAfBgNVHQ4EGAQWBBRXBrXKh3p/aFdQjUcT/UcvICBL\\nODAhBgNVHSMEGjAYgBYEFFcGtcqHen9oV1CNRxP9Ry8gIEs4MA8GA1UdEwEB/wQF\\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGmCEvcoFUrT9e133SHkgF/ZAgzeIziO\\nBjfAdU4fvAVTVfzaPm0yBnGqzcHyacCzbZjKQpaKVgc5e6IaqAQtf6cZJSCiJGhS\\nJYeosWrj3dahLOUAMrXRr8G/Ybcacoqc+osKaRa2p71cC3V6u2VvcHRV7HDFGJU7\\noijbdB+WhqET6Txe67rxZCJG9Ez3EOejBJBl2PJPpy7m1Ml4RR+E8YHNzB0lcBzc\\nEoiJKlDfKSO14E2CPDonnUoWBJWjEvJys3tbvKzsRj2fnLilytPFU0gH3cEjCopi\\nzFoWRdaRuNHYCqlBmso1JFDl8h4fMmglxGNKnKRar0WeGyxb4xXBGpI=\\n-----END CERTIFICATE-----\\n" # noqa: E501, W505 + ], + "certificate_signing_request": 
"-----BEGIN CERTIFICATE REQUEST-----\nMIICWjCCAUICAQAwFTETMBEGA1UEAwwKYmFuYW5hLmNvbTCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANWlx9wE6cW7Jkb4DZZDOZoEjk1eDBMJ+8R4pyKp\nFBeHMl1SQSDt6rAWsrfL3KOGiIHqrRY0B5H6c51L8LDuVrJG0bPmyQ6rsBo3gVke\nDSivfSLtGvHtp8lwYnIunF8r858uYmblAR0tdXQNmnQvm+6GERvURQ6sxpgZ7iLC\npPKDoPt+4GKWL10FWf0i82FgxWC2KqRZUtNbgKETQuARLig7etBmCnh20zmynorA\ncY7vrpTPAaeQpGLNqqYvKV9W6yWVY08V+nqARrFrjk3vSioZSu8ZJUdZ4d9++SGl\nbH7A6e77YDkX9i/dQ3Pa/iDtWO3tXS2MvgoxX1iSWlGNOHcCAwEAAaAAMA0GCSqG\nSIb3DQEBCwUAA4IBAQCW1fKcHessy/ZhnIwAtSLznZeZNH8LTVOzkhVd4HA7EJW+\nKVLBx8DnN7L3V2/uPJfHiOg4Rx7fi7LkJPegl3SCqJZ0N5bQS/KvDTCyLG+9E8Y+\n7wqCmWiXaH1devimXZvazilu4IC2dSks2D8DPWHgsOdVks9bme8J3KjdNMQudegc\newWZZ1Dtbd+Rn7cpKU3jURMwm4fRwGxbJ7iT5fkLlPBlyM/yFEik4SmQxFYrZCQg\n0f3v4kBefTh5yclPy5tEH+8G0LMsbbo3dJ5mPKpAShi0QEKDLd7eR1R/712lYTK4\ndi4XaEfqERgy68O4rvb4PGlJeRGS7AmL7Ss8wfAq\n-----END CERTIFICATE REQUEST-----\n", # noqa: E501 + "certificate": "-----BEGIN CERTIFICATE-----\nMIICvDCCAaQCFFPAOD7utDTsgFrm0vS4We18OcnKMA0GCSqGSIb3DQEBCwUAMCAx\nCzAJBgNVBAYTAlVTMREwDwYDVQQDDAh3aGF0ZXZlcjAeFw0yMjA3MjkyMTE5Mzha\nFw0yMzA3MjkyMTE5MzhaMBUxEzARBgNVBAMMCmJhbmFuYS5jb20wggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVpcfcBOnFuyZG+A2WQzmaBI5NXgwTCfvE\neKciqRQXhzJdUkEg7eqwFrK3y9yjhoiB6q0WNAeR+nOdS/Cw7layRtGz5skOq7Aa\nN4FZHg0or30i7Rrx7afJcGJyLpxfK/OfLmJm5QEdLXV0DZp0L5vuhhEb1EUOrMaY\nGe4iwqTyg6D7fuBili9dBVn9IvNhYMVgtiqkWVLTW4ChE0LgES4oO3rQZgp4dtM5\nsp6KwHGO766UzwGnkKRizaqmLylfVusllWNPFfp6gEaxa45N70oqGUrvGSVHWeHf\nfvkhpWx+wOnu+2A5F/Yv3UNz2v4g7Vjt7V0tjL4KMV9YklpRjTh3AgMBAAEwDQYJ\nKoZIhvcNAQELBQADggEBAChjRzuba8zjQ7NYBVas89Oy7u++MlS8xWxh++yiUsV6\nWMk3ZemsPtXc1YmXorIQohtxLxzUPm2JhyzFzU/sOLmJQ1E/l+gtZHyRCwsb20fX\nmphuJsMVd7qv/GwEk9PBsk2uDqg4/Wix0Rx5lf95juJP7CPXQJl5FQauf3+LSz0y\nwF/j+4GqvrwsWr9hKOLmPdkyKkR6bHKtzzsxL9PM8GnElk2OpaPMMnzbL/vt2IAt\nxK01ZzPxCQCzVwHo5IJO5NR/fIyFbEPhxzG17QsRDOBR9fl9cOIvDeSO04vyZ+nz\n+kA2c3fNrZFAtpIlOOmFh8Q12rVL4sAjI5mVWnNEgvI=\n-----END CERTIFICATE-----\n", # noqa: E501 + "revoked": True, + } + ] + }, + ], + "properties": { + "certificates": { + "$id": "#/properties/certificates", + "type": "array", + "items": { + "$id": "#/properties/certificates/items", + "type": "object", + "required": ["certificate_signing_request", "certificate", "ca", "chain"], + "properties": { + "certificate_signing_request": { + "$id": "#/properties/certificates/items/certificate_signing_request", + "type": "string", + }, + "certificate": { + "$id": "#/properties/certificates/items/certificate", + "type": "string", + }, + "ca": {"$id": "#/properties/certificates/items/ca", "type": "string"}, + "chain": { + "$id": "#/properties/certificates/items/chain", + "type": "array", + "items": { + "type": "string", + "$id": "#/properties/certificates/items/chain/items", + }, + }, + "revoked": { + "$id": "#/properties/certificates/items/revoked", + "type": "boolean", + }, + }, + "additionalProperties": True, + }, + } + }, + "required": ["certificates"], + "additionalProperties": True, +} + + +logger = logging.getLogger(__name__) + + +class CertificateAvailableEvent(EventBase): + """Charm Event triggered when a TLS certificate is available.""" + + def __init__( + self, + handle: Handle, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ): + super().__init__(handle) + self.certificate = certificate + self.certificate_signing_request = certificate_signing_request + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate": self.certificate, + 
"certificate_signing_request": self.certificate_signing_request, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +class CertificateExpiringEvent(EventBase): + """Charm Event triggered when a TLS certificate is almost expired.""" + + def __init__(self, handle, certificate: str, expiry: str): + """CertificateExpiringEvent. + + Args: + handle (Handle): Juju framework handle + certificate (str): TLS Certificate + expiry (str): Datetime string representing the time at which the certificate + won't be valid anymore. + """ + super().__init__(handle) + self.certificate = certificate + self.expiry = expiry + + def snapshot(self) -> dict: + """Return snapshot.""" + return {"certificate": self.certificate, "expiry": self.expiry} + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.expiry = snapshot["expiry"] + + +class CertificateInvalidatedEvent(EventBase): + """Charm Event triggered when a TLS certificate is invalidated.""" + + def __init__( + self, + handle: Handle, + reason: Literal["expired", "revoked"], + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ): + super().__init__(handle) + self.reason = reason + self.certificate_signing_request = certificate_signing_request + self.certificate = certificate + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "reason": self.reason, + "certificate_signing_request": self.certificate_signing_request, + "certificate": self.certificate, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.reason = snapshot["reason"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.certificate = snapshot["certificate"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +class AllCertificatesInvalidatedEvent(EventBase): + """Charm Event triggered when all TLS certificates are invalidated.""" + + def __init__(self, handle: Handle): + super().__init__(handle) + + def snapshot(self) -> dict: + """Return snapshot.""" + return {} + + def restore(self, snapshot: dict): + """Restore snapshot.""" + pass + + +class CertificateCreationRequestEvent(EventBase): + """Charm Event triggered when a TLS certificate is required.""" + + def __init__( + self, + handle: Handle, + certificate_signing_request: str, + relation_id: int, + is_ca: bool = False, + ): + super().__init__(handle) + self.certificate_signing_request = certificate_signing_request + self.relation_id = relation_id + self.is_ca = is_ca + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate_signing_request": self.certificate_signing_request, + "relation_id": self.relation_id, + "is_ca": self.is_ca, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.relation_id = snapshot["relation_id"] + self.is_ca = snapshot["is_ca"] + + +class CertificateRevocationRequestEvent(EventBase): + """Charm Event triggered when a TLS certificate needs to be revoked.""" + + def __init__( + self, + handle: Handle, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: str, + ): + 
super().__init__(handle) + self.certificate = certificate + self.certificate_signing_request = certificate_signing_request + self.ca = ca + self.chain = chain + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate": self.certificate, + "certificate_signing_request": self.certificate_signing_request, + "ca": self.ca, + "chain": self.chain, + } + + def restore(self, snapshot: dict): + """Restore snapshot.""" + self.certificate = snapshot["certificate"] + self.certificate_signing_request = snapshot["certificate_signing_request"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + + +def _load_relation_data(relation_data_content: RelationDataContent) -> dict: + """Load relation data from the relation data bag. + + Json loads all data. + + Args: + relation_data_content: Relation data from the databag + + Returns: + dict: Relation data in dict format. + """ + certificate_data = {} + try: + for key in relation_data_content: + try: + certificate_data[key] = json.loads(relation_data_content[key]) + except (json.decoder.JSONDecodeError, TypeError): + certificate_data[key] = relation_data_content[key] + except ModelError: + pass + return certificate_data + + +def _get_closest_future_time( + expiry_notification_time: datetime, expiry_time: datetime +) -> datetime: + """Return expiry_notification_time if not in the past, otherwise return expiry_time. + + Args: + expiry_notification_time (datetime): Notification time of impending expiration + expiry_time (datetime): Expiration time + + Returns: + datetime: expiry_notification_time if not in the past, expiry_time otherwise + """ + return ( + expiry_notification_time + if datetime.now(timezone.utc) < expiry_notification_time + else expiry_time + ) + + +def _get_certificate_expiry_time(certificate: str) -> Optional[datetime]: + """Extract expiry time from a certificate string. + + Args: + certificate (str): x509 certificate as a string + + Returns: + Optional[datetime]: Expiry datetime or None + """ + try: + certificate_object = x509.load_pem_x509_certificate(data=certificate.encode()) + return certificate_object.not_valid_after_utc + except ValueError: + logger.warning("Could not load certificate.") + return None + + +def generate_ca( + private_key: bytes, + subject: str, + private_key_password: Optional[bytes] = None, + validity: int = 365, + country: str = "US", +) -> bytes: + """Generate a CA Certificate. + + Args: + private_key (bytes): Private key + subject (str): Common Name that can be an IP or a Full Qualified Domain Name (FQDN). + private_key_password (bytes): Private key password + validity (int): Certificate validity time (in days) + country (str): Certificate Issuing country + + Returns: + bytes: CA Certificate. 
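+
+    Example:
+        Illustrative usage combining ``generate_private_key`` and ``generate_ca``;
+        the password and subject below are placeholder values:
+
+            ca_key = generate_private_key(password=b"banana")
+            ca_certificate = generate_ca(
+                private_key=ca_key,
+                private_key_password=b"banana",
+                subject="example-internal-ca",
+            )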
+ """ + private_key_object = serialization.load_pem_private_key( + private_key, password=private_key_password + ) + subject_name = x509.Name( + [ + x509.NameAttribute(x509.NameOID.COUNTRY_NAME, country), + x509.NameAttribute(x509.NameOID.COMMON_NAME, subject), + ] + ) + subject_identifier_object = x509.SubjectKeyIdentifier.from_public_key( + private_key_object.public_key() # type: ignore[arg-type] + ) + subject_identifier = key_identifier = subject_identifier_object.public_bytes() + key_usage = x509.KeyUsage( + digital_signature=True, + key_encipherment=True, + key_cert_sign=True, + key_agreement=False, + content_commitment=False, + data_encipherment=False, + crl_sign=False, + encipher_only=False, + decipher_only=False, + ) + cert = ( + x509.CertificateBuilder() + .subject_name(subject_name) + .issuer_name(subject_name) + .public_key(private_key_object.public_key()) # type: ignore[arg-type] + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity)) + .add_extension(x509.SubjectKeyIdentifier(digest=subject_identifier), critical=False) + .add_extension( + x509.AuthorityKeyIdentifier( + key_identifier=key_identifier, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ), + critical=False, + ) + .add_extension(key_usage, critical=True) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(private_key_object, hashes.SHA256()) # type: ignore[arg-type] + ) + return cert.public_bytes(serialization.Encoding.PEM) + + +def get_certificate_extensions( + authority_key_identifier: bytes, + csr: x509.CertificateSigningRequest, + alt_names: Optional[List[str]], + is_ca: bool, +) -> List[x509.Extension]: + """Generate a list of certificate extensions from a CSR and other known information. 
+ + Args: + authority_key_identifier (bytes): Authority key identifier + csr (x509.CertificateSigningRequest): CSR + alt_names (list): List of alt names to put on cert - prefer putting SANs in CSR + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + List[x509.Extension]: List of extensions + """ + cert_extensions_list: List[x509.Extension] = [ + x509.Extension( + oid=ExtensionOID.AUTHORITY_KEY_IDENTIFIER, + value=x509.AuthorityKeyIdentifier( + key_identifier=authority_key_identifier, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ), + critical=False, + ), + x509.Extension( + oid=ExtensionOID.SUBJECT_KEY_IDENTIFIER, + value=x509.SubjectKeyIdentifier.from_public_key(csr.public_key()), + critical=False, + ), + x509.Extension( + oid=ExtensionOID.BASIC_CONSTRAINTS, + critical=True, + value=x509.BasicConstraints(ca=is_ca, path_length=None), + ), + ] + + sans: List[x509.GeneralName] = [] + san_alt_names = [x509.DNSName(name) for name in alt_names] if alt_names else [] + sans.extend(san_alt_names) + try: + loaded_san_ext = csr.extensions.get_extension_for_class(x509.SubjectAlternativeName) + sans.extend( + [x509.DNSName(name) for name in loaded_san_ext.value.get_values_for_type(x509.DNSName)] + ) + sans.extend( + [x509.IPAddress(ip) for ip in loaded_san_ext.value.get_values_for_type(x509.IPAddress)] + ) + sans.extend( + [ + x509.RegisteredID(oid) + for oid in loaded_san_ext.value.get_values_for_type(x509.RegisteredID) + ] + ) + except x509.ExtensionNotFound: + pass + + if sans: + cert_extensions_list.append( + x509.Extension( + oid=ExtensionOID.SUBJECT_ALTERNATIVE_NAME, + critical=False, + value=x509.SubjectAlternativeName(sans), + ) + ) + + if is_ca: + cert_extensions_list.append( + x509.Extension( + ExtensionOID.KEY_USAGE, + critical=True, + value=x509.KeyUsage( + digital_signature=False, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=True, + crl_sign=True, + encipher_only=False, + decipher_only=False, + ), + ) + ) + + existing_oids = {ext.oid for ext in cert_extensions_list} + for extension in csr.extensions: + if extension.oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME: + continue + if extension.oid in existing_oids: + logger.warning("Extension %s is managed by the TLS provider, ignoring.", extension.oid) + continue + cert_extensions_list.append(extension) + + return cert_extensions_list + + +def generate_certificate( + csr: bytes, + ca: bytes, + ca_key: bytes, + ca_key_password: Optional[bytes] = None, + validity: int = 365, + alt_names: Optional[List[str]] = None, + is_ca: bool = False, +) -> bytes: + """Generate a TLS certificate based on a CSR. 
+ + Args: + csr (bytes): CSR + ca (bytes): CA Certificate + ca_key (bytes): CA private key + ca_key_password: CA private key password + validity (int): Certificate validity (in days) + alt_names (list): List of alt names to put on cert - prefer putting SANs in CSR + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + bytes: Certificate + """ + csr_object = x509.load_pem_x509_csr(csr) + subject = csr_object.subject + ca_pem = x509.load_pem_x509_certificate(ca) + issuer = ca_pem.issuer + private_key = serialization.load_pem_private_key(ca_key, password=ca_key_password) + + certificate_builder = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(csr_object.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=validity)) + ) + extensions = get_certificate_extensions( + authority_key_identifier=ca_pem.extensions.get_extension_for_class( + x509.SubjectKeyIdentifier + ).value.key_identifier, + csr=csr_object, + alt_names=alt_names, + is_ca=is_ca, + ) + for extension in extensions: + try: + certificate_builder = certificate_builder.add_extension( + extval=extension.value, + critical=extension.critical, + ) + except ValueError as e: + logger.warning("Failed to add extension %s: %s", extension.oid, e) + + cert = certificate_builder.sign(private_key, hashes.SHA256()) # type: ignore[arg-type] + return cert.public_bytes(serialization.Encoding.PEM) + + +def generate_pfx_package( + certificate: bytes, + private_key: bytes, + package_password: str, + private_key_password: Optional[bytes] = None, +) -> bytes: + """Generate a PFX package to contain the TLS certificate and private key. + + Args: + certificate (bytes): TLS certificate + private_key (bytes): Private key + package_password (str): Password to open the PFX package + private_key_password (bytes): Private key password + + Returns: + bytes: + """ + private_key_object = serialization.load_pem_private_key( + private_key, password=private_key_password + ) + certificate_object = x509.load_pem_x509_certificate(certificate) + name = certificate_object.subject.rfc4514_string() + pfx_bytes = pkcs12.serialize_key_and_certificates( + name=name.encode(), + cert=certificate_object, + key=private_key_object, # type: ignore[arg-type] + cas=None, + encryption_algorithm=serialization.BestAvailableEncryption(package_password.encode()), + ) + return pfx_bytes + + +def generate_private_key( + password: Optional[bytes] = None, + key_size: int = 2048, + public_exponent: int = 65537, +) -> bytes: + """Generate a private key. + + Args: + password (bytes): Password for decrypting the private key + key_size (int): Key size in bytes + public_exponent: Public exponent. 
+ + Returns: + bytes: Private Key + """ + private_key = rsa.generate_private_key( + public_exponent=public_exponent, + key_size=key_size, + ) + key_bytes = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=( + serialization.BestAvailableEncryption(password) + if password + else serialization.NoEncryption() + ), + ) + return key_bytes + + +def generate_csr( # noqa: C901 + private_key: bytes, + subject: str, + add_unique_id_to_subject_name: bool = True, + organization: Optional[str] = None, + email_address: Optional[str] = None, + country_name: Optional[str] = None, + private_key_password: Optional[bytes] = None, + sans: Optional[List[str]] = None, + sans_oid: Optional[List[str]] = None, + sans_ip: Optional[List[str]] = None, + sans_dns: Optional[List[str]] = None, + additional_critical_extensions: Optional[List] = None, +) -> bytes: + """Generate a CSR using private key and subject. + + Args: + private_key (bytes): Private key + subject (str): CSR Common Name that can be an IP or a Full Qualified Domain Name (FQDN). + add_unique_id_to_subject_name (bool): Whether a unique ID must be added to the CSR's + subject name. Always leave to "True" when the CSR is used to request certificates + using the tls-certificates relation. + organization (str): Name of organization. + email_address (str): Email address. + country_name (str): Country Name. + private_key_password (bytes): Private key password + sans (list): Use sans_dns - this will be deprecated in a future release + List of DNS subject alternative names (keeping it for now for backward compatibility) + sans_oid (list): List of registered ID SANs + sans_dns (list): List of DNS subject alternative names (similar to the arg: sans) + sans_ip (list): List of IP subject alternative names + additional_critical_extensions (list): List of critical additional extension objects. + Object must be a x509 ExtensionType. 
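+
+    Example:
+        Illustrative call; the subject and DNS SAN are placeholder values and
+        ``private_key`` is assumed to come from ``generate_private_key``:
+
+            csr = generate_csr(
+                private_key=private_key,
+                subject="banana.example.com",
+                sans_dns=["banana.example.com"],
+            )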
+ + Returns: + bytes: CSR + """ + signing_key = serialization.load_pem_private_key(private_key, password=private_key_password) + subject_name = [x509.NameAttribute(x509.NameOID.COMMON_NAME, subject)] + if add_unique_id_to_subject_name: + unique_identifier = uuid.uuid4() + subject_name.append( + x509.NameAttribute(x509.NameOID.X500_UNIQUE_IDENTIFIER, str(unique_identifier)) + ) + if organization: + subject_name.append(x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, organization)) + if email_address: + subject_name.append(x509.NameAttribute(x509.NameOID.EMAIL_ADDRESS, email_address)) + if country_name: + subject_name.append(x509.NameAttribute(x509.NameOID.COUNTRY_NAME, country_name)) + csr = x509.CertificateSigningRequestBuilder(subject_name=x509.Name(subject_name)) + + _sans: List[x509.GeneralName] = [] + if sans_oid: + _sans.extend([x509.RegisteredID(x509.ObjectIdentifier(san)) for san in sans_oid]) + if sans_ip: + _sans.extend([x509.IPAddress(IPv4Address(san)) for san in sans_ip]) + if sans: + _sans.extend([x509.DNSName(san) for san in sans]) + if sans_dns: + _sans.extend([x509.DNSName(san) for san in sans_dns]) + if _sans: + csr = csr.add_extension(x509.SubjectAlternativeName(set(_sans)), critical=False) + + if additional_critical_extensions: + for extension in additional_critical_extensions: + csr = csr.add_extension(extension, critical=True) + + signed_certificate = csr.sign(signing_key, hashes.SHA256()) # type: ignore[arg-type] + return signed_certificate.public_bytes(serialization.Encoding.PEM) + + +def csr_matches_certificate(csr: str, cert: str) -> bool: + """Check if a CSR matches a certificate. + + Args: + csr (str): Certificate Signing Request as a string + cert (str): Certificate as a string + Returns: + bool: True/False depending on whether the CSR matches the certificate. 
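+
+    Example:
+        Illustrative check; ``csr_pem`` and ``cert_pem`` are assumed to hold
+        PEM-encoded strings, for instance decoded output of ``generate_csr`` and
+        ``generate_certificate``:
+
+            if not csr_matches_certificate(csr=csr_pem, cert=cert_pem):
+                logger.warning("Certificate does not match the stored CSR")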
+ """ + try: + csr_object = x509.load_pem_x509_csr(csr.encode("utf-8")) + cert_object = x509.load_pem_x509_certificate(cert.encode("utf-8")) + + if csr_object.public_key().public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) != cert_object.public_key().public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ): + return False + if ( + csr_object.public_key().public_numbers().n # type: ignore[union-attr] + != cert_object.public_key().public_numbers().n # type: ignore[union-attr] + ): + return False + except ValueError: + logger.warning("Could not load certificate or CSR.") + return False + return True + + +class CertificatesProviderCharmEvents(CharmEvents): + """List of events that the TLS Certificates provider charm can leverage.""" + + certificate_creation_request = EventSource(CertificateCreationRequestEvent) + certificate_revocation_request = EventSource(CertificateRevocationRequestEvent) + + +class CertificatesRequirerCharmEvents(CharmEvents): + """List of events that the TLS Certificates requirer charm can leverage.""" + + certificate_available = EventSource(CertificateAvailableEvent) + certificate_expiring = EventSource(CertificateExpiringEvent) + certificate_invalidated = EventSource(CertificateInvalidatedEvent) + all_certificates_invalidated = EventSource(AllCertificatesInvalidatedEvent) + + +class TLSCertificatesProvidesV2(Object): + """TLS certificates provider class to be instantiated by TLS certificates providers.""" + + on = CertificatesProviderCharmEvents() # type: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relationship_name: str): + super().__init__(charm, relationship_name) + self.framework.observe( + charm.on[relationship_name].relation_changed, self._on_relation_changed + ) + self.charm = charm + self.relationship_name = relationship_name + + def _load_app_relation_data(self, relation: Relation) -> dict: + """Load relation data from the application relation data bag. + + Json loads all data. + + Args: + relation: Relation data from the application databag + + Returns: + dict: Relation data in dict format. + """ + # If unit is not leader, it does not try to reach relation data. + if not self.model.unit.is_leader(): + return {} + return _load_relation_data(relation.data[self.charm.app]) + + def _add_certificate( + self, + relation_id: int, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + ) -> None: + """Add certificate to relation data. 
+ + Args: + relation_id (int): Relation id + certificate (str): Certificate + certificate_signing_request (str): Certificate Signing Request + ca (str): CA Certificate + chain (list): CA Chain + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + new_certificate = { + "certificate": certificate, + "certificate_signing_request": certificate_signing_request, + "ca": ca, + "chain": chain, + } + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + certificates = copy.deepcopy(provider_certificates) + if new_certificate in certificates: + logger.info("Certificate already in relation data - Doing nothing") + return + certificates.append(new_certificate) + relation.data[self.model.app]["certificates"] = json.dumps(certificates) + + def _remove_certificate( + self, + relation_id: int, + certificate: Optional[str] = None, + certificate_signing_request: Optional[str] = None, + ) -> None: + """Remove certificate from a given relation based on user provided certificate or csr. + + Args: + relation_id (int): Relation id + certificate (str): Certificate (optional) + certificate_signing_request: Certificate signing request (optional) + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, + relation_id=relation_id, + ) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} with relation id {relation_id} does not exist" + ) + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + certificates = copy.deepcopy(provider_certificates) + for certificate_dict in certificates: + if certificate and certificate_dict["certificate"] == certificate: + certificates.remove(certificate_dict) + if ( + certificate_signing_request + and certificate_dict["certificate_signing_request"] == certificate_signing_request + ): + certificates.remove(certificate_dict) + relation.data[self.model.app]["certificates"] = json.dumps(certificates) + + @staticmethod + def _relation_data_is_valid(certificates_data: dict) -> bool: + """Use JSON schema validator to validate relation data content. + + Args: + certificates_data (dict): Certificate data dictionary as retrieved from relation data. + + Returns: + bool: True/False depending on whether the relation data follows the json schema. + """ + try: + validate(instance=certificates_data, schema=REQUIRER_JSON_SCHEMA) + return True + except exceptions.ValidationError: + return False + + def revoke_all_certificates(self) -> None: + """Revoke all certificates of this provider. + + This method is meant to be used when the Root CA has changed. + """ + for relation in self.model.relations[self.relationship_name]: + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = copy.deepcopy(provider_relation_data.get("certificates", [])) + for certificate in provider_certificates: + certificate["revoked"] = True + relation.data[self.model.app]["certificates"] = json.dumps(provider_certificates) + + def set_relation_certificate( + self, + certificate: str, + certificate_signing_request: str, + ca: str, + chain: List[str], + relation_id: int, + ) -> None: + """Add certificates to relation data. 
+ + Args: + certificate (str): Certificate + certificate_signing_request (str): Certificate signing request + ca (str): CA Certificate + chain (list): CA Chain + relation_id (int): Juju relation ID + + Returns: + None + """ + if not self.model.unit.is_leader(): + return + certificates_relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + self._remove_certificate( + certificate_signing_request=certificate_signing_request.strip(), + relation_id=relation_id, + ) + self._add_certificate( + relation_id=relation_id, + certificate=certificate.strip(), + certificate_signing_request=certificate_signing_request.strip(), + ca=ca.strip(), + chain=[cert.strip() for cert in chain], + ) + + def remove_certificate(self, certificate: str) -> None: + """Remove a given certificate from relation data. + + Args: + certificate (str): TLS Certificate + + Returns: + None + """ + certificates_relation = self.model.relations[self.relationship_name] + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + for certificate_relation in certificates_relation: + self._remove_certificate(certificate=certificate, relation_id=certificate_relation.id) + + def get_issued_certificates( + self, relation_id: Optional[int] = None + ) -> Dict[str, List[Dict[str, str]]]: + """Return a dictionary of issued certificates. + + It returns certificates from all relations if relation_id is not specified. + Certificates are returned per application name and CSR. + + Returns: + dict: Certificates per application name. + """ + certificates: Dict[str, List[Dict[str, str]]] = {} + relations = ( + [ + relation + for relation in self.model.relations[self.relationship_name] + if relation.id == relation_id + ] + if relation_id is not None + else self.model.relations.get(self.relationship_name, []) + ) + for relation in relations: + provider_relation_data = self._load_app_relation_data(relation) + provider_certificates = provider_relation_data.get("certificates", []) + + certificates[relation.app.name] = [] # type: ignore[union-attr] + for certificate in provider_certificates: + if not certificate.get("revoked", False): + certificates[relation.app.name].append( # type: ignore[union-attr] + { + "csr": certificate["certificate_signing_request"], + "certificate": certificate["certificate"], + } + ) + + return certificates + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle relation changed event. + + Looks at the relation data and either emits: + - certificate request event: If the unit relation data contains a CSR for which + a certificate does not exist in the provider relation data. + - certificate revocation event: If the provider relation data contains a CSR for which + a csr does not exist in the requirer relation data. 
+ + Args: + event: Juju event + + Returns: + None + """ + if event.unit is None: + logger.error("Relation_changed event does not have a unit.") + return + if not self.model.unit.is_leader(): + return + requirer_relation_data = _load_relation_data(event.relation.data[event.unit]) + provider_relation_data = self._load_app_relation_data(event.relation) + if not self._relation_data_is_valid(requirer_relation_data): + logger.debug("Relation data did not pass JSON Schema validation") + return + provider_certificates = provider_relation_data.get("certificates", []) + requirer_csrs = requirer_relation_data.get("certificate_signing_requests", []) + provider_csrs = [ + certificate_creation_request["certificate_signing_request"] + for certificate_creation_request in provider_certificates + ] + requirer_unit_certificate_requests = [ + { + "csr": certificate_creation_request["certificate_signing_request"], + "is_ca": certificate_creation_request.get("ca", False), + } + for certificate_creation_request in requirer_csrs + ] + for certificate_request in requirer_unit_certificate_requests: + if certificate_request["csr"] not in provider_csrs: + self.on.certificate_creation_request.emit( + certificate_signing_request=certificate_request["csr"], + relation_id=event.relation.id, + is_ca=certificate_request["is_ca"], + ) + self._revoke_certificates_for_which_no_csr_exists(relation_id=event.relation.id) + + def _revoke_certificates_for_which_no_csr_exists(self, relation_id: int) -> None: + """Revoke certificates for which no unit has a CSR. + + Goes through all generated certificates and compare against the list of CSRs for all units + of a given relationship. + + Args: + relation_id (int): Relation id + + Returns: + None + """ + certificates_relation = self.model.get_relation( + relation_name=self.relationship_name, relation_id=relation_id + ) + if not certificates_relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + provider_relation_data = self._load_app_relation_data(certificates_relation) + list_of_csrs: List[str] = [] + for unit in certificates_relation.units: + requirer_relation_data = _load_relation_data(certificates_relation.data[unit]) + requirer_csrs = requirer_relation_data.get("certificate_signing_requests", []) + list_of_csrs.extend(csr["certificate_signing_request"] for csr in requirer_csrs) + provider_certificates = provider_relation_data.get("certificates", []) + for certificate in provider_certificates: + if certificate["certificate_signing_request"] not in list_of_csrs: + self.on.certificate_revocation_request.emit( + certificate=certificate["certificate"], + certificate_signing_request=certificate["certificate_signing_request"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + self.remove_certificate(certificate=certificate["certificate"]) + + def get_outstanding_certificate_requests( + self, relation_id: Optional[int] = None + ) -> List[Dict[str, Union[int, str, List[Dict[str, str]]]]]: + """Return CSR's for which no certificate has been issued. + + Example return: [ + { + "relation_id": 0, + "application_name": "tls-certificates-requirer", + "unit_name": "tls-certificates-requirer/0", + "unit_csrs": [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "is_ca": false + } + ] + } + ] + + Args: + relation_id (int): Relation id + + Returns: + list: List of dictionaries that contain the unit's csrs + that don't have a certificate issued. 
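+
+        Example:
+            Illustrative provider-side usage; ``self.certificates`` is assumed to
+            be a ``TLSCertificatesProvidesV2`` instance:
+
+                for entry in self.certificates.get_outstanding_certificate_requests():
+                    logger.info(
+                        "%d pending CSR(s) for %s",
+                        len(entry["unit_csrs"]),
+                        entry["unit_name"],
+                    )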
+ """ + all_unit_csr_mappings = copy.deepcopy(self.get_requirer_csrs(relation_id=relation_id)) + filtered_all_unit_csr_mappings: List[Dict[str, Union[int, str, List[Dict[str, str]]]]] = [] + for unit_csr_mapping in all_unit_csr_mappings: + csrs_without_certs = [] + for csr in unit_csr_mapping["unit_csrs"]: # type: ignore[union-attr] + if not self.certificate_issued_for_csr( + app_name=unit_csr_mapping["application_name"], # type: ignore[arg-type] + csr=csr["certificate_signing_request"], # type: ignore[index] + relation_id=relation_id, + ): + csrs_without_certs.append(csr) + if csrs_without_certs: + unit_csr_mapping["unit_csrs"] = csrs_without_certs # type: ignore[assignment] + filtered_all_unit_csr_mappings.append(unit_csr_mapping) + return filtered_all_unit_csr_mappings + + def get_requirer_csrs( + self, relation_id: Optional[int] = None + ) -> List[Dict[str, Union[int, str, List[Dict[str, str]]]]]: + """Return a list of requirers' CSRs grouped by unit. + + It returns CSRs from all relations if relation_id is not specified. + CSRs are returned per relation id, application name and unit name. + + Returns: + list: List of dictionaries that contain the unit's csrs + with the following information + relation_id, application_name and unit_name. + """ + unit_csr_mappings: List[Dict[str, Union[int, str, List[Dict[str, str]]]]] = [] + + relations = ( + [ + relation + for relation in self.model.relations[self.relationship_name] + if relation.id == relation_id + ] + if relation_id is not None + else self.model.relations.get(self.relationship_name, []) + ) + + for relation in relations: + for unit in relation.units: + requirer_relation_data = _load_relation_data(relation.data[unit]) + unit_csrs_list = requirer_relation_data.get("certificate_signing_requests", []) + unit_csr_mappings.append( + { + "relation_id": relation.id, + "application_name": relation.app.name, # type: ignore[union-attr] + "unit_name": unit.name, + "unit_csrs": unit_csrs_list, + } + ) + return unit_csr_mappings + + def certificate_issued_for_csr( + self, app_name: str, csr: str, relation_id: Optional[int] + ) -> bool: + """Check whether a certificate has been issued for a given CSR. + + Args: + app_name (str): Application name that the CSR belongs to. + csr (str): Certificate Signing Request. + relation_id (Optional[int]): Relation ID + Returns: + bool: True/False depending on whether a certificate has been issued for the given CSR. + """ + issued_certificates_per_csr = self.get_issued_certificates(relation_id=relation_id)[ + app_name + ] + for issued_pair in issued_certificates_per_csr: + if "csr" in issued_pair and issued_pair["csr"] == csr: + return csr_matches_certificate(csr, issued_pair["certificate"]) + return False + + +class TLSCertificatesRequiresV2(Object): + """TLS certificates requirer class to be instantiated by TLS certificates requirers.""" + + on = CertificatesRequirerCharmEvents() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relationship_name: str, + expiry_notification_time: int = 168, + ): + """Generate/use private key and observes relation changed event. + + Args: + charm: Charm object + relationship_name: Juju relation name + expiry_notification_time (int): Time difference between now and expiry (in hours). + Used to trigger the CertificateExpiring event. Default: 7 days. 
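+
+        Example:
+            Illustrative instantiation inside a charm's ``__init__``; the 24-hour
+            notification window is an arbitrary value for this sketch:
+
+                self.certificates = TLSCertificatesRequiresV2(
+                    self, "certificates", expiry_notification_time=24
+                )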
+ """ + super().__init__(charm, relationship_name) + self.relationship_name = relationship_name + self.charm = charm + self.expiry_notification_time = expiry_notification_time + self.framework.observe( + charm.on[relationship_name].relation_changed, self._on_relation_changed + ) + self.framework.observe( + charm.on[relationship_name].relation_broken, self._on_relation_broken + ) + if JujuVersion.from_environ().has_secrets: + self.framework.observe(charm.on.secret_expired, self._on_secret_expired) + else: + self.framework.observe(charm.on.update_status, self._on_update_status) + + @property + def _requirer_csrs(self) -> List[Dict[str, Union[bool, str]]]: + """Return list of requirer's CSRs from relation unit data. + + Example: + [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "ca": false + } + ] + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError(f"Relation {self.relationship_name} does not exist") + requirer_relation_data = _load_relation_data(relation.data[self.model.unit]) + return requirer_relation_data.get("certificate_signing_requests", []) + + @property + def _provider_certificates(self) -> List[Dict[str, str]]: + """Return list of certificates from the provider's relation data.""" + relation = self.model.get_relation(self.relationship_name) + if not relation: + logger.debug("No relation: %s", self.relationship_name) + return [] + if not relation.app: + logger.debug("No remote app in relation: %s", self.relationship_name) + return [] + provider_relation_data = _load_relation_data(relation.data[relation.app]) + if not self._relation_data_is_valid(provider_relation_data): + logger.warning("Provider relation data did not pass JSON Schema validation") + return [] + return provider_relation_data.get("certificates", []) + + def _add_requirer_csr(self, csr: str, is_ca: bool) -> None: + """Add CSR to relation data. + + Args: + csr (str): Certificate Signing Request + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + new_csr_dict: Dict[str, Union[bool, str]] = { + "certificate_signing_request": csr, + "ca": is_ca, + } + if new_csr_dict in self._requirer_csrs: + logger.info("CSR already in relation data - Doing nothing") + return + requirer_csrs = copy.deepcopy(self._requirer_csrs) + requirer_csrs.append(new_csr_dict) + relation.data[self.model.unit]["certificate_signing_requests"] = json.dumps(requirer_csrs) + + def _remove_requirer_csr(self, csr: str) -> None: + """Remove CSR from relation data. 
+ + Args: + csr (str): Certificate signing request + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + requirer_csrs = copy.deepcopy(self._requirer_csrs) + if not requirer_csrs: + logger.info("No CSRs in relation data - Doing nothing") + return + for requirer_csr in requirer_csrs: + if requirer_csr["certificate_signing_request"] == csr: + requirer_csrs.remove(requirer_csr) + relation.data[self.model.unit]["certificate_signing_requests"] = json.dumps(requirer_csrs) + + def request_certificate_creation( + self, certificate_signing_request: bytes, is_ca: bool = False + ) -> None: + """Request TLS certificate to provider charm. + + Args: + certificate_signing_request (bytes): Certificate Signing Request + is_ca (bool): Whether the certificate is a CA certificate + + Returns: + None + """ + relation = self.model.get_relation(self.relationship_name) + if not relation: + raise RuntimeError( + f"Relation {self.relationship_name} does not exist - " + f"The certificate request can't be completed" + ) + self._add_requirer_csr(certificate_signing_request.decode().strip(), is_ca=is_ca) + logger.info("Certificate request sent to provider") + + def request_certificate_revocation(self, certificate_signing_request: bytes) -> None: + """Remove CSR from relation data. + + The provider of this relation is then expected to remove certificates associated to this + CSR from the relation data as well and emit a request_certificate_revocation event for the + provider charm to interpret. + + Args: + certificate_signing_request (bytes): Certificate Signing Request + + Returns: + None + """ + self._remove_requirer_csr(certificate_signing_request.decode().strip()) + logger.info("Certificate revocation sent to provider") + + def request_certificate_renewal( + self, old_certificate_signing_request: bytes, new_certificate_signing_request: bytes + ) -> None: + """Renew certificate. + + Removes old CSR from relation data and adds new one. + + Args: + old_certificate_signing_request: Old CSR + new_certificate_signing_request: New CSR + + Returns: + None + """ + try: + self.request_certificate_revocation( + certificate_signing_request=old_certificate_signing_request + ) + except RuntimeError: + logger.warning("Certificate revocation failed.") + self.request_certificate_creation( + certificate_signing_request=new_certificate_signing_request + ) + logger.info("Certificate renewal request completed.") + + def get_assigned_certificates(self) -> List[Dict[str, str]]: + """Get a list of certificates that were assigned to this unit. + + Returns: + List of certificates. For example: + [ + { + "ca": "-----BEGIN CERTIFICATE-----...", + "chain": [ + "-----BEGIN CERTIFICATE-----..." + ], + "certificate": "-----BEGIN CERTIFICATE-----...", + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + } + ] + """ + final_list = [] + for csr in self.get_certificate_signing_requests(fulfilled_only=True): + assert isinstance(csr["certificate_signing_request"], str) + if cert := self._find_certificate_in_relation_data(csr["certificate_signing_request"]): + final_list.append(cert) + return final_list + + def get_expiring_certificates(self) -> List[Dict[str, str]]: + """Get a list of certificates that were assigned to this unit that are expiring or expired. + + Returns: + List of certificates. 
For example: + [ + { + "ca": "-----BEGIN CERTIFICATE-----...", + "chain": [ + "-----BEGIN CERTIFICATE-----..." + ], + "certificate": "-----BEGIN CERTIFICATE-----...", + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + } + ] + """ + final_list = [] + for csr in self.get_certificate_signing_requests(fulfilled_only=True): + assert isinstance(csr["certificate_signing_request"], str) + if cert := self._find_certificate_in_relation_data(csr["certificate_signing_request"]): + expiry_time = _get_certificate_expiry_time(cert["certificate"]) + if not expiry_time: + continue + expiry_notification_time = expiry_time - timedelta( + hours=self.expiry_notification_time + ) + if datetime.now(timezone.utc) > expiry_notification_time: + final_list.append(cert) + return final_list + + def get_certificate_signing_requests( + self, + fulfilled_only: bool = False, + unfulfilled_only: bool = False, + ) -> List[Dict[str, Union[bool, str]]]: + """Get the list of CSR's that were sent to the provider. + + You can choose to get only the CSR's that have a certificate assigned or only the CSR's + that don't. + + Args: + fulfilled_only (bool): This option will discard CSRs that don't have certificates yet. + unfulfilled_only (bool): This option will discard CSRs that have certificates signed. + + Returns: + List of CSR dictionaries. For example: + [ + { + "certificate_signing_request": "-----BEGIN CERTIFICATE REQUEST-----...", + "ca": false + } + ] + """ + final_list = [] + for csr in self._requirer_csrs: + assert isinstance(csr["certificate_signing_request"], str) + cert = self._find_certificate_in_relation_data(csr["certificate_signing_request"]) + if (unfulfilled_only and cert) or (fulfilled_only and not cert): + continue + final_list.append(csr) + + return final_list + + @staticmethod + def _relation_data_is_valid(certificates_data: dict) -> bool: + """Check whether relation data is valid based on json schema. + + Args: + certificates_data: Certificate data in dict format. + + Returns: + bool: Whether relation data is valid. + """ + try: + validate(instance=certificates_data, schema=PROVIDER_JSON_SCHEMA) + return True + except exceptions.ValidationError: + return False + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle relation changed event. + + Goes through all providers certificates that match a requested CSR. + + If the provider certificate is revoked, emit a CertificateInvalidateEvent, + otherwise emit a CertificateAvailableEvent. + + When Juju secrets are available, remove the secret for revoked certificate, + or add a secret with the correct expiry time for new certificates. 
+ + + Args: + event: Juju event + + Returns: + None + """ + requirer_csrs = [ + certificate_creation_request["certificate_signing_request"] + for certificate_creation_request in self._requirer_csrs + ] + for certificate in self._provider_certificates: + if certificate["certificate_signing_request"] in requirer_csrs: + if certificate.get("revoked", False): + if JujuVersion.from_environ().has_secrets: + with suppress(SecretNotFoundError): + secret = self.model.get_secret( + label=f"{LIBID}-{certificate['certificate_signing_request']}" + ) + secret.remove_all_revisions() + self.on.certificate_invalidated.emit( + reason="revoked", + certificate=certificate["certificate"], + certificate_signing_request=certificate["certificate_signing_request"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + else: + if JujuVersion.from_environ().has_secrets: + try: + secret = self.model.get_secret( + label=f"{LIBID}-{certificate['certificate_signing_request']}" + ) + secret.set_content({"certificate": certificate["certificate"]}) + secret.set_info( + expire=self._get_next_secret_expiry_time( + certificate["certificate"] + ), + ) + except SecretNotFoundError: + secret = self.charm.unit.add_secret( + {"certificate": certificate["certificate"]}, + label=f"{LIBID}-{certificate['certificate_signing_request']}", + expire=self._get_next_secret_expiry_time( + certificate["certificate"] + ), + ) + self.on.certificate_available.emit( + certificate_signing_request=certificate["certificate_signing_request"], + certificate=certificate["certificate"], + ca=certificate["ca"], + chain=certificate["chain"], + ) + + def _get_next_secret_expiry_time(self, certificate: str) -> Optional[datetime]: + """Return the expiry time or expiry notification time. + + Extracts the expiry time from the provided certificate, calculates the + expiry notification time and return the closest of the two, that is in + the future. + + Args: + certificate: x509 certificate + + Returns: + Optional[datetime]: None if the certificate expiry time cannot be read, + next expiry time otherwise. + """ + expiry_time = _get_certificate_expiry_time(certificate) + if not expiry_time: + return None + expiry_notification_time = expiry_time - timedelta(hours=self.expiry_notification_time) + return _get_closest_future_time(expiry_notification_time, expiry_time) + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Handle relation broken event. + + Emitting `all_certificates_invalidated` from `relation-broken` rather + than `relation-departed` since certs are stored in app data. + + Args: + event: Juju event + + Returns: + None + """ + self.on.all_certificates_invalidated.emit() + + def _on_secret_expired(self, event: SecretExpiredEvent) -> None: + """Handle secret expired event. + + Loads the certificate from the secret, and will emit 1 of 2 + events. + + If the certificate is not yet expired, emits CertificateExpiringEvent + and updates the expiry time of the secret to the exact expiry time on + the certificate. + + If the certificate is expired, emits CertificateInvalidedEvent and + deletes the secret. + + Args: + event (SecretExpiredEvent): Juju event + """ + if not event.secret.label or not event.secret.label.startswith(f"{LIBID}-"): + return + csr = event.secret.label[len(f"{LIBID}-") :] + certificate_dict = self._find_certificate_in_relation_data(csr) + if not certificate_dict: + # A secret expired but we did not find matching certificate. 
Cleaning up + event.secret.remove_all_revisions() + return + + expiry_time = _get_certificate_expiry_time(certificate_dict["certificate"]) + if not expiry_time: + # A secret expired but matching certificate is invalid. Cleaning up + event.secret.remove_all_revisions() + return + + if datetime.now(timezone.utc) < expiry_time: + logger.warning("Certificate almost expired") + self.on.certificate_expiring.emit( + certificate=certificate_dict["certificate"], + expiry=expiry_time.isoformat(), + ) + event.secret.set_info( + expire=_get_certificate_expiry_time(certificate_dict["certificate"]), + ) + else: + logger.warning("Certificate is expired") + self.on.certificate_invalidated.emit( + reason="expired", + certificate=certificate_dict["certificate"], + certificate_signing_request=certificate_dict["certificate_signing_request"], + ca=certificate_dict["ca"], + chain=certificate_dict["chain"], + ) + self.request_certificate_revocation(certificate_dict["certificate"].encode()) + event.secret.remove_all_revisions() + + def _find_certificate_in_relation_data(self, csr: str) -> Optional[Dict[str, Any]]: + """Return the certificate that match the given CSR.""" + for certificate_dict in self._provider_certificates: + if certificate_dict["certificate_signing_request"] != csr: + continue + return certificate_dict + return None + + def _on_update_status(self, event: UpdateStatusEvent) -> None: + """Handle update status event. + + Goes through each certificate in the "certificates" relation and checks their expiry date. + If they are close to expire (<7 days), emits a CertificateExpiringEvent event and if + they are expired, emits a CertificateExpiredEvent. + + Args: + event (UpdateStatusEvent): Juju event + + Returns: + None + """ + for certificate_dict in self._provider_certificates: + expiry_time = _get_certificate_expiry_time(certificate_dict["certificate"]) + if not expiry_time: + continue + time_difference = expiry_time - datetime.now(timezone.utc) + if time_difference.total_seconds() < 0: + logger.warning("Certificate is expired") + self.on.certificate_invalidated.emit( + reason="expired", + certificate=certificate_dict["certificate"], + certificate_signing_request=certificate_dict["certificate_signing_request"], + ca=certificate_dict["ca"], + chain=certificate_dict["chain"], + ) + self.request_certificate_revocation(certificate_dict["certificate"].encode()) + continue + if time_difference.total_seconds() < (self.expiry_notification_time * 60 * 60): + logger.warning("Certificate almost expired") + self.on.certificate_expiring.emit( + certificate=certificate_dict["certificate"], + expiry=expiry_time.isoformat(), + ) diff --git a/metadata.yaml b/metadata.yaml index f42fbdff..f51e4bc4 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -38,6 +38,10 @@ requires: juju-info: interface: juju-info scope: container + certificates: + interface: tls-certificates + optional: true + limit: 1 peers: cos: interface: cos diff --git a/poetry.lock b/poetry.lock index a9e2072d..1355dfc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "appnope" @@ -28,6 +28,25 @@ six = "*" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "backcall" version = "0.2.0" @@ -428,47 +447,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.3" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -622,6 +650,24 @@ files = [ {file = "idna-3.4.tar.gz", hash = 
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -741,6 +787,44 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsonschema" +version = "4.21.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.31.0" + [[package]] name = "juju" version = "3.2.0.1" @@ -851,6 +935,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1107,6 +1201,17 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + [[package]] name = "platformdirs" version = "3.10.0" @@ -1634,6 +1739,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1641,8 +1747,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1659,6 +1773,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1666,11 +1781,27 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "referencing" +version = "0.34.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "requests" version = "2.31.0" @@ -1710,6 +1841,114 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + 
{file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + [[package]] name = "rsa" version = "4.9" @@ -2076,7 +2315,22 @@ files = [ {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, ] +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "4622b1846ed62d249eedb507994529fdb6e7c73c0cde3b1f0d40e69503ca68c2" +content-hash = "0bea5c8e076448e3adca86681ddd5ab42ce1e8398a264b4d5a3b0e5bcdf03a63" diff --git a/pyproject.toml b/pyproject.toml index a66522f0..768433ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,9 @@ jinja2 = "^3.1.2" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py ops = ">=2.0.0" +# tls_certificates_interface/v1/tls_certificates.py +cryptography = "*" +jsonschema = "*" # grafana_agent/v0/cos_agent.py pydantic = "<2" cosl = "*" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 9923f330..e37335c3 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -116,6 +116,9 @@ def _tls_certificate(self) -> str: # TODO VM TLS: Update property after implementing TLS on machine_charm return None + def is_exposed(self, relation=None) -> bool: + return self._database_provides.is_exposed(relation) + def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: """Returns the exporter config for MySQLRouter exporter if cos relation exists""" cos_relation_exists = self._cos.relation_exists and not self._cos.is_relation_breaking( diff --git 
a/src/machine_charm.py b/src/machine_charm.py index 02d8acc7..357082df 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -27,6 +27,9 @@ class MachineSubordinateRouterCharm(abstract_charm.MySQLRouterCharm): """MySQL Router machine subordinate charm""" + READ_WRITE_PORT = 6446 + READ_ONLY_PORT = 6447 + def __init__(self, *args) -> None: super().__init__(*args) # DEPRECATED shared-db: Enable legacy "mysql-shared" interface @@ -67,12 +70,21 @@ def _logrotate(self) -> machine_logrotate.LogRotate: def _cos(self) -> relations.cos.COSRelation: return self._cos_relation + @property + def _host_address(self) -> str: + """The host address for the machine.""" + return self.model.get_binding(upgrade.PEER_RELATION_ENDPOINT_NAME).network.bind_address + @property def _read_write_endpoint(self) -> str: + if self._database_provides.is_exposed(): + return f"{self._host_address}:{self.READ_WRITE_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysql.sock")}' @property def _read_only_endpoint(self) -> str: + if self._database_provides.is_exposed(): + return f"{self._host_address}:{self.READ_ONLY_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysqlro.sock")}' # ======================= @@ -119,6 +131,10 @@ def _on_force_upgrade_action(self, event: ops.ActionEvent) -> None: event.set_results({"result": f"Forcefully upgraded {self.unit.name}"}) logger.debug("Forced upgrade") + def reconcile(self, event=None) -> None: + self.database_provides.reconcile_ports() + super().reconcile(event=event) + if __name__ == "__main__": ops.main.main(MachineSubordinateRouterCharm) diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 382e2554..40185708 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -38,6 +38,14 @@ def __init__( charm_ ) + def is_exposed(self, _) -> bool: + """Whether the relation is exposed""" + return self._database_provides.is_exposed() + + def reconcile_ports(self) -> None: + """Reconcile ports for this unit""" + self._database_provides.reconcile_ports() + def reconcile_users( self, *, diff --git a/src/relations/database_provides.py b/src/relations/database_provides.py index e97483aa..7ae5a564 100644 --- a/src/relations/database_provides.py +++ b/src/relations/database_provides.py @@ -162,6 +162,8 @@ class RelationEndpoint: def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: self._interface = data_interfaces.DatabaseProvides(charm_, relation_name=self._NAME) + self._charm = charm_ + charm_.framework.observe(charm_.on[self._NAME].relation_created, charm_.reconcile) charm_.framework.observe(self._interface.on.database_requested, charm_.reconcile) charm_.framework.observe(charm_.on[self._NAME].relation_broken, charm_.reconcile) @@ -179,6 +181,21 @@ def _shared_users(self) -> typing.List[_RelationWithSharedUser]: pass return shared_users + def is_exposed(self) -> bool: + """Whether the relation is exposed.""" + relation_data = self._interface.fetch_relation_data(fields=["external-node-connectivity"]) + return any( + [data.get("external-node-connectivity") == "true" for data in relation_data.values()] + ) + + def reconcile_ports(self) -> None: + """Reconcile ports for this unit""" + if self.is_exposed(): + ports = [self._charm.READ_WRITE_PORT, self._charm.READ_ONLY_PORT] + else: + ports = [] + self._charm.unit.set_ports(ports) + def reconcile_users( self, *, diff --git a/src/socket_workload.py b/src/socket_workload.py index 
411fd8fe..55600690 100644 --- a/src/socket_workload.py +++ b/src/socket_workload.py @@ -20,10 +20,11 @@ class AuthenticatedSocketWorkload(workload.AuthenticatedWorkload): # TODO python3.10 min version: Use `list` instead of `typing.List` def _get_bootstrap_command(self, password: str) -> typing.List[str]: command = super()._get_bootstrap_command(password) + bind_address = "0.0.0.0" if self._charm._database_provides.is_exposed() else "127.0.0.1" command.extend( [ "--conf-bind-address", - "127.0.0.1", + bind_address, "--conf-use-sockets", # For unix sockets, authentication fails on first connection if this option is not # set. Workaround for https://bugs.mysql.com/bug.php?id=107291 From ad8b1296c319f8a96557f04c8184de7068da309b Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Tue, 9 Apr 2024 20:37:29 +0000 Subject: [PATCH 02/31] A working version of charm with simultaneous relations with COS and TLS --- actions.yaml | 9 + .../data_platform_libs/v0/data_interfaces.py | 485 +++++++++++++++--- metadata.yaml | 2 + src/abstract_charm.py | 36 +- src/container.py | 2 + src/machine_charm.py | 34 +- ...socket_workload.py => machine_workload.py} | 40 +- src/relations/cos.py | 6 +- src/relations/secrets.py | 32 +- src/relations/tls.py | 299 +++++++++++ src/snap.py | 29 +- templates/tls.cnf | 8 + 12 files changed, 858 insertions(+), 124 deletions(-) rename src/{socket_workload.py => machine_workload.py} (63%) create mode 100644 src/relations/tls.py create mode 100644 templates/tls.cnf diff --git a/actions.yaml b/actions.yaml index 2f00cb65..63b7b506 100644 --- a/actions.yaml +++ b/actions.yaml @@ -5,3 +5,12 @@ resume-upgrade: description: Upgrade remaining units (after you manually verified that upgraded units are healthy). force-upgrade: description: Force upgrade of this unit. +set-tls-private-key: + description: + Set the private key, which will be used for certificate signing requests (CSR). Run + for each unit separately. + params: + internal-key: + type: string + description: The content of private key for internal communications with + clients. Content will be auto-generated if this option is not specified. 
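
[Editor's note] The `set-tls-private-key` action added above takes an optional `internal-key` parameter and generates a key when it is omitted. Below is a minimal sketch of how the action could be driven from an integration test, following the pytest-operator/libjuju conventions used elsewhere in this patch series; the helper name, key file handling, and base64 encoding are illustrative assumptions, not part of the patch itself.

```python
# Hypothetical test helper (not part of this patch) exercising the new
# `set-tls-private-key` action on a single unit via python-libjuju.
import base64


async def set_unit_tls_private_key(ops_test, unit_name: str, key_path: str) -> None:
    """Run set-tls-private-key on one unit with a base64-encoded PEM key."""
    with open(key_path, "rb") as f:
        internal_key = base64.b64encode(f.read()).decode("utf-8")
    unit = ops_test.model.units[unit_name]
    # Parameter name contains a hyphen, so it must be passed via a dict.
    action = await unit.run_action("set-tls-private-key", **{"internal-key": internal_key})
    result = await action.wait()
    assert result.results is not None, f"action failed on {unit_name}"
```

The action handler introduced later in this patch (`_on_set_tls_private_key` in `src/relations/tls.py`) accepts the key either as plain PEM text or base64-encoded content, so the encoding step above is a convenience rather than a requirement.
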
diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index d24aa6ff..df23735e 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -295,10 +295,21 @@ def _on_topic_requested(self, event: TopicRequestedEvent): import json import logging from abc import ABC, abstractmethod -from collections import namedtuple +from collections import UserDict, namedtuple from datetime import datetime from enum import Enum -from typing import Callable, Dict, List, Optional, Set, Tuple, Union +from typing import ( + Callable, + Dict, + ItemsView, + KeysView, + List, + Optional, + Set, + Tuple, + Union, + ValuesView, +) from ops import JujuVersion, Model, Secret, SecretInfo, SecretNotFoundError from ops.charm import ( @@ -320,7 +331,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 29 +LIBPATCH = 33 PYDEPS = ["ops>=2.0.0"] @@ -337,21 +348,46 @@ def _on_topic_requested(self, event: TopicRequestedEvent): PROV_SECRET_PREFIX = "secret-" REQ_SECRET_FIELDS = "requested-secrets" +GROUP_MAPPING_FIELD = "secret_group_mapping" +GROUP_SEPARATOR = "@" + + +class SecretGroup(str): + """Secret groups specific type.""" + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.EXTRA = SecretGroup("extra") -class SecretGroup(Enum): - """Secret groups as constants.""" + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) + + def get_group(self, group: str) -> Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None - USER = "user" - TLS = "tls" - EXTRA = "extra" + +SECRET_GROUPS = SecretGroupsAggregate() class DataInterfacesError(Exception): """Common ancestor for DataInterfaces related exceptions.""" -class SecretError(Exception): +class SecretError(DataInterfacesError): """Common ancestor for Secrets related exceptions.""" @@ -367,6 +403,10 @@ class SecretsIllegalUpdateError(SecretError): """Secrets aren't yet available for Juju version used.""" +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + def get_encoded_dict( relation: Relation, member: Union[Unit, Application], field: str ) -> Optional[Dict[str, str]]: @@ -467,6 +507,34 @@ def wrapper(self, *args, **kwargs): return wrapper +def dynamic_secrets_only(f): + """Decorator to ensure that certain operations would be only executed when NO static secrets are defined.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields: + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." 
+ ) + return f(self, *args, **kwargs) + + return wrapper + + +def either_static_or_dynamic_secrets(f): + """Decorator to ensure that static and dynamic secrets won't be used in parallel.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields and set(self.current_secret_fields) - set( + self.static_secret_fields + ): + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + class Scope(Enum): """Peer relations scope.""" @@ -474,6 +542,11 @@ class Scope(Enum): UNIT = "unit" +################################################################################ +# Secrets internal caching +################################################################################ + + class CachedSecret: """Locally cache a secret. @@ -609,9 +682,110 @@ def remove(self, label: str) -> None: logging.error("Non-existing Juju Secret was attempted to be removed %s", label) +################################################################################ +# Relation Data base/abstract ancestors (i.e. parent classes) +################################################################################ + + # Base Data +class DataDict(UserDict): + """Python Standard Library 'dict' - like representation of Relation Data.""" + + def __init__(self, relation_data: "Data", relation_id: int): + self.relation_data = relation_data + self.relation_id = relation_id + + @property + def data(self) -> Dict[str, str]: + """Return the full content of the Abstract Relation Data dictionary.""" + result = self.relation_data.fetch_my_relation_data([self.relation_id]) + try: + result_remote = self.relation_data.fetch_relation_data([self.relation_id]) + except NotImplementedError: + result_remote = {self.relation_id: {}} + if result: + result_remote[self.relation_id].update(result[self.relation_id]) + return result_remote.get(self.relation_id, {}) + + def __setitem__(self, key: str, item: str) -> None: + """Set an item of the Abstract Relation Data dictionary.""" + self.relation_data.update_relation_data(self.relation_id, {key: item}) + + def __getitem__(self, key: str) -> str: + """Get an item of the Abstract Relation Data dictionary.""" + result = None + if not (result := self.relation_data.fetch_my_relation_field(self.relation_id, key)): + try: + result = self.relation_data.fetch_relation_field(self.relation_id, key) + except NotImplementedError: + pass + if not result: + raise KeyError + return result + + def __eq__(self, d: dict) -> bool: + """Equality.""" + return self.data == d + + def __repr__(self) -> str: + """String representation Abstract Relation Data dictionary.""" + return repr(self.data) + + def __len__(self) -> int: + """Length of the Abstract Relation Data dictionary.""" + return len(self.data) + + def __delitem__(self, key: str) -> None: + """Delete an item of the Abstract Relation Data dictionary.""" + self.relation_data.delete_relation_data(self.relation_id, [key]) + + def has_key(self, key: str) -> bool: + """Does the key exist in the Abstract Relation Data dictionary?""" + return key in self.data + + def update(self, items: Dict[str, str]): + """Update the Abstract Relation Data dictionary.""" + self.relation_data.update_relation_data(self.relation_id, items) + + def keys(self) -> KeysView[str]: + """Keys of the Abstract Relation Data dictionary.""" + return self.data.keys() + + def values(self) -> ValuesView[str]: + """Values of the Abstract Relation Data dictionary.""" + return 
self.data.values() + + def items(self) -> ItemsView[str, str]: + """Items of the Abstract Relation Data dictionary.""" + return self.data.items() + + def pop(self, item: str) -> str: + """Pop an item of the Abstract Relation Data dictionary.""" + result = self.relation_data.fetch_my_relation_field(self.relation_id, item) + if not result: + raise KeyError(f"Item {item} doesn't exist.") + self.relation_data.delete_relation_data(self.relation_id, [item]) + return result + + def __contains__(self, item: str) -> bool: + """Does the Abstract Relation Data dictionary contain item?""" + return item in self.data.values() + + def __iter__(self): + """Iterate through the Abstract Relation Data dictionary.""" + return iter(self.data) + + def get(self, key: str, default: Optional[str] = None) -> Optional[str]: + """Safely get an item of the Abstract Relation Data dictionary.""" + try: + if result := self[key]: + return result + except KeyError: + return default + + class Data(ABC): """Base relation data mainpulation (abstract) class.""" @@ -619,11 +793,11 @@ class Data(ABC): # Local map to associate mappings with secrets potentially as a group SECRET_LABEL_MAP = { - "username": SecretGroup.USER, - "password": SecretGroup.USER, - "uris": SecretGroup.USER, - "tls": SecretGroup.TLS, - "tls-ca": SecretGroup.TLS, + "username": SECRET_GROUPS.USER, + "password": SECRET_GROUPS.USER, + "uris": SECRET_GROUPS.USER, + "tls": SECRET_GROUPS.TLS, + "tls-ca": SECRET_GROUPS.TLS, } def __init__( @@ -656,6 +830,11 @@ def secrets_enabled(self): self._jujuversion = JujuVersion.from_environ() return self._jujuversion.has_secrets + @property + def secret_label_map(self): + """Exposing secret-label map via a property -- could be overridden in descendants!""" + return self.SECRET_LABEL_MAP + # Mandatory overrides for internal/helper methods @abstractmethod @@ -710,11 +889,11 @@ def _generate_secret_label( relation_name: str, relation_id: int, group_mapping: SecretGroup ) -> str: """Generate unique group_mappings for secrets within a relation context.""" - return f"{relation_name}.{relation_id}.{group_mapping.value}.secret" + return f"{relation_name}.{relation_id}.{group_mapping}.secret" def _generate_secret_field_name(self, group_mapping: SecretGroup) -> str: """Generate unique group_mappings for secrets within a relation context.""" - return f"{PROV_SECRET_PREFIX}{group_mapping.value}" + return f"{PROV_SECRET_PREFIX}{group_mapping}" def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: """Retrieve the relation that belongs to a secret label.""" @@ -739,8 +918,7 @@ def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: except ModelError: return - @classmethod - def _group_secret_fields(cls, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: """Helper function to arrange secret mappings under their group. NOTE: All unrecognized items end up in the 'extra' secret bucket. 
@@ -748,44 +926,42 @@ def _group_secret_fields(cls, secret_fields: List[str]) -> Dict[SecretGroup, Lis """ secret_fieldnames_grouped = {} for key in secret_fields: - if group := cls.SECRET_LABEL_MAP.get(key): + if group := self.secret_label_map.get(key): secret_fieldnames_grouped.setdefault(group, []).append(key) else: - secret_fieldnames_grouped.setdefault(SecretGroup.EXTRA, []).append(key) + secret_fieldnames_grouped.setdefault(SECRET_GROUPS.EXTRA, []).append(key) return secret_fieldnames_grouped def _get_group_secret_contents( self, relation: Relation, group: SecretGroup, - secret_fields: Optional[Union[Set[str], List[str]]] = None, + secret_fields: Union[Set[str], List[str]] = [], ) -> Dict[str, str]: """Helper function to retrieve collective, requested contents of a secret.""" - if not secret_fields: - secret_fields = [] - if (secret := self._get_relation_secret(relation.id, group)) and ( secret_data := secret.get_content() ): - return {k: v for k, v in secret_data.items() if k in secret_fields} + return { + k: v for k, v in secret_data.items() if not secret_fields or k in secret_fields + } return {} - @classmethod def _content_for_secret_group( - cls, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup ) -> Dict[str, str]: """Select : pairs from input, that belong to this particular Secret group.""" - if group_mapping == SecretGroup.EXTRA: + if group_mapping == SECRET_GROUPS.EXTRA: return { k: v for k, v in content.items() - if k in secret_fields and k not in cls.SECRET_LABEL_MAP.keys() + if k in secret_fields and k not in self.secret_label_map.keys() } return { k: v for k, v in content.items() - if k in secret_fields and cls.SECRET_LABEL_MAP.get(k) == group_mapping + if k in secret_fields and self.secret_label_map.get(k) == group_mapping } @juju_secrets_only @@ -929,6 +1105,10 @@ def _delete_relation_data_without_secrets( # Public interface methods # Handling Relation Fields seamlessly, regardless if in databag or a Juju Secret + def as_dict(self, relation_id: int) -> UserDict: + """Dict behavior representation of the Abstract Data.""" + return DataDict(self, relation_id) + def get_relation(self, relation_name, relation_id) -> Relation: """Safe way of retrieving a relation.""" relation = self._model.get_relation(relation_name, relation_id) @@ -1363,7 +1543,7 @@ def _register_secrets_to_relation(self, relation: Relation, params_name_list: Li if not relation.app: return - for group in SecretGroup: + for group in SECRET_GROUPS.groups(): secret_field = self._generate_secret_field_name(group) if secret_field in params_name_list: if secret_uri := relation.data[relation.app].get(secret_field): @@ -1497,7 +1677,7 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: if self.relation_data.secret_fields: # pyright: ignore [reportAttributeAccessIssue] set_encoded_field( event.relation, - self.charm.app, + self.relation_data.component, REQ_SECRET_FIELDS, self.relation_data.secret_fields, # pyright: ignore [reportAttributeAccessIssue] ) @@ -1508,13 +1688,15 @@ def _on_secret_changed_event(self, event: RelationChangedEvent) -> None: raise NotImplementedError -# Base DataPeer +################################################################################ +# Peer Relation Data +################################################################################ class DataPeerData(RequirerData, ProviderData): """Represents peer relations data.""" - SECRET_FIELDS = 
["operator-password"] + SECRET_FIELDS = [] SECRET_FIELD_NAME = "internal_secret" SECRET_LABEL_MAP = {} @@ -1524,6 +1706,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, ): @@ -1537,6 +1720,18 @@ def __init__( ) self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME self.deleted_label = deleted_label + self._secret_label_map = {} + # Secrets that are being dynamically added within the scope of this event handler run + self._new_secrets = [] + + for group, fields in additional_secret_group_mapping.items(): + if group not in SECRET_GROUPS.groups(): + setattr(SECRET_GROUPS, group, group) + for field in fields: + secret_group = SECRET_GROUPS.get_group(group) + internal_field = self._field_to_internal_name(field, secret_group) + self._secret_label_map.setdefault(group, []).append(internal_field) + self._secret_fields.append(internal_field) @property def scope(self) -> Optional[Scope]: @@ -1546,15 +1741,158 @@ def scope(self) -> Optional[Scope]: if isinstance(self.component, Unit): return Scope.UNIT + @property + def secret_label_map(self) -> Dict[str, str]: + """Property storing secret mappings.""" + return self._secret_label_map + + @property + def static_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return self._secret_fields + + @property + def secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return ( + self.static_secret_fields if self.static_secret_fields else self.current_secret_fields + ) + + @property + def current_secret_fields(self) -> List[str]: + """Helper method to get all currently existing secret fields (added statically or dynamically).""" + if not self.secrets_enabled: + return [] + + if len(self._model.relations[self.relation_name]) > 1: + raise ValueError(f"More than one peer relation on {self.relation_name}") + + relation = self._model.relations[self.relation_name][0] + fields = [] + for group in SECRET_GROUPS.groups(): + if content := self._get_group_secret_contents(relation, group): + fields += [self._field_to_internal_name(field, group) for field in content] + return list(set(fields) | set(self._new_secrets)) + + @juju_secrets_only + @dynamic_secrets_only + def set_secret( + self, + relation_id: int, + field: str, + value: str, + group_mapping: Optional[SecretGroup] = None, + ) -> None: + """Public interface method to add a Relation Data field specifically as a Juju Secret. 
+ + Args: + relation_id: ID of the relation + field: The secret field that is to be added + value: The string value of the secret + group_mapping: The name of the "secret group", in case the field is to be added to an existing secret + """ + full_field = self._field_to_internal_name(field, group_mapping) + if full_field not in self.current_secret_fields: + self._new_secrets.append(full_field) + self.update_relation_data(relation_id, {full_field: value}) + + # Unlike for set_secret(), there's no harm using this operation with static secrets + # The restricion is only added to keep the concept clear + @juju_secrets_only + @dynamic_secrets_only + def get_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to fetch secrets only.""" + full_field = self._field_to_internal_name(field, group_mapping) + if full_field not in self.current_secret_fields: + raise SecretsUnavailableError( + f"Secret {field} from group {group_mapping} was not found" + ) + return self.fetch_my_relation_field(relation_id, full_field) + + @juju_secrets_only + @dynamic_secrets_only + def delete_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to delete secrets only.""" + full_field = self._field_to_internal_name(field, group_mapping) + if full_field not in self.current_secret_fields: + logger.warning(f"Secret {field} from group {group_mapping} was not found") + self.delete_relation_data(relation_id, [full_field]) + + # Helpers + + @staticmethod + def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str: + if not group or group == SECRET_GROUPS.EXTRA: + return field + return f"{field}{GROUP_SEPARATOR}{group}" + + @staticmethod + def _internal_name_to_field(name: str) -> Tuple[str, SecretGroup]: + parts = name.split(GROUP_SEPARATOR) + if not len(parts) > 1: + return (parts[0], SECRET_GROUPS.EXTRA) + secret_group = SECRET_GROUPS.get_group(parts[1]) + if not secret_group: + raise ValueError(f"Invalid secret field {name}") + return (parts[0], secret_group) + + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + """Helper function to arrange secret mappings under their group. + + NOTE: All unrecognized items end up in the 'extra' secret bucket. + Make sure only secret fields are passed! 
+ """ + secret_fieldnames_grouped = {} + for key in secret_fields: + field, group = self._internal_name_to_field(key) + secret_fieldnames_grouped.setdefault(group, []).append(field) + return secret_fieldnames_grouped + + def _content_for_secret_group( + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + ) -> Dict[str, str]: + """Select : pairs from input, that belong to this particular Secret group.""" + if group_mapping == SECRET_GROUPS.EXTRA: + return {k: v for k, v in content.items() if k in self.secret_fields} + return { + self._internal_name_to_field(k)[0]: v + for k, v in content.items() + if k in self.secret_fields + } + + # Event handlers + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + # Overrides of Relation Data handling functions + def _generate_secret_label( self, relation_name: str, relation_id: int, group_mapping: SecretGroup ) -> str: - members = [self._model.app.name] + members = [self._model.app.name, self.relation_name] if self.scope: members.append(self.scope.value) + if group_mapping != SECRET_GROUPS.EXTRA: + members.append(group_mapping) return f"{'.'.join(members)}" - def _generate_secret_field_name(self, group_mapping: SecretGroup = SecretGroup.EXTRA) -> str: + def _generate_secret_field_name(self, group_mapping: SecretGroup = SECRET_GROUPS.EXTRA) -> str: """Generate unique group_mappings for secrets within a relation context.""" return f"{self.secret_field_name}" @@ -1562,7 +1900,7 @@ def _generate_secret_field_name(self, group_mapping: SecretGroup = SecretGroup.E def _get_relation_secret( self, relation_id: int, - group_mapping: SecretGroup = SecretGroup.EXTRA, + group_mapping: SecretGroup = SECRET_GROUPS.EXTRA, relation_name: Optional[str] = None, ) -> Optional[CachedSecret]: """Retrieve a Juju Secret specifically for peer relations. @@ -1596,13 +1934,18 @@ def _get_group_secret_contents( self, relation: Relation, group: SecretGroup, - secret_fields: Optional[Union[Set[str], List[str]]] = None, + secret_fields: Union[Set[str], List[str]] = [], ) -> Dict[str, str]: """Helper function to retrieve collective, requested contents of a secret.""" + secret_fields = [self._internal_name_to_field(k)[0] for k in secret_fields] result = super()._get_group_secret_contents(relation, group, secret_fields) if not self.deleted_label: return result - return {key: result[key] for key in result if result[key] != self.deleted_label} + return { + self._field_to_internal_name(key, group): result[key] + for key in result + if result[key] != self.deleted_label + } def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: """For Rolling Upgrades -- when moving from databag to secrets usage. @@ -1618,14 +1961,7 @@ def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: if self._fetch_relation_data_without_secrets(self.component, relation, [field]): self._delete_relation_data_without_secrets(self.component, relation, [field]) - def _fetch_specific_relation_data( - self, relation: Relation, fields: Optional[List[str]] - ) -> Dict[str, str]: - """Fetch data available (directily or indirectly -- i.e. 
secrets) from the relation.""" - return self._fetch_relation_data_with_secrets( - self.component, self.secret_fields, relation, fields - ) - + @either_static_or_dynamic_secrets def _fetch_my_specific_relation_data( self, relation: Relation, fields: Optional[List[str]] ) -> Dict[str, str]: @@ -1634,6 +1970,7 @@ def _fetch_my_specific_relation_data( self.component, self.secret_fields, relation, fields ) + @either_static_or_dynamic_secrets def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" self._remove_secret_from_databag(relation, list(data.keys())) @@ -1649,6 +1986,7 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non normal_content = {k: v for k, v in data.items() if k in normal_fields} self._update_relation_data_without_secrets(self.component, relation, normal_content) + @either_static_or_dynamic_secrets def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" if self.secret_fields and self.deleted_label: @@ -1698,13 +2036,22 @@ def fetch_relation_field( "fetch_my_relation_data() and fetch_my_relation_field()" ) + def fetch_my_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """Get a single field from the relation data -- owner side. + + Re-implementing the inherited function due to field@group conversion + """ + if relation_data := self.fetch_my_relation_data([relation_id], [field], relation_name): + return relation_data.get(relation_id, {}).get(self._internal_name_to_field(field)[0]) + # Public functions -- inherited fetch_my_relation_data = Data.fetch_my_relation_data - fetch_my_relation_field = Data.fetch_my_relation_field -class DataPeerEventHandlers(EventHandlers): +class DataPeerEventHandlers(RequirerEventHandlers): """Requires-side of the relation.""" def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): @@ -1729,6 +2076,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, unique_key: str = "", @@ -1739,6 +2087,7 @@ def __init__( relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) @@ -1763,6 +2112,7 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, unique_key: str = "", @@ -1773,6 +2123,7 @@ def __init__( relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) @@ -1787,6 +2138,14 @@ def __init__(self, unit: Unit, *args, **kwargs): self.local_unit = unit self.component = unit + def update_relation_data(self, relation_id: int, data: dict) -> None: + """This method makes no sense for a Other Peer Relation.""" + raise NotImplementedError("It's not possible to update data of another unit.") + + def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: + """This method makes no sense for a Other Peer 
Relation.""" + raise NotImplementedError("It's not possible to delete data of another unit.") + class DataPeerOtherUnitEventHandlers(DataPeerEventHandlers): """Requires-side of the relation.""" @@ -1807,23 +2166,29 @@ def __init__( relation_name: str, extra_user_roles: Optional[str] = None, additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, - unique_key: str = "", ): - DataPeerData.__init__( + DataPeerOtherUnitData.__init__( self, + unit, charm.model, relation_name, extra_user_roles, additional_secret_fields, + additional_secret_group_mapping, secret_field_name, deleted_label, ) - DataPeerEventHandlers.__init__(self, charm, self, unique_key) + DataPeerOtherUnitEventHandlers.__init__(self, charm, self) -# General events +################################################################################ +# Cross-charm Relatoins Data Handling and Evenets +################################################################################ + +# Generic events class ExtraRoleEvent(RelationEvent): @@ -2390,7 +2755,7 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: # Check if the database is created # (the database charm shared the credentials). - secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -2462,7 +2827,11 @@ def __init__( DatabaseRequirerEventHandlers.__init__(self, charm, self) -# Kafka related events +################################################################################ +# Charm-specific Relations Data and Events +################################################################################ + +# Kafka Events class KafkaProvidesEvent(RelationEvent): @@ -2704,7 +3073,7 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) if ( "username" in diff.added and "password" in diff.added ) or secret_field_user in diff.added: @@ -2949,8 +3318,8 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): self.relation_data._register_secrets_to_relation(event.relation, diff.added) - secret_field_user = self.relation_data._generate_secret_field_name(SecretGroup.USER) - secret_field_tls = self.relation_data._generate_secret_field_name(SecretGroup.TLS) + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) updates = {"username", "password", "tls", "tls-ca", secret_field_user, secret_field_tls} if len(set(diff._asdict().keys()) - updates) < len(diff): logger.info("authentication updated at: %s", datetime.now()) diff --git a/metadata.yaml b/metadata.yaml index f51e4bc4..215c7406 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -43,6 +43,8 @@ requires: optional: true limit: 1 peers: + tls: + interface: tls cos: interface: cos 
upgrade-version-a: diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 4a82db8c..6546f9c5 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -18,6 +18,7 @@ import relations.cos import relations.database_provides import relations.database_requires +import relations.tls import server_exceptions import upgrade import workload @@ -60,6 +61,7 @@ def __init__(self, *args) -> None: self.on[upgrade.PEER_RELATION_ENDPOINT_NAME].relation_created, self._upgrade_relation_created, ) + self.tls = relations.tls.RelationEndpoint(self) @property @abc.abstractmethod @@ -95,30 +97,28 @@ def _read_only_endpoint(self) -> str: """MySQL Router read-only endpoint""" @property + @abc.abstractmethod def _tls_certificate_saved(self) -> bool: """Whether a TLS certificate is available to use""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return False @property + @abc.abstractmethod def _tls_key(self) -> typing.Optional[str]: """Custom TLS key""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return None @property + @abc.abstractmethod + def _tls_certificate_authority(self) -> typing.Optional[str]: + """Custom TLS certificate authority""" + + @property + @abc.abstractmethod def _tls_certificate(self) -> typing.Optional[str]: """Custom TLS certificate""" - # TODO VM TLS: Update property after implementing TLS on machine_charm - return None + @abc.abstractmethod def is_exposed(self, relation=None) -> bool: - return self._database_provides.is_exposed(relation) - - @property - def _tls_certificate_authority(self) -> typing.Optional[str]: - # TODO VM TLS: Update property after implementing TLS on machine charm - return None + """Whether router is exposed externally""" def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: """Returns the exporter config for MySQLRouter exporter if cos relation exists""" @@ -208,9 +208,13 @@ def wait_until_mysql_router_ready(self) -> None: wait=tenacity.wait_fixed(5), ): with attempt: - for port in (6446, 6447): - with socket.socket() as s: - assert s.connect_ex(("localhost", port)) == 0 + if self.is_exposed(): + for port in (6446, 6447): + with socket.socket() as s: + assert s.connect_ex(("localhost", port)) == 0 + else: + assert self._container.path("/run/mysqlrouter/mysql.sock").exists() + assert self._container.path("/run/mysqlrouter/mysqlro.sock").exists() except AssertionError: logger.exception("Unable to connect to MySQL Router") raise @@ -290,7 +294,7 @@ def reconcile(self, event=None) -> None: # noqa: C901 ) if workload_.container_ready: workload_.reconcile( - tls=self._tls_certificate_saved, + tls=self.tls.relation_exists and not self.tls.is_relation_breaking(event), unit_name=self.unit.name, exporter_config=self._cos_exporter_config(event), key=self._tls_key, diff --git a/src/container.py b/src/container.py index ed8774dc..43a2180c 100644 --- a/src/container.py +++ b/src/container.py @@ -98,10 +98,12 @@ def __init__( mysql_router_command: str, mysql_shell_command: str, mysql_router_password_command: str, + unit_name: str, ) -> None: self._mysql_router_command = mysql_router_command self._mysql_shell_command = mysql_shell_command self._mysql_router_password_command = mysql_router_password_command + self.unit_name = unit_name @property @abc.abstractmethod diff --git a/src/machine_charm.py b/src/machine_charm.py index 7da61c15..3236fef2 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -14,9 +14,9 @@ import abstract_charm import machine_logrotate import 
machine_upgrade +import machine_workload import relations.database_providers_wrapper import snap -import socket_workload import upgrade logger = logging.getLogger(__name__) @@ -35,7 +35,7 @@ def __init__(self, *args) -> None: self._database_provides = relations.database_providers_wrapper.RelationEndpoint( self, self._database_provides ) - self._authenticated_workload_type = socket_workload.AuthenticatedSocketWorkload + self._authenticated_workload_type = machine_workload.AuthenticatedMachineWorkload self.framework.observe(self.on.install, self._on_install) self.framework.observe(self.on.remove, self._on_remove) self.framework.observe(self.on.upgrade_charm, self._on_upgrade_charm) @@ -50,7 +50,7 @@ def _subordinate_relation_endpoint_names(self) -> typing.Optional[typing.Iterabl @property def _container(self) -> snap.Snap: - return snap.Snap() + return snap.Snap(unit_name=self.unit.name) @property def _upgrade(self) -> typing.Optional[machine_upgrade.Upgrade]: @@ -66,20 +66,42 @@ def _logrotate(self) -> machine_logrotate.LogRotate: @property def _host_address(self) -> str: """The host address for the machine.""" - return self.model.get_binding(upgrade.PEER_RELATION_ENDPOINT_NAME).network.bind_address + return self.model.get_binding("juju-info").network.bind_address @property def _read_write_endpoint(self) -> str: - if self._database_provides.is_exposed: + if self.is_exposed(): return f"{self._host_address}:{self.READ_WRITE_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysql.sock")}' @property def _read_only_endpoint(self) -> str: - if self._database_provides.is_exposed: + if self.is_exposed(): return f"{self._host_address}:{self.READ_ONLY_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysqlro.sock")}' + @property + def _tls_certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + return self.tls.certificate_saved + + @property + def _tls_key(self) -> typing.Optional[str]: + """Custom TLS key""" + return self.tls.key + + @property + def _tls_certificate(self) -> typing.Optional[str]: + """Custom TLS certificate""" + return self.tls.certificate + + @property + def _tls_certificate_authority(self) -> typing.Optional[str]: + return self.tls.certificate_authority + + def is_exposed(self, relation=None) -> bool: + return self._database_provides.is_exposed + # ======================= # Handlers # ======================= diff --git a/src/socket_workload.py b/src/machine_workload.py similarity index 63% rename from src/socket_workload.py rename to src/machine_workload.py index 8c3d1b4d..050de09f 100644 --- a/src/socket_workload.py +++ b/src/machine_workload.py @@ -14,33 +14,38 @@ logger = logging.getLogger(__name__) -class AuthenticatedSocketWorkload(workload.AuthenticatedWorkload): +class AuthenticatedMachineWorkload(workload.AuthenticatedWorkload): """Workload with connection to MySQL cluster and with Unix sockets enabled""" # TODO python3.10 min version: Use `list` instead of `typing.List` def _get_bootstrap_command(self, password: str) -> typing.List[str]: command = super()._get_bootstrap_command(password) - bind_address = "0.0.0.0" if self._charm._database_provides.is_exposed else "127.0.0.1" - command.extend( - [ - "--conf-bind-address", - bind_address, - "--conf-use-sockets", - # For unix sockets, authentication fails on first connection if this option is not - # set. 
Workaround for https://bugs.mysql.com/bug.php?id=107291 - "--conf-set-option", - "DEFAULT.server_ssl_mode=PREFERRED", - ] - ) + if self._charm.is_exposed(): + command.extend( + [ + "--conf-bind-address", + "0.0.0.0", + ] + ) + else: + command.extend( + [ + "--conf-use-sockets", + # For unix sockets, authentication fails on first connection if this option is not + # set. Workaround for https://bugs.mysql.com/bug.php?id=107291 + "--conf-set-option", + "DEFAULT.server_ssl_mode=PREFERRED", + ] + ) return command - def _update_configured_socket_file_locations(self) -> None: + def _update_configured_socket_file_locations_and_bind_address(self) -> None: """Update configured socket file locations from `/tmp` to `/run/mysqlrouter`. Called after MySQL Router bootstrap & before MySQL Router service is enabled Change configured location of socket files before socket files are created by MySQL Router - service. + service. Also remove bind_address and bind_port for all router services: rw, ro, x_rw, x_ro Needed since `/tmp` inside a snap is not accessible to non-root users. The socket files must be accessible to applications related via database_provides endpoint. @@ -54,6 +59,8 @@ def _update_configured_socket_file_locations(self) -> None: section["socket"] = str( self._container.path("/run/mysqlrouter") / pathlib.PurePath(section["socket"]).name ) + del section["bind_address"] + del section["bind_port"] with io.StringIO() as output: config.write(output) self._container.router_config_file.write_text(output.getvalue()) @@ -61,4 +68,5 @@ def _update_configured_socket_file_locations(self) -> None: def _bootstrap_router(self, *, tls: bool) -> None: super()._bootstrap_router(tls=tls) - self._update_configured_socket_file_locations() + if not self._charm.is_exposed(): + self._update_configured_socket_file_locations_and_bind_address() diff --git a/src/relations/cos.py b/src/relations/cos.py index 93919d67..2041fe7d 100644 --- a/src/relations/cos.py +++ b/src/relations/cos.py @@ -86,21 +86,21 @@ def relation_exists(self) -> bool: def get_monitoring_password(self) -> str: """Gets the monitoring password from unit peer data, or generate and cache it.""" - monitoring_password = self._secrets.get_secret( + monitoring_password = self._secrets.get_value( relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY ) if monitoring_password: return monitoring_password monitoring_password = utils.generate_password() - self._secrets.set_secret( + self._secrets.set_value( relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, monitoring_password ) return monitoring_password def _reset_monitoring_password(self) -> None: """Reset the monitoring password from unit peer data.""" - self._secrets.set_secret(relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, None) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, self._MONITORING_PASSWORD_KEY, None) def is_relation_breaking(self, event) -> bool: """Whether relation will be broken after the current event is handled.""" diff --git a/src/relations/secrets.py b/src/relations/secrets.py index dfad79a7..e6d7b20d 100644 --- a/src/relations/secrets.py +++ b/src/relations/secrets.py @@ -27,44 +27,42 @@ class RelationSecrets: def __init__( self, charm: "abstract_charm.MySQLRouterCharm", - peer_relation_name: str, + relation_name: str, app_secret_fields: typing.List[str] = [], unit_secret_fields: typing.List[str] = [], ) -> None: self._charm = charm - self._peer_relation_name = peer_relation_name + self._relation_name = relation_name self._peer_relation_app = 
data_interfaces.DataPeer( charm, - relation_name=peer_relation_name, + relation_name=relation_name, additional_secret_fields=app_secret_fields, - secret_field_name=self._SECRET_INTERNAL_LABEL, deleted_label=self._SECRET_DELETED_LABEL, ) self._peer_relation_unit = data_interfaces.DataPeerUnit( charm, - relation_name=peer_relation_name, + relation_name=relation_name, additional_secret_fields=unit_secret_fields, - secret_field_name=self._SECRET_INTERNAL_LABEL, deleted_label=self._SECRET_DELETED_LABEL, ) - def peer_relation_data(self, scope: Scopes) -> data_interfaces.DataPeer: + def _peer_relation_data(self, scope: Scopes) -> data_interfaces.DataPeer: """Returns the peer relation data per scope.""" if scope == APP_SCOPE: return self._peer_relation_app elif scope == UNIT_SCOPE: return self._peer_relation_unit - def get_secret(self, scope: Scopes, key: str) -> typing.Optional[str]: + def get_value(self, scope: Scopes, key: str) -> typing.Optional[str]: """Get secret from the secret storage.""" if scope not in typing.get_args(Scopes): raise ValueError("Unknown secret scope") - peers = self._charm.model.get_relation(self._peer_relation_name) - return self.peer_relation_data(scope).fetch_my_relation_field(peers.id, key) + peers = self._charm.model.get_relation(self._relation_name) + return self._peer_relation_data(scope).fetch_my_relation_field(peers.id, key) - def set_secret( + def set_value( self, scope: Scopes, key: str, value: typing.Optional[str] ) -> typing.Optional[str]: """Set secret from the secret storage.""" @@ -72,15 +70,15 @@ def set_secret( raise ValueError("Unknown secret scope") if not value: - return self.remove_secret(scope, key) + return self._remove_value(scope, key) - peers = self._charm.model.get_relation(self._peer_relation_name) - self.peer_relation_data(scope).update_relation_data(peers.id, {key: value}) + peers = self._charm.model.get_relation(self._relation_name) + self._peer_relation_data(scope).update_relation_data(peers.id, {key: value}) - def remove_secret(self, scope: Scopes, key: str) -> None: + def _remove_value(self, scope: Scopes, key: str) -> None: """Removing a secret.""" if scope not in typing.get_args(Scopes): raise ValueError("Unknown secret scope") - peers = self._charm.model.get_relation(self._peer_relation_name) - self.peer_relation_data(scope).delete_relation_data(peers.id, [key]) + peers = self._charm.model.get_relation(self._relation_name) + self._peer_relation_data(scope).delete_relation_data(peers.id, [key]) diff --git a/src/relations/tls.py b/src/relations/tls.py new file mode 100644 index 00000000..1c13abf7 --- /dev/null +++ b/src/relations/tls.py @@ -0,0 +1,299 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Relation to TLS certificate provider""" + +import base64 +import dataclasses +import json +import logging +import re +import socket +import typing + +import charms.tls_certificates_interface.v2.tls_certificates as tls_certificates +import ops + +import relations.secrets + +if typing.TYPE_CHECKING: + import kubernetes_charm + +logger = logging.getLogger(__name__) + +_PEER_RELATION_ENDPOINT_NAME = "tls" + +_TLS_REQUESTED_CSR = "tls-requested-csr" +_TLS_ACTIVE_CSR = "tls-active-csr" +_TLS_CERTIFICATE = "tls-certificate" +_TLS_CA = "tls-ca" +_TLS_CHAIN = "tls-chain" +_TLS_PRIVATE_KEY = "tls-private-key" +_TLS_FIELDS = [ + _TLS_REQUESTED_CSR, + _TLS_ACTIVE_CSR, + _TLS_CERTIFICATE, + _TLS_CA, + _TLS_CHAIN, + _TLS_PRIVATE_KEY, +] + + +def _generate_private_key() -> str: + """Generate TLS private key.""" + return tls_certificates.generate_private_key().decode("utf-8") + + +@dataclasses.dataclass(kw_only=True) +class _Relation: + """Relation to TLS certificate provider""" + + _charm: "kubernetes_charm.KubernetesRouterCharm" + _interface: tls_certificates.TLSCertificatesRequiresV2 + _secrets: relations.secrets.RelationSecrets + + @property + def certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + for value in ( + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE), + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CA), + ): + if not value: + return False + return True + + @property + def key(self) -> str: + """The TLS private key""" + private_key = self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY) + if not private_key: + private_key = _generate_private_key() + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY, private_key) + return private_key + + @property + def certificate(self) -> str: + """The TLS certificate""" + return self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE) + + @property + def certificate_authority(self) -> str: + """The TLS certificate authority""" + return self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_CA) + + def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> None: + """Save TLS certificate in peer relation unit databag.""" + if ( + event.certificate_signing_request.strip() + != self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR).strip() + ): + logger.warning("Unknown certificate received. 
Ignoring.") + return + if ( + self.certificate_saved + and event.certificate_signing_request.strip() + == self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_ACTIVE_CSR) + ): + # Workaround for https://github.com/canonical/tls-certificates-operator/issues/34 + logger.debug("TLS certificate already saved.") + return + logger.debug(f"Saving TLS certificate {event=}") + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CERTIFICATE, event.certificate) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CA, event.ca) + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_CHAIN, json.dumps(event.chain)) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, + _TLS_ACTIVE_CSR, + self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR), + ) + logger.debug(f"Saved TLS certificate {event=}") + self._charm.reconcile(event=None) + + def _generate_csr(self, key: bytes) -> bytes: + """Generate certificate signing request (CSR).""" + return tls_certificates.generate_csr( + private_key=key, + subject=socket.getfqdn(), + organization=self._charm.app.name, + sans_ip=[ + str(self._charm.model.get_binding("juju-info").network.bind_address), + "127.0.0.1", + ], + ) + + def request_certificate_creation(self): + """Request new TLS certificate from related provider charm.""" + logger.debug("Requesting TLS certificate creation") + csr = self._generate_csr(self.key.encode("utf-8")) + self._interface.request_certificate_creation(certificate_signing_request=csr) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR, csr.decode("utf-8") + ) + logger.debug("Requested TLS certificate creation") + + def request_certificate_renewal(self): + """Request TLS certificate renewal from related provider charm.""" + logger.debug("Requesting TLS certificate renewal") + old_csr = self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_ACTIVE_CSR).encode( + "utf-8" + ) + new_csr = self._generate_csr(self.key.encode("utf-8")) + self._interface.request_certificate_renewal( + old_certificate_signing_request=old_csr, new_certificate_signing_request=new_csr + ) + self._secrets.set_value( + relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR, new_csr.decode("utf-8") + ) + logger.debug("Requested TLS certificate renewal") + + +class RelationEndpoint(ops.Object): + """Relation endpoint and handlers for TLS certificate provider""" + + NAME = "certificates" + + def __init__(self, charm_: "kubernetes_charm.KubernetesRouterCharm") -> None: + super().__init__(charm_, self.NAME) + self._charm = charm_ + self._interface = tls_certificates.TLSCertificatesRequiresV2(self._charm, self.NAME) + + self._secrets = relations.secrets.RelationSecrets( + charm_, + _PEER_RELATION_ENDPOINT_NAME, + unit_secret_fields=[_TLS_PRIVATE_KEY], + ) + + self.framework.observe( + self._charm.on["set-tls-private-key"].action, + self._on_set_tls_private_key, + ) + self.framework.observe( + self._charm.on[self.NAME].relation_created, self._on_tls_relation_created + ) + self.framework.observe( + self._charm.on[self.NAME].relation_broken, self._on_tls_relation_broken + ) + + self.framework.observe( + self._interface.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self._interface.on.certificate_expiring, self._on_certificate_expiring + ) + + @property + def _relation(self) -> typing.Optional[_Relation]: + if not self._charm.model.get_relation(self.NAME): + return + return _Relation( + _charm=self._charm, + _interface=self._interface, + _secrets=self._secrets, + 
) + + @property + def certificate_saved(self) -> bool: + """Whether a TLS certificate is available to use""" + if self._relation is None: + return False + return self._relation.certificate_saved + + @property + def key(self) -> typing.Optional[str]: + """The TLS private key""" + if self._relation is None: + return None + return self._relation.key + + @property + def certificate(self) -> typing.Optional[str]: + """The TLS certificate""" + if self._relation is None: + return None + return self._relation.certificate + + @property + def certificate_authority(self) -> typing.Optional[str]: + """The TLS certificate authority""" + if self._relation is None: + return None + return self._relation.certificate_authority + + @property + def relation_exists(self) -> bool: + """Whether relation with cos exists.""" + return len(self._charm.model.relations.get(self.NAME, [])) == 1 + + @staticmethod + def _parse_tls_key(raw_content: str) -> str: + """Parse TLS key from plain text or base64 format.""" + if re.match(r"(-+(BEGIN|END) [A-Z ]+-+)", raw_content): + return re.sub( + r"(-+(BEGIN|END) [A-Z ]+-+)", + "\n\\1\n", + raw_content, + ) + return base64.b64decode(raw_content).decode("utf-8") + + def is_relation_breaking(self, event) -> bool: + """Whether relation will be broken after the current event is handled.""" + if not self.relation_exists: + return False + + return ( + isinstance(event, ops.RelationBrokenEvent) + and event.relation.id == self._charm.model.relations[self.NAME][0].id + ) + + def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: + """Handle action to set unit TLS private key.""" + logger.debug("Handling set TLS private key action") + if key := event.params.get("internal-key"): + key = self._parse_tls_key(key) + else: + key = _generate_private_key() + event.log("No key provided. Generated new key.") + logger.debug("No TLS key provided via action. Generated new key.") + self._secrets.set_value(relations.secrets.UNIT_SCOPE, _TLS_PRIVATE_KEY, key) + event.log("Saved TLS private key") + logger.debug("Saved TLS private key") + if self._relation is None: + event.log( + "No TLS certificate relation active. Relate a certificate provider charm to enable TLS." + ) + logger.debug("No TLS certificate relation active. 
Skipped certificate request") + else: + try: + self._relation.request_certificate_creation() + except Exception as e: + event.fail(f"Failed to request certificate: {e}") + logger.exception( + "Failed to request certificate after TLS private key set via action" + ) + raise + logger.debug("Handled set TLS private key action") + + def _on_tls_relation_created(self, _) -> None: + """Request certificate when TLS relation created.""" + self._relation.request_certificate_creation() + + def _on_tls_relation_broken(self, _) -> None: + """Delete TLS certificate.""" + logger.debug("Deleting TLS certificate") + for field in _TLS_FIELDS: + self._secrets.set_value(relations.secrets.UNIT_SCOPE, field, None) + self._charm.reconcile(event=None) + logger.debug("Deleted TLS certificate") + + def _on_certificate_available(self, event: tls_certificates.CertificateAvailableEvent) -> None: + """Save TLS certificate.""" + self._relation.save_certificate(event) + + def _on_certificate_expiring(self, event: tls_certificates.CertificateExpiringEvent) -> None: + """Request the new certificate when old certificate is expiring.""" + if event.certificate != self.certificate: + logger.warning("Unknown certificate expiring") + return + + self._relation.request_certificate_renewal() diff --git a/src/snap.py b/src/snap.py index 8886bfd1..bbaf7eb8 100644 --- a/src/snap.py +++ b/src/snap.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) _SNAP_NAME = "charmed-mysql" -REVISION = "98" # Keep in sync with `workload_version` file +REVISION = "101" # Keep in sync with `workload_version` file _snap = snap_lib.SnapCache()[_SNAP_NAME] _UNIX_USERNAME = "snap_daemon" @@ -154,11 +154,12 @@ class Snap(container.Container): _SERVICE_NAME = "mysqlrouter-service" _EXPORTER_SERVICE_NAME = "mysqlrouter-exporter" - def __init__(self) -> None: + def __init__(self, *, unit_name: str) -> None: super().__init__( mysql_router_command=f"{_SNAP_NAME}.mysqlrouter", mysql_shell_command=f"{_SNAP_NAME}.mysqlsh", mysql_router_password_command=f"{_SNAP_NAME}.mysqlrouter-passwd", + unit_name=unit_name, ) @property @@ -175,10 +176,23 @@ def mysql_router_exporter_service_enabled(self) -> bool: def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> None: super().update_mysql_router_service(enabled=enabled, tls=tls) + if tls: - raise NotImplementedError # TODO VM TLS + _snap.set({"mysqlrouter.extra-options": f"--extra-config {self.tls_config_file}"}) + else: + _snap.unset("mysqlrouter.extra-options") + + router_is_running = any( + [ + properties.get("active") + for service, properties in _snap.services.items() + if service == self._SERVICE_NAME + ] + ) - if enabled: + if enabled and router_is_running: + _snap.restart([self._SERVICE_NAME]) + elif enabled: _snap.start([self._SERVICE_NAME], enable=True) else: _snap.stop([self._SERVICE_NAME], disable=True) @@ -193,9 +207,6 @@ def update_mysql_router_exporter_service( certificate_filename: str = None, certificate_authority_filename: str = None, ) -> None: - if tls: - raise NotImplementedError - super().update_mysql_router_exporter_service( enabled=enabled, config=config, @@ -211,14 +222,16 @@ def update_mysql_router_exporter_service( "mysqlrouter-exporter.user": config.username, "mysqlrouter-exporter.password": config.password, "mysqlrouter-exporter.url": config.url, + "mysqlrouter-exporter.service-name": self.unit_name.replace("/", "-"), } ) _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) else: + _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) 
_snap.unset("mysqlrouter-exporter.user") _snap.unset("mysqlrouter-exporter.password") _snap.unset("mysqlrouter-exporter.url") - _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) + _snap.unset("mysqlrouter-exporter.service-name") def upgrade(self, unit: ops.Unit) -> None: """Upgrade snap.""" diff --git a/templates/tls.cnf b/templates/tls.cnf new file mode 100644 index 00000000..1f9331a8 --- /dev/null +++ b/templates/tls.cnf @@ -0,0 +1,8 @@ +[DEFAULT] +client_ssl_mode=REQUIRED +client_ssl_key=$tls_ssl_key_file +client_ssl_cert=$tls_ssl_cert_file + +[http_server] +ssl_key=$tls_ssl_key_file +ssl_cert=$tls_ssl_cert_file From 6dae91c295ce34c96a37288708c2755ca27b9259 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 10 Apr 2024 12:46:12 +0000 Subject: [PATCH 03/31] Avoid using kw_only in dataclasses due to python3.8 in juju 3.1.7 --- src/relations/tls.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/relations/tls.py b/src/relations/tls.py index 1c13abf7..0b4ea645 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -44,7 +44,8 @@ def _generate_private_key() -> str: return tls_certificates.generate_private_key().decode("utf-8") -@dataclasses.dataclass(kw_only=True) +# TODO python3.10 min version: Add `(kw_only=True)` +@dataclasses.dataclass class _Relation: """Relation to TLS certificate provider""" From 716da08928515b9d56337133812d9ccddb37b0cd Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 11 Apr 2024 12:39:24 +0000 Subject: [PATCH 04/31] Add TLS integration test + test COS when related to TLS operator --- poetry.lock | 2 +- pyproject.toml | 2 +- tests/integration/helpers.py | 20 ++- tests/integration/test_exporter.py | 74 +++++++++++ tests/integration/test_tls.py | 197 ++++++++++++++++------------- 5 files changed, 205 insertions(+), 90 deletions(-) diff --git a/poetry.lock b/poetry.lock index 83ff58ca..3b783245 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2335,4 +2335,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "3137e22bdb7d028fe7336192328f3201066688a4af29d3c5b6af125f8543b1a9" +content-hash = "161b70cb8f6ac7ee421f66c670c2926bd971359f83451983d796ded2f0a0a23d" diff --git a/pyproject.toml b/pyproject.toml index a374ace6..18e6715f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ requests = "^2.31.0" # data_platform_libs/v0/data_interfaces.py ops = ">=2.0.0" # tls_certificates_interface/v1/tls_certificates.py -cryptography = "*" +cryptography = ">=42.0.5" jsonschema = "*" # grafana_agent/v0/cos_agent.py pydantic = "<2" diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 595ddf87..fbedb086 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -3,7 +3,7 @@ import itertools import tempfile -from typing import Dict, List +from typing import Dict, List, Optional from juju.unit import Unit from pytest_operator.plugin import OpsTest @@ -222,3 +222,21 @@ async def stop_running_flush_mysqlrouter_cronjobs(ops_test: OpsTest, unit_name: with attempt: if await get_process_pid(ops_test, unit_name, "logrotate"): raise Exception("Failed to stop the flush_mysql_logs logrotate process") + + +async def get_tls_certificate_issuer( + ops_test: OpsTest, + unit_name: str, + socket: Optional[str] = None, + host: Optional[str] = None, + port: Optional[int] = None, +) -> str: + connect_args = f"-unix {socket}" if socket else f"-connect {host}:{port}" + 
get_tls_certificate_issuer_commands = [ + "ssh", + unit_name, + f"openssl s_client -showcerts -starttls mysql {connect_args} < /dev/null | openssl x509 -text | grep Issuer", + ] + return_code, issuer, _ = await ops_test.juju(*get_tls_certificate_issuer_commands) + assert return_code == 0, f"failed to get TLS certificate issuer on {unit_name=}" + return issuer diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 5ab1f672..7e6987c8 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -16,6 +16,7 @@ MYSQL_ROUTER_APP_NAME = "mysql-router" APPLICATION_APP_NAME = "mysql-test-app" GRAFANA_AGENT_APP_NAME = "grafana-agent" +TLS_APP_NAME = "tls-certificates-operator" SLOW_TIMEOUT = 25 * 60 @@ -149,3 +150,76 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s ), "❌ expected connection refused error" else: assert False, "❌ can connect to metrics endpoint without relation with cos" + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: + """Test that the exporter endpoint works when related with TLS""" + http = urllib3.PoolManager() + + mysql_router_app = ops_test.model.applications[MYSQL_ROUTER_APP_NAME] + + logger.info(f"Deploying {TLS_APP_NAME}") + await ops_test.model.deploy( + TLS_APP_NAME, + application_name=TLS_APP_NAME, + channel="stable", + config={"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"}, + ) + + await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) + + logger.info(f"Relation mysqlrouter with {TLS_APP_NAME}") + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + time.sleep(30) + + mysql_test_app = ops_test.model.applications[APPLICATION_APP_NAME] + unit_address = await mysql_test_app.units[0].get_public_address() + + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" + + logger.info("Relating mysqlrouter with grafana agent") + await ops_test.model.relate( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + time.sleep(30) + + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" + + logger.info("Removing relation between mysqlrouter and grafana agent") + await mysql_router_app.remove_relation( + f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" + ) + + time.sleep(30) + + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" + + logger.info(f"Removing relation between mysqlrouter and {TLS_APP_NAME}") + await mysql_router_app.remove_relation( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index 
c871f970..c42df7a8 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -1,89 +1,112 @@ -# Copyright 2023 Canonical Ltd. +# Copyright 2024 Canonical Ltd. # See LICENSE file for licensing details. -# flake8: noqa -# TODO: enable & remove noqa -# import asyncio -# import logging -# -# import pytest -# from pytest_operator.plugin import OpsTest -# -# logger = logging.getLogger(__name__) -# -# MYSQL_APP_NAME = "mysql" -# MYSQL_ROUTER_APP_NAME = "mysqlrouter" -# TEST_APP_NAME = "mysql-test-app" -# TLS_APP_NAME = "tls-certificates-operator" -# SLOW_TIMEOUT = 15 * 60 -# MODEL_CONFIG = {"logging-config": "=INFO;unit=DEBUG"} -# -# -# @pytest.mark.group(1) -# @pytest.mark.abort_on_fail -# async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_series: str) -> None: -# """Test encryption when backend database is using TLS.""" -# # Deploy TLS Certificates operator. -# await ops_test.model.set_config(MODEL_CONFIG) -# logger.info("Deploy and relate all applications") -# async with ops_test.fast_forward(): -# # deploy mysql first -# await ops_test.model.deploy( -# MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=3 -# ) -# tls_config = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} -# -# # ROUTER -# mysqlrouter_charm = await ops_test.build_charm(".") -# -# # tls, test app and router -# await asyncio.gather( -# ops_test.model.deploy( -# mysqlrouter_charm, -# application_name=MYSQL_ROUTER_APP_NAME, -# num_units=None, -# series=mysql_router_charm_series, -# ), -# ops_test.model.deploy( -# TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=tls_config -# ), -# ops_test.model.deploy( -# TEST_APP_NAME, application_name=TEST_APP_NAME, channel="latest/edge" -# ), -# ) -# -# await ops_test.model.relate( -# f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" -# ) -# await ops_test.model.relate( -# f"{TEST_APP_NAME}:database", f"{MYSQL_ROUTER_APP_NAME}:database" -# ) -# -# logger.info("Waiting for applications to become active") -# # We can safely wait only for test application to be ready, given that it will -# # only become active once all the other applications are ready. 
-# await ops_test.model.wait_for_idle([TEST_APP_NAME], status="active", timeout=15 * 60) -# -# -# @pytest.mark.group(1) -# async def test_connected_encryption(ops_test: OpsTest) -> None: -# """Test encryption when backend database is using TLS.""" -# test_app_unit = ops_test.model.applications[TEST_APP_NAME].units[0] -# -# logger.info("Relating TLS with backend database") -# await ops_test.model.relate(TLS_APP_NAME, MYSQL_APP_NAME) -# -# # Wait for hooks start reconfiguring app -# await ops_test.model.block_until( -# lambda: ops_test.model.applications[MYSQL_APP_NAME].status != "active", timeout=4 * 60 -# ) -# await ops_test.model.wait_for_idle(status="active", timeout=15 * 60) -# -# logger.info("Get cipher when TLS is enforced") -# action = await test_app_unit.run_action("get-session-ssl-cipher") -# result = await action.wait() -# -# cipher = result.results["cipher"] -# # this assertion should be true even when TLS is not related to the backend database -# # because by default mysqlrouter will use TLS, unless explicitly disabled, which we never do -# assert cipher == "TLS_AES_256_GCM_SHA384", "Cipher not set" +import asyncio +import logging +import time + +import pytest +from pytest_operator.plugin import OpsTest + +from .helpers import get_tls_certificate_issuer + +logger = logging.getLogger(__name__) + +MYSQL_APP_NAME = "mysql" +MYSQL_ROUTER_APP_NAME = "mysqlrouter" +TEST_APP_NAME = "mysql-test-app" +TLS_APP_NAME = "tls-certificates-operator" +SLOW_TIMEOUT = 15 * 60 +MODEL_CONFIG = {"logging-config": "=INFO;unit=DEBUG"} + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_series: str) -> None: + """Test encryption when backend database is using TLS.""" + await ops_test.model.set_config(MODEL_CONFIG) + logger.info("Deploy and relate all applications") + async with ops_test.fast_forward(): + # deploy mysql first + await ops_test.model.deploy( + MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=1 + ) + tls_config = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} + + # ROUTER + mysqlrouter_charm = await ops_test.build_charm(".") + + # tls, test app and router + await asyncio.gather( + ops_test.model.deploy( + mysqlrouter_charm, + application_name=MYSQL_ROUTER_APP_NAME, + num_units=None, + series=mysql_router_charm_series, + ), + ops_test.model.deploy( + TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=tls_config + ), + ops_test.model.deploy( + TEST_APP_NAME, + application_name=TEST_APP_NAME, + channel="latest/edge", + series=mysql_router_charm_series, + ), + ) + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" + ) + await ops_test.model.relate( + f"{TEST_APP_NAME}:database", f"{MYSQL_ROUTER_APP_NAME}:database" + ) + + logger.info("Waiting for applications to become active") + # We can safely wait only for test application to be ready, given that it will + # only become active once all the other applications are ready. 
+ await ops_test.model.wait_for_idle([TEST_APP_NAME], status="active", timeout=SLOW_TIMEOUT) + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_connected_encryption(ops_test: OpsTest) -> None: + """Test encryption when backend database is using TLS.""" + mysqlrouter_unit = ops_test.model.applications[MYSQL_ROUTER_APP_NAME].units[0] + + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated CA certificate" + + logger.info("Relating TLS with mysqlrouter") + await ops_test.model.relate(TLS_APP_NAME, MYSQL_ROUTER_APP_NAME) + + time.sleep(30) + + logger.info("Getting certificate issuer after relating with tls operator") + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + + logger.info("Removing relation TLS with mysqlrouter") + await ops_test.model.applications[MYSQL_ROUTER_APP_NAME].remove_relation( + f"{TLS_APP_NAME}:certificates", f"{MYSQL_ROUTER_APP_NAME}:certificates" + ) + + time.sleep(30) + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated CA certificate" From 8293c949f09e2b845a27a379950891d0d23c6b11 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 11 Apr 2024 21:04:55 +0000 Subject: [PATCH 05/31] Add integration test for data integrator and tls operator; for external node connectivity --- tests/integration/helpers.py | 4 +- tests/integration/juju_.py | 9 + tests/integration/test_data_integrator.py | 203 ++++++++++++++++++++++ tests/integration/test_database.py | 4 +- tests/integration/test_exporter.py | 12 +- tests/integration/test_tls.py | 16 +- 6 files changed, 237 insertions(+), 11 deletions(-) create mode 100644 tests/integration/juju_.py create mode 100644 tests/integration/test_data_integrator.py diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index fbedb086..c2dec5d8 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -44,11 +44,12 @@ async def get_inserted_data_by_application(unit: Unit) -> str: return result.results.get("data") -async def execute_queries_on_unit( +async def execute_queries_against_unit( unit_address: str, username: str, password: str, queries: List[str], + port: int = 3306, commit: bool = False, ) -> List: """Execute given MySQL queries on a unit. @@ -67,6 +68,7 @@ async def execute_queries_on_unit( "user": username, "password": password, "host": unit_address, + "port": port, "raise_on_warnings": False, } diff --git a/tests/integration/juju_.py b/tests/integration/juju_.py new file mode 100644 index 00000000..968962e0 --- /dev/null +++ b/tests/integration/juju_.py @@ -0,0 +1,9 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
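+# Shared helper for the integration tests: import as `from . import juju_` and
+# check `juju_.has_secrets` where behaviour differs between Juju 2.9 and 3.x.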
+ +import importlib.metadata + +# libjuju version != juju agent version, but the major version should be identical—which is good +# enough to check for secrets +_libjuju_version = importlib.metadata.version("juju") +has_secrets = int(_libjuju_version.split(".")[0]) >= 3 diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py new file mode 100644 index 00000000..54d86ce0 --- /dev/null +++ b/tests/integration/test_data_integrator.py @@ -0,0 +1,203 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +import asyncio +import logging +import time +import typing + +import pytest +from pytest_operator.plugin import OpsTest + +from . import juju_ +from .helpers import execute_queries_against_unit, get_tls_certificate_issuer + +logger = logging.getLogger(__name__) + +MYSQL_APP_NAME = "mysql" +MYSQL_ROUTER_APP_NAME = "mysqlrouter" +DATA_INTEGRATOR_APP_NAME = "data-integrator" +SLOW_TIMEOUT = 15 * 60 +TEST_DATABASE = "testdatabase" +TEST_TABLE = "testtable" + +if juju_.has_secrets: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} + + +async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: + """Helper to get the credentials from the deployed data integrator""" + data_integrator_unit = ops_test.model.applications[DATA_INTEGRATOR_APP_NAME].units[0] + action = await data_integrator_unit.run_action(action_name="get-credentials") + result = await action.wait() + assert result.results["return-code"] == 0 + assert result.results["ok"] == "True" + return result.results["mysql"] + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_external_connectivity_with_data_integrator( + ops_test: OpsTest, mysql_router_charm_series: str +) -> None: + """Test encryption when backend database is using TLS.""" + logger.info("Deploy and relate all applications") + async with ops_test.fast_forward(): + # deploy mysql first + await ops_test.model.deploy( + MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=1 + ) + data_integrator_config = {"database-name": TEST_DATABASE} + + # ROUTER + mysqlrouter_charm = await ops_test.build_charm(".") + + # tls, data-integrator and router + await asyncio.gather( + ops_test.model.deploy( + mysqlrouter_charm, + application_name=MYSQL_ROUTER_APP_NAME, + num_units=None, + series=mysql_router_charm_series, + ), + ops_test.model.deploy( + TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=TLS_CONFIG + ), + ops_test.model.deploy( + DATA_INTEGRATOR_APP_NAME, + application_name=DATA_INTEGRATOR_APP_NAME, + channel="latest/edge", + series=mysql_router_charm_series, + config=data_integrator_config, + ), + ) + + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:backend-database", f"{MYSQL_APP_NAME}:database" + ) + await ops_test.model.relate( + f"{DATA_INTEGRATOR_APP_NAME}:mysql", f"{MYSQL_ROUTER_APP_NAME}:database" + ) + + logger.info("Waiting for applications to become active") + # We can safely wait only for test application to be ready, given that it will + # only become active once all the other applications are ready. 
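+    # (in this test, the data-integrator application plays that role)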
+ await ops_test.model.wait_for_idle( + [DATA_INTEGRATOR_APP_NAME], status="active", timeout=SLOW_TIMEOUT + ) + + credentials = await get_data_integrator_credentials(ops_test) + databases = await execute_queries_against_unit( + credentials["endpoints"].split(",")[0].split(":")[0], + credentials["username"], + credentials["password"], + ["SHOW DATABASES;"], + port=credentials["endpoints"].split(",")[0].split(":")[1], + ) + assert TEST_DATABASE in databases + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsTest) -> None: + """Test data integrator along with TLS operator""" + logger.info("Ensuring no data exists in the test database") + + credentials = await get_data_integrator_credentials(ops_test) + [database_host, database_port] = credentials["endpoints"].split(",")[0].split(":") + mysqlrouter_unit = ops_test.model.applications[MYSQL_ROUTER_APP_NAME].units[0] + + show_tables_sql = [ + f"SHOW TABLES IN {TEST_DATABASE};", + ] + tables = await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + show_tables_sql, + port=database_port, + ) + assert len(tables) == 0, f"Unexpected tables in the {TEST_DATABASE} database" + + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + logger.info(f"Relating mysqlrouter with {TLS_APP_NAME}") + await ops_test.model.relate( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + time.sleep(30) + + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + + create_table_and_insert_data_sql = [ + f"CREATE TABLE {TEST_DATABASE}.{TEST_TABLE} (id int, primary key(id));", + f"INSERT INTO {TEST_DATABASE}.{TEST_TABLE} VALUES (1), (2);", + ] + await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + create_table_and_insert_data_sql, + port=database_port, + commit=True, + ) + + select_data_sql = [ + f"SELECT * FROM {TEST_DATABASE}.{TEST_TABLE};", + ] + data = await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + select_data_sql, + port=database_port, + ) + assert data == [1, 2], f"Unexpected data in table {TEST_DATABASE}.{TEST_TABLE}" + + logger.info(f"Removing relation between mysqlrouter and {TLS_APP_NAME}") + await ops_test.model.applications[MYSQL_ROUTER_APP_NAME].remove_relation( + f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" + ) + + time.sleep(30) + + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" + + select_data_sql = [ + f"SELECT * FROM {TEST_DATABASE}.{TEST_TABLE};", + ] + data = await execute_queries_against_unit( + database_host, + credentials["username"], + credentials["password"], + select_data_sql, + port=database_port, + ) + assert data == [1, 2], f"Unexpected data in table {TEST_DATABASE}.{TEST_TABLE}" diff --git a/tests/integration/test_database.py 
b/tests/integration/test_database.py index 86f34a8a..b073a62b 100644 --- a/tests/integration/test_database.py +++ b/tests/integration/test_database.py @@ -9,7 +9,7 @@ from pytest_operator.plugin import OpsTest from .helpers import ( - execute_queries_on_unit, + execute_queries_against_unit, get_inserted_data_by_application, get_server_config_credentials, ) @@ -118,7 +118,7 @@ async def test_database_relation(ops_test: OpsTest, mysql_router_charm_series: s select_inserted_data_sql = ( f"SELECT data FROM `{TEST_DATABASE}`.{TEST_TABLE} WHERE data = '{inserted_data}'", ) - selected_data = await execute_queries_on_unit( + selected_data = await execute_queries_against_unit( mysql_unit_address, server_config_credentials["username"], server_config_credentials["password"], diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 7e6987c8..187fd372 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -10,15 +10,23 @@ import urllib3 from pytest_operator.plugin import OpsTest +from . import juju_ + logger = logging.getLogger(__name__) MYSQL_APP_NAME = "mysql" MYSQL_ROUTER_APP_NAME = "mysql-router" APPLICATION_APP_NAME = "mysql-test-app" GRAFANA_AGENT_APP_NAME = "grafana-agent" -TLS_APP_NAME = "tls-certificates-operator" SLOW_TIMEOUT = 25 * 60 +if juju_.has_secrets: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} + @pytest.mark.group(1) @pytest.mark.abort_on_fail @@ -165,7 +173,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", - config={"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"}, + config=TLS_CONFIG, ) await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index c42df7a8..28c144e5 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -8,6 +8,7 @@ import pytest from pytest_operator.plugin import OpsTest +from . 
import juju_ from .helpers import get_tls_certificate_issuer logger = logging.getLogger(__name__) @@ -15,23 +16,26 @@ MYSQL_APP_NAME = "mysql" MYSQL_ROUTER_APP_NAME = "mysqlrouter" TEST_APP_NAME = "mysql-test-app" -TLS_APP_NAME = "tls-certificates-operator" SLOW_TIMEOUT = 15 * 60 -MODEL_CONFIG = {"logging-config": "=INFO;unit=DEBUG"} + +if juju_.has_secrets: + TLS_APP_NAME = "self-signed-certificates" + TLS_CONFIG = {"ca-common-name": "Test CA"} +else: + TLS_APP_NAME = "tls-certificates-operator" + TLS_CONFIG = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} @pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_series: str) -> None: """Test encryption when backend database is using TLS.""" - await ops_test.model.set_config(MODEL_CONFIG) logger.info("Deploy and relate all applications") async with ops_test.fast_forward(): # deploy mysql first await ops_test.model.deploy( MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=1 ) - tls_config = {"generate-self-signed-certificates": "true", "ca-common-name": "Test CA"} # ROUTER mysqlrouter_charm = await ops_test.build_charm(".") @@ -45,7 +49,7 @@ async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_ser series=mysql_router_charm_series, ), ops_test.model.deploy( - TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=tls_config + TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=TLS_CONFIG ), ops_test.model.deploy( TEST_APP_NAME, @@ -81,7 +85,7 @@ async def test_connected_encryption(ops_test: OpsTest) -> None: ) assert ( "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer - ), "Expected mysqlrouter autogenerated CA certificate" + ), "Expected mysqlrouter autogenerated certificate" logger.info("Relating TLS with mysqlrouter") await ops_test.model.relate(TLS_APP_NAME, MYSQL_ROUTER_APP_NAME) From 5b3627b29d82735986ae668f802d24f55116ffbf Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 11 Apr 2024 21:54:42 +0000 Subject: [PATCH 06/31] Skip data integrator tests on focal --- tests/integration/markers.py | 10 ++++++++++ tests/integration/test_data_integrator.py | 5 ++++- tests/integration/test_exporter.py | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 tests/integration/markers.py diff --git a/tests/integration/markers.py b/tests/integration/markers.py new file mode 100644 index 00000000..41d6def0 --- /dev/null +++ b/tests/integration/markers.py @@ -0,0 +1,10 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +import pytest + +from . import juju_ + +only_with_juju_secrets = pytest.mark.skipif( + not juju_.has_secrets, reason="Requires juju version w/secrets" +) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index 54d86ce0..8fed6c5a 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -9,8 +9,9 @@ import pytest from pytest_operator.plugin import OpsTest -from . import juju_ from .helpers import execute_queries_against_unit, get_tls_certificate_issuer +from . import juju_ +from . 
import markers logger = logging.getLogger(__name__) @@ -41,6 +42,7 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: @pytest.mark.group(1) @pytest.mark.abort_on_fail +@markers.only_with_juju_secrets async def test_external_connectivity_with_data_integrator( ops_test: OpsTest, mysql_router_charm_series: str ) -> None: @@ -103,6 +105,7 @@ async def test_external_connectivity_with_data_integrator( @pytest.mark.group(1) @pytest.mark.abort_on_fail +@markers.only_with_juju_secrets async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsTest) -> None: """Test data integrator along with TLS operator""" logger.info("Ensuring no data exists in the test database") diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 187fd372..1255e653 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -178,7 +178,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) - logger.info(f"Relation mysqlrouter with {TLS_APP_NAME}") + logger.info(f"Relating mysqlrouter with {TLS_APP_NAME}") await ops_test.model.relate( f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" From 56c0c94d4781f278d879ae45055fd8740ca046d5 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 11 Apr 2024 21:59:01 +0000 Subject: [PATCH 07/31] Run format --- tests/integration/test_data_integrator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index 8fed6c5a..637f46fc 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -9,9 +9,8 @@ import pytest from pytest_operator.plugin import OpsTest +from . import juju_, markers from .helpers import execute_queries_against_unit, get_tls_certificate_issuer -from . import juju_ -from . 
import markers logger = logging.getLogger(__name__) From 61e2c8cb08b1be49a86bd417e8123afdaf7fb803 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 11 Apr 2024 22:19:38 +0000 Subject: [PATCH 08/31] Use latest/stable for data integrator --- tests/integration/test_data_integrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index 637f46fc..da0168f8 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -71,7 +71,7 @@ async def test_external_connectivity_with_data_integrator( ops_test.model.deploy( DATA_INTEGRATOR_APP_NAME, application_name=DATA_INTEGRATOR_APP_NAME, - channel="latest/edge", + channel="latest/stable", series=mysql_router_charm_series, config=data_integrator_config, ), From 6b47d0959ea23ec2ea418a1e91ca58c8595f75f9 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 11:35:55 +0000 Subject: [PATCH 09/31] Avoid running data integration tests when focal only --- .github/workflows/ci.yaml | 5 ++++- tests/integration/markers.py | 10 ---------- tests/integration/test_data_integrator.py | 4 +--- 3 files changed, 5 insertions(+), 14 deletions(-) delete mode 100644 tests/integration/markers.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 421eee2c..6835f1db 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -116,6 +116,7 @@ jobs: - juju-snap-channel: "2.9/stable" agent-version: "2.9.45" libjuju-version: "2.9.44.1" + if: ${{ ! (matrix.groups.path_to_test_file == "tests/integration/test_data_integrator.py" && matrix.ubuntu-versions.series == "focal") }} name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint @@ -156,7 +157,9 @@ jobs: echo "mark_expression=not unstable" >> "$GITHUB_OUTPUT" fi - name: Run integration tests - run: tox run -e integration -- "${{ matrix.groups.path_to_test_file }}" --group="${{ matrix.groups.group_number }}" -m '${{ steps.select-test-stability.outputs.mark_expression }}' --mysql-router-charm-series=${{ matrix.ubuntu-versions.series }} --mysql-router-charm-bases-index=${{ matrix.ubuntu-versions.bases-index }} + if: + run: + tox run -e integration -- "${{ matrix.groups.path_to_test_file }}" --group="${{ matrix.groups.group_number }}" -m '${{ steps.select-test-stability.outputs.mark_expression }}' --mysql-router-charm-series=${{ matrix.ubuntu-versions.series }} --mysql-router-charm-bases-index=${{ matrix.ubuntu-versions.bases-index }} env: LIBJUJU_VERSION_SPECIFIER: ${{ matrix.libjuju-version }} SECRETS_FROM_GITHUB: | diff --git a/tests/integration/markers.py b/tests/integration/markers.py deleted file mode 100644 index 41d6def0..00000000 --- a/tests/integration/markers.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright 2024 Canonical Ltd. -# See LICENSE file for licensing details. - -import pytest - -from . import juju_ - -only_with_juju_secrets = pytest.mark.skipif( - not juju_.has_secrets, reason="Requires juju version w/secrets" -) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index da0168f8..d4d84c7e 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -9,7 +9,7 @@ import pytest from pytest_operator.plugin import OpsTest -from . import juju_, markers +from . 
import juju_ from .helpers import execute_queries_against_unit, get_tls_certificate_issuer logger = logging.getLogger(__name__) @@ -41,7 +41,6 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: @pytest.mark.group(1) @pytest.mark.abort_on_fail -@markers.only_with_juju_secrets async def test_external_connectivity_with_data_integrator( ops_test: OpsTest, mysql_router_charm_series: str ) -> None: @@ -104,7 +103,6 @@ async def test_external_connectivity_with_data_integrator( @pytest.mark.group(1) @pytest.mark.abort_on_fail -@markers.only_with_juju_secrets async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsTest) -> None: """Test data integrator along with TLS operator""" logger.info("Ensuring no data exists in the test database") From eddf296e3a56dc8b1b0ab66bb8f63f4e3eea219f Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 11:38:43 +0000 Subject: [PATCH 10/31] Attempt at correcting ci.yaml invalid workflow file --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6835f1db..e0dad988 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -116,7 +116,7 @@ jobs: - juju-snap-channel: "2.9/stable" agent-version: "2.9.45" libjuju-version: "2.9.44.1" - if: ${{ ! (matrix.groups.path_to_test_file == "tests/integration/test_data_integrator.py" && matrix.ubuntu-versions.series == "focal") }} + if: (! (${{ matrix.groups.path_to_test_file }} == "tests/integration/test_data_integrator.py" && ${{ matrix.ubuntu-versions.series == "focal" }})) name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint From 5fa3bc783528c1259ae10005be8602f6100f6d54 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 11:41:43 +0000 Subject: [PATCH 11/31] Use matrix exclude instead of if conditions --- .github/workflows/ci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e0dad988..9e2de215 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -116,7 +116,9 @@ jobs: - juju-snap-channel: "2.9/stable" agent-version: "2.9.45" libjuju-version: "2.9.44.1" - if: (! 
(${{ matrix.groups.path_to_test_file }} == "tests/integration/test_data_integrator.py" && ${{ matrix.ubuntu-versions.series == "focal" }})) + exclude: + - groups.path_to_test_file: "tests/integration/test_data_integrator.py" + ubuntu-versions.series: "focal" name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint From 26235fd2bf5046adb305df7a653da32b8d069e5c Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 11:56:09 +0000 Subject: [PATCH 12/31] Remove quotes from exclusion group --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9e2de215..1c90c294 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -117,8 +117,8 @@ jobs: agent-version: "2.9.45" libjuju-version: "2.9.44.1" exclude: - - groups.path_to_test_file: "tests/integration/test_data_integrator.py" - ubuntu-versions.series: "focal" + - groups.path_to_test_file: tests/integration/test_data_integrator.py + ubuntu-versions.series: focal name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint From 3657e83fd880b63d3933a0a7de98866d32fc1fae Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 12:05:57 +0000 Subject: [PATCH 13/31] Another attempt at matrix exclusion --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1c90c294..2c84fdc2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -117,8 +117,8 @@ jobs: agent-version: "2.9.45" libjuju-version: "2.9.44.1" exclude: - - groups.path_to_test_file: tests/integration/test_data_integrator.py - ubuntu-versions.series: focal + - groups: {path_to_test_file: tests/integration/test_data_integrator.py} + ubuntu-versions: {series: focal} name: ${{ matrix.juju-snap-channel }} - (GH hosted) ${{ matrix.groups.job_name }} | ${{ matrix.ubuntu-versions.series }} needs: - lint From b7afd299858fa5dea6601f7dc95df946493cfad6 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 12:20:51 +0000 Subject: [PATCH 14/31] Use juju_.has_secrets to be correctly handle action result return code in data integrator test --- tests/integration/test_data_integrator.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index d4d84c7e..e573717c 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -34,7 +34,10 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: data_integrator_unit = ops_test.model.applications[DATA_INTEGRATOR_APP_NAME].units[0] action = await data_integrator_unit.run_action(action_name="get-credentials") result = await action.wait() - assert result.results["return-code"] == 0 + if juju_.has_secrets: + assert result.results["Code"] == 0 + else: + assert result.results["return-code"] == 0 assert result.results["ok"] == "True" return result.results["mysql"] From d9c072ca38592cb73e9285f37238a674bfc01ddc Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 12:38:23 +0000 Subject: [PATCH 15/31] Fix mixup in return code keys across juju versions --- tests/integration/test_data_integrator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index e573717c..b3a9232e 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -35,9 +35,9 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: action = await data_integrator_unit.run_action(action_name="get-credentials") result = await action.wait() if juju_.has_secrets: - assert result.results["Code"] == 0 + assert result.results["return-ode"] == 0 else: - assert result.results["return-code"] == 0 + assert result.results["Code"] == 0 assert result.results["ok"] == "True" return result.results["mysql"] From 79fd8efc15daa9f058a30ad30c7be72530ee2d11 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 12 Apr 2024 12:42:54 +0000 Subject: [PATCH 16/31] Fix typo --- tests/integration/test_data_integrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index b3a9232e..e5b6be34 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -35,7 +35,7 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: action = await data_integrator_unit.run_action(action_name="get-credentials") result = await action.wait() if juju_.has_secrets: - assert result.results["return-ode"] == 0 + assert result.results["return-code"] == 0 else: assert result.results["Code"] == 0 assert result.results["ok"] == "True" From ee4b09862143abb00c36232590ac72f383d91e5b Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 17 Apr 2024 20:11:17 +0000 Subject: [PATCH 17/31] Address PR feedback --- .github/workflows/ci.yaml | 4 +- poetry.lock | 2 +- pyproject.toml | 8 ++-- src/abstract_charm.py | 53 +++++++++++++++++++-- src/container.py | 2 +- src/machine_charm.py | 38 ++++++++++----- src/relations/database_providers_wrapper.py | 8 ++-- src/relations/database_provides.py | 36 +++++++++----- src/relations/tls.py | 30 ++++-------- src/snap.py | 19 +++----- src/workload.py | 24 ++++++---- tests/integration/test_data_integrator.py | 2 +- tests/unit/conftest.py | 30 ++++++++++-- 13 files changed, 168 insertions(+), 88 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2c84fdc2..9c3915d7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -159,9 +159,7 @@ jobs: echo "mark_expression=not unstable" >> "$GITHUB_OUTPUT" fi - name: Run integration tests - if: - run: - tox run -e integration -- "${{ matrix.groups.path_to_test_file }}" --group="${{ matrix.groups.group_number }}" -m '${{ steps.select-test-stability.outputs.mark_expression }}' --mysql-router-charm-series=${{ matrix.ubuntu-versions.series }} --mysql-router-charm-bases-index=${{ matrix.ubuntu-versions.bases-index }} + run: tox run -e integration -- "${{ matrix.groups.path_to_test_file }}" --group="${{ matrix.groups.group_number }}" -m '${{ steps.select-test-stability.outputs.mark_expression }}' --mysql-router-charm-series=${{ matrix.ubuntu-versions.series }} --mysql-router-charm-bases-index=${{ matrix.ubuntu-versions.bases-index }} env: LIBJUJU_VERSION_SPECIFIER: ${{ matrix.libjuju-version }} SECRETS_FROM_GITHUB: | diff --git a/poetry.lock b/poetry.lock index 3b783245..86e877dc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2335,4 +2335,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" 
-content-hash = "161b70cb8f6ac7ee421f66c670c2926bd971359f83451983d796ded2f0a0a23d" +content-hash = "c5644d4eb9a9c63d32b04b39388f7cee93eaf1b5e4063ddd0876c73a06ee5145" diff --git a/pyproject.toml b/pyproject.toml index 18e6715f..7a2b6d76 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 -ops = "^2.0.0" +ops = "^2.8.0" tenacity = "^8.2.3" poetry-core = "^1.7.0" jinja2 = "^3.1.2" @@ -18,7 +18,7 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py -ops = ">=2.0.0" +ops = ">=2.8.0" # tls_certificates_interface/v1/tls_certificates.py cryptography = ">=42.0.5" jsonschema = "*" @@ -52,7 +52,7 @@ pytest = "^7.4.0" pytest-xdist = "^3.3.1" pytest-cov = "^4.1.0" ops-scenario = "^5.4.1" -ops = ">=2.0.0" +ops = ">=2.8.0" pytest-mock = "^3.11.1" [tool.poetry.group.integration.dependencies] @@ -64,7 +64,7 @@ pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workf juju = "3.2.0.1" mysql-connector-python = "~8.0.33" tenacity = "^8.2.2" -ops = ">=2.0.0" +ops = ">=2.8.0" pytest-mock = "^3.11.1" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 6546f9c5..f9830612 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -29,6 +29,9 @@ class MySQLRouterCharm(ops.CharmBase, abc.ABC): """MySQL Router charm""" + _READ_WRITE_PORT = 6446 + _READ_ONLY_PORT = 6447 + def __init__(self, *args) -> None: super().__init__(*args) # Instantiate before registering other event observers @@ -96,6 +99,16 @@ def _read_write_endpoint(self) -> str: def _read_only_endpoint(self) -> str: """MySQL Router read-only endpoint""" + @property + @abc.abstractmethod + def _exposed_read_write_endpoint(self) -> str: + """The exposed read-write endpoint""" + + @property + @abc.abstractmethod + def _exposed_read_only_endpoint(self) -> str: + """The exposed read-only endpoint""" + @property @abc.abstractmethod def _tls_certificate_saved(self) -> bool: @@ -116,6 +129,11 @@ def _tls_certificate_authority(self) -> typing.Optional[str]: def _tls_certificate(self) -> typing.Optional[str]: """Custom TLS certificate""" + @property + @abc.abstractmethod + def _substrate(self) -> str: + """Returns the substrate of the charm: vm or k8s""" + @abc.abstractmethod def is_exposed(self, relation=None) -> bool: """Whether router is exposed externally""" @@ -208,19 +226,38 @@ def wait_until_mysql_router_ready(self) -> None: wait=tenacity.wait_fixed(5), ): with attempt: - if self.is_exposed(): + if self._substrate == "k8s" or self.is_exposed(): for port in (6446, 6447): with socket.socket() as s: assert s.connect_ex(("localhost", port)) == 0 else: - assert self._container.path("/run/mysqlrouter/mysql.sock").exists() - assert self._container.path("/run/mysqlrouter/mysqlro.sock").exists() + for socket_file in ( + "/run/mysqlrouter/mysql.sock", + "/run/mysqlrouter/mysqlro.sock", + ): + assert self._container.path(socket_file).exists() + with socket.socket(socket.AF_UNIX) as s: + assert s.connect_ex(str(self._container.path(socket_file))) == 0 except AssertionError: logger.exception("Unable to connect to MySQL Router") raise else: logger.debug("MySQL Router is ready") + @abc.abstractmethod + def _reconcile_node_port(self, event) -> None: + """Reconcile node port. + + Only applies to Kubernetes charm + """ + + @abc.abstractmethod + def _reconcile_ports(self) -> None: + """Reconcile exposed ports. 
+ + Only applies to Machine charm + """ + # ======================= # Handlers # ======================= @@ -286,21 +323,29 @@ def reconcile(self, event=None) -> None: # noqa: C901 and isinstance(workload_, workload.AuthenticatedWorkload) and workload_.container_ready ): + self._reconcile_node_port(event=event) self._database_provides.reconcile_users( event=event, router_read_write_endpoint=self._read_write_endpoint, router_read_only_endpoint=self._read_only_endpoint, + exposed_read_write_endpoint=self._exposed_read_write_endpoint, + exposed_read_only_endpoint=self._exposed_read_only_endpoint, shell=workload_.shell, ) if workload_.container_ready: workload_.reconcile( - tls=self.tls.relation_exists and not self.tls.is_relation_breaking(event), + tls=self._tls_certificate_saved, unit_name=self.unit.name, exporter_config=self._cos_exporter_config(event), key=self._tls_key, certificate=self._tls_certificate, certificate_authority=self._tls_certificate_authority, ) + if not self._upgrade.in_progress and isinstance( + workload_, workload.AuthenticatedWorkload + ): + self._reconcile_ports() + # Empty waiting status means we're waiting for database requires relation before # starting workload if not workload_.status or workload_.status == ops.WaitingStatus(): diff --git a/src/container.py b/src/container.py index 43a2180c..2a42e574 100644 --- a/src/container.py +++ b/src/container.py @@ -103,7 +103,7 @@ def __init__( self._mysql_router_command = mysql_router_command self._mysql_shell_command = mysql_shell_command self._mysql_router_password_command = mysql_router_password_command - self.unit_name = unit_name + self._unit_name = unit_name @property @abc.abstractmethod diff --git a/src/machine_charm.py b/src/machine_charm.py index 3236fef2..fcd61feb 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -26,9 +26,6 @@ class MachineSubordinateRouterCharm(abstract_charm.MySQLRouterCharm): """MySQL Router machine subordinate charm""" - READ_WRITE_PORT = 6446 - READ_ONLY_PORT = 6447 - def __init__(self, *args) -> None: super().__init__(*args) # DEPRECATED shared-db: Enable legacy "mysql-shared" interface @@ -64,22 +61,26 @@ def _logrotate(self) -> machine_logrotate.LogRotate: return machine_logrotate.LogRotate(container_=self._container) @property - def _host_address(self) -> str: + def host_address(self) -> str: """The host address for the machine.""" - return self.model.get_binding("juju-info").network.bind_address + return str(self.model.get_binding("juju-info").network.bind_address) @property def _read_write_endpoint(self) -> str: - if self.is_exposed(): - return f"{self._host_address}:{self.READ_WRITE_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysql.sock")}' @property def _read_only_endpoint(self) -> str: - if self.is_exposed(): - return f"{self._host_address}:{self.READ_ONLY_PORT}" return f'file://{self._container.path("/run/mysqlrouter/mysqlro.sock")}' + @property + def _exposed_read_write_endpoint(self) -> str: + return f"{self.host_address}:{self._READ_WRITE_PORT}" + + @property + def _exposed_read_only_endpoint(self) -> str: + return f"{self.host_address}:{self._READ_ONLY_PORT}" + @property def _tls_certificate_saved(self) -> bool: """Whether a TLS certificate is available to use""" @@ -102,6 +103,21 @@ def _tls_certificate_authority(self) -> typing.Optional[str]: def is_exposed(self, relation=None) -> bool: return self._database_provides.is_exposed + def _reconcile_node_port(self, event) -> None: + """Only applies to Kubernetes charm, so no-op.""" + pass + + def 
_reconcile_ports(self) -> None: + if self.is_exposed(): + ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] + else: + ports = [] + self.unit.set_ports(*ports) + + @property + def _substrate(self) -> str: + return "vm" + # ======================= # Handlers # ======================= @@ -146,10 +162,6 @@ def _on_force_upgrade_action(self, event: ops.ActionEvent) -> None: event.set_results({"result": f"Forcefully upgraded {self.unit.name}"}) logger.debug("Forced upgrade") - def reconcile(self, event=None) -> None: - self._database_provides.reconcile_ports() - super().reconcile(event=event) - if __name__ == "__main__": ops.main.main(MachineSubordinateRouterCharm) diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 51300f1f..2f594ddc 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -43,16 +43,14 @@ def is_exposed(self) -> bool: """Whether the relation is exposed""" return self._database_provides.is_exposed - def reconcile_ports(self) -> None: - """Reconcile ports for this unit""" - self._database_provides.reconcile_ports() - def reconcile_users( self, *, event, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create requested users and delete inactive users. @@ -65,6 +63,8 @@ def reconcile_users( event=event, router_read_write_endpoint=router_read_write_endpoint, router_read_only_endpoint=router_read_only_endpoint, + exposed_read_write_endpoint=exposed_read_write_endpoint, + exposed_read_only_endpoint=exposed_read_only_endpoint, shell=shell, ) self._deprecated_shared_db.reconcile_users(event=event, shell=shell) diff --git a/src/relations/database_provides.py b/src/relations/database_provides.py index e6f98cd5..be95576f 100644 --- a/src/relations/database_provides.py +++ b/src/relations/database_provides.py @@ -70,6 +70,11 @@ def __init__( # Application charm databag databag = remote_databag.RemoteDatabag(interface=interface, relation=relation) self._database: str = databag["database"] + # Whether endpoints should be externally accessible + # (e.g. 
when related to `data-integrator` charm) + # Implements DA073 - Add Expose Flag to the Database Interface + # https://docs.google.com/document/d/1Y7OZWwMdvF8eEMuVKrqEfuFV3JOjpqLHL7_GPqJpRHU + self._external_connectivity = databag.get("external-node-connectivity") == "true" if databag.get("extra-user-roles"): raise _UnsupportedExtraUserRole( app_name=relation.app.name, endpoint_name=relation.name @@ -100,6 +105,8 @@ def create_database_and_user( *, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create database & user and update databag.""" @@ -115,11 +122,23 @@ def create_database_and_user( password = shell.create_application_database_and_user( username=username, database=self._database ) + + rw_endpoint = ( + exposed_read_write_endpoint + if self._external_connectivity + else router_read_write_endpoint + ) + ro_endpoint = ( + exposed_read_only_endpoint + if self._external_connectivity + else router_read_only_endpoint + ) + self._set_databag( username=username, password=password, - router_read_write_endpoint=router_read_write_endpoint, - router_read_only_endpoint=router_read_only_endpoint, + router_read_write_endpoint=rw_endpoint, + router_read_only_endpoint=ro_endpoint, ) @@ -189,21 +208,14 @@ def is_exposed(self) -> bool: [data.get("external-node-connectivity") == "true" for data in relation_data.values()] ) - def reconcile_ports(self) -> None: - """Reconcile ports for this unit""" - if self.is_exposed: - self._charm.unit.open_port("tcp", self._charm.READ_WRITE_PORT) - self._charm.unit.open_port("tcp", self._charm.READ_ONLY_PORT) - else: - self._charm.unit.close_port("tcp", self._charm.READ_WRITE_PORT) - self._charm.unit.close_port("tcp", self._charm.READ_ONLY_PORT) - def reconcile_users( self, *, event, router_read_write_endpoint: str, router_read_only_endpoint: str, + exposed_read_write_endpoint: str, + exposed_read_only_endpoint: str, shell: mysql_shell.Shell, ) -> None: """Create requested users and delete inactive users. 
@@ -235,6 +247,8 @@ def reconcile_users( relation.create_database_and_user( router_read_write_endpoint=router_read_write_endpoint, router_read_only_endpoint=router_read_only_endpoint, + exposed_read_write_endpoint=exposed_read_write_endpoint, + exposed_read_only_endpoint=exposed_read_only_endpoint, shell=shell, ) for relation in self._shared_users: diff --git a/src/relations/tls.py b/src/relations/tls.py index 0b4ea645..5a704110 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -17,7 +17,7 @@ import relations.secrets if typing.TYPE_CHECKING: - import kubernetes_charm + import abstract_charm logger = logging.getLogger(__name__) @@ -49,7 +49,7 @@ def _generate_private_key() -> str: class _Relation: """Relation to TLS certificate provider""" - _charm: "kubernetes_charm.KubernetesRouterCharm" + _charm: "abstract_charm.MySQLRouterCharm" _interface: tls_certificates.TLSCertificatesRequiresV2 _secrets: relations.secrets.RelationSecrets @@ -113,14 +113,15 @@ def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> def _generate_csr(self, key: bytes) -> bytes: """Generate certificate signing request (CSR).""" + sans_ip = ["127.0.0.1"] # needed for the HTTP server when related with COS + if self._charm.is_exposed(): + sans_ip.append(self._charm.host_address) + return tls_certificates.generate_csr( private_key=key, subject=socket.getfqdn(), organization=self._charm.app.name, - sans_ip=[ - str(self._charm.model.get_binding("juju-info").network.bind_address), - "127.0.0.1", - ], + sans_ip=sans_ip, ) def request_certificate_creation(self): @@ -154,7 +155,7 @@ class RelationEndpoint(ops.Object): NAME = "certificates" - def __init__(self, charm_: "kubernetes_charm.KubernetesRouterCharm") -> None: + def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: super().__init__(charm_, self.NAME) self._charm = charm_ self._interface = tls_certificates.TLSCertificatesRequiresV2(self._charm, self.NAME) @@ -221,11 +222,6 @@ def certificate_authority(self) -> typing.Optional[str]: return None return self._relation.certificate_authority - @property - def relation_exists(self) -> bool: - """Whether relation with cos exists.""" - return len(self._charm.model.relations.get(self.NAME, [])) == 1 - @staticmethod def _parse_tls_key(raw_content: str) -> str: """Parse TLS key from plain text or base64 format.""" @@ -237,16 +233,6 @@ def _parse_tls_key(raw_content: str) -> str: ) return base64.b64decode(raw_content).decode("utf-8") - def is_relation_breaking(self, event) -> bool: - """Whether relation will be broken after the current event is handled.""" - if not self.relation_exists: - return False - - return ( - isinstance(event, ops.RelationBrokenEvent) - and event.relation.id == self._charm.model.relations[self.NAME][0].id - ) - def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: """Handle action to set unit TLS private key.""" logger.debug("Handling set TLS private key action") diff --git a/src/snap.py b/src/snap.py index bbaf7eb8..57cc4d0b 100644 --- a/src/snap.py +++ b/src/snap.py @@ -182,18 +182,13 @@ def update_mysql_router_service(self, *, enabled: bool, tls: bool = None) -> Non else: _snap.unset("mysqlrouter.extra-options") - router_is_running = any( - [ - properties.get("active") - for service, properties in _snap.services.items() - if service == self._SERVICE_NAME - ] - ) + router_is_running = _snap.services[self._SERVICE_NAME]["active"] - if enabled and router_is_running: - _snap.restart([self._SERVICE_NAME]) - elif enabled: - 
_snap.start([self._SERVICE_NAME], enable=True) + if enabled: + if router_is_running: + _snap.restart([self._SERVICE_NAME]) + else: + _snap.start([self._SERVICE_NAME], enable=True) else: _snap.stop([self._SERVICE_NAME], disable=True) @@ -222,7 +217,7 @@ def update_mysql_router_exporter_service( "mysqlrouter-exporter.user": config.username, "mysqlrouter-exporter.password": config.password, "mysqlrouter-exporter.url": config.url, - "mysqlrouter-exporter.service-name": self.unit_name.replace("/", "-"), + "mysqlrouter-exporter.service-name": self._unit_name.replace("/", "-"), } ) _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) diff --git a/src/workload.py b/src/workload.py index 920c3da4..74538bc6 100644 --- a/src/workload.py +++ b/src/workload.py @@ -99,9 +99,11 @@ def _tls_config_file_data(self) -> str: return config_string @property - def _custom_tls_enabled(self) -> bool: + def _custom_certificate(self) -> typing.Optional[str]: """Whether custom TLS certs are enabled for MySQL Router""" - return self._tls_key_file.exists() and self._tls_certificate_file.exists() + if not self._tls_key_file.exists() or not self._tls_certificate_file.exists(): + return None + return self._tls_certificate_file.read_text() def cleanup_monitoring_user(self) -> None: """Clean up router REST API user for mysqlrouter exporter.""" @@ -331,26 +333,29 @@ def reconcile( "`key`, `certificate`, and `certificate_authority` arguments required when tls=True" ) - # self._custom_tls_enabled` will change after we enable or disable TLS - tls_was_enabled = self._custom_tls_enabled + custom_certificate = self._custom_certificate if tls: self._enable_tls( key=key, certificate=certificate, certificate_authority=certificate_authority ) - if not tls_was_enabled and self._container.mysql_router_service_enabled: + if custom_certificate != certificate and self._container.mysql_router_service_enabled: self._restart(tls=tls) else: self._disable_tls() - if tls_was_enabled and self._container.mysql_router_service_enabled: + if custom_certificate and self._container.mysql_router_service_enabled: self._restart(tls=tls) # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. # Therefore, if the host or port changes, we do not need to restart MySQL Router. 
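+        # Re-bootstrap when the exposure mode changed: the unix socket file is only
+        # present when the router was bootstrapped for socket (non-exposed) endpoints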
- if not self._container.mysql_router_service_enabled: + is_charm_exposed = self._charm.is_exposed() + socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() + require_rebootstrap = is_charm_exposed == socket_file_exists + if not self._container.mysql_router_service_enabled or require_rebootstrap: logger.debug("Enabling MySQL Router service") self._cleanup_after_upgrade_or_potential_container_restart() - self._container.create_router_rest_api_credentials_file() # create an empty credentials file + # create an empty credentials file, if the file does not exist + self._container.create_router_rest_api_credentials_file() self._bootstrap_router(tls=tls) self.shell.add_attributes_to_mysql_router_user( username=self._router_username, router_id=self._router_id, unit_name=unit_name @@ -361,7 +366,8 @@ def reconcile( self._charm.wait_until_mysql_router_ready() if (not self._container.mysql_router_exporter_service_enabled and exporter_config) or ( - self._container.mysql_router_exporter_service_enabled and tls_was_enabled != tls + self._container.mysql_router_exporter_service_enabled + and custom_certificate != certificate ): logger.debug("Enabling MySQL Router exporter service") self.setup_monitoring_user() diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index e5b6be34..172b85c3 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -37,7 +37,7 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: if juju_.has_secrets: assert result.results["return-code"] == 0 else: - assert result.results["Code"] == 0 + assert result.results["Code"] == "0" assert result.results["ok"] == "True" return result.results["mysql"] diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index e0b10f49..88f04d0d 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -3,8 +3,8 @@ from unittest.mock import PropertyMock +import ops import pytest -from ops import JujuVersion from pytest_mock import MockerFixture import snap @@ -100,6 +100,30 @@ def stop(self, services: list[str] = None, *_, **__): monkeypatch.setattr("snap._Path.mkdir", lambda *args, **kwargs: None) monkeypatch.setattr("snap._Path.rmtree", lambda *args, **kwargs: None) + def _network_get(*args, **kwargs) -> dict: + """Patch for the not-yet-implemented testing backend needed for `bind_address`. 
+ + This can be used for cases such as: + self.model.get_binding(event.relation).network.bind_address + Will always return '10.1.157.116' + """ + return ops.model.Network( + { + "bind-addresses": [ + { + "mac-address": "", + "interface-name": "", + "addresses": [{"hostname": "", "value": "10.1.157.116", "cidr": ""}], + } + ], + "bind-address": "10.1.157.116", + "egress-subnets": ["10.152.183.65/32"], + "ingress-addresses": ["10.152.183.65"], + } + ) + + monkeypatch.setattr("ops.model.Binding._network_get", _network_get) + @pytest.fixture(autouse=True, params=["juju2", "juju3"]) def juju_has_secrets(mocker: MockerFixture, request): @@ -110,11 +134,11 @@ def juju_has_secrets(mocker: MockerFixture, request): """ if request.param == "juju3": mocker.patch.object( - JujuVersion, "has_secrets", new_callable=PropertyMock + ops.JujuVersion, "has_secrets", new_callable=PropertyMock ).return_value = False return False else: mocker.patch.object( - JujuVersion, "has_secrets", new_callable=PropertyMock + ops.JujuVersion, "has_secrets", new_callable=PropertyMock ).return_value = True return True From 6f4b271b41d78744ec7971e149e2a3c31e55a292 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 17 Apr 2024 20:30:53 +0000 Subject: [PATCH 18/31] Add missing monkeypatch method for unit tests --- tests/unit/conftest.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 88f04d0d..4db64af1 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -77,8 +77,10 @@ def start(self, services: list[str] = None, *_, **__): for service in services: assert service in ("mysqlrouter-service", "mysqlrouter-exporter") - self.services["mysqlrouter-service"]["active"] = "mysqlrouter-service" in services - self.services["mysqlrouter-exporter"]["active"] = "mysqlrouter-exporter" in services + if "mysqlrouter-service" in services: + self.services["mysqlrouter-service"]["active"] = True + if "mysqlrouter-exporter" in services: + self.services["mysqlrouter-exporter"]["active"] = True def stop(self, services: list[str] = None, *_, **__): for service in services: @@ -89,6 +91,12 @@ def stop(self, services: list[str] = None, *_, **__): if "mysqlrouter-exporter" in services: self.services["mysqlrouter-exporter"]["active"] = False + def restart(self, services: list[str] = []): + if "mysqlrouter-service" in services: + self.services["mysqlrouter-service"]["active"] = True + if "mysqlrouter-exporter" in services: + self.services["mysqlrouter-exporter"]["active"] = True + monkeypatch.setattr(snap, "_snap", Snap()) monkeypatch.setattr( From eba3c8fdc7d9990095fe14abdbceb413bddf1467 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 18 Apr 2024 09:36:39 +0000 Subject: [PATCH 19/31] Avoid re-bootstrapping in workload's reconcile method --- src/abstract_charm.py | 2 +- src/machine_charm.py | 2 +- src/workload.py | 5 +---- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/abstract_charm.py b/src/abstract_charm.py index f9830612..a4d15833 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -135,7 +135,7 @@ def _substrate(self) -> str: """Returns the substrate of the charm: vm or k8s""" @abc.abstractmethod - def is_exposed(self, relation=None) -> bool: + def is_exposed(self, relation=None) -> typing.Optional[bool]: """Whether router is exposed externally""" def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: diff --git a/src/machine_charm.py b/src/machine_charm.py index 
fcd61feb..b2d252e1 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -100,7 +100,7 @@ def _tls_certificate(self) -> typing.Optional[str]: def _tls_certificate_authority(self) -> typing.Optional[str]: return self.tls.certificate_authority - def is_exposed(self, relation=None) -> bool: + def is_exposed(self, relation=None) -> typing.Optional[bool]: return self._database_provides.is_exposed def _reconcile_node_port(self, event) -> None: diff --git a/src/workload.py b/src/workload.py index 74538bc6..5d92e660 100644 --- a/src/workload.py +++ b/src/workload.py @@ -348,10 +348,7 @@ def reconcile( # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. # Therefore, if the host or port changes, we do not need to restart MySQL Router. - is_charm_exposed = self._charm.is_exposed() - socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() - require_rebootstrap = is_charm_exposed == socket_file_exists - if not self._container.mysql_router_service_enabled or require_rebootstrap: + if not self._container.mysql_router_service_enabled: logger.debug("Enabling MySQL Router service") self._cleanup_after_upgrade_or_potential_container_restart() # create an empty credentials file, if the file does not exist From 2e276baaca63a6ec0489da1c7f2b1eda15cb9cef Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 18 Apr 2024 10:59:57 +0000 Subject: [PATCH 20/31] Update charmed-mysql snap revision to latest 8.0/edge revision --- src/snap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/snap.py b/src/snap.py index 57cc4d0b..678726df 100644 --- a/src/snap.py +++ b/src/snap.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) _SNAP_NAME = "charmed-mysql" -REVISION = "101" # Keep in sync with `workload_version` file +REVISION = "102" # Keep in sync with `workload_version` file _snap = snap_lib.SnapCache()[_SNAP_NAME] _UNIX_USERNAME = "snap_daemon" From 0cf8b71945c9b688de614df16bead63c0a5b968f Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Fri, 19 Apr 2024 19:37:48 +0000 Subject: [PATCH 21/31] Address PR feedback --- poetry.lock | 2 +- pyproject.toml | 8 +-- src/abstract_charm.py | 51 ++++------------- src/machine_charm.py | 61 +++++++++++++-------- src/machine_workload.py | 4 +- src/relations/database_providers_wrapper.py | 4 +- src/relations/database_provides.py | 3 +- src/relations/tls.py | 2 +- src/workload.py | 11 ++-- tests/unit/conftest.py | 2 +- 10 files changed, 67 insertions(+), 81 deletions(-) diff --git a/poetry.lock b/poetry.lock index 86e877dc..dde40646 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2335,4 +2335,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "c5644d4eb9a9c63d32b04b39388f7cee93eaf1b5e4063ddd0876c73a06ee5145" +content-hash = "b9001a690635d55fcf91bd8b6ab037ca454b318484a61626138fd73289a1e201" diff --git a/pyproject.toml b/pyproject.toml index 7a2b6d76..ab4c37ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 -ops = "^2.8.0" +ops = "^2.12.0" tenacity = "^8.2.3" poetry-core = "^1.7.0" jinja2 = "^3.1.2" @@ -18,7 +18,7 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py -ops = ">=2.8.0" +ops = ">=2.12.0" # tls_certificates_interface/v1/tls_certificates.py cryptography = ">=42.0.5" jsonschema = "*" @@ -52,7 +52,7 
@@ pytest = "^7.4.0" pytest-xdist = "^3.3.1" pytest-cov = "^4.1.0" ops-scenario = "^5.4.1" -ops = ">=2.8.0" +ops = ">=2.12.0" pytest-mock = "^3.11.1" [tool.poetry.group.integration.dependencies] @@ -64,7 +64,7 @@ pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workf juju = "3.2.0.1" mysql-connector-python = "~8.0.33" tenacity = "^8.2.2" -ops = ">=2.8.0" +ops = ">=2.12.0" pytest-mock = "^3.11.1" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index a4d15833..92e72d2e 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -5,11 +5,9 @@ import abc import logging -import socket import typing import ops -import tenacity import container import lifecycle @@ -31,6 +29,8 @@ class MySQLRouterCharm(ops.CharmBase, abc.ABC): _READ_WRITE_PORT = 6446 _READ_ONLY_PORT = 6447 + _READ_WRITE_X_PORT = 6448 + _READ_ONLY_X_PORT = 6449 def __init__(self, *args) -> None: super().__init__(*args) @@ -111,32 +111,28 @@ def _exposed_read_only_endpoint(self) -> str: @property @abc.abstractmethod + def is_exposed(self) -> typing.Optional[bool]: + """Whether router is exposed externally""" + + @property def _tls_certificate_saved(self) -> bool: """Whether a TLS certificate is available to use""" + return self.tls.certificate_saved @property - @abc.abstractmethod def _tls_key(self) -> typing.Optional[str]: """Custom TLS key""" + return self.tls.key @property - @abc.abstractmethod def _tls_certificate_authority(self) -> typing.Optional[str]: """Custom TLS certificate authority""" + return self.tls.certificate_authority @property - @abc.abstractmethod def _tls_certificate(self) -> typing.Optional[str]: """Custom TLS certificate""" - - @property - @abc.abstractmethod - def _substrate(self) -> str: - """Returns the substrate of the charm: vm or k8s""" - - @abc.abstractmethod - def is_exposed(self, relation=None) -> typing.Optional[bool]: - """Whether router is exposed externally""" + return self.tls.certificate def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]: """Returns the exporter config for MySQLRouter exporter if cos relation exists""" @@ -212,37 +208,12 @@ def set_status(self, *, event, app=True, unit=True) -> None: self.unit.status = self._determine_unit_status(event=event) logger.debug(f"Set unit status to {self.unit.status}") + @abc.abstractmethod def wait_until_mysql_router_ready(self) -> None: """Wait until a connection to MySQL Router is possible. Retry every 5 seconds for up to 30 seconds. 
""" - logger.debug("Waiting until MySQL Router is ready") - self.unit.status = ops.MaintenanceStatus("MySQL Router starting") - try: - for attempt in tenacity.Retrying( - reraise=True, - stop=tenacity.stop_after_delay(30), - wait=tenacity.wait_fixed(5), - ): - with attempt: - if self._substrate == "k8s" or self.is_exposed(): - for port in (6446, 6447): - with socket.socket() as s: - assert s.connect_ex(("localhost", port)) == 0 - else: - for socket_file in ( - "/run/mysqlrouter/mysql.sock", - "/run/mysqlrouter/mysqlro.sock", - ): - assert self._container.path(socket_file).exists() - with socket.socket(socket.AF_UNIX) as s: - assert s.connect_ex(str(self._container.path(socket_file))) == 0 - except AssertionError: - logger.exception("Unable to connect to MySQL Router") - raise - else: - logger.debug("MySQL Router is ready") @abc.abstractmethod def _reconcile_node_port(self, event) -> None: diff --git a/src/machine_charm.py b/src/machine_charm.py index b2d252e1..4409825f 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -7,9 +7,11 @@ """MySQL Router machine charm""" import logging +import socket import typing import ops +import tenacity import abstract_charm import machine_logrotate @@ -82,41 +84,52 @@ def _exposed_read_only_endpoint(self) -> str: return f"{self.host_address}:{self._READ_ONLY_PORT}" @property - def _tls_certificate_saved(self) -> bool: - """Whether a TLS certificate is available to use""" - return self.tls.certificate_saved - - @property - def _tls_key(self) -> typing.Optional[str]: - """Custom TLS key""" - return self.tls.key - - @property - def _tls_certificate(self) -> typing.Optional[str]: - """Custom TLS certificate""" - return self.tls.certificate - - @property - def _tls_certificate_authority(self) -> typing.Optional[str]: - return self.tls.certificate_authority - - def is_exposed(self, relation=None) -> typing.Optional[bool]: - return self._database_provides.is_exposed + def is_exposed(self) -> typing.Optional[bool]: + return self._database_provides.external_connectivity def _reconcile_node_port(self, event) -> None: """Only applies to Kubernetes charm, so no-op.""" pass def _reconcile_ports(self) -> None: - if self.is_exposed(): + if self.is_exposed: ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] else: ports = [] self.unit.set_ports(*ports) - @property - def _substrate(self) -> str: - return "vm" + def wait_until_mysql_router_ready(self) -> None: + logger.debug("Waiting until MySQL Router is ready") + self.unit.status = ops.MaintenanceStatus("MySQL Router starting") + try: + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(30), + wait=tenacity.wait_fixed(5), + ): + with attempt: + if self.is_exposed: + for port in ( + self._READ_WRITE_PORT, + self._READ_ONLY_PORT, + self._READ_WRITE_X_PORT, + self._READ_ONLY_X_PORT, + ): + with socket.socket() as s: + assert s.connect_ex(("localhost", port)) == 0 + else: + for socket_file in ( + "/run/mysqlrouter/mysql.sock", + "/run/mysqlrouter/mysqlro.sock", + ): + assert self._container.path(socket_file).exists() + with socket.socket(socket.AF_UNIX) as s: + assert s.connect_ex(str(self._container.path(socket_file))) == 0 + except AssertionError: + logger.exception("Unable to connect to MySQL Router") + raise + else: + logger.debug("MySQL Router is ready") # ======================= # Handlers diff --git a/src/machine_workload.py b/src/machine_workload.py index 050de09f..e8a87332 100644 --- a/src/machine_workload.py +++ b/src/machine_workload.py @@ -20,7 +20,7 @@ class 
AuthenticatedMachineWorkload(workload.AuthenticatedWorkload): # TODO python3.10 min version: Use `list` instead of `typing.List` def _get_bootstrap_command(self, password: str) -> typing.List[str]: command = super()._get_bootstrap_command(password) - if self._charm.is_exposed(): + if self._charm.is_exposed: command.extend( [ "--conf-bind-address", @@ -68,5 +68,5 @@ def _update_configured_socket_file_locations_and_bind_address(self) -> None: def _bootstrap_router(self, *, tls: bool) -> None: super()._bootstrap_router(tls=tls) - if not self._charm.is_exposed(): + if not self._charm.is_exposed: self._update_configured_socket_file_locations_and_bind_address() diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 2f594ddc..3dc22a8b 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -39,9 +39,9 @@ def __init__( ) @property - def is_exposed(self) -> bool: + def external_connectivity(self) -> bool: """Whether the relation is exposed""" - return self._database_provides.is_exposed + return self._database_provides.external_connectivity def reconcile_users( self, diff --git a/src/relations/database_provides.py b/src/relations/database_provides.py index be95576f..8ef50f51 100644 --- a/src/relations/database_provides.py +++ b/src/relations/database_provides.py @@ -181,7 +181,6 @@ class RelationEndpoint: def __init__(self, charm_: "abstract_charm.MySQLRouterCharm") -> None: self._interface = data_interfaces.DatabaseProvides(charm_, relation_name=self._NAME) - self._charm = charm_ charm_.framework.observe(charm_.on[self._NAME].relation_created, charm_.reconcile) charm_.framework.observe(self._interface.on.database_requested, charm_.reconcile) @@ -201,7 +200,7 @@ def _shared_users(self) -> typing.List[_RelationWithSharedUser]: return shared_users @property - def is_exposed(self) -> bool: + def external_connectivity(self) -> bool: """Whether the relation is exposed.""" relation_data = self._interface.fetch_relation_data(fields=["external-node-connectivity"]) return any( diff --git a/src/relations/tls.py b/src/relations/tls.py index 5a704110..4d18e110 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -114,7 +114,7 @@ def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> def _generate_csr(self, key: bytes) -> bytes: """Generate certificate signing request (CSR).""" sans_ip = ["127.0.0.1"] # needed for the HTTP server when related with COS - if self._charm.is_exposed(): + if self._charm.is_exposed: sans_ip.append(self._charm.host_address) return tls_certificates.generate_csr( diff --git a/src/workload.py b/src/workload.py index 5d92e660..0043c4c9 100644 --- a/src/workload.py +++ b/src/workload.py @@ -101,9 +101,8 @@ def _tls_config_file_data(self) -> str: @property def _custom_certificate(self) -> typing.Optional[str]: """Whether custom TLS certs are enabled for MySQL Router""" - if not self._tls_key_file.exists() or not self._tls_certificate_file.exists(): - return None - return self._tls_certificate_file.read_text() + if self._tls_key_file.exists() and self._tls_certificate_file.exists(): + return self._tls_certificate_file.read_text() def cleanup_monitoring_user(self) -> None: """Clean up router REST API user for mysqlrouter exporter.""" @@ -333,6 +332,7 @@ def reconcile( "`key`, `certificate`, and `certificate_authority` arguments required when tls=True" ) + # `self._custom_certificate` will change after we enable/disable TLS custom_certificate = 
self._custom_certificate if tls: self._enable_tls( @@ -348,7 +348,10 @@ def reconcile( # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. # Therefore, if the host or port changes, we do not need to restart MySQL Router. - if not self._container.mysql_router_service_enabled: + is_charm_exposed = self._charm.is_exposed + socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() + require_rebootstrap = is_charm_exposed == socket_file_exists + if not self._container.mysql_router_service_enabled or require_rebootstrap: logger.debug("Enabling MySQL Router service") self._cleanup_after_upgrade_or_potential_container_restart() # create an empty credentials file, if the file does not exist diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 4db64af1..8da3e362 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -33,7 +33,7 @@ def disable_tenacity_retry(monkeypatch): @pytest.fixture(autouse=True) def patch(monkeypatch): monkeypatch.setattr( - "abstract_charm.MySQLRouterCharm.wait_until_mysql_router_ready", + "machine_charm.MachineSubordinateRouterCharm.wait_until_mysql_router_ready", lambda *args, **kwargs: None, ) monkeypatch.setattr("workload.AuthenticatedWorkload._router_username", "") From 0a36768a90d2d5d11e35ef86bed110e60d742491 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Mon, 22 Apr 2024 12:43:42 +0000 Subject: [PATCH 22/31] Extend wait time in exporter tests + revert ops dependency to <2.10.0 --- poetry.lock | 11 ++++------- pyproject.toml | 11 ++++++----- tests/integration/test_exporter.py | 10 +++++----- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index dde40646..d75e16e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1073,22 +1073,19 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "ops" -version = "2.12.0" +version = "2.9.0" description = "The Python library behind great charms" optional = false python-versions = ">=3.8" files = [ - {file = "ops-2.12.0-py3-none-any.whl", hash = "sha256:b6f7db8aa2886351d0a2527f0df6c8a34e0d9cf90ddfbb91e734f73259df8ddf"}, - {file = "ops-2.12.0.tar.gz", hash = "sha256:7d88522914728caa13aaf1689637f8b573eaf5d38b7f2b8cf135406ee6ef0fc3"}, + {file = "ops-2.9.0-py3-none-any.whl", hash = "sha256:1d443e4d45e0c2443b8334d37a177287f22a12ee0cb02a30cf7c3159316cb643"}, + {file = "ops-2.9.0.tar.gz", hash = "sha256:d3c541659eded56f42f9c18270408cc6313895968f1360b3f1de75c99cc99ada"}, ] [package.dependencies] PyYAML = "==6.*" websocket-client = "==1.*" -[package.extras] -docs = ["canonical-sphinx-extensions", "furo", "linkify-it-py", "myst-parser", "pyspelling", "sphinx (==6.2.1)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-design", "sphinx-notfound-page", "sphinx-tabs", "sphinxcontrib-jquery", "sphinxext-opengraph"] - [[package]] name = "ops-scenario" version = "5.4.1" @@ -2335,4 +2332,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "b9001a690635d55fcf91bd8b6ab037ca454b318484a61626138fd73289a1e201" +content-hash = "de9ecf1614faca0d8cf34a62e4da500d2f883c983f0e59ada726bb3090a5e586" diff --git a/pyproject.toml b/pyproject.toml index ab4c37ea..c98ec95b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 -ops = "^2.12.0" +ops = "<2.10.0" tenacity = "^8.2.3" poetry-core = 
"^1.7.0" jinja2 = "^3.1.2" @@ -18,8 +18,9 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py -ops = ">=2.12.0" -# tls_certificates_interface/v1/tls_certificates.py +ops = "<2.10.0" +# tls_certificates_interface/v2/tls_certificates.py +# tls_certificates lib v2 uses a feature only available in cryptography >=42.0.5 cryptography = ">=42.0.5" jsonschema = "*" # grafana_agent/v0/cos_agent.py @@ -52,7 +53,7 @@ pytest = "^7.4.0" pytest-xdist = "^3.3.1" pytest-cov = "^4.1.0" ops-scenario = "^5.4.1" -ops = ">=2.12.0" +ops = "<2.10.0" pytest-mock = "^3.11.1" [tool.poetry.group.integration.dependencies] @@ -64,7 +65,7 @@ pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workf juju = "3.2.0.1" mysql-connector-python = "~8.0.33" tenacity = "^8.2.2" -ops = ">=2.12.0" +ops = "<2.10.0" pytest-mock = "^3.11.1" diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 1255e653..133055e9 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -135,7 +135,7 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + time.sleep(60) jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" @@ -148,7 +148,7 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + time.sleep(60) try: http.request("GET", f"http://{unit_address}:49152/metrics") @@ -184,7 +184,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" ) - time.sleep(30) + time.sleep(60) mysql_test_app = ops_test.model.applications[APPLICATION_APP_NAME] unit_address = await mysql_test_app.units[0].get_public_address() @@ -203,7 +203,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + time.sleep(60) jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" @@ -216,7 +216,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(30) + time.sleep(60) try: http.request("GET", f"http://{unit_address}:49152/metrics") From 1134e434b2ef8bc46ba96ce64c11a94f84dd086b Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Mon, 22 Apr 2024 15:02:10 +0000 Subject: [PATCH 23/31] Use tenacity to retry checking exporter endpoints instead of using time.sleep() --- tests/integration/test_exporter.py | 110 +++++++++++++++++------------ 1 file changed, 65 insertions(+), 45 deletions(-) diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 133055e9..953c0220 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -4,7 +4,7 @@ import asyncio import logging -import time +import tenacity import pytest import urllib3 @@ -135,29 +135,37 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - 
time.sleep(60) - - jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") - assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" - assert "mysqlrouter_route_health" in str( - jmx_resp.data - ), "❌ did not find expected metric in response" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(120), + wait=tenacity.wait_fixed(10), + ): + with attempt: + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" logger.info("Removing relation between mysqlrouter and grafana agent") await mysql_router_app.remove_relation( f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(60) - - try: - http.request("GET", f"http://{unit_address}:49152/metrics") - except urllib3.exceptions.MaxRetryError as e: - assert ( - "[Errno 111] Connection refused" in e.reason.args[0] - ), "❌ expected connection refused error" - else: - assert False, "❌ can connect to metrics endpoint without relation with cos" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(120), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" @pytest.mark.group(1) @@ -184,48 +192,60 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" ) - time.sleep(60) - mysql_test_app = ops_test.model.applications[APPLICATION_APP_NAME] unit_address = await mysql_test_app.units[0].get_public_address() - try: - http.request("GET", f"http://{unit_address}:49152/metrics") - except urllib3.exceptions.MaxRetryError as e: - assert ( - "[Errno 111] Connection refused" in e.reason.args[0] - ), "❌ expected connection refused error" - else: - assert False, "❌ can connect to metrics endpoint without relation with cos" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(120), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" logger.info("Relating mysqlrouter with grafana agent") await ops_test.model.relate( f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(60) - - jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") - assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" - assert "mysqlrouter_route_health" in str( - jmx_resp.data - ), "❌ did not find expected metric in response" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(120), + wait=tenacity.wait_fixed(10), + ): + with attempt: + jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") + assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with 
relation with cos" + assert "mysqlrouter_route_health" in str( + jmx_resp.data + ), "❌ did not find expected metric in response" logger.info("Removing relation between mysqlrouter and grafana agent") await mysql_router_app.remove_relation( f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" ) - time.sleep(60) - - try: - http.request("GET", f"http://{unit_address}:49152/metrics") - except urllib3.exceptions.MaxRetryError as e: - assert ( - "[Errno 111] Connection refused" in e.reason.args[0] - ), "❌ expected connection refused error" - else: - assert False, "❌ can connect to metrics endpoint without relation with cos" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(120), + wait=tenacity.wait_fixed(10), + ): + with attempt: + try: + http.request("GET", f"http://{unit_address}:49152/metrics") + except urllib3.exceptions.MaxRetryError as e: + assert ( + "[Errno 111] Connection refused" in e.reason.args[0] + ), "❌ expected connection refused error" + else: + assert False, "❌ can connect to metrics endpoint without relation with cos" logger.info(f"Removing relation between mysqlrouter and {TLS_APP_NAME}") await mysql_router_app.remove_relation( From 41d76165db67088b5fb5045eb100c2eacd7e6922 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Mon, 22 Apr 2024 15:10:11 +0000 Subject: [PATCH 24/31] Run format --- tests/integration/test_exporter.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 953c0220..578435db 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -4,9 +4,9 @@ import asyncio import logging -import tenacity import pytest +import tenacity import urllib3 from pytest_operator.plugin import OpsTest @@ -142,7 +142,9 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s ): with attempt: jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") - assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" + assert ( + jmx_resp.status == 200 + ), "❌ cannot connect to metrics endpoint with relation with cos" assert "mysqlrouter_route_health" in str( jmx_resp.data ), "❌ did not find expected metric in response" @@ -222,7 +224,9 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: ): with attempt: jmx_resp = http.request("GET", f"http://{unit_address}:49152/metrics") - assert jmx_resp.status == 200, "❌ cannot connect to metrics endpoint with relation with cos" + assert ( + jmx_resp.status == 200 + ), "❌ cannot connect to metrics endpoint with relation with cos" assert "mysqlrouter_route_health" in str( jmx_resp.data ), "❌ did not find expected metric in response" From 6f604884375bcf7e936e06865d12b5627b18bc85 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Mon, 22 Apr 2024 15:29:19 +0000 Subject: [PATCH 25/31] Update data_interfaces charm lib to v0.34 --- .../data_platform_libs/v0/data_interfaces.py | 264 ++++++++++++------ 1 file changed, 178 insertions(+), 86 deletions(-) diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index df23735e..3ce69e15 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -331,7 +331,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 
if you are raising the major API version -LIBPATCH = 33 +LIBPATCH = 34 PYDEPS = ["ops>=2.0.0"] @@ -493,6 +493,7 @@ def wrapper(self, *args, **kwargs): return return f(self, *args, **kwargs) + wrapper.leader_only = True return wrapper @@ -559,6 +560,7 @@ def __init__( component: Union[Application, Unit], label: str, secret_uri: Optional[str] = None, + legacy_labels: List[str] = [], ): self._secret_meta = None self._secret_content = {} @@ -566,16 +568,25 @@ def __init__( self.label = label self._model = model self.component = component + self.legacy_labels = legacy_labels + self.current_label = None - def add_secret(self, content: Dict[str, str], relation: Relation) -> Secret: + def add_secret( + self, + content: Dict[str, str], + relation: Optional[Relation] = None, + label: Optional[str] = None, + ) -> Secret: """Create a new secret.""" if self._secret_uri: raise SecretAlreadyExistsError( "Secret is already defined with uri %s", self._secret_uri ) - secret = self.component.add_secret(content, label=self.label) - if relation.app != self._model.app: + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if relation and relation.app != self._model.app: # If it's not a peer relation, grant is to be applied secret.grant(relation) self._secret_uri = secret.id @@ -588,13 +599,20 @@ def meta(self) -> Optional[Secret]: if not self._secret_meta: if not (self._secret_uri or self.label): return - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - if self._secret_uri: - self._secret_meta = self._model.get_secret( - id=self._secret_uri, label=self.label - ) + + for label in [self.label] + self.legacy_labels: + try: + self._secret_meta = self._model.get_secret(label=label) + except SecretNotFoundError: + pass + else: + if label != self.label: + self.current_label = label + break + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) return self._secret_meta def get_content(self) -> Dict[str, str]: @@ -618,12 +636,30 @@ def get_content(self) -> Dict[str, str]: self._secret_content = self.meta.get_content() return self._secret_content + def _move_to_new_label_if_needed(self): + """Helper function to re-create the secret with a different label.""" + if not self.current_label or not (self.meta and self._secret_meta): + return + + # Create a new secret with the new label + old_meta = self._secret_meta + content = self._secret_meta.get_content() + + # I wish we could just check if we are the owners of the secret... 
+ try: + self._secret_meta = self.add_secret(content, label=self.label) + except ModelError as err: + if "this unit is not the leader" not in str(err): + raise + old_meta.remove_all_revisions() + def set_content(self, content: Dict[str, str]) -> None: """Setting cached secret content.""" if not self.meta: return if content: + self._move_to_new_label_if_needed() self.meta.set_content(content) self._secret_content = content else: @@ -655,10 +691,14 @@ def __init__(self, model: Model, component: Union[Application, Unit]): self.component = component self._secrets: Dict[str, CachedSecret] = {} - def get(self, label: str, uri: Optional[str] = None) -> Optional[CachedSecret]: + def get( + self, label: str, uri: Optional[str] = None, legacy_labels: List[str] = [] + ) -> Optional[CachedSecret]: """Getting a secret from Juju Secret store or cache.""" if not self._secrets.get(label): - secret = CachedSecret(self._model, self.component, label, uri) + secret = CachedSecret( + self._model, self.component, label, uri, legacy_labels=legacy_labels + ) if secret.meta: self._secrets[label] = secret return self._secrets.get(label) @@ -676,10 +716,14 @@ def add(self, label: str, content: Dict[str, str], relation: Relation) -> Cached def remove(self, label: str) -> None: """Remove a secret from the cache.""" if secret := self.get(label): - secret.remove() - self._secrets.pop(label) - else: - logging.error("Non-existing Juju Secret was attempted to be removed %s", label) + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) ################################################################################ @@ -716,11 +760,21 @@ def __setitem__(self, key: str, item: str) -> None: def __getitem__(self, key: str) -> str: """Get an item of the Abstract Relation Data dictionary.""" result = None - if not (result := self.relation_data.fetch_my_relation_field(self.relation_id, key)): + + # Avoiding "leader_only" error when cross-charm non-leader unit, not to report useless error + if ( + not hasattr(self.relation_data.fetch_my_relation_field, "leader_only") + or self.relation_data.component != self.relation_data.local_app + or self.relation_data.local_unit.is_leader() + ): + result = self.relation_data.fetch_my_relation_field(self.relation_id, key) + + if not result: try: result = self.relation_data.fetch_relation_field(self.relation_id, key) except NotImplementedError: pass + if not result: raise KeyError return result @@ -1095,7 +1149,7 @@ def _delete_relation_data_without_secrets( try: relation.data[component].pop(field) except KeyError: - logger.error( + logger.debug( "Non-existing field '%s' was attempted to be removed from the databag (relation ID: %s)", str(field), str(relation.id), @@ -1351,7 +1405,7 @@ def _delete_relation_secret( try: new_content.pop(field) except KeyError: - logging.error( + logging.debug( "Non-existing secret was attempted to be removed %s, %s", str(relation.id), str(field), @@ -1723,6 +1777,7 @@ def __init__( self._secret_label_map = {} # Secrets that are being dynamically added within the scope of this event handler run self._new_secrets = [] + self._additional_secret_group_mapping = additional_secret_group_mapping for group, fields in additional_secret_group_mapping.items(): if group not in SECRET_GROUPS.groups(): @@ -1769,12 +1824,15 @@ def current_secret_fields(self) -> List[str]: relation = self._model.relations[self.relation_name][0] 
fields = [] + + ignores = [SECRET_GROUPS.get_group("user"), SECRET_GROUPS.get_group("tls")] for group in SECRET_GROUPS.groups(): + if group in ignores: + continue if content := self._get_group_secret_contents(relation, group): - fields += [self._field_to_internal_name(field, group) for field in content] + fields += list(content.keys()) return list(set(fields) | set(self._new_secrets)) - @juju_secrets_only @dynamic_secrets_only def set_secret( self, @@ -1792,13 +1850,13 @@ def set_secret( group_mapping: The name of the "secret group", in case the field is to be added to an existing secret """ full_field = self._field_to_internal_name(field, group_mapping) - if full_field not in self.current_secret_fields: + if self.secrets_enabled and full_field not in self.current_secret_fields: self._new_secrets.append(full_field) - self.update_relation_data(relation_id, {full_field: value}) + if self._no_group_with_databag(field, full_field): + self.update_relation_data(relation_id, {full_field: value}) # Unlike for set_secret(), there's no harm using this operation with static secrets # The restricion is only added to keep the concept clear - @juju_secrets_only @dynamic_secrets_only def get_secret( self, @@ -1808,13 +1866,15 @@ def get_secret( ) -> Optional[str]: """Public interface method to fetch secrets only.""" full_field = self._field_to_internal_name(field, group_mapping) - if full_field not in self.current_secret_fields: - raise SecretsUnavailableError( - f"Secret {field} from group {group_mapping} was not found" - ) - return self.fetch_my_relation_field(relation_id, full_field) + if ( + self.secrets_enabled + and full_field not in self.current_secret_fields + and field not in self.current_secret_fields + ): + return + if self._no_group_with_databag(field, full_field): + return self.fetch_my_relation_field(relation_id, full_field) - @juju_secrets_only @dynamic_secrets_only def delete_secret( self, @@ -1824,9 +1884,11 @@ def delete_secret( ) -> Optional[str]: """Public interface method to delete secrets only.""" full_field = self._field_to_internal_name(field, group_mapping) - if full_field not in self.current_secret_fields: + if self.secrets_enabled and full_field not in self.current_secret_fields: logger.warning(f"Secret {field} from group {group_mapping} was not found") - self.delete_relation_data(relation_id, [full_field]) + return + if self._no_group_with_databag(field, full_field): + self.delete_relation_data(relation_id, [full_field]) # Helpers @@ -1870,6 +1932,73 @@ def _content_for_secret_group( if k in self.secret_fields } + # Backwards compatibility + + def _check_deleted_label(self, relation, fields) -> None: + """Helper function for legacy behavior.""" + current_data = self.fetch_my_relation_data([relation.id], fields) + if current_data is not None: + # Check if the secret we wanna delete actually exists + # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found') + if non_existent := (set(fields) & set(self.secret_fields)) - set( + current_data.get(relation.id, []) + ): + logger.debug( + "Non-existing secret %s was attempted to be removed.", + ", ".join(non_existent), + ) + + def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: + """For Rolling Upgrades -- when moving from databag to secrets usage. + + Practically what happens here is to remove stuff from the databag that is + to be stored in secrets. 
+ """ + if not self.secret_fields: + return + + secret_fields_passed = set(self.secret_fields) & set(fields) + for field in secret_fields_passed: + if self._fetch_relation_data_without_secrets(self.component, relation, [field]): + self._delete_relation_data_without_secrets(self.component, relation, [field]) + + def _remove_secret_field_name_from_databag(self, relation) -> None: + """Making sure that the old databag URI is gone. + + This action should not be executed more than once. + """ + # Nothing to do if 'internal-secret' is not in the databag + if not (relation.data[self.component].get(self._generate_secret_field_name())): + return + + # Making sure that the secret receives its label + # (This should have happened by the time we get here, rather an extra security measure.) + secret = self._get_relation_secret(relation.id) + + # Either app scope secret with leader executing, or unit scope secret + leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() + if secret and leader_or_unit_scope: + # Databag reference to the secret URI can be removed, now that it's labelled + relation.data[self.component].pop(self._generate_secret_field_name(), None) + + def _previous_labels(self) -> List[str]: + """Generator for legacy secret label names, for backwards compatibility.""" + result = [] + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + result.append(f"{'.'.join(members)}") + return result + + def _no_group_with_databag(self, field: str, full_field: str) -> bool: + """Check that no secret group is attempted to be used together with databag.""" + if not self.secrets_enabled and full_field != field: + logger.error( + f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
+ ) + return False + return True + # Event handlers def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: @@ -1885,7 +2014,7 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: def _generate_secret_label( self, relation_name: str, relation_id: int, group_mapping: SecretGroup ) -> str: - members = [self._model.app.name, self.relation_name] + members = [relation_name, self._model.app.name] if self.scope: members.append(self.scope.value) if group_mapping != SECRET_GROUPS.EXTRA: @@ -1919,16 +2048,12 @@ def _get_relation_secret( label = self._generate_secret_label(relation_name, relation_id, group_mapping) secret_uri = relation.data[self.component].get(self._generate_secret_field_name(), None) - # Fetching the secret with fallback to URI (in case label is not yet known) - # Label would we "stuck" on the secret in case it is found - secret = self.secrets.get(label, secret_uri) - - # Either app scope secret with leader executing, or unit scope secret - leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() - if secret_uri and secret and leader_or_unit_scope: - # Databag reference to the secret URI can be removed, now that it's labelled - relation.data[self.component].pop(self._generate_secret_field_name(), None) - return secret + # URI or legacy label is only to applied when moving single legacy secret to a (new) label + if group_mapping == SECRET_GROUPS.EXTRA: + # Fetching the secret with fallback to URI (in case label is not yet known) + # Label would we "stuck" on the secret in case it is found + return self.secrets.get(label, secret_uri, legacy_labels=self._previous_labels()) + return self.secrets.get(label) def _get_group_secret_contents( self, @@ -1939,27 +2064,11 @@ def _get_group_secret_contents( """Helper function to retrieve collective, requested contents of a secret.""" secret_fields = [self._internal_name_to_field(k)[0] for k in secret_fields] result = super()._get_group_secret_contents(relation, group, secret_fields) - if not self.deleted_label: - return result - return { - self._field_to_internal_name(key, group): result[key] - for key in result - if result[key] != self.deleted_label - } - - def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: - """For Rolling Upgrades -- when moving from databag to secrets usage. - - Practically what happens here is to remove stuff from the databag that is - to be stored in secrets. 
- """ - if not self.secret_fields: - return - - secret_fields_passed = set(self.secret_fields) & set(fields) - for field in secret_fields_passed: - if self._fetch_relation_data_without_secrets(self.component, relation, [field]): - self._delete_relation_data_without_secrets(self.component, relation, [field]) + if self.deleted_label: + result = {key: result[key] for key in result if result[key] != self.deleted_label} + if self._additional_secret_group_mapping: + return {self._field_to_internal_name(key, group): result[key] for key in result} + return result @either_static_or_dynamic_secrets def _fetch_my_specific_relation_data( @@ -1982,6 +2091,7 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non data=data, uri_to_databag=False, ) + self._remove_secret_field_name_from_databag(relation) normal_content = {k: v for k, v in data.items() if k in normal_fields} self._update_relation_data_without_secrets(self.component, relation, normal_content) @@ -1990,17 +2100,8 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" if self.secret_fields and self.deleted_label: - current_data = self.fetch_my_relation_data([relation.id], fields) - if current_data is not None: - # Check if the secret we wanna delete actually exists - # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found') - if non_existent := (set(fields) & set(self.secret_fields)) - set( - current_data.get(relation.id, []) - ): - logger.error( - "Non-existing secret %s was attempted to be removed.", - ", ".join(non_existent), - ) + # Legacy, backwards compatibility + self._check_deleted_label(relation, fields) _, normal_fields = self._process_secret_fields( relation, @@ -2036,19 +2137,10 @@ def fetch_relation_field( "fetch_my_relation_data() and fetch_my_relation_field()" ) - def fetch_my_relation_field( - self, relation_id: int, field: str, relation_name: Optional[str] = None - ) -> Optional[str]: - """Get a single field from the relation data -- owner side. 
- - Re-implementing the inherited function due to field@group conversion - """ - if relation_data := self.fetch_my_relation_data([relation_id], [field], relation_name): - return relation_data.get(relation_id, {}).get(self._internal_name_to_field(field)[0]) - # Public functions -- inherited fetch_my_relation_data = Data.fetch_my_relation_data + fetch_my_relation_field = Data.fetch_my_relation_field class DataPeerEventHandlers(RequirerEventHandlers): From 40beb971b4fd0d20b361d89fdd7825d195e5c62f Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Mon, 22 Apr 2024 18:54:12 +0000 Subject: [PATCH 26/31] Miscellaneous integration test improvements --- tests/integration/juju_.py | 2 +- tests/integration/test_data_integrator.py | 4 +- tests/integration/test_exporter.py | 3 +- tests/integration/test_tls.py | 83 +++++++++++++++-------- 4 files changed, 59 insertions(+), 33 deletions(-) diff --git a/tests/integration/juju_.py b/tests/integration/juju_.py index 968962e0..261ad920 100644 --- a/tests/integration/juju_.py +++ b/tests/integration/juju_.py @@ -6,4 +6,4 @@ # libjuju version != juju agent version, but the major version should be identical—which is good # enough to check for secrets _libjuju_version = importlib.metadata.version("juju") -has_secrets = int(_libjuju_version.split(".")[0]) >= 3 +is_3_or_higher = int(_libjuju_version.split(".")[0]) >= 3 diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index 172b85c3..61fdbec7 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -21,7 +21,7 @@ TEST_DATABASE = "testdatabase" TEST_TABLE = "testtable" -if juju_.has_secrets: +if juju_.is_3_or_higher: TLS_APP_NAME = "self-signed-certificates" TLS_CONFIG = {"ca-common-name": "Test CA"} else: @@ -34,7 +34,7 @@ async def get_data_integrator_credentials(ops_test: OpsTest) -> typing.Dict: data_integrator_unit = ops_test.model.applications[DATA_INTEGRATOR_APP_NAME].units[0] action = await data_integrator_unit.run_action(action_name="get-credentials") result = await action.wait() - if juju_.has_secrets: + if juju_.is_3_or_higher: assert result.results["return-code"] == 0 else: assert result.results["Code"] == "0" diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 578435db..8930d0e1 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -20,7 +20,7 @@ GRAFANA_AGENT_APP_NAME = "grafana-agent" SLOW_TIMEOUT = 25 * 60 -if juju_.has_secrets: +if juju_.is_3_or_higher: TLS_APP_NAME = "self-signed-certificates" TLS_CONFIG = {"ca-common-name": "Test CA"} else: @@ -184,6 +184,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: application_name=TLS_APP_NAME, channel="stable", config=TLS_CONFIG, + series="jammy", ) await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index 28c144e5..3be592ff 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -3,9 +3,9 @@ import asyncio import logging -import time import pytest +import tenacity from pytest_operator.plugin import OpsTest from . 
import juju_ @@ -18,7 +18,7 @@ TEST_APP_NAME = "mysql-test-app" SLOW_TIMEOUT = 15 * 60 -if juju_.has_secrets: +if juju_.is_3_or_higher: TLS_APP_NAME = "self-signed-certificates" TLS_CONFIG = {"ca-common-name": "Test CA"} else: @@ -34,7 +34,11 @@ async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_ser async with ops_test.fast_forward(): # deploy mysql first await ops_test.model.deploy( - MYSQL_APP_NAME, channel="8.0/edge", config={"profile": "testing"}, num_units=1 + MYSQL_APP_NAME, + channel="8.0/edge", + application_name=MYSQL_APP_NAME, + config={"profile": "testing"}, + num_units=1, ) # ROUTER @@ -49,7 +53,11 @@ async def test_build_deploy_and_relate(ops_test: OpsTest, mysql_router_charm_ser series=mysql_router_charm_series, ), ops_test.model.deploy( - TLS_APP_NAME, application_name=TLS_APP_NAME, channel="stable", config=TLS_CONFIG + TLS_APP_NAME, + application_name=TLS_APP_NAME, + channel="stable", + config=TLS_CONFIG, + series="jammy", ), ops_test.model.deploy( TEST_APP_NAME, @@ -78,39 +86,56 @@ async def test_connected_encryption(ops_test: OpsTest) -> None: """Test encryption when backend database is using TLS.""" mysqlrouter_unit = ops_test.model.applications[MYSQL_ROUTER_APP_NAME].units[0] - issuer = await get_tls_certificate_issuer( - ops_test, - mysqlrouter_unit.name, - socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", - ) - assert ( - "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer - ), "Expected mysqlrouter autogenerated certificate" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(60), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" logger.info("Relating TLS with mysqlrouter") await ops_test.model.relate(TLS_APP_NAME, MYSQL_ROUTER_APP_NAME) - time.sleep(30) - logger.info("Getting certificate issuer after relating with tls operator") - issuer = await get_tls_certificate_issuer( - ops_test, - mysqlrouter_unit.name, - socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", - ) - assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(60), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "CN = Test CA" in issuer + ), f"Expected mysqlrouter certificate from {TLS_APP_NAME}" logger.info("Removing relation TLS with mysqlrouter") await ops_test.model.applications[MYSQL_ROUTER_APP_NAME].remove_relation( f"{TLS_APP_NAME}:certificates", f"{MYSQL_ROUTER_APP_NAME}:certificates" ) - time.sleep(30) - issuer = await get_tls_certificate_issuer( - ops_test, - mysqlrouter_unit.name, - socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", - ) - assert ( - "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer - ), "Expected mysqlrouter autogenerated CA certificate" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(60), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + 
socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated CA certificate" From 1b0bb1962ebb60f41593659f4b0b5a4bfe6c6eaa Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 24 Apr 2024 11:59:29 +0000 Subject: [PATCH 27/31] Address PR feedback + fix broken upgrades --- poetry.lock | 2 +- pyproject.toml | 6 +- src/abstract_charm.py | 9 +- src/machine_charm.py | 9 +- src/machine_logrotate.py | 6 +- src/machine_upgrade.py | 13 ++- src/machine_workload.py | 13 ++- src/relations/database_providers_wrapper.py | 7 +- src/relations/database_provides.py | 32 +++--- src/relations/tls.py | 2 +- src/snap.py | 11 ++ src/workload.py | 110 ++++++++++++-------- tests/integration/test_data_integrator.py | 53 ++++++---- tests/integration/test_exporter.py | 45 ++++++-- tests/integration/test_tls.py | 7 +- 15 files changed, 213 insertions(+), 112 deletions(-) diff --git a/poetry.lock b/poetry.lock index d75e16e9..c3ec1c9c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2332,4 +2332,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "de9ecf1614faca0d8cf34a62e4da500d2f883c983f0e59ada726bb3090a5e586" +content-hash = "9b4b28d998e962972d662e15a1c14976b3e459296951b967616b37ebbd36c7c7" diff --git a/pyproject.toml b/pyproject.toml index c98ec95b..cb38f1e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 +# there is a breaking change in ops 2.10.0: https://github.com/canonical/operator/pull/1091#issuecomment-1888644075 ops = "<2.10.0" tenacity = "^8.2.3" poetry-core = "^1.7.0" @@ -18,6 +19,7 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py +# there is a breaking change in ops 2.10.0: https://github.com/canonical/operator/pull/1091#issuecomment-1888644075 ops = "<2.10.0" # tls_certificates_interface/v2/tls_certificates.py # tls_certificates lib v2 uses a feature only available in cryptography >=42.0.5 @@ -53,7 +55,7 @@ pytest = "^7.4.0" pytest-xdist = "^3.3.1" pytest-cov = "^4.1.0" ops-scenario = "^5.4.1" -ops = "<2.10.0" +ops = ">=2.0.0" pytest-mock = "^3.11.1" [tool.poetry.group.integration.dependencies] @@ -65,7 +67,7 @@ pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workf juju = "3.2.0.1" mysql-connector-python = "~8.0.33" tenacity = "^8.2.2" -ops = "<2.10.0" +ops = ">=2.0.0" pytest-mock = "^3.11.1" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 92e72d2e..6dc12ab0 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -109,10 +109,9 @@ def _exposed_read_write_endpoint(self) -> str: def _exposed_read_only_endpoint(self) -> str: """The exposed read-only endpoint""" - @property @abc.abstractmethod - def is_exposed(self) -> typing.Optional[bool]: - """Whether router is exposed externally""" + def is_externally_accessible(self, event=None) -> typing.Optional[bool]: + """Whether router is externally accessible""" @property def _tls_certificate_saved(self) -> bool: @@ -265,7 +264,9 @@ def reconcile(self, event=None) -> None: # noqa: C901 if self._upgrade.unit_state == "outdated": if self._upgrade.authorized: self._upgrade.upgrade_unit( - workload_=workload_, tls=self._tls_certificate_saved + workload_=workload_, + tls=self._tls_certificate_saved, + 
exporter_config=self._cos_exporter_config(event), ) else: self.set_status(event=event) diff --git a/src/machine_charm.py b/src/machine_charm.py index 4409825f..0434122b 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -83,16 +83,15 @@ def _exposed_read_write_endpoint(self) -> str: def _exposed_read_only_endpoint(self) -> str: return f"{self.host_address}:{self._READ_ONLY_PORT}" - @property - def is_exposed(self) -> typing.Optional[bool]: - return self._database_provides.external_connectivity + def is_externally_accessible(self, event=None) -> typing.Optional[bool]: + return self._database_provides.external_connectivity(event) def _reconcile_node_port(self, event) -> None: """Only applies to Kubernetes charm, so no-op.""" pass def _reconcile_ports(self) -> None: - if self.is_exposed: + if self.is_externally_accessible(): ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] else: ports = [] @@ -108,7 +107,7 @@ def wait_until_mysql_router_ready(self) -> None: wait=tenacity.wait_fixed(5), ): with attempt: - if self.is_exposed: + if self.is_externally_accessible(): for port in ( self._READ_WRITE_PORT, self._READ_ONLY_PORT, diff --git a/src/machine_logrotate.py b/src/machine_logrotate.py index 00b148e3..0552bfd5 100644 --- a/src/machine_logrotate.py +++ b/src/machine_logrotate.py @@ -51,6 +51,8 @@ def enable(self) -> None: def disable(self) -> None: logger.debug("Removing cron job for log rotation of mysqlrouter") - self._logrotate_config.unlink() - self._cron_file.unlink() + if self._logrotate_config.exists(): + self._logrotate_config.unlink() + if self._cron_file.exists(): + self._cron_file.unlink() logger.debug("Removed cron job for log rotation of mysqlrouter") diff --git a/src/machine_upgrade.py b/src/machine_upgrade.py index ab9922e6..628f0a98 100644 --- a/src/machine_upgrade.py +++ b/src/machine_upgrade.py @@ -17,6 +17,9 @@ import upgrade import workload +if typing.TYPE_CHECKING: + import relations.cos + logger = logging.getLogger(__name__) @@ -135,9 +138,15 @@ def authorized(self) -> bool: return False return False - def upgrade_unit(self, *, workload_: workload.Workload, tls: bool) -> None: + def upgrade_unit( + self, + *, + workload_: workload.Workload, + tls: bool, + exporter_config: "relations.cos.ExporterConfig", + ) -> None: logger.debug(f"Upgrading {self.authorized=}") self.unit_state = "upgrading" - workload_.upgrade(unit=self._unit, tls=tls) + workload_.upgrade(unit=self._unit, tls=tls, exporter_config=exporter_config) self._unit_databag["snap_revision"] = snap.REVISION logger.debug(f"Saved {snap.REVISION=} in unit databag after upgrade") diff --git a/src/machine_workload.py b/src/machine_workload.py index e8a87332..ddeff442 100644 --- a/src/machine_workload.py +++ b/src/machine_workload.py @@ -20,7 +20,7 @@ class AuthenticatedMachineWorkload(workload.AuthenticatedWorkload): # TODO python3.10 min version: Use `list` instead of `typing.List` def _get_bootstrap_command(self, password: str) -> typing.List[str]: command = super()._get_bootstrap_command(password) - if self._charm.is_exposed: + if self._charm.is_externally_accessible(): command.extend( [ "--conf-bind-address", @@ -35,17 +35,18 @@ def _get_bootstrap_command(self, password: str) -> typing.List[str]: # set. 
Workaround for https://bugs.mysql.com/bug.php?id=107291 "--conf-set-option", "DEFAULT.server_ssl_mode=PREFERRED", + "--conf-skip-tcp", ] ) return command - def _update_configured_socket_file_locations_and_bind_address(self) -> None: + def _update_configured_socket_file_locations(self) -> None: """Update configured socket file locations from `/tmp` to `/run/mysqlrouter`. Called after MySQL Router bootstrap & before MySQL Router service is enabled Change configured location of socket files before socket files are created by MySQL Router - service. Also remove bind_address and bind_port for all router services: rw, ro, x_rw, x_ro + service. Needed since `/tmp` inside a snap is not accessible to non-root users. The socket files must be accessible to applications related via database_provides endpoint. @@ -59,8 +60,6 @@ def _update_configured_socket_file_locations_and_bind_address(self) -> None: section["socket"] = str( self._container.path("/run/mysqlrouter") / pathlib.PurePath(section["socket"]).name ) - del section["bind_address"] - del section["bind_port"] with io.StringIO() as output: config.write(output) self._container.router_config_file.write_text(output.getvalue()) @@ -68,5 +67,5 @@ def _update_configured_socket_file_locations_and_bind_address(self) -> None: def _bootstrap_router(self, *, tls: bool) -> None: super()._bootstrap_router(tls=tls) - if not self._charm.is_exposed: - self._update_configured_socket_file_locations_and_bind_address() + if not self._charm.is_externally_accessible(): + self._update_configured_socket_file_locations() diff --git a/src/relations/database_providers_wrapper.py b/src/relations/database_providers_wrapper.py index 3dc22a8b..3c21cec1 100644 --- a/src/relations/database_providers_wrapper.py +++ b/src/relations/database_providers_wrapper.py @@ -38,10 +38,9 @@ def __init__( charm_ ) - @property - def external_connectivity(self) -> bool: - """Whether the relation is exposed""" - return self._database_provides.external_connectivity + def external_connectivity(self, event) -> bool: + """Whether any of the relations are marked as external.""" + return self._database_provides.external_connectivity(event) def reconcile_users( self, diff --git a/src/relations/database_provides.py b/src/relations/database_provides.py index 8ef50f51..08bf6dc5 100644 --- a/src/relations/database_provides.py +++ b/src/relations/database_provides.py @@ -74,7 +74,7 @@ def __init__( # (e.g. 
when related to `data-integrator` charm) # Implements DA073 - Add Expose Flag to the Database Interface # https://docs.google.com/document/d/1Y7OZWwMdvF8eEMuVKrqEfuFV3JOjpqLHL7_GPqJpRHU - self._external_connectivity = databag.get("external-node-connectivity") == "true" + self.external_connectivity = databag.get("external-node-connectivity") == "true" if databag.get("extra-user-roles"): raise _UnsupportedExtraUserRole( app_name=relation.app.name, endpoint_name=relation.name @@ -125,13 +125,11 @@ def create_database_and_user( rw_endpoint = ( exposed_read_write_endpoint - if self._external_connectivity + if self.external_connectivity else router_read_write_endpoint ) ro_endpoint = ( - exposed_read_only_endpoint - if self._external_connectivity - else router_read_only_endpoint + exposed_read_only_endpoint if self.external_connectivity else router_read_only_endpoint ) self._set_databag( @@ -199,13 +197,23 @@ def _shared_users(self) -> typing.List[_RelationWithSharedUser]: pass return shared_users - @property - def external_connectivity(self) -> bool: - """Whether the relation is exposed.""" - relation_data = self._interface.fetch_relation_data(fields=["external-node-connectivity"]) - return any( - [data.get("external-node-connectivity") == "true" for data in relation_data.values()] - ) + def external_connectivity(self, event) -> bool: + """Whether any of the relations are marked as external.""" + requested_users = [] + for relation in self._interface.relations: + try: + requested_users.append( + _RelationThatRequestedUser( + relation=relation, interface=self._interface, event=event + ) + ) + except ( + _RelationBreaking, + remote_databag.IncompleteDatabag, + _UnsupportedExtraUserRole, + ): + pass + return any(relation.external_connectivity for relation in requested_users) def reconcile_users( self, diff --git a/src/relations/tls.py b/src/relations/tls.py index 4d18e110..a52bb9fa 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -114,7 +114,7 @@ def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> def _generate_csr(self, key: bytes) -> bytes: """Generate certificate signing request (CSR).""" sans_ip = ["127.0.0.1"] # needed for the HTTP server when related with COS - if self._charm.is_exposed: + if self._charm.is_externally_accessible(): sans_ip.append(self._charm.host_address) return tls_certificates.generate_csr( diff --git a/src/snap.py b/src/snap.py index 678726df..939967e4 100644 --- a/src/snap.py +++ b/src/snap.py @@ -220,6 +220,14 @@ def update_mysql_router_exporter_service( "mysqlrouter-exporter.service-name": self._unit_name.replace("/", "-"), } ) + if tls: + _snap.set( + { + "mysqlrouter.tls-cacert-path": certificate_authority_filename, + "mysqlrouter.tls-cert-path": certificate_filename, + "mysqlrouter.tls-key-path": key_filename, + } + ) _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) else: _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) @@ -227,6 +235,9 @@ def update_mysql_router_exporter_service( _snap.unset("mysqlrouter-exporter.password") _snap.unset("mysqlrouter-exporter.url") _snap.unset("mysqlrouter-exporter.service-name") + _snap.unset("mysqlrouter.tls-cacert-path") + _snap.unset("mysqlrouter.tls-cert-path") + _snap.unset("mysqlrouter.tls-key-path") def upgrade(self, unit: ops.Unit) -> None: """Upgrade snap.""" diff --git a/src/workload.py b/src/workload.py index 0043c4c9..96a1b132 100644 --- a/src/workload.py +++ b/src/workload.py @@ -76,7 +76,9 @@ def version(self) -> str: return component return "" - def 
upgrade(self, *, unit: ops.Unit, tls: bool) -> None: + def upgrade( + self, *, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: """Upgrade MySQL Router. Only applies to machine charm @@ -116,8 +118,6 @@ def cleanup_monitoring_user(self) -> None: def _disable_exporter(self) -> None: """Stop and disable MySQL Router exporter service, keeping router enabled.""" - if not self._container.mysql_router_exporter_service_enabled: - return logger.debug("Disabling MySQL Router exporter service") self._container.update_mysql_router_exporter_service(enabled=False) self.cleanup_monitoring_user() @@ -143,6 +143,17 @@ def _disable_tls(self) -> None: file.unlink(missing_ok=True) logger.debug("Deleted TLS files") + def _disable_router(self) -> None: + """Disable router and clean up corresponding router files.""" + logger.debug("Disabling MySQL Router service") + self._container.update_mysql_router_service(enabled=False) + self._logrotate.disable() + self._container.router_config_directory.rmtree() + self._container.router_config_directory.mkdir() + self._router_data_directory.rmtree() + self._router_data_directory.mkdir() + logger.debug("Disabled MySQL Router service") + def reconcile( self, *, @@ -158,16 +169,11 @@ def reconcile( raise ValueError("`key` and `certificate` arguments required when tls=True") if self._container.mysql_router_service_enabled: - logger.debug("Disabling MySQL Router service") - self._container.update_mysql_router_service(enabled=False) - self._logrotate.disable() - self._container.router_config_directory.rmtree() - self._container.router_config_directory.mkdir() - self._router_data_directory.rmtree() - self._router_data_directory.mkdir() - logger.debug("Disabled MySQL Router service") - - self._disable_exporter() + self._disable_router() + + if self._container.mysql_router_exporter_service_enabled: + self._disable_exporter() + self._disable_tls() @property @@ -316,6 +322,37 @@ def _restart(self, *, tls: bool) -> None: # status self._charm.set_status(event=None) + def _enable_router(self, *, tls: bool, unit_name: str) -> None: + """Enable router after setting up all the necessary prerequisites.""" + logger.debug("Enabling MySQL Router service") + self._cleanup_after_upgrade_or_potential_container_restart() + # create an empty credentials file, if the file does not exist + self._container.create_router_rest_api_credentials_file() + self._bootstrap_router(tls=tls) + self.shell.add_attributes_to_mysql_router_user( + username=self._router_username, router_id=self._router_id, unit_name=unit_name + ) + self._container.update_mysql_router_service(enabled=True, tls=tls) + self._logrotate.enable() + logger.debug("Enabled MySQL Router service") + self._charm.wait_until_mysql_router_ready() + + def _enable_exporter( + self, *, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: + """Enable the mysqlrouter exporter.""" + logger.debug("Enabling MySQL Router exporter service") + self.setup_monitoring_user() + self._container.update_mysql_router_exporter_service( + enabled=True, + config=exporter_config, + tls=tls, + key_filename=str(self._tls_key_file), + certificate_filename=str(self._tls_certificate_file), + certificate_authority_filename=str(self._tls_certificate_authority_file), + ) + logger.debug("Enabled MySQL Router exporter service") + def reconcile( self, *, @@ -348,38 +385,20 @@ def reconcile( # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. 
# Therefore, if the host or port changes, we do not need to restart MySQL Router. - is_charm_exposed = self._charm.is_exposed + is_charm_exposed = self._charm.is_externally_accessible() socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() require_rebootstrap = is_charm_exposed == socket_file_exists - if not self._container.mysql_router_service_enabled or require_rebootstrap: - logger.debug("Enabling MySQL Router service") - self._cleanup_after_upgrade_or_potential_container_restart() - # create an empty credentials file, if the file does not exist - self._container.create_router_rest_api_credentials_file() - self._bootstrap_router(tls=tls) - self.shell.add_attributes_to_mysql_router_user( - username=self._router_username, router_id=self._router_id, unit_name=unit_name - ) - self._container.update_mysql_router_service(enabled=True, tls=tls) - self._logrotate.enable() - logger.debug("Enabled MySQL Router service") - self._charm.wait_until_mysql_router_ready() + if require_rebootstrap: + self._disable_router() + + if not self._container.mysql_router_service_enabled: + self._enable_router(tls=tls, unit_name=unit_name) if (not self._container.mysql_router_exporter_service_enabled and exporter_config) or ( self._container.mysql_router_exporter_service_enabled and custom_certificate != certificate ): - logger.debug("Enabling MySQL Router exporter service") - self.setup_monitoring_user() - self._container.update_mysql_router_exporter_service( - enabled=True, - config=exporter_config, - tls=tls, - key_filename=str(self._tls_key_file), - certificate_filename=str(self._tls_certificate_file), - certificate_authority_filename=str(self._tls_certificate_authority_file), - ) - logger.debug("Enabled MySQL Router exporter service") + self._enable_exporter(tls=tls, exporter_config=exporter_config) elif self._container.mysql_router_exporter_service_enabled and not exporter_config: self._disable_exporter() @@ -397,15 +416,22 @@ def status(self) -> typing.Optional[ops.StatusBase]: "Router was manually removed from MySQL ClusterSet. Remove & re-deploy unit" ) - def upgrade(self, *, unit: ops.Unit, tls: bool) -> None: + def upgrade( + self, *, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + ) -> None: enabled = self._container.mysql_router_service_enabled + exporter_enabled = self._container.mysql_router_exporter_service_enabled + if exporter_enabled: + self._disable_exporter() if enabled: logger.debug("Disabling MySQL Router service before upgrade") - self.disable() - super().upgrade(unit=unit, tls=tls) + self._disable_router() + super().upgrade(unit=unit, tls=tls, exporter_config=exporter_config) if enabled: logger.debug("Re-enabling MySQL Router service after upgrade") - self.enable(tls=tls, unit_name=unit.name) + self._enable_router(tls=tls, unit_name=unit.name) + if exporter_enabled: + self._enable_exporter(tls=tls, exporter_config=exporter_config) def _wait_until_http_server_authenticates(self) -> None: """Wait until active connection with router HTTP server using monitoring credentials.""" diff --git a/tests/integration/test_data_integrator.py b/tests/integration/test_data_integrator.py index 61fdbec7..4a3192b1 100644 --- a/tests/integration/test_data_integrator.py +++ b/tests/integration/test_data_integrator.py @@ -3,10 +3,10 @@ import asyncio import logging -import time import typing import pytest +import tenacity from pytest_operator.plugin import OpsTest from . 
import juju_ @@ -18,6 +18,7 @@ MYSQL_ROUTER_APP_NAME = "mysqlrouter" DATA_INTEGRATOR_APP_NAME = "data-integrator" SLOW_TIMEOUT = 15 * 60 +RETRY_TIMEOUT = 60 TEST_DATABASE = "testdatabase" TEST_TABLE = "testtable" @@ -141,15 +142,21 @@ async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsT f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" ) - time.sleep(30) - - issuer = await get_tls_certificate_issuer( - ops_test, - mysqlrouter_unit.name, - host=database_host, - port=database_port, - ) - assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "CN = Test CA" in issuer + ), f"Expected mysqlrouter certificate from {TLS_APP_NAME}" create_table_and_insert_data_sql = [ f"CREATE TABLE {TEST_DATABASE}.{TEST_TABLE} (id int, primary key(id));", @@ -181,17 +188,21 @@ async def test_external_connectivity_with_data_integrator_and_tls(ops_test: OpsT f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" ) - time.sleep(30) - - issuer = await get_tls_certificate_issuer( - ops_test, - mysqlrouter_unit.name, - host=database_host, - port=database_port, - ) - assert ( - "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer - ), "Expected mysqlrouter autogenerated certificate" + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await get_tls_certificate_issuer( + ops_test, + mysqlrouter_unit.name, + host=database_host, + port=database_port, + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" select_data_sql = [ f"SELECT * FROM {TEST_DATABASE}.{TEST_TABLE};", diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index 8930d0e1..d4fdebb4 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -11,6 +11,7 @@ from pytest_operator.plugin import OpsTest from . 
import juju_ +from .helpers import get_tls_certificate_issuer logger = logging.getLogger(__name__) @@ -19,6 +20,7 @@ APPLICATION_APP_NAME = "mysql-test-app" GRAFANA_AGENT_APP_NAME = "grafana-agent" SLOW_TIMEOUT = 25 * 60 +RETRY_TIMEOUT = 3 * 60 if juju_.is_3_or_higher: TLS_APP_NAME = "self-signed-certificates" @@ -137,7 +139,7 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(120), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -156,7 +158,7 @@ async def test_exporter_endpoint(ops_test: OpsTest, mysql_router_charm_series: s for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(120), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -177,6 +179,16 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: http = urllib3.PoolManager() mysql_router_app = ops_test.model.applications[MYSQL_ROUTER_APP_NAME] + mysql_router_unit = mysql_router_app.units[0] + + issuer = await get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" logger.info(f"Deploying {TLS_APP_NAME}") await ops_test.model.deploy( @@ -186,7 +198,6 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: config=TLS_CONFIG, series="jammy", ) - await ops_test.model.wait_for_idle([TLS_APP_NAME], status="active", timeout=SLOW_TIMEOUT) logger.info(f"Relating mysqlrouter with {TLS_APP_NAME}") @@ -200,7 +211,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(120), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -220,7 +231,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(120), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -232,6 +243,13 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: jmx_resp.data ), "❌ did not find expected metric in response" + issuer = await get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert "CN = Test CA" in issuer, "Expected mysqlrouter autogenerated certificate" + logger.info("Removing relation between mysqlrouter and grafana agent") await mysql_router_app.remove_relation( f"{GRAFANA_AGENT_APP_NAME}:cos-agent", f"{MYSQL_ROUTER_APP_NAME}:cos-agent" @@ -239,7 +257,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(120), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -256,3 +274,18 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: await mysql_router_app.remove_relation( f"{MYSQL_ROUTER_APP_NAME}:certificates", f"{TLS_APP_NAME}:certificates" ) + + for attempt in tenacity.Retrying( + reraise=True, + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), + wait=tenacity.wait_fixed(10), + ): + with attempt: + issuer = await 
get_tls_certificate_issuer( + ops_test, + mysql_router_unit.name, + socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", + ) + assert ( + "Issuer: CN = MySQL_Router_Auto_Generated_CA_Certificate" in issuer + ), "Expected mysqlrouter autogenerated certificate" diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index 3be592ff..27a533af 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -17,6 +17,7 @@ MYSQL_ROUTER_APP_NAME = "mysqlrouter" TEST_APP_NAME = "mysql-test-app" SLOW_TIMEOUT = 15 * 60 +RETRY_TIMEOUT = 60 if juju_.is_3_or_higher: TLS_APP_NAME = "self-signed-certificates" @@ -88,7 +89,7 @@ async def test_connected_encryption(ops_test: OpsTest) -> None: for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(60), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -107,7 +108,7 @@ async def test_connected_encryption(ops_test: OpsTest) -> None: logger.info("Getting certificate issuer after relating with tls operator") for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(60), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: @@ -127,7 +128,7 @@ async def test_connected_encryption(ops_test: OpsTest) -> None: for attempt in tenacity.Retrying( reraise=True, - stop=tenacity.stop_after_delay(60), + stop=tenacity.stop_after_delay(RETRY_TIMEOUT), wait=tenacity.wait_fixed(10), ): with attempt: From 10748cfac3232392bc0cb6a5da8acb0dba3f0be5 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 24 Apr 2024 13:41:45 +0000 Subject: [PATCH 28/31] Address PR feedback --- poetry.lock | 2 +- pyproject.toml | 5 ++--- src/abstract_charm.py | 8 ++++---- src/machine_charm.py | 6 +++--- src/machine_logrotate.py | 6 ++---- src/snap.py | 4 ++++ 6 files changed, 16 insertions(+), 15 deletions(-) diff --git a/poetry.lock b/poetry.lock index c3ec1c9c..4025fb0e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2332,4 +2332,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "9b4b28d998e962972d662e15a1c14976b3e459296951b967616b37ebbd36c7c7" +content-hash = "4efb0ad3c8e6d914741f689cd67e03785fb32ce25157cde71cdd0c9affa64a56" diff --git a/pyproject.toml b/pyproject.toml index cb38f1e9..793912ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ authors = [] [tool.poetry.dependencies] python = "^3.8.1" # ^3.8.1 required by flake8 # there is a breaking change in ops 2.10.0: https://github.com/canonical/operator/pull/1091#issuecomment-1888644075 -ops = "<2.10.0" +ops = "^2.6.0,<2.10.0" tenacity = "^8.2.3" poetry-core = "^1.7.0" jinja2 = "^3.1.2" @@ -19,8 +19,7 @@ requests = "^2.31.0" [tool.poetry.group.charm-libs.dependencies] # data_platform_libs/v0/data_interfaces.py -# there is a breaking change in ops 2.10.0: https://github.com/canonical/operator/pull/1091#issuecomment-1888644075 -ops = "<2.10.0" +ops = ">=2.0.0" # tls_certificates_interface/v2/tls_certificates.py # tls_certificates lib v2 uses a feature only available in cryptography >=42.0.5 cryptography = ">=42.0.5" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index 29fc188d..a06e5cb5 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -111,7 +111,7 @@ def _exposed_read_only_endpoint(self) -> str: @abc.abstractmethod def is_externally_accessible(self, event=None) -> typing.Optional[bool]: - """Whether 
router is externally accessible""" + """Whether endpoints should be externally accessible""" @property def _tls_certificate_saved(self) -> bool: @@ -215,14 +215,14 @@ def wait_until_mysql_router_ready(self) -> None: """ @abc.abstractmethod - def _reconcile_node_port(self, event) -> None: + def _reconcile_node_port(self, *, event) -> None: """Reconcile node port. Only applies to Kubernetes charm """ @abc.abstractmethod - def _reconcile_ports(self) -> None: + def _reconcile_ports(self, *, event) -> None: """Reconcile exposed ports. Only applies to Machine charm @@ -320,7 +320,7 @@ def reconcile(self, event=None) -> None: # noqa: C901 if not self._upgrade.in_progress and isinstance( workload_, workload.AuthenticatedWorkload ): - self._reconcile_ports() + self._reconcile_ports(event=event) # Empty waiting status means we're waiting for database requires relation before # starting workload diff --git a/src/machine_charm.py b/src/machine_charm.py index bf9da991..9374b2e0 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -86,12 +86,12 @@ def _exposed_read_only_endpoint(self) -> str: def is_externally_accessible(self, event=None) -> typing.Optional[bool]: return self._database_provides.external_connectivity(event) - def _reconcile_node_port(self, event) -> None: + def _reconcile_node_port(self, *, event) -> None: """Only applies to Kubernetes charm, so no-op.""" pass - def _reconcile_ports(self) -> None: - if self.is_externally_accessible(): + def _reconcile_ports(self, *, event) -> None: + if self.is_externally_accessible(event): ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] else: ports = [] diff --git a/src/machine_logrotate.py b/src/machine_logrotate.py index 0552bfd5..58a47e80 100644 --- a/src/machine_logrotate.py +++ b/src/machine_logrotate.py @@ -51,8 +51,6 @@ def enable(self) -> None: def disable(self) -> None: logger.debug("Removing cron job for log rotation of mysqlrouter") - if self._logrotate_config.exists(): - self._logrotate_config.unlink() - if self._cron_file.exists(): - self._cron_file.unlink() + self._logrotate_config.unlink(missing_ok=True) + self._cron_file.unlink(missing_ok=True) logger.debug("Removed cron job for log rotation of mysqlrouter") diff --git a/src/snap.py b/src/snap.py index 939967e4..e924fb6d 100644 --- a/src/snap.py +++ b/src/snap.py @@ -228,6 +228,10 @@ def update_mysql_router_exporter_service( "mysqlrouter.tls-key-path": key_filename, } ) + else: + _snap.unset("mysqlrouter.tls-cacert-path") + _snap.unset("mysqlrouter.tls-cert-path") + _snap.unset("mysqlrouter.tls-key-path") _snap.start([self._EXPORTER_SERVICE_NAME], enable=True) else: _snap.stop([self._EXPORTER_SERVICE_NAME], disable=True) From 03d4ed4765345e84c094170c988b5b596376c490 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 24 Apr 2024 13:57:54 +0000 Subject: [PATCH 29/31] Fix assertion message in integration test --- tests/integration/test_exporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_exporter.py b/tests/integration/test_exporter.py index d4fdebb4..63d6739c 100644 --- a/tests/integration/test_exporter.py +++ b/tests/integration/test_exporter.py @@ -248,7 +248,7 @@ async def test_exporter_endpoint_with_tls(ops_test: OpsTest) -> None: mysql_router_unit.name, socket="/var/snap/charmed-mysql/common/run/mysqlrouter/mysql.sock", ) - assert "CN = Test CA" in issuer, "Expected mysqlrouter autogenerated certificate" + assert "CN = Test CA" in issuer, f"Expected mysqlrouter certificate from {TLS_APP_NAME}" logger.info("Removing 
relation between mysqlrouter and grafana agent") await mysql_router_app.remove_relation( From 0978187f9b60c3c39d9f8f7cfbb39011ec0cdb90 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 24 Apr 2024 15:29:05 +0000 Subject: [PATCH 30/31] Address PR feedback --- src/abstract_charm.py | 11 ++++++++--- src/machine_charm.py | 10 +++++----- src/machine_upgrade.py | 3 ++- src/machine_workload.py | 17 +++++++++++------ src/relations/tls.py | 20 ++++++++++---------- src/workload.py | 34 +++++++++++++++++++--------------- 6 files changed, 55 insertions(+), 40 deletions(-) diff --git a/src/abstract_charm.py b/src/abstract_charm.py index a06e5cb5..b395d87e 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -110,8 +110,11 @@ def _exposed_read_only_endpoint(self) -> str: """The exposed read-only endpoint""" @abc.abstractmethod - def is_externally_accessible(self, event=None) -> typing.Optional[bool]: - """Whether endpoints should be externally accessible""" + def is_externally_accessible(self, *, event) -> typing.Optional[bool]: + """Whether endpoints should be externally accessible. + + Only defined in vm charm to return True/False. In k8s charm, returns None. + """ @property def _tls_certificate_saved(self) -> bool: @@ -208,7 +211,7 @@ def set_status(self, *, event, app=True, unit=True) -> None: logger.debug(f"Set unit status to {self.unit.status}") @abc.abstractmethod - def wait_until_mysql_router_ready(self) -> None: + def wait_until_mysql_router_ready(self, *, event) -> None: """Wait until a connection to MySQL Router is possible. Retry every 5 seconds for up to 30 seconds. @@ -268,6 +271,7 @@ def reconcile(self, event=None) -> None: # noqa: C901 if self._upgrade.unit_state == "outdated": if self._upgrade.authorized: self._upgrade.upgrade_unit( + event=event, workload_=workload_, tls=self._tls_certificate_saved, exporter_config=self._cos_exporter_config(event), @@ -310,6 +314,7 @@ def reconcile(self, event=None) -> None: # noqa: C901 ) if workload_.container_ready: workload_.reconcile( + event=event, tls=self._tls_certificate_saved, unit_name=self.unit.name, exporter_config=self._cos_exporter_config(event), diff --git a/src/machine_charm.py b/src/machine_charm.py index 9374b2e0..fb14c9a5 100755 --- a/src/machine_charm.py +++ b/src/machine_charm.py @@ -83,7 +83,7 @@ def _exposed_read_write_endpoint(self) -> str: def _exposed_read_only_endpoint(self) -> str: return f"{self.host_address}:{self._READ_ONLY_PORT}" - def is_externally_accessible(self, event=None) -> typing.Optional[bool]: + def is_externally_accessible(self, *, event) -> typing.Optional[bool]: return self._database_provides.external_connectivity(event) def _reconcile_node_port(self, *, event) -> None: @@ -91,13 +91,13 @@ def _reconcile_node_port(self, *, event) -> None: pass def _reconcile_ports(self, *, event) -> None: - if self.is_externally_accessible(event): + if self.is_externally_accessible(event=event): ports = [self._READ_WRITE_PORT, self._READ_ONLY_PORT] else: ports = [] self.unit.set_ports(*ports) - def wait_until_mysql_router_ready(self) -> None: + def wait_until_mysql_router_ready(self, *, event) -> None: logger.debug("Waiting until MySQL Router is ready") self.unit.status = ops.MaintenanceStatus("MySQL Router starting") try: @@ -107,7 +107,7 @@ def wait_until_mysql_router_ready(self) -> None: wait=tenacity.wait_fixed(5), ): with attempt: - if self.is_externally_accessible(): + if self.is_externally_accessible(event=event): for port in ( self._READ_WRITE_PORT, self._READ_ONLY_PORT, @@ -173,7 +173,7 @@ def 
_on_force_upgrade_action(self, event: ops.ActionEvent) -> None: logger.debug("Forcing upgrade") event.log(f"Forcefully upgrading {self.unit.name}") self._upgrade.upgrade_unit( - workload_=self.get_workload(event=None), tls=self._tls_certificate_saved + event=event, workload_=self.get_workload(event=None), tls=self._tls_certificate_saved ) self.reconcile() event.set_results({"result": f"Forcefully upgraded {self.unit.name}"}) diff --git a/src/machine_upgrade.py b/src/machine_upgrade.py index f8413ae7..58366218 100644 --- a/src/machine_upgrade.py +++ b/src/machine_upgrade.py @@ -158,13 +158,14 @@ def authorized(self) -> bool: def upgrade_unit( self, *, + event, workload_: workload.Workload, tls: bool, exporter_config: "relations.cos.ExporterConfig", ) -> None: logger.debug(f"Upgrading {self.authorized=}") self.unit_state = "upgrading" - workload_.upgrade(unit=self._unit, tls=tls, exporter_config=exporter_config) + workload_.upgrade(event=event, unit=self._unit, tls=tls, exporter_config=exporter_config) self._unit_workload_container_version = snap.REVISION self._unit_workload_version = self._current_versions["workload"] logger.debug( diff --git a/src/machine_workload.py b/src/machine_workload.py index ddeff442..62fee82e 100644 --- a/src/machine_workload.py +++ b/src/machine_workload.py @@ -11,6 +11,9 @@ import workload +if typing.TYPE_CHECKING: + import relations.database_requires + logger = logging.getLogger(__name__) @@ -18,9 +21,11 @@ class AuthenticatedMachineWorkload(workload.AuthenticatedWorkload): """Workload with connection to MySQL cluster and with Unix sockets enabled""" # TODO python3.10 min version: Use `list` instead of `typing.List` - def _get_bootstrap_command(self, password: str) -> typing.List[str]: - command = super()._get_bootstrap_command(password) - if self._charm.is_externally_accessible(): + def _get_bootstrap_command( + self, *, event, connection_info: "relations.database_requires.ConnectionInformation" + ) -> typing.List[str]: + command = super()._get_bootstrap_command(connection_info) + if self._charm.is_externally_accessible(event=event): command.extend( [ "--conf-bind-address", @@ -65,7 +70,7 @@ def _update_configured_socket_file_locations(self) -> None: self._container.router_config_file.write_text(output.getvalue()) logger.debug("Updated configured socket file locations") - def _bootstrap_router(self, *, tls: bool) -> None: - super()._bootstrap_router(tls=tls) - if not self._charm.is_externally_accessible(): + def _bootstrap_router(self, *, event, tls: bool) -> None: + super()._bootstrap_router(event=event, tls=tls) + if not self._charm.is_externally_accessible(event=event): self._update_configured_socket_file_locations() diff --git a/src/relations/tls.py b/src/relations/tls.py index a52bb9fa..7b5f79c2 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -111,10 +111,10 @@ def save_certificate(self, event: tls_certificates.CertificateAvailableEvent) -> logger.debug(f"Saved TLS certificate {event=}") self._charm.reconcile(event=None) - def _generate_csr(self, key: bytes) -> bytes: + def _generate_csr(self, *, event, key: bytes) -> bytes: """Generate certificate signing request (CSR).""" sans_ip = ["127.0.0.1"] # needed for the HTTP server when related with COS - if self._charm.is_externally_accessible(): + if self._charm.is_externally_accessible(event=event): sans_ip.append(self._charm.host_address) return tls_certificates.generate_csr( @@ -124,23 +124,23 @@ def _generate_csr(self, key: bytes) -> bytes: sans_ip=sans_ip, ) - def 
request_certificate_creation(self): + def request_certificate_creation(self, *, event): """Request new TLS certificate from related provider charm.""" logger.debug("Requesting TLS certificate creation") - csr = self._generate_csr(self.key.encode("utf-8")) + csr = self._generate_csr(event=event, key=self.key.encode("utf-8")) self._interface.request_certificate_creation(certificate_signing_request=csr) self._secrets.set_value( relations.secrets.UNIT_SCOPE, _TLS_REQUESTED_CSR, csr.decode("utf-8") ) logger.debug("Requested TLS certificate creation") - def request_certificate_renewal(self): + def request_certificate_renewal(self, *, event): """Request TLS certificate renewal from related provider charm.""" logger.debug("Requesting TLS certificate renewal") old_csr = self._secrets.get_value(relations.secrets.UNIT_SCOPE, _TLS_ACTIVE_CSR).encode( "utf-8" ) - new_csr = self._generate_csr(self.key.encode("utf-8")) + new_csr = self._generate_csr(event=event, key=self.key.encode("utf-8")) self._interface.request_certificate_renewal( old_certificate_signing_request=old_csr, new_certificate_signing_request=new_csr ) @@ -252,7 +252,7 @@ def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: logger.debug("No TLS certificate relation active. Skipped certificate request") else: try: - self._relation.request_certificate_creation() + self._relation.request_certificate_creation(event=event) except Exception as e: event.fail(f"Failed to request certificate: {e}") logger.exception( @@ -261,9 +261,9 @@ def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: raise logger.debug("Handled set TLS private key action") - def _on_tls_relation_created(self, _) -> None: + def _on_tls_relation_created(self, event) -> None: """Request certificate when TLS relation created.""" - self._relation.request_certificate_creation() + self._relation.request_certificate_creation(event) def _on_tls_relation_broken(self, _) -> None: """Delete TLS certificate.""" @@ -283,4 +283,4 @@ def _on_certificate_expiring(self, event: tls_certificates.CertificateExpiringEv logger.warning("Unknown certificate expiring") return - self._relation.request_certificate_renewal() + self._relation.request_certificate_renewal(event) diff --git a/src/workload.py b/src/workload.py index 96a1b132..df974f16 100644 --- a/src/workload.py +++ b/src/workload.py @@ -77,7 +77,7 @@ def version(self) -> str: return "" def upgrade( - self, *, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + self, *, event, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" ) -> None: """Upgrade MySQL Router. 
@@ -157,6 +157,7 @@ def _disable_router(self) -> None: def reconcile( self, *, + event, tls: bool, unit_name: str, exporter_config: "relations.cos.ExporterConfig", @@ -253,15 +254,17 @@ def _get_bootstrap_command( "--conf-use-gr-notifications", ] - def _bootstrap_router(self, *, tls: bool) -> None: + def _bootstrap_router(self, *, event, tls: bool) -> None: """Bootstrap MySQL Router.""" logger.debug( f"Bootstrapping router {tls=}, {self._connection_info.host=}, {self._connection_info.port=}" ) # Redact password from log - logged_command = self._get_bootstrap_command(self._connection_info.redacted) + logged_command = self._get_bootstrap_command( + event=event, connection_info=self._connection_info.redacted + ) - command = self._get_bootstrap_command(self._connection_info) + command = self._get_bootstrap_command(event=event, connection_info=self._connection_info) try: self._container.run_mysql_router(command, timeout=30) except container.CalledProcessError as e: @@ -311,31 +314,31 @@ def _router_username(self) -> str: """ return self._parse_username_from_config(self._container.router_config_file.read_text()) - def _restart(self, *, tls: bool) -> None: + def _restart(self, *, event, tls: bool) -> None: """Restart MySQL Router to enable or disable TLS.""" logger.debug("Restarting MySQL Router") assert self._container.mysql_router_service_enabled is True self._container.update_mysql_router_service(enabled=True, tls=tls) logger.debug("Restarted MySQL Router") - self._charm.wait_until_mysql_router_ready() + self._charm.wait_until_mysql_router_ready(event) # wait_until_mysql_router_ready will set WaitingStatus—override it with current charm # status self._charm.set_status(event=None) - def _enable_router(self, *, tls: bool, unit_name: str) -> None: + def _enable_router(self, *, event, tls: bool, unit_name: str) -> None: """Enable router after setting up all the necessary prerequisites.""" logger.debug("Enabling MySQL Router service") self._cleanup_after_upgrade_or_potential_container_restart() # create an empty credentials file, if the file does not exist self._container.create_router_rest_api_credentials_file() - self._bootstrap_router(tls=tls) + self._bootstrap_router(event=event, tls=tls) self.shell.add_attributes_to_mysql_router_user( username=self._router_username, router_id=self._router_id, unit_name=unit_name ) self._container.update_mysql_router_service(enabled=True, tls=tls) self._logrotate.enable() logger.debug("Enabled MySQL Router service") - self._charm.wait_until_mysql_router_ready() + self._charm.wait_until_mysql_router_ready(event) def _enable_exporter( self, *, tls: bool, exporter_config: "relations.cos.ExporterConfig" @@ -356,6 +359,7 @@ def _enable_exporter( def reconcile( self, *, + event, tls: bool, unit_name: str, exporter_config: "relations.cos.ExporterConfig", @@ -376,23 +380,23 @@ def reconcile( key=key, certificate=certificate, certificate_authority=certificate_authority ) if custom_certificate != certificate and self._container.mysql_router_service_enabled: - self._restart(tls=tls) + self._restart(event=event, tls=tls) else: self._disable_tls() if custom_certificate and self._container.mysql_router_service_enabled: - self._restart(tls=tls) + self._restart(event=event, tls=tls) # If the host or port changes, MySQL Router will receive topology change # notifications from MySQL. # Therefore, if the host or port changes, we do not need to restart MySQL Router. 
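+        # The Unix socket file only exists when the router was bootstrapped without external
+        # access (socket-only mode, `--conf-skip-tcp`), so `is_charm_exposed == socket_file_exists`
+        # means the current bootstrap no longer matches the requested exposure and the router
+        # must be re-bootstrapped.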
- is_charm_exposed = self._charm.is_externally_accessible() + is_charm_exposed = self._charm.is_externally_accessible(event=event) socket_file_exists = self._container.path("/run/mysqlrouter/mysql.sock").exists() require_rebootstrap = is_charm_exposed == socket_file_exists if require_rebootstrap: self._disable_router() if not self._container.mysql_router_service_enabled: - self._enable_router(tls=tls, unit_name=unit_name) + self._enable_router(event=event, tls=tls, unit_name=unit_name) if (not self._container.mysql_router_exporter_service_enabled and exporter_config) or ( self._container.mysql_router_exporter_service_enabled @@ -417,7 +421,7 @@ def status(self) -> typing.Optional[ops.StatusBase]: ) def upgrade( - self, *, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" + self, *, event, unit: ops.Unit, tls: bool, exporter_config: "relations.cos.ExporterConfig" ) -> None: enabled = self._container.mysql_router_service_enabled exporter_enabled = self._container.mysql_router_exporter_service_enabled @@ -429,7 +433,7 @@ def upgrade( super().upgrade(unit=unit, tls=tls, exporter_config=exporter_config) if enabled: logger.debug("Re-enabling MySQL Router service after upgrade") - self._enable_router(tls=tls, unit_name=unit.name) + self._enable_router(event=event, tls=tls, unit_name=unit.name) if exporter_enabled: self._enable_exporter(tls=tls, exporter_config=exporter_config) From f8828dc7521bdc0113f1c05ebe0e4d5438efd9b6 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 24 Apr 2024 16:35:51 +0000 Subject: [PATCH 31/31] Fix bugs introduced while addressing PR feedback; pass event as kwarg --- src/relations/tls.py | 4 ++-- src/workload.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/relations/tls.py b/src/relations/tls.py index 7b5f79c2..f94c4c1b 100644 --- a/src/relations/tls.py +++ b/src/relations/tls.py @@ -263,7 +263,7 @@ def _on_set_tls_private_key(self, event: ops.ActionEvent) -> None: def _on_tls_relation_created(self, event) -> None: """Request certificate when TLS relation created.""" - self._relation.request_certificate_creation(event) + self._relation.request_certificate_creation(event=event) def _on_tls_relation_broken(self, _) -> None: """Delete TLS certificate.""" @@ -283,4 +283,4 @@ def _on_certificate_expiring(self, event: tls_certificates.CertificateExpiringEv logger.warning("Unknown certificate expiring") return - self._relation.request_certificate_renewal(event) + self._relation.request_certificate_renewal(event=event) diff --git a/src/workload.py b/src/workload.py index df974f16..01818b3e 100644 --- a/src/workload.py +++ b/src/workload.py @@ -320,7 +320,7 @@ def _restart(self, *, event, tls: bool) -> None: assert self._container.mysql_router_service_enabled is True self._container.update_mysql_router_service(enabled=True, tls=tls) logger.debug("Restarted MySQL Router") - self._charm.wait_until_mysql_router_ready(event) + self._charm.wait_until_mysql_router_ready(event=event) # wait_until_mysql_router_ready will set WaitingStatus—override it with current charm # status self._charm.set_status(event=None) @@ -338,7 +338,7 @@ def _enable_router(self, *, event, tls: bool, unit_name: str) -> None: self._container.update_mysql_router_service(enabled=True, tls=tls) self._logrotate.enable() logger.debug("Enabled MySQL Router service") - self._charm.wait_until_mysql_router_ready(event) + self._charm.wait_until_mysql_router_ready(event=event) def _enable_exporter( self, *, tls: bool, exporter_config: "relations.cos.ExporterConfig"