From 7686dd0278adea33e74197b1e4a1105e14b19084 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Tue, 25 Mar 2025 04:48:39 +0200 Subject: [PATCH 01/21] [MISC] Use latest/stable lxd (#804) * Use latest stable lxd * Test tweaks * Test tweaks --- concierge.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/concierge.yaml b/concierge.yaml index 15a78cc947..29d78b95b5 100644 --- a/concierge.yaml +++ b/concierge.yaml @@ -5,6 +5,7 @@ providers: lxd: enable: true bootstrap: true + channel: latest/stable host: snaps: jhack: From 5b34439898bfe201294268504d7a2176cccffc39 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 23:23:32 +0100 Subject: [PATCH 02/21] Update canonical/data-platform-workflows action to v31.0.1 (#805) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/approve_renovate_pr.yaml | 2 +- .github/workflows/check_pr.yaml | 2 +- .github/workflows/ci.yaml | 4 ++-- .github/workflows/promote.yaml | 2 +- .github/workflows/release.yaml | 2 +- .github/workflows/sync_docs.yaml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/approve_renovate_pr.yaml b/.github/workflows/approve_renovate_pr.yaml index 422766f800..450111576b 100644 --- a/.github/workflows/approve_renovate_pr.yaml +++ b/.github/workflows/approve_renovate_pr.yaml @@ -10,6 +10,6 @@ on: jobs: approve-pr: name: Approve Renovate pull request - uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v31.0.1 permissions: pull-requests: write # Needed to approve PR diff --git a/.github/workflows/check_pr.yaml b/.github/workflows/check_pr.yaml index beaa1541a3..84c56d20c7 100644 --- a/.github/workflows/check_pr.yaml +++ b/.github/workflows/check_pr.yaml @@ -15,4 +15,4 @@ on: jobs: check-pr: name: Check pull request - uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v31.0.1 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d203b8633..cdf5a0e2e6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ on: jobs: lint: name: Lint - uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.1 unit-test: name: Unit test charm @@ -49,7 +49,7 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v31.0.1 integration-test: name: Integration test charm diff --git a/.github/workflows/promote.yaml b/.github/workflows/promote.yaml index 7b4c329c6f..03e7bc7a29 100644 --- a/.github/workflows/promote.yaml +++ b/.github/workflows/promote.yaml @@ -25,7 +25,7 @@ on: jobs: promote: name: Promote charm - uses: canonical/data-platform-workflows/.github/workflows/_promote_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/_promote_charm.yaml@v31.0.1 with: track: '14' from-risk: ${{ inputs.from-risk }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 5e893a873a..28b9ddc5e1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -27,7 +27,7 @@ jobs: name: 
Release charm needs: - ci-tests - uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v31.0.1 with: channel: 14/edge artifact-prefix: ${{ needs.ci-tests.outputs.artifact-prefix }} diff --git a/.github/workflows/sync_docs.yaml b/.github/workflows/sync_docs.yaml index 475fe4dae0..4b6e361979 100644 --- a/.github/workflows/sync_docs.yaml +++ b/.github/workflows/sync_docs.yaml @@ -10,7 +10,7 @@ on: jobs: sync-docs: name: Sync docs from Discourse - uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v31.0.1 with: reviewers: a-velasco,izmalk permissions: From 667756029e3e92e97e0745b3023647fd845ccee1 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Fri, 28 Mar 2025 15:15:44 +0200 Subject: [PATCH 03/21] [DPE-6874] Poll all members in the cluster topology script (#810) * Poll all members in the cluster topology script * Dual branch config * Unit tests and bugfixes * Add peers when starting the observer * Retry sync up checks --- .github/renovate.json5 | 26 +------ .github/workflows/check_pr.yaml | 1 + scripts/cluster_topology_observer.py | 45 +++++++++--- src/charm.py | 42 +++++------ src/cluster_topology_observer.py | 7 +- tests/integration/ha_tests/helpers.py | 12 ++- tests/unit/test_charm.py | 6 -- tests/unit/test_cluster_topology_observer.py | 77 ++++++++++++++------ 8 files changed, 124 insertions(+), 92 deletions(-) diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 34085c9225..cd60ef68a5 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -6,6 +6,7 @@ reviewers: [ 'team:data-platform-postgresql', ], + "baseBranches": ["main", "/^*\\/edge$/"], packageRules: [ { matchPackageNames: [ @@ -13,32 +14,7 @@ ], allowedVersions: '<2.0.0', }, - { - matchManagers: [ - 'custom.regex', - ], - matchDepNames: [ - 'juju', - ], - matchDatasources: [ - 'pypi', - ], - allowedVersions: '<3', - groupName: 'Juju agents', - }, ], customManagers: [ - { - customType: 'regex', - fileMatch: [ - '^\\.github/workflows/[^/]+\\.ya?ml$', - ], - matchStrings: [ - '(libjuju: )==(?.*?) +# renovate: latest libjuju 2', - ], - depNameTemplate: 'juju', - datasourceTemplate: 'pypi', - versioningTemplate: 'loose', - }, ], } diff --git a/.github/workflows/check_pr.yaml b/.github/workflows/check_pr.yaml index 84c56d20c7..f613a6aed9 100644 --- a/.github/workflows/check_pr.yaml +++ b/.github/workflows/check_pr.yaml @@ -11,6 +11,7 @@ on: - edited branches: - main + - '*/edge' jobs: check-pr: diff --git a/scripts/cluster_topology_observer.py b/scripts/cluster_topology_observer.py index 346d461319..c41ee34207 100644 --- a/scripts/cluster_topology_observer.py +++ b/scripts/cluster_topology_observer.py @@ -8,6 +8,7 @@ import sys from ssl import CERT_NONE, create_default_context from time import sleep +from urllib.parse import urljoin from urllib.request import urlopen API_REQUEST_TIMEOUT = 5 @@ -17,6 +18,10 @@ LOG_FILE_PATH = "/var/log/cluster_topology_observer.log" +class UnreachableUnitsError(Exception): + """Cannot reach any known cluster member.""" + + def dispatch(run_cmd, unit, charm_dir): """Use the input juju-run command to dispatch a :class:`ClusterTopologyChangeEvent`.""" dispatch_sub_cmd = "JUJU_DISPATCH_PATH=hooks/cluster_topology_change {}/dispatch" @@ -29,25 +34,43 @@ def main(): Watch the Patroni API cluster info. 
When changes are detected, dispatch the change event. """ - patroni_url, run_cmd, unit, charm_dir = sys.argv[1:] + patroni_urls, run_cmd, unit, charm_dir = sys.argv[1:] previous_cluster_topology = {} + urls = [urljoin(url, PATRONI_CLUSTER_STATUS_ENDPOINT) for url in patroni_urls.split(",")] + member_name = unit.replace("/", "-") while True: # Disable TLS chain verification context = create_default_context() context.check_hostname = False context.verify_mode = CERT_NONE - # Scheme is generated by the charm - resp = urlopen( # noqa: S310 - f"{patroni_url}/{PATRONI_CLUSTER_STATUS_ENDPOINT}", - timeout=API_REQUEST_TIMEOUT, - context=context, - ) - cluster_status = json.loads(resp.read()) - current_cluster_topology = { - member["name"]: member["role"] for member in cluster_status["members"] - } + cluster_status = None + for url in urls: + try: + # Scheme is generated by the charm + resp = urlopen( # noqa: S310 + url, + timeout=API_REQUEST_TIMEOUT, + context=context, + ) + cluster_status = json.loads(resp.read()) + break + except Exception as e: + print(f"Failed to contact {url} with {e}") + continue + if not cluster_status: + raise UnreachableUnitsError("Unable to reach cluster members") + current_cluster_topology = {} + urls = [] + for member in cluster_status["members"]: + current_cluster_topology[member["name"]] = member["role"] + member_url = urljoin(member["api_url"], PATRONI_CLUSTER_STATUS_ENDPOINT) + # Call the current unit first + if member["name"] == member_name: + urls.insert(0, member_url) + else: + urls.append(member_url) # If it's the first time the cluster topology was retrieved, then store it and use # it for subsequent checks. diff --git a/src/charm.py b/src/charm.py index 30ca8117a9..ad4f6c2d1a 100755 --- a/src/charm.py +++ b/src/charm.py @@ -385,30 +385,22 @@ def primary_endpoint(self) -> str | None: logger.debug("primary endpoint early exit: Peer relation not joined yet.") return None try: - for attempt in Retrying(stop=stop_after_delay(5), wait=wait_fixed(3)): - with attempt: - primary = self._patroni.get_primary() - if primary is None and (standby_leader := self._patroni.get_standby_leader()): - primary = standby_leader - primary_endpoint = self._patroni.get_member_ip(primary) - # Force a retry if there is no primary or the member that was - # returned is not in the list of the current cluster members - # (like when the cluster was not updated yet after a failed switchover). - if not primary_endpoint or primary_endpoint not in self._units_ips: - # TODO figure out why peer data is not available - if ( - primary_endpoint - and len(self._units_ips) == 1 - and len(self._peers.units) > 1 - ): - logger.warning( - "Possibly incoplete peer data: Will not map primary IP to unit IP" - ) - return primary_endpoint - logger.debug( - "primary endpoint early exit: Primary IP not in cached peer list." - ) - primary_endpoint = None + primary = self._patroni.get_primary() + if primary is None and (standby_leader := self._patroni.get_standby_leader()): + primary = standby_leader + primary_endpoint = self._patroni.get_member_ip(primary) + # Force a retry if there is no primary or the member that was + # returned is not in the list of the current cluster members + # (like when the cluster was not updated yet after a failed switchover). 
+ if not primary_endpoint or primary_endpoint not in self._units_ips: + # TODO figure out why peer data is not available + if primary_endpoint and len(self._units_ips) == 1 and len(self._peers.units) > 1: + logger.warning( + "Possibly incoplete peer data: Will not map primary IP to unit IP" + ) + return primary_endpoint + logger.debug("primary endpoint early exit: Primary IP not in cached peer list.") + primary_endpoint = None except RetryError: return None else: @@ -952,6 +944,8 @@ def _units_ips(self) -> set[str]: # Get all members IPs and remove the current unit IP from the list. addresses = {self._get_unit_ip(unit) for unit in self._peers.units} addresses.add(self._unit_ip) + if None in addresses: + addresses.remove(None) return addresses @property diff --git a/src/cluster_topology_observer.py b/src/cluster_topology_observer.py index 6aff9b29c3..aa85a0e48e 100644 --- a/src/cluster_topology_observer.py +++ b/src/cluster_topology_observer.py @@ -70,12 +70,17 @@ def start_observer(self): if "JUJU_CONTEXT_ID" in new_env: new_env.pop("JUJU_CONTEXT_ID") + urls = [self._charm._patroni._patroni_url] + [ + self._charm._patroni._patroni_url.replace(self._charm._patroni.unit_ip, peer) + for peer in list(self._charm._patroni.peers_ips) + ] + # Input is generated by the charm pid = subprocess.Popen( # noqa: S603 [ "/usr/bin/python3", "scripts/cluster_topology_observer.py", - self._charm._patroni._patroni_url, + ",".join(urls), self._run_cmd, self._charm.unit.name, self._charm.charm_dir, diff --git a/tests/integration/ha_tests/helpers.py b/tests/integration/ha_tests/helpers.py index d9ea25543d..c22cc85c2c 100644 --- a/tests/integration/ha_tests/helpers.py +++ b/tests/integration/ha_tests/helpers.py @@ -240,13 +240,17 @@ async def is_cluster_updated( # Verify that no writes to the database were missed after stopping the writes. logger.info("checking that no writes to the database were missed after stopping the writes") - total_expected_writes = await check_writes(ops_test, use_ip_from_inside) + for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(5), reraise=True): + with attempt: + total_expected_writes = await check_writes(ops_test, use_ip_from_inside) # Verify that old primary is up-to-date. logger.info("checking that the former primary is up to date with the cluster after restarting") - assert await is_secondary_up_to_date( - ops_test, primary_name, total_expected_writes, use_ip_from_inside - ), "secondary not up to date with the cluster after restarting." + for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(5), reraise=True): + with attempt: + assert await is_secondary_up_to_date( + ops_test, primary_name, total_expected_writes, use_ip_from_inside + ), "secondary not up to date with the cluster after restarting." async def check_writes( diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 7de1a502cf..e500a8d099 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -177,8 +177,6 @@ def test_patroni_scrape_config_tls(harness): def test_primary_endpoint(harness): with ( - patch("charm.stop_after_delay", new_callable=PropertyMock) as _stop_after_delay, - patch("charm.wait_fixed", new_callable=PropertyMock) as _wait_fixed, patch( "charm.PostgresqlOperatorCharm._units_ips", new_callable=PropertyMock, @@ -190,10 +188,6 @@ def test_primary_endpoint(harness): _patroni.return_value.get_primary.return_value = sentinel.primary assert harness.charm.primary_endpoint == "1.1.1.1" - # Check needed to ensure a fast charm deployment. 
- _stop_after_delay.assert_called_once_with(5) - _wait_fixed.assert_called_once_with(3) - _patroni.return_value.get_member_ip.assert_called_once_with(sentinel.primary) _patroni.return_value.get_primary.assert_called_once_with() diff --git a/tests/unit/test_cluster_topology_observer.py b/tests/unit/test_cluster_topology_observer.py index f079990251..3d0495b8eb 100644 --- a/tests/unit/test_cluster_topology_observer.py +++ b/tests/unit/test_cluster_topology_observer.py @@ -1,7 +1,9 @@ # Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. import signal -from unittest.mock import Mock, PropertyMock, patch +import sys +from json import dumps +from unittest.mock import Mock, PropertyMock, call, patch, sentinel import pytest from ops.charm import CharmBase @@ -13,25 +15,7 @@ ClusterTopologyChangeCharmEvents, ClusterTopologyObserver, ) -from scripts.cluster_topology_observer import dispatch - - -# This method will be used by the mock to replace requests.get -def mocked_requests_get(*args, **kwargs): - class MockResponse: - def __init__(self, json_data): - self.json_data = json_data - - def json(self): - return self.json_data - - data = { - "http://server1/cluster": { - "members": [{"name": "postgresql-0", "host": "1.1.1.1", "role": "leader", "lag": "1"}] - } - } - if args[0] in data: - return MockResponse(data[args[0]]) +from scripts.cluster_topology_observer import UnreachableUnitsError, dispatch, main class MockCharm(CharmBase): @@ -48,7 +32,7 @@ def _on_cluster_topology_change(self, _) -> None: @property def _patroni(self) -> Patroni: - return Mock(_patroni_url="http://1.1.1.1:8008/", verify=True) + return Mock(_patroni_url="http://1.1.1.1:8008/", peers_ips={}, verify=True) @property def _peers(self) -> Relation | None: @@ -153,3 +137,54 @@ def test_dispatch(harness): harness.charm.unit.name, f"JUJU_DISPATCH_PATH=hooks/cluster_topology_change {charm_dir}/dispatch", ]) + + +def test_main(): + with ( + patch.object( + sys, + "argv", + ["cmd", "http://server1:8008,http://server2:8008", "run_cmd", "unit/0", "charm_dir"], + ), + patch("scripts.cluster_topology_observer.sleep", return_value=None), + patch("scripts.cluster_topology_observer.urlopen") as _urlopen, + patch("scripts.cluster_topology_observer.subprocess") as _subprocess, + patch( + "scripts.cluster_topology_observer.create_default_context", + return_value=sentinel.sslcontext, + ), + ): + response1 = { + "members": [ + {"name": "unit-2", "api_url": "http://server3:8008/patroni", "role": "standby"}, + {"name": "unit-0", "api_url": "http://server1:8008/patroni", "role": "leader"}, + ] + } + mock1 = Mock() + mock1.read.return_value = dumps(response1) + response2 = { + "members": [ + {"name": "unit-2", "api_url": "https://server3:8008/patroni", "role": "leader"}, + ] + } + mock2 = Mock() + mock2.read.return_value = dumps(response2) + _urlopen.side_effect = [mock1, Exception, mock2] + with pytest.raises(UnreachableUnitsError): + main() + assert _urlopen.call_args_list == [ + # Iteration 1. 
server2 is not called + call("http://server1:8008/cluster", timeout=5, context=sentinel.sslcontext), + # Iteration 2 local unit server1 is called first + call("http://server1:8008/cluster", timeout=5, context=sentinel.sslcontext), + call("http://server3:8008/cluster", timeout=5, context=sentinel.sslcontext), + # Iteration 3 Last known member is server3 + call("https://server3:8008/cluster", timeout=5, context=sentinel.sslcontext), + ] + + _subprocess.run.assert_called_once_with([ + "run_cmd", + "-u", + "unit/0", + "JUJU_DISPATCH_PATH=hooks/cluster_topology_change charm_dir/dispatch", + ]) From d35b18dd19fb61b1cea25e37e95ee1f67b388cc6 Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Fri, 4 Apr 2025 09:21:38 -0300 Subject: [PATCH 04/21] [DPE-6572] Add wal_keep_size config option (#799) * Add wal_keep_size config option Signed-off-by: Marcelo Henrique Neppel * Remove parameter addition Signed-off-by: Marcelo Henrique Neppel * Reset durability_wal_keep_size value to PG default Signed-off-by: Marcelo Henrique Neppel --------- Signed-off-by: Marcelo Henrique Neppel --- config.yaml | 6 ++++++ src/charm.py | 1 + src/config.py | 10 ++++++++++ tests/integration/test_db_admin.py | 2 +- 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/config.yaml b/config.yaml index efcc18d790..8819015d79 100644 --- a/config.yaml +++ b/config.yaml @@ -32,6 +32,12 @@ options: crashes and there are replicas. type: string default: "on" + durability_wal_keep_size: + description: | + Sets the minimum size of the WAL file to be kept for the replication. + Allowed values are: from 0 to 2147483647. + type: int + default: 4096 experimental_max_connections: type: int description: | diff --git a/src/charm.py b/src/charm.py index ad4f6c2d1a..accd6844be 100755 --- a/src/charm.py +++ b/src/charm.py @@ -1919,6 +1919,7 @@ def update_config(self, is_creating_backup: bool = False, no_peers: bool = False self._patroni.bulk_update_parameters_controller_by_patroni({ "max_connections": max_connections, "max_prepared_transactions": self.config.memory_max_prepared_transactions, + "wal_keep_size": self.config.durability_wal_keep_size, }) self._handle_postgresql_restart_need(enable_tls) diff --git a/src/config.py b/src/config.py index d5cb030102..a755ca0f6d 100644 --- a/src/config.py +++ b/src/config.py @@ -23,6 +23,7 @@ class CharmConfig(BaseConfigModel): connection_statement_timeout: int | None cpu_parallel_leader_participation: bool | None durability_synchronous_commit: str | None + durability_wal_keep_size: int | None experimental_max_connections: int | None instance_default_text_search_config: str | None instance_max_locks_per_transaction: int | None @@ -205,6 +206,15 @@ def durability_synchronous_commit_values(cls, value: str) -> str | None: return value + @validator("durability_wal_keep_size") + @classmethod + def durability_wal_keep_size_values(cls, value: int) -> int | None: + """Check durability_wal_keep_size config option is between 0 and 2147483647.""" + if value < 0 or value > 2147483647: + raise ValueError("Value is not between 0 and 2147483647") + + return value + @validator("instance_password_encryption") @classmethod def instance_password_encryption_values(cls, value: str) -> str | None: diff --git a/tests/integration/test_db_admin.py b/tests/integration/test_db_admin.py index 763b7a93cb..81cc03907b 100644 --- a/tests/integration/test_db_admin.py +++ b/tests/integration/test_db_admin.py @@ -44,7 +44,7 @@ async def test_landscape_scalable_bundle_db(ops_test: OpsTest, charm: str) -> No 
application_name=DATABASE_APP_NAME, num_units=DATABASE_UNITS, base=CHARM_BASE, - config={"profile": "testing"}, + config={"durability_wal_keep_size": 0, "profile": "testing"}, ) # Deploy and test the Landscape Scalable bundle (using this PostgreSQL charm). From dec80ef2ca39620250689e6e91494fef71519327 Mon Sep 17 00:00:00 2001 From: Andreia Date: Tue, 8 Apr 2025 12:54:00 +0200 Subject: [PATCH 05/21] Create pull_request_template.md (#814) --- .github/pull_request_template.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .github/pull_request_template.md diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..9a15830dbd --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,7 @@ +## Issue + +## Solution + +## Checklist +- [ ] I have added or updated any relevant documentation. +- [ ] I have cleaned any remaining cloud resources from my accounts. From 727900b8ab87e9360462790105e98fca9862f061 Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Wed, 9 Apr 2025 08:53:55 -0300 Subject: [PATCH 06/21] Create SECURITY.md (#822) --- SECURITY.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..ddba86ad9e --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,25 @@ +# Security policy + +## What qualifies as a security issue + +Credentials leakage, outdated dependencies with known vulnerabilities, and +other issues that could lead to unprivileged or unauthorized access to the +database or the system. + +## Reporting a vulnerability + +The easiest way to report a security issue is through +[GitHub](https://github.com/canonical/postgresql-operator/security/advisories/new). See +[Privately reporting a security +vulnerability](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability) +for instructions. + +The repository admins will be notified of the issue and will work with you +to determine whether the issue qualifies as a security issue and, if so, in +which component. We will then handle figuring out a fix, getting a CVE +assigned and coordinating the release of the fix. + +The [Ubuntu Security disclosure and embargo +policy](https://ubuntu.com/security/disclosure-policy) contains more +information about what you can expect when you contact us, and what we +expect from you. From f874a7bef91bbc62e850e12a45eb63f3347a6540 Mon Sep 17 00:00:00 2001 From: Vladimir Izmalkov <48120135+izmalk@users.noreply.github.com> Date: Thu, 10 Apr 2025 09:20:19 +0100 Subject: [PATCH 07/21] Update README file's security section (#827) * Refactor headings for syntax best practice * Update the Security section --- README.md | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 7c3e83c644..cbfdaf3a37 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,7 @@ To deploy on Kubernetes, please use [Charmed PostgreSQL K8s Operator](https://ch This operator provides a PostgreSQL database with replication enabled: one primary instance and one (or more) hot standby replicas. The Operator in this repository is a Python script which wraps PostgreSQL versions distributed by Ubuntu Jammy series and adding [Patroni](https://github.com/zalando/patroni) on top of it, providing lifecycle management and handling events (install, configure, integrate, remove, etc). 
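For illustration only, the sketch below shows how the Patroni layer can be inspected directly on a deployed cluster. The `8008` port and `/cluster` endpoint follow the defaults used by this repository's cluster topology observer script, the unit address is a placeholder, `curl` and `jq` are assumed to be available on the client, and the scheme may be `https` once TLS is enabled; treat this as a peek under the hood rather than a supported interface.

```shell
# Address of any PostgreSQL unit, copied from `juju status postgresql` (placeholder value).
UNIT_IP=10.0.0.10
# Patroni's cluster status endpoint lists every member with its current role,
# the same view the charm's topology observer polls.
curl -sk "http://${UNIT_IP}:8008/cluster" | jq '.members[] | {name, role}'
```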
## README contents + * [Basic usage](#basic-usage): Deploy and scale Charmerd PostgreSQL * [Integrations](#integrations-relations): Supported interfaces for integrations * [Contributing](#contributing) @@ -20,6 +21,7 @@ This operator provides a PostgreSQL database with replication enabled: one prima ## Basic usage ### Deployment + Bootstrap a [lxd controller](https://juju.is/docs/olm/lxd#heading--create-a-controller) and create a new Juju model: ```shell @@ -46,7 +48,9 @@ To add replicas to an existing deployment, see the [Add replicas](#add-replicas) >It is generally recommended to have an odd number of units to avoid a "[split-brain](https://en.wikipedia.org/wiki/Split-brain_(computing))" scenario ### Primary replica + To retrieve the primary replica, use the action `get-primary` on any of the units running PostgreSQL. + ```shell juju run postgresql/leader get-primary ``` @@ -58,17 +62,21 @@ Similarly, the primary replica is displayed as a status message in `juju status` #### Add replicas To add more replicas one can use the `juju add-unit` functionality i.e. + ```shell juju add-unit postgresql -n ``` + The implementation of `add-unit` allows the operator to add more than one unit, but functions internally by adding one replica at a time. This is done to avoid multiple replicas syncing from the primary at the same time. #### Remove replicas To scale down the number of replicas the `juju remove-unit` functionality may be used i.e. + ```shell juju remove-unit postgresql ``` + The implementation of `remove-unit` allows the operator to remove more than one unit. The functionality of `remove-unit` functions by removing one replica at a time to avoid downtime. ### Password rotation @@ -76,9 +84,11 @@ The implementation of `remove-unit` allows the operator to remove more than one #### Charm users To rotate the password of users internal to the Charmed PostgreSQL operator, use the `set-password` action as follows: + ```shell juju run postgresql/leader set-password username= password= ``` + >[!NOTE] >Currently, internal users are `operator`, `replication`, `backup` and `rewind`. These users should not be used outside the operator. @@ -90,7 +100,7 @@ To rotate the passwords of users created for integrated applications, the integr Supported [integrations](https://juju.is/docs/olm/relations): -#### New `postgresql_client` interface: +#### New `postgresql_client` interface Current charm relies on [Data Platform libraries](https://charmhub.io/data-platform-libs). Your application should define an interface in `metadata.yaml`: @@ -125,14 +135,15 @@ To remove a relation: juju remove-relation postgresql ``` -#### Legacy `pgsql` interface: +#### Legacy `pgsql` interface + We have also added support for the two database legacy relations from the [original version](https://launchpad.net/postgresql-charm) of the charm via the `pgsql` interface. Please note that these relations will be deprecated. ```shell juju relate postgresql:db mailman3-core juju relate postgresql:db-admin landscape-server ``` -#### `tls-certificates` interface: +#### `tls-certificates` interface The Charmed PostgreSQL Operator also supports TLS encryption on internal and external connections. Below is an example of enabling TLS with the [self-signed certificates charm](https://charmhub.io/self-signed-certificates). @@ -146,19 +157,24 @@ juju integrate postgresql self-signed-certificates # Disable TLS by removing relation. 
juju remove-relation postgresql self-signed-certificates ``` + >[!WARNING] >The TLS settings shown here are for self-signed-certificates, which are not recommended for production clusters. See the guide [Security with X.509 certificates](https://charmhub.io/topics/security-with-x-509-certificates) for an overview of available certificates charms. ## Security -Security issues in the Charmed PostgreSQL Operator can be reported through [LaunchPad](https://wiki.ubuntu.com/DebuggingSecurity#How%20to%20File). Please do not use GitHub to submit security issues. + +Security issues in the Charmed PostgreSQL Operator can be reported through [private security reports](https://github.com/canonical/postgresql-operator/security/advisories/new) on GitHub. +For more information, see the [Security policy](SECURITY.md). ## Contributing + * For best practices on how to write and contribute to charms, see the [Juju SDK docs](https://juju.is/docs/sdk/how-to) * For more specific developer guidance for contributions to Charmed PostgreSQL, see the file [CONTRIBUTING.md](CONTRIBUTING.md) * Report security issues for the Charmed PostgreSQL Operator through [LaunchPad](https://wiki.ubuntu.com/DebuggingSecurity#How%20to%20File). * Report technical issues, bug reports and feature requests through the [GitHub Issues tab](https://github.com/canonical/postgresql-operator/issues). ## Licensing and trademark + The Charmed PostgreSQL Operator is distributed under the [Apache Software License, version 2.0](https://github.com/canonical/postgresql-operator/blob/main/LICENSE). It installs, operates and depends on [PostgreSQL](https://www.postgresql.org/ftp/source/), which is licensed under the [PostgreSQL License](https://www.postgresql.org/about/licence/), a liberal Open Source license similar to the BSD or MIT licenses. PostgreSQL is a trademark or registered trademark of PostgreSQL Global Development Group. Other trademarks are property of their respective owners. From 852d80c80d53166408c20bc8045d214c76cffd3c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 09:21:04 -0300 Subject: [PATCH 08/21] Sync docs from Discourse (#796) Co-authored-by: GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com> --- docs/explanation.md | 18 ++- docs/explanation/e-architecture.md | 4 + docs/explanation/e-users.md | 15 ++- docs/how-to.md | 6 +- docs/how-to/h-deploy.md | 2 +- docs/how-to/h-enable-profiling.md | 174 +++++++++++++++++++++++++++++ docs/how-to/h-enable-tls.md | 16 +-- docs/how-to/h-external-access.md | 2 +- docs/how-to/h-integrate.md | 6 +- docs/overview.md | 10 +- docs/reference.md | 5 +- docs/reference/r-contacts.md | 11 +- docs/reference/r-sos-report.md | 114 +++++++++++++++++++ docs/tutorial/t-enable-tls.md | 4 +- docs/tutorial/t-set-up.md | 2 +- 15 files changed, 352 insertions(+), 37 deletions(-) create mode 100644 docs/how-to/h-enable-profiling.md create mode 100644 docs/reference/r-sos-report.md diff --git a/docs/explanation.md b/docs/explanation.md index 8fc131b9e6..05c2add64e 100644 --- a/docs/explanation.md +++ b/docs/explanation.md @@ -1,14 +1,22 @@ # Explanation -This section contains pages with more detailed explanations that provide additional context about some of the key concepts behind the PostgreSQL charm: +This section contains pages with more detailed explanations that provide additional context about key concepts behind the PostgreSQL charm. 
+## Core concepts and design * [Architecture] * [Interfaces and endpoints] +* [Juju] +* [Legacy charm] + +## Operational concepts * [Connection pooling] * [Users] * [Logs] -* [Juju] -* [Legacy charm] + +## Security and hardening +* [Security hardening guide][Security] + * [Cryptography] + @@ -18,4 +26,6 @@ This section contains pages with more detailed explanations that provide additio [Logs]: /t/12099 [Juju]: /t/11985 [Legacy charm]: /t/10690 -[Connection pooling]: /t/15777 \ No newline at end of file +[Connection pooling]: /t/15777 +[Security]: /t/16852 +[Cryptography]: /t/16853 \ No newline at end of file diff --git a/docs/explanation/e-architecture.md b/docs/explanation/e-architecture.md index 25beb72f43..2a9885f514 100644 --- a/docs/explanation/e-architecture.md +++ b/docs/explanation/e-architecture.md @@ -75,6 +75,10 @@ The snap "charmed-postgresql" also ships list of tools used by charm: The charm "[PostgreSQL Test App](https://charmhub.io/postgresql-test-app)" is a Canonical test application to validate the charm installation / functionality and perform the basic performance tests. +### GLAuth + +GLAuth is a secure, easy-to-use and open-sourced LDAP server which provides capabilities to centrally manage accounts across infrastructures. The charm is only available for Kubernetes clouds, under the [GLAuth-K8s operator](https://charmhub.io/glauth-k8s) page, so a cross-controller relation is needed in order to integrate both charms. + ### Grafana Grafana is an open-source visualization tools that allows to query, visualize, alert on, and visualize metrics from mixed datasources in configurable dashboards for observability. This charms is shipped with its own Grafana dashboard and supports integration with the [Grafana Operator](https://charmhub.io/grafana-k8s) to simplify observability. Please follow [COS Monitoring](/t/10600) setup. diff --git a/docs/explanation/e-users.md b/docs/explanation/e-users.md index 2a2c85d186..f0f9ce86da 100644 --- a/docs/explanation/e-users.md +++ b/docs/explanation/e-users.md @@ -1,9 +1,10 @@ # Charm Users explanations -There are two types of users in PostgreSQL: +There are three types of users in PostgreSQL: * Internal users (used by charm operator) -* Relation/integration users (used by related applications) +* Relation users (used by related applications) * Extra user roles (if default permissions are not enough) +* Identity users (used when LDAP is enabled) ## Internal users explanations: @@ -72,7 +73,7 @@ unit-postgresql-1: **Note**: the action `set-password` must be executed on juju leader unit (to update peer relation data with new value). -## Relation/integration users explanations: +## Relation users explanations: The operator created a dedicated user for every application related/integrated with database. Those users are removed on the juju relation/integration removal request. However, DB data stays in place and can be reused on re-created relations (using new user credentials): @@ -99,4 +100,10 @@ postgres=# \du When an application charm requests a new user through the relation/integration it can specify that the user should have the `admin` role in the `extra-user-roles` field. The `admin` role enables the new user to read and write to all databases (for the `postgres` system database it can only read data) and also to create and delete non-system databases. -**Note**: `extra-user-roles` is supported by modern interface `postgresql_client` only and missing for legacy `pgsql` interface. 
Read more about the supported charm interfaces [here](/t/10251). \ No newline at end of file +**Note**: `extra-user-roles` is supported by modern interface `postgresql_client` only and missing for legacy `pgsql` interface. Read more about the supported charm interfaces [here](/t/10251). + + +## Identity users explanations: +The operator considers Identity users all those that are automatically created when the LDAP integration is enabled, or in other words, the [GLAuth](https://charmhub.io/glauth-k8s) charm is related/integrated. + +When synchronized from the LDAP server, these users do not have any permissions by default, so the LDAP group they belonged to must be mapped to a PostgreSQL pre-defined authorization role by using the `ldap_map` configuration option. \ No newline at end of file diff --git a/docs/how-to.md b/docs/how-to.md index 497d33ce18..c21e3a5086 100644 --- a/docs/how-to.md +++ b/docs/how-to.md @@ -4,7 +4,7 @@ The following guides cover key processes and common tasks for managing and using ## Deployment and setup -The following guides walk you through the details of how to install different cloud services and bootstrap them to Juju: +Installation of different cloud services with Juju: * [Sunbeam] * [LXD] * [MAAS] @@ -13,7 +13,7 @@ The following guides walk you through the details of how to install different cl * [Azure] * [Multi-availability zones (AZ)][Multi-AZ] -The following guides cover some specific deployment scenarios and architectures: +Specific deployment scenarios and architectures: * [Terraform] * [Air-gapped] * [TLS VIP access] @@ -54,7 +54,7 @@ The following guides cover some specific deployment scenarios and architectures: ## Development -This section is aimed at charm developers looking to support PostgreSQL integrations with their charm. +This section is for charm developers looking to support PostgreSQL integrations with their charm. * [Integrate with your charm] * [Migrate data via pg_dump] diff --git a/docs/how-to/h-deploy.md b/docs/how-to/h-deploy.md index 4769d863c6..f73fcb80ac 100644 --- a/docs/how-to/h-deploy.md +++ b/docs/how-to/h-deploy.md @@ -21,7 +21,7 @@ Then, either continue with the `juju` client **or** use the `terraform juju` cli To deploy with the `juju` client: ```shell -juju deploy postgresql +juju deploy postgresql -n ``` > See also: [`juju deploy` command](https://canonical-juju.readthedocs-hosted.com/en/latest/user/reference/juju-cli/list-of-juju-cli-commands/deploy/) diff --git a/docs/how-to/h-enable-profiling.md b/docs/how-to/h-enable-profiling.md new file mode 100644 index 0000000000..aabc52bc34 --- /dev/null +++ b/docs/how-to/h-enable-profiling.md @@ -0,0 +1,174 @@ +[note] +**Note**: All commands are written for `juju >= v3.1` + +If you're using `juju 2.9`, check the [`juju 3.0` Release Notes](https://juju.is/docs/juju/roadmap#heading--juju-3-0-0---22-oct-2022). +[/note] + +# Enable profiling with Parca + +This guide contains the steps to enable profiling with [Parca](https://www.parca.dev/docs/overview/) for your PostgreSQL application. + +## Summary +* [Prerequisites](#prerequisites) +* [Set up the Parca backend](#set-up-the-parca-backend) + * [Charmed Parca K8s](#charmed-parca-k8s) + * [Polar Signals Cloud](#polar-signals-cloud) +* [View profiles](#view-profiles) + +--- + +## Prerequisites + +[note type=caution] +**Do not skip this section** if you are deploying PostgreSQL in an LXD model or if your base is `ubuntu@22.04`. +[/note] + + +This guide assumes you already have a juju model with Charmed PostgreSQL deployed. 
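As a rough sketch of that assumed starting point (the model name is arbitrary, and the `virt-type` constraint only matters for LXD models, as explained further down):

```
juju add-model profiling-demo
# On LXD, profiling requires a virtual machine rather than a system container.
juju deploy postgresql --constraints="virt-type=virtual-machine" -n 3
```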
+ +> See: [How to deploy PostgreSQL](/t/16811) + +In order for your Charmed PostgreSQL deployment to be correctly set up for integration with Parca, there are two important considerations: +* [LXD virtualization type](#lxd-virtualization-type) +* [Base (Ubuntu version)](#base-ubuntu-version) + +### LXD virtualization type + +**If you are deploying Charmed PostgreSQL in a LXD model, you will need to ensure that LXD's virtualization type is set to `virtual-machine` for the Juju application.** + +This is because LXD does not allow `/sys/kernel/tracing` to be mounted in a system container (even in privileged mode) due to security isolation concerns. + +To ensure that a virtual machine is used instead of a system container, you would need to add constraints, for example: +``` +juju deploy postgresql --constraints="virt-type=virtual-machine"`. +``` + +### Base (Ubuntu version) +**If your base is `ubuntu@22.04`, you will need to ensure that your are using the `generic` flavor of Linux.** +> See the output of `uname -r` to confirm. + +If you do not have the `generic` flavor, you can enable it on a unit to be profiled as follows: + +``` +juju ssh postgresql/0 bash +sudo apt-get update && sudo apt-get install linux-image-virtual +sudo apt-get autopurge linux-image-kvm +``` + +If your application is deployed in an LXD model, run the following command: +``` +rm /etc/default/grub.d/40-force-partuuid.cfg +``` + +Open the `/etc/default/grub` file with your editor of choice and replace the line that starts with `GRUB_DEFAULT=` with: +``` +release=$(linux-version list | grep -e '-generic$' | sort -V | tail -n1) +GRUB_DEFAULT="Advanced options for Ubuntu>Ubuntu, with Linux $release" +``` + +Exit out of the `/etc/default/grub file`, update GRUB, and reboot: +``` +sudo update-grub +sudo reboot +``` + +Nothing needs to be done if the base is `ubuntu@24.04`, which already loads the kernel symbol table for debugging by default. + +## Set up the Parca backend + +There are two potential backends: +* [Charmed Parca K8s](#charmed-parca-k8s) (requires COS and cross-model integrations) +* [Polar Signals Cloud](#polar-signals-cloud) (COS is optional) + +### Charmed Parca K8s + +This section goes through the steps for enabling profiling with Charmed Parca K8s as the backend. + +#### 1. Deploy `cos-lite` and `parca-k8s` + +Refer to [Getting started on MicroK8s](https://charmhub.io/topics/canonical-observability-stack/tutorials/install-microk8s) and deploy the `cos-lite` bundle from the `latest/edge` track in a Kubernetes environment. + +Then, refer to [Deploy Charmed Parca on top of COS-lite](https://discourse.charmhub.io/t/how-to-deploy-charmed-parca-on-top-of-cos-lite/16579) to deploy Charmed Parca K8s in the same model as the `cos-lite` bundle. + +#### 2. Offer interfaces + +Offer interfaces for cross-model integrations: + +``` +juju offer :parca-store-endpoint +``` + +#### 3. Deploy and integrate `parca-agent` with `postgresql` + +Switch to the model containing the Charmed PostgreSQL deployment, deploy Charmed Parca Agent, and integrate it with Charmed PostgreSQL: + +``` +juju switch : + +juju deploy parca-agent --channel latest/edge +juju integrate postgresql parca-agent +``` + +#### 4. Integrate `parca-agent` with `parca-k8s` + +Consume the parca offer from [Step 2](#2-offer-interfaces) and integrate with them: + +``` +juju find-offers : +``` + +> :exclamation: Do not miss the colon "`:`" in the command above. 
+ +Below is a sample output where `k8s` is the K8s controller name and `cos` is the model where `cos-lite` and `parca-k8s` are deployed: + +``` +Store URL Access Interfaces +k8s admin/cos.parca admin parca_store:parca-store-endpoint +``` + +Next, consume this offer so that is reachable from the current machine model: + +``` +juju consume k8s:admin/cos.parca +``` + +Finally, relate Charmed Parca Agent with the consumed offer endpoint: +``` +juju integrate parca-agent parca +``` + +### Polar Signals Cloud + +This section goes through the steps for enabling profiling with Polar Signals Cloud (PSC) as the backend. + +[note] +With PSC, `cos-lite` and `parca-k8s` are not required. This section goes through the recommended setup, where `polar-signals-cloud-integrator` is deployed in the same model as `postgresql`, and `parca-agent` is used to relay traffic to PSC. + +If you would like to use `parca-k8s` to relay traffic to PSC instead, refer to [Steps 1 and 2](#1-deploy-cos-lite-and-parca-k8s) in the Charmed Parca K8s section. +[/note] + +#### 1. Deploy and integrate `parca-agent` with `postgresql` + +In the machine model where PostgreSQL is deployed, deploy `parca-agent` and integrate it with `postgresql`: + +``` +juju deploy parca-agent --channel latest/edge +juju integrate postgresql parca-agent +``` + +#### 2. Integrate `parca-agent` with `polar-signals-cloud-integrator` + +Follow the guide [How to integrate with Polar Signals Cloud](https://discourse.charmhub.io/t/charmed-parca-docs-how-to-integrate-with-polar-signals-cloud/16559). + + +## View profiles + +After the backend setup is complete, the profiles for the machines where the PostgreSQL units are running will be accessible from the Parca web interface. + +If you are running Charmed Parca K8s, you can also access the link for Parca's web interface from COS catalogue (`juju run traefik/0 show-proxied-endpoints` in the K8s model where `cos-lite` is deployed). + +![Example profile with Parca Web UI690x753](upload://zFOOKY8nokrg2Q4xUVTbD8UGjD3.png) + +Furthermore, if you have `cos-lite` deployed, you can use Grafana to explore profiles under the `Explore` section with `parca-k8s` as the data source. + +![Example profile with Grafana's Parca plugin|690x383](upload://w3G5STYOxMZHCpIA48gEJHUniLi.jpeg) \ No newline at end of file diff --git a/docs/how-to/h-enable-tls.md b/docs/how-to/h-enable-tls.md index c8d8640870..c5a872288d 100644 --- a/docs/how-to/h-enable-tls.md +++ b/docs/how-to/h-enable-tls.md @@ -32,15 +32,9 @@ First, deploy the TLS charm: juju deploy self-signed-certificates ``` -To enable TLS on `postgresql`, integrate the two applications: +To enable TLS integrate (formerly known as “relate”) the two applications: ```shell -juju integrate self-signed-certificates postgresql -``` - -## Disable TLS -Disable TLS by removing the integration. -```shell -juju remove-relation self-signed-certificates postgresql +juju integrate postgresql:certificates self-signed-certificates:certificates ``` ## Check certificates in use @@ -79,4 +73,10 @@ Updates can also be done with auto-generated keys: juju run postgresql/0 set-tls-private-key juju run postgresql/1 set-tls-private-key juju run postgresql/2 set-tls-private-key +``` + +## Disable TLS +Disable TLS by removing the integration. 
+```shell +juju remove-relation postgresql:certificates self-signed-certificates:certificates ``` \ No newline at end of file diff --git a/docs/how-to/h-external-access.md b/docs/how-to/h-external-access.md index 7182b626d9..023177692b 100644 --- a/docs/how-to/h-external-access.md +++ b/docs/how-to/h-external-access.md @@ -8,7 +8,7 @@ This page summarizes resources for setting up deployments where an external appl There are many possible ways to connect the Charmed PostgreSQL database from outside of the LAN where the database cluster is located. The available options are heavily dependent on the cloud/hardware/virtualization in use. -One of the possible options is to use [virtual IP addresses (VIP)](https://en.wikipedia.org/wiki/Virtual_IP_address) which the charm PgBouncer provides with assistance from the charm/interface `hacluster`. Please follow the [PgBouncer documentation](https://charmhub.io/pgbouncer/docs/h-external-access?channel=1/stable) for such configuration. +One of the possible options is to use [virtual IP addresses (VIP)](https://en.wikipedia.org/wiki/Virtual_IP_address) which the charm PgBouncer provides with assistance from the charm/interface `hacluster`. Please follow the [PgBouncer documentation](https://charmhub.io/pgbouncer/docs/h-external-access) for such configuration. > See also: [How to deploy for external TLS VIP access](/t/16576). diff --git a/docs/how-to/h-integrate.md b/docs/how-to/h-integrate.md index 54a9761380..fa8438764e 100644 --- a/docs/how-to/h-integrate.md +++ b/docs/how-to/h-integrate.md @@ -25,7 +25,9 @@ This guide shows how to integrate Charmed PostgreSQL with both charmed and non-c Integrations with charmed applications are supported via the modern [`postgresql_client`](https://github.com/canonical/charm-relation-interfaces/blob/main/interfaces/postgresql_client/v0/README.md) interface, and the legacy `psql` interface from the [original version](https://launchpad.net/postgresql-charm) of the charm. -> You can see which existing charms are compatible with PostgreSQL in the [Integrations](https://charmhub.io/postgresql/integrations) tab. +[note] +You can see which existing charms are compatible with PostgreSQL in the [Integrations](https://charmhub.io/postgresql/integrations) tab. +[/note] ### Modern `postgresql_client` interface To integrate with a charmed application that supports the `postgresql_client` interface, run @@ -41,7 +43,7 @@ juju remove-relation postgresql ### Legacy `pgsql` interface [note type="caution"] Note that this interface is **deprecated**. -See more information in [Explanation > Legacy charm](/t/10690). +See the [legacy charm explanation page](/t/10690). 
[/note] To integrate via the legacy interface, run diff --git a/docs/overview.md b/docs/overview.md index 273a9b0f53..958348ebd8 100644 --- a/docs/overview.md +++ b/docs/overview.md @@ -77,6 +77,7 @@ PostgreSQL is a trademark or registered trademark of PostgreSQL Global Developme | 3 | h-enable-monitoring | [Enable monitoring](/t/10600) | | 3 | h-enable-alert-rules | [Enable alert rules](/t/13084) | | 3 | h-enable-tracing | [Enable tracing](/t/14521) | +| 3 | h-enable-profiling | [Enable profiling](/t/17172) | | 2 | h-upgrade | [Upgrade](/t/12086) | | 3 | h-upgrade-minor | [Perform a minor upgrade](/t/12089) | | 3 | h-rollback-minor | [Perform a minor rollback](/t/12090) | @@ -94,20 +95,21 @@ PostgreSQL is a trademark or registered trademark of PostgreSQL Global Developme | 2 | r-software-testing | [Software testing](/t/11773) | | 2 | r-performance | [Performance and resources](/t/11974) | | 2 | r-troubleshooting | [Troubleshooting](/t/11864) | +| 3 | r-sos-report | [SOS report](/t/17228) | | 2 | r-plugins-extensions | [Plugins/extensions](/t/10946) | | 2 | r-alert-rules | [Alert rules](/t/15841) | | 2 | r-statuses | [Statuses](/t/10844) | | 2 | r-contacts | [Contacts](/t/11863) | | 1 | explanation | [Explanation](/t/16768) | | 2 | e-architecture | [Architecture](/t/11857) | -| 2 | e-security | [Security](/t/16852) | -| 2 | e-cryptography | [Cryptography](/t/16853) | | 2 | e-interfaces-endpoints | [Interfaces and endpoints](/t/10251) | +| 2 | e-juju-details | [Juju](/t/11985) | +| 2 | e-legacy-charm | [Legacy charm](/t/10690) | | 2 | e-connection-pooling| [Connection pooling](/t/15777) | | 2 | e-users | [Users](/t/10798) | | 2 | e-logs | [Logs](/t/12099) | -| 2 | e-juju-details | [Juju](/t/11985) | -| 2 | e-legacy-charm | [Legacy charm](/t/10690) | +| 2 | e-security | [Security](/t/16852) | +| 3 | e-cryptography | [Cryptography](/t/16853) | | 1 | search | [Search](https://canonical.com/data/docs/postgresql/iaas) | [/details] diff --git a/docs/reference.md b/docs/reference.md index f686a8f86b..2decbe04cb 100644 --- a/docs/reference.md +++ b/docs/reference.md @@ -1,6 +1,6 @@ # Reference -The Reference section of our documentation contains pages for technical specifications, APIs, release notes, and other reference material for fast lookup. +Technical specifications, APIs, release notes, and other reference material for fast lookup. 
**In the left sidebar navigation**, you will find the following references: | Page | Description | @@ -20,8 +20,7 @@ The Reference section of our documentation contains pages for technical specific | Page | Description | |----------------------------------------------------------------------------|---------------------------------------------------------| -| [Resources](https://charmhub.io/postgresql/resources) | :construction: Under construction | | [Integrations](https://charmhub.io/postgresql/integrations) | Integration/relation interfaces supported by this charm | | [Libraries](https://charmhub.io/postgresql/libraries) | VM charm library is empty as charm uses [K8s library](https://charmhub.io/postgresql-k8s/libraries/) (more info [here](/t/11857)) | -| [Configuration](https://charmhub.io/postgresql/configuration) | Application configuration parameters (more info [here](https://juju.is/docs/juju/configuration?&_ga=2.95573596.1153611399.1713171630-773562698.1708605078#heading--application-configuration)) | +| [Configuration](https://charmhub.io/postgresql/configuration) | Application configuration parameters | | [Actions](https://charmhub.io/postgresql/actions) | Juju actions supported by this charm | \ No newline at end of file diff --git a/docs/reference/r-contacts.md b/docs/reference/r-contacts.md index 8447eab17f..ed3075fe4a 100644 --- a/docs/reference/r-contacts.md +++ b/docs/reference/r-contacts.md @@ -1,15 +1,18 @@ # Contact -Charmed PostgreSQL is an open source project that warmly welcomes community contributions, suggestions, fixes, and constructive feedback. -* Raise software issues or feature requests on [**GitHub**](https://github.com/canonical/postgresql-operator/issues/new/choose) -* Report security issues through [**Launchpad**](https://wiki.ubuntu.com/DebuggingSecurity#How%20to%20File) -* Contact the Canonical Data Platform team through our [Matrix](https://matrix.to/#/#charmhub-data-platform:ubuntu.com) channel. +Charmed PostgreSQL is an open source project that warmly welcomes community contributions, suggestions, fixes, and constructive feedback: + +* [Raise](https://github.com/canonical/postgresql-operator/issues/new/choose) software issues or feature requests +* [Report](https://github.com/canonical/postgresql-operator/security/advisories/new) security issues +* Contact the Canonical Data Platform team directly through our [Matrix](https://matrix.to/#/#charmhub-data-platform:ubuntu.com) channel. + [note] Our legacy [Mattermost](https://chat.charmhub.io/charmhub/channels/data-platform) channel is read-only until January 31, 2025. [/note] Useful links: + * [Canonical Data Fabric](https://ubuntu.com/data/) * [Charmed PostgreSQL](https://charmhub.io/postgresql) * [Git sources for Charmed PostgreSQL](https://github.com/canonical/postgresql-operator) diff --git a/docs/reference/r-sos-report.md b/docs/reference/r-sos-report.md new file mode 100644 index 0000000000..7c1a3a0441 --- /dev/null +++ b/docs/reference/r-sos-report.md @@ -0,0 +1,114 @@ +# SoS report + +SoS is an extensible, portable, support data collection tool primarily aimed at Linux distributions and other UNIX-like operating systems. + +You can use the [built-in version of SoS](#use-built-in-sos-recommended), or [retrieve the latest version from GitHub](#use-the-latest-sos-plugins-from-git). + +[note type=caution] +Always review the collected data before sharing it! + +The generated archive may contain sensitive data. 
Its content should be reviewed by the originating organization before being passed to any third party. +[/note] + +## Use built-in SoS (recommended) + +Charmed PostgreSQL shipped with built-in [SoS](https://github.com/sosreport/sos) reporting tool. You can use it to collect logs on a specific unit or multiple units at once. + +### Collect logs on one unit + +To generate the troubleshooting report archiving all logs simply call `sos report` inside the relevant Juju unit: +```shell +juju ssh postgresql/0 +> sudo sos report -o system,systemd,snap,charmed_postgresql --low-priority +``` +As a result, the archive will all logs will be generated and can be shared with your support representative: +```shell +... +Your sos report has been generated and saved in: + /tmp/sosreport-juju-d4c067-1-2025-04-07-chdmwlz.tar.xz + + Size 216.36KiB + Owner root + sha256 b3bf2c54bb3d934ca61ab2a17c19e98b2abeafd45589f268b62bd02f6b2a4c5f +... +``` + +Use `juju scp` to copy logs from Juju unit to localhost: +```shell +juju scp postgresql/0:/tmp/sosreport-juju-d4c067-1-2025-04-07-chdmwlz.tar.xz . +``` + +[details="Example output"] +```shell +juju ssh postgresql/0 + +ubuntu@juju-d4c067-1:~$ sudo sos report -o system,systemd,snap,charmed_postgresql --low-priority + +sos report (version 4.8.2) + +This command will collect system configuration and diagnostic +information from this Ubuntu system. + +For more information on Canonical visit: + + Community Website : https://www.ubuntu.com/ + Commercial Support : https://www.canonical.com + +The generated archive may contain data considered sensitive and its +content should be reviewed by the originating organization before being +passed to any third party. + +No changes will be made to system configuration. + + +Press ENTER to continue, or CTRL-C to quit. + +Optionally, please enter the case id that you are generating this report for []: + + Setting up archive ... + Setting up plugins ... + Running plugins. Please wait ... + + Starting 4/4 systemd [Running: charmed_postgresql system snap systemd] + Finishing plugins [Running: charmed_postgresql] + Finished running plugins +Creating compressed archive... + +Your sos report has been generated and saved in: + /tmp/sosreport-juju-d4c067-1-2025-04-07-qntyqpz.tar.xz + + Size 285.60KiB + Owner root + sha256 d318d7de7595ebae1bce093a375129fafbd397c297317de3006fbfe45e93d43d + +Please send this file to your support representative. + +ubuntu@juju-d4c067-1:~$ exit + +juju scp postgresql/0:/tmp/sosreport-juju-d4c067-1-2025-04-07-qntyqpz.tar.xz . +``` +[/details] + +### Collect logs from several units + +The sos tool allows you to collect logs from several Juju units (replace labels with proper Juju unit like `postgresql/0`). 
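For example, with the unit labels filled in (illustrative unit names; the general form of the command follows below):

```shell
sos collect --cluster-type juju --no-local -c "juju.units=postgresql/0,postgresql/1" --batch -o system,systemd,snap,charmed_postgresql --low-priority
```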
+ +Run the following command from outside the Juju units: +```shell +sos collect --cluster-type juju --no-local -c "juju.units=,,<...>" --batch -o system,systemd,snap,charmed_postgresql --low-priority +``` + +## Use the latest sos plugins from Git + +The latest version of [sos plugins](https://github.com/sosreport/sos/tree/main/sos/report/plugins) can be retrieved from Git easily: +```shell +git clone https://github.com/sosreport/sos.git +cd sos +sudo ./bin/sos report -­a +``` + +The output will be the same to the example above providing archive with all logs as above: +```shell +Your sos report has been generated and saved in: + /tmp/sosreport-juju-d4c067-1-2025-04-07-egosqmi.tar.xz +``` \ No newline at end of file diff --git a/docs/tutorial/t-enable-tls.md b/docs/tutorial/t-enable-tls.md index 0a425966fb..55f3b1b7e4 100644 --- a/docs/tutorial/t-enable-tls.md +++ b/docs/tutorial/t-enable-tls.md @@ -55,7 +55,7 @@ Machine State Address Inst id Series AZ Message To enable TLS on Charmed PostgreSQL VM, integrate the two applications: ```shell -juju integrate postgresql self-signed-certificates +juju integrate postgresql:certificates self-signed-certificates:certificates ``` PostgreSQL is now using TLS certificate generated by the `self-signed-certificates` charm. @@ -72,7 +72,7 @@ verify error:num=19:self-signed certificate in certificate chain

Remove TLS certificate

To remove the external TLS, remove the integration: ```shell -juju remove-relation postgresql self-signed-certificates +juju remove-relation postgresql:certificates self-signed-certificates:certificates ``` If you once again check the TLS certificates in use via the OpenSSL client, you will see something similar to the output below: diff --git a/docs/tutorial/t-set-up.md b/docs/tutorial/t-set-up.md index bf2ddb57b9..3223686f32 100644 --- a/docs/tutorial/t-set-up.md +++ b/docs/tutorial/t-set-up.md @@ -1,4 +1,4 @@ -> [Charmed PostgreSQL K8s Tutorial](/t/9707) > 1. Set up the environment +> [Charmed PostgreSQL VM Tutorial](/t/9707) > 1. Set up the environment # Set up the environment From 7897bfce5becfc44ba924f3837e382fed4e54d2b Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Fri, 11 Apr 2025 05:03:56 +0300 Subject: [PATCH 09/21] [MISC] Conditional checksum calculation (#812) * Bump boto * Conditional checksum calculation --- poetry.lock | 74 +++++++++++++++++++------------------- pyproject.toml | 2 +- src/backups.py | 51 +++++++++++--------------- tests/unit/test_backups.py | 33 ++++++++++++++++- 4 files changed, 90 insertions(+), 70 deletions(-) diff --git a/poetry.lock b/poetry.lock index c776be907a..c70ec34a9b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "allure-pytest" @@ -68,7 +68,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -100,12 +100,12 @@ files = [ ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= 
\"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "backoff" @@ -173,34 +173,34 @@ typecheck = ["mypy"] [[package]] name = "boto3" -version = "1.35.99" +version = "1.37.22" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71"}, - {file = "boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca"}, + {file = "boto3-1.37.22-py3-none-any.whl", hash = "sha256:a14324d5fa5f4fea00c0e3c69754cbd28100f7fe194693eeecf2dc07446cf4ef"}, + {file = "boto3-1.37.22.tar.gz", hash = "sha256:78a0ec0aafbf6044104c98ad80b69e6d1c83d8233fda2c2d241029e6c705c510"}, ] [package.dependencies] -botocore = ">=1.35.99,<1.36.0" +botocore = ">=1.37.22,<1.38.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" +s3transfer = ">=0.11.0,<0.12.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.99" +version = "1.37.22" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445"}, - {file = "botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3"}, + {file = "botocore-1.37.22-py3-none-any.whl", hash = "sha256:184db7c9314d13002bc827f511a5140574b5da1acda342d51e093dad6317de98"}, + {file = "botocore-1.37.22.tar.gz", hash = "sha256:b3b26f1a90236bcd17d4092f8c85a256b44e9955a16b633319a2f5678d605e9f"}, ] [package.dependencies] @@ -209,7 +209,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.22.0)"] +crt = ["awscrt (==0.23.8)"] [[package]] name = "cachetools" @@ -433,7 +433,7 @@ files = [ [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] -toml = ["tomli"] +toml = ["tomli ; python_version < \"3.11\""] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] @@ -546,7 +546,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -593,10 +593,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -630,7 +630,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "exceptiongroup" @@ -661,7 +661,7 @@ files = [ ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "google-auth" @@ -759,7 +759,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -816,7 +816,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", 
"sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -875,7 +875,7 @@ typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -1398,8 +1398,8 @@ cryptography = ">=3.3" pynacl = ">=1.5" [package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] invoke = ["invoke (>=2.0)"] [[package]] @@ -2219,21 +2219,21 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.4" +version = "0.11.4" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, - {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, + {file = "s3transfer-0.11.4-py3-none-any.whl", hash = "sha256:ac265fa68318763a03bf2dc4f39d5cbd6a9e178d81cc9483ad27da33637e320d"}, + {file = "s3transfer-0.11.4.tar.gz", hash = "sha256:559f161658e1cf0a911f45940552c696735f5c74e64362e515f333ebed87d679"}, ] [package.dependencies] -botocore = ">=1.33.2,<2.0a.0" +botocore = ">=1.37.4,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] [[package]] name = "six" @@ -2408,7 +2408,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2623,14 +2623,14 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = 
["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "32f3b67d60393e53e28f1c3856b0c6f5c32ea538d4ae6cae1847cdd37a001ceb" +content-hash = "d6d1a9eb1d741b25733acdae41d5ad4c8f3a1948bb519a31b676c5f6b1f21550" diff --git a/pyproject.toml b/pyproject.toml index 82e0129586..c044782906 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ requires-poetry = ">=2.0.0" [tool.poetry.dependencies] python = "^3.10" ops = "^2.18.1" -boto3 = "^1.35.99" +boto3 = "^1.37.22" pgconnstr = "^1.0.1" requests = "^2.32.3" tenacity = "^9.0.0" diff --git a/src/backups.py b/src/backups.py index 903ba028f6..4c414d75e8 100644 --- a/src/backups.py +++ b/src/backups.py @@ -100,6 +100,23 @@ def _tls_ca_chain_filename(self) -> str: return f"{self.charm._storage_path}/pgbackrest-tls-ca-chain.crt" return "" + def _get_s3_session_resource(self, s3_parameters: dict): + session = boto3.session.Session( + aws_access_key_id=s3_parameters["access-key"], + aws_secret_access_key=s3_parameters["secret-key"], + region_name=s3_parameters["region"], + ) + return session.resource( + "s3", + endpoint_url=self._construct_endpoint(s3_parameters), + verify=(self._tls_ca_chain_filename or None), + config=botocore.client.Config( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + ), + ) + def _are_backup_settings_ok(self) -> tuple[bool, str | None]: """Validates whether backup settings are OK.""" if self.model.get_relation(self.relation_name) is None: @@ -258,18 +275,9 @@ def _create_bucket_if_not_exists(self) -> None: bucket_name = s3_parameters["bucket"] region = s3_parameters.get("region") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) try: - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = self._get_s3_session_resource(s3_parameters) except ValueError as e: logger.exception("Failed to create a session '%s' in region=%s.", bucket_name, region) raise e @@ -1372,17 +1380,7 @@ def _upload_content_to_s3( processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/") try: logger.info(f"Uploading content to bucket={bucket_name}, path={processed_s3_path}") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) - - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = self._get_s3_session_resource(s3_parameters) bucket = s3.Bucket(bucket_name) with tempfile.NamedTemporaryFile() as temp_file: @@ -1415,16 +1413,7 @@ def 
_read_content_from_s3(self, s3_path: str, s3_parameters: dict) -> str | None processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/") try: logger.info(f"Reading content from bucket={bucket_name}, path={processed_s3_path}") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = self._get_s3_session_resource(s3_parameters) bucket = s3.Bucket(bucket_name) with BytesIO() as buf: bucket.download_fileobj(processed_s3_path, buf) diff --git a/tests/unit/test_backups.py b/tests/unit/test_backups.py index 3df0653190..1ca2d3a469 100644 --- a/tests/unit/test_backups.py +++ b/tests/unit/test_backups.py @@ -342,6 +342,7 @@ def test_create_bucket_if_not_exists(harness, tls_ca_chain_filename): new_callable=PropertyMock(return_value=tls_ca_chain_filename), ) as _tls_ca_chain_filename, patch("charm.PostgreSQLBackups._retrieve_s3_parameters") as _retrieve_s3_parameters, + patch("backups.botocore.client.Config") as _config, ): # Test when there are missing S3 parameters. _retrieve_s3_parameters.return_value = ([], ["bucket", "access-key", "secret-key"]) @@ -366,13 +367,22 @@ def test_create_bucket_if_not_exists(harness, tls_ca_chain_filename): # Test when the bucket already exists. _resource.reset_mock() + _config.reset_mock() _resource.side_effect = None head_bucket = _resource.return_value.Bucket.return_value.meta.client.head_bucket create = _resource.return_value.Bucket.return_value.create wait_until_exists = _resource.return_value.Bucket.return_value.wait_until_exists harness.charm.backup._create_bucket_if_not_exists() _resource.assert_called_once_with( - "s3", endpoint_url="test-endpoint", verify=(tls_ca_chain_filename or None) + "s3", + endpoint_url="test-endpoint", + verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) head_bucket.assert_called_once() create.assert_not_called() @@ -1950,6 +1960,7 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): patch("tempfile.NamedTemporaryFile") as _named_temporary_file, patch("charm.PostgreSQLBackups._construct_endpoint") as _construct_endpoint, patch("boto3.session.Session.resource") as _resource, + patch("backups.botocore.client.Config") as _config, patch( "charm.PostgreSQLBackups._tls_ca_chain_filename", new_callable=PropertyMock(return_value=tls_ca_chain_filename), @@ -1977,11 +1988,18 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_not_called() upload_file.assert_not_called() _resource.reset_mock() + _config.reset_mock() _resource.side_effect = None upload_file.side_effect = S3UploadFailedError assert not harness.charm.backup._upload_content_to_s3(content, s3_path, s3_parameters) @@ -1989,12 +2007,19 @@ def test_upload_content_to_s3(harness, 
tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_called_once() upload_file.assert_called_once_with("/tmp/test-file", "test-path/test-file.") # Test when the upload succeeds _resource.reset_mock() + _config.reset_mock() _named_temporary_file.reset_mock() upload_file.reset_mock() upload_file.side_effect = None @@ -2003,6 +2028,12 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_called_once() upload_file.assert_called_once_with("/tmp/test-file", "test-path/test-file.") From 03598a48e221bc3719563912584e0812124d4c4e Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Tue, 15 Apr 2025 08:32:29 -0300 Subject: [PATCH 10/21] [DPE-6218] Static code analysis (#828) * Create tiobe_scan.yaml * Remove push trigger --- .github/workflows/tiobe_scan.yaml | 44 +++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 .github/workflows/tiobe_scan.yaml diff --git a/.github/workflows/tiobe_scan.yaml b/.github/workflows/tiobe_scan.yaml new file mode 100644 index 0000000000..b81b022ef0 --- /dev/null +++ b/.github/workflows/tiobe_scan.yaml @@ -0,0 +1,44 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. + +name: Weekly TICS scan + +on: + schedule: + - cron: "0 2 * * 6" # Every Saturday 2:00 AM UTC + workflow_dispatch: + +jobs: + TICS: + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create and activate virtual environment + run: | + python3 -m venv .venv + . 
.venv/bin/activate + pip install flake8 poetry pylint pytest tox + poetry install --all-groups + echo PATH="$PATH" >> "$GITHUB_ENV" + + - name: Run coverage tests + run: | + tox -e unit + + - name: Move results to the necessary folder for TICS + run: | + mkdir -p .cover + mv coverage.xml .cover/cobertura.xml + + - name: TICS GitHub Action + uses: tiobe/tics-github-action@v3 + with: + mode: qserver + project: postgresql-operator + viewerUrl: https://canonical.tiobe.com/tiobeweb/TICS/api/cfg?name=default + branchdir: ${{ env.GITHUB_WORKSPACE }} + ticsAuthToken: ${{ secrets.TICSAUTHTOKEN }} + installTics: true + calc: ALL From 0f9ab15f9cf700b4a164dd69672235d16f4077a3 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Tue, 15 Apr 2025 16:39:29 +0300 Subject: [PATCH 11/21] [MISC] Disable landscape subordinate test lxd (#831) * Set series for ubuntu-advantage test and disable the landscape test * Revert to LTS LXD --- concierge.yaml | 1 - tests/integration/test_subordinates.py | 10 ++++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/concierge.yaml b/concierge.yaml index 29d78b95b5..15a78cc947 100644 --- a/concierge.yaml +++ b/concierge.yaml @@ -5,7 +5,6 @@ providers: lxd: enable: true bootstrap: true - channel: latest/stable host: snaps: jhack: diff --git a/tests/integration/test_subordinates.py b/tests/integration/test_subordinates.py index 585bd765db..e41cd96219 100644 --- a/tests/integration/test_subordinates.py +++ b/tests/integration/test_subordinates.py @@ -46,6 +46,9 @@ async def test_deploy(ops_test: OpsTest, charm: str, check_subordinate_env_vars) channel="latest/edge", num_units=0, base=CHARM_BASE, + # TODO switch back to series when pylib juju can figure out the base: + # https://github.com/juju/python-libjuju/issues/1240 + series="jammy", ), ops_test.model.deploy( LS_CLIENT, @@ -61,12 +64,11 @@ async def test_deploy(ops_test: OpsTest, charm: str, check_subordinate_env_vars) ) await ops_test.model.wait_for_idle(apps=[DATABASE_APP_NAME], status="active", timeout=2000) - await ops_test.model.relate(f"{DATABASE_APP_NAME}:juju-info", f"{LS_CLIENT}:container") await ops_test.model.relate( f"{DATABASE_APP_NAME}:juju-info", f"{UBUNTU_PRO_APP_NAME}:juju-info" ) await ops_test.model.wait_for_idle( - apps=[LS_CLIENT, UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active" + apps=[UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active" ) @@ -74,7 +76,7 @@ async def test_scale_up(ops_test: OpsTest, check_subordinate_env_vars): await scale_application(ops_test, DATABASE_APP_NAME, 4) await ops_test.model.wait_for_idle( - apps=[LS_CLIENT, UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active", timeout=1500 + apps=[UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active", timeout=1500 ) @@ -82,5 +84,5 @@ async def test_scale_down(ops_test: OpsTest, check_subordinate_env_vars): await scale_application(ops_test, DATABASE_APP_NAME, 3) await ops_test.model.wait_for_idle( - apps=[LS_CLIENT, UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active", timeout=1500 + apps=[UBUNTU_PRO_APP_NAME, DATABASE_APP_NAME], status="active", timeout=1500 ) From a014fd4f0454a903e1f69ae85e94622d388f9613 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 03:20:37 +0300 Subject: [PATCH 12/21] Update charmcraft.yaml build tools (#815) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- charmcraft.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index 93a258dad9..cd332e9ed5 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,7 +27,7 @@ parts: PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==25.0.1 # renovate: charmcraft-pip-latest # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.7/uv-installer.sh | sh # renovate: charmcraft-uv-latest + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.14/uv-installer.sh | sh # renovate: charmcraft-uv-latest # poetry 2.0.0 requires Python >=3.9 if ! "$HOME/.local/bin/uv" python find '>=3.9' then @@ -35,7 +35,7 @@ parts: # (to reduce the number of Python versions we use) "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 fi - "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.1.1 --with poetry-plugin-export==1.9.0 # renovate: charmcraft-poetry-latest + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.1.2 --with poetry-plugin-export==1.9.0 # renovate: charmcraft-poetry-latest ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry # "charm-poetry" part name is arbitrary; use for consistency @@ -75,7 +75,7 @@ parts: # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive rustup set profile minimal - rustup default 1.85.0 # renovate: charmcraft-rust-latest + rustup default 1.86.0 # renovate: charmcraft-rust-latest craftctl default # Include requirements.txt in *.charm artifact for easier debugging From a11291793901833d1925ee0ed892d88a1fd71c20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Wed, 16 Apr 2025 09:03:47 +0200 Subject: [PATCH 13/21] [MISC] Update snapped PostgreSQL (#832) --- src/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/constants.py b/src/constants.py index 0fa63efd04..4dd91be625 100644 --- a/src/constants.py +++ b/src/constants.py @@ -35,7 +35,7 @@ SNAP_PACKAGES = [ ( POSTGRESQL_SNAP_NAME, - {"revision": {"aarch64": "156", "x86_64": "155"}}, + {"revision": {"aarch64": "168", "x86_64": "167"}}, ) ] From 0ebf12d6da02fae267d7748c2510401c354e1a63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Wed, 16 Apr 2025 09:03:55 +0200 Subject: [PATCH 14/21] [DPE-6345] LDAP I: Create access groups (#823) --- lib/charms/postgresql_k8s/v0/postgresql.py | 134 ++++++++++++++++++++- src/charm.py | 6 + src/relations/db.py | 6 +- src/relations/postgresql_provider.py | 10 +- src/upgrade.py | 12 ++ tests/unit/test_db.py | 13 +- tests/unit/test_postgresql_provider.py | 5 +- 7 files changed, 177 insertions(+), 9 deletions(-) diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index 9fe1957e4f..f5c4d0e02b 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -35,7 +35,19 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 46 +LIBPATCH = 47 + +# Groups to distinguish HBA access +ACCESS_GROUP_IDENTITY = "identity_access" +ACCESS_GROUP_INTERNAL = "internal_access" +ACCESS_GROUP_RELATION = "relation_access" + +# List of access groups to filter role 
assignments by +ACCESS_GROUPS = [ + ACCESS_GROUP_IDENTITY, + ACCESS_GROUP_INTERNAL, + ACCESS_GROUP_RELATION, +] # Groups to distinguish database permissions PERMISSIONS_GROUP_ADMIN = "admin" @@ -57,10 +69,18 @@ logger = logging.getLogger(__name__) +class PostgreSQLAssignGroupError(Exception): + """Exception raised when assigning to a group fails.""" + + class PostgreSQLCreateDatabaseError(Exception): """Exception raised when creating a database fails.""" +class PostgreSQLCreateGroupError(Exception): + """Exception raised when creating a group fails.""" + + class PostgreSQLCreateUserError(Exception): """Exception raised when creating a user fails.""" @@ -93,6 +113,10 @@ class PostgreSQLGetPostgreSQLVersionError(Exception): """Exception raised when retrieving PostgreSQL version fails.""" +class PostgreSQLListGroupsError(Exception): + """Exception raised when retrieving PostgreSQL groups list fails.""" + + class PostgreSQLListUsersError(Exception): """Exception raised when retrieving PostgreSQL users list fails.""" @@ -160,6 +184,24 @@ def _connect_to_database( connection.autocommit = True return connection + def create_access_groups(self) -> None: + """Create access groups to distinguish HBA authentication methods.""" + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + for group in ACCESS_GROUPS: + cursor.execute( + SQL("CREATE ROLE {} NOLOGIN;").format( + Identifier(group), + ) + ) + except psycopg2.Error as e: + logger.error(f"Failed to create access groups: {e}") + raise PostgreSQLCreateGroupError() from e + finally: + if connection is not None: + connection.close() + def create_database( self, database: str, @@ -321,6 +363,50 @@ def delete_user(self, user: str) -> None: logger.error(f"Failed to delete user: {e}") raise PostgreSQLDeleteUserError() from e + def grant_internal_access_group_memberships(self) -> None: + """Grant membership to the internal access-group to existing internal users.""" + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + for user in self.system_users: + cursor.execute( + SQL("GRANT {} TO {};").format( + Identifier(ACCESS_GROUP_INTERNAL), + Identifier(user), + ) + ) + except psycopg2.Error as e: + logger.error(f"Failed to grant internal access group memberships: {e}") + raise PostgreSQLAssignGroupError() from e + finally: + if connection is not None: + connection.close() + + def grant_relation_access_group_memberships(self) -> None: + """Grant membership to the relation access-group to existing relation users.""" + rel_users = self.list_users_from_relation() + if not rel_users: + return + + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + rel_groups = SQL(",").join(Identifier(group) for group in [ACCESS_GROUP_RELATION]) + rel_users = SQL(",").join(Identifier(user) for user in rel_users) + + cursor.execute( + SQL("GRANT {groups} TO {users};").format( + groups=rel_groups, + users=rel_users, + ) + ) + except psycopg2.Error as e: + logger.error(f"Failed to grant relation access group memberships: {e}") + raise PostgreSQLAssignGroupError() from e + finally: + if connection is not None: + connection.close() + def enable_disable_extensions( self, extensions: Dict[str, bool], database: Optional[str] = None ) -> None: @@ -534,12 +620,34 @@ def is_tls_enabled(self, check_current_host: bool = False) -> bool: # Connection errors happen when PostgreSQL has not started yet. 
return False + def list_access_groups(self) -> Set[str]: + """Returns the list of PostgreSQL database access groups. + + Returns: + List of PostgreSQL database access groups. + """ + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + cursor.execute( + "SELECT groname FROM pg_catalog.pg_group WHERE groname LIKE '%_access';" + ) + access_groups = cursor.fetchall() + return {group[0] for group in access_groups} + except psycopg2.Error as e: + logger.error(f"Failed to list PostgreSQL database access groups: {e}") + raise PostgreSQLListGroupsError() from e + finally: + if connection is not None: + connection.close() + def list_users(self) -> Set[str]: """Returns the list of PostgreSQL database users. Returns: List of PostgreSQL database users. """ + connection = None try: with self._connect_to_database() as connection, connection.cursor() as cursor: cursor.execute("SELECT usename FROM pg_catalog.pg_user;") @@ -548,6 +656,30 @@ def list_users(self) -> Set[str]: except psycopg2.Error as e: logger.error(f"Failed to list PostgreSQL database users: {e}") raise PostgreSQLListUsersError() from e + finally: + if connection is not None: + connection.close() + + def list_users_from_relation(self) -> Set[str]: + """Returns the list of PostgreSQL database users that were created by a relation. + + Returns: + List of PostgreSQL database users. + """ + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + cursor.execute( + "SELECT usename FROM pg_catalog.pg_user WHERE usename LIKE 'relation_id_%';" + ) + usernames = cursor.fetchall() + return {username[0] for username in usernames} + except psycopg2.Error as e: + logger.error(f"Failed to list PostgreSQL database users: {e}") + raise PostgreSQLListUsersError() from e + finally: + if connection is not None: + connection.close() def list_valid_privileges_and_roles(self) -> Tuple[Set[str], Set[str]]: """Returns two sets with valid privileges and roles. diff --git a/src/charm.py b/src/charm.py index accd6844be..ee1a4c9ceb 100755 --- a/src/charm.py +++ b/src/charm.py @@ -23,6 +23,7 @@ from charms.grafana_agent.v0.cos_agent import COSAgentProvider, charm_tracing_config from charms.operator_libs_linux.v2 import snap from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUPS, REQUIRED_PLUGINS, PostgreSQL, PostgreSQLCreateUserError, @@ -1360,6 +1361,11 @@ def _start_primary(self, event: StartEvent) -> None: self.postgresql.set_up_database() + access_groups = self.postgresql.list_access_groups() + if access_groups != set(ACCESS_GROUPS): + self.postgresql.create_access_groups() + self.postgresql.grant_internal_access_group_memberships() + self.postgresql_client_relation.oversee_users() # Set the flag to enable the replicas to start the Patroni service. diff --git a/src/relations/db.py b/src/relations/db.py index 5f7d8f9ea5..a77070fc3b 100644 --- a/src/relations/db.py +++ b/src/relations/db.py @@ -7,6 +7,7 @@ from collections.abc import Iterable from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_RELATION, PostgreSQLCreateDatabaseError, PostgreSQLCreateUserError, PostgreSQLGetPostgreSQLVersionError, @@ -198,10 +199,11 @@ def set_up_relation(self, relation: Relation) -> bool: # non-leader units when the cluster topology changes. 
self.charm.set_secret(APP_SCOPE, user, password) self.charm.set_secret(APP_SCOPE, f"{user}-database", database) + self.charm.postgresql.create_user( + user, password, self.admin, extra_user_roles=[ACCESS_GROUP_RELATION] + ) - self.charm.postgresql.create_user(user, password, self.admin) plugins = self.charm.get_plugins() - self.charm.postgresql.create_database( database, user, plugins=plugins, client_relations=self.charm.client_relations ) diff --git a/src/relations/postgresql_provider.py b/src/relations/postgresql_provider.py index c2fa16add0..ef0dec90dc 100644 --- a/src/relations/postgresql_provider.py +++ b/src/relations/postgresql_provider.py @@ -10,6 +10,8 @@ DatabaseRequestedEvent, ) from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_RELATION, + ACCESS_GROUPS, INVALID_EXTRA_USER_ROLE_BLOCKING_MESSAGE, PostgreSQLCreateDatabaseError, PostgreSQLCreateUserError, @@ -71,7 +73,10 @@ def _sanitize_extra_roles(extra_roles: str | None) -> list[str]: if extra_roles is None: return [] - return [role.lower() for role in extra_roles.split(",")] + # Make sure the access-groups are not in the list + extra_roles_list = [role.lower() for role in extra_roles.split(",")] + extra_roles_list = [role for role in extra_roles_list if role not in ACCESS_GROUPS] + return extra_roles_list def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: """Generate password and handle user and database creation for the related application.""" @@ -93,8 +98,9 @@ def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: # Retrieve the database name and extra user roles using the charm library. database = event.database - # Make sure that certain groups are not in the list + # Make sure the relation access-group is added to the list extra_user_roles = self._sanitize_extra_roles(event.extra_user_roles) + extra_user_roles.append(ACCESS_GROUP_RELATION) try: # Creates the user and the database for this specific relation. 
diff --git a/src/upgrade.py b/src/upgrade.py index c24d2952af..db4fb5f978 100644 --- a/src/upgrade.py +++ b/src/upgrade.py @@ -12,6 +12,7 @@ DependencyModel, UpgradeGrantedEvent, ) +from charms.postgresql_k8s.v0.postgresql import ACCESS_GROUPS from ops.model import MaintenanceStatus, RelationDataContent, WaitingStatus from pydantic import BaseModel from tenacity import RetryError, Retrying, stop_after_attempt, wait_fixed @@ -247,6 +248,17 @@ def _prepare_upgrade_from_legacy(self) -> None: extra_user_roles="pg_monitor", ) self.charm.postgresql.set_up_database() + self._set_up_new_access_roles_for_legacy() + + def _set_up_new_access_roles_for_legacy(self) -> None: + """Create missing access groups and their memberships.""" + access_groups = self.charm.postgresql.list_access_groups() + if access_groups == set(ACCESS_GROUPS): + return + + self.charm.postgresql.create_access_groups() + self.charm.postgresql.grant_internal_access_group_memberships() + self.charm.postgresql.grant_relation_access_group_memberships() @property def unit_upgrade_data(self) -> RelationDataContent: diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py index d5ebd95e97..a400ef708e 100644 --- a/tests/unit/test_db.py +++ b/tests/unit/test_db.py @@ -5,6 +5,7 @@ import pytest from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_RELATION, PostgreSQLCreateDatabaseError, PostgreSQLCreateUserError, PostgreSQLGetPostgreSQLVersionError, @@ -226,7 +227,9 @@ def test_set_up_relation(harness): ) assert harness.charm.legacy_db_relation.set_up_relation(relation) user = f"relation-{rel_id}" - postgresql_mock.create_user.assert_called_once_with(user, "test-password", False) + postgresql_mock.create_user.assert_called_once_with( + user, "test-password", False, extra_user_roles=[ACCESS_GROUP_RELATION] + ) postgresql_mock.create_database.assert_called_once_with( DATABASE, user, plugins=["pgaudit"], client_relations=[relation] ) @@ -253,7 +256,9 @@ def test_set_up_relation(harness): {"database": DATABASE}, ) assert harness.charm.legacy_db_relation.set_up_relation(relation) - postgresql_mock.create_user.assert_called_once_with(user, "test-password", False) + postgresql_mock.create_user.assert_called_once_with( + user, "test-password", False, extra_user_roles=[ACCESS_GROUP_RELATION] + ) postgresql_mock.create_database.assert_called_once_with( DATABASE, user, plugins=["pgaudit"], client_relations=[relation] ) @@ -274,7 +279,9 @@ def test_set_up_relation(harness): {"database": ""}, ) assert harness.charm.legacy_db_relation.set_up_relation(relation) - postgresql_mock.create_user.assert_called_once_with(user, "test-password", False) + postgresql_mock.create_user.assert_called_once_with( + user, "test-password", False, extra_user_roles=[ACCESS_GROUP_RELATION] + ) postgresql_mock.create_database.assert_called_once_with( "test_database", user, plugins=["pgaudit"], client_relations=[relation] ) diff --git a/tests/unit/test_postgresql_provider.py b/tests/unit/test_postgresql_provider.py index 27efac383a..13b065d299 100644 --- a/tests/unit/test_postgresql_provider.py +++ b/tests/unit/test_postgresql_provider.py @@ -5,6 +5,7 @@ import pytest from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_RELATION, PostgreSQLCreateDatabaseError, PostgreSQLCreateUserError, PostgreSQLGetPostgreSQLVersionError, @@ -124,10 +125,12 @@ def test_on_database_requested(harness): # Assert that the correct calls were made. 
user = f"relation-{rel_id}" + expected_user_roles = [role.lower() for role in EXTRA_USER_ROLES.split(",")] + expected_user_roles.append(ACCESS_GROUP_RELATION) postgresql_mock.create_user.assert_called_once_with( user, "test-password", - extra_user_roles=[role.lower() for role in EXTRA_USER_ROLES.split(",")], + extra_user_roles=expected_user_roles, ) database_relation = harness.model.get_relation(RELATION_NAME) client_relations = [database_relation] From 3c2c894f3131465ce8db8f287ad19c4919c2b3da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Wed, 16 Apr 2025 09:04:06 +0200 Subject: [PATCH 15/21] [DPE-6345] LDAP II: Include charm libs (#824) --- .../v0/certificate_transfer.py | 432 +++++++++++++ lib/charms/glauth_k8s/v0/ldap.py | 571 ++++++++++++++++++ .../postgresql_k8s/v0/postgresql_tls.py | 87 ++- metadata.yaml | 7 + pyproject.toml | 2 + src/charm.py | 30 + src/constants.py | 2 + tests/integration/helpers.py | 4 +- tests/integration/test_backups_pitr_aws.py | 4 +- tests/integration/test_backups_pitr_gcp.py | 4 +- tests/integration/test_tls.py | 4 +- tests/unit/test_charm.py | 44 +- 12 files changed, 1174 insertions(+), 17 deletions(-) create mode 100644 lib/charms/certificate_transfer_interface/v0/certificate_transfer.py create mode 100644 lib/charms/glauth_k8s/v0/ldap.py diff --git a/lib/charms/certificate_transfer_interface/v0/certificate_transfer.py b/lib/charms/certificate_transfer_interface/v0/certificate_transfer.py new file mode 100644 index 0000000000..d5c2aa1692 --- /dev/null +++ b/lib/charms/certificate_transfer_interface/v0/certificate_transfer.py @@ -0,0 +1,432 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Library for the certificate_transfer relation. + +This library contains the Requires and Provides classes for handling the +ertificate-transfer interface. + +## Getting Started +From a charm directory, fetch the library using `charmcraft`: + +```shell +charmcraft fetch-lib charms.certificate_transfer_interface.v0.certificate_transfer +``` + +### Provider charm +The provider charm is the charm providing public certificates to another charm that requires them. + +Example: +```python +from ops.charm import CharmBase, RelationJoinedEvent +from ops.main import main + +from lib.charms.certificate_transfer_interface.v0.certificate_transfer import( + CertificateTransferProvides, +) + + +class DummyCertificateTransferProviderCharm(CharmBase): + def __init__(self, *args): + super().__init__(*args) + self.certificate_transfer = CertificateTransferProvides(self, "certificates") + self.framework.observe( + self.on.certificates_relation_joined, self._on_certificates_relation_joined + ) + + def _on_certificates_relation_joined(self, event: RelationJoinedEvent): + certificate = "my certificate" + ca = "my CA certificate" + chain = ["certificate 1", "certificate 2"] + self.certificate_transfer.set_certificate( + certificate=certificate, ca=ca, chain=chain, relation_id=event.relation.id + ) + + +if __name__ == "__main__": + main(DummyCertificateTransferProviderCharm) +``` + +### Requirer charm +The requirer charm is the charm requiring certificates from another charm that provides them. 
+ +Example: +```python + +from ops.charm import CharmBase +from ops.main import main + +from lib.charms.certificate_transfer_interface.v0.certificate_transfer import ( + CertificateAvailableEvent, + CertificateRemovedEvent, + CertificateTransferRequires, +) + + +class DummyCertificateTransferRequirerCharm(CharmBase): + def __init__(self, *args): + super().__init__(*args) + self.certificate_transfer = CertificateTransferRequires(self, "certificates") + self.framework.observe( + self.certificate_transfer.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self.certificate_transfer.on.certificate_removed, self._on_certificate_removed + ) + + def _on_certificate_available(self, event: CertificateAvailableEvent): + print(event.certificate) + print(event.ca) + print(event.chain) + print(event.relation_id) + + def _on_certificate_removed(self, event: CertificateRemovedEvent): + print(event.relation_id) + + +if __name__ == "__main__": + main(DummyCertificateTransferRequirerCharm) +``` + +You can relate both charms by running: + +```bash +juju relate +``` + +""" + +import json +import logging +from typing import List, Mapping + +from jsonschema import exceptions, validate # type: ignore[import-untyped] +from ops import Relation +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationChangedEvent, + RelationCreatedEvent, +) +from ops.framework import EventBase, EventSource, Handle, Object + +# The unique Charmhub library identifier, never change it +LIBID = "3785165b24a743f2b0c60de52db25c8b" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 11 + +PYDEPS = ["jsonschema"] + + +logger = logging.getLogger(__name__) + + +PROVIDER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/certificate_transfer/schemas/provider.json", + "type": "object", + "title": "`certificate_transfer` provider schema", + "description": "The `certificate_transfer` root schema comprises the entire provider application databag for this interface.", + "default": {}, + "examples": [ + { + "certificate": "-----BEGIN CERTIFICATE-----\nMIIC6DCCAdCgAwIBAgIUW42TU9LSjEZLMCclWrvSwAsgRtcwDQYJKoZIhvcNAQEL\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIzMDMyNDE4\nNDMxOVoXDTI0MDMyMzE4NDMxOVowPDELMAkGA1UEAwwCb2sxLTArBgNVBC0MJGUw\nNjVmMWI3LTE2OWEtNDE5YS1iNmQyLTc3OWJkOGM4NzIwNjCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBAK42ixoklDH5K5i1NxXo/AFACDa956pE5RA57wlC\nBfgUYaIDRmv7TUVJh6zoMZSD6wjSZl3QgP7UTTZeHbvs3QE9HUwEkH1Lo3a8vD3z\neqsE2vSnOkpWWnPbfxiQyrTm77/LAWBt7lRLRLdfL6WcucD3wsGqm58sWXM3HG0f\nSN7PHCZUFqU6MpkHw8DiKmht5hBgWG+Vq3Zw8MNaqpwb/NgST3yYdcZwb58G2FTS\nZvDSdUfRmD/mY7TpciYV8EFylXNNFkth8oGNLunR9adgZ+9IunfRKj1a7S5GSwXU\nAZDaojw+8k5i3ikztsWH11wAVCiLj/3euIqq95z8xGycnKcCAwEAATANBgkqhkiG\n9w0BAQsFAAOCAQEAWMvcaozgBrZ/MAxzTJmp5gZyLxmMNV6iT9dcqbwzDtDtBvA/\n46ux6ytAQ+A7Bd3AubvozwCr1Id6g66ae0blWYRRZmF8fDdX/SBjIUkv7u9A3NVQ\nXN9gsEvK9pdpfN4ZiflfGSLdhM1STHycLmhG6H5s7HklbukMRhQi+ejbSzm/wiw1\nipcxuKhSUIVNkTLusN5b+HE2gwF1fn0K0z5jWABy08huLgbaEKXJEx5/FKLZGJga\nfpIzAdf25kMTu3gggseaAmzyX3AtT1i8A8nqYfe8fnnVMkvud89kq5jErv/hlMC9\n49g5yWQR2jilYYM3j9BHDuB+Rs+YS5BCep1JnQ==\n-----END CERTIFICATE-----\n", + "ca": "-----BEGIN 
CERTIFICATE-----\nMIIC6DCCAdCgAwIBAgIUdiBwE/CtaBXJl3MArjZen6Y8kigwDQYJKoZIhvcNAQEL\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIzMDMyNDE4\nNDg1OVoXDTI0MDMyMzE4NDg1OVowPDELMAkGA1UEAwwCb2sxLTArBgNVBC0MJDEw\nMDdjNDBhLWUwYzMtNDVlOS05YTAxLTVlYjY0NWQ0ZmEyZDCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANOnUl6JDlXpLMRr/PxgtfE/E5Yk6E/TkPkPL/Kk\ntUGjEi42XZDg9zn3U6cjTDYu+rfKY2jiitfsduW6DQIkEpz3AvbuCMbbgnFpcjsB\nYysLSMTmuz/AVPrfnea/tQTALcONCSy1VhAjGSr81ZRSMB4khl9StSauZrbkpJ1P\nshqkFSUyAi31mKrnXz0Es/v0Yi0FzAlgWrZ4u1Ld+Bo2Xz7oK4mHf7/93Jc+tEaM\nIqG6ocD0q8bjPp0tlSxftVADNUzWlZfM6fue5EXzOsKqyDrxYOSchfU9dNzKsaBX\nkxbHEeSUPJeYYj7aVPEfAs/tlUGsoXQvwWfRie8grp2BoLECAwEAATANBgkqhkiG\n9w0BAQsFAAOCAQEACZARBpHYH6Gr2a1ka0mCWfBmOZqfDVan9rsI5TCThoylmaXW\nquEiZ2LObI+5faPzxSBhr9TjJlQamsd4ywout7pHKN8ZGqrCMRJ1jJbUfobu1n2k\nUOsY4+jzV1IRBXJzj64fLal4QhUNv341lAer6Vz3cAyRk7CK89b/DEY0x+jVpyZT\n1osx9JtsOmkDTgvdStGzq5kPKWOfjwHkmKQaZXliCgqbhzcCERppp1s/sX6K7nIh\n4lWiEmzUSD3Hngk51KGWlpZszO5KQ4cSZ3HUt/prg+tt0ROC3pY61k+m5dDUa9M8\nRtMI6iTjzSj/UV8DiAx0yeM+bKoy4jGeXmaL3g==\n-----END CERTIFICATE-----\n", + "chain": [ + "-----BEGIN CERTIFICATE-----\nMIIC6DCCAdCgAwIBAgIUW42TU9LSjEZLMCclWrvSwAsgRtcwDQYJKoZIhvcNAQEL\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIzMDMyNDE4\nNDMxOVoXDTI0MDMyMzE4NDMxOVowPDELMAkGA1UEAwwCb2sxLTArBgNVBC0MJGUw\nNjVmMWI3LTE2OWEtNDE5YS1iNmQyLTc3OWJkOGM4NzIwNjCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBAK42ixoklDH5K5i1NxXo/AFACDa956pE5RA57wlC\nBfgUYaIDRmv7TUVJh6zoMZSD6wjSZl3QgP7UTTZeHbvs3QE9HUwEkH1Lo3a8vD3z\neqsE2vSnOkpWWnPbfxiQyrTm77/LAWBt7lRLRLdfL6WcucD3wsGqm58sWXM3HG0f\nSN7PHCZUFqU6MpkHw8DiKmht5hBgWG+Vq3Zw8MNaqpwb/NgST3yYdcZwb58G2FTS\nZvDSdUfRmD/mY7TpciYV8EFylXNNFkth8oGNLunR9adgZ+9IunfRKj1a7S5GSwXU\nAZDaojw+8k5i3ikztsWH11wAVCiLj/3euIqq95z8xGycnKcCAwEAATANBgkqhkiG\n9w0BAQsFAAOCAQEAWMvcaozgBrZ/MAxzTJmp5gZyLxmMNV6iT9dcqbwzDtDtBvA/\n46ux6ytAQ+A7Bd3AubvozwCr1Id6g66ae0blWYRRZmF8fDdX/SBjIUkv7u9A3NVQ\nXN9gsEvK9pdpfN4ZiflfGSLdhM1STHycLmhG6H5s7HklbukMRhQi+ejbSzm/wiw1\nipcxuKhSUIVNkTLusN5b+HE2gwF1fn0K0z5jWABy08huLgbaEKXJEx5/FKLZGJga\nfpIzAdf25kMTu3gggseaAmzyX3AtT1i8A8nqYfe8fnnVMkvud89kq5jErv/hlMC9\n49g5yWQR2jilYYM3j9BHDuB+Rs+YS5BCep1JnQ==\n-----END CERTIFICATE-----\n", + "-----BEGIN CERTIFICATE-----\nMIIC6DCCAdCgAwIBAgIUdiBwE/CtaBXJl3MArjZen6Y8kigwDQYJKoZIhvcNAQEL\nBQAwIDELMAkGA1UEBhMCVVMxETAPBgNVBAMMCHdoYXRldmVyMB4XDTIzMDMyNDE4\nNDg1OVoXDTI0MDMyMzE4NDg1OVowPDELMAkGA1UEAwwCb2sxLTArBgNVBC0MJDEw\nMDdjNDBhLWUwYzMtNDVlOS05YTAxLTVlYjY0NWQ0ZmEyZDCCASIwDQYJKoZIhvcN\nAQEBBQADggEPADCCAQoCggEBANOnUl6JDlXpLMRr/PxgtfE/E5Yk6E/TkPkPL/Kk\ntUGjEi42XZDg9zn3U6cjTDYu+rfKY2jiitfsduW6DQIkEpz3AvbuCMbbgnFpcjsB\nYysLSMTmuz/AVPrfnea/tQTALcONCSy1VhAjGSr81ZRSMB4khl9StSauZrbkpJ1P\nshqkFSUyAi31mKrnXz0Es/v0Yi0FzAlgWrZ4u1Ld+Bo2Xz7oK4mHf7/93Jc+tEaM\nIqG6ocD0q8bjPp0tlSxftVADNUzWlZfM6fue5EXzOsKqyDrxYOSchfU9dNzKsaBX\nkxbHEeSUPJeYYj7aVPEfAs/tlUGsoXQvwWfRie8grp2BoLECAwEAATANBgkqhkiG\n9w0BAQsFAAOCAQEACZARBpHYH6Gr2a1ka0mCWfBmOZqfDVan9rsI5TCThoylmaXW\nquEiZ2LObI+5faPzxSBhr9TjJlQamsd4ywout7pHKN8ZGqrCMRJ1jJbUfobu1n2k\nUOsY4+jzV1IRBXJzj64fLal4QhUNv341lAer6Vz3cAyRk7CK89b/DEY0x+jVpyZT\n1osx9JtsOmkDTgvdStGzq5kPKWOfjwHkmKQaZXliCgqbhzcCERppp1s/sX6K7nIh\n4lWiEmzUSD3Hngk51KGWlpZszO5KQ4cSZ3HUt/prg+tt0ROC3pY61k+m5dDUa9M8\nRtMI6iTjzSj/UV8DiAx0yeM+bKoy4jGeXmaL3g==\n-----END CERTIFICATE-----\n", + ], + "version": 0, + } + ], + "properties": { + "certificate": { + "$id": "#/properties/certificate", + "type": "string", + "title": "Public TLS certificate", + "description": "Public TLS certificate", + }, + "ca": { + "$id": "#/properties/ca", + "type": "string", + 
"title": "CA public TLS certificate", + "description": "CA Public TLS certificate", + }, + "chain": { + "$id": "#/properties/chain", + "type": "array", + "items": {"type": "string", "$id": "#/properties/chain/items"}, + "title": "CA public TLS certificate chain", + "description": "CA public TLS certificate chain", + }, + "version": { + "$id": "#/properties/version", + "type": "integer", + "title": "Interface version", + "minimum": 0, + "description": "Highest supported version of this interface", + }, + }, + "anyOf": [{"required": ["certificate"]}, {"required": ["ca"]}, {"required": ["chain"]}], + "additionalProperties": True, +} + + +class CertificateAvailableEvent(EventBase): + """Charm Event triggered when a TLS certificate is available.""" + + def __init__( + self, + handle: Handle, + certificate: str, + ca: str, + chain: List[str], + relation_id: int, + ): + super().__init__(handle) + self.certificate = certificate + self.ca = ca + self.chain = chain + self.relation_id = relation_id + + def snapshot(self) -> dict: + """Return snapshot.""" + return { + "certificate": self.certificate, + "ca": self.ca, + "chain": self.chain, + "relation_id": self.relation_id, + } + + def restore(self, snapshot: dict): + """Restores snapshot.""" + self.certificate = snapshot["certificate"] + self.ca = snapshot["ca"] + self.chain = snapshot["chain"] + self.relation_id = snapshot["relation_id"] + + +class CertificateRemovedEvent(EventBase): + """Charm Event triggered when a TLS certificate is removed.""" + + def __init__(self, handle: Handle, relation_id: int): + super().__init__(handle) + self.relation_id = relation_id + + def snapshot(self) -> dict: + """Return snapshot.""" + return {"relation_id": self.relation_id} + + def restore(self, snapshot: dict): + """Restores snapshot.""" + self.relation_id = snapshot["relation_id"] + + +def _load_relation_data(raw_relation_data: Mapping[str, str]) -> dict: + """Load relation data from the relation data bag. + + Args: + raw_relation_data: Relation data from the databag + + Returns: + dict: Relation data in dict format. + """ + loaded_relation_data = {} + for key in raw_relation_data: + try: + loaded_relation_data[key] = json.loads(raw_relation_data[key]) + except (json.decoder.JSONDecodeError, TypeError): + loaded_relation_data[key] = raw_relation_data[key] + return loaded_relation_data + + +class CertificateTransferRequirerCharmEvents(CharmEvents): + """List of events that the Certificate Transfer requirer charm can leverage.""" + + certificate_available = EventSource(CertificateAvailableEvent) + certificate_removed = EventSource(CertificateRemovedEvent) + + +class CertificateTransferProvides(Object): + """Certificate Transfer provider class.""" + + def __init__(self, charm: CharmBase, relationship_name: str): + super().__init__(charm, relationship_name) + self.charm = charm + self.relationship_name = relationship_name + + def set_certificate( + self, + certificate: str, + ca: str, + chain: List[str], + relation_id: int, + ) -> None: + """Add certificates to relation data. 
+ + Args: + certificate (str): Certificate + ca (str): CA Certificate + chain (list): CA Chain + relation_id (int): Juju relation ID + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, + relation_id=relation_id, + ) + if not relation: + raise RuntimeError( + f"No relation found with relation name {self.relationship_name} and " + f"relation ID {relation_id}" + ) + relation.data[self.model.unit]["certificate"] = certificate + relation.data[self.model.unit]["ca"] = ca + relation.data[self.model.unit]["chain"] = json.dumps(chain) + relation.data[self.model.unit]["version"] = str(LIBAPI) + + def remove_certificate(self, relation_id: int) -> None: + """Remove a given certificate from relation data. + + Args: + relation_id (int): Relation ID + + Returns: + None + """ + relation = self.model.get_relation( + relation_name=self.relationship_name, + relation_id=relation_id, + ) + if not relation: + logger.warning( + "Can't remove certificate - Non-existent relation '%s'", self.relationship_name + ) + return + unit_relation_data = relation.data[self.model.unit] + certificate_removed = False + if "certificate" in unit_relation_data: + relation.data[self.model.unit].pop("certificate") + certificate_removed = True + if "ca" in unit_relation_data: + relation.data[self.model.unit].pop("ca") + certificate_removed = True + if "chain" in unit_relation_data: + relation.data[self.model.unit].pop("chain") + certificate_removed = True + + if certificate_removed: + logger.warning("Certificate removed from relation data") + else: + logger.warning("Can't remove certificate - No certificate in relation data") + + +class CertificateTransferRequires(Object): + """TLS certificates requirer class to be instantiated by TLS certificates requirers.""" + + on = CertificateTransferRequirerCharmEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relationship_name: str, + ): + """Generates/use private key and observes relation changed event. + + Args: + charm: Charm object + relationship_name: Juju relation name + """ + super().__init__(charm, relationship_name) + self.relationship_name = relationship_name + self.charm = charm + self.framework.observe( + charm.on[relationship_name].relation_changed, self._on_relation_changed + ) + self.framework.observe( + charm.on[relationship_name].relation_broken, self._on_relation_broken + ) + self.framework.observe( + charm.on[relationship_name].relation_created, self._on_relation_created + ) + + @staticmethod + def _relation_data_is_valid(relation_data: dict) -> bool: + """Return whether relation data is valid based on json schema. + + Args: + relation_data: Relation data in dict format. + + Returns: + bool: Whether relation data is valid. + """ + try: + validate(instance=relation_data, schema=PROVIDER_JSON_SCHEMA) + return True + except exceptions.ValidationError: + return False + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Emit certificate available event. 
+ + Args: + event: Juju event + + Returns: + None + """ + if not event.unit: + logger.info("No remote unit in relation: %s", self.relationship_name) + return + remote_unit_relation_data = _load_relation_data(event.relation.data[event.unit]) + if not self._relation_data_is_valid(remote_unit_relation_data): + logger.warning( + "Provider relation data did not pass JSON Schema validation: %s", + event.relation.data[event.unit], + ) + return + self.on.certificate_available.emit( + certificate=remote_unit_relation_data.get("certificate"), + ca=remote_unit_relation_data.get("ca"), + chain=remote_unit_relation_data.get("chain"), + relation_id=event.relation.id, + ) + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Handle relation broken event. + + Args: + event: Juju event + + Returns: + None + """ + self.on.certificate_removed.emit(relation_id=event.relation.id) + + def _on_relation_created(self, event: RelationCreatedEvent) -> None: + """Handle relation created event. + + Args: + event: Juju event + + Returns: + None + """ + if self.model.unit.is_leader(): + event.relation.data[self.model.app]["version"] = str(LIBAPI) + + def is_ready(self, relation: Relation) -> bool: + """Check if the relation is ready by checking that it has valid relation data.""" + relation_data = _load_relation_data(relation.data[relation.units.pop()]) + if not self._relation_data_is_valid(relation_data): + logger.warning("Provider relation data did not pass JSON Schema validation: ") + return False + return True diff --git a/lib/charms/glauth_k8s/v0/ldap.py b/lib/charms/glauth_k8s/v0/ldap.py new file mode 100644 index 0000000000..b68e1d2f30 --- /dev/null +++ b/lib/charms/glauth_k8s/v0/ldap.py @@ -0,0 +1,571 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""# Juju Charm Library for the `ldap` Juju Interface. + +This juju charm library contains the Provider and Requirer classes for handling +the `ldap` interface. + +## Requirer Charm + +The requirer charm is expected to: + +- Provide information for the provider charm to deliver LDAP related +information in the juju integration, in order to communicate with the LDAP +server and authenticate LDAP operations +- Listen to the custom juju event `LdapReadyEvent` to obtain the LDAP +related information from the integration +- Listen to the custom juju event `LdapUnavailableEvent` to handle the +situation when the LDAP integration is broken + +```python + +from charms.glauth_k8s.v0.ldap import ( + LdapRequirer, + LdapReadyEvent, + LdapUnavailableEvent, +) + +class RequirerCharm(CharmBase): + # LDAP requirer charm that integrates with an LDAP provider charm. + + def __init__(self, *args): + super().__init__(*args) + + self.ldap_requirer = LdapRequirer(self) + self.framework.observe( + self.ldap_requirer.on.ldap_ready, + self._on_ldap_ready, + ) + self.framework.observe( + self.ldap_requirer.on.ldap_unavailable, + self._on_ldap_unavailable, + ) + + def _on_ldap_ready(self, event: LdapReadyEvent) -> None: + # Consume the LDAP related information + ldap_data = self.ldap_requirer.consume_ldap_relation_data( + relation=event.relation, + ) + + # Configure the LDAP requirer charm + ... + + def _on_ldap_unavailable(self, event: LdapUnavailableEvent) -> None: + # Handle the situation where the LDAP integration is broken + ... 
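+        # (Illustrative sketch only, not part of this library's API: a
+        # requirer would typically drop any cached LDAP settings here and
+        # surface the broken integration, e.g. by setting a blocked status.)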
+``` + +As shown above, the library offers custom juju events to handle specific +situations, which are listed below: + +- ldap_ready: event emitted when the LDAP related information is ready for +requirer charm to use. +- ldap_unavailable: event emitted when the LDAP integration is broken. + +Additionally, the requirer charmed operator needs to declare the `ldap` +interface in the `metadata.yaml`: + +```yaml +requires: + ldap: + interface: ldap +``` + +## Provider Charm + +The provider charm is expected to: + +- Use the information provided by the requirer charm to provide LDAP related +information for the requirer charm to connect and authenticate to the LDAP +server +- Listen to the custom juju event `LdapRequestedEvent` to offer LDAP related +information in the integration + +```python + +from charms.glauth_k8s.v0.ldap import ( + LdapProvider, + LdapRequestedEvent, +) + +class ProviderCharm(CharmBase): + # LDAP provider charm. + + def __init__(self, *args): + super().__init__(*args) + + self.ldap_provider = LdapProvider(self) + self.framework.observe( + self.ldap_provider.on.ldap_requested, + self._on_ldap_requested, + ) + + def _on_ldap_requested(self, event: LdapRequestedEvent) -> None: + # Consume the information provided by the requirer charm + requirer_data = event.data + + # Prepare the LDAP related information using the requirer's data + ldap_data = ... + + # Update the integration data + self.ldap_provider.update_relations_app_data( + relation.id, + ldap_data, + ) +``` + +As shown above, the library offers custom juju events to handle specific +situations, which are listed below: + +- ldap_requested: event emitted when the requirer charm is requesting the +LDAP related information in order to connect and authenticate to the LDAP server +""" + +import json +from functools import wraps +from string import Template +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union + +import ops +from ops.charm import ( + CharmBase, + RelationBrokenEvent, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, +) +from ops.framework import EventSource, Handle, Object, ObjectEvents +from ops.model import Relation, SecretNotFoundError +from pydantic import StrictBool, ValidationError, version + +# The unique CharmHub library identifier, never change it +LIBID = "5a535b3c4d0b40da98e29867128e57b9" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 10 + +PYDEPS = ["pydantic"] + +DEFAULT_RELATION_NAME = "ldap" +BIND_ACCOUNT_SECRET_LABEL_TEMPLATE = Template("relation-$relation_id-bind-account-secret") + +PYDANTIC_IS_V1 = int(version.VERSION.split(".")[0]) < 2 +if PYDANTIC_IS_V1: + # Pydantic v1 backwards compatibility logic, + # see https://docs.pydantic.dev/latest/migration/ for more info. 
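+    # Only the pieces of the v2 API used below are emulated here:
+    # Field(frozen=...), field_validator(mode="before"), field_serializer
+    # and model_dump.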
+ # This does not offer complete backwards compatibility + + from pydantic import BaseModel as BaseModelV1 + from pydantic import Field as FieldV1 + from pydantic import validator + from pydantic.main import ModelMetaclass + + def Field(*args: Any, **kwargs: Any) -> FieldV1: # noqa N802 + if frozen := kwargs.pop("frozen", None): + kwargs["allow_mutations"] = not frozen + return FieldV1(*args, **kwargs) + + def field_validator(*args: Any, **kwargs: Any) -> Callable: + if kwargs.get("mode") == "before": + kwargs.pop("mode") + kwargs["pre"] = True + return validator(*args, **kwargs) + + encoders_config = {} + + def field_serializer(*fields: str, mode: Optional[str] = None) -> Callable: + def _field_serializer(f: Callable, *args: Any, **kwargs: Any) -> Callable: + @wraps(f) + def wrapper(self: object, *args: Any, **kwargs: Any) -> Any: + return f(self, *args, **kwargs) + + encoders_config[wrapper] = fields + return wrapper + + return _field_serializer + + class ModelCompatibilityMeta(ModelMetaclass): + def __init__(self, name: str, bases: Tuple[object], attrs: Dict) -> None: + if not hasattr(self, "_encoders"): + self._encoders = {} + + self._encoders.update({ + encoder: func + for func in attrs.values() + if callable(func) and func in encoders_config + for encoder in encoders_config[func] + }) + + super().__init__(name, bases, attrs) + + class BaseModel(BaseModelV1, metaclass=ModelCompatibilityMeta): + def model_dump(self, *args: Any, **kwargs: Any) -> Dict: + d = self.dict(*args, **kwargs) + for name, f in self._encoders.items(): + d[name] = f(self, d[name]) + return d + +else: + from pydantic import ( # type: ignore[no-redef] + BaseModel, + Field, + field_serializer, + field_validator, + ) + + +def leader_unit(func: Callable) -> Callable: + @wraps(func) + def wrapper( + obj: Union["LdapProvider", "LdapRequirer"], *args: Any, **kwargs: Any + ) -> Optional[Any]: + if not obj.unit.is_leader(): + return None + + return func(obj, *args, **kwargs) + + return wrapper + + +@leader_unit +def _update_relation_app_databag( + ldap: Union["LdapProvider", "LdapRequirer"], relation: Relation, data: dict +) -> None: + if relation is None: + return + + data = {k: str(v) if v else "" for k, v in data.items()} + relation.data[ldap.app].update(data) + + +class Secret: + def __init__(self, secret: ops.Secret = None) -> None: + self._secret: ops.Secret = secret + + @property + def uri(self) -> str: + return self._secret.id if self._secret else "" + + @classmethod + def load( + cls, + charm: CharmBase, + label: str, + *, + content: Optional[dict[str, str]] = None, + ) -> "Secret": + try: + secret = charm.model.get_secret(label=label) + except SecretNotFoundError: + secret = charm.app.add_secret(label=label, content=content) + + return Secret(secret) + + @classmethod + def create_or_update(cls, charm: CharmBase, label: str, content: dict[str, str]) -> "Secret": + try: + secret = charm.model.get_secret(label=label) + secret.set_content(content=content) + except SecretNotFoundError: + secret = charm.app.add_secret(label=label, content=content) + + return Secret(secret) + + def grant(self, relation: Relation) -> None: + self._secret.grant(relation) + + def remove(self) -> None: + self._secret.remove_all_revisions() + + +class LdapProviderBaseData(BaseModel): + urls: List[str] = Field(frozen=True) + ldaps_urls: List[str] = Field(frozen=True) + base_dn: str = Field(frozen=True) + starttls: StrictBool = Field(frozen=True) + + @field_validator("urls", mode="before") + @classmethod + def validate_ldap_urls(cls, vs: 
List[str] | str) -> List[str]: + if isinstance(vs, str): + vs = json.loads(vs) + if isinstance(vs, str): + vs = [vs] + + for v in vs: + if not v.startswith("ldap://"): + raise ValidationError.from_exception_data("Invalid LDAP URL scheme.") + + return vs + + @field_validator("ldaps_urls", mode="before") + @classmethod + def validate_ldaps_urls(cls, vs: List[str] | str) -> List[str]: + if isinstance(vs, str): + vs = json.loads(vs) + if isinstance(vs, str): + vs = [vs] + + for v in vs: + if not v.startswith("ldaps://"): + raise ValidationError.from_exception_data("Invalid LDAPS URL scheme.") + + return vs + + @field_serializer("urls", "ldaps_urls") + def serialize_list(self, urls: List[str]) -> str: + return str(json.dumps(urls)) + + @field_validator("starttls", mode="before") + @classmethod + def deserialize_bool(cls, v: str | bool) -> bool: + if isinstance(v, str): + return True if v.casefold() == "true" else False + + return v + + @field_serializer("starttls") + def serialize_bool(self, starttls: bool) -> str: + return str(starttls) + + +class LdapProviderData(LdapProviderBaseData): + bind_dn: str = Field(frozen=True) + bind_password: str = Field(exclude=True) + bind_password_secret: Optional[str] = None + auth_method: Literal["simple"] = Field(frozen=True) + + +class LdapRequirerData(BaseModel): + user: str = Field(frozen=True) + group: str = Field(frozen=True) + + +class LdapRequestedEvent(RelationEvent): + """An event emitted when the LDAP integration is built.""" + + def __init__(self, handle: Handle, relation: Relation) -> None: + super().__init__(handle, relation, relation.app) + + @property + def data(self) -> Optional[LdapRequirerData]: + relation_data = self.relation.data.get(self.relation.app) + return LdapRequirerData(**relation_data) if relation_data else None + + +class LdapProviderEvents(ObjectEvents): + ldap_requested = EventSource(LdapRequestedEvent) + + +class LdapReadyEvent(RelationEvent): + """An event when the LDAP related information is ready.""" + + +class LdapUnavailableEvent(RelationEvent): + """An event when the LDAP integration is unavailable.""" + + +class LdapRequirerEvents(ObjectEvents): + ldap_ready = EventSource(LdapReadyEvent) + ldap_unavailable = EventSource(LdapUnavailableEvent) + + +class LdapProvider(Object): + on = LdapProviderEvents() + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + ) -> None: + super().__init__(charm, relation_name) + + self.charm = charm + self.app = charm.app + self.unit = charm.unit + self._relation_name = relation_name + + self.framework.observe( + self.charm.on[self._relation_name].relation_changed, + self._on_relation_changed, + ) + self.framework.observe( + self.charm.on[self._relation_name].relation_broken, + self._on_relation_broken, + ) + + @leader_unit + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle the event emitted when the requirer charm provides the necessary data.""" + self.on.ldap_requested.emit(event.relation) + + @leader_unit + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Handle the event emitted when the LDAP integration is broken.""" + secret = Secret.load( + self.charm, + label=BIND_ACCOUNT_SECRET_LABEL_TEMPLATE.substitute(relation_id=event.relation.id), + ) + secret.remove() + + def get_bind_password(self, relation_id: int) -> Optional[str]: + """Retrieve the bind account password for a given integration.""" + try: + secret = self.charm.model.get_secret( + 
label=BIND_ACCOUNT_SECRET_LABEL_TEMPLATE.substitute(relation_id=relation_id) + ) + except SecretNotFoundError: + return None + return secret.get_content().get("password") + + def update_relations_app_data( + self, + data: Union[LdapProviderBaseData, LdapProviderData], + /, + relation_id: Optional[int] = None, + ) -> None: + """An API for the provider charm to provide the LDAP related information.""" + if not (relations := self.charm.model.relations.get(self._relation_name)): + return + + if relation_id is not None and isinstance(data, LdapProviderData): + relations = [relation for relation in relations if relation.id == relation_id] + secret = Secret.create_or_update( + self.charm, + BIND_ACCOUNT_SECRET_LABEL_TEMPLATE.substitute(relation_id=relation_id), + {"password": data.bind_password}, + ) + secret.grant(relations[0]) + data.bind_password_secret = secret.uri + + for relation in relations: + _update_relation_app_databag(self.charm, relation, data.model_dump()) + + +class LdapRequirer(Object): + """An LDAP requirer to consume data delivered by an LDAP provider charm.""" + + on = LdapRequirerEvents() + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + *, + data: Optional[LdapRequirerData] = None, + ) -> None: + super().__init__(charm, relation_name) + + self.charm = charm + self.app = charm.app + self.unit = charm.unit + self._relation_name = relation_name + self._data = data + + self.framework.observe( + self.charm.on[self._relation_name].relation_created, + self._on_ldap_relation_created, + ) + self.framework.observe( + self.charm.on[self._relation_name].relation_changed, + self._on_ldap_relation_changed, + ) + self.framework.observe( + self.charm.on[self._relation_name].relation_broken, + self._on_ldap_relation_broken, + ) + + def _on_ldap_relation_created(self, event: RelationCreatedEvent) -> None: + """Handle the event emitted when an LDAP integration is created.""" + user = self._data.user if self._data else self.app.name + group = self._data.group if self._data else self.model.name + _update_relation_app_databag(self.charm, event.relation, {"user": user, "group": group}) + + def _on_ldap_relation_changed(self, event: RelationChangedEvent) -> None: + """Handle the event emitted when the LDAP related information is ready.""" + provider_app = event.relation.app + + if not event.relation.data.get(provider_app): + return + + self.on.ldap_ready.emit(event.relation) + + def _on_ldap_relation_broken(self, event: RelationBrokenEvent) -> None: + """Handle the event emitted when the LDAP integration is broken.""" + self.on.ldap_unavailable.emit(event.relation) + + def consume_ldap_relation_data( + self, + /, + relation: Optional[Relation] = None, + relation_id: Optional[int] = None, + ) -> Optional[LdapProviderData]: + """An API for the requirer charm to consume the LDAP related information in the application databag.""" + if not relation: + relation = self.charm.model.get_relation(self._relation_name, relation_id) + + if not relation: + return None + + provider_data = dict(relation.data.get(relation.app)) + if secret_id := provider_data.get("bind_password_secret"): + secret = self.charm.model.get_secret(id=secret_id) + provider_data["bind_password"] = secret.get_content().get("password") + return LdapProviderData(**provider_data) if provider_data else None + + def _is_relation_active(self, relation: Relation) -> bool: + """Whether the relation is active based on contained data.""" + try: + _ = repr(relation.data) + return True + except (RuntimeError, 
ops.ModelError): + return False + + @property + def relations(self) -> List[Relation]: + """The list of Relation instances associated with this relation_name.""" + return [ + relation + for relation in self.charm.model.relations[self._relation_name] + if self._is_relation_active(relation) + ] + + def _ready_for_relation(self, relation: Relation) -> bool: + if not relation.app: + return False + + return "urls" in relation.data[relation.app] and "bind_dn" in relation.data[relation.app] + + def ready(self, relation_id: Optional[int] = None) -> bool: + """Check if the resource has been created. + + This function can be used to check if the Provider answered with data in the charm code + when outside an event callback. + + Args: + relation_id (int, optional): When provided the check is done only for the relation id + provided, otherwise the check is done for all relations + + Returns: + True or False + + Raises: + IndexError: If relation_id is provided but that relation does not exist + """ + if relation_id is None: + return ( + all(self._ready_for_relation(relation) for relation in self.relations) + if self.relations + else False + ) + + try: + relation = [relation for relation in self.relations if relation.id == relation_id][0] + return self._ready_for_relation(relation) + except IndexError: + raise IndexError(f"relation id {relation_id} cannot be accessed") diff --git a/lib/charms/postgresql_k8s/v0/postgresql_tls.py b/lib/charms/postgresql_k8s/v0/postgresql_tls.py index f55543e0cb..2aeaa52af6 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql_tls.py +++ b/lib/charms/postgresql_k8s/v0/postgresql_tls.py @@ -6,8 +6,9 @@ This class handles certificate request and renewal through the interaction with the TLS Certificates Operator. -This library needs that https://charmhub.io/tls-certificates-interface/libraries/tls_certificates -library is imported to work. +This library needs that the following libraries are imported to work: +- https://charmhub.io/certificate-transfer-interface/libraries/certificate_transfer +- https://charmhub.io/tls-certificates-interface/libraries/tls_certificates It also needs the following methods in the charm class: — get_hostname_by_unit: to retrieve the DNS hostname of the unit. @@ -24,6 +25,15 @@ import socket from typing import List, Optional +from charms.certificate_transfer_interface.v0.certificate_transfer import ( + CertificateAvailableEvent as CertificateAddedEvent, +) +from charms.certificate_transfer_interface.v0.certificate_transfer import ( + CertificateRemovedEvent as CertificateRemovedEvent, +) +from charms.certificate_transfer_interface.v0.certificate_transfer import ( + CertificateTransferRequires, +) from charms.tls_certificates_interface.v2.tls_certificates import ( CertificateAvailableEvent, CertificateExpiringEvent, @@ -45,11 +55,12 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version. 
-LIBPATCH = 13 +LIBPATCH = 14 logger = logging.getLogger(__name__) SCOPE = "unit" -TLS_RELATION = "certificates" +TLS_CREATION_RELATION = "certificates" +TLS_TRANSFER_RELATION = "receive-ca-cert" class PostgreSQLTLS(Object): @@ -63,18 +74,29 @@ def __init__( self.charm = charm self.peer_relation = peer_relation self.additional_dns_names = additional_dns_names or [] - self.certs = TLSCertificatesRequiresV2(self.charm, TLS_RELATION) + self.certs_creation = TLSCertificatesRequiresV2(self.charm, TLS_CREATION_RELATION) + self.certs_transfer = CertificateTransferRequires(self.charm, TLS_TRANSFER_RELATION) self.framework.observe( self.charm.on.set_tls_private_key_action, self._on_set_tls_private_key ) self.framework.observe( - self.charm.on[TLS_RELATION].relation_joined, self._on_tls_relation_joined + self.charm.on[TLS_CREATION_RELATION].relation_joined, self._on_tls_relation_joined + ) + self.framework.observe( + self.charm.on[TLS_CREATION_RELATION].relation_broken, self._on_tls_relation_broken + ) + self.framework.observe( + self.certs_creation.on.certificate_available, self._on_certificate_available + ) + self.framework.observe( + self.certs_creation.on.certificate_expiring, self._on_certificate_expiring ) self.framework.observe( - self.charm.on[TLS_RELATION].relation_broken, self._on_tls_relation_broken + self.certs_transfer.on.certificate_available, self._on_certificate_added + ) + self.framework.observe( + self.certs_transfer.on.certificate_removed, self._on_certificate_removed ) - self.framework.observe(self.certs.on.certificate_available, self._on_certificate_available) - self.framework.observe(self.certs.on.certificate_expiring, self._on_certificate_expiring) def _on_set_tls_private_key(self, event: ActionEvent) -> None: """Set the TLS private key, which will be used for requesting the certificate.""" @@ -93,8 +115,8 @@ def _request_certificate(self, param: Optional[str]): self.charm.set_secret(SCOPE, "key", key.decode("utf-8")) self.charm.set_secret(SCOPE, "csr", csr.decode("utf-8")) - if self.charm.model.get_relation(TLS_RELATION): - self.certs.request_certificate_creation(certificate_signing_request=csr) + if self.charm.model.get_relation(TLS_CREATION_RELATION): + self.certs_creation.request_certificate_creation(certificate_signing_request=csr) @staticmethod def _parse_tls_file(raw_content: str) -> bytes: @@ -117,6 +139,7 @@ def _on_tls_relation_broken(self, event: RelationBrokenEvent) -> None: self.charm.set_secret(SCOPE, "ca", None) self.charm.set_secret(SCOPE, "cert", None) self.charm.set_secret(SCOPE, "chain", None) + if not self.charm.update_config(): logger.debug("Cannot update config at this moment") event.defer() @@ -163,12 +186,52 @@ def _on_certificate_expiring(self, event: CertificateExpiringEvent) -> None: subject=self.charm.get_hostname_by_unit(self.charm.unit.name), **self._get_sans(), ) - self.certs.request_certificate_renewal( + self.certs_creation.request_certificate_renewal( old_certificate_signing_request=old_csr, new_certificate_signing_request=new_csr, ) self.charm.set_secret(SCOPE, "csr", new_csr.decode("utf-8")) + def _on_certificate_added(self, event: CertificateAddedEvent) -> None: + """Enable TLS when TLS certificate is added.""" + relation = self.charm.model.get_relation(TLS_TRANSFER_RELATION, event.relation_id) + if relation is None: + logger.error("Relationship not established anymore.") + return + + secret_name = f"ca-{relation.app.name}" + self.charm.set_secret(SCOPE, secret_name, event.ca) + + try: + if not 
self.charm.push_ca_file_into_workload(secret_name): + logger.debug("Cannot push TLS certificates at this moment") + event.defer() + return + except (PebbleConnectionError, PathError, ProtocolError, RetryError) as e: + logger.error("Cannot push TLS certificates: %r", e) + event.defer() + return + + def _on_certificate_removed(self, event: CertificateRemovedEvent) -> None: + """Disable TLS when TLS certificate is removed.""" + relation = self.charm.model.get_relation(TLS_TRANSFER_RELATION, event.relation_id) + if relation is None: + logger.error("Relationship not established anymore.") + return + + secret_name = f"ca-{relation.app.name}" + self.charm.set_secret(SCOPE, secret_name, None) + + try: + if not self.charm.clean_ca_file_from_workload(secret_name): + logger.debug("Cannot clean CA certificates at this moment") + event.defer() + return + except (PebbleConnectionError, PathError, ProtocolError, RetryError) as e: + logger.error("Cannot clean CA certificates: %r", e) + event.defer() + return + def _get_sans(self) -> dict: """Create a list of Subject Alternative Names for a PostgreSQL unit. diff --git a/metadata.yaml b/metadata.yaml index 94cb47ec89..61d7ae7533 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -49,10 +49,17 @@ requires: interface: tls-certificates limit: 1 optional: true + receive-ca-cert: + interface: certificate_transfer + optional: true s3-parameters: interface: s3 limit: 1 optional: true + ldap: + interface: ldap + limit: 1 + optional: true tracing: interface: tracing limit: 1 diff --git a/pyproject.toml b/pyproject.toml index c044782906..1a7c1b7d51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,8 @@ pydantic = "^1.10" cosl = ">=0.0.50" # tls_certificates_interface/v2/tls_certificates.py cryptography = "*" +# certificate_transfer_interface/v0/certificate_transfer.py +# tls_certificates_interface/v2/tls_certificates.py jsonschema = "*" # tempo_coordinator_k8s/v0/charm_tracing.py opentelemetry-exporter-otlp-proto-http = "1.21.0" diff --git a/src/charm.py b/src/charm.py index ee1a4c9ceb..7a9e49830d 100755 --- a/src/charm.py +++ b/src/charm.py @@ -96,6 +96,7 @@ TLS_KEY_FILE, TRACING_PROTOCOL, UNIT_SCOPE, + UPDATE_CERTS_BIN_PATH, USER, USER_PASSWORD_KEY, ) @@ -191,6 +192,8 @@ def __init__(self, *args): self.framework.observe(self.on.update_status, self._on_update_status) self.cluster_name = self.app.name self._member_name = self.unit.name.replace("/", "-") + + self._certs_path = "/usr/local/share/ca-certificates" self._storage_path = self.meta.storages["pgdata"].location self.upgrade = PostgreSQLUpgrade( @@ -1803,6 +1806,33 @@ def push_tls_files_to_workload(self) -> bool: logger.exception("TLS files failed to push. Error in config update") return False + def push_ca_file_into_workload(self, secret_name: str) -> bool: + """Move CA certificates file into the PostgreSQL storage path.""" + certs = self.get_secret(UNIT_SCOPE, secret_name) + if certs is not None: + certs_file = Path(self._certs_path, f"{secret_name}.crt") + certs_file.write_text(certs) + subprocess.check_call([UPDATE_CERTS_BIN_PATH]) # noqa: S603 + + try: + return self.update_config() + except Exception: + logger.exception("CA file failed to push. 
Error in config update") + return False + + def clean_ca_file_from_workload(self, secret_name: str) -> bool: + """Cleans up CA certificates from the PostgreSQL storage path.""" + certs_file = Path(self._certs_path, f"{secret_name}.crt") + certs_file.unlink() + + subprocess.check_call([UPDATE_CERTS_BIN_PATH]) # noqa: S603 + + try: + return self.update_config() + except Exception: + logger.exception("CA file failed to clean. Error in config update") + return False + def _reboot_on_detached_storage(self, event: EventBase) -> None: """Reboot on detached storage. diff --git a/src/constants.py b/src/constants.py index 4dd91be625..7a431a1640 100644 --- a/src/constants.py +++ b/src/constants.py @@ -56,6 +56,8 @@ POSTGRESQL_DATA_PATH = f"{SNAP_DATA_PATH}/postgresql" POSTGRESQL_LOGS_PATH = f"{SNAP_LOGS_PATH}/postgresql" +UPDATE_CERTS_BIN_PATH = "/usr/sbin/update-ca-certificates" + PGBACKREST_CONFIGURATION_FILE = f"--config={PGBACKREST_CONF_PATH}/pgbackrest.conf" METRICS_PORT = "9187" diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 6fe8fd144f..9794364a4b 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -1131,7 +1131,9 @@ async def backup_operations( config={"profile": "testing"}, ) - await ops_test.model.relate(database_app_name, tls_certificates_app_name) + await ops_test.model.relate( + f"{database_app_name}:certificates", f"{tls_certificates_app_name}:certificates" + ) async with ops_test.fast_forward(fast_interval="60s"): await ops_test.model.wait_for_idle(apps=[database_app_name], status="active", timeout=1000) diff --git a/tests/integration/test_backups_pitr_aws.py b/tests/integration/test_backups_pitr_aws.py index d835110179..d291337b7c 100644 --- a/tests/integration/test_backups_pitr_aws.py +++ b/tests/integration/test_backups_pitr_aws.py @@ -68,7 +68,9 @@ async def pitr_backup_operations( logger.info( "integrating self-signed-certificates with postgresql and waiting them to stabilize" ) - await ops_test.model.relate(database_app_name, tls_certificates_app_name) + await ops_test.model.relate( + f"{database_app_name}:certificates", f"{tls_certificates_app_name}:certificates" + ) async with ops_test.fast_forward(fast_interval="60s"): await ops_test.model.wait_for_idle( apps=[database_app_name, tls_certificates_app_name], status="active", timeout=1000 diff --git a/tests/integration/test_backups_pitr_gcp.py b/tests/integration/test_backups_pitr_gcp.py index 40b9e3a41f..99ecb5f72d 100644 --- a/tests/integration/test_backups_pitr_gcp.py +++ b/tests/integration/test_backups_pitr_gcp.py @@ -68,7 +68,9 @@ async def pitr_backup_operations( logger.info( "integrating self-signed-certificates with postgresql and waiting them to stabilize" ) - await ops_test.model.relate(database_app_name, tls_certificates_app_name) + await ops_test.model.relate( + f"{database_app_name}:certificates", f"{tls_certificates_app_name}:certificates" + ) async with ops_test.fast_forward(fast_interval="60s"): await ops_test.model.wait_for_idle( apps=[database_app_name, tls_certificates_app_name], status="active", timeout=1000 diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index 8a02a03755..f37131907b 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -66,7 +66,9 @@ async def test_tls_enabled(ops_test: OpsTest) -> None: ) # Relate it to the PostgreSQL to enable TLS. 
- await ops_test.model.relate(DATABASE_APP_NAME, tls_certificates_app_name) + await ops_test.model.relate( + f"{DATABASE_APP_NAME}:certificates", f"{tls_certificates_app_name}:certificates" + ) await ops_test.model.wait_for_idle(status="active", timeout=1500, raise_on_error=False) # Wait for all units enabling TLS. diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index e500a8d099..50776ef87d 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -41,7 +41,13 @@ SwitchoverFailedError, SwitchoverNotSyncError, ) -from constants import PEER, POSTGRESQL_SNAP_NAME, SECRET_INTERNAL_LABEL, SNAP_PACKAGES +from constants import ( + PEER, + POSTGRESQL_SNAP_NAME, + SECRET_INTERNAL_LABEL, + SNAP_PACKAGES, + UPDATE_CERTS_BIN_PATH, +) CREATE_CLUSTER_CONF_PATH = "/etc/postgresql-common/createcluster.d/pgcharm.conf" @@ -1766,6 +1772,42 @@ def test_push_tls_files_to_workload(harness): assert _render_file.call_count == 2 +def test_push_ca_file_into_workload(harness): + with ( + patch("charm.PostgresqlOperatorCharm.update_config") as _update_config, + patch("pathlib.Path.write_text") as _write_text, + patch("subprocess.check_call") as _check_call, + ): + harness.charm.set_secret("unit", "ca-app", "test-ca") + + assert harness.charm.push_ca_file_into_workload("ca-app") + _write_text.assert_called_once() + _check_call.assert_called_once_with([UPDATE_CERTS_BIN_PATH]) + _update_config.assert_called_once() + + +def test_clean_ca_file_from_workload(harness): + with ( + patch("charm.PostgresqlOperatorCharm.update_config") as _update_config, + patch("pathlib.Path.write_text") as _write_text, + patch("pathlib.Path.unlink") as _unlink, + patch("subprocess.check_call") as _check_call, + ): + harness.charm.set_secret("unit", "ca-app", "test-ca") + + assert harness.charm.push_ca_file_into_workload("ca-app") + _write_text.assert_called_once() + _check_call.assert_called_once_with([UPDATE_CERTS_BIN_PATH]) + _update_config.assert_called_once() + + _check_call.reset_mock() + _update_config.reset_mock() + + assert harness.charm.clean_ca_file_from_workload("ca-app") + _unlink.assert_called_once() + _check_call.assert_called_once_with([UPDATE_CERTS_BIN_PATH]) + + def test_is_workload_running(harness): with patch("charm.snap.SnapCache") as _snap_cache: pg_snap = _snap_cache.return_value[POSTGRESQL_SNAP_NAME] From 13b82793977e2cb0355eb1128cbb70385901f340 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Wed, 16 Apr 2025 19:20:49 +0200 Subject: [PATCH 16/21] [DPE-6345] LDAP III: Define config and handlers (#825) --- config.yaml | 8 +++- src/charm.py | 61 ++++++++++++++++++++++++++++-- src/cluster.py | 18 +++++++++ src/config.py | 1 + src/ldap.py | 66 +++++++++++++++++++++++++++++++++ templates/patroni.yml.j2 | 10 +++-- tests/unit/test_charm.py | 34 +++++++++++++++++ tests/unit/test_cluster.py | 18 +++++++++ tests/unit/test_ldap.py | 76 ++++++++++++++++++++++++++++++++++++++ 9 files changed, 285 insertions(+), 7 deletions(-) create mode 100644 src/ldap.py create mode 100644 tests/unit/test_ldap.py diff --git a/config.yaml b/config.yaml index 8819015d79..f509a0eca1 100644 --- a/config.yaml +++ b/config.yaml @@ -69,6 +69,12 @@ options: Enable synchronized sequential scans. type: boolean default: true + ldap_search_filter: + description: | + The LDAP search filter to match users with. + Example: (|(uid=$username)(email=$username)) + type: string + default: "(uid=$username)" logging_client_min_messages: description: | Sets the message levels that are sent to the client. 
@@ -889,4 +895,4 @@ options: Multixact age at which VACUUM should scan whole table to freeze tuples. Allowed values are: from 0 to 2000000000. type: int - default: 150000000 \ No newline at end of file + default: 150000000 diff --git a/src/charm.py b/src/charm.py index 7a9e49830d..d2c2b1ba5a 100755 --- a/src/charm.py +++ b/src/charm.py @@ -100,6 +100,7 @@ USER, USER_PASSWORD_KEY, ) +from ldap import PostgreSQLLDAP from relations.async_replication import ( REPLICATION_CONSUMER_RELATION, REPLICATION_OFFER_RELATION, @@ -135,6 +136,7 @@ class CannotConnectError(Exception): PostgreSQL, PostgreSQLAsyncReplication, PostgreSQLBackups, + PostgreSQLLDAP, PostgreSQLProvider, PostgreSQLTLS, PostgreSQLUpgrade, @@ -206,6 +208,7 @@ def __init__(self, *args): self.legacy_db_relation = DbProvides(self, admin=False) self.legacy_db_admin_relation = DbProvides(self, admin=True) self.backup = PostgreSQLBackups(self, "s3-parameters") + self.ldap = PostgreSQLLDAP(self, "ldap") self.tls = PostgreSQLTLS(self, PEER) self.async_replication = PostgreSQLAsyncReplication(self) self.restart_manager = RollingOpsManager( @@ -909,6 +912,21 @@ def _patroni(self) -> Patroni: self.get_secret(APP_SCOPE, PATRONI_PASSWORD_KEY), ) + @property + def is_connectivity_enabled(self) -> bool: + """Return whether this unit can be connected externally.""" + return self.unit_peer_data.get("connectivity", "on") == "on" + + @property + def is_ldap_charm_related(self) -> bool: + """Return whether this unit has an LDAP charm related.""" + return self.app_peer_data.get("ldap_enabled", "False") == "True" + + @property + def is_ldap_enabled(self) -> bool: + """Return whether this unit has LDAP enabled.""" + return self.is_ldap_charm_related and self.is_cluster_initialised + @property def is_primary(self) -> bool: """Return whether this unit is the primary instance.""" @@ -1407,12 +1425,16 @@ def _on_get_password(self, event: ActionEvent) -> None: If no user is provided, the password of the operator user is returned. """ username = event.params.get("username", USER) + if username not in PASSWORD_USERS and self.is_ldap_enabled: + event.fail("The action can be run only for system users when LDAP is enabled") + return if username not in PASSWORD_USERS: event.fail( - f"The action can be run only for users used by the charm or Patroni:" + f"The action can be run only for system users or Patroni:" f" {', '.join(PASSWORD_USERS)} not {username}" ) return + event.set_results({"password": self.get_secret(APP_SCOPE, f"{username}-password")}) def _on_set_password(self, event: ActionEvent) -> None: @@ -1423,9 +1445,12 @@ def _on_set_password(self, event: ActionEvent) -> None: return username = event.params.get("username", USER) + if username not in SYSTEM_USERS and self.is_ldap_enabled: + event.fail("The action can be run only for system users when LDAP is enabled") + return if username not in SYSTEM_USERS: event.fail( - f"The action can be run only for users used by the charm:" + f"The action can be run only for system users:" f" {', '.join(SYSTEM_USERS)} not {username}" ) return @@ -1911,8 +1936,9 @@ def update_config(self, is_creating_backup: bool = False, no_peers: bool = False # Update and reload configuration based on TLS files availability. 
self._patroni.render_patroni_yml_file( - connectivity=self.unit_peer_data.get("connectivity", "on") == "on", + connectivity=self.is_connectivity_enabled, is_creating_backup=is_creating_backup, + enable_ldap=self.is_ldap_enabled, enable_tls=enable_tls, backup_id=self.app_peer_data.get("restoring-backup"), pitr_target=self.app_peer_data.get("restore-to-time"), @@ -2177,6 +2203,35 @@ def get_plugins(self) -> list[str]: plugins.append(ext) return plugins + def get_ldap_parameters(self) -> dict: + """Returns the LDAP configuration to use.""" + if not self.is_cluster_initialised: + return {} + if not self.is_ldap_charm_related: + logger.debug("LDAP is not enabled") + return {} + + data = self.ldap.get_relation_data() + if data is None: + return {} + + params = { + "ldapbasedn": data.base_dn, + "ldapbinddn": data.bind_dn, + "ldapbindpasswd": data.bind_password, + "ldaptls": data.starttls, + "ldapurl": data.urls[0], + } + + # LDAP authentication parameters that are exclusive to + # one of the two supported modes (simple bind or search+bind) + # must be put at the very end of the parameters string + params.update({ + "ldapsearchfilter": self.config.ldap_search_filter, + }) + + return params + if __name__ == "__main__": main(PostgresqlOperatorCharm) diff --git a/src/cluster.py b/src/cluster.py index b321a4cac4..3ca1a46d4d 100644 --- a/src/cluster.py +++ b/src/cluster.py @@ -162,6 +162,17 @@ def _patroni_url(self) -> str: """Patroni REST API URL.""" return f"{'https' if self.tls_enabled else 'http'}://{self.unit_ip}:8008" + @staticmethod + def _dict_to_hba_string(_dict: dict[str, Any]) -> str: + """Transform a dictionary into a Host Based Authentication valid string.""" + for key, value in _dict.items(): + if isinstance(value, bool): + _dict[key] = int(value) + if isinstance(value, str): + _dict[key] = f'"{value}"' + + return " ".join(f"{key}={value}" for key, value in _dict.items()) + def bootstrap_cluster(self) -> bool: """Bootstrap a PostgreSQL cluster using Patroni.""" # Render the configuration files and start the cluster. @@ -610,6 +621,7 @@ def render_patroni_yml_file( self, connectivity: bool = False, is_creating_backup: bool = False, + enable_ldap: bool = False, enable_tls: bool = False, stanza: str | None = None, restore_stanza: str | None = None, @@ -626,6 +638,7 @@ def render_patroni_yml_file( Args: connectivity: whether to allow external connections to the database. is_creating_backup: whether this unit is creating a backup. + enable_ldap: whether to enable LDAP authentication. enable_tls: whether to enable TLS. stanza: name of the stanza created by pgBackRest. restore_stanza: name of the stanza used when restoring a backup. @@ -640,6 +653,9 @@ def render_patroni_yml_file( # Open the template patroni.yml file. with open("templates/patroni.yml.j2") as file: template = Template(file.read()) + + ldap_params = self.charm.get_ldap_parameters() + # Render the template file with the correct values. 
rendered = template.render( conf_path=PATRONI_CONF_PATH, @@ -648,6 +664,7 @@ def render_patroni_yml_file( log_path=PATRONI_LOGS_PATH, postgresql_log_path=POSTGRESQL_LOGS_PATH, data_path=POSTGRESQL_DATA_PATH, + enable_ldap=enable_ldap, enable_tls=enable_tls, member_name=self.member_name, partner_addrs=self.charm.async_replication.get_partner_addresses() @@ -677,6 +694,7 @@ def render_patroni_yml_file( primary_cluster_endpoint=self.charm.async_replication.get_primary_cluster_endpoint(), extra_replication_endpoints=self.charm.async_replication.get_standby_endpoints(), raft_password=self.raft_password, + ldap_parameters=self._dict_to_hba_string(ldap_params), patroni_password=self.patroni_password, ) self.render_file(f"{PATRONI_CONF_PATH}/patroni.yaml", rendered, 0o600) diff --git a/src/config.py b/src/config.py index a755ca0f6d..610723291b 100644 --- a/src/config.py +++ b/src/config.py @@ -29,6 +29,7 @@ class CharmConfig(BaseConfigModel): instance_max_locks_per_transaction: int | None instance_password_encryption: str | None instance_synchronize_seqscans: bool | None + ldap_search_filter: str | None logging_client_min_messages: str | None logging_log_connections: bool | None logging_log_disconnections: bool | None diff --git a/src/ldap.py b/src/ldap.py new file mode 100644 index 0000000000..ae737494d1 --- /dev/null +++ b/src/ldap.py @@ -0,0 +1,66 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. + +"""LDAP implementation.""" + +import logging + +from charms.glauth_k8s.v0.ldap import ( + LdapProviderData, + LdapReadyEvent, + LdapRequirer, + LdapUnavailableEvent, +) +from ops import Relation +from ops.framework import Object +from ops.model import ActiveStatus + +logger = logging.getLogger(__name__) + + +class PostgreSQLLDAP(Object): + """In this class, we manage PostgreSQL LDAP access.""" + + def __init__(self, charm, relation_name: str): + """Manager of PostgreSQL LDAP.""" + super().__init__(charm, "ldap") + self.charm = charm + self.relation_name = relation_name + + # LDAP relation handles the config options for LDAP access + self.ldap = LdapRequirer(self.charm, self.relation_name) + self.framework.observe(self.ldap.on.ldap_ready, self._on_ldap_ready) + self.framework.observe(self.ldap.on.ldap_unavailable, self._on_ldap_unavailable) + + @property + def _relation(self) -> Relation: + """Return the relation object.""" + return self.model.get_relation(self.relation_name) + + def _on_ldap_ready(self, _: LdapReadyEvent) -> None: + """Handler for the LDAP ready event.""" + logger.debug("Enabling LDAP connection") + if self.charm.unit.is_leader(): + self.charm.app_peer_data.update({"ldap_enabled": "True"}) + + self.charm.update_config() + self.charm.unit.status = ActiveStatus() + + def _on_ldap_unavailable(self, _: LdapUnavailableEvent) -> None: + """Handler for the LDAP unavailable event.""" + logger.debug("Disabling LDAP connection") + if self.charm.unit.is_leader(): + self.charm.app_peer_data.update({"ldap_enabled": "False"}) + + self.charm.update_config() + + def get_relation_data(self) -> LdapProviderData | None: + """Get the LDAP info from the LDAP Provider class.""" + data = self.ldap.consume_ldap_relation_data(relation=self._relation) + if data is None: + logger.warning("LDAP relation is not ready") + + if not self.charm.is_connectivity_enabled: + logger.warning("LDAP server will not be accessible") + + return data diff --git a/templates/patroni.yml.j2 b/templates/patroni.yml.j2 index 63d99c160f..0ff720d698 100644 --- a/templates/patroni.yml.j2 +++ 
b/templates/patroni.yml.j2 @@ -161,10 +161,14 @@ postgresql: {%- if not connectivity %} - {{ 'hostssl' if enable_tls else 'host' }} all all 0.0.0.0/0 reject - {{ 'hostssl' if enable_tls else 'host' }} all all {{ self_ip }} md5 - {% else %} - - {{ 'hostssl' if enable_tls else 'host' }} replication replication 127.0.0.1/32 md5 - {%- endif %} + {%- elif enable_ldap %} + - {{ 'hostssl' if enable_tls else 'host' }} all +identity_access 0.0.0.0/0 ldap {{ ldap_parameters }} + - {{ 'hostssl' if enable_tls else 'host' }} all +internal_access 0.0.0.0/0 md5 + - {{ 'hostssl' if enable_tls else 'host' }} all +relation_access 0.0.0.0/0 md5 + {%- else %} - {{ 'hostssl' if enable_tls else 'host' }} all all 0.0.0.0/0 md5 + {%- endif %} + - {{ 'hostssl' if enable_tls else 'host' }} replication replication 127.0.0.1/32 md5 # Allow replications connections from other cluster members. {%- for endpoint in extra_replication_endpoints %} - {{ 'hostssl' if enable_tls else 'host' }} replication replication {{ endpoint }}/32 md5 diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 50776ef87d..025ab68e01 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -1301,6 +1301,7 @@ def test_update_config(harness): _render_patroni_yml_file.assert_called_once_with( connectivity=True, is_creating_backup=False, + enable_ldap=False, enable_tls=False, backup_id=None, stanza=None, @@ -1325,6 +1326,7 @@ def test_update_config(harness): _render_patroni_yml_file.assert_called_once_with( connectivity=True, is_creating_backup=False, + enable_ldap=False, enable_tls=True, backup_id=None, stanza=None, @@ -2845,3 +2847,35 @@ def test_on_promote_to_primary(harness): harness.charm._on_promote_to_primary(event) _raft_reinitialisation.assert_called_once_with() assert harness.charm.unit_peer_data["raft_candidate"] == "True" + + +def test_get_ldap_parameters(harness): + with ( + patch("charm.PostgreSQLLDAP.get_relation_data") as _get_relation_data, + patch( + target="charm.PostgresqlOperatorCharm.is_cluster_initialised", + new_callable=PropertyMock, + return_value=True, + ) as _cluster_initialised, + ): + with harness.hooks_disabled(): + harness.update_relation_data( + harness.model.get_relation(PEER).id, + harness.charm.app.name, + {"ldap_enabled": "False"}, + ) + + harness.charm.get_ldap_parameters() + _get_relation_data.assert_not_called() + _get_relation_data.reset_mock() + + with harness.hooks_disabled(): + harness.update_relation_data( + harness.model.get_relation(PEER).id, + harness.charm.app.name, + {"ldap_enabled": "True"}, + ) + + harness.charm.get_ldap_parameters() + _get_relation_data.assert_called_once() + _get_relation_data.reset_mock() diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py index 7dd1e1ddf7..4e02f37beb 100644 --- a/tests/unit/test_cluster.py +++ b/tests/unit/test_cluster.py @@ -181,6 +181,24 @@ def test_get_postgresql_version(peers_ips, patroni): _get_installed_snaps.assert_called_once_with() +def test_dict_to_hba_string(harness, patroni): + mock_data = { + "ldapbasedn": "dc=example,dc=net", + "ldapbinddn": "cn=serviceuser,dc=example,dc=net", + "ldapbindpasswd": "password", + "ldaptls": False, + "ldapurl": "ldap://0.0.0.0:3893", + } + + assert patroni._dict_to_hba_string(mock_data) == ( + 'ldapbasedn="dc=example,dc=net" ' + 'ldapbinddn="cn=serviceuser,dc=example,dc=net" ' + 'ldapbindpasswd="password" ' + "ldaptls=0 " + 'ldapurl="ldap://0.0.0.0:3893"' + ) + + def test_get_primary(peers_ips, patroni): with ( patch("requests.get", side_effect=mocked_requests_get) as 
_get, diff --git a/tests/unit/test_ldap.py b/tests/unit/test_ldap.py new file mode 100644 index 0000000000..9fe8edcbca --- /dev/null +++ b/tests/unit/test_ldap.py @@ -0,0 +1,76 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. + +from unittest.mock import ( + MagicMock, + patch, +) + +import pytest +from charms.glauth_k8s.v0.ldap import LdapProviderData +from ops.testing import Harness + +from charm import PostgresqlOperatorCharm +from constants import PEER + + +@pytest.fixture(autouse=True) +def harness(): + harness = Harness(PostgresqlOperatorCharm) + + # Set up the initial relation and hooks. + peer_relation_id = harness.add_relation(PEER, "postgresql") + harness.add_relation_unit(peer_relation_id, "postgresql/0") + harness.set_leader(True) + + harness.begin() + yield harness + harness.cleanup() + + +def test_on_ldap_ready(harness): + mock_event = MagicMock() + + with patch("charm.PostgresqlOperatorCharm.update_config") as _update_config: + harness.charm.ldap._on_ldap_ready(mock_event) + _update_config.assert_called_once() + + peer_rel_id = harness.model.get_relation(PEER).id + app_databag = harness.get_relation_data(peer_rel_id, harness.charm.app) + assert "ldap_enabled" in app_databag + + +def test_on_ldap_unavailable(harness): + mock_event = MagicMock() + + with patch("charm.PostgresqlOperatorCharm.update_config") as _update_config: + harness.charm.ldap._on_ldap_unavailable(mock_event) + _update_config.assert_called_once() + + peer_rel_id = harness.model.get_relation(PEER).id + app_databag = harness.get_relation_data(peer_rel_id, harness.charm.app) + assert app_databag["ldap_enabled"] == "False" + + +def test_get_relation_data(harness): + mock_data = LdapProviderData( + auth_method="simple", + base_dn="dc=example,dc=net", + bind_dn="cn=serviceuser,dc=example,dc=net", + bind_password="password", + bind_password_secret=None, + starttls=False, + ldaps_urls=[], + urls=[], + ) + + mock_data_dict = mock_data.model_dump(exclude_none=True) + mock_data_dict["bind_password"] = mock_data.bind_password + + assert harness.charm.ldap.get_relation_data() is None + + with harness.hooks_disabled(): + ldap_relation_id = harness.add_relation("ldap", "glauth-k8s") + harness.update_relation_data(ldap_relation_id, "glauth-k8s", mock_data_dict) + + assert harness.charm.ldap.get_relation_data() == mock_data From 327d491f39723ce76e4875d46f673412ac7be093 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Tue, 22 Apr 2025 09:17:59 +0200 Subject: [PATCH 17/21] [DPE-6345] LDAP IV: Define snap service (#838) --- src/charm.py | 95 +++++++++++++++++++++++++++++++--------- src/utils.py | 19 ++++++++ tests/unit/test_charm.py | 17 +++++++ tests/unit/test_utils.py | 20 ++++++++- 4 files changed, 130 insertions(+), 21 deletions(-) diff --git a/src/charm.py b/src/charm.py index d2c2b1ba5a..af3bdac712 100755 --- a/src/charm.py +++ b/src/charm.py @@ -16,6 +16,7 @@ from datetime import datetime from pathlib import Path from typing import Literal, get_args +from urllib.parse import urlparse import psycopg2 from charms.data_platform_libs.v0.data_interfaces import DataPeerData, DataPeerUnitData @@ -73,6 +74,7 @@ APP_SCOPE, BACKUP_USER, DATABASE_DEFAULT_NAME, + DATABASE_PORT, METRICS_PORT, MONITORING_PASSWORD_KEY, MONITORING_SNAP_SERVICE, @@ -110,7 +112,7 @@ from relations.postgresql_provider import PostgreSQLProvider from rotate_logs import RotateLogs from upgrade import PostgreSQLUpgrade, get_postgresql_dependencies_model -from utils import new_password +from utils import 
new_password, snap_refreshed logger = logging.getLogger(__name__) @@ -1316,29 +1318,86 @@ def _restart_services_after_reboot(self): self._patroni.start_patroni() self.backup.start_stop_pgbackrest_service() - def _setup_exporter(self) -> None: - """Set up postgresql_exporter options.""" - cache = snap.SnapCache() - postgres_snap = cache[POSTGRESQL_SNAP_NAME] + def _restart_metrics_service(self, postgres_snap: snap.Snap) -> None: + """Restart the monitoring service if the password was rotated.""" + try: + snap_password = postgres_snap.get("exporter.password") + except snap.SnapError: + logger.warning("Early exit: Trying to reset metrics service with no configuration set") + return None - if postgres_snap.revision != next( - filter(lambda snap_package: snap_package[0] == POSTGRESQL_SNAP_NAME, SNAP_PACKAGES) - )[1]["revision"].get(platform.machine()): - logger.debug( - "Early exit _setup_exporter: snap was not refreshed to the right version yet" - ) + if snap_password != self.get_secret(APP_SCOPE, MONITORING_PASSWORD_KEY): + self._setup_exporter(postgres_snap) + + def _restart_ldap_sync_service(self, postgres_snap: snap.Snap) -> None: + """Restart the LDAP sync service in case any configuration changed.""" + if not self._patroni.member_started: + logger.debug("Restart LDAP sync early exit: Patroni has not started yet") return + sync_service = postgres_snap.services["ldap-sync"] + + if not self.is_primary and sync_service["active"]: + logger.debug("Stopping LDAP sync service. It must only run in the primary") + postgres_snap.stop(services=["ldap-sync"]) + + if self.is_primary and not self.is_ldap_enabled: + logger.debug("Stopping LDAP sync service") + postgres_snap.stop(services=["ldap-sync"]) + return + + if self.is_primary and self.is_ldap_enabled: + self._setup_ldap_sync(postgres_snap) + + def _setup_exporter(self, postgres_snap: snap.Snap | None = None) -> None: + """Set up postgresql_exporter options.""" + if postgres_snap is None: + cache = snap.SnapCache() + postgres_snap = cache[POSTGRESQL_SNAP_NAME] + postgres_snap.set({ "exporter.user": MONITORING_USER, "exporter.password": self.get_secret(APP_SCOPE, MONITORING_PASSWORD_KEY), }) + if postgres_snap.services[MONITORING_SNAP_SERVICE]["active"] is False: postgres_snap.start(services=[MONITORING_SNAP_SERVICE], enable=True) else: postgres_snap.restart(services=[MONITORING_SNAP_SERVICE]) + self.unit_peer_data.update({"exporter-started": "True"}) + def _setup_ldap_sync(self, postgres_snap: snap.Snap | None = None) -> None: + """Set up postgresql_ldap_sync options.""" + if postgres_snap is None: + cache = snap.SnapCache() + postgres_snap = cache[POSTGRESQL_SNAP_NAME] + + ldap_params = self.get_ldap_parameters() + ldap_url = urlparse(ldap_params["ldapurl"]) + ldap_host = ldap_url.hostname + ldap_port = ldap_url.port + + ldap_base_dn = ldap_params["ldapbasedn"] + ldap_bind_username = ldap_params["ldapbinddn"] + ldap_bind_password = ldap_params["ldapbindpasswd"] + + postgres_snap.set({ + "ldap-sync.ldap_host": ldap_host, + "ldap-sync.ldap_port": ldap_port, + "ldap-sync.ldap_base_dn": ldap_base_dn, + "ldap-sync.ldap_bind_username": ldap_bind_username, + "ldap-sync.ldap_bind_password": ldap_bind_password, + "ldap-sync.postgres_host": "127.0.0.1", + "ldap-sync.postgres_port": DATABASE_PORT, + "ldap-sync.postgres_database": DATABASE_DEFAULT_NAME, + "ldap-sync.postgres_username": USER, + "ldap-sync.postgres_password": self._get_password(), + }) + + logger.debug("Starting LDAP sync service") + postgres_snap.restart(services=["ldap-sync"]) + def 
_start_primary(self, event: StartEvent) -> None: """Bootstrap the cluster.""" # Set some information needed by Patroni to bootstrap the cluster. @@ -1986,19 +2045,15 @@ def update_config(self, is_creating_backup: bool = False, no_peers: bool = False self._handle_postgresql_restart_need(enable_tls) - # Restart the monitoring service if the password was rotated cache = snap.SnapCache() postgres_snap = cache[POSTGRESQL_SNAP_NAME] - try: - snap_password = postgres_snap.get("exporter.password") - except snap.SnapError: - logger.warning( - "Early exit update_config: Trying to reset metrics service with no configuration set" - ) + if not snap_refreshed(postgres_snap.revision): + logger.debug("Early exit: snap was not refreshed to the right version yet") return True - if snap_password != self.get_secret(APP_SCOPE, MONITORING_PASSWORD_KEY): - self._setup_exporter() + + self._restart_metrics_service(postgres_snap) + self._restart_ldap_sync_service(postgres_snap) return True diff --git a/src/utils.py b/src/utils.py index b3f0e1abad..4f07ec87fe 100644 --- a/src/utils.py +++ b/src/utils.py @@ -3,9 +3,15 @@ """A collection of utility functions that are used in the charm.""" +import platform import secrets import string +from constants import ( + POSTGRESQL_SNAP_NAME, + SNAP_PACKAGES, +) + def new_password() -> str: """Generate a random password string. @@ -16,3 +22,16 @@ def new_password() -> str: choices = string.ascii_letters + string.digits password = "".join([secrets.choice(choices) for i in range(16)]) return password + + +def snap_refreshed(target_rev: str) -> bool: + """Whether the snap was refreshed to the target version.""" + arch = platform.machine() + + for snap_package in SNAP_PACKAGES: + snap_name = snap_package[0] + snap_revs = snap_package[1]["revision"] + if snap_name == POSTGRESQL_SNAP_NAME and target_rev != snap_revs.get(arch): + return False + + return True diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 025ab68e01..ee773ed31f 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -1270,10 +1270,17 @@ def test_restart(harness): def test_update_config(harness): with ( patch("subprocess.check_output", return_value=b"C"), + patch("charm.snap_refreshed", return_value=True), patch("charm.snap.SnapCache"), patch( "charm.PostgresqlOperatorCharm._handle_postgresql_restart_need" ) as _handle_postgresql_restart_need, + patch( + "charm.PostgresqlOperatorCharm._restart_metrics_service" + ) as _restart_metrics_service, + patch( + "charm.PostgresqlOperatorCharm._restart_ldap_sync_service" + ) as _restart_ldap_sync_service, patch("charm.Patroni.bulk_update_parameters_controller_by_patroni"), patch("charm.Patroni.member_started", new_callable=PropertyMock) as _member_started, patch( @@ -1313,10 +1320,14 @@ def test_update_config(harness): no_peers=False, ) _handle_postgresql_restart_need.assert_called_once_with(False) + _restart_ldap_sync_service.assert_called_once() + _restart_metrics_service.assert_called_once() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) # Test with TLS files available. _handle_postgresql_restart_need.reset_mock() + _restart_ldap_sync_service.reset_mock() + _restart_metrics_service.reset_mock() harness.update_relation_data( rel_id, harness.charm.unit.name, {"tls": ""} ) # Mock some data in the relation to test that it change. 
@@ -1338,6 +1349,8 @@ def test_update_config(harness): no_peers=False, ) _handle_postgresql_restart_need.assert_called_once() + _restart_ldap_sync_service.assert_called_once() + _restart_metrics_service.assert_called_once() assert "tls" not in harness.get_relation_data( rel_id, harness.charm.unit.name ) # The "tls" flag is set in handle_postgresql_restart_need. @@ -1347,6 +1360,8 @@ def test_update_config(harness): rel_id, harness.charm.unit.name, {"tls": ""} ) # Mock some data in the relation to test that it change. _handle_postgresql_restart_need.reset_mock() + _restart_ldap_sync_service.reset_mock() + _restart_metrics_service.reset_mock() harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() assert harness.get_relation_data(rel_id, harness.charm.unit.name)["tls"] == "enabled" @@ -1357,6 +1372,8 @@ def test_update_config(harness): ) # Mock some data in the relation to test that it doesn't change. harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() + _restart_ldap_sync_service.assert_not_called() + _restart_metrics_service.assert_not_called() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 6da8995d02..56b46a01ef 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -2,8 +2,10 @@ # See LICENSE file for licensing details. import re +from unittest.mock import patch -from utils import new_password +from constants import POSTGRESQL_SNAP_NAME +from utils import new_password, snap_refreshed def test_new_password(): @@ -16,3 +18,19 @@ def test_new_password(): second_password = new_password() assert re.fullmatch("[a-zA-Z0-9\b]{16}$", second_password) is not None assert second_password != first_password + + +def test_snap_refreshed(): + with patch( + "utils.SNAP_PACKAGES", + [(POSTGRESQL_SNAP_NAME, {"revision": {"aarch64": "100", "x86_64": "100"}})], + ): + assert snap_refreshed("100") is True + assert snap_refreshed("200") is False + + with patch( + "utils.SNAP_PACKAGES", + [(POSTGRESQL_SNAP_NAME, {"revision": {}})], + ): + assert snap_refreshed("100") is False + assert snap_refreshed("200") is False From 138095b5be1af7b7138028973019d850d94618c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Tue, 22 Apr 2025 14:51:54 +0200 Subject: [PATCH 18/21] [DPE-6345] LDAP V: Define mapping option (#849) --- config.yaml | 6 ++ lib/charms/postgresql_k8s/v0/postgresql.py | 69 +++++++++++++++++++++- src/charm.py | 7 +++ src/config.py | 1 + tests/unit/test_charm.py | 12 ++++ 5 files changed, 94 insertions(+), 1 deletion(-) diff --git a/config.yaml b/config.yaml index f509a0eca1..b20e0bf2ed 100644 --- a/config.yaml +++ b/config.yaml @@ -69,6 +69,12 @@ options: Enable synchronized sequential scans. type: boolean default: true + ldap_map: + description: | + List of mapped LDAP group names to PostgreSQL group names, separated by commas. + The map is used to assign LDAP synchronized users to PostgreSQL authorization groups. + Example: =,= + type: string ldap_search_filter: description: | The LDAP search filter to match users with. 
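For context on the ldap_map format introduced above: the value is a comma-separated list of ldap_group=postgresql_group pairs, which the charm converts into (LDAP group, PostgreSQL group) tuples via build_postgresql_group_map in the library change that follows. A minimal standalone sketch of that parsing, with hypothetical group names (the real helper in the next hunk additionally skips reserved PostgreSQL access groups):

    def parse_ldap_map(group_map: str) -> list[tuple[str, str]]:
        """Parse comma-separated 'ldap_group=postgresql_group' pairs (sketch only)."""
        pairs = []
        for mapping in (part.strip() for part in group_map.split(",")):
            pieces = mapping.split("=")
            if len(pieces) != 2:
                raise ValueError("The group-map must contain value pairs split by commas")
            pairs.append((pieces[0], pieces[1]))
        return pairs

    # Hypothetical example:
    # parse_ldap_map("dba_team=charmed_dba,dev_team=charmed_read")
    # -> [("dba_team", "charmed_dba"), ("dev_team", "charmed_read")]
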
diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index f5c4d0e02b..b7eb90908b 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -35,7 +35,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 47 +LIBPATCH = 49 # Groups to distinguish HBA access ACCESS_GROUP_IDENTITY = "identity_access" @@ -776,6 +776,42 @@ def is_restart_pending(self) -> bool: if connection: connection.close() + @staticmethod + def build_postgresql_group_map(group_map: Optional[str]) -> List[Tuple]: + """Build the PostgreSQL authorization group-map. + + Args: + group_map: serialized group-map with the following format: + =, + =, + ... + + Returns: + List of LDAP group to PostgreSQL group tuples. + """ + if group_map is None: + return [] + + group_mappings = group_map.split(",") + group_mappings = (mapping.strip() for mapping in group_mappings) + group_map_list = [] + + for mapping in group_mappings: + mapping_parts = mapping.split("=") + if len(mapping_parts) != 2: + raise ValueError("The group-map must contain value pairs split by commas") + + ldap_group = mapping_parts[0] + psql_group = mapping_parts[1] + + if psql_group in [*ACCESS_GROUPS, PERMISSIONS_GROUP_ADMIN]: + logger.warning(f"Tried to assign LDAP users to forbidden group: {psql_group}") + continue + + group_map_list.append((ldap_group, psql_group)) + + return group_map_list + @staticmethod def build_postgresql_parameters( config_options: dict, available_memory: int, limit_memory: Optional[int] = None @@ -855,3 +891,34 @@ def validate_date_style(self, date_style: str) -> bool: return True except psycopg2.Error: return False + + def validate_group_map(self, group_map: Optional[str]) -> bool: + """Validate the PostgreSQL authorization group-map. + + Args: + group_map: serialized group-map with the following format: + =, + =, + ... + + Returns: + Whether the group-map is valid. 
+ """ + if group_map is None: + return True + + try: + group_map = self.build_postgresql_group_map(group_map) + except ValueError: + return False + + for _, psql_group in group_map: + with self._connect_to_database() as connection, connection.cursor() as cursor: + query = SQL("SELECT TRUE FROM pg_roles WHERE rolname={};") + query = query.format(Literal(psql_group)) + cursor.execute(query) + + if cursor.fetchone() is None: + return False + + return True diff --git a/src/charm.py b/src/charm.py index af3bdac712..bc6fd78a3c 100755 --- a/src/charm.py +++ b/src/charm.py @@ -24,6 +24,7 @@ from charms.grafana_agent.v0.cos_agent import COSAgentProvider, charm_tracing_config from charms.operator_libs_linux.v2 import snap from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_IDENTITY, ACCESS_GROUPS, REQUIRED_PLUGINS, PostgreSQL, @@ -1381,6 +1382,7 @@ def _setup_ldap_sync(self, postgres_snap: snap.Snap | None = None) -> None: ldap_base_dn = ldap_params["ldapbasedn"] ldap_bind_username = ldap_params["ldapbinddn"] ldap_bind_password = ldap_params["ldapbindpasswd"] + ldap_group_mappings = self.postgresql.build_postgresql_group_map(self.config.ldap_map) postgres_snap.set({ "ldap-sync.ldap_host": ldap_host, @@ -1388,6 +1390,8 @@ def _setup_ldap_sync(self, postgres_snap: snap.Snap | None = None) -> None: "ldap-sync.ldap_base_dn": ldap_base_dn, "ldap-sync.ldap_bind_username": ldap_bind_username, "ldap-sync.ldap_bind_password": ldap_bind_password, + "ldap-sync.ldap_group_identity": json.dumps(ACCESS_GROUP_IDENTITY), + "ldap-sync.ldap_group_mappings": json.dumps(ldap_group_mappings), "ldap-sync.postgres_host": "127.0.0.1", "ldap-sync.postgres_port": DATABASE_PORT, "ldap-sync.postgres_database": DATABASE_DEFAULT_NAME, @@ -2067,6 +2071,9 @@ def _validate_config_options(self) -> None: "instance_default_text_search_config config option has an invalid value" ) + if not self.postgresql.validate_group_map(self.config.ldap_map): + raise ValueError("ldap_map config option has an invalid value") + if not self.postgresql.validate_date_style(self.config.request_date_style): raise ValueError("request_date_style config option has an invalid value") diff --git a/src/config.py b/src/config.py index 610723291b..16dda4d06f 100644 --- a/src/config.py +++ b/src/config.py @@ -29,6 +29,7 @@ class CharmConfig(BaseConfigModel): instance_max_locks_per_transaction: int | None instance_password_encryption: str | None instance_synchronize_seqscans: bool | None + ldap_map: str | None ldap_search_filter: str | None logging_client_min_messages: str | None logging_log_connections: bool | None diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index ee773ed31f..3ff54bccce 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -1445,6 +1445,7 @@ def test_validate_config_options(harness): ): _charm_lib.return_value.get_postgresql_text_search_configs.return_value = [] _charm_lib.return_value.validate_date_style.return_value = False + _charm_lib.return_value.validate_group_map.return_value = False _charm_lib.return_value.get_postgresql_timezones.return_value = [] # Test instance_default_text_search_config exception @@ -1463,6 +1464,17 @@ def test_validate_config_options(harness): "pg_catalog.test" ] + # Test ldap_map exception + with harness.hooks_disabled(): + harness.update_config({"ldap_map": "ldap_group="}) + + with pytest.raises(ValueError) as e: + harness.charm._validate_config_options() + assert str(e.value) == "ldap_map config option has an invalid value" + + 
_charm_lib.return_value.validate_group_map.assert_called_once_with("ldap_group=") + _charm_lib.return_value.validate_group_map.return_value = True + # Test request_date_style exception with harness.hooks_disabled(): harness.update_config({"request_date_style": "ISO, TEST"}) From 6434998e6646ef69e7c301070a8c927ba073ccd1 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Tue, 22 Apr 2025 22:41:54 +0300 Subject: [PATCH 19/21] [MISC] Disable network cut tests on arm (#844) * Disable network cut tests on arm * Back to LXD 5 --- tests/integration/ha_tests/test_self_healing.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integration/ha_tests/test_self_healing.py b/tests/integration/ha_tests/test_self_healing.py index f3ddc6fe88..a5b4faddac 100644 --- a/tests/integration/ha_tests/test_self_healing.py +++ b/tests/integration/ha_tests/test_self_healing.py @@ -9,6 +9,7 @@ from pytest_operator.plugin import OpsTest from tenacity import Retrying, stop_after_delay, wait_fixed +from .. import markers from ..helpers import ( CHARM_BASE, db_connect, @@ -380,6 +381,7 @@ async def test_forceful_restart_without_data_and_transaction_logs( @pytest.mark.abort_on_fail +@markers.amd64_only async def test_network_cut(ops_test: OpsTest, continuous_writes, primary_start_timeout): """Completely cut and restore network.""" # Locate primary unit. @@ -468,6 +470,7 @@ async def test_network_cut(ops_test: OpsTest, continuous_writes, primary_start_t @pytest.mark.abort_on_fail +@markers.amd64_only async def test_network_cut_without_ip_change( ops_test: OpsTest, continuous_writes, primary_start_timeout ): From f8ed45af59320b50f6ffe6b66e6ac30e49aa1673 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Wed, 23 Apr 2025 05:52:41 +0300 Subject: [PATCH 20/21] [DPE-6815] disable pgaudit during extensions changes (#842) * disable pgaudit during extensions changes * Bump libs --- .../data_platform_libs/v0/data_interfaces.py | 1068 +++++++++++------ lib/charms/postgresql_k8s/v0/postgresql.py | 8 +- 2 files changed, 716 insertions(+), 360 deletions(-) diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index 7fff3c4751..c0ddec58c9 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -331,7 +331,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 42 +LIBPATCH = 46 PYDEPS = ["ops>=2.0.0"] @@ -351,6 +351,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): PROV_SECRET_PREFIX = "secret-" +PROV_SECRET_FIELDS = "provided-secrets" REQ_SECRET_FIELDS = "requested-secrets" GROUP_MAPPING_FIELD = "secret_group_mapping" GROUP_SEPARATOR = "@" @@ -585,6 +586,7 @@ class SecretGroupsAggregate(str): def __init__(self): self.USER = SecretGroup("user") self.TLS = SecretGroup("tls") + self.MTLS = SecretGroup("mtls") self.EXTRA = SecretGroup("extra") def __setattr__(self, name, value): @@ -963,8 +965,11 @@ class Data(ABC): "read-only-uris": SECRET_GROUPS.USER, "tls": SECRET_GROUPS.TLS, "tls-ca": SECRET_GROUPS.TLS, + "mtls-cert": SECRET_GROUPS.MTLS, } + SECRET_FIELDS = [] + def __init__( self, model: Model, @@ -978,15 +983,13 @@ def __init__( self.component = self.local_app if self.SCOPE == Scope.APP else self.local_unit self.secrets = 
SecretCache(self._model, self.component) self.data_component = None + self._local_secret_fields = [] + self._remote_secret_fields = list(self.SECRET_FIELDS) @property def relations(self) -> List[Relation]: """The list of Relation instances associated with this relation_name.""" - return [ - relation - for relation in self._model.relations[self.relation_name] - if self._is_relation_active(relation) - ] + return self._model.relations[self.relation_name] @property def secrets_enabled(self): @@ -1000,38 +1003,250 @@ def secret_label_map(self): """Exposing secret-label map via a property -- could be overridden in descendants!""" return self.SECRET_LABEL_MAP + @property + def local_secret_fields(self) -> Optional[List[str]]: + """Local access to secrets field, in case they are being used.""" + if self.secrets_enabled: + return self._local_secret_fields + + @property + def remote_secret_fields(self) -> Optional[List[str]]: + """Local access to secrets field, in case they are being used.""" + if self.secrets_enabled: + return self._remote_secret_fields + + @property + def my_secret_groups(self) -> Optional[List[SecretGroup]]: + """Local access to secrets field, in case they are being used.""" + if self.secrets_enabled: + return [ + self.SECRET_LABEL_MAP[field] + for field in self._local_secret_fields + if field in self.SECRET_LABEL_MAP + ] + # Mandatory overrides for internal/helper methods - @abstractmethod + @juju_secrets_only def _get_relation_secret( self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None ) -> Optional[CachedSecret]: """Retrieve a Juju Secret that's been stored in the relation databag.""" - raise NotImplementedError + if not relation_name: + relation_name = self.relation_name + + label = self._generate_secret_label(relation_name, relation_id, group_mapping) + if secret := self.secrets.get(label): + return secret + + relation = self._model.get_relation(relation_name, relation_id) + if not relation: + return + + if secret_uri := self.get_secret_uri(relation, group_mapping): + return self.secrets.get(label, secret_uri) + # Mandatory overrides for requirer and peer, implemented for Provider + # Requirer uses local component and switched keys + # _local_secret_fields -> PROV_SECRET_FIELDS + # _remote_secret_fields -> REQ_SECRET_FIELDS + # provider uses remote component and + # _local_secret_fields -> REQ_SECRET_FIELDS + # _remote_secret_fields -> PROV_SECRET_FIELDS @abstractmethod + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + raise NotImplementedError + def _fetch_specific_relation_data( self, relation: Relation, fields: Optional[List[str]] ) -> Dict[str, str]: - """Fetch data available (directily or indirectly -- i.e. secrets) from the relation.""" - raise NotImplementedError + """Fetch data available (directily or indirectly -- i.e. secrets) from the relation (remote app data).""" + if not relation.app: + return {} + self._load_secrets_from_databag(relation) + return self._fetch_relation_data_with_secrets( + relation.app, self.remote_secret_fields, relation, fields + ) - @abstractmethod def _fetch_my_specific_relation_data( self, relation: Relation, fields: Optional[List[str]] - ) -> Dict[str, str]: - """Fetch data available (directily or indirectly -- i.e. 
secrets) from the relation for owner/this_app.""" - raise NotImplementedError + ) -> dict: + """Fetch our own relation data.""" + # load secrets + self._load_secrets_from_databag(relation) + return self._fetch_relation_data_with_secrets( + self.local_app, + self.local_secret_fields, + relation, + fields, + ) - @abstractmethod def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: - """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" - raise NotImplementedError + """Set values for fields not caring whether it's a secret or not.""" + self._load_secrets_from_databag(relation) + + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + list(data), + self._add_or_update_relation_secrets, + data=data, + ) + + normal_content = {k: v for k, v in data.items() if k in normal_fields} + self._update_relation_data_without_secrets(self.local_app, relation, normal_content) + + def _add_or_update_relation_secrets( + self, + relation: Relation, + group: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + uri_to_databag=True, + ) -> bool: + """Update contents for Secret group. If the Secret doesn't exist, create it.""" + if self._get_relation_secret(relation.id, group): + return self._update_relation_secret(relation, group, secret_fields, data) + + return self._add_relation_secret(relation, group, secret_fields, data, uri_to_databag) + + @juju_secrets_only + def _add_relation_secret( + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + uri_to_databag=True, + ) -> bool: + """Add a new Juju Secret that will be registered in the relation databag.""" + if uri_to_databag and self.get_secret_uri(relation, group_mapping): + logging.error("Secret for relation %s already exists, not adding again", relation.id) + return False + + content = self._content_for_secret_group(data, secret_fields, group_mapping) + + label = self._generate_secret_label(self.relation_name, relation.id, group_mapping) + secret = self.secrets.add(label, content, relation) + + if uri_to_databag: + # According to lint we may not have a Secret ID + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + self.set_secret_uri(relation, group_mapping, secret.meta.id) + + # Return the content that was added + return True + + @juju_secrets_only + def _update_relation_secret( + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + ) -> bool: + """Update the contents of an existing Juju Secret, referred in the relation databag.""" + secret = self._get_relation_secret(relation.id, group_mapping) + + if not secret: + logging.error("Can't update secret for relation %s", relation.id) + return False + + content = self._content_for_secret_group(data, secret_fields, group_mapping) + + old_content = secret.get_content() + full_content = copy.deepcopy(old_content) + full_content.update(content) + secret.set_content(full_content) + + # Return True on success + return True + + @juju_secrets_only + def _delete_relation_secret( + self, relation: Relation, group: SecretGroup, secret_fields: List[str], fields: List[str] + ) -> bool: + """Update the contents of an existing Juju Secret, referred in the relation databag.""" + secret = self._get_relation_secret(relation.id, group) + + if not secret: + logging.error("Can't delete 
secret for relation %s", str(relation.id)) + return False + + old_content = secret.get_content() + new_content = copy.deepcopy(old_content) + for field in fields: + try: + new_content.pop(field) + except KeyError: + logging.debug( + "Non-existing secret was attempted to be removed %s, %s", + str(relation.id), + str(field), + ) + return False + + # Remove secret from the relation if it's fully gone + if not new_content: + field = self._generate_secret_field_name(group) + try: + relation.data[self.component].pop(field) + except KeyError: + pass + label = self._generate_secret_label(self.relation_name, relation.id, group) + self.secrets.remove(label) + else: + secret.set_content(new_content) + + # Return the content that was removed + return True - @abstractmethod def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" - raise NotImplementedError + if relation.app: + self._load_secrets_from_databag(relation) + + _, normal_fields = self._process_secret_fields( + relation, self.local_secret_fields, fields, self._delete_relation_secret, fields=fields + ) + self._delete_relation_data_without_secrets(self.local_app, relation, list(normal_fields)) + + def _register_secret_to_relation( + self, relation_name: str, relation_id: int, secret_id: str, group: SecretGroup + ): + """Fetch secrets and apply local label on them. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. + """ + label = self._generate_secret_label(relation_name, relation_id, group) + + # Fetching the Secret's meta information ensuring that it's locally getting registered with + CachedSecret(self._model, self.component, label, secret_id).meta + + def _register_secrets_to_relation(self, relation: Relation, params_name_list: List[str]): + """Make sure that secrets of the provided list are locally 'registered' from the databag. 
+ + More on 'locally registered' magic is described in _register_secret_to_relation() method + """ + if not relation.app: + return + + for group in SECRET_GROUPS.groups(): + secret_field = self._generate_secret_field_name(group) + if secret_field in params_name_list and ( + secret_uri := self.get_secret_uri(relation, group) + ): + self._register_secret_to_relation(relation.name, relation.id, secret_uri, group) # Optional overrides @@ -1052,15 +1267,6 @@ def _legacy_apply_on_delete(self, fields: List[str]) -> None: # Internal helper methods - @staticmethod - def _is_relation_active(relation: Relation): - """Whether the relation is active based on contained data.""" - try: - _ = repr(relation.data) - return True - except (RuntimeError, ModelError): - return False - @staticmethod def _is_secret_field(field: str) -> bool: """Is the field in question a secret reference (URI) field or not?""" @@ -1178,7 +1384,6 @@ def _process_secret_fields( and (self.local_unit == self._model.unit and self.local_unit.is_leader()) and set(req_secret_fields) & set(relation.data[self.component]) ) - normal_fields = set(impacted_rel_fields) if req_secret_fields and self.secrets_enabled and not fallback_to_databag: normal_fields = normal_fields - set(req_secret_fields) @@ -1305,7 +1510,14 @@ def get_relation(self, relation_name, relation_id) -> Relation: def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: """Get the secret URI for the corresponding group.""" secret_field = self._generate_secret_field_name(group) - return relation.data[self.component].get(secret_field) + # if the secret is not managed by this component, + # we need to fetch it from the other side + + # Fix for the linter + if self.my_secret_groups is None: + raise DataInterfacesError("Secrets are not enabled for this component") + component = self.component if group in self.my_secret_groups else relation.app + return relation.data[component].get(secret_field) def set_secret_uri(self, relation: Relation, group: SecretGroup, secret_uri: str) -> None: """Set the secret URI for the corresponding group.""" @@ -1434,6 +1646,32 @@ def __init__(self, charm: CharmBase, relation_data: Data, unique_key: str = ""): self._on_relation_changed_event, ) + self.framework.observe( + self.charm.on[relation_data.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + def _diff(self, event: RelationChangedEvent) -> Diff: """Retrieves the diff of the data in the relation changed databag. 
@@ -1446,11 +1684,6 @@ def _diff(self, event: RelationChangedEvent) -> Diff: """ return diff(event, self.relation_data.data_component) - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - # Base ProviderData and RequiresData @@ -1467,165 +1700,11 @@ def __init__( ) -> None: super().__init__(model, relation_name) self.data_component = self.local_app - - # Private methods handling secrets - - @juju_secrets_only - def _add_relation_secret( - self, - relation: Relation, - group_mapping: SecretGroup, - secret_fields: Set[str], - data: Dict[str, str], - uri_to_databag=True, - ) -> bool: - """Add a new Juju Secret that will be registered in the relation databag.""" - if uri_to_databag and self.get_secret_uri(relation, group_mapping): - logging.error("Secret for relation %s already exists, not adding again", relation.id) - return False - - content = self._content_for_secret_group(data, secret_fields, group_mapping) - - label = self._generate_secret_label(self.relation_name, relation.id, group_mapping) - secret = self.secrets.add(label, content, relation) - - # According to lint we may not have a Secret ID - if uri_to_databag and secret.meta and secret.meta.id: - self.set_secret_uri(relation, group_mapping, secret.meta.id) - - # Return the content that was added - return True - - @juju_secrets_only - def _update_relation_secret( - self, - relation: Relation, - group_mapping: SecretGroup, - secret_fields: Set[str], - data: Dict[str, str], - ) -> bool: - """Update the contents of an existing Juju Secret, referred in the relation databag.""" - secret = self._get_relation_secret(relation.id, group_mapping) - - if not secret: - logging.error("Can't update secret for relation %s", relation.id) - return False - - content = self._content_for_secret_group(data, secret_fields, group_mapping) - - old_content = secret.get_content() - full_content = copy.deepcopy(old_content) - full_content.update(content) - secret.set_content(full_content) - - # Return True on success - return True - - def _add_or_update_relation_secrets( - self, - relation: Relation, - group: SecretGroup, - secret_fields: Set[str], - data: Dict[str, str], - uri_to_databag=True, - ) -> bool: - """Update contents for Secret group. 
If the Secret doesn't exist, create it.""" - if self._get_relation_secret(relation.id, group): - return self._update_relation_secret(relation, group, secret_fields, data) - else: - return self._add_relation_secret(relation, group, secret_fields, data, uri_to_databag) - - @juju_secrets_only - def _delete_relation_secret( - self, relation: Relation, group: SecretGroup, secret_fields: List[str], fields: List[str] - ) -> bool: - """Update the contents of an existing Juju Secret, referred in the relation databag.""" - secret = self._get_relation_secret(relation.id, group) - - if not secret: - logging.error("Can't delete secret for relation %s", str(relation.id)) - return False - - old_content = secret.get_content() - new_content = copy.deepcopy(old_content) - for field in fields: - try: - new_content.pop(field) - except KeyError: - logging.debug( - "Non-existing secret was attempted to be removed %s, %s", - str(relation.id), - str(field), - ) - return False - - # Remove secret from the relation if it's fully gone - if not new_content: - field = self._generate_secret_field_name(group) - try: - relation.data[self.component].pop(field) - except KeyError: - pass - label = self._generate_secret_label(self.relation_name, relation.id, group) - self.secrets.remove(label) - else: - secret.set_content(new_content) - - # Return the content that was removed - return True - - # Mandatory internal overrides - - @juju_secrets_only - def _get_relation_secret( - self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None - ) -> Optional[CachedSecret]: - """Retrieve a Juju Secret that's been stored in the relation databag.""" - if not relation_name: - relation_name = self.relation_name - - label = self._generate_secret_label(relation_name, relation_id, group_mapping) - if secret := self.secrets.get(label): - return secret - - relation = self._model.get_relation(relation_name, relation_id) - if not relation: - return - - if secret_uri := self.get_secret_uri(relation, group_mapping): - return self.secrets.get(label, secret_uri) - - def _fetch_specific_relation_data( - self, relation: Relation, fields: Optional[List[str]] - ) -> Dict[str, str]: - """Fetching relation data for Provider. - - NOTE: Since all secret fields are in the Provider side of the databag, we don't need to worry about that - """ - if not relation.app: - return {} - - return self._fetch_relation_data_without_secrets(relation.app, relation, fields) - - def _fetch_my_specific_relation_data( - self, relation: Relation, fields: Optional[List[str]] - ) -> dict: - """Fetching our own relation data.""" - secret_fields = None - if relation.app: - secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) - - return self._fetch_relation_data_with_secrets( - self.local_app, - secret_fields, - relation, - fields, - ) + self._local_secret_fields = [] + self._remote_secret_fields = list(self.SECRET_FIELDS) def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Set values for fields not caring whether it's a secret or not.""" - req_secret_fields = [] - keys = set(data.keys()) if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and ( keys - {"endpoints", "read-only-endpoints", "replset"} @@ -1633,31 +1712,7 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non raise PrematureDataAccessError( "Premature access to relation data, update is forbidden before the connection is initialized." 
) - - if relation.app: - req_secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) - - _, normal_fields = self._process_secret_fields( - relation, - req_secret_fields, - list(data), - self._add_or_update_relation_secrets, - data=data, - ) - - normal_content = {k: v for k, v in data.items() if k in normal_fields} - self._update_relation_data_without_secrets(self.local_app, relation, normal_content) - - def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: - """Delete fields from the Relation not caring whether it's a secret or not.""" - req_secret_fields = [] - if relation.app: - req_secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) - - _, normal_fields = self._process_secret_fields( - relation, req_secret_fields, fields, self._delete_relation_secret, fields=fields - ) - self._delete_relation_data_without_secrets(self.local_app, relation, list(normal_fields)) + super()._update_relation_data(relation, data) # Public methods - "native" @@ -1697,6 +1752,16 @@ def set_tls_ca(self, relation_id: int, tls_ca: str) -> None: fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, relation.app, PROV_SECRET_FIELDS) + if requested_secrets is not None: + self._local_secret_fields = requested_secrets + + if provided_secrets is not None: + self._remote_secret_fields = provided_secrets + class RequirerData(Data): """Requirer-side of the relation.""" @@ -1713,52 +1778,18 @@ def __init__( """Manager of base client relations.""" super().__init__(model, relation_name) self.extra_user_roles = extra_user_roles - self._secret_fields = list(self.SECRET_FIELDS) + self._remote_secret_fields = list(self.SECRET_FIELDS) + self._local_secret_fields = [ + field + for field in self.SECRET_LABEL_MAP.keys() + if field not in self._remote_secret_fields + ] if additional_secret_fields: - self._secret_fields += additional_secret_fields + self._remote_secret_fields += additional_secret_fields self.data_component = self.local_unit - @property - def secret_fields(self) -> Optional[List[str]]: - """Local access to secrets field, in case they are being used.""" - if self.secrets_enabled: - return self._secret_fields - # Internal helper functions - def _register_secret_to_relation( - self, relation_name: str, relation_id: int, secret_id: str, group: SecretGroup - ): - """Fetch secrets and apply local label on them. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. - """ - label = self._generate_secret_label(relation_name, relation_id, group) - - # Fetching the Secret's meta information ensuring that it's locally getting registered with - CachedSecret(self._model, self.component, label, secret_id).meta - - def _register_secrets_to_relation(self, relation: Relation, params_name_list: List[str]): - """Make sure that secrets of the provided list are locally 'registered' from the databag. 
- - More on 'locally registered' magic is described in _register_secret_to_relation() method - """ - if not relation.app: - return - - for group in SECRET_GROUPS.groups(): - secret_field = self._generate_secret_field_name(group) - if secret_field in params_name_list and ( - secret_uri := self.get_secret_uri(relation, group) - ): - self._register_secret_to_relation(relation.name, relation.id, secret_uri, group) - def _is_resource_created_for_relation(self, relation: Relation) -> bool: if not relation.app: return False @@ -1769,16 +1800,6 @@ def _is_resource_created_for_relation(self, relation: Relation) -> bool: return bool(data.get("username")) and bool(data.get("password")) # Public functions - - def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: - """Getting relation secret URI for the corresponding Secret Group.""" - secret_field = self._generate_secret_field_name(group) - return relation.data[relation.app].get(secret_field) - - def set_secret_uri(self, relation: Relation, group: SecretGroup, uri: str) -> None: - """Setting relation secret URI is not possible for a Requirer.""" - raise NotImplementedError("Requirer can not change the relation secret URI.") - def is_resource_created(self, relation_id: Optional[int] = None) -> bool: """Check if the resource has been created. @@ -1805,70 +1826,28 @@ def is_resource_created(self, relation_id: Optional[int] = None) -> bool: raise IndexError(f"relation id {relation_id} cannot be accessed") else: return ( - all( - self._is_resource_created_for_relation(relation) for relation in self.relations - ) - if self.relations - else False - ) - - # Mandatory internal overrides - - @juju_secrets_only - def _get_relation_secret( - self, relation_id: int, group: SecretGroup, relation_name: Optional[str] = None - ) -> Optional[CachedSecret]: - """Retrieve a Juju Secret that's been stored in the relation databag.""" - if not relation_name: - relation_name = self.relation_name - - label = self._generate_secret_label(relation_name, relation_id, group) - return self.secrets.get(label) - - def _fetch_specific_relation_data( - self, relation, fields: Optional[List[str]] = None - ) -> Dict[str, str]: - """Fetching Requirer data -- that may include secrets.""" - if not relation.app: - return {} - return self._fetch_relation_data_with_secrets( - relation.app, self.secret_fields, relation, fields - ) - - def _fetch_my_specific_relation_data(self, relation, fields: Optional[List[str]]) -> dict: - """Fetching our own relation data.""" - return self._fetch_relation_data_without_secrets(self.local_app, relation, fields) - - def _update_relation_data(self, relation: Relation, data: dict) -> None: - """Updates a set of key-value pairs in the relation. - - This function writes in the application data bag, therefore, - only the leader unit can call it. - - Args: - relation: the particular relation. - data: dict containing the key-value pairs - that should be updated in the relation. - """ - return self._update_relation_data_without_secrets(self.local_app, relation, data) - - def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: - """Deletes a set of fields from the relation. - - This function writes in the application data bag, therefore, - only the leader unit can call it. - - Args: - relation: the particular relation. - fields: list containing the field names that should be removed from the relation. 
- """ - return self._delete_relation_data_without_secrets(self.local_app, relation, fields) + all( + self._is_resource_created_for_relation(relation) for relation in self.relations + ) + if self.relations + else False + ) # Public functions -- inherited fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, self.local_unit, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, self.local_unit, PROV_SECRET_FIELDS) + if requested_secrets: + self._remote_secret_fields = requested_secrets + + if provided_secrets: + self._local_secret_fields = provided_secrets + class RequirerEventHandlers(EventHandlers): """Requires-side of the relation.""" @@ -1877,15 +1856,6 @@ def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: st """Manager of base client relations.""" super().__init__(charm, relation_data, unique_key) - self.framework.observe( - self.charm.on[relation_data.relation_name].relation_created, - self._on_relation_created_event, - ) - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) - # Event handlers def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: @@ -1893,18 +1863,56 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: if not self.relation_data.local_unit.is_leader(): return - if self.relation_data.secret_fields: # pyright: ignore [reportAttributeAccessIssue] + if self.relation_data.remote_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + REQ_SECRET_FIELDS, + self.relation_data.remote_secret_fields, + ) + set_encoded_field( event.relation, - self.relation_data.component, + self.relation_data.local_unit, REQ_SECRET_FIELDS, - self.relation_data.secret_fields, # pyright: ignore [reportAttributeAccessIssue] + self.relation_data.remote_secret_fields, ) - @abstractmethod - def _on_secret_changed_event(self, event: RelationChangedEvent) -> None: + if self.relation_data.local_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + set_encoded_field( + event.relation, + self.relation_data.local_unit, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + + +class ProviderEventHandlers(EventHandlers): + """Provider-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: ProviderData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + # Event handlers + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation data has changed.""" - raise NotImplementedError + requested_secrets = get_encoded_list(event.relation, event.relation.app, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(event.relation, event.relation.app, PROV_SECRET_FIELDS) + if requested_secrets is not None: + self.relation_data._local_secret_fields = requested_secrets + + if provided_secrets is not None: + self.relation_data._remote_secret_fields = provided_secrets ################################################################################ @@ -1955,7 +1963,7 @@ def __init__( 
secret_group = SECRET_GROUPS.get_group(group) internal_field = self._field_to_internal_name(field, secret_group) self._secret_label_map.setdefault(group, []).append(internal_field) - self._secret_fields.append(internal_field) + self._remote_secret_fields.append(internal_field) @property def scope(self) -> Optional[Scope]: @@ -1973,10 +1981,10 @@ def secret_label_map(self) -> Dict[str, str]: @property def static_secret_fields(self) -> List[str]: """Re-definition of the property in a way that dynamically extended list is retrieved.""" - return self._secret_fields + return self._remote_secret_fields @property - def secret_fields(self) -> List[str]: + def local_secret_fields(self) -> List[str]: """Re-definition of the property in a way that dynamically extended list is retrieved.""" return ( self.static_secret_fields if self.static_secret_fields else self.current_secret_fields @@ -1994,7 +2002,11 @@ def current_secret_fields(self) -> List[str]: relation = self._model.relations[self.relation_name][0] fields = [] - ignores = [SECRET_GROUPS.get_group("user"), SECRET_GROUPS.get_group("tls")] + ignores = [ + SECRET_GROUPS.get_group("user"), + SECRET_GROUPS.get_group("tls"), + SECRET_GROUPS.get_group("mtls"), + ] for group in SECRET_GROUPS.groups(): if group in ignores: continue @@ -2103,11 +2115,11 @@ def _content_for_secret_group( ) -> Dict[str, str]: """Select : pairs from input, that belong to this particular Secret group.""" if group_mapping == SECRET_GROUPS.EXTRA: - return {k: v for k, v in content.items() if k in self.secret_fields} + return {k: v for k, v in content.items() if k in self.local_secret_fields} return { self._internal_name_to_field(k)[0]: v for k, v in content.items() - if k in self.secret_fields + if k in self.local_secret_fields } def valid_field_pattern(self, field: str, full_field: str) -> bool: @@ -2122,6 +2134,16 @@ def valid_field_pattern(self, field: str, full_field: str) -> bool: return False return True + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, self.component, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, self.component, PROV_SECRET_FIELDS) + if requested_secrets: + self._remote_secret_fields = requested_secrets + + if provided_secrets: + self._local_secret_fields = provided_secrets + ########################################################################## # Backwards compatibility / Upgrades ########################################################################## @@ -2177,7 +2199,7 @@ def _legacy_compat_check_deleted_label(self, relation, fields) -> None: if current_data is not None: # Check if the secret we wanna delete actually exists # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found') - if non_existent := (set(fields) & set(self.secret_fields)) - set( + if non_existent := (set(fields) & set(self.local_secret_fields)) - set( current_data.get(relation.id, []) ): logger.debug( @@ -2227,10 +2249,10 @@ def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[st Practically what happens here is to remove stuff from the databag that is to be stored in secrets. 
""" - if not self.secret_fields: + if not self.local_secret_fields: return - secret_fields_passed = set(self.secret_fields) & set(fields) + secret_fields_passed = set(self.local_secret_fields) & set(fields) for field in secret_fields_passed: if self._fetch_relation_data_without_secrets(self.component, relation, [field]): self._delete_relation_data_without_secrets(self.component, relation, [field]) @@ -2342,15 +2364,17 @@ def _fetch_my_specific_relation_data( ) -> Dict[str, str]: """Fetch data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" return self._fetch_relation_data_with_secrets( - self.component, self.secret_fields, relation, fields + self.component, self.local_secret_fields, relation, fields ) @either_static_or_dynamic_secrets def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + self._load_secrets_from_databag(relation) + _, normal_fields = self._process_secret_fields( relation, - self.secret_fields, + self.local_secret_fields, list(data), self._add_or_update_relation_secrets, data=data, @@ -2363,17 +2387,22 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non @either_static_or_dynamic_secrets def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" - if self.secret_fields and self.deleted_label: + self._load_secrets_from_databag(relation) + if self.local_secret_fields and self.deleted_label: _, normal_fields = self._process_secret_fields( relation, - self.secret_fields, + self.local_secret_fields, fields, self._update_relation_secret, data=dict.fromkeys(fields, self.deleted_label), ) else: _, normal_fields = self._process_secret_fields( - relation, self.secret_fields, fields, self._delete_relation_secret, fields=fields + relation, + self.local_secret_fields, + fields, + self._delete_relation_secret, + fields=fields, ) self._delete_relation_data_without_secrets(self.component, relation, list(normal_fields)) @@ -2896,7 +2925,7 @@ def set_subordinated(self, relation_id: int) -> None: self.update_relation_data(relation_id, {"subordinated": "true"}) -class DatabaseProviderEventHandlers(EventHandlers): +class DatabaseProviderEventHandlers(ProviderEventHandlers): """Provider-side of the database relation handlers.""" on = DatabaseProvidesEvents() # pyright: ignore [reportAssignmentType] @@ -2911,6 +2940,7 @@ def __init__( def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) # Leader only if not self.relation_data.local_unit.is_leader(): return @@ -2924,6 +2954,10 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: event.relation, app=event.app, unit=event.unit ) + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + class DatabaseProvides(DatabaseProviderData, DatabaseProviderEventHandlers): """Provider-side of the database relations.""" @@ -3369,7 +3403,7 @@ def set_zookeeper_uris(self, relation_id: int, zookeeper_uris: str) -> None: self.update_relation_data(relation_id, {"zookeeper-uris": zookeeper_uris}) -class KafkaProviderEventHandlers(EventHandlers): +class KafkaProviderEventHandlers(ProviderEventHandlers): """Provider-side 
of the Kafka relation.""" on = KafkaProvidesEvents() # pyright: ignore [reportAssignmentType] @@ -3381,6 +3415,7 @@ def __init__(self, charm: CharmBase, relation_data: KafkaProviderData) -> None: def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) # Leader only if not self.relation_data.local_unit.is_leader(): return @@ -3613,7 +3648,7 @@ def set_version(self, relation_id: int, version: str) -> None: self.update_relation_data(relation_id, {"version": version}) -class OpenSearchProvidesEventHandlers(EventHandlers): +class OpenSearchProvidesEventHandlers(ProviderEventHandlers): """Provider-side of the OpenSearch relation.""" on = OpenSearchProvidesEvents() # pyright: ignore[reportAssignmentType] @@ -3625,6 +3660,8 @@ def __init__(self, charm: CharmBase, relation_data: OpenSearchProvidesData) -> N def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + # Leader only if not self.relation_data.local_unit.is_leader(): return @@ -3778,3 +3815,320 @@ def __init__( additional_secret_fields, ) OpenSearchRequiresEventHandlers.__init__(self, charm, self) + + +# Etcd related events + + +class EtcdProviderEvent(RelationEventWithSecret): + """Base class for Etcd events.""" + + @property + def prefix(self) -> Optional[str]: + """Returns the index that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("prefix") + + @property + def mtls_cert(self) -> Optional[str]: + """Returns TLS cert of the client.""" + if not self.relation.app: + return None + + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + secret_field = f"{PROV_SECRET_PREFIX}{SECRET_GROUPS.MTLS}" + if secret_uri := self.relation.data[self.app].get(secret_field): + secret = self.framework.model.get_secret(id=secret_uri) + content = secret.get_content(refresh=True) + if content: + return content.get("mtls-cert") + + +class MTLSCertUpdatedEvent(EtcdProviderEvent): + """Event emitted when the mtls relation is updated.""" + + def __init__(self, handle, relation, old_mtls_cert: Optional[str] = None, app=None, unit=None): + super().__init__(handle, relation, app, unit) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class EtcdProviderEvents(CharmEvents): + """Etcd events. + + This class defines the events that Etcd can emit. + """ + + mtls_cert_updated = EventSource(MTLSCertUpdatedEvent) + + +class EtcdReadyEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the etcd relation is ready to be consumed.""" + + +class EtcdRequirerEvents(CharmEvents): + """Etcd events. + + This class defines the events that the etcd requirer can emit. 
+ """ + + endpoints_changed = EventSource(DatabaseEndpointsChangedEvent) + etcd_ready = EventSource(EtcdReadyEvent) + + +# Etcd Provides and Requires Objects + + +class EtcdProviderData(ProviderData): + """Provider-side of the Etcd relation.""" + + RESOURCE_FIELD = "prefix" + + def __init__(self, model: Model, relation_name: str) -> None: + super().__init__(model, relation_name) + + def set_uris(self, relation_id: int, uris: str) -> None: + """Set the database connection URIs in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + uris: connection URIs. + """ + self.update_relation_data(relation_id, {"uris": uris}) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Set the endpoints in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoints: the endpoint addresses for etcd nodes "ip:port" format. + """ + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the etcd version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: etcd API version. + """ + self.update_relation_data(relation_id, {"version": version}) + + def set_tls_ca(self, relation_id: int, tls_ca: str) -> None: + """Set the TLS CA in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + tls_ca: TLS certification authority. + """ + self.update_relation_data(relation_id, {"tls-ca": tls_ca, "tls": "True"}) + + +class EtcdProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + on = EtcdProviderEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + # register all new secrets with their labels + new_data_keys = list(event.relation.data[event.app].keys()) + if any(newval for newval in new_data_keys if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, new_data_keys) + + getattr(self.on, "mtls_cert_updated").emit(event.relation, app=event.app, unit=event.unit) + return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + # mtls-cert is the only secret that can be updated + logger.info("mtls-cert updated") + getattr(self.on, "mtls_cert_updated").emit( + relation, app=relation.app, unit=remote_unit, old_mtls_cert=old_mtls_cert + ) + + +class EtcdProvides(EtcdProviderData, EtcdProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + def __init__(self, charm: CharmBase, relation_name: str) -> None: + EtcdProviderData.__init__(self, charm.model, relation_name) + EtcdProviderEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + +class EtcdRequirerData(RequirerData): + """Requires data side of the Etcd relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + prefix: str, + mtls_cert: Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ): + """Manager of Etcd client relations.""" + super().__init__(model, relation_name, extra_user_roles, additional_secret_fields) + self.prefix = prefix + self.mtls_cert = mtls_cert + + def set_mtls_cert(self, relation_id: int, mtls_cert: str) -> None: + """Set the mtls cert in the application relation databag / secret. + + Args: + relation_id: the identifier for a particular relation. + mtls_cert: mtls cert. + """ + self.update_relation_data(relation_id, {"mtls-cert": mtls_cert}) + + +class EtcdRequirerEventHandlers(RequirerEventHandlers): + """Requires events side of the Etcd relation.""" + + on = EtcdRequirerEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Etcd relation is created.""" + super()._on_relation_created_event(event) + + payload = { + "prefix": self.relation_data.prefix, + } + if self.relation_data.mtls_cert: + payload["mtls-cert"] = self.relation_data.mtls_cert + + self.relation_data.update_relation_data( + event.relation.id, + payload, + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Etcd relation has changed. + + This event triggers individual custom events depending on the changing relation. + """ + # Check which data has changed to emit customs events. + diff = self._diff(event) + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) + + # Emit a endpoints changed event if the etcd application added or changed this info + # in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + if ( + secret_field_tls in diff.added + or secret_field_tls in diff.changed + or secret_field_user in diff.added + or secret_field_user in diff.changed + or "username" in diff.added + or "username" in diff.changed + ): + # Emit the default event (the one without an alias). 
+ logger.info("etcd ready on %s", datetime.now()) + getattr(self.on, "etcd_ready").emit(event.relation, app=event.app, unit=event.unit) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + # secret-user or secret-tls updated + logger.info("etcd_ready updated") + getattr(self.on, "etcd_ready").emit(relation, app=relation.app, unit=remote_unit) + + +class EtcdRequires(EtcdRequirerData, EtcdRequirerEventHandlers): + """Requires-side of the Etcd relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + prefix: str, + mtls_cert: Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ) -> None: + EtcdRequirerData.__init__( + self, + charm.model, + relation_name, + prefix, + mtls_cert, + extra_user_roles, + additional_secret_fields, + ) + EtcdRequirerEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index b7eb90908b..7e6a9d7631 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -35,7 +35,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 49 +LIBPATCH = 51 # Groups to distinguish HBA access ACCESS_GROUP_IDENTITY = "identity_access" @@ -153,7 +153,7 @@ def _configure_pgaudit(self, enable: bool) -> None: if enable: cursor.execute("ALTER SYSTEM SET pgaudit.log = 'ROLE,DDL,MISC,MISC_SET';") cursor.execute("ALTER SYSTEM SET pgaudit.log_client TO off;") - cursor.execute("ALTER SYSTEM SET pgaudit.log_parameter TO off") + cursor.execute("ALTER SYSTEM SET pgaudit.log_parameter TO off;") else: cursor.execute("ALTER SYSTEM RESET pgaudit.log;") cursor.execute("ALTER SYSTEM RESET pgaudit.log_client;") @@ -258,7 +258,7 @@ def create_database( raise PostgreSQLCreateDatabaseError() from e # Enable preset extensions - self.enable_disable_extensions({plugin: True for plugin in plugins}, database) + self.enable_disable_extensions(dict.fromkeys(plugins, True), database) def create_user( self, @@ -435,6 +435,8 @@ def enable_disable_extensions( for extension, enable in extensions.items(): ordered_extensions[extension] = enable + self._configure_pgaudit(False) + # Enable/disabled the extension in each database. 
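For reference, a client charm would consume the EtcdRequires interface added above roughly as sketched here; this is a minimal, illustrative sketch only, and the library import path (charms.etcd_k8s.v0.etcd), the relation name "etcd-client", and the prefix value are assumptions for illustration, not anything defined in this patch.

    # Minimal consumer sketch (assumed: import path, relation name, prefix).
    from ops.charm import CharmBase
    from ops.main import main

    from charms.etcd_k8s.v0.etcd import EtcdRequires  # hypothetical path to the library above


    class MyAppCharm(CharmBase):
        def __init__(self, *args):
            super().__init__(*args)
            # Ask for access to keys under "/myapp"; no client certificate in this sketch.
            self.etcd = EtcdRequires(
                self,
                relation_name="etcd-client",  # assumed name from the charm's metadata
                prefix="/myapp",
                mtls_cert=None,
            )
            # etcd_ready fires once the username/TLS secrets land in the relation;
            # endpoints_changed fires whenever the provider publishes new endpoints.
            self.framework.observe(self.etcd.on.etcd_ready, self._on_etcd_ready)
            self.framework.observe(self.etcd.on.endpoints_changed, self._on_endpoints_changed)

        def _on_etcd_ready(self, event):
            ...  # read credentials/endpoints from the relation and configure the workload

        def _on_endpoints_changed(self, event):
            ...  # reconfigure the workload with the new endpoints


    if __name__ == "__main__":
        main(MyAppCharm)

A client certificate can later be published with self.etcd.set_mtls_cert(relation_id, cert), which writes the mtls-cert field that the provider side surfaces through its mtls_cert_updated event.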
for database in databases: with self._connect_to_database( From 26923b1cb82e5cc69764a2f72f82b35900b23041 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:21:44 +0300 Subject: [PATCH 21/21] Lock file maintenance Python dependencies (main) (#816) * Lock file maintenance Python dependencies * Fix linting --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Dragomir Penev --- poetry.lock | 964 ++++++++++++++++++++----------------- pyproject.toml | 22 +- src/charm.py | 9 +- src/cluster.py | 3 +- src/rotate_logs.py | 3 +- tests/unit/test_backups.py | 2 +- tests/unit/test_charm.py | 6 +- 7 files changed, 553 insertions(+), 456 deletions(-) diff --git a/poetry.lock b/poetry.lock index c70ec34a9b..497df5734d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,18 +2,18 @@ [[package]] name = "allure-pytest" -version = "2.13.5" +version = "2.14.0" description = "Allure pytest integration" optional = false python-versions = "*" groups = ["integration"] files = [ - {file = "allure-pytest-2.13.5.tar.gz", hash = "sha256:0ef8e1790c44a988db6b83c4d4f5e91451e2c4c8ea10601dfa88528d23afcf6e"}, - {file = "allure_pytest-2.13.5-py3-none-any.whl", hash = "sha256:94130bac32964b78058e62cf4b815ad97a5ac82a065e6dd2d43abac2be7640fc"}, + {file = "allure_pytest-2.14.0-py3-none-any.whl", hash = "sha256:6ddb68ef42bd5a2dfbcc136a184bf3e78e631ede7b8c54750026ffd407bda9af"}, + {file = "allure_pytest-2.14.0.tar.gz", hash = "sha256:2b485dc307755f8f3207783a69558ca1cc72f1e2c97bedc65c93fdb77adf328f"}, ] [package.dependencies] -allure-python-commons = "2.13.5" +allure-python-commons = "2.14.0" pytest = ">=4.5.0" [[package]] @@ -34,14 +34,14 @@ pytest = "*" [[package]] name = "allure-python-commons" -version = "2.13.5" -description = "('Contains the API for end users as well as helper functions and classes to build Allure adapters for Python test frameworks',)" +version = "2.14.0" +description = "Contains the API for end users as well as helper functions and classes to build Allure adapters for Python test frameworks" optional = false python-versions = ">=3.6" groups = ["integration"] files = [ - {file = "allure-python-commons-2.13.5.tar.gz", hash = "sha256:a232e7955811f988e49a4c1dd6c16cce7e9b81d0ea0422b1e5654d3254e2caf3"}, - {file = "allure_python_commons-2.13.5-py3-none-any.whl", hash = "sha256:8b0e837b6e32d810adec563f49e1d04127a5b6770e0232065b7cb09b9953980d"}, + {file = "allure_python_commons-2.14.0-py3-none-any.whl", hash = "sha256:9200f40abee697133e9ed9f68887cde996a24b9eb33fcf528da8fe50fae88e43"}, + {file = "allure_python_commons-2.14.0.tar.gz", hash = "sha256:9b217e2f6c74cdbd0e253f89059d4165346e95fcb28228fae333ff4dccea0bd5"}, ] [package.dependencies] @@ -50,14 +50,14 @@ pluggy = ">=0.4.0" [[package]] name = "anyio" -version = "4.8.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["charm-libs"] files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] 
[package.dependencies] @@ -67,8 +67,8 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -89,21 +89,21 @@ test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "attrs" -version = "25.1.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" groups = ["charm-libs", "integration", "unit"] files = [ - {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, - {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] 
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] @@ -119,6 +119,63 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "backports-datetime-fromisoformat" +version = "2.0.3" +description = "Backport of Python 3.11's datetime.fromisoformat" +optional = false +python-versions = ">3" +groups = ["integration"] +files = [ + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f681f638f10588fa3c101ee9ae2b63d3734713202ddfcfb6ec6cea0778a29d4"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cd681460e9142f1249408e5aee6d178c6d89b49e06d44913c8fdfb6defda8d1c"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ee68bc8735ae5058695b76d3bb2aee1d137c052a11c8303f1e966aa23b72b65b"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8273fe7932db65d952a43e238318966eab9e49e8dd546550a41df12175cc2be4"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39d57ea50aa5a524bb239688adc1d1d824c31b6094ebd39aa164d6cadb85de22"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac6272f87693e78209dc72e84cf9ab58052027733cd0721c55356d3c881791cf"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:44c497a71f80cd2bcfc26faae8857cf8e79388e3d5fbf79d2354b8c360547d58"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:6335a4c9e8af329cb1ded5ab41a666e1448116161905a94e054f205aa6d263bc"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2e4b66e017253cdbe5a1de49e0eecff3f66cd72bcb1229d7db6e6b1832c0443"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:43e2d648e150777e13bbc2549cc960373e37bf65bd8a5d2e0cef40e16e5d8dd0"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4ce6326fd86d5bae37813c7bf1543bae9e4c215ec6f5afe4c518be2635e2e005"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7c8fac333bf860208fd522a5394369ee3c790d0aa4311f515fcc4b6c5ef8d75"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4da5ab3aa0cc293dc0662a0c6d1da1a011dc1edcbc3122a288cfed13a0b45"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58ea11e3bf912bd0a36b0519eae2c5b560b3cb972ea756e66b73fb9be460af01"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a375c7dbee4734318714a799b6c697223e4bbb57232af37fbfff88fb48a14c6"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:ac677b1664c4585c2e014739f6678137c8336815406052349c85898206ec7061"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:66ce47ee1ba91e146149cf40565c3d750ea1be94faf660ca733d8601e0848147"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8b7e069910a66b3bba61df35b5f879e5253ff0821a70375b9daf06444d046fa4"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a3b5d1d04a9e0f7b15aa1e647c750631a873b298cdd1255687bb68779fe8eb35"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1b95986430e789c076610aea704db20874f0781b8624f648ca9fb6ef67c6e1"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffe5f793db59e2f1d45ec35a1cf51404fdd69df9f6952a0c87c3060af4c00e32"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:620e8e73bd2595dfff1b4d256a12b67fce90ece3de87b38e1dde46b910f46f4d"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4cf9c0a985d68476c1cabd6385c691201dda2337d7453fb4da9679ce9f23f4e7"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:d144868a73002e6e2e6fef72333e7b0129cecdd121aa8f1edba7107fd067255d"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e81b26497a17c29595bc7df20bc6a872ceea5f8c9d6537283945d4b6396aec10"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:5ba00ead8d9d82fd6123eb4891c566d30a293454e54e32ff7ead7644f5f7e575"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:24d574cb4072e1640b00864e94c4c89858033936ece3fc0e1c6f7179f120d0a8"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9735695a66aad654500b0193525e590c693ab3368478ce07b34b443a1ea5e824"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63d39709e17eb72685d052ac82acf0763e047f57c86af1b791505b1fec96915d"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1ea2cc84224937d6b9b4c07f5cb7c667f2bde28c255645ba27f8a675a7af8234"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4024e6d35a9fdc1b3fd6ac7a673bd16cb176c7e0b952af6428b7129a70f72cce"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5e2dcc94dc9c9ab8704409d86fcb5236316e9dcef6feed8162287634e3568f4c"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa2de871801d824c255fac7e5e7e50f2be6c9c376fd9268b40c54b5e9da91f42"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1314d4923c1509aa9696712a7bc0c7160d3b7acf72adafbbe6c558d523f5d491"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b750ecba3a8815ad8bc48311552f3f8ab99dd2326d29df7ff670d9c49321f48f"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d5117dce805d8a2f78baeddc8c6127281fa0a5e2c40c6dd992ba6b2b367876"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fb35f607bd1cbe37b896379d5f5ed4dc298b536f4b959cb63180e05cacc0539d"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:61c74710900602637d2d145dda9720c94e303380803bf68811b2a151deec75c2"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ece59af54ebf67ecbfbbf3ca9066f5687879e36527ad69d8b6e3ac565d565a62"}, + {file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:d0a7c5f875068efe106f62233bc712d50db4d07c13c7db570175c7857a7b5dbd"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90e202e72a3d5aae673fcc8c9a4267d56b2f532beeb9173361293625fe4d2039"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2df98ef1b76f5a58bb493dda552259ba60c3a37557d848e039524203951c9f06"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7100adcda5e818b5a894ad0626e38118bb896a347f40ebed8981155675b9ba7b"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e410383f5d6a449a529d074e88af8bc80020bb42b402265f9c02c8358c11da5"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2797593760da6bcc32c4a13fa825af183cd4bfd333c60b3dbf84711afca26ef"}, + {file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35a144fd681a0bea1013ccc4cd3fd4dc758ea17ee23dca019c02b82ec46fc0c4"}, + {file = "backports_datetime_fromisoformat-2.0.3.tar.gz", hash = "sha256:b58edc8f517b66b397abc250ecc737969486703a66eb97e01e6d51291b1a139d"}, +] + [[package]] name = "backports-strenum" version = "1.3.1" @@ -134,37 +191,63 @@ files = [ [[package]] name = "bcrypt" -version = "4.2.1" +version = "4.3.0" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["integration"] files = [ - {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"}, - {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, - {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, - {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, - {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, - {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, - {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, - {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, - {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = 
"sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, ] [package.extras] @@ -173,34 +256,34 @@ typecheck = ["mypy"] [[package]] name = "boto3" -version = "1.37.22" +version = "1.38.0" description = "The AWS SDK for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "integration"] files = [ - {file = "boto3-1.37.22-py3-none-any.whl", hash = "sha256:a14324d5fa5f4fea00c0e3c69754cbd28100f7fe194693eeecf2dc07446cf4ef"}, - {file = "boto3-1.37.22.tar.gz", hash = "sha256:78a0ec0aafbf6044104c98ad80b69e6d1c83d8233fda2c2d241029e6c705c510"}, + {file = "boto3-1.38.0-py3-none-any.whl", hash = "sha256:96898facb164b47859d40a4271007824a0a791c3811a7079ce52459d753d4474"}, + {file = 
"boto3-1.38.0.tar.gz", hash = "sha256:8b6544eca17e31d1bfd538e5d152b96a68d6c92950352a0cd9679f89d217d53a"}, ] [package.dependencies] -botocore = ">=1.37.22,<1.38.0" +botocore = ">=1.38.0,<1.39.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.11.0,<0.12.0" +s3transfer = ">=0.12.0,<0.13.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.37.22" +version = "1.38.0" description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "integration"] files = [ - {file = "botocore-1.37.22-py3-none-any.whl", hash = "sha256:184db7c9314d13002bc827f511a5140574b5da1acda342d51e093dad6317de98"}, - {file = "botocore-1.37.22.tar.gz", hash = "sha256:b3b26f1a90236bcd17d4092f8c85a256b44e9955a16b633319a2f5678d605e9f"}, + {file = "botocore-1.38.0-py3-none-any.whl", hash = "sha256:f9d58404796a44746d54c4a9318a8970fb4dbcbdc45aa0e75bf528af4213b6b5"}, + {file = "botocore-1.38.0.tar.gz", hash = "sha256:ac8997291bcfd28d329a779ceda429fbe9f8950ba051429a37ba93cbda025e94"}, ] [package.dependencies] @@ -213,14 +296,14 @@ crt = ["awscrt (==0.23.8)"] [[package]] name = "cachetools" -version = "5.5.1" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" groups = ["integration"] files = [ - {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, - {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] @@ -451,14 +534,14 @@ files = [ [[package]] name = "cosl" -version = "0.0.55" +version = "0.0.57" description = "Utils for COS Lite charms" optional = false python-versions = ">=3.8" groups = ["charm-libs"] files = [ - {file = "cosl-0.0.55-py3-none-any.whl", hash = "sha256:bf641d611f982c8f494f3cf72ac4181b24e30c69504cfbd55aa8f54964797f90"}, - {file = "cosl-0.0.55.tar.gz", hash = "sha256:d3b8ee6f78302ac111d3a15d36c42a38c298a806161d762869513d348d778316"}, + {file = "cosl-0.0.57-py3-none-any.whl", hash = "sha256:0f2bac6dd84a636747e63acd6cb277f1cc642d3c5f80caa13cd9b51644b8ff67"}, + {file = "cosl-0.0.57.tar.gz", hash = "sha256:7ed489973e6e72a9b9533dddf0da1df3499a0aacdc513ad0b579cb04b4671b04"}, ] [package.dependencies] @@ -471,75 +554,75 @@ typing-extensions = "*" [[package]] name = "coverage" -version = "7.6.12" +version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["unit"] files = [ - {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, - {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, - {file = 
"coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, - {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, - {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, - {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, - {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, - {file = 
"coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, - {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, - {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, - {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, - {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, - {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = 
"sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, - {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, - {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, - {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, - {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, - {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, - {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = 
"sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = 
"coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = 
"coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, ] [package.dependencies] @@ -550,43 +633,47 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" -version = "44.0.1" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["charm-libs", "integration"] files = [ - {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, - {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = 
"sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, - {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, - {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, - {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, - {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, - {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = 
"cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + 
{file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, ] [package.dependencies] @@ -599,19 +686,19 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" description = "Decorators for Humans" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["integration"] files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] [[package]] @@ -665,14 +752,14 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "google-auth" -version = "2.38.0" +version = "2.39.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" groups = ["integration"] files = [ - {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, - {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, + {file = 
"google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, + {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, ] [package.dependencies] @@ -681,30 +768,32 @@ pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] enterprise-cert = ["cryptography", "pyopenssl"] -pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] [[package]] name = "googleapis-common-protos" -version = "1.67.0" +version = "1.70.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" groups = ["charm-libs"] files = [ - {file = "googleapis_common_protos-1.67.0-py2.py3-none-any.whl", hash = "sha256:579de760800d13616f51cf8be00c876f00a9f146d3e6510e19d1f4111758b741"}, - {file = "googleapis_common_protos-1.67.0.tar.gz", hash = "sha256:21398025365f138be356d5923e9168737d94d46a72aefee4a6110a1f23463c86"}, + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] [[package]] name = "h11" @@ -720,14 +809,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.8" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" groups = ["charm-libs"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, ] [package.dependencies] @@ -820,14 +909,14 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9 [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["integration", "unit"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] @@ -849,14 +938,14 @@ tomli = {version = "*", markers = "python_version > \"3.6\" and python_version < [[package]] name = "ipython" -version = "8.32.0" +version = "8.35.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" groups = ["integration"] files = [ - {file = "ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"}, - {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"}, + {file = "ipython-8.35.0-py3-none-any.whl", hash = "sha256:e6b7470468ba6f1f0a7b116bb688a3ece2f13e2f94138e508201fad677a788ba"}, + {file = "ipython-8.35.0.tar.gz", hash = "sha256:d200b7d93c3f5883fc36ab9ce28a18249c7706e51347681f80a0aef9895f2520"}, ] [package.dependencies] @@ -884,7 +973,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] +test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "jedi" @@ -975,17 +1064,18 @@ referencing = ">=0.31.0" [[package]] name = "juju" -version = "3.6.1.0" +version = "3.6.1.1" description = "Python library for Juju" optional = false python-versions = ">=3.8.6" groups = ["integration"] files = [ - {file = "juju-3.6.1.0-py3-none-any.whl", hash = "sha256:28b6a10093f2e0243ad0ddd5ef25a3f59d710e9da5a188456ba704142819fbb3"}, - {file = "juju-3.6.1.0.tar.gz", hash = "sha256:59cfde55185bb53877a2bddc2855f3c48471537e130653d77984681676a448bc"}, + {file = "juju-3.6.1.1-py3-none-any.whl", hash = "sha256:19ede730130b03cd5a99850f812521c2eb93199771207b50e1edf86e5e47acb2"}, + {file = "juju-3.6.1.1.tar.gz", hash = "sha256:2289abd450515b7883f12f06d42f965d31939e711c496cb8713b4b058408e589"}, ] [package.dependencies] +backports-datetime-fromisoformat = ">=2.0.2" "backports.strenum" = {version = 
">=1.3.1", markers = "python_version < \"3.11\""} hvac = "*" kubernetes = ">=12.0.1,<31.0.0" @@ -993,7 +1083,6 @@ macaroonbakery = ">=1.1,<2.0" packaging = "*" paramiko = ">=2.4.0" pyasn1 = ">=0.4.4" -pyRFC3339 = ">=1.0,<2.0" pyyaml = ">=5.1.2" toposort = ">=1.5,<2" typing-extensions = ">=4.5.0" @@ -1001,7 +1090,7 @@ typing_inspect = ">=0.6.0" websockets = ">=13.0.1" [package.extras] -dev = ["Twine", "freezegun", "pytest", "pytest-asyncio", "typing-inspect"] +dev = ["Twine", "freezegun", "pytest", "pytest-asyncio (<=0.25.0)", "typing-inspect"] docs = ["sphinx (==5.3.0)", "sphinx_rtd_theme", "sphinxcontrib-asyncio"] [[package]] @@ -1205,14 +1294,14 @@ traitlets = "*" [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["integration"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] @@ -1335,14 +1424,14 @@ files = [ [[package]] name = "ops" -version = "2.18.1" +version = "2.20.0" description = "The Python library behind great charms" optional = false python-versions = ">=3.8" groups = ["main", "charm-libs"] files = [ - {file = "ops-2.18.1-py3-none-any.whl", hash = "sha256:ba0312366e25b3ae90cf4b8d0af6ea6b612d4951500f856bce609cdb25c9bdeb"}, - {file = "ops-2.18.1.tar.gz", hash = "sha256:5619deb370c00ea851f9579b780a09b88b1a1d020e58e1ed81d31c8fb7b28c8a"}, + {file = "ops-2.20.0-py3-none-any.whl", hash = "sha256:94791a4b45f00c6902494a4934480c85947880b27f5ebf3a0ec32e8cc6279c99"}, + {file = "ops-2.20.0.tar.gz", hash = "sha256:be1dcfd0bb748839fbc200bbd073a6acf9648401c3729db22d8594ebb4301e05"}, ] [package.dependencies] @@ -1351,18 +1440,18 @@ websocket-client = "==1.*" [package.extras] docs = ["canonical-sphinx-extensions", "furo", "linkify-it-py", "myst-parser", "pyspelling", "sphinx (>=8.0.0,<8.1.0)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-design", "sphinx-notfound-page", "sphinx-tabs", "sphinxcontrib-jquery", "sphinxext-opengraph"] -testing = ["ops-scenario (>=7.0.5,<8)"] +testing = ["ops-scenario (==7.20.0)"] [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["integration", "unit"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -1464,26 +1553,26 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poetry-core" -version = "2.1.1" +version = "2.1.2" description = "Poetry PEP 517 Build Backend" optional = false python-versions 
= "<4.0,>=3.9" groups = ["charm-libs"] files = [ - {file = "poetry_core-2.1.1-py3-none-any.whl", hash = "sha256:bc3b0382ab4d00d5d780277fd0aad1580eb4403613b37fc60fec407b5bee1fe6"}, - {file = "poetry_core-2.1.1.tar.gz", hash = "sha256:c1a1f6f00e4254742f40988a8caf665549101cf9991122cd5de1198897768b1a"}, + {file = "poetry_core-2.1.2-py3-none-any.whl", hash = "sha256:ecb1e8f7d4f071a21cd0feb8c19bd1aec80de6fb0e82aa9d809a591e544431b4"}, + {file = "poetry_core-2.1.2.tar.gz", hash = "sha256:f9dbbbd0ebf9755476a1d57f04b30e9aecf71ca9dc2fcd4b17aba92c0002aa04"}, ] [[package]] name = "prompt-toolkit" -version = "3.0.50" +version = "3.0.51" description = "Library for building powerful interactive command lines in Python" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.8" groups = ["integration"] files = [ - {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, - {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, + {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, + {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, ] [package.dependencies] @@ -1674,18 +1763,18 @@ files = [ [[package]] name = "pyasn1-modules" -version = "0.4.1" +version = "0.4.2" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" groups = ["integration"] files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, - {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" +pyasn1 = ">=0.6.1,<0.7.0" [[package]] name = "pycparser" @@ -1853,14 +1942,14 @@ files = [ [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["integration", "unit"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -1895,14 +1984,14 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-operator" -version = "0.40.0" +version = "0.42.0" description = "Fixtures for Operators" optional = false python-versions = "*" groups = ["integration"] files = [ - {file = "pytest_operator-0.40.0-py3-none-any.whl", hash = "sha256:1cfa93ab61b11e8d7bf58dbb1a39e75fcbfcc084781bb571fde08fda7e236713"}, - {file = "pytest_operator-0.40.0.tar.gz", hash = "sha256:45394ade32b7765b6ba89871b676d1fb8aa7578589f74df26ff0fca4692d1c7b"}, + 
{file = "pytest_operator-0.42.0-py3-none-any.whl", hash = "sha256:29ee3df46b5a47b435f63f7efa2e1433807ba723ac3890f86b88033f79b3e48c"}, + {file = "pytest_operator-0.42.0.tar.gz", hash = "sha256:389afb648dab91eb8f0e224cbe58f05598e850aafc46e589fce1705577309c69"}, ] [package.dependencies] @@ -1930,14 +2019,14 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2025.1" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["integration"] files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -2063,127 +2152,138 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rpds-py" -version = "0.22.3" +version = "0.24.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["charm-libs", "unit"] files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = 
"rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = 
"rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = 
"rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, + {file = 
"rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, + {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, + {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, + {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, + {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, + {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, + {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, + {file = 
"rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, + {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, + {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, + {file = 
"rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, + {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, + {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash 
= "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, + {file = 
"rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, ] [[package]] name = "rsa" -version = "4.9" +version = "4.9.1" description = "Pure-Python RSA implementation" optional = false -python-versions = ">=3.6,<4" +python-versions = "<4,>=3.6" groups = ["integration"] files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, ] [package.dependencies] @@ -2191,42 +2291,42 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.9.6" +version = "0.11.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["format"] files = [ - {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, - {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, - {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, - {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, - {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, - {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = 
"sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, - {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, + {file = "ruff-0.11.6-py3-none-linux_armv6l.whl", hash = "sha256:d84dcbe74cf9356d1bdb4a78cf74fd47c740bf7bdeb7529068f69b08272239a1"}, + {file = "ruff-0.11.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9bc583628e1096148011a5d51ff3c836f51899e61112e03e5f2b1573a9b726de"}, + {file = "ruff-0.11.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2959049faeb5ba5e3b378709e9d1bf0cab06528b306b9dd6ebd2a312127964a"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63c5d4e30d9d0de7fedbfb3e9e20d134b73a30c1e74b596f40f0629d5c28a193"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a4b9a4e1439f7d0a091c6763a100cef8fbdc10d68593df6f3cfa5abdd9246e"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5edf270223dd622218256569636dc3e708c2cb989242262fe378609eccf1308"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f55844e818206a9dd31ff27f91385afb538067e2dc0beb05f82c293ab84f7d55"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d8f782286c5ff562e4e00344f954b9320026d8e3fae2ba9e6948443fafd9ffc"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01c63ba219514271cee955cd0adc26a4083df1956d57847978383b0e50ffd7d2"}, + {file = "ruff-0.11.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15adac20ef2ca296dd3d8e2bedc6202ea6de81c091a74661c3666e5c4c223ff6"}, + {file = "ruff-0.11.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4dd6b09e98144ad7aec026f5588e493c65057d1b387dd937d7787baa531d9bc2"}, + {file = "ruff-0.11.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:45b2e1d6c0eed89c248d024ea95074d0e09988d8e7b1dad8d3ab9a67017a5b03"}, + {file = "ruff-0.11.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bd40de4115b2ec4850302f1a1d8067f42e70b4990b68838ccb9ccd9f110c5e8b"}, + {file = "ruff-0.11.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:77cda2dfbac1ab73aef5e514c4cbfc4ec1fbef4b84a44c736cc26f61b3814cd9"}, + {file = "ruff-0.11.6-py3-none-win32.whl", hash = "sha256:5151a871554be3036cd6e51d0ec6eef56334d74dfe1702de717a995ee3d5b287"}, + {file = "ruff-0.11.6-py3-none-win_amd64.whl", hash = "sha256:cce85721d09c51f3b782c331b0abd07e9d7d5f775840379c640606d3159cae0e"}, + {file = "ruff-0.11.6-py3-none-win_arm64.whl", hash = "sha256:3567ba0d07fb170b1b48d944715e3294b77f5b7679e8ba258199a250383ccb79"}, + {file = "ruff-0.11.6.tar.gz", hash = "sha256:bec8bcc3ac228a45ccc811e45f7eb61b950dbf4cf31a67fa89352574b01c7d79"}, ] [[package]] name = "s3transfer" -version = "0.11.4" +version = "0.12.0" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "integration"] files = [ - {file = "s3transfer-0.11.4-py3-none-any.whl", hash = "sha256:ac265fa68318763a03bf2dc4f39d5cbd6a9e178d81cc9483ad27da33637e320d"}, - {file = "s3transfer-0.11.4.tar.gz", hash = "sha256:559f161658e1cf0a911f45940552c696735f5c74e64362e515f333ebed87d679"}, + {file = "s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18"}, + {file = "s3transfer-0.12.0.tar.gz", hash = 
"sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c"}, ] [package.dependencies] @@ -2281,14 +2381,14 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "tenacity" -version = "9.0.0" +version = "9.1.2" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "charm-libs", "integration"] files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, ] [package.extras] @@ -2368,14 +2468,14 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "charm-libs", "integration", "unit"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] markers = {unit = "python_version < \"3.13\""} @@ -2397,14 +2497,14 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "2.3.0" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" groups = ["main", "charm-libs", "integration"] files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] @@ -2444,81 +2544,81 @@ test = ["websockets"] [[package]] name = "websockets" -version = "15.0" +version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" groups = ["integration"] files = [ - {file = "websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0"}, - {file = "websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3"}, - {file = "websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8"}, - {file = "websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f"}, - {file = "websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133"}, - {file = "websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965"}, - {file = "websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7"}, - {file = "websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad"}, - {file = "websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3"}, - {file = "websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1"}, - {file = 
"websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4"}, - {file = "websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680"}, - {file = "websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37"}, - {file = "websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f"}, - {file = "websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d"}, - {file = "websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc"}, - {file = "websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904"}, - {file = "websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa"}, - {file = "websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1"}, - {file = "websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7"}, - {file = "websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081"}, - {file = "websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9"}, - {file = "websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b"}, - {file = 
"websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72"}, - {file = "websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c"}, - {file = "websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8"}, - {file = "websockets-15.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c348abc5924caa02a62896300e32ea80a81521f91d6db2e853e6b1994017c9f6"}, - {file = "websockets-15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5294fcb410ed0a45d5d1cdedc4e51a60aab5b2b3193999028ea94afc2f554b05"}, - {file = "websockets-15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c24ba103ecf45861e2e1f933d40b2d93f5d52d8228870c3e7bf1299cd1cb8ff1"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8821a03bcfb36e4e4705316f6b66af28450357af8a575dc8f4b09bf02a3dee"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc5ae23ada6515f31604f700009e2df90b091b67d463a8401c1d8a37f76c1d7"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ac67b542505186b3bbdaffbc303292e1ee9c8729e5d5df243c1f20f4bb9057e"}, - {file = "websockets-15.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c86dc2068f1c5ca2065aca34f257bbf4f78caf566eb230f692ad347da191f0a1"}, - {file = "websockets-15.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:30cff3ef329682b6182c01c568f551481774c476722020b8f7d0daacbed07a17"}, - {file = "websockets-15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98dcf978d4c6048965d1762abd534c9d53bae981a035bfe486690ba11f49bbbb"}, - {file = "websockets-15.0-cp39-cp39-win32.whl", hash = "sha256:37d66646f929ae7c22c79bc73ec4074d6db45e6384500ee3e0d476daf55482a9"}, - {file = "websockets-15.0-cp39-cp39-win_amd64.whl", hash = "sha256:24d5333a9b2343330f0f4eb88546e2c32a7f5c280f8dd7d3cc079beb0901781b"}, - {file = "websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506"}, - {file = "websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842"}, - {file = "websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5"}, - {file = "websockets-15.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:190bc6ef8690cd88232a038d1b15714c258f79653abad62f7048249b09438af3"}, - {file = "websockets-15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:327adab7671f3726b0ba69be9e865bba23b37a605b585e65895c428f6e47e766"}, - {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd8ef197c87afe0a9009f7a28b5dc613bfc585d329f80b7af404e766aa9e8c7"}, - {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:789c43bf4a10cd067c24c321238e800b8b2716c863ddb2294d2fed886fa5a689"}, - {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7394c0b7d460569c9285fa089a429f58465db930012566c03046f9e3ab0ed181"}, - {file = "websockets-15.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ea4f210422b912ebe58ef0ad33088bc8e5c5ff9655a8822500690abc3b1232d"}, - {file = "websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3"}, - {file = "websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] [[package]] @@ -2633,4 +2733,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "d6d1a9eb1d741b25733acdae41d5ad4c8f3a1948bb519a31b676c5f6b1f21550" +content-hash = "fb48e4bd5f30c14061469b28bacf1f284648934eac48e1eadfecdb29c563a896" diff --git a/pyproject.toml b/pyproject.toml index 1a7c1b7d51..755ccbbd9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,14 +7,14 @@ requires-poetry = ">=2.0.0" [tool.poetry.dependencies] python = "^3.10" -ops = "^2.18.1" -boto3 = "^1.37.22" +ops = "^2.20.0" +boto3 = "^1.38.0" pgconnstr = "^1.0.1" requests = "^2.32.3" -tenacity = "^9.0.0" +tenacity = "^9.1.2" psycopg2 = "^2.9.10" pydantic = "^1.10.21" -jinja2 = "^3.1.5" +jinja2 = "^3.1.6" pysyncobj = "^0.3.14" psutil = "^7.0.0" @@ -40,7 +40,7 @@ opentelemetry-exporter-otlp-proto-http = "1.21.0" optional = true [tool.poetry.group.format.dependencies] -ruff = "^0.9.6" +ruff = "^0.11.6" [tool.poetry.group.lint] optional = true @@ -52,8 +52,8 @@ codespell = "^2.4.1" optional = true [tool.poetry.group.unit.dependencies] -coverage = {extras = ["toml"], version = "^7.6.12"} -pytest = "^8.3.4" +coverage = {extras = ["toml"], version = "^7.8.0"} +pytest = "^8.3.5" pytest-asyncio = "*" parameterized = "^0.9.0" jsonschema = "^4.23.0" @@ -62,16 +62,16 @@ jsonschema = "^4.23.0" optional = true [tool.poetry.group.integration.dependencies] -pytest = "^8.3.4" 
-pytest-operator = "^0.40.0" +pytest = "^8.3.5" +pytest-operator = "^0.42.0" # renovate caret doesn't work: https://github.com/renovatebot/renovate/issues/26940 -juju = "<=3.6.1.0" +juju = "<=3.6.1.1" boto3 = "*" tenacity = "*" landscape-api-py3 = "^0.9.0" mailmanclient = "^3.3.5" psycopg2-binary = "^2.9.10" -allure-pytest = "^2.13.5" +allure-pytest = "^2.14.0" allure-pytest-default-results = "^0.1.2" # Testing tools configuration diff --git a/src/charm.py b/src/charm.py index bc6fd78a3c..b4bd28d455 100755 --- a/src/charm.py +++ b/src/charm.py @@ -1078,9 +1078,9 @@ def _on_install(self, event: InstallEvent) -> None: # This is needed due to https://bugs.launchpad.net/snapd/+bug/2011581. try: # Input is hardcoded - subprocess.check_call("mkdir -p /home/snap_daemon".split()) # noqa: S603 - subprocess.check_call("chown snap_daemon:snap_daemon /home/snap_daemon".split()) # noqa: S603 - subprocess.check_call("usermod -d /home/snap_daemon snap_daemon".split()) # noqa: S603 + subprocess.check_call(["mkdir", "-p", "/home/snap_daemon"]) # noqa: S607 + subprocess.check_call(["chown", "snap_daemon:snap_daemon", "/home/snap_daemon"]) # noqa: S607 + subprocess.check_call(["usermod", "-d", "/home/snap_daemon", "snap_daemon"]) # noqa: S607 except subprocess.CalledProcessError: logger.exception("Unable to create snap_daemon home dir") @@ -1933,8 +1933,7 @@ def _reboot_on_detached_storage(self, event: EventBase) -> None: logger.error("Data directory not attached. Reboot unit.") self.unit.status = WaitingStatus("Data directory not attached") with contextlib.suppress(subprocess.CalledProcessError): - # Call is constant - subprocess.check_call(["/usr/bin/systemctl", "reboot"]) # noqa: S603 + subprocess.check_call(["/usr/bin/systemctl", "reboot"]) def _restart(self, event: RunWithLock) -> None: """Restart PostgreSQL.""" diff --git a/src/cluster.py b/src/cluster.py index 3ca1a46d4d..7d4c70c33a 100644 --- a/src/cluster.py +++ b/src/cluster.py @@ -1067,5 +1067,4 @@ def update_patroni_restart_condition(self, new_condition: str) -> None: logger.debug(f"new patroni service file: {new_patroni_service}") with open(PATRONI_SERVICE_DEFAULT_PATH, "w") as patroni_service_file: patroni_service_file.write(new_patroni_service) - # Input is hardcoded - subprocess.run(["/bin/systemctl", "daemon-reload"]) # noqa: S603 + subprocess.run(["/bin/systemctl", "daemon-reload"]) diff --git a/src/rotate_logs.py b/src/rotate_logs.py index 01d4ca88b4..17aa74aeb9 100644 --- a/src/rotate_logs.py +++ b/src/rotate_logs.py @@ -45,8 +45,7 @@ def start_log_rotation(self): logging.info("Starting rotate logs process") - # Input is generated by the charm - pid = subprocess.Popen( # noqa: S603 + pid = subprocess.Popen( ["/usr/bin/python3", "scripts/rotate_logs.py"], # File should not close stdout=open(LOG_FILE_PATH, "a"), # noqa: SIM115 diff --git a/tests/unit/test_backups.py b/tests/unit/test_backups.py index 1ca2d3a469..5cd5cac8c4 100644 --- a/tests/unit/test_backups.py +++ b/tests/unit/test_backups.py @@ -507,7 +507,7 @@ def test_execute_command(harness): patch("pwd.getpwnam") as _getpwnam, ): # Test when the command fails. 
- command = "rm -r /var/lib/postgresql/data/pgdata".split() + command = ["rm", "-r", "/var/lib/postgresql/data/pgdata"] _run.return_value = CompletedProcess(command, 1, b"", b"fake stderr") assert harness.charm.backup._execute_command(command) == (1, "", "fake stderr") _run.assert_called_once_with( diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 3ff54bccce..bd18ab405f 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -92,9 +92,9 @@ def test_on_install(harness): pg_snap.alias.assert_any_call("patronictl") assert _check_call.call_count == 3 - _check_call.assert_any_call("mkdir -p /home/snap_daemon".split()) - _check_call.assert_any_call("chown snap_daemon:snap_daemon /home/snap_daemon".split()) - _check_call.assert_any_call("usermod -d /home/snap_daemon snap_daemon".split()) + _check_call.assert_any_call(["mkdir", "-p", "/home/snap_daemon"]) + _check_call.assert_any_call(["chown", "snap_daemon:snap_daemon", "/home/snap_daemon"]) + _check_call.assert_any_call(["usermod", "-d", "/home/snap_daemon", "snap_daemon"]) # Assert the status set by the event handler. assert isinstance(harness.model.unit.status, WaitingStatus)