From a16116a7e81a25e08f643b8af791edc9cbd43779 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 25 Mar 2025 10:50:16 -0300 Subject: [PATCH 01/15] Update charmcraft.yaml build tools (#903) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- charmcraft.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index 93a258dad9..79e118c1da 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,7 +27,7 @@ parts: PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==25.0.1 # renovate: charmcraft-pip-latest # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.7/uv-installer.sh | sh # renovate: charmcraft-uv-latest + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.9/uv-installer.sh | sh # renovate: charmcraft-uv-latest # poetry 2.0.0 requires Python >=3.9 if ! "$HOME/.local/bin/uv" python find '>=3.9' then @@ -75,7 +75,7 @@ parts: # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the # Ubuntu 22.04 archive. 
Install rustc and cargo using rustup instead of the Ubuntu archive rustup set profile minimal - rustup default 1.85.0 # renovate: charmcraft-rust-latest + rustup default 1.85.1 # renovate: charmcraft-rust-latest craftctl default # Include requirements.txt in *.charm artifact for easier debugging From 7083f18f402d3c2e3f7a6212976cf71050f16892 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 25 Mar 2025 18:20:11 -0300 Subject: [PATCH 02/15] Update canonical/data-platform-workflows action to v31.0.1 (#902) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/approve_renovate_pr.yaml | 2 +- .github/workflows/check_pr.yaml | 2 +- .github/workflows/ci.yaml | 4 ++-- .github/workflows/promote.yaml | 2 +- .github/workflows/release.yaml | 2 +- .github/workflows/sync_docs.yaml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/approve_renovate_pr.yaml b/.github/workflows/approve_renovate_pr.yaml index 422766f800..450111576b 100644 --- a/.github/workflows/approve_renovate_pr.yaml +++ b/.github/workflows/approve_renovate_pr.yaml @@ -10,6 +10,6 @@ on: jobs: approve-pr: name: Approve Renovate pull request - uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v31.0.1 permissions: pull-requests: write # Needed to approve PR diff --git a/.github/workflows/check_pr.yaml b/.github/workflows/check_pr.yaml index ce3d160399..f613a6aed9 100644 --- a/.github/workflows/check_pr.yaml +++ b/.github/workflows/check_pr.yaml @@ -16,4 +16,4 @@ on: jobs: check-pr: name: Check pull request - uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v31.0.1 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 
9d203b8633..cdf5a0e2e6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ on: jobs: lint: name: Lint - uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.1 unit-test: name: Unit test charm @@ -49,7 +49,7 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v31.0.1 integration-test: name: Integration test charm diff --git a/.github/workflows/promote.yaml b/.github/workflows/promote.yaml index 7b4c329c6f..03e7bc7a29 100644 --- a/.github/workflows/promote.yaml +++ b/.github/workflows/promote.yaml @@ -25,7 +25,7 @@ on: jobs: promote: name: Promote charm - uses: canonical/data-platform-workflows/.github/workflows/_promote_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/_promote_charm.yaml@v31.0.1 with: track: '14' from-risk: ${{ inputs.from-risk }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 7f057a397b..3121eedbb4 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -44,7 +44,7 @@ jobs: name: Release charm needs: - ci-tests - uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v31.0.0 + uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v31.0.1 with: channel: 14/edge artifact-prefix: ${{ needs.ci-tests.outputs.artifact-prefix }} diff --git a/.github/workflows/sync_docs.yaml b/.github/workflows/sync_docs.yaml index 42d9fb7e01..e22b702518 100644 --- a/.github/workflows/sync_docs.yaml +++ b/.github/workflows/sync_docs.yaml @@ -10,7 +10,7 @@ on: jobs: sync-docs: name: Sync docs from Discourse - uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v31.0.0 + uses: 
canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v31.0.1 with: reviewers: a-velasco permissions: From 91ec95e818adb1cc010d0b4661eeaf5e549042be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Wed, 26 Mar 2025 14:59:56 +0100 Subject: [PATCH 03/15] [DPE-6344] LDAP IV: Define pebble service (#897) --- src/backups.py | 6 +- src/charm.py | 132 +++++++++++++++++++++++------ src/relations/async_replication.py | 6 +- tests/unit/test_charm.py | 42 +++++++-- 4 files changed, 144 insertions(+), 42 deletions(-) diff --git a/src/backups.py b/src/backups.py index 2bdc776374..d9a1fee86e 100644 --- a/src/backups.py +++ b/src/backups.py @@ -989,7 +989,7 @@ def _on_restore_action(self, event): # noqa: C901 # Stop the database service before performing the restore. logger.info("Stopping database service") try: - self.container.stop(self.charm._postgresql_service) + self.container.stop(self.charm.postgresql_service) except ChangeError as e: error_message = f"Failed to stop database service with error: {e!s}" logger.error(f"Restore failed: {error_message}") @@ -1047,7 +1047,7 @@ def _on_restore_action(self, event): # noqa: C901 # Start the database to start the restore process. 
logger.info("Configuring Patroni to restore the backup") - self.container.start(self.charm._postgresql_service) + self.container.start(self.charm.postgresql_service) event.set_results({"restore-status": "restore started"}) @@ -1221,7 +1221,7 @@ def _restart_database(self) -> None: """Removes the restoring backup flag and restart the database.""" self.charm.app_peer_data.update({"restoring-backup": "", "restore-to-time": ""}) self.charm.update_config() - self.container.start(self.charm._postgresql_service) + self.container.start(self.charm.postgresql_service) def _retrieve_s3_parameters(self) -> tuple[dict, list[str]]: """Retrieve S3 parameters from the S3 integrator relation.""" diff --git a/src/charm.py b/src/charm.py index 9c35d00c8e..3726e34ea9 100755 --- a/src/charm.py +++ b/src/charm.py @@ -14,6 +14,7 @@ import time from pathlib import Path from typing import Literal, get_args +from urllib.parse import urlparse # First platform-specific import, will fail on wrong architecture try: @@ -88,6 +89,7 @@ APP_SCOPE, BACKUP_USER, DATABASE_DEFAULT_NAME, + DATABASE_PORT, METRICS_PORT, MONITORING_PASSWORD_KEY, MONITORING_USER, @@ -195,10 +197,11 @@ def __init__(self, *args): deleted_label=SECRET_DELETED_LABEL, ) - self._postgresql_service = "postgresql" + self.postgresql_service = "postgresql" self.rotate_logs_service = "rotate-logs" self.pgbackrest_server_service = "pgbackrest server" - self._metrics_service = "metrics_server" + self.ldap_sync_service = "ldap-sync" + self.metrics_service = "metrics_server" self._unit = self.model.unit.name self._name = self.model.app.name self._namespace = self.model.name @@ -601,7 +604,7 @@ def _on_peer_relation_changed(self, event: HookEvent) -> None: # noqa: C901 logger.debug("on_peer_relation_changed early exit: Unit in blocked status") return - services = container.pebble.get_services(names=[self._postgresql_service]) + services = container.pebble.get_services(names=[self.postgresql_service]) if ( (self.is_cluster_restoring_backup 
or self.is_cluster_restoring_to_time) and len(services) > 0 @@ -1496,7 +1499,7 @@ def _on_update_status(self, _) -> None: if not self._on_update_status_early_exit_checks(container): return - services = container.pebble.get_services(names=[self._postgresql_service]) + services = container.pebble.get_services(names=[self.postgresql_service]) if len(services) == 0: # Service has not been added nor started yet, so don't try to check Patroni API. logger.debug("on_update_status early exit: Service has not been added nor started yet") @@ -1509,10 +1512,10 @@ def _on_update_status(self, _) -> None: and services[0].current != ServiceStatus.ACTIVE ): logger.warning( - f"{self._postgresql_service} pebble service inactive, restarting service" + f"{self.postgresql_service} pebble service inactive, restarting service" ) try: - container.restart(self._postgresql_service) + container.restart(self.postgresql_service) except ChangeError: logger.exception("Failed to restart patroni") # If service doesn't recover fast, exit and wait for next hook run to re-check @@ -1609,7 +1612,7 @@ def _handle_processes_failures(self) -> bool: # https://github.com/canonical/pebble/issues/149 is resolved. 
if not self._patroni.member_started and self._patroni.is_database_running: try: - container.restart(self._postgresql_service) + container.restart(self.postgresql_service) logger.info("restarted Patroni because it was not running") except ChangeError: logger.error("failed to restart Patroni after checking that it was not running") @@ -1746,6 +1749,37 @@ def _update_endpoints( endpoints.remove(endpoint) self._peers.data[self.app]["endpoints"] = json.dumps(endpoints) + def _generate_ldap_service(self) -> dict: + """Generate the LDAP service definition.""" + ldap_params = self.get_ldap_parameters() + + ldap_url = urlparse(ldap_params["ldapurl"]) + ldap_host = ldap_url.hostname + ldap_port = ldap_url.port + + ldap_base_dn = ldap_params["ldapbasedn"] + ldap_bind_username = ldap_params["ldapbinddn"] + ldap_bing_password = ldap_params["ldapbindpasswd"] + + return { + "override": "replace", + "summary": "synchronize LDAP users", + "command": "/start-ldap-synchronizer.sh", + "startup": "enabled", + "environment": { + "LDAP_HOST": ldap_host, + "LDAP_PORT": ldap_port, + "LDAP_BASE_DN": ldap_base_dn, + "LDAP_BIND_USERNAME": ldap_bind_username, + "LDAP_BIND_PASSWORD": ldap_bing_password, + "POSTGRES_HOST": "127.0.0.1", + "POSTGRES_PORT": DATABASE_PORT, + "POSTGRES_DATABASE": DATABASE_DEFAULT_NAME, + "POSTGRES_USERNAME": USER, + "POSTGRES_PASSWORD": self.get_secret(APP_SCOPE, USER_PASSWORD_KEY), + }, + } + def _generate_metrics_service(self) -> dict: """Generate the metrics service definition.""" return { @@ -1757,7 +1791,7 @@ def _generate_metrics_service(self) -> dict: if self.get_secret("app", MONITORING_PASSWORD_KEY) is not None else "disabled" ), - "after": [self._postgresql_service], + "after": [self.postgresql_service], "user": WORKLOAD_OS_USER, "group": WORKLOAD_OS_GROUP, "environment": { @@ -1776,7 +1810,7 @@ def _postgresql_layer(self) -> Layer: "summary": "postgresql + patroni layer", "description": "pebble config layer for postgresql + patroni", "services": { - 
self._postgresql_service: { + self.postgresql_service: { "override": "replace", "summary": "entrypoint of the postgresql + patroni image", "command": f"patroni {self._storage_path}/patroni.yml", @@ -1806,7 +1840,13 @@ def _postgresql_layer(self) -> Layer: "user": WORKLOAD_OS_USER, "group": WORKLOAD_OS_GROUP, }, - self._metrics_service: self._generate_metrics_service(), + self.ldap_sync_service: { + "override": "replace", + "summary": "synchronize LDAP users", + "command": "/start-ldap-synchronizer.sh", + "startup": "disabled", + }, + self.metrics_service: self._generate_metrics_service(), self.rotate_logs_service: { "override": "replace", "summary": "rotate logs", @@ -1815,7 +1855,7 @@ def _postgresql_layer(self) -> Layer: }, }, "checks": { - self._postgresql_service: { + self.postgresql_service: { "override": "replace", "level": "ready", "http": { @@ -1918,6 +1958,51 @@ def _restart(self, event: RunWithLock) -> None: # Start or stop the pgBackRest TLS server service when TLS certificate change. 
self.backup.start_stop_pgbackrest_service() + def _restart_metrics_service(self) -> None: + """Restart the monitoring service if the password was rotated.""" + container = self.unit.get_container("postgresql") + current_layer = container.get_plan() + + metrics_service = current_layer.services[self.metrics_service] + data_source_name = metrics_service.environment.get("DATA_SOURCE_NAME", "") + + if metrics_service and not data_source_name.startswith( + f"user={MONITORING_USER} password={self.get_secret('app', MONITORING_PASSWORD_KEY)} " + ): + container.add_layer( + self.metrics_service, + Layer({"services": {self.metrics_service: self._generate_metrics_service()}}), + combine=True, + ) + container.restart(self.metrics_service) + + def _restart_ldap_sync_service(self) -> None: + """Restart the LDAP sync service in case any configuration changed.""" + if not self._patroni.member_started: + logger.debug("Restart LDAP sync early exit: Patroni has not started yet") + return + + container = self.unit.get_container("postgresql") + sync_service = container.pebble.get_services(names=[self.ldap_sync_service]) + + if not self.is_primary and sync_service[0].is_running(): + logger.debug("Stopping LDAP sync service. 
It must only run in the primary") + container.stop(self.ldap_sync_service) + + if self.is_primary and not self.is_ldap_enabled: + logger.debug("Stopping LDAP sync service") + container.stop(self.ldap_sync_service) + return + + if self.is_primary and self.is_ldap_enabled: + container.add_layer( + self.ldap_sync_service, + Layer({"services": {self.ldap_sync_service: self._generate_ldap_service()}}), + combine=True, + ) + logger.debug("Starting LDAP sync service") + container.restart(self.ldap_sync_service) + @property def _is_workload_running(self) -> bool: """Returns whether the workload is running (in an active state).""" @@ -1925,7 +2010,7 @@ def _is_workload_running(self) -> bool: if not container.can_connect(): return False - services = container.pebble.get_services(names=[self._postgresql_service]) + services = container.pebble.get_services(names=[self.postgresql_service]) if len(services) == 0: return False @@ -2009,21 +2094,12 @@ def update_config(self, is_creating_backup: bool = False) -> bool: }) self._handle_postgresql_restart_need() + self._restart_metrics_service() - # Restart the monitoring service if the password was rotated - container = self.unit.get_container("postgresql") - current_layer = container.get_plan() - if ( - metrics_service := current_layer.services[self._metrics_service] - ) and not metrics_service.environment.get("DATA_SOURCE_NAME", "").startswith( - f"user={MONITORING_USER} password={self.get_secret('app', MONITORING_PASSWORD_KEY)} " - ): - container.add_layer( - self._metrics_service, - Layer({"services": {self._metrics_service: self._generate_metrics_service()}}), - combine=True, - ) - container.restart(self._metrics_service) + # TODO: Un-comment + # When PostgreSQL-rock wrapping PostgreSQL-snap versions 162 / 163 gets published + # (i.e. 
snap contains https://github.com/canonical/charmed-postgresql-snap/pull/88) + # self._restart_ldap_sync_service() return True @@ -2102,14 +2178,14 @@ def _update_pebble_layers(self, replan: bool = True) -> None: # Check if there are any changes to layer services. if current_layer.services != new_layer.services: # Changes were made, add the new layer. - container.add_layer(self._postgresql_service, new_layer, combine=True) + container.add_layer(self.postgresql_service, new_layer, combine=True) logging.info("Added updated layer 'postgresql' to Pebble plan") if replan: container.replan() logging.info("Restarted postgresql service") if current_layer.checks != new_layer.checks: # Changes were made, add the new layer. - container.add_layer(self._postgresql_service, new_layer, combine=True) + container.add_layer(self.postgresql_service, new_layer, combine=True) logging.info("Updated health checks") def _unit_name_to_pod_name(self, unit_name: str) -> str: diff --git a/src/relations/async_replication.py b/src/relations/async_replication.py index 1700de12b8..9d34409de0 100644 --- a/src/relations/async_replication.py +++ b/src/relations/async_replication.py @@ -524,7 +524,7 @@ def _on_async_relation_changed(self, event: RelationChangedEvent) -> None: if ( not self.container.can_connect() - or len(self.container.pebble.get_services(names=[self.charm._postgresql_service])) == 0 + or len(self.container.pebble.get_services(names=[self.charm.postgresql_service])) == 0 ): logger.debug("Early exit on_async_relation_changed: container hasn't started yet.") event.defer() @@ -532,7 +532,7 @@ def _on_async_relation_changed(self, event: RelationChangedEvent) -> None: # Update the asynchronous replication configuration and start the database. 
self.charm.update_config() - self.container.start(self.charm._postgresql_service) + self.container.start(self.charm.postgresql_service) self._handle_database_start(event) @@ -694,7 +694,7 @@ def _stop_database(self, event: RelationChangedEvent) -> bool: logger.debug("Early exit on_async_relation_changed: following promoted cluster.") return False - self.container.stop(self.charm._postgresql_service) + self.container.stop(self.charm.postgresql_service) if self.charm.unit.is_leader(): # Remove the "cluster_initialised" flag to avoid self-healing in the update status hook. diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index f8aa050393..f93053c817 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -31,6 +31,7 @@ POSTGRESQL_CONTAINER = "postgresql" POSTGRESQL_SERVICE = "postgresql" +LDAP_SYNC_SERVICE = "ldap-sync" METRICS_SERVICE = "metrics_server" PGBACKREST_SERVER_SERVICE = "pgbackrest server" ROTATE_LOGS_SERVICE = "rotate-logs" @@ -956,6 +957,20 @@ def test_postgresql_layer(harness): "PATRONI_SUPERUSER_USERNAME": "operator", }, }, + PGBACKREST_SERVER_SERVICE: { + "override": "replace", + "summary": "pgBackRest server", + "command": PGBACKREST_SERVER_SERVICE, + "startup": "disabled", + "user": "postgres", + "group": "postgres", + }, + LDAP_SYNC_SERVICE: { + "override": "replace", + "summary": "synchronize LDAP users", + "command": "/start-ldap-synchronizer.sh", + "startup": "disabled", + }, METRICS_SERVICE: { "override": "replace", "summary": "postgresql metrics exporter", @@ -972,14 +987,6 @@ def test_postgresql_layer(harness): ), }, }, - PGBACKREST_SERVER_SERVICE: { - "override": "replace", - "summary": "pgBackRest server", - "command": PGBACKREST_SERVER_SERVICE, - "startup": "disabled", - "user": "postgres", - "group": "postgres", - }, ROTATE_LOGS_SERVICE: { "override": "replace", "summary": "rotate logs", @@ -1655,6 +1662,12 @@ def test_update_config(harness): patch( 
"charm.PostgresqlOperatorCharm._handle_postgresql_restart_need" ) as _handle_postgresql_restart_need, + patch( + "charm.PostgresqlOperatorCharm._restart_metrics_service" + ) as _restart_metrics_service, + patch( + "charm.PostgresqlOperatorCharm._restart_ldap_sync_service" + ) as _restart_ldap_sync_service, patch("charm.Patroni.bulk_update_parameters_controller_by_patroni"), patch("charm.Patroni.member_started", new_callable=PropertyMock) as _member_started, patch( @@ -1662,6 +1675,7 @@ def test_update_config(harness): ) as _is_workload_running, patch("charm.Patroni.render_patroni_yml_file") as _render_patroni_yml_file, patch("charm.PostgreSQLUpgrade") as _upgrade, + patch("charm.PostgresqlOperatorCharm.is_primary", return_value=False), patch( "charm.PostgresqlOperatorCharm.is_tls_enabled", new_callable=PropertyMock ) as _is_tls_enabled, @@ -1696,10 +1710,14 @@ def test_update_config(harness): parameters={"test": "test"}, ) _handle_postgresql_restart_need.assert_called_once() + _restart_metrics_service.assert_called_once() + # _restart_ldap_sync_service.assert_called_once() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) # Test with TLS files available. _handle_postgresql_restart_need.reset_mock() + _restart_metrics_service.reset_mock() + # _restart_ldap_sync_service.reset_mock() harness.update_relation_data( rel_id, harness.charm.unit.name, {"tls": ""} ) # Mock some data in the relation to test that it change. @@ -1721,6 +1739,8 @@ def test_update_config(harness): parameters={"test": "test"}, ) _handle_postgresql_restart_need.assert_called_once() + _restart_metrics_service.assert_called_once() + # _restart_ldap_sync_service.assert_called_once() assert "tls" not in harness.get_relation_data( rel_id, harness.charm.unit.name ) # The "tls" flag is set in handle_postgresql_restart_need. 
@@ -1730,8 +1750,12 @@ def test_update_config(harness): rel_id, harness.charm.unit.name, {"tls": ""} ) # Mock some data in the relation to test that it change. _handle_postgresql_restart_need.reset_mock() + _restart_metrics_service.reset_mock() + # _restart_ldap_sync_service.reset_mock() harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() + _restart_metrics_service.assert_not_called() + # _restart_ldap_sync_service.assert_not_called() assert harness.get_relation_data(rel_id, harness.charm.unit.name)["tls"] == "enabled" # Test with member not started yet. @@ -1740,6 +1764,8 @@ def test_update_config(harness): ) # Mock some data in the relation to test that it doesn't change. harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() + _restart_metrics_service.assert_not_called() + # _restart_ldap_sync_service.assert_not_called() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) From 1be9f1b6a473e97affad1183f85c6149de42aa1f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 17:43:48 +0200 Subject: [PATCH 04/15] Update ghcr.io/canonical/charmed-postgresql:14.17-22.04_edge Docker digest to 5f8d51a (#908) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- metadata.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata.yaml b/metadata.yaml index d7e852e94c..c553484b8a 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -28,7 +28,7 @@ resources: postgresql-image: type: oci-image description: OCI image for PostgreSQL - upstream-source: ghcr.io/canonical/charmed-postgresql@sha256:90b7aa44cfbae32ba060a761f8ec31e35d7cd28bb845b69b3f78a3781153932f # renovate: oci-image tag: 14.17-22.04_edge + upstream-source: ghcr.io/canonical/charmed-postgresql@sha256:5f8d51aeef07a686c4ca4b2c873c890ffed3699b3ed105add5df02681b3e0953 # renovate: oci-image tag: 14.17-22.04_edge peers: 
database-peers: From 8a3957ed03f43443b3704557c99c34396ce323a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Fri, 28 Mar 2025 16:11:58 +0100 Subject: [PATCH 05/15] [DPE-6344] LDAP V: Define mapping option (#900) --- config.yaml | 6 ++ lib/charms/postgresql_k8s/v0/postgresql.py | 69 +++++++++++++++++++++- src/charm.py | 13 ++-- src/config.py | 1 + tests/unit/test_charm.py | 28 +++++---- tests/unit/test_postgresql.py | 48 +++++++++++++++ 6 files changed, 149 insertions(+), 16 deletions(-) diff --git a/config.yaml b/config.yaml index 86da6cbc82..2cf3d4e3cb 100644 --- a/config.yaml +++ b/config.yaml @@ -69,6 +69,12 @@ options: Enable synchronized sequential scans. type: boolean default: true + ldap_map: + description: | + List of mapped LDAP group names to PostgreSQL group names, separated by commas. + The map is used to assign LDAP synchronized users to PostgreSQL authorization groups. + Example: =,= + type: string ldap_search_filter: description: | The LDAP search filter to match users with. diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index 41be98a04f..5975197f1b 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -35,7 +35,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 47 +LIBPATCH = 48 # Groups to distinguish HBA access ACCESS_GROUP_IDENTITY = "identity_access" @@ -773,6 +773,42 @@ def is_restart_pending(self) -> bool: if connection: connection.close() + @staticmethod + def build_postgresql_group_map(group_map: Optional[str]) -> List[Tuple]: + """Build the PostgreSQL authorization group-map. + + Args: + group_map: serialized group-map with the following format: + =, + =, + ... + + Returns: + List of LDAP group to PostgreSQL group tuples. 
+ """ + if group_map is None: + return [] + + group_mappings = group_map.split(",") + group_mappings = (mapping.strip() for mapping in group_mappings) + group_map_list = [] + + for mapping in group_mappings: + mapping_parts = mapping.split("=") + if len(mapping_parts) != 2: + raise ValueError("The group-map must contain value pairs split by commas") + + ldap_group = mapping_parts[0] + psql_group = mapping_parts[1] + + if psql_group in [*ACCESS_GROUPS, PERMISSIONS_GROUP_ADMIN]: + logger.warning(f"Tried to assign LDAP users to forbidden group: {psql_group}") + continue + + group_map_list.append((ldap_group, psql_group)) + + return group_map_list + @staticmethod def build_postgresql_parameters( config_options: dict, available_memory: int, limit_memory: Optional[int] = None @@ -852,3 +888,34 @@ def validate_date_style(self, date_style: str) -> bool: return True except psycopg2.Error: return False + + def validate_group_map(self, group_map: Optional[str]) -> bool: + """Validate the PostgreSQL authorization group-map. + + Args: + group_map: serialized group-map with the following format: + =, + =, + ... + + Returns: + Whether the group-map is valid. 
+ """ + if group_map is None: + return True + + try: + group_map = self.build_postgresql_group_map(group_map) + except ValueError: + return False + + for _, psql_group in group_map: + with self._connect_to_database() as connection, connection.cursor() as cursor: + query = SQL("SELECT TRUE FROM pg_roles WHERE rolname={};") + query = query.format(Literal(psql_group)) + cursor.execute(query) + + if cursor.fetchone() is None: + return False + + return True diff --git a/src/charm.py b/src/charm.py index 3726e34ea9..b3f11cd0bb 100755 --- a/src/charm.py +++ b/src/charm.py @@ -36,6 +36,7 @@ from charms.grafana_k8s.v0.grafana_dashboard import GrafanaDashboardProvider from charms.loki_k8s.v1.loki_push_api import LogProxyConsumer from charms.postgresql_k8s.v0.postgresql import ( + ACCESS_GROUP_IDENTITY, ACCESS_GROUPS, REQUIRED_PLUGINS, PostgreSQL, @@ -1760,6 +1761,7 @@ def _generate_ldap_service(self) -> dict: ldap_base_dn = ldap_params["ldapbasedn"] ldap_bind_username = ldap_params["ldapbinddn"] ldap_bing_password = ldap_params["ldapbindpasswd"] + ldap_group_mappings = self.postgresql.build_postgresql_group_map(self.config.ldap_map) return { "override": "replace", @@ -1772,6 +1774,8 @@ def _generate_ldap_service(self) -> dict: "LDAP_BASE_DN": ldap_base_dn, "LDAP_BIND_USERNAME": ldap_bind_username, "LDAP_BIND_PASSWORD": ldap_bing_password, + "LDAP_GROUP_IDENTITY": json.dumps(ACCESS_GROUP_IDENTITY), + "LDAP_GROUP_MAPPINGS": json.dumps(ldap_group_mappings), "POSTGRES_HOST": "127.0.0.1", "POSTGRES_PORT": DATABASE_PORT, "POSTGRES_DATABASE": DATABASE_DEFAULT_NAME, @@ -2095,11 +2099,7 @@ def update_config(self, is_creating_backup: bool = False) -> bool: self._handle_postgresql_restart_need() self._restart_metrics_service() - - # TODO: Un-comment - # When PostgreSQL-rock wrapping PostgreSQL-snap versions 162 / 163 gets published - # (i.e. 
snap contains https://github.com/canonical/charmed-postgresql-snap/pull/88) - # self._restart_ldap_sync_service() + self._restart_ldap_sync_service() return True @@ -2113,6 +2113,9 @@ def _validate_config_options(self) -> None: "instance_default_text_search_config config option has an invalid value" ) + if not self.postgresql.validate_group_map(self.config.ldap_map): + raise ValueError("ldap_map config option has an invalid value") + if not self.postgresql.validate_date_style(self.config.request_date_style): raise ValueError("request_date_style config option has an invalid value") diff --git a/src/config.py b/src/config.py index 9e169efa95..9932a06d89 100644 --- a/src/config.py +++ b/src/config.py @@ -27,6 +27,7 @@ class CharmConfig(BaseConfigModel): instance_max_locks_per_transaction: int | None instance_password_encryption: str | None instance_synchronize_seqscans: bool | None + ldap_map: str | None ldap_search_filter: str | None logging_client_min_messages: str | None logging_log_connections: bool | None diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index f93053c817..c169047fd0 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -1107,6 +1107,7 @@ def test_validate_config_options(harness): harness.set_can_connect(POSTGRESQL_CONTAINER, True) _charm_lib.return_value.get_postgresql_text_search_configs.return_value = [] _charm_lib.return_value.validate_date_style.return_value = [] + _charm_lib.return_value.validate_group_map.return_value = False _charm_lib.return_value.get_postgresql_timezones.return_value = [] # Test instance_default_text_search_config exception @@ -1124,6 +1125,17 @@ def test_validate_config_options(harness): "pg_catalog.test" ] + # Test ldap_map exception + with harness.hooks_disabled(): + harness.update_config({"ldap_map": "ldap_group="}) + + with tc.assertRaises(ValueError) as e: + harness.charm._validate_config_options() + assert e.msg == "ldap_map config option has an invalid value" + + 
_charm_lib.return_value.validate_group_map.assert_called_once_with("ldap_group=") + _charm_lib.return_value.validate_group_map.return_value = True + # Test request_date_style exception with harness.hooks_disabled(): harness.update_config({"request_date_style": "ISO, TEST"}) @@ -1146,10 +1158,6 @@ def test_validate_config_options(harness): _charm_lib.return_value.get_postgresql_timezones.assert_called_once_with() _charm_lib.return_value.get_postgresql_timezones.return_value = ["TEST_ZONE"] - # - # Secrets - # - def test_scope_obj(harness): assert harness.charm._scope_obj("app") == harness.charm.framework.model.app @@ -1711,13 +1719,13 @@ def test_update_config(harness): ) _handle_postgresql_restart_need.assert_called_once() _restart_metrics_service.assert_called_once() - # _restart_ldap_sync_service.assert_called_once() + _restart_ldap_sync_service.assert_called_once() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) # Test with TLS files available. _handle_postgresql_restart_need.reset_mock() _restart_metrics_service.reset_mock() - # _restart_ldap_sync_service.reset_mock() + _restart_ldap_sync_service.reset_mock() harness.update_relation_data( rel_id, harness.charm.unit.name, {"tls": ""} ) # Mock some data in the relation to test that it change. @@ -1740,7 +1748,7 @@ def test_update_config(harness): ) _handle_postgresql_restart_need.assert_called_once() _restart_metrics_service.assert_called_once() - # _restart_ldap_sync_service.assert_called_once() + _restart_ldap_sync_service.assert_called_once() assert "tls" not in harness.get_relation_data( rel_id, harness.charm.unit.name ) # The "tls" flag is set in handle_postgresql_restart_need. @@ -1751,11 +1759,11 @@ def test_update_config(harness): ) # Mock some data in the relation to test that it change. 
_handle_postgresql_restart_need.reset_mock() _restart_metrics_service.reset_mock() - # _restart_ldap_sync_service.reset_mock() + _restart_ldap_sync_service.reset_mock() harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() _restart_metrics_service.assert_not_called() - # _restart_ldap_sync_service.assert_not_called() + _restart_ldap_sync_service.assert_not_called() assert harness.get_relation_data(rel_id, harness.charm.unit.name)["tls"] == "enabled" # Test with member not started yet. @@ -1765,7 +1773,7 @@ def test_update_config(harness): harness.charm.update_config() _handle_postgresql_restart_need.assert_not_called() _restart_metrics_service.assert_not_called() - # _restart_ldap_sync_service.assert_not_called() + _restart_ldap_sync_service.assert_not_called() assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit.name) diff --git a/tests/unit/test_postgresql.py b/tests/unit/test_postgresql.py index e87f9ba370..d62baec568 100644 --- a/tests/unit/test_postgresql.py +++ b/tests/unit/test_postgresql.py @@ -370,6 +370,27 @@ def test_get_last_archived_wal(harness): execute.assert_called_once_with("SELECT last_archived_wal FROM pg_stat_archiver;") +def test_build_postgresql_group_map(harness): + assert harness.charm.postgresql.build_postgresql_group_map(None) == [] + assert harness.charm.postgresql.build_postgresql_group_map("ldap_group=admin") == [] + + for group in ACCESS_GROUPS: + assert harness.charm.postgresql.build_postgresql_group_map(f"ldap_group={group}") == [] + + mapping_1 = "ldap_group_1=psql_group_1" + mapping_2 = "ldap_group_2=psql_group_2" + + assert harness.charm.postgresql.build_postgresql_group_map(f"{mapping_1},{mapping_2}") == [ + ("ldap_group_1", "psql_group_1"), + ("ldap_group_2", "psql_group_2"), + ] + try: + harness.charm.postgresql.build_postgresql_group_map(f"{mapping_1} {mapping_2}") + assert False + except ValueError: + assert True + + def test_build_postgresql_parameters(harness): # Test when not 
limit is imposed to the available memory. config_options = { @@ -463,3 +484,30 @@ def test_configure_pgaudit(harness): call("ALTER SYSTEM RESET pgaudit.log_parameter;"), call("SELECT pg_reload_conf();"), ]) + + +def test_validate_group_map(harness): + with patch( + "charms.postgresql_k8s.v0.postgresql.PostgreSQL._connect_to_database" + ) as _connect_to_database: + execute = _connect_to_database.return_value.__enter__.return_value.cursor.return_value.__enter__.return_value.execute + _connect_to_database.return_value.__enter__.return_value.cursor.return_value.__enter__.return_value.fetchone.return_value = None + + query = SQL("SELECT TRUE FROM pg_roles WHERE rolname={};") + + assert harness.charm.postgresql.validate_group_map(None) is True + + assert harness.charm.postgresql.validate_group_map("") is False + assert harness.charm.postgresql.validate_group_map("ldap_group=") is False + execute.assert_has_calls([ + call(query.format(Literal(""))), + ]) + + assert harness.charm.postgresql.validate_group_map("ldap_group=admin") is True + assert harness.charm.postgresql.validate_group_map("ldap_group=admin,") is False + assert harness.charm.postgresql.validate_group_map("ldap_group admin") is False + + assert harness.charm.postgresql.validate_group_map("ldap_group=missing_group") is False + execute.assert_has_calls([ + call(query.format(Literal("missing_group"))), + ]) From b85d54e74eba5926b0dc33419a3f9288692ac955 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 04:26:08 +0300 Subject: [PATCH 06/15] Update charmcraft.yaml build tools (#912) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- charmcraft.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index 79e118c1da..11e43862c2 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,7 +27,7 @@ parts: PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user 
--upgrade pip==25.0.1 # renovate: charmcraft-pip-latest # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.9/uv-installer.sh | sh # renovate: charmcraft-uv-latest + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.11/uv-installer.sh | sh # renovate: charmcraft-uv-latest # poetry 2.0.0 requires Python >=3.9 if ! "$HOME/.local/bin/uv" python find '>=3.9' then @@ -35,7 +35,7 @@ parts: # (to reduce the number of Python versions we use) "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 fi - "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.1.1 --with poetry-plugin-export==1.9.0 # renovate: charmcraft-poetry-latest + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.1.2 --with poetry-plugin-export==1.9.0 # renovate: charmcraft-poetry-latest ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry # "charm-poetry" part name is arbitrary; use for consistency From 5041520156a0a5cb005687acd7a137820b0a0b87 Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Fri, 4 Apr 2025 09:23:09 -0300 Subject: [PATCH 07/15] [DPE-6910] Remove duplicate parameters specification (#896) * Remove duplicate parameters specification Signed-off-by: Marcelo Henrique Neppel * Enable config test Signed-off-by: Marcelo Henrique Neppel * Fix linting Signed-off-by: Marcelo Henrique Neppel --------- Signed-off-by: Marcelo Henrique Neppel --- templates/patroni.yml.j2 | 5 ----- tests/integration/test_config.py | 4 +--- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/templates/patroni.yml.j2 b/templates/patroni.yml.j2 index aa416e7126..6bb262bb4b 100644 --- a/templates/patroni.yml.j2 +++ b/templates/patroni.yml.j2 @@ -47,11 +47,6 @@ bootstrap: logging_collector: 'on' wal_level: logical 
shared_preload_libraries: 'timescaledb,pgaudit' - {%- if pg_parameters %} - {%- for key, value in pg_parameters.items() %} - {{key}}: {{value}} - {%- endfor -%} - {% endif %} {%- if restoring_backup %} method: pgbackrest pgbackrest: diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index 229450b715..e1fc86485b 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -106,9 +106,7 @@ async def test_config_parameters(ops_test: OpsTest, charm) -> None: "optimizer_parallel_tuple_cost": ["-1", "0.1"] }, # config option is between 0 and 1.80E+308 {"profile": [test_string, "testing"]}, # config option is one of `testing` or `production` - # { - # "profile_limit_memory": {"127", "128"} - # }, # config option is between 128 and 9999999 + {"profile_limit_memory": ["127", "128"]}, # config option is between 128 and 9999999 { "request_backslash_quote": [test_string, "safe_encoding"] }, # config option is one of `safe_encoding` and `on` and `off` From 699e5c2b2bec8086ee60085f5e614d514b41d7ab Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Tue, 8 Apr 2025 16:42:32 +0300 Subject: [PATCH 08/15] [MISC] Conditional checksum calculation (#901) * Conditional checksum calculation * Converge s3 resource creation * Tactically deployed sleep * Early fail --- poetry.lock | 74 +++++++++++++-------------- pyproject.toml | 2 +- src/backups.py | 52 ++++++++----------- tests/integration/test_backups_gcp.py | 1 + tests/unit/test_backups.py | 33 +++++++++++- 5 files changed, 92 insertions(+), 70 deletions(-) diff --git a/poetry.lock b/poetry.lock index c624c0fbfc..00b6630685 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "allure-pytest" @@ -68,7 +68,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -100,12 +100,12 @@ files = [ ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == 
\"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "backoff" @@ -173,34 +173,34 @@ typecheck = ["mypy"] [[package]] name = "boto3" -version = "1.35.99" +version = "1.37.22" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71"}, - {file = "boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca"}, + {file = "boto3-1.37.22-py3-none-any.whl", hash = "sha256:a14324d5fa5f4fea00c0e3c69754cbd28100f7fe194693eeecf2dc07446cf4ef"}, + {file = "boto3-1.37.22.tar.gz", hash = 
"sha256:78a0ec0aafbf6044104c98ad80b69e6d1c83d8233fda2c2d241029e6c705c510"}, ] [package.dependencies] -botocore = ">=1.35.99,<1.36.0" +botocore = ">=1.37.22,<1.38.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" +s3transfer = ">=0.11.0,<0.12.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.99" +version = "1.37.22" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445"}, - {file = "botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3"}, + {file = "botocore-1.37.22-py3-none-any.whl", hash = "sha256:184db7c9314d13002bc827f511a5140574b5da1acda342d51e093dad6317de98"}, + {file = "botocore-1.37.22.tar.gz", hash = "sha256:b3b26f1a90236bcd17d4092f8c85a256b44e9955a16b633319a2f5678d605e9f"}, ] [package.dependencies] @@ -209,7 +209,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.22.0)"] +crt = ["awscrt (==0.23.8)"] [[package]] name = "cachetools" @@ -433,7 +433,7 @@ files = [ [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] -toml = ["tomli"] +toml = ["tomli ; python_version < \"3.11\""] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] @@ -546,7 +546,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -593,10 +593,10 @@ files = [ cffi = {version = 
">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -630,7 +630,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "exceptiongroup" @@ -661,7 +661,7 @@ files = [ ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "google-auth" @@ -759,7 +759,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -816,7 +816,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker 
(>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -875,7 +875,7 @@ typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -1349,8 +1349,8 @@ cryptography = ">=3.3" pynacl = ">=1.5" [package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != 
\"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] invoke = ["invoke (>=2.0)"] [[package]] @@ -2135,21 +2135,21 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.4" +version = "0.11.4" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" groups = ["main", "integration"] files = [ - {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, - {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, + {file = "s3transfer-0.11.4-py3-none-any.whl", hash = "sha256:ac265fa68318763a03bf2dc4f39d5cbd6a9e178d81cc9483ad27da33637e320d"}, + {file = "s3transfer-0.11.4.tar.gz", hash = "sha256:559f161658e1cf0a911f45940552c696735f5c74e64362e515f333ebed87d679"}, ] [package.dependencies] -botocore = ">=1.33.2,<2.0a.0" +botocore = ">=1.37.4,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] [[package]] name = "six" @@ -2323,7 +2323,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2538,14 +2538,14 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test 
= ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "38d461f9c341e81b1034d0b3d789f39a5b6cb7c5fe83dbf3845e334e8c93d9a2" +content-hash = "824e1bff0e19325e59732dac0846c5232574bd375d65c0e966025e06ca88bad6" diff --git a/pyproject.toml b/pyproject.toml index 7dbb9cebe7..1741674610 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ requires-poetry = ">=2.0.0" [tool.poetry.dependencies] python = "^3.10" ops = "^2.18.1" -boto3 = "^1.35.99" +boto3 = "^1.37.22" pgconnstr = "^1.0.1" requests = "^2.32.3" tenacity = "^9.0.0" diff --git a/src/backups.py b/src/backups.py index d9a1fee86e..ce85cb21ff 100644 --- a/src/backups.py +++ b/src/backups.py @@ -12,7 +12,7 @@ from datetime import datetime, timezone from io import BytesIO -import boto3 as boto3 +import boto3 import botocore from botocore.exceptions import ClientError from charms.data_platform_libs.v0.s3 import CredentialsChangedEvent, S3Requirer @@ -88,6 +88,23 @@ def _tls_ca_chain_filename(self) -> str: return f"{self.charm._storage_path}/pgbackrest-tls-ca-chain.crt" return "" + def _get_s3_session_resource(self, s3_parameters: dict): + session = boto3.session.Session( + aws_access_key_id=s3_parameters["access-key"], + aws_secret_access_key=s3_parameters["secret-key"], + region_name=s3_parameters["region"], + ) + return session.resource( + "s3", + endpoint_url=self._construct_endpoint(s3_parameters), + verify=(self._tls_ca_chain_filename or None), + config=botocore.client.Config( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + ), + ) + def _are_backup_settings_ok(self) -> tuple[bool, str | None]: """Validates whether backup settings are OK.""" if 
self.model.get_relation(self.relation_name) is None: @@ -227,18 +244,9 @@ def _create_bucket_if_not_exists(self) -> None: bucket_name = s3_parameters["bucket"] region = s3_parameters.get("region") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) try: - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = self._get_s3_session_resource(s3_parameters) except ValueError as e: logger.exception("Failed to create a session '%s' in region=%s.", bucket_name, region) raise e @@ -1316,17 +1324,8 @@ def _upload_content_to_s3( processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/") try: logger.info(f"Uploading content to bucket={bucket_name}, path={processed_s3_path}") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = self._get_s3_session_resource(s3_parameters) bucket = s3.Bucket(bucket_name) with tempfile.NamedTemporaryFile() as temp_file: @@ -1359,16 +1358,7 @@ def _read_content_from_s3(self, s3_path: str, s3_parameters: dict) -> str | None processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/") try: logger.info(f"Reading content from bucket={bucket_name}, path={processed_s3_path}") - session = boto3.session.Session( - aws_access_key_id=s3_parameters["access-key"], - aws_secret_access_key=s3_parameters["secret-key"], - region_name=s3_parameters["region"], - ) - s3 = session.resource( - "s3", - endpoint_url=self._construct_endpoint(s3_parameters), - verify=(self._tls_ca_chain_filename or None), - ) + s3 = 
self._get_s3_session_resource(s3_parameters) bucket = s3.Bucket(bucket_name) with BytesIO() as buf: bucket.download_fileobj(processed_s3_path, buf) diff --git a/tests/integration/test_backups_gcp.py b/tests/integration/test_backups_gcp.py index d89068e809..af581fba63 100644 --- a/tests/integration/test_backups_gcp.py +++ b/tests/integration/test_backups_gcp.py @@ -72,6 +72,7 @@ async def test_backup_gcp(ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dic ) +@pytest.mark.abort_on_fail async def test_restore_on_new_cluster( ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dict, dict] ) -> None: diff --git a/tests/unit/test_backups.py b/tests/unit/test_backups.py index f2fd77dfb4..34e70d294d 100644 --- a/tests/unit/test_backups.py +++ b/tests/unit/test_backups.py @@ -331,6 +331,7 @@ def test_create_bucket_if_not_exists(harness, tls_ca_chain_filename): new_callable=PropertyMock(return_value=tls_ca_chain_filename), ) as _tls_ca_chain_filename, patch("charm.PostgreSQLBackups._retrieve_s3_parameters") as _retrieve_s3_parameters, + patch("backups.botocore.client.Config") as _config, ): # Test when there are missing S3 parameters. _retrieve_s3_parameters.return_value = ([], ["bucket", "access-key", "secret-key"]) @@ -357,13 +358,22 @@ def test_create_bucket_if_not_exists(harness, tls_ca_chain_filename): # Test when the bucket already exists. 
_resource.reset_mock() + _config.reset_mock() _resource.side_effect = None head_bucket = _resource.return_value.Bucket.return_value.meta.client.head_bucket create = _resource.return_value.Bucket.return_value.create wait_until_exists = _resource.return_value.Bucket.return_value.wait_until_exists harness.charm.backup._create_bucket_if_not_exists() _resource.assert_called_once_with( - "s3", endpoint_url="test-endpoint", verify=(tls_ca_chain_filename or None) + "s3", + endpoint_url="test-endpoint", + verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) head_bucket.assert_called_once() create.assert_not_called() @@ -2003,6 +2013,7 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): patch("tempfile.NamedTemporaryFile") as _named_temporary_file, patch("charm.PostgreSQLBackups._construct_endpoint") as _construct_endpoint, patch("boto3.session.Session.resource") as _resource, + patch("backups.botocore.client.Config") as _config, patch( "charm.PostgreSQLBackups._tls_ca_chain_filename", new_callable=PropertyMock(return_value=tls_ca_chain_filename), @@ -2030,11 +2041,18 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_not_called() upload_file.assert_not_called() _resource.reset_mock() + _config.reset_mock() _resource.side_effect = None upload_file.side_effect = S3UploadFailedError assert harness.charm.backup._upload_content_to_s3(content, s3_path, s3_parameters) is 
False @@ -2042,12 +2060,19 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_called_once() upload_file.assert_called_once_with("/tmp/test-file", "test-path/test-file.") # Test when the upload succeeds _resource.reset_mock() + _config.reset_mock() _named_temporary_file.reset_mock() upload_file.reset_mock() upload_file.side_effect = None @@ -2056,6 +2081,12 @@ def test_upload_content_to_s3(harness, tls_ca_chain_filename): "s3", endpoint_url="https://s3.us-east-1.amazonaws.com", verify=(tls_ca_chain_filename or None), + config=_config.return_value, + ) + _config.assert_called_once_with( + # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103 + request_checksum_calculation="when_required", + response_checksum_validation="when_required", ) _named_temporary_file.assert_called_once() upload_file.assert_called_once_with("/tmp/test-file", "test-path/test-file.") From afea281ecfcb077855bc62021fc357d0565f1997 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 13:56:28 +0300 Subject: [PATCH 09/15] Update charmcraft.yaml build tools (#916) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- charmcraft.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index 11e43862c2..d22f00d4cb 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,7 +27,7 @@ parts: PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==25.0.1 # renovate: charmcraft-pip-latest # Use uv to install poetry so that a newer version of Python can be 
installed if needed by poetry - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.11/uv-installer.sh | sh # renovate: charmcraft-uv-latest + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.13/uv-installer.sh | sh # renovate: charmcraft-uv-latest # poetry 2.0.0 requires Python >=3.9 if ! "$HOME/.local/bin/uv" python find '>=3.9' then @@ -75,7 +75,7 @@ parts: # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive rustup set profile minimal - rustup default 1.85.1 # renovate: charmcraft-rust-latest + rustup default 1.86.0 # renovate: charmcraft-rust-latest craftctl default # Include requirements.txt in *.charm artifact for easier debugging From fa2f7f8db0f5153c1ed30af4f941b8344bd29679 Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Wed, 9 Apr 2025 08:54:10 -0300 Subject: [PATCH 10/15] Create SECURITY.md (#914) --- SECURITY.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..1881a21566 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,25 @@ +# Security policy + +## What qualifies as a security issue + +Credentials leakage, outdated dependencies with known vulnerabilities, and +other issues that could lead to unprivileged or unauthorized access to the +database or the system. + +## Reporting a vulnerability + +The easiest way to report a security issue is through +[GitHub](https://github.com/canonical/postgresql-k8s-operator/security/advisories/new). See +[Privately reporting a security +vulnerability](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability) +for instructions. 
+ +The repository admins will be notified of the issue and will work with you +to determine whether the issue qualifies as a security issue and, if so, in +which component. We will then handle figuring out a fix, getting a CVE +assigned and coordinating the release of the fix. + +The [Ubuntu Security disclosure and embargo +policy](https://ubuntu.com/security/disclosure-policy) contains more +information about what you can expect when you contact us, and what we +expect from you. From bce4974ec09b44f989359f7887bd53c13f8825b5 Mon Sep 17 00:00:00 2001 From: Andreia Date: Wed, 9 Apr 2025 15:04:49 +0200 Subject: [PATCH 11/15] Update pull_request_template.md (#918) --- .github/pull_request_template.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 05dc4e23ef..9a15830dbd 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,4 +1,7 @@ ## Issue - ## Solution + +## Checklist +- [ ] I have added or updated any relevant documentation. +- [ ] I have cleaned any remaining cloud resources from my accounts. 
From 073110fe56fa12acd0171fb4a9933875bd546b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sinclert=20P=C3=A9rez?= Date: Thu, 10 Apr 2025 09:23:04 +0200 Subject: [PATCH 12/15] [MISC] Add missing connection vars (#920) --- lib/charms/postgresql_k8s/v0/postgresql.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index 5975197f1b..b7eb90908b 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -35,7 +35,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 48 +LIBPATCH = 49 # Groups to distinguish HBA access ACCESS_GROUP_IDENTITY = "identity_access" @@ -626,6 +626,7 @@ def list_access_groups(self) -> Set[str]: Returns: List of PostgreSQL database access groups. """ + connection = None try: with self._connect_to_database() as connection, connection.cursor() as cursor: cursor.execute( @@ -646,6 +647,7 @@ def list_users(self) -> Set[str]: Returns: List of PostgreSQL database users. """ + connection = None try: with self._connect_to_database() as connection, connection.cursor() as cursor: cursor.execute("SELECT usename FROM pg_catalog.pg_user;") @@ -664,6 +666,7 @@ def list_users_from_relation(self) -> Set[str]: Returns: List of PostgreSQL database users. 
""" + connection = None try: with self._connect_to_database() as connection, connection.cursor() as cursor: cursor.execute( From 3ce2aa5c7c8dce168a798ecf71cb2976c118d707 Mon Sep 17 00:00:00 2001 From: Vladimir Izmalkov <48120135+izmalk@users.noreply.github.com> Date: Thu, 10 Apr 2025 09:19:21 +0100 Subject: [PATCH 13/15] Update README file's security section (#921) * Add empty lines after headings * Update security section * Update link to make it clear that's not GitHub issues --- README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e5abc71343..e9fe08081d 100644 --- a/README.md +++ b/README.md @@ -95,18 +95,24 @@ juju relate postgresql-k8s:db finos-waltz-k8s **Note:** The endpoint `db-admin` provides the same legacy interface `pgsql` with PostgreSQL admin-level privileges. It is NOT recommended to use it from security point of view. ## OCI Images + This charm uses pinned and tested version of the [charmed-postgresql](https://github.com/canonical/charmed-postgresql-rock/pkgs/container/charmed-postgresql) rock. ## Security -Security issues in the Charmed PostgreSQL K8s Operator can be reported through [LaunchPad](https://wiki.ubuntu.com/DebuggingSecurity#How%20to%20File). Please do not file GitHub issues about security issues. + +Security issues in the Charmed PostgreSQL K8s Operator can be reported through [private security reports](https://github.com/canonical/postgresql-k8s-operator/security/advisories/new) on GitHub. +For more information, see the [Security policy](SECURITY.md). ## Contributing + Please see the [Juju SDK docs](https://juju.is/docs/sdk) for guidelines on enhancements to this charm following best practice guidelines, and [CONTRIBUTING.md](https://github.com/canonical/postgresql-k8s-operator/blob/main/CONTRIBUTING.md) for developer guidance. 
## License + The Charmed PostgreSQL K8s Operator [is distributed](https://github.com/canonical/postgresql-k8s-operator/blob/main/LICENSE) under the Apache Software License, version 2.0. It installs/operates/depends on [PostgreSQL](https://www.postgresql.org/ftp/source/), which [is licensed](https://www.postgresql.org/about/licence/) under PostgreSQL License, a liberal Open Source license, similar to the BSD or MIT licenses. ## Trademark Notice + PostgreSQL is a trademark or registered trademark of PostgreSQL Global Development Group. Other trademarks are property of their respective owners. From e060a91278a4246d6ccffde346fb6700756e3635 Mon Sep 17 00:00:00 2001 From: Marcelo Henrique Neppel Date: Thu, 10 Apr 2025 14:18:15 -0300 Subject: [PATCH 14/15] [DPE-6218] Static code analysis (#915) * Create actionlint.yaml * Create tiobe_scan.yaml * Add push event to trigger the workflow once * Install libpq-dev * Remove push event * Test adding unit venv to PATH * Test sourcing unit venv * Fix sourcing * Test installing dependencies * Activate virtual environment * Add poetry dependency * Fix TICS auth token variable * Move results to the right folder * Delete .github/actionlint.yaml * Install ops * Install dependencies through poetry * Install extra dependencies * Install dependencies from all groups * Remove unnecessary step * Remove permission * Remove push trigger * Add double quotes to environment variables Signed-off-by: Marcelo Henrique Neppel * Add push trigger * Remove push trigger Signed-off-by: Marcelo Henrique Neppel --------- Signed-off-by: Marcelo Henrique Neppel --- .github/workflows/tiobe_scan.yaml | 44 +++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 .github/workflows/tiobe_scan.yaml diff --git a/.github/workflows/tiobe_scan.yaml b/.github/workflows/tiobe_scan.yaml new file mode 100644 index 0000000000..53d27b6d81 --- /dev/null +++ b/.github/workflows/tiobe_scan.yaml @@ -0,0 +1,44 @@ +# Copyright 2025 Canonical Ltd. 
+# See LICENSE file for licensing details. + +name: Weekly TICS scan + +on: + schedule: + - cron: "0 2 * * 6" # Every Saturday 2:00 AM UTC + workflow_dispatch: + +jobs: + TICS: + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create and activate virtual environment + run: | + python3 -m venv .venv + . .venv/bin/activate + pip install flake8 poetry pylint pytest tox + poetry install --all-groups + echo PATH="$PATH" >> "$GITHUB_ENV" + + - name: Run coverage tests + run: | + tox -e unit + + - name: Move results to the necessary folder for TICS + run: | + mkdir -p .cover + mv coverage.xml .cover/cobertura.xml + + - name: TICS GitHub Action + uses: tiobe/tics-github-action@v3 + with: + mode: qserver + project: postgresql-k8s-operator + viewerUrl: https://canonical.tiobe.com/tiobeweb/TICS/api/cfg?name=default + branchdir: ${{ env.GITHUB_WORKSPACE }} + ticsAuthToken: ${{ secrets.TICSAUTHTOKEN }} + installTics: true + calc: ALL From 7ca1e26997c572d253b7cffb654d7adb1f0deb80 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 15 Apr 2025 10:39:04 -0300 Subject: [PATCH 15/15] Update dependency uv to v0.6.14 (#924) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- charmcraft.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index d22f00d4cb..cd332e9ed5 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -27,7 +27,7 @@ parts: PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==25.0.1 # renovate: charmcraft-pip-latest # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.6.13/uv-installer.sh | sh # renovate: charmcraft-uv-latest + curl --proto '=https' --tlsv1.2 -LsSf 
https://github.com/astral-sh/uv/releases/download/0.6.14/uv-installer.sh | sh # renovate: charmcraft-uv-latest # poetry 2.0.0 requires Python >=3.9 if ! "$HOME/.local/bin/uv" python find '>=3.9' then