4 changes: 2 additions & 2 deletions lib/charms/postgresql_k8s/v0/postgresql.py
@@ -35,7 +35,7 @@

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 50
LIBPATCH = 51

# Groups to distinguish HBA access
ACCESS_GROUP_IDENTITY = "identity_access"
@@ -258,7 +258,7 @@ def create_database(
raise PostgreSQLCreateDatabaseError() from e

# Enable preset extensions
self.enable_disable_extensions({plugin: True for plugin in plugins}, database)
self.enable_disable_extensions(dict.fromkeys(plugins, True), database)

def create_user(
self,
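The only functional change in the library bump above is swapping a dict comprehension for `dict.fromkeys`, presumably to satisfy one of the comprehension-related ruff rules now selected. A minimal sketch (the plugin names are made up, not taken from the charm) showing the two forms build the same mapping:

```python
# Sketch only: hypothetical plugin names.
plugins = ["pg_trgm", "hstore"]

by_comprehension = {plugin: True for plugin in plugins}  # old form
by_fromkeys = dict.fromkeys(plugins, True)               # new form

assert by_comprehension == by_fromkeys  # both map every plugin name to True
```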
29 changes: 29 additions & 0 deletions lib/pyproject.toml
@@ -0,0 +1,29 @@
# Copyright 2021 Canonical Ltd.
# See LICENSE file for licensing details.

# Linting tools configuration
[tool.ruff]
# preview and explicit preview are enabled for CPY001
preview = true
target-version = "py38"
src = [".."]
line-length = 99

[tool.ruff.lint]
explicit-preview-rules = true
select = ["A", "E", "W", "F", "C", "N", "D", "I001", "B", "CPY001", "RUF", "S", "SIM", "UP", "TC"]
# Ignore E501 because using black creates errors with this
# Ignore D107 Missing docstring in __init__
ignore = ["E501", "D107"]

[tool.ruff.lint.flake8-copyright]
# Check for properly formatted copyright header in each file
author = "Canonical Ltd."
notice-rgx = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+"
min-file-size = 1

[tool.ruff.lint.mccabe]
max-complexity = 10

[tool.ruff.lint.pydocstyle]
convention = "google"
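The CPY001 check configured above effectively requires each file to open with a notice matching `notice-rgx`, immediately followed by the configured author. A rough sketch of how that pattern composes (the combined regex is an assumption for illustration, not ruff's exact internal behaviour):

```python
import re

# Assumed composition: notice-rgx from the config above, followed by the author string.
notice = re.compile(r"Copyright\s\d{4}([-,]\d{4})*\s+Canonical Ltd\.")

assert notice.search("# Copyright 2021 Canonical Ltd.")
assert notice.search("# Copyright 2021-2024 Canonical Ltd.")
assert not notice.search("# (c) Canonical Ltd.")  # no year, so this header would be flagged
```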
948 changes: 524 additions & 424 deletions poetry.lock

Large diffs are not rendered by default.

35 changes: 15 additions & 20 deletions pyproject.toml
@@ -7,13 +7,13 @@ requires-poetry = ">=2.0.0"

[tool.poetry.dependencies]
python = "^3.10"
ops = "^2.18.1"
boto3 = "^1.37.22"
ops = "^2.20.0"
boto3 = "^1.37.37"
pgconnstr = "^1.0.1"
requests = "^2.32.3"
tenacity = "^9.0.0"
tenacity = "^9.1.2"
pydantic = "^1.10.21"
jinja2 = "^3.1.5"
jinja2 = "^3.1.6"
lightkube = "^0.17.1"
lightkube-models = "^1.28.1.4"
psycopg2 = "^2.9.10"
@@ -40,7 +40,7 @@ jsonschema = "*"
optional = true

[tool.poetry.group.format.dependencies]
ruff = "^0.9.6"
ruff = "^0.11.6"

[tool.poetry.group.lint]
optional = true
@@ -52,23 +52,23 @@ codespell = "^2.4.1"
optional = true

[tool.poetry.group.unit.dependencies]
coverage = {extras = ["toml"], version = "^7.6.12"}
pytest = "^8.3.4"
coverage = {extras = ["toml"], version = "^7.8.0"}
pytest = "^8.3.5"

[tool.poetry.group.integration]
optional = true

[tool.poetry.group.integration.dependencies]
lightkube = "^0.17.1"
pytest = "^8.3.4"
pytest-operator = "^0.40.0"
pytest = "^8.3.5"
pytest-operator = "^0.42.0"
allure-pytest-default-results = "^0.1.2"
# renovate caret doesn't work: https://github.com/renovatebot/renovate/issues/26940
juju = "<=3.6.1.0"
juju = "<=3.6.1.1"
psycopg2-binary = "^2.9.10"
boto3 = "*"
tenacity = "^9.0.0"
allure-pytest = "^2.13.5"
tenacity = "^9.1.2"
allure-pytest = "^2.14.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
@@ -88,24 +88,19 @@ exclude_lines = [
minversion = "6.0"
log_cli_level = "INFO"
asyncio_mode = "auto"
markers = ["juju2", "juju3", "juju_secrets"]

# Formatting tools configuration
[tool.black]
line-length = 99
target-version = ["py38"]
markers = ["juju3", "juju_secrets"]

# Linting tools configuration
[tool.ruff]
# preview and explicit preview are enabled for CPY001
preview = true
target-version = "py38"
target-version = "py310"
src = ["src", "."]
line-length = 99

[tool.ruff.lint]
explicit-preview-rules = true
select = ["A", "E", "W", "F", "C", "N", "D", "I001", "B", "CPY", "RUF", "S", "SIM", "UP", "TC"]
select = ["A", "E", "W", "F", "C", "N", "D", "I001", "B", "CPY001", "RUF", "S", "SIM", "UP", "TC"]
extend-ignore = [
"D203",
"D204",
2 changes: 1 addition & 1 deletion scripts/rotate_logs.py
@@ -11,7 +11,7 @@ def main():
"""Main loop that calls logrotate."""
while True:
# Command is hardcoded
subprocess.run(["/usr/sbin/logrotate", "-f", "/etc/logrotate.d/pgbackrest.logrotate"]) # noqa: S603
subprocess.run(["/usr/sbin/logrotate", "-f", "/etc/logrotate.d/pgbackrest.logrotate"])

# Wait 60 seconds before executing logrotate again.
time.sleep(60)
2 changes: 1 addition & 1 deletion src/backups.py
@@ -273,7 +273,7 @@ def _create_bucket_if_not_exists(self) -> None:
def _empty_data_files(self) -> None:
"""Empty the PostgreSQL data directory in preparation of backup restore."""
try:
self.container.exec("rm -r /var/lib/postgresql/data/pgdata".split()).wait_output()
self.container.exec(["rm", "-r", "/var/lib/postgresql/data/pgdata"]).wait_output()
except ExecError as e:
# If previous PITR restore was unsuccessful, there is no such directory.
if "No such file or directory" not in e.stderr:
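Several call sites in this PR replace `"...".split()` with an explicit argv list. Both spell the same command here, but the explicit list keeps working if an argument ever contains whitespace and is easier to reason about when reviewing subprocess usage. A small sketch (the second path is hypothetical):

```python
# Identical argv for the hardcoded command used in _empty_data_files.
assert "rm -r /var/lib/postgresql/data/pgdata".split() == [
    "rm",
    "-r",
    "/var/lib/postgresql/data/pgdata",
]

# str.split() silently mangles an argument containing a space.
path = "/var/lib/postgresql/data/pg data"
assert f"rm -r {path}".split() != ["rm", "-r", path]
```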
2 changes: 1 addition & 1 deletion src/relations/db.py
@@ -4,7 +4,7 @@
"""Postgres db and db-admin relation hooks & helpers."""

import logging
from typing import Iterable
from collections.abc import Iterable

from charms.postgresql_k8s.v0.postgresql import (
ACCESS_GROUP_RELATION,
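Importing `Iterable` from `collections.abc` instead of `typing` follows the modern idiom (likely enforced here by the selected ruff UP rules); the `typing` aliases for these ABCs have been deprecated since Python 3.9. A brief sketch, not code from the charm:

```python
from collections.abc import Iterable


def unit_names(units: Iterable[str]) -> list[str]:
    """Return unit names stripped of surrounding whitespace (illustrative helper)."""
    return [unit.strip() for unit in units]


assert unit_names([" postgresql-k8s/0 ", "postgresql-k8s/1"]) == [
    "postgresql-k8s/0",
    "postgresql-k8s/1",
]
```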
22 changes: 13 additions & 9 deletions tests/integration/ha_tests/helpers.py
@@ -352,9 +352,10 @@ async def count_writes(
member_name = f"{member['model']}.{member['name']}"
connection = None
try:
with psycopg2.connect(
connection_string
) as connection, connection.cursor() as cursor:
with (
psycopg2.connect(connection_string) as connection,
connection.cursor() as cursor,
):
cursor.execute("SELECT COUNT(number), MAX(number) FROM continuous_writes;")
results = cursor.fetchone()
count[member_name] = results[0]
@@ -589,9 +590,10 @@ async def is_connection_possible(ops_test: OpsTest, unit_name: str) -> bool:
)
address = await asyncio.wait_for(get_unit_address(ops_test, unit_name), 15)

with db_connect(
host=address, password=password
) as connection, connection.cursor() as cursor:
with (
db_connect(host=address, password=password) as connection,
connection.cursor() as cursor,
):
cursor.execute("SELECT 1;")
success = cursor.fetchone()[0] == 1
connection.close()
@@ -791,9 +793,11 @@ async def is_secondary_up_to_date(ops_test: OpsTest, unit_name: str, expected_wr

try:
for attempt in Retrying(stop=stop_after_delay(60 * 3), wait=wait_fixed(3)):
with attempt, psycopg2.connect(
connection_string
) as connection, connection.cursor() as cursor:
with (
attempt,
psycopg2.connect(connection_string) as connection,
connection.cursor() as cursor,
):
cursor.execute("SELECT COUNT(number), MAX(number) FROM continuous_writes;")
results = cursor.fetchone()
if results[0] != expected_writes or results[1] != expected_writes:
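The recurring change in these test helpers is grouping multiple context managers inside parentheses, a form Python officially supports from 3.10 onward — consistent with the `python = "^3.10"` requirement and the `target-version = "py310"` bump above. A standalone sketch of the new spelling:

```python
from contextlib import nullcontext

# Parenthesized context managers (Python 3.10+): one manager per line, trailing comma allowed.
with (
    nullcontext("connection") as connection,
    nullcontext("cursor") as cursor,
):
    assert (connection, cursor) == ("connection", "cursor")
```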
7 changes: 4 additions & 3 deletions tests/integration/ha_tests/test_replication.py
@@ -125,9 +125,10 @@ async def test_no_data_replicated_between_clusters(
for unit in ops_test.model.applications[new_cluster_app].units:
address = await get_unit_address(ops_test, unit.name)
try:
with db_connect(
host=address, password=password
) as connection, connection.cursor() as cursor:
with (
db_connect(host=address, password=password) as connection,
connection.cursor() as cursor,
):
cursor.execute(
"SELECT EXISTS (SELECT FROM information_schema.tables"
" WHERE table_schema = 'public' AND table_name = 'continuous_writes');"
11 changes: 7 additions & 4 deletions tests/integration/helpers.py
@@ -343,10 +343,13 @@ async def execute_query_on_unit(
The result of the query.
"""
extra_connection_parameters = f" sslmode={sslmode}" if sslmode is not None else ""
with psycopg2.connect(
f"dbname='{database}' user='operator' host='{unit_address}'"
f"password='{password}' connect_timeout=10{extra_connection_parameters}"
) as connection, connection.cursor() as cursor:
with (
psycopg2.connect(
f"dbname='{database}' user='operator' host='{unit_address}'"
f"password='{password}' connect_timeout=10{extra_connection_parameters}"
) as connection,
connection.cursor() as cursor,
):
cursor.execute(query)
output = list(itertools.chain(*cursor.fetchall()))
return output
7 changes: 4 additions & 3 deletions tests/integration/new_relations/test_relations_coherence.py
@@ -131,9 +131,10 @@ async def test_relations(ops_test: OpsTest, charm):
connection = None
should_fail = database == DATABASE_DEFAULT_NAME
try:
with psycopg2.connect(
connection_string
) as connection, connection.cursor() as cursor:
with (
psycopg2.connect(connection_string) as connection,
connection.cursor() as cursor,
):
cursor.execute("SELECT data FROM test;")
data = cursor.fetchone()
assert data[0] == "some data"
7 changes: 4 additions & 3 deletions tests/integration/test_backups_aws.py
@@ -80,9 +80,10 @@ async def test_backup_aws(ops_test: OpsTest, charm, aws_cloud_configs: tuple[dic
stop=stop_after_attempt(10), wait=wait_exponential(multiplier=1, min=2, max=30)
):
with attempt:
with db_connect(
host=address, password=password
) as connection, connection.cursor() as cursor:
with (
db_connect(host=address, password=password) as connection,
connection.cursor() as cursor,
):
cursor.execute(
"SELECT EXISTS (SELECT FROM information_schema.tables"
" WHERE table_schema = 'public' AND table_name = 'backup_table_1');"
18 changes: 12 additions & 6 deletions tests/integration/test_charm.py
@@ -98,9 +98,12 @@ async def test_settings_are_correct(ops_test: OpsTest, unit_id: int):
# Connect to PostgreSQL.
host = await get_unit_address(ops_test, f"{APP_NAME}/{unit_id}")
logger.info("connecting to the database host: %s", host)
with psycopg2.connect(
f"dbname='postgres' user='operator' host='{host}' password='{password}' connect_timeout=1"
) as connection, connection.cursor() as cursor:
with (
psycopg2.connect(
f"dbname='postgres' user='operator' host='{host}' password='{password}' connect_timeout=1"
) as connection,
connection.cursor() as cursor,
):
assert connection.status == psycopg2.extensions.STATUS_READY

# Retrieve settings from PostgreSQL pg_settings table.
@@ -180,9 +183,12 @@ async def test_postgresql_parameters_change(ops_test: OpsTest) -> None:
host = await get_unit_address(ops_test, f"{APP_NAME}/{unit_id}")
logger.info("connecting to the database host: %s", host)
try:
with psycopg2.connect(
f"dbname='postgres' user='operator' host='{host}' password='{password}' connect_timeout=1"
) as connection, connection.cursor() as cursor:
with (
psycopg2.connect(
f"dbname='postgres' user='operator' host='{host}' password='{password}' connect_timeout=1"
) as connection,
connection.cursor() as cursor,
):
settings_names = [
"max_prepared_transactions",
"shared_buffers",
3 changes: 1 addition & 2 deletions tests/integration/test_wrong_arch.py
@@ -5,7 +5,6 @@
import logging
import os
import pathlib
import typing

from pytest_operator.plugin import OpsTest

@@ -16,7 +15,7 @@


async def fetch_charm(
charm_path: typing.Union[str, os.PathLike],
charm_path: str | os.PathLike,
architecture: str,
) -> pathlib.Path:
"""Fetches packed charm from CI runner without checking for architecture."""
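Dropping the `typing` import works because `str | os.PathLike` uses PEP 604 union syntax, available in annotations on Python 3.10+. A minimal sketch of the annotation style (the helper itself is illustrative, not from the tests):

```python
import os
import pathlib


def as_charm_path(charm_path: str | os.PathLike) -> pathlib.Path:
    """Normalize a charm location to a Path."""
    return pathlib.Path(charm_path)


assert as_charm_path("local.charm") == pathlib.Path("local.charm")
```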
12 changes: 6 additions & 6 deletions tests/unit/test_backups.py
@@ -411,7 +411,7 @@ def test_create_bucket_if_not_exists(harness, tls_ca_chain_filename):
def test_empty_data_files(harness):
with patch("ops.model.Container.exec") as _exec:
# Test when the removal of the data files fails.
command = "rm -r /var/lib/postgresql/data/pgdata".split()
command = ["rm", "-r", "/var/lib/postgresql/data/pgdata"]
_exec.side_effect = ExecError(command=command, exit_code=1, stdout="", stderr="fake error")
try:
harness.charm.backup._empty_data_files()
@@ -455,7 +455,7 @@ def test_change_connectivity_to_database(harness):
def test_execute_command(harness):
with patch("ops.model.Container.exec") as _exec:
# Test when the command fails.
command = "rm -r /var/lib/postgresql/data/pgdata".split()
command = ["rm", "-r", "/var/lib/postgresql/data/pgdata"]
_exec.side_effect = ChangeError(
err="fake error",
change=Change(
@@ -951,7 +951,7 @@ def test_is_primary_pgbackrest_service_running(harness):
# Test when the pgBackRest fails to contact the primary server.
_get_primary.return_value = f"{harness.charm.app.name}/1"
_execute_command.side_effect = ExecError(
command="fake command".split(), exit_code=1, stdout="", stderr="fake error"
command=["fake", "command"], exit_code=1, stdout="", stderr="fake error"
)
assert harness.charm.backup._is_primary_pgbackrest_service_running is False
_execute_command.assert_called_once()
@@ -1265,7 +1265,7 @@ def test_on_create_backup_action(harness):
_upload_content_to_s3.return_value = True
_is_primary.return_value = True
_execute_command.side_effect = ExecError(
command="fake command".split(), exit_code=1, stdout="", stderr="fake error"
command=["fake", "command"], exit_code=1, stdout="", stderr="fake error"
)
harness.charm.backup._on_create_backup_action(mock_event)
update_config_calls = [
@@ -1396,7 +1396,7 @@ def test_on_list_backups_action(harness):
mock_event.reset_mock()
_are_backup_settings_ok.return_value = (True, None)
_generate_backup_list_output.side_effect = ExecError(
command="fake command".split(), exit_code=1, stdout="", stderr="fake error"
command=["fake", "command"], exit_code=1, stdout="", stderr="fake error"
)
harness.charm.backup._on_list_backups_action(mock_event)
_generate_backup_list_output.assert_called_once()
@@ -1551,7 +1551,7 @@ def test_on_restore_action(harness):
_restart_database.reset_mock()
_delete.side_effect = None
_empty_data_files.side_effect = ExecError(
command="fake command".split(), exit_code=1, stdout="", stderr="fake error"
command=["fake", "command"], exit_code=1, stdout="", stderr="fake error"
)
harness.charm.backup._on_restore_action(mock_event)
_empty_data_files.assert_called_once()
7 changes: 4 additions & 3 deletions tests/unit/test_rotate_logs.py
@@ -7,9 +7,10 @@


def test_main():
with patch("subprocess.run") as _run, patch(
"time.sleep", side_effect=[None, InterruptedError]
) as _sleep:
with (
patch("subprocess.run") as _run,
patch("time.sleep", side_effect=[None, InterruptedError]) as _sleep,
):
with contextlib.suppress(InterruptedError):
main()
assert _run.call_count == 2