From 21205d9009f32a32c711151fd34711e9deb294d2 Mon Sep 17 00:00:00 2001 From: Adam Dyess Date: Fri, 20 Sep 2024 11:16:33 -0500 Subject: [PATCH 1/2] Chore: Fix pylint issues with new version of pylint --- .../k8s/lib/charms/k8s/v0/k8sd_api_manager.py | 10 +- charms/worker/k8s/requirements.txt | 2 +- .../worker/k8s/scripts/update_alert_rules.py | 23 ++--- .../worker/k8s/scripts/update_dashboards.py | 8 +- charms/worker/k8s/src/charm.py | 21 ++-- charms/worker/k8s/src/containerd.py | 7 +- charms/worker/k8s/src/snap.py | 7 +- charms/worker/k8s/src/token_distributor.py | 98 ++++++++++++++----- .../worker/k8s/tests/unit/test_containerd.py | 2 +- .../k8s/tests/unit/test_k8sd_api_manager.py | 24 ++--- .../worker/k8s/tests/unit/test_reschedule.py | 22 ++--- charms/worker/k8s/tests/unit/test_snap.py | 2 +- pyproject.toml | 2 +- tox.ini | 9 +- 14 files changed, 143 insertions(+), 94 deletions(-) diff --git a/charms/worker/k8s/lib/charms/k8s/v0/k8sd_api_manager.py b/charms/worker/k8s/lib/charms/k8s/v0/k8sd_api_manager.py index afa79e1d..b3c43cf8 100644 --- a/charms/worker/k8s/lib/charms/k8s/v0/k8sd_api_manager.py +++ b/charms/worker/k8s/lib/charms/k8s/v0/k8sd_api_manager.py @@ -54,12 +54,12 @@ class ErrorCodes(enum.Enum): """Enumerate the response codes from the k8s api. 
Attributes: - StatusNodeUnavailable: returned when the node isn't in the cluster - StatusNodeInUse: returned when the node is in the cluster already + STATUS_NODE_UNAVAILABLE: returned when the node isn't in the cluster + STATUS_NODE_IN_USE: returned when the node is in the cluster already """ - StatusNodeUnavailable = 520 - StatusNodeInUse = 521 + STATUS_NODE_UNAVAILABLE = 520 + STATUS_NODE_IN_USE = 521 class K8sdAPIManagerError(Exception): @@ -320,7 +320,7 @@ class UserFacingClusterConfig(BaseModel): cloud_provider: Optional[str] = Field(None, alias="cloud-provider") -class UserFacingDatastoreConfig(BaseModel, allow_population_by_field_name=True): # type: ignore[call-arg] +class UserFacingDatastoreConfig(BaseModel, allow_population_by_field_name=True): """Aggregated configuration model for the user-facing datastore aspects of a cluster. Attributes: diff --git a/charms/worker/k8s/requirements.txt b/charms/worker/k8s/requirements.txt index 606c5d24..cd25f846 100644 --- a/charms/worker/k8s/requirements.txt +++ b/charms/worker/k8s/requirements.txt @@ -3,7 +3,7 @@ charm-lib-interface-external-cloud-provider @ git+https://github.com/charmed-kub charm-lib-node-base @ git+https://github.com/charmed-kubernetes/layer-kubernetes-node-base@9b212854e768f13c26cc907bed51444e97e51b50#subdirectory=ops charm-lib-reconciler @ git+https://github.com/charmed-kubernetes/charm-lib-reconciler@f818cc30d1a22be43ffdfecf7fbd9c3fd2967502 cosl==0.0.26 -ops==2.16.0 +ops==2.16.1 pydantic==1.10.18 PyYAML==6.0.2 tomli == 2.0.1 diff --git a/charms/worker/k8s/scripts/update_alert_rules.py b/charms/worker/k8s/scripts/update_alert_rules.py index bf00ae55..36fb77a8 100644 --- a/charms/worker/k8s/scripts/update_alert_rules.py +++ b/charms/worker/k8s/scripts/update_alert_rules.py @@ -52,11 +52,11 @@ def download_and_process_rule_files(temp_dir: Path): source_url = f"{SOURCE}/{file}" temp_file = temp_dir / file try: - logging.info(f"Downloading {source_url}") + logging.info("Downloading %s", source_url) 
with urlopen(source_url) as response: # nosec process_rule_file(response, temp_file, source_url) except URLError as e: - logging.error(f"Error fetching dashboard data: {e}") + logging.error("Error fetching dashboard data: %s", e) def process_rule_file(contents, destination_file: Path, source_url: str): @@ -88,7 +88,7 @@ def process_rule_file(contents, destination_file: Path, source_url: str): with destination_file.open("w") as file: file.write("\n".join(data)) - logging.info(f"Processed and saved to {destination_file}") + logging.info("Processed and saved to %s", destination_file) def move_processed_files(temp_dir): @@ -100,13 +100,13 @@ def move_processed_files(temp_dir): for temp_file in temp_dir.iterdir(): final_path = ALERT_RULES_DIR / temp_file.name shutil.move(str(temp_file), str(final_path)) - logging.info(f"Moved {temp_file.name} to {final_path}") + logging.info("Moved %s to %s", temp_file.name, final_path) def apply_patches(): """Apply patches to the downloaded and processed rule files.""" for patch_file in PATCHES_DIR.glob("*"): - logging.info(f"Applying patch {patch_file}") + logging.info("Applying patch %s", patch_file) subprocess.check_call(["/usr/bin/git", "apply", str(patch_file)]) @@ -114,14 +114,11 @@ def main(): """Fetch, process, and save AlertManager rules.""" with TemporaryDirectory() as temp_dir: temp_path = Path(temp_dir) - try: - download_and_process_rule_files(temp_path) - shutil.rmtree(ALERT_RULES_DIR, ignore_errors=True) - ALERT_RULES_DIR.mkdir(parents=True) - move_processed_files(temp_path) - apply_patches() - except Exception as e: - logging.error("An error occurred: %s" % e) + download_and_process_rule_files(temp_path) + shutil.rmtree(ALERT_RULES_DIR, ignore_errors=True) + ALERT_RULES_DIR.mkdir(parents=True) + move_processed_files(temp_path) + apply_patches() if __name__ == "__main__": diff --git a/charms/worker/k8s/scripts/update_dashboards.py b/charms/worker/k8s/scripts/update_dashboards.py index 337c4811..81e04eec 100644 --- 
a/charms/worker/k8s/scripts/update_dashboards.py +++ b/charms/worker/k8s/scripts/update_dashboards.py @@ -60,7 +60,7 @@ def fetch_dashboards(source_url: str): with urlopen(source_url) as response: # nosec return yaml.safe_load(response.read()) except URLError as e: - logging.error(f"Error fetching dashboard data: {e}") + logging.error("Error fetching dashboard data: %s", e) return None @@ -101,7 +101,7 @@ def prepare_dashboard(json_value): return json.dumps(json_value, indent=4).replace("$datasource", "$prometheusds") -def save_dashboard_to_file(name, data): +def save_dashboard_to_file(name, data: str): """Save the prepared dashboard JSON to a file. Args: @@ -109,9 +109,9 @@ def save_dashboard_to_file(name, data): data (str): file content to write """ filepath = os.path.join(TARGET_DIR, name) - with open(filepath, "w") as f: + with open(filepath, "w", encoding="utf-8") as f: f.write(data) - logging.info(f"Dashboard '{name}' saved to {filepath}") + logging.info("Dashboard '%s' saved to %s", name, filepath) def main(): diff --git a/charms/worker/k8s/src/charm.py b/charms/worker/k8s/src/charm.py index 51d078f7..05e0d389 100755 --- a/charms/worker/k8s/src/charm.py +++ b/charms/worker/k8s/src/charm.py @@ -95,6 +95,7 @@ def _cluster_departing_unit(event: ops.EventBase) -> Union[Literal[False], ops.U isinstance(event, ops.RelationDepartedEvent) and event.relation.name in ["k8s-cluster", "cluster"] and event.departing_unit + or False ) @@ -131,7 +132,7 @@ def __init__(self, *args): self.labeller = LabelMaker( self, kubeconfig_path=self._internal_kubeconfig, kubectl=KUBECTL_PATH ) - self._stored.set_default(is_dying=False, cluster_name="") + self._stored.set_default(is_dying=False, cluster_name=str()) self.cos_agent = COSAgentProvider( self, @@ -206,8 +207,7 @@ def get_cluster_name(self) -> str: Returns: the cluster name. 
""" - unit, node = self.unit.name, self.get_node_name() - if not self._stored.cluster_name: + if self._stored.cluster_name == "": if self.lead_control_plane and self.api_manager.is_cluster_bootstrapped(): # TODO: replace with API call once available from the snap self._stored.cluster_name = str(uuid.uuid4()) @@ -221,8 +221,9 @@ def get_cluster_name(self) -> str: ): self._stored.cluster_name = self.collector.cluster_name(relation, True) + unit, node = self.unit.name, self.get_node_name() log.info("%s(%s) current cluster-name=%s", unit, node, self._stored.cluster_name) - return self._stored.cluster_name + return str(self._stored.cluster_name) def get_node_name(self) -> str: """Return the lowercase hostname. @@ -282,8 +283,8 @@ def _bootstrap_k8s_snap(self): bootstrap_config = BootstrapConfig() self._configure_datastore(bootstrap_config) self._configure_cloud_provider(bootstrap_config) - bootstrap_config.service_cidr = self.config["service-cidr"] - bootstrap_config.control_plane_taints = self.config["register-with-taints"].split() + bootstrap_config.service_cidr = str(self.config["service-cidr"]) + bootstrap_config.control_plane_taints = str(self.config["register-with-taints"]).split() bootstrap_config.extra_sans = [_get_public_address()] status.add(ops.MaintenanceStatus("Bootstrapping Cluster")) @@ -309,7 +310,7 @@ def _config_containerd_registries(self): registries, config = [], "" containerd_relation = self.model.get_relation("containerd") if self.is_control_plane: - config = self.config["containerd_custom_registries"] + config = str(self.config["containerd_custom_registries"]) registries = containerd.parse_registries(config) else: registries = containerd.recover(containerd_relation) @@ -399,7 +400,7 @@ def _revoke_cluster_tokens(self, event: ops.EventBase): """ log.info("Garbage collect cluster tokens") to_remove = None - if self._stored.is_dying: + if self._stored.is_dying is True: to_remove = self.unit elif unit := _cluster_departing_unit(event): to_remove = 
unit @@ -650,7 +651,7 @@ def _evaluate_removal(self, event: ops.EventBase) -> bool: Returns: True if being removed, otherwise False """ - if self._stored.is_dying: + if self._stored.is_dying is True: pass elif (unit := _cluster_departing_unit(event)) and unit == self.unit: # Juju says I am being removed @@ -677,7 +678,7 @@ def _evaluate_removal(self, event: ops.EventBase) -> bool: elif isinstance(event, (ops.RemoveEvent, ops.StopEvent)): # If I myself am dying, its me! self._stored.is_dying = True - return self._stored.is_dying + return bool(self._stored.is_dying) def _is_node_present(self, node: str = "") -> bool: """Determine if node is in the kubernetes cluster. diff --git a/charms/worker/k8s/src/containerd.py b/charms/worker/k8s/src/containerd.py index ee49c155..2f2fbb7c 100644 --- a/charms/worker/k8s/src/containerd.py +++ b/charms/worker/k8s/src/containerd.py @@ -112,7 +112,7 @@ def __init__(self, *args, **kwargs): args: construction positional arguments kwargs: construction keyword arguments """ - super(Registry, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if not self.host and (host := self.url.host): self.host = host @@ -177,11 +177,10 @@ def auth_config_header(self) -> Dict[str, Any]: log.debug("Configure basic auth for %s (%s)", self.url, self.host) v = self.username.get_secret_value() + ":" + self.password.get_secret_value() return {"Authorization": "Basic " + base64.b64encode(v.encode()).decode()} - elif self.identitytoken: + if self.identitytoken: log.debug("Configure bearer token for %s (%s)", self.url, self.host) return {"Authorization": "Bearer " + self.identitytoken.get_secret_value()} - else: - return {} + return {} @property def hosts_toml(self) -> Dict[str, Any]: diff --git a/charms/worker/k8s/src/snap.py b/charms/worker/k8s/src/snap.py index cfadbc26..575a9915 100644 --- a/charms/worker/k8s/src/snap.py +++ b/charms/worker/k8s/src/snap.py @@ -104,8 +104,7 @@ def _parse_management_arguments() -> List[SnapArgument]: if not 
revision.exists(): raise snap_lib.SnapError(f"Failed to find file={revision}") try: - with revision.open() as f: - body = yaml.safe_load(f) + body = yaml.safe_load(revision.read_text(encoding="utf-8")) except yaml.YAMLError as e: log.error("Failed to load file=%s, %s", revision, e) raise snap_lib.SnapError(f"Failed to load file={revision}") @@ -117,7 +116,9 @@ def _parse_management_arguments() -> List[SnapArgument]: raise snap_lib.SnapError(f"Failed to find revision for arch={arch}") try: - args: List[SnapArgument] = [parse_obj_as(SnapArgument, arg) for arg in arch_spec] # type: ignore[arg-type] + args: List[SnapArgument] = [ + parse_obj_as(SnapArgument, arg) for arg in arch_spec # type: ignore[arg-type] + ] except ValidationError as e: log.warning("Failed to validate args=%s (%s)", arch_spec, e) raise snap_lib.SnapError("Failed to validate snap args") diff --git a/charms/worker/k8s/src/token_distributor.py b/charms/worker/k8s/src/token_distributor.py index 538bfd57..f3cf8ed9 100644 --- a/charms/worker/k8s/src/token_distributor.py +++ b/charms/worker/k8s/src/token_distributor.py @@ -6,7 +6,7 @@ import contextlib import logging from enum import Enum, auto -from typing import Dict, Optional +from typing import Dict, Optional, Protocol import charms.contextual_status as status import ops @@ -23,6 +23,24 @@ SECRET_ID = "{0}-secret-id" # nosec +class K8sCharm(Protocol): + """Typing for the K8sCharm. + + Attributes: + app (ops.Application): The application object. + model (ops.Model): The model object. + unit (ops.Unit): The unit object. + """ + + app: ops.Application + model: ops.Model + unit: ops.Unit + + def get_cluster_name(self) -> str: + """Get the cluster name.""" + ... # pylint: disable=unnecessary-ellipsis + + class TokenStrategy(Enum): """Enumeration defining strategy for token creation. @@ -49,11 +67,10 @@ class ClusterTokenType(Enum): NONE = "" -class TokenManager: +class TokenManager(Protocol): """Protocol for managing tokens. 
Attributes: - api_manager (K8sdAPIManager): An K8sdAPIManager object for interacting with k8sd API. allocator_needs_tokens: Whether or not the allocator needs tokens. strategy: The strategy for token creation. revoke_on_join: Whether or not to revoke a token once it's joined. @@ -69,7 +86,6 @@ def __init__(self, api_manager: K8sdAPIManager): Args: api_manager (K8sdAPIManager): An K8sdAPIManager object for interacting with k8sd API. """ - self.api_manager = api_manager def create(self, name: str, token_type: ClusterTokenType) -> SecretStr: """Create a token. @@ -77,11 +93,8 @@ def create(self, name: str, token_type: ClusterTokenType) -> SecretStr: Args: name (str): The name of the node. token_type (ClusterTokenType): The type of cluster token. - - Returns: - SecretStr: The created token. """ - return SecretStr("") + ... # pylint: disable=unnecessary-ellipsis def revoke(self, name: str, ignore_errors: bool): """Remove a token. @@ -90,10 +103,10 @@ def revoke(self, name: str, ignore_errors: bool): name (str): The name of the node. ignore_errors (bool): Whether or not errors can be ignored """ - ... + ... # pylint: disable=unnecessary-ellipsis -class ClusterTokenManager(TokenManager): +class ClusterTokenManager: """Class for managing cluster tokens. Attributes: @@ -106,6 +119,14 @@ class ClusterTokenManager(TokenManager): strategy: TokenStrategy = TokenStrategy.CLUSTER revoke_on_join = True + def __init__(self, api_manager: K8sdAPIManager): + """Initialize a ClusterTokenManager instance. + + Args: + api_manager (K8sdAPIManager): An K8sdAPIManager object for interacting with k8sd API. + """ + self.api_manager = api_manager + def create(self, name: str, token_type: ClusterTokenType) -> SecretStr: """Create a cluster token. 
@@ -132,7 +153,7 @@ def revoke(self, name: str, ignore_errors: bool): try: self.api_manager.remove_node(name) except (K8sdConnectionError, InvalidResponseError) as e: - if ignore_errors or e.code == ErrorCodes.StatusNodeUnavailable: + if ignore_errors or e.code == ErrorCodes.STATUS_NODE_UNAVAILABLE: # Let's just ignore some of these expected errors: # "Remote end closed connection without response" # "Failed to check if node is control-plane" @@ -142,7 +163,7 @@ def revoke(self, name: str, ignore_errors: bool): raise -class CosTokenManager(TokenManager): +class CosTokenManager: """Class for managing COS tokens. Attributes: @@ -155,27 +176,42 @@ class CosTokenManager(TokenManager): strategy: TokenStrategy = TokenStrategy.COS revoke_on_join = False - def create(self, name: str, _) -> SecretStr: + def __init__(self, api_manager: K8sdAPIManager): + """Initialize a CosTokenManager instance. + + Args: + api_manager (K8sdAPIManager): An K8sdAPIManager object for interacting with k8sd API. + """ + self.api_manager = api_manager + + def create(self, name: str, token_type: ClusterTokenType) -> SecretStr: """Create a COS token. Args: name (str): The name of the node. + token_type (ClusterTokenType): The type of cluster token (ignored) Returns: SecretStr: The created COS token. """ + # pylint: disable=unused-argument return self.api_manager.request_auth_token( username=f"system:cos:{name}", groups=["system:cos"] ) - def revoke(self, _: str, __): - """Remove a COS token intentionally left unimplemented.""" + def revoke(self, name: str, ignore_errors: bool): + """Remove a COS token intentionally left unimplemented. + + Args: + name (str): The name of the node. + ignore_errors (bool): Whether or not errors can be ignored + """ class TokenCollector: """Helper class for collecting tokens for units in a relation.""" - def __init__(self, charm: ops.CharmBase, node_name: str): + def __init__(self, charm: K8sCharm, node_name: str): """Initialize a TokenCollector instance. 
Args: @@ -207,7 +243,7 @@ def cluster_name(self, relation: ops.Relation, local: bool) -> str: Returns: the recovered cluster name from existing relations """ - cluster_name = "" + cluster_name: Optional[str] = "" if not local: # recover_cluster_name values = set() @@ -260,7 +296,7 @@ def recover_token(self, relation: ops.Relation): class TokenDistributor: """Helper class for distributing tokens to units in a relation.""" - def __init__(self, charm: ops.CharmBase, node_name: str, api_manager: K8sdAPIManager): + def __init__(self, charm: "K8sCharm", node_name: str, api_manager: K8sdAPIManager): """Initialize a TokenDistributor instance. Args: @@ -479,19 +515,15 @@ def revoke_tokens( ",".join(sorted(u.name for u in remaining)), ) - tokenizer = self.token_strategies.get(token_strategy) - assert tokenizer, f"Invalid token_strategy: {token_strategy}" # nosec - status.add( ops.MaintenanceStatus( f"Revoking {token_type.name.title()} {token_strategy.name.title()} tokens" ) ) - local_cluster = self.charm.get_cluster_name() + for unit in remove: if node_state := app_databag.get(unit): state, node = node_state.split("-", 1) - remote_cluster = (data := relation.data.get(unit)) and data.get("joined") log.info( "Revoking %s, token for %s unit=%s:%s %s node=%s", token_strategy.name.title(), @@ -503,7 +535,23 @@ def revoke_tokens( ) ignore_errors = self.node_name == node # removing myself ignore_errors |= state == "pending" # on pending tokens - ignore_errors |= local_cluster != remote_cluster # if cluster doesn't match - tokenizer.revoke(node, ignore_errors) + # if cluster doesn't match + ignore_errors |= self.charm.get_cluster_name() != joined_cluster(relation, unit) + self.token_strategies[token_strategy].revoke(node, ignore_errors) self.drop_node(relation, unit) self._revoke_juju_secret(relation, unit) + + +def joined_cluster(relation: ops.Relation, unit: ops.Unit) -> Optional[str]: + """Get the cluster name from this relation. 
+ + Args: + relation (ops.Relation): The relation to check + unit (ops.Unit): The unit to check + + Returns: + the recovered cluster name from existing relations + """ + if data := relation.data.get(unit): + return data.get("joined") + return None diff --git a/charms/worker/k8s/tests/unit/test_containerd.py b/charms/worker/k8s/tests/unit/test_containerd.py index 6ec07306..07f126ed 100644 --- a/charms/worker/k8s/tests/unit/test_containerd.py +++ b/charms/worker/k8s/tests/unit/test_containerd.py @@ -4,8 +4,8 @@ # Learn more about testing at: https://juju.is/docs/sdk/testing """Unit tests containerd module.""" -import unittest.mock as mock from os import getgid, getuid +from unittest import mock import containerd import pytest diff --git a/charms/worker/k8s/tests/unit/test_k8sd_api_manager.py b/charms/worker/k8s/tests/unit/test_k8sd_api_manager.py index a37a3137..7c2c25f9 100644 --- a/charms/worker/k8s/tests/unit/test_k8sd_api_manager.py +++ b/charms/worker/k8s/tests/unit/test_k8sd_api_manager.py @@ -9,7 +9,7 @@ from socket import AF_UNIX, SOCK_STREAM from unittest.mock import MagicMock, call, patch -from lib.charms.k8s.v0.k8sd_api_manager import ( +from charms.k8s.v0.k8sd_api_manager import ( AuthTokenResponse, BaseRequestModel, BootstrapConfig, @@ -143,7 +143,7 @@ def setUp(self): self.mock_factory = MagicMock() self.api_manager = K8sdAPIManager(factory=self.mock_factory) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_check_k8sd_in_error(self, mock_send_request): """Test bootstrap.""" not_found = InvalidResponseError(code=404, msg="Not Found") @@ -160,7 +160,7 @@ def test_check_k8sd_in_error(self, mock_send_request): ) assert ie.exception.code == 504 - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_check_k8sd_not_found(self, mock_send_request): """Test 
bootstrap.""" not_found = InvalidResponseError(code=404, msg="Not Found") @@ -176,7 +176,7 @@ def test_check_k8sd_not_found(self, mock_send_request): ] ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_check_k8sd_ready(self, mock_send_request): """Test bootstrap.""" not_found = InvalidResponseError(code=404, msg="Not Found") @@ -192,7 +192,7 @@ def test_check_k8sd_ready(self, mock_send_request): ] ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_bootstrap_k8s_snap(self, mock_send_request): """Test bootstrap.""" mock_send_request.return_value = EmptyResponse(status_code=200, type="test", error_code=0) @@ -254,7 +254,7 @@ def test_create_join_token_success(self): headers={"Content-Type": "application/json"}, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_create_join_token(self, mock_send_request): """Test successful request for join token.""" mock_send_request.return_value = CreateJoinTokenResponse( @@ -269,7 +269,7 @@ def test_create_join_token(self, mock_send_request): {"name": "test-node", "worker": False}, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_create_join_token_worker(self, mock_send_request): """Test successful request for join token for a worker.""" mock_send_request.return_value = CreateJoinTokenResponse( @@ -284,7 +284,7 @@ def test_create_join_token_worker(self, mock_send_request): {"name": "test-node", "worker": True}, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_join_cluster_control_plane(self, 
mock_send_request): """Test successfully joining a cluster.""" mock_send_request.return_value = EmptyResponse(status_code=200, type="test", error_code=0) @@ -306,7 +306,7 @@ def test_join_cluster_control_plane(self, mock_send_request): }, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_join_cluster_worker(self, mock_send_request): """Test successfully joining a cluster.""" mock_send_request.return_value = EmptyResponse(status_code=200, type="test", error_code=0) @@ -322,7 +322,7 @@ def test_join_cluster_worker(self, mock_send_request): {"name": "test-node", "address": "127.0.0.1:6400", "token": "test-token"}, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_remove_node(self, mock_send_request): """Test successfully removing a node from the cluster.""" mock_send_request.return_value = EmptyResponse(status_code=200, type="test", error_code=0) @@ -332,7 +332,7 @@ def test_remove_node(self, mock_send_request): "/1.0/k8sd/cluster/remove", "POST", EmptyResponse, {"name": "test-node", "force": True} ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_update_cluster_config(self, mock_send_request): """Test successfully updating cluster config.""" mock_send_request.return_value = EmptyResponse(status_code=200, type="test", error_code=0) @@ -364,7 +364,7 @@ def test_update_cluster_config(self, mock_send_request): }, ) - @patch("lib.charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") + @patch("charms.k8s.v0.k8sd_api_manager.K8sdAPIManager._send_request") def test_request_auth_token(self, mock_send_request): """Test successfully requesting auth-token.""" test_token = "foo:mytoken" diff --git a/charms/worker/k8s/tests/unit/test_reschedule.py 
b/charms/worker/k8s/tests/unit/test_reschedule.py index 412d5c51..752e1604 100644 --- a/charms/worker/k8s/tests/unit/test_reschedule.py +++ b/charms/worker/k8s/tests/unit/test_reschedule.py @@ -6,8 +6,8 @@ """Unit tests reschedule module.""" import subprocess -import unittest.mock as mock from pathlib import Path +from unittest import mock import ops import pytest @@ -45,21 +45,21 @@ def test_event_timer_properties(harness): @mock.patch("reschedule._execute_command") -def test_event_timer_is_active(exec, harness): +def test_event_timer_is_active(_exec, harness): """Test Event Timer is_active.""" - exec.return_value = 0 + _exec.return_value = 0 et = reschedule.EventTimer(harness.charm.unit) assert et.is_active("update-status") - exec.return_value = -1 + _exec.return_value = -1 et = reschedule.EventTimer(harness.charm.unit) assert not et.is_active("update-status") - exec.side_effect = subprocess.CalledProcessError(-1, []) + _exec.side_effect = subprocess.CalledProcessError(-1, []) et = reschedule.EventTimer(harness.charm.unit) with pytest.raises(reschedule.TimerStatusError): - not et.is_active("update-status") + assert not et.is_active("update-status") @mock.patch("reschedule.Path.write_text") @@ -79,9 +79,9 @@ def test_render_event_template(write_text, harness): @mock.patch("reschedule._execute_command") -def test_event_timer_ensure(exec, harness): +def test_event_timer_ensure(_exec, harness): """Test ensure on event timer.""" - exec.return_value = ("", 0) + _exec.return_value = ("", 0) et = reschedule.EventTimer(harness.charm.unit) with mock.patch.object(et, "_render_event_template") as rendered: @@ -104,9 +104,9 @@ def test_event_timer_ensure(exec, harness): @mock.patch("reschedule._execute_command") -def test_event_timer_disable(exec, harness): +def test_event_timer_disable(_exec, harness): """Test disable on event timer.""" - exec.return_value = ("", 0) + _exec.return_value = ("", 0) et = reschedule.EventTimer(harness.charm.unit) et.disable("update-status") @@ 
-115,7 +115,7 @@ def test_event_timer_disable(exec, harness): mock.call([sysctl, "stop", "k8s.update-status.timer"], check_exit=False), mock.call([sysctl, "disable", "k8s.update-status.timer"], check_exit=False), ] - exec.assert_has_calls(calls) + _exec.assert_has_calls(calls) @mock.patch("reschedule.EventTimer.ensure") diff --git a/charms/worker/k8s/tests/unit/test_snap.py b/charms/worker/k8s/tests/unit/test_snap.py index a3bbc472..19c1e4eb 100644 --- a/charms/worker/k8s/tests/unit/test_snap.py +++ b/charms/worker/k8s/tests/unit/test_snap.py @@ -8,8 +8,8 @@ import io import subprocess -import unittest.mock as mock from pathlib import Path +from unittest import mock import pytest import snap diff --git a/pyproject.toml b/pyproject.toml index 530a4a95..6be5fa48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ plugins = "pydantic.mypy" [tool.pylint] # Ignore too-few-public-methods due to pydantic models # Ignore no-self-argument due to pydantic validators -disable = "wrong-import-order,redefined-outer-name,too-many-instance-attributes,too-few-public-methods,no-self-argument,fixme,parse-error" +disable = "wrong-import-order,redefined-outer-name,too-many-instance-attributes,too-few-public-methods,no-self-argument,fixme,protected-access" # Ignore Pydantic check: https://github.com/pydantic/pydantic/issues/1961 extension-pkg-whitelist = "pydantic" # wokeignore:rule=whitelist diff --git a/tox.ini b/tox.ini index 0e1e1946..098fdd12 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,8 @@ skip_missing_interpreters = True envlist = lint, unit, static, coverage-report [vars] -src_path = {toxinidir}/charms/worker/k8s/src {toxinidir}/charms/worker/k8s/lib/charms/k8s {toxinidir}/charms/worker/k8s/scripts +lib_path = {toxinidir}/charms/worker/k8s/lib +src_path = {toxinidir}/charms/worker/k8s/src {[vars]lib_path}/charms/k8s {toxinidir}/charms/worker/k8s/scripts tst_path = {toxinidir}/tests/ {toxinidir}/charms/worker/k8s/tests/ all_path = {[vars]tst_path} 
{[vars]src_path} @@ -32,8 +33,9 @@ commands = [testenv:lint] -allowlist_externals = tox description = Check code against coding style standards +setenv = + PYTHONPATH = {envdir}{:}{[vars]lib_path} deps = black codespell @@ -50,7 +52,8 @@ deps = pyproject-flake8 types-PyYAML types-requests - -r test_requirements.txt + -r{toxinidir}/test_requirements.txt + -r{toxinidir}/charms/worker/requirements.txt commands = pydocstyle {[vars]src_path} codespell {toxinidir} --skip {toxinidir}/.git --skip {toxinidir}/.tox \ From 46534d057c6b444a491d16cfc4da74b79dc8d8df Mon Sep 17 00:00:00 2001 From: Adam Dyess Date: Fri, 20 Sep 2024 12:27:56 -0500 Subject: [PATCH 2/2] Completely remove the Base Class|Protocol `TokenManager` --- charms/worker/k8s/src/token_distributor.py | 45 ++-------------------- 1 file changed, 3 insertions(+), 42 deletions(-) diff --git a/charms/worker/k8s/src/token_distributor.py b/charms/worker/k8s/src/token_distributor.py index f3cf8ed9..170cf75b 100644 --- a/charms/worker/k8s/src/token_distributor.py +++ b/charms/worker/k8s/src/token_distributor.py @@ -6,7 +6,7 @@ import contextlib import logging from enum import Enum, auto -from typing import Dict, Optional, Protocol +from typing import Dict, Optional, Protocol, Union import charms.contextual_status as status import ops @@ -67,45 +67,6 @@ class ClusterTokenType(Enum): NONE = "" 
- """ - - def create(self, name: str, token_type: ClusterTokenType) -> SecretStr: - """Create a token. - - Args: - name (str): The name of the node. - token_type (ClusterTokenType): The type of cluster token. - """ - ... # pylint: disable=unnecessary-ellipsis - - def revoke(self, name: str, ignore_errors: bool): - """Remove a token. - - Args: - name (str): The name of the node. - ignore_errors (bool): Whether or not errors can be ignored - """ - ... # pylint: disable=unnecessary-ellipsis - - class ClusterTokenManager: """Class for managing cluster tokens. @@ -296,7 +257,7 @@ def recover_token(self, relation: ops.Relation): class TokenDistributor: """Helper class for distributing tokens to units in a relation.""" - def __init__(self, charm: "K8sCharm", node_name: str, api_manager: K8sdAPIManager): + def __init__(self, charm: K8sCharm, node_name: str, api_manager: K8sdAPIManager): """Initialize a TokenDistributor instance. Args: @@ -306,7 +267,7 @@ def __init__(self, charm: "K8sCharm", node_name: str, api_manager: K8sdAPIManage """ self.charm = charm self.node_name = node_name - self.token_strategies: Dict[TokenStrategy, TokenManager] = { + self.token_strategies: Dict[TokenStrategy, Union[ClusterTokenManager, CosTokenManager]] = { TokenStrategy.CLUSTER: ClusterTokenManager(api_manager), TokenStrategy.COS: CosTokenManager(api_manager), }