diff --git a/crd-template.yaml b/crd-template.yaml
index b83d4f4..d34815b 100644
--- a/crd-template.yaml
+++ b/crd-template.yaml
@@ -30,7 +30,10 @@ spec:
             description: PhaseSecretSpec defines the desired state of PhaseSecret
             properties:
               phaseApp:
-                description: The Phase application to fetch secrets from.
+                description: The Phase application name to fetch secrets from.
+                type: string
+              phaseAppId:
+                description: The Phase application ID to fetch secrets from.
                 type: string
               phaseAppEnv:
                 description: The environment variable representing the app environment in Phase.
@@ -63,6 +66,27 @@ spec:
                           secretNamespace:
                             description: The namespace where the Kubernetes Secret is located.
                             type: string
+                  kubernetesAuth:
+                    type: object
+                    required:
+                      - phaseServiceAccountId
+                      - serviceAccountRef
+                    properties:
+                      phaseServiceAccountId:
+                        description: The service account ID in Phase.
+                        type: string
+                      serviceAccountRef:
+                        type: object
+                        required:
+                          - name
+                          - namespace
+                        properties:
+                          name:
+                            description: The name of the Kubernetes ServiceAccount.
+                            type: string
+                          namespace:
+                            description: The namespace of the Kubernetes ServiceAccount.
+                            type: string
               phaseHost:
                 description: Phase host to pull secrets from.
                 type: string
@@ -127,7 +151,7 @@ spec:
                 type: integer
                 default: 60
             required:
-              - phaseApp
+              - phaseAppId
               - managedSecretReferences
               - phaseHost
           status:
diff --git a/deploy-operator.yml b/deploy-operator.yml
index 7195aaa..6720ee7 100644
--- a/deploy-operator.yml
+++ b/deploy-operator.yml
@@ -12,6 +12,7 @@ spec:
       labels:
         app: phase-kubernetes-operator
     spec:
+      serviceAccountName: phase-kubernetes-operator
      securityContext:
        runAsNonRoot: true
        runAsUser: 1000
@@ -19,3 +20,10 @@ spec:
        - name: phase-kubernetes-operator
          image: phasehq/kubernetes-operator:latest
          imagePullPolicy: IfNotPresent
+          resources:
+            limits:
+              cpu: 100m
+              memory: 128Mi
+            requests:
+              cpu: 100m
+              memory: 128Mi
\ No newline at end of file
diff --git a/service-account.yaml b/service-account.yaml
new file mode 100644
index 0000000..05518ab
--- /dev/null
+++ b/service-account.yaml
@@ -0,0 +1,6 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: phase-kubernetes-operator
+  namespace: default
\ No newline at end of file
diff --git a/src/cmd/__init__.py b/src/internals/__init__.py
similarity index 100%
rename from src/cmd/__init__.py
rename to src/internals/__init__.py
diff --git a/src/cmd/secrets/__init__.py b/src/internals/secrets/__init__.py
similarity index 100%
rename from src/cmd/secrets/__init__.py
rename to src/internals/secrets/__init__.py
diff --git a/src/cmd/secrets/fetch.py b/src/internals/secrets/fetch.py
similarity index 92%
rename from src/cmd/secrets/fetch.py
rename to src/internals/secrets/fetch.py
index 392f1df..1269086 100644
--- a/src/cmd/secrets/fetch.py
+++ b/src/internals/secrets/fetch.py
@@ -7,7 +7,7 @@
 logging.basicConfig(level=logging.ERROR)
 logger = logging.getLogger(__name__)
 
-def phase_secrets_fetch(phase_service_token=None, phase_service_host=None, env_name=None, phase_app=None, keys=None, tags=None, path='/'):
+def phase_secrets_fetch(phase_service_token=None, phase_service_host=None, env_name=None, phase_app=None, phase_app_id=None, keys=None, tags=None, path='/'):
     """
     Fetch and return secrets based on the provided environment, keys, and tags.
     """
@@ -15,7 +15,7 @@ def phase_secrets_fetch(phase_service_token=None, phase_service_host=None, env_n
     phase = Phase(init=False, pss=phase_service_token, host=phase_service_host)
 
     try:
-        all_secrets = phase.get(env_name=env_name, app_name=phase_app, tag=tags, path=path)
+        all_secrets = phase.get(env_name=env_name, app_name=phase_app, app_id=phase_app_id, tag=tags, path=path)
         resolved_secrets = []
         for secret in all_secrets:
             try:
""" @@ -15,7 +15,7 @@ def phase_secrets_fetch(phase_service_token=None, phase_service_host=None, env_n phase = Phase(init=False, pss=phase_service_token, host=phase_service_host) try: - all_secrets = phase.get(env_name=env_name, app_name=phase_app, tag=tags, path=path) + all_secrets = phase.get(env_name=env_name, app_name=phase_app, app_id=phase_app_id, tag=tags, path=path) resolved_secrets = [] for secret in all_secrets: try: diff --git a/src/main.py b/src/main.py index 0374185..efbb07a 100644 --- a/src/main.py +++ b/src/main.py @@ -1,38 +1,129 @@ import kopf -import base64 -import datetime -import kubernetes.client +from kubernetes import client from kubernetes.client.rest import ApiException -from kubernetes.client import AppsV1Api -from kubernetes.client import CoreV1Api -from cmd.secrets.fetch import phase_secrets_fetch -from utils.const import REDEPLOY_ANNOTATION -from utils.misc import transform_name - - -@kopf.timer('secrets.phase.dev', 'v1alpha1', 'phasesecrets', interval=60) +import base64 +from utils.auth.kubernetes import get_service_account_jwt +from utils.network import authenticate_service_account +from internals.secrets.fetch import phase_secrets_fetch +from typing import Dict +from utils.const import PHASE_CLOUD_API_HOST +from utils.cache import get_cached_token, update_cached_token +from utils.secrets.types import process_secrets +from utils.secrets.write import create_secret +from utils.workload.deploy import redeploy_affected_deployments +import base64 +from kubernetes import client +from typing import Dict +import json +import logging + +logging.basicConfig(level=logging.WARNING) + +def get_phase_service_token(auth_config: Dict, phase_host: str, namespace: str, logger) -> str: + logger.debug(f"Entering get_phase_service_token. Auth config: {json.dumps(auth_config)}") + logger.debug(f"Phase host: {phase_host}, Namespace: {namespace}") + + if 'serviceToken' in auth_config: + logger.debug("Using serviceToken authentication method") + service_token_secret_name = auth_config['serviceToken']['serviceTokenSecretReference']['secretName'] + service_token_secret_namespace = auth_config['serviceToken']['serviceTokenSecretReference'].get('secretNamespace', namespace) + logger.debug(f"Secret name: {service_token_secret_name}, Secret namespace: {service_token_secret_namespace}") + + try: + api_instance = client.CoreV1Api() + api_response = api_instance.read_namespaced_secret(service_token_secret_name, service_token_secret_namespace) + logger.debug("Successfully read the service token secret") + token = base64.b64decode(api_response.data['token']).decode('utf-8') + logger.debug("Successfully decoded the service token") + return token + except Exception as e: + logger.error(f"Error reading or decoding service token: {str(e)}") + raise + + elif 'kubernetesAuth' in auth_config: + logger.debug("Using kubernetesAuth authentication method") + kubernetes_auth = auth_config['kubernetesAuth'] + service_account_id = kubernetes_auth['phaseServiceAccountId'] + logger.debug(f"Service account ID: {service_account_id}") + + cached_token = get_cached_token(service_account_id) + if cached_token: + logger.debug("Using cached token") + return cached_token['token'] + + logger.debug("No valid cached token found. 
+
+@kopf.timer('secrets.phase.dev', 'v1alpha1', 'phasesecrets', interval=10)
 def sync_secrets(spec, name, namespace, logger, **kwargs):
     try:
-        api_instance = CoreV1Api()
+        # Get Config
+        api_instance = client.CoreV1Api()
         managed_secret_references = spec.get('managedSecretReferences', [])
-        phase_host = spec.get('phaseHost', 'https://console.phase.dev')
+        phase_host = spec.get('phaseHost', PHASE_CLOUD_API_HOST)
         phase_app = spec.get('phaseApp')
+        phase_app_id = spec.get('phaseAppId')
         phase_app_env = spec.get('phaseAppEnv', 'production')
         phase_app_env_path = spec.get('phaseAppEnvPath', '/')
         phase_app_env_tag = spec.get('phaseAppEnvTag')
-        service_token_secret_name = spec.get('authentication', {}).get('serviceToken', {}).get('serviceTokenSecretReference', {}).get('secretName', 'phase-service-token')
-
-        api_response = api_instance.read_namespaced_secret(service_token_secret_name, namespace)
-        service_token = base64.b64decode(api_response.data['token']).decode('utf-8')
-        phase_secrets_dict = phase_secrets_fetch(
-            phase_service_token=service_token,
-            phase_service_host=phase_host,
-            phase_app=phase_app,
-            env_name=phase_app_env,
-            tags=phase_app_env_tag,
-            path=phase_app_env_path
-        )
+
+        # Get Phase service token
+        auth_config = spec.get('authentication', {})
+        phase_service_token = get_phase_service_token(auth_config, phase_host, namespace, logger)
+
+        # Fetch secrets from Phase
+        try:
+            phase_secrets_dict = phase_secrets_fetch(
+                phase_service_token=phase_service_token,
+                phase_service_host=phase_host,
+                phase_app=phase_app,
+                phase_app_id=phase_app_id,
+                env_name=phase_app_env,
+                tags=phase_app_env_tag,
+                path=phase_app_env_path
+            )
+        except ValueError as ve:
+            logger.warning(f"Failed to fetch secrets for PhaseSecret {name} in namespace {namespace}: {ve}")
+            return # Exit the function early, but don't crash the operator
 
         secret_changed = False
         for secret_reference in managed_secret_references:
@@ -64,89 +155,6 @@ def sync_secrets(spec, name, namespace, logger, **kwargs):
             logger.info(f"Secrets for PhaseSecret {name} have been successfully updated in namespace {namespace}")
 
     except ApiException as e:
-        logger.error(f"Failed to fetch secrets for PhaseSecret {name} in namespace {namespace}: {e}")
+        logger.error(f"Kubernetes API error when handling PhaseSecret {name} in namespace {namespace}: {e}")
     except Exception as e:
-        logger.error(f"Unexpected error when handling PhaseSecret {name} in namespace {namespace}: {e}")
-
-def redeploy_affected_deployments(namespace, affected_secrets, logger, api_instance):
-    try:
-        apps_v1_api = AppsV1Api(api_instance.api_client)
-        deployments = apps_v1_api.list_namespaced_deployment(namespace)
-        for deployment in deployments.items:
-            if should_redeploy(deployment, affected_secrets):
-                patch_deployment_for_redeploy(deployment, namespace, apps_v1_api, logger)
-    except ApiException as e:
-        logger.error(f"Error listing deployments in namespace {namespace}: {e}")
-
-def should_redeploy(deployment, affected_secrets):
-    if not (deployment.metadata.annotations and REDEPLOY_ANNOTATION in deployment.metadata.annotations):
-        return False
-
-    deployment_secrets = extract_deployment_secrets(deployment)
-    return any(secret in affected_secrets for secret in deployment_secrets)
-
-def extract_deployment_secrets(deployment):
-    secrets = []
-    for container in deployment.spec.template.spec.containers:
-        if container.env_from:
-            for env_from in container.env_from:
-                if env_from.secret_ref:
-                    secrets.append(env_from.secret_ref.name)
-    return secrets
-
-def patch_deployment_for_redeploy(deployment, namespace, apps_v1_api, logger):
-    try:
-        timestamp = datetime.datetime.utcnow().isoformat()
-        patch_body = {
-            "spec": {
-                "template": {
-                    "metadata": {
-                        "annotations": {
-                            "phase.autoredeploy.timestamp": timestamp
-                        }
-                    }
-                }
-            }
-        }
-        apps_v1_api.patch_namespaced_deployment(name=deployment.metadata.name, namespace=namespace, body=patch_body)
-        logger.info(f"Triggered redeployment of {deployment.metadata.name} in namespace {namespace}")
-    except ApiException as e:
-        logger.error(f"Failed to patch deployment {deployment.metadata.name} in namespace {namespace}: {e}")
-
-def process_secrets(fetched_secrets, processors, secret_type, name_transformer):
-    processed_secrets = {}
-    for key, value in fetched_secrets.items():
-        processor_info = processors.get(key, {})
-        processor_type = processor_info.get('type', 'plain')
-        as_name = processor_info.get('asName', key) # Use asName for mapping
-
-        # Check and process the value based on the processor type
-        if processor_type == 'base64':
-            # Assume value is already base64 encoded; do not re-encode
-            processed_value = value
-        elif processor_type == 'plain':
-            # Base64 encode the value
-            processed_value = base64.b64encode(value.encode()).decode()
-        else:
-            # Default to plain processing if processor type is not recognized
-            processed_value = base64.b64encode(value.encode()).decode()
-
-        # Map the processed value to the asName
-        processed_secrets[as_name] = processed_value
-
-    return processed_secrets
-
-def create_secret(api_instance, secret_name, secret_namespace, secret_type, secret_data, logger):
-    try:
-        response = api_instance.create_namespaced_secret(
-            namespace=secret_namespace,
-            body=kubernetes.client.V1Secret(
-                metadata=kubernetes.client.V1ObjectMeta(name=secret_name),
-                type=secret_type,
-                data=secret_data
-            )
-        )
-        if response:
-            logger.info(f"Created secret {secret_name} in namespace {secret_namespace}")
-    except ApiException as e:
-        logger.error(f"Failed to create secret {secret_name} in namespace {secret_namespace}: {e}")
+        logger.error(f"Unexpected error when handling PhaseSecret {name} in namespace {namespace}: {e}", exc_info=True)
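
For reference, a minimal sketch of the two `authentication` shapes that get_phase_service_token() accepts, using only the keys the function reads above; the Secret name, namespace, and service-account ID are placeholders:

# Service-token authentication: the operator reads the Phase token out of a Kubernetes Secret.
service_token_auth = {
    "serviceToken": {
        "serviceTokenSecretReference": {
            "secretName": "phase-service-token",  # placeholder Secret name
            "secretNamespace": "default"          # optional; falls back to the PhaseSecret's namespace
        }
    }
}

# Kubernetes authentication: the operator exchanges its pod service-account JWT for a Phase token.
kubernetes_auth = {
    "kubernetesAuth": {
        "phaseServiceAccountId": "<phase-service-account-id>",  # placeholder Phase service account ID
        "serviceAccountRef": {"name": "phase-kubernetes-operator", "namespace": "default"}
    }
}

# Either shape can be passed straight through, e.g.:
# token = get_phase_service_token(kubernetes_auth, "https://console.phase.dev", "default", logger)
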
diff --git a/src/utils/auth/__init__.py b/src/utils/auth/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/auth/kubernetes.py b/src/utils/auth/kubernetes.py
new file mode 100644
index 0000000..089d1e4
--- /dev/null
+++ b/src/utils/auth/kubernetes.py
@@ -0,0 +1,22 @@
+import os
+
+def get_service_account_jwt() -> str:
+    """
+    Retrieve the JWT token from the current service account in a Kubernetes environment.
+
+    This function reads the JWT token from the filesystem, which is the standard
+    location in a Kubernetes pod.
+
+    Returns:
+        str: The service account JWT token
+
+    Raises:
+        Exception: If the token cannot be retrieved from the filesystem
+    """
+    token_path = '/var/run/secrets/kubernetes.io/serviceaccount/token'
+
+    if os.path.exists(token_path):
+        with open(token_path, 'r') as token_file:
+            return token_file.read().strip()
+
+    raise Exception("Service account JWT token not found. Are you running inside a Kubernetes pod?")
diff --git a/src/utils/cache.py b/src/utils/cache.py
new file mode 100644
index 0000000..1828e06
--- /dev/null
+++ b/src/utils/cache.py
@@ -0,0 +1,24 @@
+import os
+import json
+import time
+from typing import Dict, Optional
+
+from utils.const import TOKEN_CACHE_DIR
+
+def get_cache_file_path(service_account_id: str) -> str:
+    return os.path.join(TOKEN_CACHE_DIR, f"{service_account_id}.json")
+
+def get_cached_token(service_account_id: str) -> Optional[Dict]:
+    cache_file = get_cache_file_path(service_account_id)
+    if os.path.exists(cache_file):
+        with open(cache_file, 'r') as f:
+            cache = json.load(f)
+        if time.time() < cache.get('expiry', 0):
+            return cache
+    return None
+
+def update_cached_token(service_account_id: str, token_data: Dict):
+    os.makedirs(TOKEN_CACHE_DIR, exist_ok=True)
+    cache_file = get_cache_file_path(service_account_id)
+    with open(cache_file, 'w') as f:
+        json.dump(token_data, f)
diff --git a/src/utils/const.py b/src/utils/const.py
index aa1a8a5..2380be6 100644
--- a/src/utils/const.py
+++ b/src/utils/const.py
@@ -39,6 +39,9 @@
 
 PHASE_CLOUD_API_HOST = "https://console.phase.dev"
 
+
+TOKEN_CACHE_DIR = "/tmp/phase-token-cache"
+
 pss_user_pattern = re.compile(r"^pss_user:v(\d+):([a-fA-F0-9]{64}):([a-fA-F0-9]{64}):([a-fA-F0-9]{64}):([a-fA-F0-9]{64})$")
 pss_service_pattern = re.compile(r"^pss_service:v(\d+):([a-fA-F0-9]{64}):([a-fA-F0-9]{64}):([a-fA-F0-9]{64}):([a-fA-F0-9]{64})$")
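
A quick sketch of the token-cache round trip these helpers implement; the service-account ID and token values are placeholders, and `expiry` is a Unix timestamp compared against time.time():

import time

from utils.cache import get_cached_token, update_cached_token

update_cached_token("svc-acct-123", {      # placeholder service-account ID
    "token": "<phase-service-token>",      # placeholder token value
    "id": "<token-id>",                    # placeholder token ID
    "expiry": time.time() + 300,           # entry is considered valid until this moment
})

cached = get_cached_token("svc-acct-123")  # returns the dict while unexpired, otherwise None
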
diff --git a/src/utils/misc.py b/src/utils/misc.py
index e3d156f..7309e9a 100644
--- a/src/utils/misc.py
+++ b/src/utils/misc.py
@@ -84,59 +84,54 @@ def get_default_user_id(all_ids=False) -> Union[str, List[str]]:
 
     return config_data.get("default-user")
 
-def phase_get_context(user_data, app_name=None, env_name=None):
+def phase_get_context(user_data, app_name=None, app_id=None, env_name=None):
     """
-    Get the context (ID, name, and publicKey) for a specified application and environment or the default application and environment.
-
+    Get the context (ID, name, and publicKey) for a specified application and environment.
+
     Parameters:
     - user_data (dict): The user data from the API response.
     - app_name (str, optional): The name (or partial name) of the desired application.
     - env_name (str, optional): The name (or partial name) of the desired environment.
-
+    - app_id (str, optional): The ID of the desired application.
+
     Returns:
     - tuple: A tuple containing the application's name, application's ID, environment's name, environment's ID, and publicKey.
 
     Raises:
     - ValueError: If no matching application or environment is found.
     """
-    app_id = None
-
-    # 1. Get the default app_id and env_name from .phase.json if available
-    try:
-        with open(PHASE_ENV_CONFIG, 'r') as f:
-            config_data = json.load(f)
-            default_env_name = config_data.get("defaultEnv")
-            app_id = config_data.get("appId")
-    except FileNotFoundError:
-        default_env_name = "Development"
-        app_id = None
-
-    # 2. If env_name isn't explicitly provided, use the default
-    env_name = env_name or default_env_name
-
-    # 3. Match the application using app_id or find the best match for partial app_name
+    # 1. Set default environment name if not provided
+    env_name = env_name or "Development"
+
+    # 2. Find the application
     try:
-        if app_name:
+        if app_id:
+            application = next((app for app in user_data["apps"] if app["id"] == app_id), None)
+            if not application:
+                raise ValueError(f"No application found with the ID '{app_id}'.")
+        elif app_name:
             matching_apps = [app for app in user_data["apps"] if app_name.lower() in app["name"].lower()]
             if not matching_apps:
                 raise ValueError(f"🔍 No application found with the name '{app_name}'.")
             # Sort matching applications by the length of their names, shorter names are likely to be more specific matches
             matching_apps.sort(key=lambda app: len(app["name"]))
             application = matching_apps[0]
-        elif app_id:
-            application = next((app for app in user_data["apps"] if app["id"] == app_id), None)
-            if not application:
-                raise ValueError(f"No application found with the ID '{app_id}'.")
         else:
-            raise ValueError("🤔 No application context provided. Please specify the 'phaseApp' field in your Kubernetes Custom Resource spec.")
+            raise ValueError("🤔 No application context provided. Please specify either 'app_name' or 'app_id'.")
 
-        # 4. Attempt to match environment with the exact name or a name that contains the env_name string
+        # 3. Find the environment
         environment = next((env for env in application["environment_keys"] if env_name.lower() in env["environment"]["name"].lower()), None)
-        if not environment:
-            raise ValueError(f"⚠️ Warning: The environment '{env_name}' either does not exist or you do not have access to it.")
-
-        # Return application name, application ID, environment name, environment ID, and public key
-        return (application["name"], application["id"], environment["environment"]["name"], environment["environment"]["id"], environment["identity_key"])
+        if not environment:
+            raise ValueError(f"⚠️ Warning: The environment '{env_name}' either does not exist or you do not have access to it.")
+
+        # 4. Return application name, application ID, environment name, environment ID, and public key
+        return (
+            application["name"],                  # Phase app name
+            application["id"],                    # Phase app id
+            environment["environment"]["name"],   # Phase app env name
+            environment["environment"]["id"],     # Phase app env id
+            environment["identity_key"]           # Public key
+        )
     except StopIteration:
         raise ValueError("🔍 Application or environment not found.")
diff --git a/src/utils/network.py b/src/utils/network.py
index ab67822..f910aa2 100644
--- a/src/utils/network.py
+++ b/src/utils/network.py
@@ -302,3 +302,46 @@ def delete_phase_secrets(token_type: str, app_token: str, environment_id: str, s
         handle_connection_error(e)
     except requests.exceptions.SSLError as e:
         handle_ssl_error(e)
+
+
+def authenticate_service_account(host: str, auth_token: str, service_account_id: str, auth_type: str) -> Dict:
+    """
+    Authenticate with the Phase Service Account using third-party authentication providers.
+
+    :param host: Phase host
+    :param auth_token: The authentication token (e.g., JWT, IAM token)
+    :param service_account_id: The ID of the service account
+    :param auth_type: The type of authentication (e.g., 'kubernetes', 'aws', 'gcp', 'azure', 'oidc')
+    :return: The response from the Phase API or None if authentication fails
+    """
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {auth_token}"
+    }
+
+    auth_data = {
+        "service_account_id": service_account_id,
+        "auth_type": auth_type,
+    }
+
+    if auth_type == "kubernetes":
+        auth_data["jwt"] = auth_token
+    # TODO: Implement other auth providers
+    # elif auth_type in ["aws", "gcp", "azure", "oidc"]:
+    #     auth_data["token"] = auth_token
+    else:
+        raise ValueError(f"Unsupported auth_type: {auth_type}")
+
+    URL = f"{host}/api/v1/service-accounts/{auth_type}/auth"
+
+    try:
+        response = requests.post(URL, headers=headers, json=auth_data, verify=VERIFY_SSL)
+        handle_request_errors(response)
+        return response.json()
+    except requests.exceptions.ConnectionError as e:
+        handle_connection_error(e)
+    except requests.exceptions.SSLError as e:
+        handle_ssl_error(e)
+    except Exception as e:
+        print(f"Error authenticating with Phase API: {e}")
+        return None
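
A short usage sketch of authenticate_service_account() as the operator calls it for Kubernetes auth; the host and service-account ID are placeholders, and the response fields shown are the ones the operator reads:

from utils.auth.kubernetes import get_service_account_jwt
from utils.network import authenticate_service_account

jwt_token = get_service_account_jwt()  # pod JWT from /var/run/secrets/kubernetes.io/serviceaccount/token
response = authenticate_service_account(
    host="https://console.phase.dev",                  # or a self-hosted Phase instance
    auth_token=jwt_token,
    service_account_id="<phase-service-account-id>",   # placeholder
    auth_type="kubernetes",
)
if response and "token" in response:
    phase_token = response["token"]  # the operator also reads 'id' and 'expiry' and caches them
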
diff --git a/src/utils/phase_io.py b/src/utils/phase_io.py
index cb84bc6..78c4c43 100644
--- a/src/utils/phase_io.py
+++ b/src/utils/phase_io.py
@@ -146,7 +146,7 @@ def create(self, key_value_pairs: List[Tuple[str, str]], env_name: str, app_name
 
         return create_phase_secrets(self._token_type, self._app_secret.app_token, env_id, secrets, self._api_host)
 
-    def get(self, env_name: str, keys: List[str] = None, app_name: str = None, tag: str = None, path: str = '/') -> List[Dict]:
+    def get(self, env_name: str, keys: List[str] = None, app_name: str = None, app_id: str = None, tag: str = None, path: str = '/') -> List[Dict]:
         """
         Get secrets from Phase KMS based on key and environment, with support for personal overrides,
         optional tag matching, decrypting comments, and now including path support and key digest optimization.
@@ -167,7 +167,7 @@ def get(self, env_name: str, keys: List[str] = None, app_name: str = None, tag:
             raise ValueError(f"Request failed with status code {user_response.status_code}: {user_response.text}")
 
         user_data = user_response.json()
-        app_name, app_id, env_name, env_id, public_key = phase_get_context(user_data, app_name=app_name, env_name=env_name)
+        app_name, app_id, env_name, env_id, public_key = phase_get_context(user_data, app_name=app_name, app_id=app_id, env_name=env_name)
 
         environment_key = self._find_matching_environment_key(user_data, env_id)
         if environment_key is None:
diff --git a/src/utils/secrets/__init__.py b/src/utils/secrets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/secrets/types.py b/src/utils/secrets/types.py
new file mode 100644
index 0000000..cb3e926
--- /dev/null
+++ b/src/utils/secrets/types.py
@@ -0,0 +1,25 @@
+import base64
+
+def process_secrets(fetched_secrets, processors, secret_type, name_transformer):
+    processed_secrets = {}
+    for key, value in fetched_secrets.items():
+        processor_info = processors.get(key, {})
+        processor_type = processor_info.get('type', 'plain')
+        as_name = processor_info.get('asName', key) # Use asName for mapping
+
+        # Check and process the value based on the processor type
+        if processor_type == 'base64':
+            # Assume value is already base64 encoded; do not re-encode
+            processed_value = value
+        elif processor_type == 'plain':
+            # Base64 encode the value
+            processed_value = base64.b64encode(value.encode()).decode()
+        else:
+            # Default to plain processing if processor type is not recognized
+            processed_value = base64.b64encode(value.encode()).decode()
+
+        # Map the processed value to the asName
+        processed_secrets[as_name] = processed_value
+
+    return processed_secrets
+
diff --git a/src/utils/secrets/write.py b/src/utils/secrets/write.py
new file mode 100644
index 0000000..6f84bd4
--- /dev/null
+++ b/src/utils/secrets/write.py
@@ -0,0 +1,18 @@
+import kubernetes
+from kubernetes.client.rest import ApiException
+import logging
+
+def create_secret(api_instance, secret_name, secret_namespace, secret_type, secret_data, logger):
+    try:
+        response = api_instance.create_namespaced_secret(
+            namespace=secret_namespace,
+            body=kubernetes.client.V1Secret(
+                metadata=kubernetes.client.V1ObjectMeta(name=secret_name),
+                type=secret_type,
+                data=secret_data
+            )
+        )
+        if response:
+            logger.info(f"Created secret {secret_name} in namespace {secret_namespace}")
+    except ApiException as e:
+        logger.error(f"Failed to create secret {secret_name} in namespace {secret_namespace}: {e}")
\ No newline at end of file
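
A small illustration of how process_secrets() handles the two processor types and the asName mapping; the secret keys and processor configuration are invented for this example, and the spec field that supplies them is outside this hunk:

import base64

from utils.secrets.types import process_secrets

fetched = {
    "DB_PASSWORD": "hunter2",                                  # plain value, will be base64-encoded
    "TLS_CERT": base64.b64encode(b"-----CERT-----").decode(),  # already base64, passed through as-is
}
processors = {
    "DB_PASSWORD": {"type": "plain", "asName": "DATABASE_PASSWORD"},
    "TLS_CERT": {"type": "base64"},
}

data = process_secrets(fetched, processors, "Opaque", None)
# data now maps "DATABASE_PASSWORD" and "TLS_CERT" to base64 strings, ready for a V1Secret's data field.
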
diff --git a/src/utils/workload/__init__.py b/src/utils/workload/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/workload/deploy.py b/src/utils/workload/deploy.py
new file mode 100644
index 0000000..3135c21
--- /dev/null
+++ b/src/utils/workload/deploy.py
@@ -0,0 +1,49 @@
+import datetime
+from kubernetes.client.rest import ApiException
+from kubernetes.client import AppsV1Api
+from utils.const import REDEPLOY_ANNOTATION
+
+def redeploy_affected_deployments(namespace, affected_secrets, logger, api_instance):
+    try:
+        apps_v1_api = AppsV1Api(api_instance.api_client)
+        deployments = apps_v1_api.list_namespaced_deployment(namespace)
+        for deployment in deployments.items:
+            if should_redeploy(deployment, affected_secrets):
+                patch_deployment_for_redeploy(deployment, namespace, apps_v1_api, logger)
+    except ApiException as e:
+        logger.error(f"Error listing deployments in namespace {namespace}: {e}")
+
+def should_redeploy(deployment, affected_secrets):
+    if not (deployment.metadata.annotations and REDEPLOY_ANNOTATION in deployment.metadata.annotations):
+        return False
+
+    deployment_secrets = extract_deployment_secrets(deployment)
+    return any(secret in affected_secrets for secret in deployment_secrets)
+
+def extract_deployment_secrets(deployment):
+    secrets = []
+    for container in deployment.spec.template.spec.containers:
+        if container.env_from:
+            for env_from in container.env_from:
+                if env_from.secret_ref:
+                    secrets.append(env_from.secret_ref.name)
+    return secrets
+
+def patch_deployment_for_redeploy(deployment, namespace, apps_v1_api, logger):
+    try:
+        timestamp = datetime.datetime.utcnow().isoformat()
+        patch_body = {
+            "spec": {
+                "template": {
+                    "metadata": {
+                        "annotations": {
+                            "phase.autoredeploy.timestamp": timestamp
+                        }
+                    }
+                }
+            }
+        }
+        apps_v1_api.patch_namespaced_deployment(name=deployment.metadata.name, namespace=namespace, body=patch_body)
+        logger.info(f"Triggered redeployment of {deployment.metadata.name} in namespace {namespace}")
+    except ApiException as e:
+        logger.error(f"Failed to patch deployment {deployment.metadata.name} in namespace {namespace}: {e}")
\ No newline at end of file
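
Finally, a sketch of a PhaseSecret object that exercises the new phaseAppId and kubernetesAuth fields, created here via the Kubernetes dynamic custom-objects API; all IDs and names are placeholders, and the field names follow the CRD fragments above:

from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() when running inside a pod

phase_secret = {
    "apiVersion": "secrets.phase.dev/v1alpha1",
    "kind": "PhaseSecret",
    "metadata": {"name": "example-phase-secret", "namespace": "default"},
    "spec": {
        "phaseAppId": "<phase-app-id>",            # placeholder; now required instead of phaseApp
        "phaseAppEnv": "production",
        "phaseHost": "https://console.phase.dev",
        "authentication": {
            "kubernetesAuth": {
                "phaseServiceAccountId": "<phase-service-account-id>",  # placeholder
                "serviceAccountRef": {"name": "phase-kubernetes-operator", "namespace": "default"},
            }
        },
        "managedSecretReferences": [
            {"secretName": "my-app-secret", "secretNamespace": "default"}
        ],
    },
}

client.CustomObjectsApi().create_namespaced_custom_object(
    group="secrets.phase.dev",
    version="v1alpha1",
    namespace="default",
    plural="phasesecrets",
    body=phase_secret,
)
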