diff --git a/.gitignore b/.gitignore index bc3ddab6b..ce60120aa 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,5 @@ cov.xml .venv cov_html htmlcov -.pytest_cache \ No newline at end of file +.pytest_cache +local_blob_storage diff --git a/docker-compose-addons.yml b/docker-compose-addons.yml index f4c8b47da..0cd834c1f 100644 --- a/docker-compose-addons.yml +++ b/docker-compose-addons.yml @@ -151,7 +151,8 @@ services: LOGGING_LEVEL: ${FIRMWARE_MANAGER_LOGGING_LEVEL} volumes: - ${GOOGLE_APPLICATION_CREDENTIALS}:/google/gcp_credentials.json + - ${HOST_BLOB_STORAGE_DIRECTORY}:/mnt/blob_storage logging: options: max-size: '10m' - max-file: '5' + max-file: '5' \ No newline at end of file diff --git a/resources/kubernetes/README.md b/resources/kubernetes/README.md index ba96f15d4..75beaa69f 100644 --- a/resources/kubernetes/README.md +++ b/resources/kubernetes/README.md @@ -8,7 +8,7 @@ The webapp and API both utilize a K8s Ingress to handle external access to the a The YAML files use GCP specific specifications for various values such as "networking.gke.io/managed-certificates". These values will not work on AWS and Azure but there should be equivalent fields that these specifications can be updated to if needing to deploy in another cloud environment. -The environment variables must be set according to the README documentation for each application. The iss-health-check application only supports GCP. +The environment variables must be set according to the README documentation for each application. The iss-health-check application supports GCP or postgres for storing keys. The environment variables for the iss-health-check application must be set according to the README documentation for the iss-health-check application. 
## Useful Links diff --git a/resources/kubernetes/iss-health-check.yaml b/resources/kubernetes/iss-health-check.yaml index 24b842c99..7cd48b63a 100644 --- a/resources/kubernetes/iss-health-check.yaml +++ b/resources/kubernetes/iss-health-check.yaml @@ -1,4 +1,4 @@ -# This deployment is only usable in a GCP environment due to the GCP Secret Manager dependency +# If GCP is being used to store keys, this deployment will only be usable in a GCP environment due to the GCP Secret Manager dependency apiVersion: 'apps/v1' kind: 'Deployment' metadata: @@ -27,6 +27,8 @@ spec: ports: - containerPort: 8080 env: + - name: STORAGE_TYPE + value: GCP - name: GOOGLE_APPLICATION_CREDENTIALS value: '/home/secret/cv_credentials.json' - name: PROJECT_ID @@ -41,6 +43,8 @@ spec: value: '' - name: ISS_SCMS_VEHICLE_REST_ENDPOINT value: '' + - name: ISS_KEY_TABLE_NAME + value: '' - name: DB_USER value: '' - name: DB_PASS diff --git a/resources/sql_scripts/CVManager_CreateTables.sql b/resources/sql_scripts/CVManager_CreateTables.sql index bf1dd1adb..da02cf1d4 100644 --- a/resources/sql_scripts/CVManager_CreateTables.sql +++ b/resources/sql_scripts/CVManager_CreateTables.sql @@ -325,6 +325,21 @@ SELECT ro.rsu_id, org.name FROM public.rsu_organization AS ro JOIN public.organizations AS org ON ro.organization_id = org.organization_id; +-- Create iss keys table (id, iss_key, creation_date, expiration_date) +CREATE SEQUENCE public.iss_keys_iss_key_id_seq + INCREMENT 1 + START 1 + MINVALUE 1 + MAXVALUE 2147483647 + CACHE 1; + +CREATE TABLE IF NOT EXISTS public.iss_keys +( + iss_key_id integer NOT NULL DEFAULT nextval('iss_keys_iss_key_id_seq'::regclass), + common_name character varying(128) COLLATE pg_catalog.default NOT NULL, + token character varying(128) COLLATE pg_catalog.default NOT NULL +); + -- Create scms_health table CREATE SEQUENCE public.scms_health_scms_health_id_seq INCREMENT 1 diff --git a/sample.env b/sample.env index 854a6f0c6..3cfb926c3 100644 --- a/sample.env +++ b/sample.env @@ 
-130,9 +130,11 @@ KAFKA_BIGQUERY_TABLENAME = '' # --------------------------------------------------------------------- # Firmware Manager Addon: -BLOB_STORAGE_PROVIDER=GCP +BLOB_STORAGE_PROVIDER=DOCKER BLOB_STORAGE_BUCKET= GCP_PROJECT= +## Docker volume mount point for BLOB storage (if using Docker) +HOST_BLOB_STORAGE_DIRECTORY=./local_blob_storage # --------------------------------------------------------------------- # Geo-spatial message query Addon: diff --git a/services/addons/images/firmware_manager/README.md b/services/addons/images/firmware_manager/README.md index 533e6c46f..8842cfd08 100644 --- a/services/addons/images/firmware_manager/README.md +++ b/services/addons/images/firmware_manager/README.md @@ -16,7 +16,7 @@ This directory contains a microservice that runs within the CV Manager GKE Clust An RSU is determined to be ready for upgrade if its entry in the "rsus" table in PostgreSQL has its "target_firmware_version" set to be different than its "firmware_version". The Firmware Manager will ignore all devices with incompatible firmware upgrades set as their target firmware based on the "firmware_upgrade_rules" table. The CV Manager API will only offer CV Manager webapp users compatible options so this generally is a precaution. -Hosting firmware files is recommended to be done via the cloud. GCP cloud storage is the currently supported method. Alternatives can be added via the [download_blob.py](download_blob.py) script. Firmware storage must be organized by: `vendor/rsu-model/firmware-version/install_package`. +Hosting firmware files is recommended to be done via the cloud. GCP cloud storage is the currently supported method, but a directory mounted as a docker volume can also be used. Alternative cloud support can be added via the [download_blob.py](download_blob.py) script. Firmware storage must be organized by: `vendor/rsu-model/firmware-version/install_package`. Firmware upgrades have unique procedures based on RSU vendor/manufacturer. 
To avoid requiring a unique bash script for every single firmware upgrade, the Firmware Manager has been written to use vendor based upgrade scripts that have been thoroughly tested. An interface-like abstract class, [base_upgrader.py](base_upgrader.py), has been made for helping create upgrade scripts for vendors not yet supported. The Firmware Manager selects the script to use based off the RSU's "model" column in the "rsus" table. These scripts report back to the Firmware Manager on completion with a status of whether the upgrade was a success or failure. Regardless, the Firmware Manager will remove the process from its tracking and update the PostgreSQL database accordingly. @@ -40,7 +40,7 @@ Available REST endpoints: To properly run the firmware_manager microservice the following services are also required: -- Cloud based blob storage +- Blob storage (cloud-based or otherwise) - Firmware storage must be organized by: `vendor/rsu-model/firmware-version/install_package`. - CV Manager PostgreSQL database with data in the "rsus", "rsu_models", "manufacturers", "firmware_images", and "firmware_upgrade_rules" tables - Network connectivity from the environment the firmware_manager is deployed into to the blob storage and the RSUs @@ -70,6 +70,9 @@ GCP Required environment variables: - GCP_PROJECT - GCP project for the firmware cloud storage bucket - GOOGLE_APPLICATION_CREDENTIALS - Service account location. Recommended to attach as a volume. +Docker volume required environment variables: +- HOST_BLOB_STORAGE_DIRECTORY - Directory mounted as a docker volume for firmware storage. A relative path can be specified here. 
+ ## Vendor Specific Requirements ### Commsignia diff --git a/services/addons/images/firmware_manager/download_blob.py b/services/addons/images/firmware_manager/download_blob.py index 5cfffdd9b..36901273d 100644 --- a/services/addons/images/firmware_manager/download_blob.py +++ b/services/addons/images/firmware_manager/download_blob.py @@ -3,13 +3,23 @@ import os -# Only supports GCP Bucket Storage for downloading blobs def download_gcp_blob(blob_name, destination_file_name): + """Download a file from a GCP Bucket Storage bucket to a local file. + + Args: + blob_name (str): The name of the file in the bucket. + destination_file_name (str): The name of the local file to download the bucket file to. + """ + + if not validate_file_type(blob_name): + return False + gcp_project = os.environ.get("GCP_PROJECT") bucket_name = os.environ.get("BLOB_STORAGE_BUCKET") storage_client = storage.Client(gcp_project) bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(blob_name) + if blob.exists(): blob.download_to_filename(destination_file_name) logging.info( @@ -17,3 +27,43 @@ def download_gcp_blob(blob_name, destination_file_name): ) return True return False + + +def download_docker_blob(blob_name, destination_file_name): + """Copy a file from a directory mounted as a volume in a Docker container to a local file. + + Args: + blob_name (str): The name of the file in the directory. + destination_file_name (str): The name of the local file to copy the directory file to. + """ + + if not validate_file_type(blob_name): + return False + + directory = "/mnt/blob_storage" + source_file_name = f"{directory}/{blob_name}" + os.system(f"cp {source_file_name} {destination_file_name}") + logging.info( + f"Copied storage object {blob_name} from directory {directory} to local file {destination_file_name}." + ) + return True + + +def validate_file_type(file_name): + """Validate the file type of the file to be downloaded. 
+ + Args: + file_name (str): The name of the file to be downloaded. + """ + if not file_name.endswith(".tar"): + logging.error( + f"Unsupported file type for storage object {file_name}. Only .tar files are supported." + ) + return False + return True + + +class UnsupportedFileTypeException(Exception): + def __init__(self, message="Unsupported file type. Only .tar files are supported."): + self.message = message + super().__init__(self.message) diff --git a/services/addons/images/firmware_manager/sample.env b/services/addons/images/firmware_manager/sample.env index 97b7ebedb..8f045a07a 100644 --- a/services/addons/images/firmware_manager/sample.env +++ b/services/addons/images/firmware_manager/sample.env @@ -7,9 +7,11 @@ PG_DB_NAME="" PG_DB_USER="" PG_DB_PASS="" -# Blob storage variables -BLOB_STORAGE_PROVIDER="GCP" -BLOB_STORAGE_BUCKET="" +# Blob storage variables (only 'GCP' and 'DOCKER' are supported at this time) +BLOB_STORAGE_PROVIDER=DOCKER +BLOB_STORAGE_BUCKET= +## Docker volume mount point for BLOB storage (if using DOCKER) +HOST_BLOB_STORAGE_DIRECTORY=./local_blob_storage # For users using GCP cloud storage GCP_PROJECT="" diff --git a/services/addons/images/firmware_manager/upgrader.py b/services/addons/images/firmware_manager/upgrader.py index 54bb82f9b..88d871ad6 100644 --- a/services/addons/images/firmware_manager/upgrader.py +++ b/services/addons/images/firmware_manager/upgrader.py @@ -34,15 +34,22 @@ def download_blob(self, blob_name=None, local_file_name=None): # Create parent rsu_ip directory path = self.local_file_name[: self.local_file_name.rfind("/")] Path(path).mkdir(exist_ok=True) + blob_name = self.blob_name if blob_name is None else blob_name + local_file_name = ( + self.local_file_name if local_file_name is None else local_file_name + ) # Download blob, defaults to GCP blob storage - bsp = os.environ.get("BLOB_STORAGE_PROVIDER", "GCP") - if bsp == "GCP": - blob_name = self.blob_name if blob_name is None else blob_name - local_file_name = 
self.local_file_name if local_file_name is None else local_file_name + bspCaseInsensitive = os.environ.get( + "BLOB_STORAGE_PROVIDER", "DOCKER" + ).casefold() + if bspCaseInsensitive == "gcp": return download_blob.download_gcp_blob(blob_name, local_file_name) + elif bspCaseInsensitive == "docker": + return download_blob.download_docker_blob(blob_name, local_file_name) else: logging.error("Unsupported blob storage provider") + raise StorageProviderNotSupportedException # Notifies the firmware manager of the completion status for the upgrade # success is a boolean @@ -72,7 +79,7 @@ def wait_until_online(self): iter += 1 # 3 minutes pass with no response return -1 - + def check_online(self): iter = 0 # Ping once every second for 5 seconds to verify RSU is online @@ -86,12 +93,16 @@ def check_online(self): time.sleep(1) # 5 seconds pass with no response return False - + def send_error_email(self, type="Firmware Upgrader", err=""): try: email_addresses = os.environ.get("FW_EMAIL_RECIPIENTS").split(",") - subject = f"{self.rsu_ip} Firmware Upgrader Failure" if type == "Firmware Upgrader" else f"{self.rsu_ip} Firmware Upgrader Post Upgrade Script Failure" + subject = ( + f"{self.rsu_ip} Firmware Upgrader Failure" + if type == "Firmware Upgrader" + else f"{self.rsu_ip} Firmware Upgrader Post Upgrade Script Failure" + ) for email_address in email_addresses: emailSender = EmailSender( @@ -115,3 +126,8 @@ def send_error_email(self, type="Firmware Upgrader", err=""): @abc.abstractclassmethod def upgrade(self): pass + + +class StorageProviderNotSupportedException(Exception): + def __init__(self): + super().__init__("Unsupported blob storage provider") diff --git a/services/addons/images/iss_health_check/README.md b/services/addons/images/iss_health_check/README.md index 7a8f762b2..b1472790f 100644 --- a/services/addons/images/iss_health_check/README.md +++ b/services/addons/images/iss_health_check/README.md @@ -11,15 +11,15 @@ This directory contains a microservice that runs 
within the CV Manager GKE Cluster. The iss_health_checker application populates the CV Manager PostGreSQL database's 'scms_health' table with the current ISS SCMS statuses of all RSUs recorded in the 'rsus' table. These statuses are queried by this application from a provided ISS Green Hills SCMS API endpoint. -The application schedules the iss_health_checker script to run every 6 hours. A new SCMS API access key is generated every run of the script to ensure the access never expires. This is due to a limitation of the SCMS API not allowing permanent access keys. Access keys are stored in GCP Secret Manager to allow for versioning and encrypted storage. The application removes the previous access key from the SCMS API after runtime to reduce clutter of access keys on the API service account. +The application schedules the iss_health_checker script to run every 6 hours. A new SCMS API access key is generated every run of the script to ensure the access never expires. This is due to a limitation of the SCMS API not allowing permanent access keys. Access keys can be stored in GCP Secret Manager to allow for versioning and encrypted storage. The application removes the previous access key from the SCMS API after runtime to reduce clutter of access keys on the API service account. -Currently only GCP is supported to run this application due to a reliance on the GCP Secret Manager. Storing the access keys on a local volume is not recommended due to security vulnerabilities. Feel free to contribute to this application for secret manager equivalent support for other cloud environments. +Currently GCP & Postgres are the only supported storage solutions to run this application. Storing the access keys on a local volume is not recommended due to security vulnerabilities. Feel free to contribute to this application to support other storage solutions. 
## Requirements To properly run the iss_health_checker microservice the following services are also required: -- GCP project and service account with GCP Secret Manager access +- GCP project and service account with GCP Secret Manager access (only required if STORAGE_TYPE is set to 'gcp') - CV Manager PostgreSQL database with at least one RSU inserted into the 'rsus' table - Service agreement with ISS Green Hills to have access to the SCMS API REST service endpoint - iss_health_checker must be deployed in the same environment or K8s cluster as the PostgreSQL database @@ -27,7 +27,9 @@ To properly run the iss_health_checker microservice the following services are a The iss_health_checker microservice expects the following environment variables to be set: -- GOOGLE_APPLICATION_CREDENTIALS - file location for GCP JSON service account key. +- STORAGE_TYPE - Storage solution for the SCMS API access keys. Currently only 'gcp' & 'postgres' are supported. +- GOOGLE_APPLICATION_CREDENTIALS - File location for GCP JSON service account key. Only required if STORAGE_TYPE is set to 'gcp'. +- ISS_KEY_TABLE_NAME - Postgres table name for the ISS SCMS API access keys. Only required if STORAGE_TYPE is set to 'postgres'. - PROJECT_ID - GCP project ID. - ISS_API_KEY - Initial ISS SCMS API access key to perform the first run of the script. This access key must not expire before the first runtime. - ISS_API_KEY_NAME - Human readable reference for the access key within ISS SCMS API. Generated access keys will utilize this same name. 
diff --git a/services/addons/images/iss_health_check/iss_health_checker.py b/services/addons/images/iss_health_check/iss_health_checker.py index 32a69b120..ec2d5a3d4 100644 --- a/services/addons/images/iss_health_check/iss_health_checker.py +++ b/services/addons/images/iss_health_check/iss_health_checker.py @@ -1,118 +1,173 @@ -from datetime import datetime -import requests -import logging -import os -import iss_token -import common.pgquery as pgquery - - -def get_rsu_data(): - result = {} - query = ( - "SELECT jsonb_build_object('rsu_id', rsu_id, 'iss_scms_id', iss_scms_id) " - "FROM public.rsus " - "WHERE iss_scms_id IS NOT NULL " - "ORDER BY rsu_id" - ) - data = pgquery.query_db(query) - - logging.debug("Parsing results...") - for point in data: - point_dict = dict(point[0]) - result[point_dict["iss_scms_id"]] = {"rsu_id": point_dict["rsu_id"]} - - return result - - -def get_scms_status_data(): - rsu_data = get_rsu_data() - - # Create GET request headers - iss_headers = {} - iss_headers["x-api-key"] = iss_token.get_token() - - # Create the GET request string - iss_base = os.environ["ISS_SCMS_VEHICLE_REST_ENDPOINT"] - project_id = os.environ["ISS_PROJECT_ID"] - page_size = 200 - page = 0 - messages_processed = 0 - - # Loop through all pages of enrolled devices - while True: - iss_request = iss_base + "?pageSize={}&page={}&project_id={}".format( - page_size, page, project_id - ) - logging.debug("GET: " + iss_request) - response = requests.get(iss_request, headers=iss_headers) - enrollment_list = response.json()["data"] - - if len(enrollment_list) == 0: - break - - # Loop through each device on current page - for enrollment_status in enrollment_list: - if enrollment_status["_id"] in rsu_data: - rsu_data[enrollment_status["_id"]][ - "provisionerCompany" - ] = enrollment_status["provisionerCompany_id"] - rsu_data[enrollment_status["_id"]]["entityType"] = enrollment_status[ - "entityType" - ] - rsu_data[enrollment_status["_id"]]["project_id"] = enrollment_status[ - 
"project_id" - ] - rsu_data[enrollment_status["_id"]]["deviceHealth"] = enrollment_status[ - "deviceHealth" - ] - - # If the device has yet to download its first set of certs, set the expiration time to when it was enrolled - if "authorizationCertInfo" in enrollment_status["enrollments"][0]: - rsu_data[enrollment_status["_id"]][ - "expiration" - ] = enrollment_status["enrollments"][0]["authorizationCertInfo"][ - "expireTimeOfLatestDownloadedCert" - ] - else: - rsu_data[enrollment_status["_id"]]["expiration"] = None - - messages_processed = messages_processed + 1 - - page = page + 1 - - logging.info("Processed {} messages".format(messages_processed)) - return rsu_data - - -def insert_scms_data(data): - logging.info("Inserting SCMS data into PostgreSQL...") - now_ts = datetime.strftime(datetime.now(), "%Y-%m-%dT%H:%M:%S.000Z") - - query = ( - 'INSERT INTO public.scms_health("timestamp", health, expiration, rsu_id) VALUES' - ) - for value in data.values(): - health = "1" if value["deviceHealth"] == "Healthy" else "0" - if value["expiration"]: - query = ( - query - + f" ('{now_ts}', '{health}', '{value['expiration']}', {value['rsu_id']})," - ) - else: - query = query + f" ('{now_ts}', '{health}', NULL, {value['rsu_id']})," - - pgquery.write_db(query[:-1]) - logging.info( - "SCMS data inserted {} messages into PostgreSQL...".format(len(data.values())) - ) - - -if __name__ == "__main__": - # Configure logging based on ENV var or use default if not set - log_level = ( - "INFO" if "LOGGING_LEVEL" not in os.environ else os.environ["LOGGING_LEVEL"] - ) - logging.basicConfig(format="%(levelname)s:%(message)s", level=log_level) - - scms_statuses = get_scms_status_data() - insert_scms_data(scms_statuses) +from datetime import datetime +import requests +import logging +import os +import iss_token +import common.pgquery as pgquery +from dataclasses import dataclass, field +from typing import Dict + + +# Set up logging +logger = logging.getLogger(__name__) + +@dataclass +class 
RsuDataWrapper: + rsu_data: Dict[str, Dict[str, str]] = field(default_factory=dict) + + def __init__(self, rsu_data): + self.rsu_data = rsu_data + + def get_dict(self): + return self.rsu_data + + def set_provisioner_company(self, scms_id, provisioner_company): + self.rsu_data[scms_id]["provisionerCompany"] = provisioner_company + + def set_entity_type(self, scms_id, entity_type): + self.rsu_data[scms_id]["entityType"] = entity_type + + def set_project_id(self, scms_id, project_id): + self.rsu_data[scms_id]["project_id"] = project_id + + def set_device_health(self, scms_id, device_health): + self.rsu_data[scms_id]["deviceHealth"] = device_health + + def set_expiration(self, scms_id, expiration): + self.rsu_data[scms_id]["expiration"] = expiration + + +def get_rsu_data() -> RsuDataWrapper: + """Get RSU data from PostgreSQL and return it in a wrapper object""" + + result = {} + query = ( + "SELECT jsonb_build_object('rsu_id', rsu_id, 'iss_scms_id', iss_scms_id) " + "FROM public.rsus " + "WHERE iss_scms_id IS NOT NULL " + "ORDER BY rsu_id" + ) + data = pgquery.query_db(query) + + logger.debug("Parsing results...") + for point in data: + point_dict = dict(point[0]) + result[point_dict["iss_scms_id"]] = {"rsu_id": point_dict["rsu_id"]} + + return RsuDataWrapper(result) + + +def get_scms_status_data(): + """Get SCMS status data from ISS and return it as a dictionary""" + + rsu_data = get_rsu_data() + + # Create GET request headers + iss_headers = {} + iss_headers["x-api-key"] = iss_token.get_token() + + # Create the GET request string + iss_base = os.environ["ISS_SCMS_VEHICLE_REST_ENDPOINT"] + project_id = os.environ["ISS_PROJECT_ID"] + page_size = 200 + page = 0 + messages_processed = 0 + + # Loop through all pages of enrolled devices + while True: + iss_request = iss_base + "?pageSize={}&page={}&project_id={}".format( + page_size, page, project_id + ) + logger.debug("GET: " + iss_request) + response = requests.get(iss_request, headers=iss_headers) + enrollment_list = 
response.json()["data"] + + if len(enrollment_list) == 0: + break + + # Loop through each device on current page + for enrollment_status in enrollment_list: + es_id = enrollment_status["_id"] + if es_id in rsu_data.get_dict(): + rsu_data.set_provisioner_company(es_id, enrollment_status["provisionerCompany_id"]) + rsu_data.set_entity_type(es_id, enrollment_status["entityType"]) + rsu_data.set_project_id(es_id, enrollment_status["project_id"]) + rsu_data.set_device_health(es_id, enrollment_status["deviceHealth"]) + + # If the device has yet to download its first set of certs, set the expiration time to when it was enrolled + if "authorizationCertInfo" in enrollment_status["enrollments"][0]: + rsu_data.set_expiration(es_id, enrollment_status["enrollments"][0]["authorizationCertInfo"]["expireTimeOfLatestDownloadedCert"]) + else: + rsu_data.set_expiration(es_id, None) + + messages_processed = messages_processed + 1 + + page = page + 1 + + logger.info("Processed {} messages".format(messages_processed)) + return rsu_data.get_dict() + + +def insert_scms_data(data): + logger.info("Inserting SCMS data into PostgreSQL...") + now_ts = datetime.strftime(datetime.now(), "%Y-%m-%dT%H:%M:%S.000Z") + + query = ( + 'INSERT INTO public.scms_health("timestamp", health, expiration, rsu_id) VALUES' + ) + for value in data.values(): + if validate_scms_data(value) is False: + continue + + health = "1" if value["deviceHealth"] == "Healthy" else "0" + if value["expiration"]: + query = ( + query + + f" ('{now_ts}', '{health}', '{value['expiration']}', {value['rsu_id']})," + ) + else: + query = query + f" ('{now_ts}', '{health}', NULL, {value['rsu_id']})," + + query = query[:-1] # remove comma + pgquery.write_db(query) + logger.info( + "SCMS data inserted {} messages into PostgreSQL...".format(len(data.values())) + ) + +def validate_scms_data(value): + """Validate the SCMS data + + Args: + value (dict): SCMS data + """ + + try: + value["rsu_id"] + except KeyError as e: + 
logger.warning("rsu_id not found in data, is it real data? exception: {}".format(e)) + return False + + try: + value["deviceHealth"] + except KeyError as e: + logger.warning("deviceHealth not found in data for RSU with id {}, is it real data? exception: {}".format(value["rsu_id"], e)) + return False + + try: + value["expiration"] + except KeyError as e: + logger.warning("expiration not found in data for RSU with id {}, is it real data? exception: {}".format(value["rsu_id"], e)) + return False + + return True + + +if __name__ == "__main__": + # Configure logging based on ENV var or use default if not set + log_level = ( + "INFO" if "LOGGING_LEVEL" not in os.environ else os.environ["LOGGING_LEVEL"] + ) + logging.basicConfig(format="%(levelname)s:%(message)s", level=log_level) + + scms_statuses = get_scms_status_data() + insert_scms_data(scms_statuses) \ No newline at end of file diff --git a/services/addons/images/iss_health_check/iss_token.py b/services/addons/images/iss_health_check/iss_token.py index 084b6067f..37cf9536e 100644 --- a/services/addons/images/iss_health_check/iss_token.py +++ b/services/addons/images/iss_health_check/iss_token.py @@ -1,116 +1,213 @@ -from google.cloud import secretmanager -import requests -import os -import json -import uuid -import logging - - -def create_secret(client, secret_id, parent): - """Create a new GCP secret in GCP Secret Manager - client: GCP Security Manager client - secret_id: ID of the secret being created - parent: GCP secret manager parent ID for the GCP project - """ - client.create_secret( - request={ - "parent": parent, - "secret_id": secret_id, - "secret": {"replication": {"automatic": {}}}, - } - ) - logging.debug("New secret created") - - -def check_if_secret_exists(client, secret_id, parent): - """Check if a secret exists in GCP Secret Manager - client: GCP Security Manager client - secret_id: ID of the secret being checked - parent: GCP secret manager parent ID for the GCP project - """ - for secret in 
client.list_secrets( - request=secretmanager.ListSecretsRequest(parent=parent) - ): - # secret names are in the form of "projects/project_id/secrets/secret_id" - if secret.name.split("/")[-1] == secret_id: - logging.debug(f"Secret {secret_id} exists") - return True - return False - - -def get_latest_secret_version(client, secret_id, parent): - """Get latest value of a secret from GCP Secret Manager - client: GCP Security Manager client - secret_id: ID for the secret being retrieved - parent: GCP secret manager parent ID for the GCP project - """ - response = client.access_secret_version( - request={"name": f"{parent}/secrets/{secret_id}/versions/latest"} - ) - return json.loads(response.payload.data.decode("UTF-8")) - - -def add_secret_version(client, secret_id, parent, data): - """Add a new version to an existing secret - client: GCP Security Manager client - secret_id: ID for the secret - parent: GCP secret manager parent ID for the GCP project - data: String value for the new version of the secret - """ - client.add_secret_version( - request={ - "parent": f"{parent}/secrets/{secret_id}", - "payload": {"data": str.encode(json.dumps(data))}, - } - ) - logging.debug("New version added") - - -def get_token(): - client = secretmanager.SecretManagerServiceClient() - secret_id = "iss-token-secret" - parent = f"projects/{os.environ['PROJECT_ID']}" - - # Check to see if the GCP secret exists - secret_exists = check_if_secret_exists(client, secret_id, parent) - - if secret_exists: - # Grab the latest token data - value = get_latest_secret_version(client, secret_id, parent) - friendly_name = value["name"] - token = value["token"] - logging.debug(f"Received token: {friendly_name}") - else: - # If there is no available ISS token secret, create secret - logging.debug("Secret does not exist, creating secret") - create_secret(client, secret_id, parent) - # Use environment variable for first run with new secret - token = os.environ["ISS_API_KEY"] - - # Pull new ISS SCMS API 
from google.cloud import secretmanager
import common.pgquery as pgquery
import requests
import os
import json
import uuid
import logging


# Module-level logger; level/handlers are configured by the application entry point
logger = logging.getLogger(__name__)


def get_storage_type():
    """Return the validated key-storage backend name: "gcp" or "postgres".

    Reads the STORAGE_TYPE environment variable case-insensitively and exits
    the process with status 1 when it is missing or not a recognized value.
    """
    try:
        raw_value = os.environ["STORAGE_TYPE"]
    except KeyError:
        logger.error("STORAGE_TYPE environment variable not set, exiting")
        exit(1)

    normalized = raw_value.casefold()
    if normalized in ("gcp", "postgres"):
        return normalized
    logger.error("STORAGE_TYPE environment variable not set to a valid value, exiting")
    exit(1)


# --------------------- GCP Secret Manager storage ---------------------
def create_secret(client, secret_id, parent):
    """Create a new, empty secret in GCP Secret Manager.

    client: GCP Secret Manager client
    secret_id: ID of the secret being created
    parent: GCP Secret Manager parent ID for the GCP project
    """
    client.create_secret(
        request={
            "parent": parent,
            "secret_id": secret_id,
            "secret": {"replication": {"automatic": {}}},
        }
    )
    logger.debug("New secret created")


def check_if_secret_exists(client, secret_id, parent):
    """Return True when a secret named secret_id exists under parent.

    client: GCP Secret Manager client
    secret_id: ID of the secret being checked
    parent: GCP Secret Manager parent ID for the GCP project
    """
    for secret in client.list_secrets(
        request=secretmanager.ListSecretsRequest(parent=parent)
    ):
        # Secret names are of the form "projects/project_id/secrets/secret_id"
        if secret.name.split("/")[-1] == secret_id:
            logger.debug(f"Secret {secret_id} exists")
            return True
    return False


def get_latest_secret_version(client, secret_id, parent):
    """Return the JSON-decoded payload of the latest version of a secret.

    client: GCP Secret Manager client
    secret_id: ID of the secret being retrieved
    parent: GCP Secret Manager parent ID for the GCP project
    """
    response = client.access_secret_version(
        request={"name": f"{parent}/secrets/{secret_id}/versions/latest"}
    )
    return json.loads(response.payload.data.decode("UTF-8"))


def add_secret_version(client, secret_id, parent, data):
    """Store `data` (JSON-serializable) as a new version of an existing secret.

    client: GCP Secret Manager client
    secret_id: ID of the secret
    parent: GCP Secret Manager parent ID for the GCP project
    data: JSON-serializable value for the new version of the secret
    """
    client.add_secret_version(
        request={
            "parent": f"{parent}/secrets/{secret_id}",
            "payload": {"data": str.encode(json.dumps(data))},
        }
    )
    logger.debug("New version added")


# --------------------- Postgres storage ---------------------
def check_if_data_exists(table_name):
    """Return True when the key table contains at least one row.

    NOTE(review): table_name is interpolated directly into the SQL string;
    it must come from trusted configuration only (here: ISS_KEY_TABLE_NAME).
    """
    query = f"SELECT * FROM {table_name}"
    data = pgquery.query_db(query)
    return len(data) > 0


def get_latest_data(table_name):
    """Return the newest key row as a dict with "id", "name" and "token".

    Rows are ordered by iss_key_id, so the highest id is the latest entry.
    """
    query = f"SELECT * FROM {table_name} ORDER BY iss_key_id DESC LIMIT 1"
    data = pgquery.query_db(query)
    latest = {
        "id": data[0][0],  # iss_key_id
        "name": data[0][1],  # common_name
        "token": data[0][2],  # token
    }
    logger.debug(f"Received token: {latest['name']} with id {latest['id']}")
    return latest


def add_data(table_name, common_name, token):
    """Insert a new (common_name, token) row into the key table.

    NOTE(review): values are interpolated directly into the SQL string and the
    token originates from the ISS SCMS API response; a parameterized query
    would be safer if pgquery supports one.
    """
    query = f"INSERT INTO {table_name} (common_name, token) VALUES ('{common_name}', '{token}')"
    pgquery.write_db(query)


def get_token():
    """Rotate and return a fresh ISS SCMS API token.

    Authenticates with the most recently stored token (or the ISS_API_KEY
    bootstrap value on first run), requests a brand-new token from the ISS
    SCMS REST API, deletes the previous key server-side when one existed,
    and persists the new token to the configured storage backend.

    Returns the new token string; exits with status 1 when the ISS SCMS
    response body is not valid JSON.
    """
    storage_type = get_storage_type()
    if storage_type == "gcp":
        client = secretmanager.SecretManagerServiceClient()
        secret_id = "iss-token-secret"
        parent = f"projects/{os.environ['PROJECT_ID']}"

        data_exists = check_if_secret_exists(client, secret_id, parent)
        if data_exists:
            value = get_latest_secret_version(client, secret_id, parent)
            friendly_name = value["name"]
            token = value["token"]
            logger.debug(f"Received token: {friendly_name}")
        else:
            # First run: create the secret and bootstrap from the environment
            logger.debug("Secret does not exist, creating secret")
            create_secret(client, secret_id, parent)
            token = os.environ["ISS_API_KEY"]
    elif storage_type == "postgres":
        key_table_name = os.environ["ISS_KEY_TABLE_NAME"]

        data_exists = check_if_data_exists(key_table_name)
        if data_exists:
            value = get_latest_data(key_table_name)
            key_id = value["id"]
            friendly_name = value["name"]
            token = value["token"]
            logger.debug(f"Received token: {friendly_name} with id {key_id}")
        else:
            # First run: bootstrap from the environment
            token = os.environ["ISS_API_KEY"]

    iss_base = os.environ["ISS_SCMS_TOKEN_REST_ENDPOINT"]
    iss_headers = {"x-api-key": token}

    # Request a brand-new ISS SCMS API token to ensure its freshness
    new_friendly_name = f"{os.environ['ISS_API_KEY_NAME']}_{str(uuid.uuid4())}"
    iss_post_body = {"friendlyName": new_friendly_name, "expireDays": 1}

    logger.debug("POST: " + iss_base)
    response = requests.post(iss_base, json=iss_post_body, headers=iss_headers)
    try:
        new_token = response.json()["Item"]
    except requests.JSONDecodeError:
        logger.error("Failed to decode JSON response from ISS SCMS API. Response: " + response.text)
        exit(1)
    logger.debug(f"Received new token: {new_friendly_name}")

    if data_exists:
        # Delete the previous API key to prevent key clutter on the ISS side
        iss_delete_body = {"friendlyName": friendly_name}
        requests.delete(iss_base, json=iss_delete_body, headers=iss_headers)
        logger.debug(f"Old token has been deleted from ISS SCMS: {friendly_name}")

    version_data = {"name": new_friendly_name, "token": new_token}

    # Reuse the storage type resolved above instead of re-reading the
    # environment twice more (as the original did) when persisting the token
    if storage_type == "gcp":
        add_secret_version(client, secret_id, parent, version_data)
    elif storage_type == "postgres":
        add_data(key_table_name, new_friendly_name, new_token)

    return new_token
-ISS_API_KEY= -ISS_API_KEY_NAME= -ISS_PROJECT_ID= -ISS_SCMS_TOKEN_REST_ENDPOINT= -ISS_SCMS_VEHICLE_REST_ENDPOINT= - -# PostgreSQL connection information -# Host port must be specified -PG_DB_HOST=:5432 -PG_DB_NAME= -PG_DB_USER= -PG_DB_PASS= - -# GCP Project ID and service account JSON key file location (mount as volume or secret) -PROJECT_ID= -GOOGLE_APPLICATION_CREDENTIALS= - -# Customize the logging level, defaults to INFO -# Options: DEBUG, INFO, WARN, ERROR (case sensitive) +# ISS Account Authentication +ISS_API_KEY= +ISS_API_KEY_NAME= +ISS_PROJECT_ID= +ISS_SCMS_TOKEN_REST_ENDPOINT= +ISS_SCMS_VEHICLE_REST_ENDPOINT= + +# PostgreSQL connection information +# Host port must be specified +PG_DB_HOST=:5432 +PG_DB_NAME= +PG_DB_USER= +PG_DB_PASS= + +# Key Storage +## Type of key storage, options: gcp, postgres +STORAGE_TYPE= + +## GCP Storage (Required if STORAGE_TYPE=gcp) +### GCP Project ID +PROJECT_ID= +### Service account JSON key file location (mount as volume or secret) +GOOGLE_APPLICATION_CREDENTIALS= + +## Postgres Storage (Required if STORAGE_TYPE=postgres) +### Table name to store keys +ISS_KEY_TABLE_NAME= + +# Customize the logging level, defaults to INFO +# Options: DEBUG, INFO, WARN, ERROR (case sensitive) LOGGING_LEVEL= \ No newline at end of file diff --git a/services/addons/tests/firmware_manager/test_download_blob.py b/services/addons/tests/firmware_manager/test_download_blob.py index cc44ced9d..970ec8f97 100644 --- a/services/addons/tests/firmware_manager/test_download_blob.py +++ b/services/addons/tests/firmware_manager/test_download_blob.py @@ -1,7 +1,9 @@ -from unittest.mock import patch +from unittest.mock import MagicMock, patch import os +import pytest from addons.images.firmware_manager import download_blob +from addons.images.firmware_manager.download_blob import UnsupportedFileTypeException @patch.dict( @@ -17,14 +19,69 @@ def test_download_gcp_blob(mock_storage_client, mock_logging): # run download_blob.download_gcp_blob( - 
@patch.dict(
    os.environ, {"GCP_PROJECT": "test-project", "BLOB_STORAGE_BUCKET": "test-bucket"}
)
@patch("addons.images.firmware_manager.download_blob.logging")
def test_download_gcp_blob_unsupported_file_type(mock_logging):
    """download_gcp_blob must reject non-.tar objects and return False."""
    blob_name = "test.blob"
    destination_file_name = "/home/test/"

    result = download_blob.download_gcp_blob(blob_name, destination_file_name)

    mock_logging.error.assert_called_with(
        f"Unsupported file type for storage object {blob_name}. Only .tar files are supported."
    )
    assert result == False


# Patch os.system with mock.patch so the real function is restored after the
# test; the original assigned `os.system = MagicMock()`, which leaked the
# mock into every test that runs later in the same pytest session.
@patch("os.system")
@patch("addons.images.firmware_manager.download_blob.logging")
def test_download_docker_blob(mock_logging, mock_system):
    """download_docker_blob copies a .tar object out of the mounted directory."""
    blob_name = "test.tar"
    destination_file_name = "/home/test/"

    download_blob.download_docker_blob(blob_name, destination_file_name)

    mock_system.assert_called_with(
        f"cp /mnt/blob_storage/{blob_name} {destination_file_name}"
    )
    mock_logging.info.assert_called_with(
        f"Copied storage object {blob_name} from directory /mnt/blob_storage to local file {destination_file_name}."
    )


@patch("os.system")
@patch("addons.images.firmware_manager.download_blob.logging")
def test_download_docker_blob_unsupported_file_type(mock_logging, mock_system):
    """download_docker_blob must reject non-.tar objects and return False."""
    blob_name = "test.blob"
    destination_file_name = "/home/test/"

    result = download_blob.download_docker_blob(blob_name, destination_file_name)

    mock_logging.error.assert_called_with(
        f"Unsupported file type for storage object {blob_name}. Only .tar files are supported."
    )
    assert result == False
mock_download_gcp_blob, mock_log mock_path_obj = mock_Path.return_value test_upgrader = TestUpgrader(test_upgrade_info) - test_upgrader.download_blob() + with pytest.raises(StorageProviderNotSupportedException): + test_upgrader.download_blob() - mock_path_obj.mkdir.assert_called_with(exist_ok=True) - mock_download_gcp_blob.assert_not_called() - mock_logging.error.assert_called_with("Unsupported blob storage provider") + mock_path_obj.mkdir.assert_called_with(exist_ok=True) + mock_download_gcp_blob.assert_not_called() + mock_logging.error.assert_called_with("Unsupported blob storage provider") @patch("addons.images.firmware_manager.upgrader.logging") diff --git a/services/addons/tests/iss_health_check/test_iss_health_checker.py b/services/addons/tests/iss_health_check/test_iss_health_checker.py index ea4a4b389..0e2069d32 100644 --- a/services/addons/tests/iss_health_check/test_iss_health_checker.py +++ b/services/addons/tests/iss_health_check/test_iss_health_checker.py @@ -2,6 +2,7 @@ import os from addons.images.iss_health_check import iss_health_checker +from addons.images.iss_health_check.iss_health_checker import RsuDataWrapper @patch("addons.images.iss_health_check.iss_health_checker.pgquery.query_db") @@ -10,7 +11,8 @@ def test_get_rsu_data_no_data(mock_query_db): result = iss_health_checker.get_rsu_data() # check - assert result == {} + expected = RsuDataWrapper({}) + assert result == expected mock_query_db.assert_called_once() mock_query_db.assert_called_with( "SELECT jsonb_build_object('rsu_id', rsu_id, 'iss_scms_id', iss_scms_id) FROM public.rsus WHERE iss_scms_id IS NOT NULL ORDER BY rsu_id" @@ -27,7 +29,7 @@ def test_get_rsu_data_with_data(mock_query_db): ] result = iss_health_checker.get_rsu_data() - expected_result = {"ABC": {"rsu_id": 1}, "DEF": {"rsu_id": 2}, "GHI": {"rsu_id": 3}} + expected_result = RsuDataWrapper({"ABC": {"rsu_id": 1}, "DEF": {"rsu_id": 2}, "GHI": {"rsu_id": 3}}) # check assert result == expected_result @@ -52,7 +54,7 @@ def 
@patch("addons.images.iss_health_check.iss_health_checker.datetime")
@patch("addons.images.iss_health_check.iss_health_checker.pgquery.write_db")
def test_insert_scms_data_no_rsu_id(mock_write_db, mock_datetime):
    """An entry without an rsu_id is skipped; only complete rows are inserted."""
    mock_datetime.strftime.return_value = "2022-11-03T00:00:00.000Z"
    scms_data = {
        "ABC": {
            "deviceHealth": "Healthy",
            "expiration": "2022-11-02T00:00:00.000Z",
        },
        "DEF": {"rsu_id": 2, "deviceHealth": "Unhealthy", "expiration": None},
    }

    iss_health_checker.insert_scms_data(scms_data)

    expected_query = (
        'INSERT INTO public.scms_health("timestamp", health, expiration, rsu_id) VALUES '
        "('2022-11-03T00:00:00.000Z', '0', NULL, 2)"
    )
    mock_write_db.assert_called_with(expected_query)


@patch("addons.images.iss_health_check.iss_health_checker.datetime")
@patch("addons.images.iss_health_check.iss_health_checker.pgquery.write_db")
def test_insert_scms_data_no_deviceHealth(mock_write_db, mock_datetime):
    """An entry without a deviceHealth field is skipped."""
    mock_datetime.strftime.return_value = "2022-11-03T00:00:00.000Z"
    scms_data = {
        "ABC": {
            "rsu_id": 1,
            "expiration": "2022-11-02T00:00:00.000Z",
        },
        "DEF": {"rsu_id": 2, "deviceHealth": "Unhealthy", "expiration": None},
    }

    iss_health_checker.insert_scms_data(scms_data)

    expected_query = (
        'INSERT INTO public.scms_health("timestamp", health, expiration, rsu_id) VALUES '
        "('2022-11-03T00:00:00.000Z', '0', NULL, 2)"
    )
    mock_write_db.assert_called_with(expected_query)


@patch("addons.images.iss_health_check.iss_health_checker.datetime")
@patch("addons.images.iss_health_check.iss_health_checker.pgquery.write_db")
def test_insert_scms_data_no_expiration(mock_write_db, mock_datetime):
    """An entry without an expiration field is skipped."""
    mock_datetime.strftime.return_value = "2022-11-03T00:00:00.000Z"
    scms_data = {
        "ABC": {
            "rsu_id": 1,
            "deviceHealth": "Healthy",
        },
        "DEF": {"rsu_id": 2, "deviceHealth": "Unhealthy", "expiration": "test"},
    }

    iss_health_checker.insert_scms_data(scms_data)

    expected_query = (
        'INSERT INTO public.scms_health("timestamp", health, expiration, rsu_id) VALUES '
        "('2022-11-03T00:00:00.000Z', '0', 'test', 2)"
    )
    mock_write_db.assert_called_with(expected_query)
"list-request" - - item_match = MagicMock() - item_match.name = "proj/test-proj/secret/test-secret_id" - mock_list_values = [item_match] - mock_sm_client.list_secrets.return_value = mock_list_values - - actual_value = iss_token.check_if_secret_exists( - mock_sm_client, "test-secret_id", "test-parent" - ) - mock_secretmanager.ListSecretsRequest.assert_called_with(parent="test-parent") - mock_sm_client.list_secrets.assert_called_with(request="list-request") - assert actual_value == True - - -@patch( - "addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" -) -@patch("addons.images.iss_health_check.iss_token.secretmanager") -def test_check_if_secret_exists_false(mock_secretmanager, mock_sm_client): - mock_secretmanager.ListSecretsRequest.return_value = "list-request" - - item_not_match = MagicMock() - item_not_match.name = "proj/test-proj/secret/test-secret" - mock_list_values = [item_not_match] - mock_sm_client.list_secrets.return_value = mock_list_values - - actual_value = iss_token.check_if_secret_exists( - mock_sm_client, "test-secret_id", "test-parent" - ) - mock_secretmanager.ListSecretsRequest.assert_called_with(parent="test-parent") - mock_sm_client.list_secrets.assert_called_with(request="list-request") - assert actual_value == False - - -@patch( - "addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" -) -def test_get_latest_secret_version(mock_sm_client): - mock_response = MagicMock() - mock_response.payload.data = str.encode('{"message": "Secret payload data"}') - mock_sm_client.access_secret_version.return_value = mock_response - - actual_value = iss_token.get_latest_secret_version( - mock_sm_client, "test-secret_id", "test-parent" - ) - mock_sm_client.access_secret_version.assert_called_with( - request={"name": "test-parent/secrets/test-secret_id/versions/latest"} - ) - assert actual_value == {"message": "Secret payload data"} - - -@patch( - 
"addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" -) -def test_add_secret_version(mock_sm_client): - secret_id = "test-secret_id" - parent = "test-parent" - data = {"message": "Secret payload data"} - iss_token.add_secret_version(mock_sm_client, secret_id, parent, data) - - expected_request = { - "parent": f"{parent}/secrets/{secret_id}", - "payload": {"data": str.encode(json.dumps(data))}, - } - mock_sm_client.add_secret_version.assert_called_with(request=expected_request) - - -@patch.dict( - os.environ, - { - "PROJECT_ID": "test-proj", - "ISS_API_KEY": "test-api-key", - "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", - "ISS_API_KEY_NAME": "test-api-key-name", - }, -) -@patch("addons.images.iss_health_check.iss_token.requests.Response") -@patch("addons.images.iss_health_check.iss_token.requests") -@patch("addons.images.iss_health_check.iss_token.uuid") -@patch("addons.images.iss_health_check.iss_token.add_secret_version") -@patch("addons.images.iss_health_check.iss_token.create_secret") -@patch("addons.images.iss_health_check.iss_token.check_if_secret_exists") -@patch("addons.images.iss_health_check.iss_token.secretmanager") -def test_get_token_create_secret( - mock_secretmanager, - mock_check_if_secret_exists, - mock_create_secret, - mock_add_secret_version, - mock_uuid, - mock_requests, - mock_response, -): - # Mock every major dependency - mock_sm_client = MagicMock() - mock_secretmanager.SecretManagerServiceClient.return_value = mock_sm_client - mock_check_if_secret_exists.return_value = False - mock_uuid.uuid4.return_value = 12345 - mock_requests.post.return_value = mock_response - mock_response.json.return_value = {"Item": "new-iss-token"} - - # Call function - expected_value = "new-iss-token" - actual_value = iss_token.get_token() - - # Check if iss_token function calls were made correctly - mock_check_if_secret_exists.assert_called_with( - mock_sm_client, "iss-token-secret", 
"projects/test-proj" - ) - mock_create_secret.assert_called_with( - mock_sm_client, "iss-token-secret", "projects/test-proj" - ) - mock_add_secret_version.assert_called_with( - mock_sm_client, - "iss-token-secret", - "projects/test-proj", - {"name": "test-api-key-name_12345", "token": expected_value}, - ) - - # Check if HTTP requests were made correctly - expected_headers = {"x-api-key": "test-api-key"} - expected_body = {"friendlyName": "test-api-key-name_12345", "expireDays": 1} - mock_requests.post.assert_called_with( - "https://api.dm.iss-scms.com/api/test-token", - json=expected_body, - headers=expected_headers, - ) - - # Assert final value - assert actual_value == expected_value - - -@patch.dict( - os.environ, - { - "PROJECT_ID": "test-proj", - "ISS_API_KEY": "test-api-key", - "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", - "ISS_API_KEY_NAME": "test-api-key-name", - }, -) -@patch("addons.images.iss_health_check.iss_token.requests.Response") -@patch("addons.images.iss_health_check.iss_token.requests") -@patch("addons.images.iss_health_check.iss_token.uuid") -@patch("addons.images.iss_health_check.iss_token.add_secret_version") -@patch("addons.images.iss_health_check.iss_token.get_latest_secret_version") -@patch("addons.images.iss_health_check.iss_token.check_if_secret_exists") -@patch("addons.images.iss_health_check.iss_token.secretmanager") -def test_get_token_secret_exists( - mock_secretmanager, - mock_check_if_secret_exists, - mock_get_latest_secret_version, - mock_add_secret_version, - mock_uuid, - mock_requests, - mock_response, -): - # Mock every major dependency - mock_sm_client = MagicMock() - mock_secretmanager.SecretManagerServiceClient.return_value = mock_sm_client - mock_check_if_secret_exists.return_value = True - mock_get_latest_secret_version.return_value = { - "name": "test-api-key-name_01234", - "token": "old-token", - } - mock_uuid.uuid4.return_value = 12345 - mock_requests.post.return_value = mock_response - 
from unittest.mock import patch, MagicMock
import os
import json

import pytest

from addons.images.iss_health_check import iss_token

# --------------------- Storage Type tests ---------------------


@patch.dict(os.environ, {"STORAGE_TYPE": "gcp"})
def test_get_storage_type_gcp():
    assert iss_token.get_storage_type() == "gcp"


@patch.dict(os.environ, {"STORAGE_TYPE": "postgres"})
def test_get_storage_type_postgres():
    assert iss_token.get_storage_type() == "postgres"


@patch.dict(os.environ, {"STORAGE_TYPE": "GCP"})
def test_get_storage_type_gcp_case_insensitive():
    assert iss_token.get_storage_type() == "gcp"


@patch.dict(os.environ, {"STORAGE_TYPE": "POSTGRES"})
def test_get_storage_type_postgres_case_insensitive():
    assert iss_token.get_storage_type() == "postgres"


@patch.dict(os.environ, {"STORAGE_TYPE": "test"})
def test_get_storage_type_invalid():
    with pytest.raises(SystemExit):
        iss_token.get_storage_type()


# clear=True removes any STORAGE_TYPE inherited from the developer's shell or
# CI environment; the original @patch.dict(os.environ, {}) merged an empty
# dict into os.environ and therefore did NOT guarantee the variable was unset,
# making this test pass or fail depending on the surrounding environment.
@patch.dict(os.environ, {}, clear=True)
def test_get_storage_type_unset():
    with pytest.raises(SystemExit):
        iss_token.get_storage_type()


# --------------------- end of Storage Type tests ---------------------


# --------------------- GCP tests ---------------------
@patch(
    "addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient"
)
def test_create_secret(mock_sm_client):
    """create_secret forwards the expected request shape to the client."""
    iss_token.create_secret(mock_sm_client, "test-secret_id", "test-parent")
    expected_request = {
        "parent": "test-parent",
        "secret_id": "test-secret_id",
        "secret": {"replication": {"automatic": {}}},
    }
    mock_sm_client.create_secret.assert_called_with(request=expected_request)
"addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" +) +@patch("addons.images.iss_health_check.iss_token.secretmanager") +def test_check_if_secret_exists_false(mock_secretmanager, mock_sm_client): + mock_secretmanager.ListSecretsRequest.return_value = "list-request" + + item_not_match = MagicMock() + item_not_match.name = "proj/test-proj/secret/test-secret" + mock_list_values = [item_not_match] + mock_sm_client.list_secrets.return_value = mock_list_values + + actual_value = iss_token.check_if_secret_exists( + mock_sm_client, "test-secret_id", "test-parent" + ) + mock_secretmanager.ListSecretsRequest.assert_called_with(parent="test-parent") + mock_sm_client.list_secrets.assert_called_with(request="list-request") + assert actual_value == False + + +@patch( + "addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" +) +def test_get_latest_secret_version(mock_sm_client): + mock_response = MagicMock() + mock_response.payload.data = str.encode('{"message": "Secret payload data"}') + mock_sm_client.access_secret_version.return_value = mock_response + + actual_value = iss_token.get_latest_secret_version( + mock_sm_client, "test-secret_id", "test-parent" + ) + mock_sm_client.access_secret_version.assert_called_with( + request={"name": "test-parent/secrets/test-secret_id/versions/latest"} + ) + assert actual_value == {"message": "Secret payload data"} + + +@patch( + "addons.images.iss_health_check.iss_token.secretmanager.SecretManagerServiceClient" +) +def test_add_secret_version(mock_sm_client): + secret_id = "test-secret_id" + parent = "test-parent" + data = {"message": "Secret payload data"} + iss_token.add_secret_version(mock_sm_client, secret_id, parent, data) + + expected_request = { + "parent": f"{parent}/secrets/{secret_id}", + "payload": {"data": str.encode(json.dumps(data))}, + } + mock_sm_client.add_secret_version.assert_called_with(request=expected_request) + + +@patch.dict( + os.environ, + { + 
"PROJECT_ID": "test-proj", + "ISS_API_KEY": "test-api-key", + "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", + "ISS_API_KEY_NAME": "test-api-key-name", + "STORAGE_TYPE": "gcp", + }, +) +@patch("addons.images.iss_health_check.iss_token.requests.Response") +@patch("addons.images.iss_health_check.iss_token.requests") +@patch("addons.images.iss_health_check.iss_token.uuid") +@patch("addons.images.iss_health_check.iss_token.add_secret_version") +@patch("addons.images.iss_health_check.iss_token.create_secret") +@patch("addons.images.iss_health_check.iss_token.check_if_secret_exists") +@patch("addons.images.iss_health_check.iss_token.secretmanager") +def test_get_token_create_secret( + mock_secretmanager, + mock_check_if_secret_exists, + mock_create_secret, + mock_add_secret_version, + mock_uuid, + mock_requests, + mock_response, +): + # Mock every major dependency + mock_sm_client = MagicMock() + mock_secretmanager.SecretManagerServiceClient.return_value = mock_sm_client + mock_check_if_secret_exists.return_value = False + mock_uuid.uuid4.return_value = 12345 + mock_requests.post.return_value = mock_response + mock_response.json.return_value = {"Item": "new-iss-token"} + + # Call function + expected_value = "new-iss-token" + actual_value = iss_token.get_token() + + # Check if iss_token function calls were made correctly + mock_check_if_secret_exists.assert_called_with( + mock_sm_client, "iss-token-secret", "projects/test-proj" + ) + mock_create_secret.assert_called_with( + mock_sm_client, "iss-token-secret", "projects/test-proj" + ) + mock_add_secret_version.assert_called_with( + mock_sm_client, + "iss-token-secret", + "projects/test-proj", + {"name": "test-api-key-name_12345", "token": expected_value}, + ) + + # Check if HTTP requests were made correctly + expected_headers = {"x-api-key": "test-api-key"} + expected_body = {"friendlyName": "test-api-key-name_12345", "expireDays": 1} + mock_requests.post.assert_called_with( + 
"https://api.dm.iss-scms.com/api/test-token", + json=expected_body, + headers=expected_headers, + ) + + # Assert final value + assert actual_value == expected_value + + +@patch.dict( + os.environ, + { + "PROJECT_ID": "test-proj", + "ISS_API_KEY": "test-api-key", + "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", + "ISS_API_KEY_NAME": "test-api-key-name", + "STORAGE_TYPE": "gcp", + }, +) +@patch("addons.images.iss_health_check.iss_token.requests.Response") +@patch("addons.images.iss_health_check.iss_token.requests") +@patch("addons.images.iss_health_check.iss_token.uuid") +@patch("addons.images.iss_health_check.iss_token.add_secret_version") +@patch("addons.images.iss_health_check.iss_token.get_latest_secret_version") +@patch("addons.images.iss_health_check.iss_token.check_if_secret_exists") +@patch("addons.images.iss_health_check.iss_token.secretmanager") +def test_get_token_secret_exists( + mock_secretmanager, + mock_check_if_secret_exists, + mock_get_latest_secret_version, + mock_add_secret_version, + mock_uuid, + mock_requests, + mock_response, +): + # Mock every major dependency + mock_sm_client = MagicMock() + mock_secretmanager.SecretManagerServiceClient.return_value = mock_sm_client + mock_check_if_secret_exists.return_value = True + mock_get_latest_secret_version.return_value = { + "name": "test-api-key-name_01234", + "token": "old-token", + } + mock_uuid.uuid4.return_value = 12345 + mock_requests.post.return_value = mock_response + mock_response.json.return_value = {"Item": "new-iss-token"} + + # Call function + expected_value = "new-iss-token" + actual_value = iss_token.get_token() + + # Check if iss_token function calls were made correctly + mock_check_if_secret_exists.assert_called_with( + mock_sm_client, "iss-token-secret", "projects/test-proj" + ) + mock_get_latest_secret_version.assert_called_with( + mock_sm_client, "iss-token-secret", "projects/test-proj" + ) + mock_add_secret_version.assert_called_with( + 
mock_sm_client, + "iss-token-secret", + "projects/test-proj", + {"name": "test-api-key-name_12345", "token": expected_value}, + ) + + # Check if HTTP requests were made correctly + expected_headers = {"x-api-key": "old-token"} + expected_post_body = {"friendlyName": "test-api-key-name_12345", "expireDays": 1} + mock_requests.post.assert_called_with( + "https://api.dm.iss-scms.com/api/test-token", + json=expected_post_body, + headers=expected_headers, + ) + + expected_delete_body = {"friendlyName": "test-api-key-name_01234"} + mock_requests.delete.assert_called_with( + "https://api.dm.iss-scms.com/api/test-token", + json=expected_delete_body, + headers=expected_headers, + ) + + # Assert final value + assert actual_value == expected_value + +# --------------------- end of GCP tests --------------------- + + +# --------------------- Postgres tests --------------------- + +@patch( + "addons.images.iss_health_check.iss_token.pgquery", +) +def test_check_if_data_exists_true(mock_pgquery): + mock_pgquery.query_db.return_value = [(1,)] + actual_value = iss_token.check_if_data_exists("test-table-name") + expected_query = ( + "SELECT * FROM test-table-name" + ) + mock_pgquery.query_db.assert_called_with(expected_query) + assert actual_value == True + + +@patch( + "addons.images.iss_health_check.iss_token.pgquery", +) +def test_check_if_data_exists_false(mock_pgquery): + mock_pgquery.query_db.return_value = [] + actual_value = iss_token.check_if_data_exists("test-table-name") + expected_query = ( + "SELECT * FROM test-table-name" + ) + mock_pgquery.query_db.assert_called_with(expected_query) + assert actual_value == False + + +@patch( + "addons.images.iss_health_check.iss_token.pgquery", +) +def test_add_data(mock_pgquery): + iss_token.add_data("test-table-name", "test-common-name", "test-token") + expected_query = ( + "INSERT INTO test-table-name (common_name, token) " + "VALUES ('test-common-name', 'test-token')" + ) + 
mock_pgquery.write_db.assert_called_with(expected_query) + + +@patch( + "addons.images.iss_health_check.iss_token.pgquery", +) +def test_get_latest_data(mock_pgquery): + mock_pgquery.query_db.return_value = [(1, "test-common-name", "test-token")] + actual_value = iss_token.get_latest_data("test-table-name") + expected_query = ( + "SELECT * FROM test-table-name ORDER BY iss_key_id DESC LIMIT 1" + ) + mock_pgquery.query_db.assert_called_with(expected_query) + assert actual_value == {"id": 1, "name": "test-common-name", "token": "test-token"} + + +@patch.dict( + os.environ, + { + "PROJECT_ID": "test-proj", + "ISS_API_KEY": "test-api-key", + "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", + "ISS_API_KEY_NAME": "test-api-key-name", + "STORAGE_TYPE": "postgres", + "ISS_KEY_TABLE_NAME": "test-table-name", + }, +) +@patch("addons.images.iss_health_check.iss_token.requests.Response") +@patch("addons.images.iss_health_check.iss_token.requests") +@patch("addons.images.iss_health_check.iss_token.uuid") +@patch("addons.images.iss_health_check.iss_token.add_data") +@patch("addons.images.iss_health_check.iss_token.check_if_data_exists") +def test_get_token_data_does_not_exist( + mock_check_if_data_exists, + mock_add_data, + mock_uuid, + mock_requests, + mock_response, +): + # Mock every major dependency + mock_check_if_data_exists.return_value = False + mock_uuid.uuid4.return_value = 12345 + mock_requests.post.return_value = mock_response + mock_response.json.return_value = {"Item": "new-iss-token"} + + # Call function + result = iss_token.get_token() + + # Check if iss_token function calls were made correctly + mock_check_if_data_exists.assert_called_with("test-table-name") + mock_add_data.assert_called_with( + "test-table-name", "test-api-key-name_12345", "new-iss-token" + ) + + # Check if HTTP requests were made correctly + expected_headers = {"x-api-key": "test-api-key"} + expected_post_body = {"friendlyName": "test-api-key-name_12345", 
"expireDays": 1} + mock_requests.post.assert_called_with( + "https://api.dm.iss-scms.com/api/test-token", + json=expected_post_body, + headers=expected_headers, + ) + + # Assert final value + assert result == "new-iss-token" + + +@patch.dict( + os.environ, + { + "PROJECT_ID": "test-proj", + "ISS_API_KEY": "test-api-key", + "ISS_SCMS_TOKEN_REST_ENDPOINT": "https://api.dm.iss-scms.com/api/test-token", + "ISS_API_KEY_NAME": "test-api-key-name", + "STORAGE_TYPE": "postgres", + "ISS_KEY_TABLE_NAME": "test-table-name", + }, +) +@patch("addons.images.iss_health_check.iss_token.requests.Response") +@patch("addons.images.iss_health_check.iss_token.requests") +@patch("addons.images.iss_health_check.iss_token.uuid") +@patch("addons.images.iss_health_check.iss_token.add_data") +@patch("addons.images.iss_health_check.iss_token.check_if_data_exists") +@patch("addons.images.iss_health_check.iss_token.get_latest_data") +def test_get_token_data_exists( + mock_get_latest_data, + mock_check_if_data_exists, + mock_add_data, + mock_uuid, + mock_requests, + mock_response, +): + # Mock every major dependency + mock_check_if_data_exists.return_value = True + mock_get_latest_data.return_value = { + "id": 1, + "name": "test-api-key-name_01234", + "token": "old-token", + } + mock_uuid.uuid4.return_value = 12345 + mock_requests.post.return_value = mock_response + mock_response.json.return_value = {"Item": "new-iss-token"} + + # Call function + result = iss_token.get_token() + + # Check if iss_token function calls were made correctly + mock_check_if_data_exists.assert_called_with("test-table-name") + mock_get_latest_data.assert_called_with("test-table-name") + mock_add_data.assert_called_with( + "test-table-name", "test-api-key-name_12345", "new-iss-token" + ) + + # Check if HTTP requests were made correctly + expected_headers = {"x-api-key": "old-token"} + expected_post_body = {"friendlyName": "test-api-key-name_12345", "expireDays": 1} + mock_requests.post.assert_called_with( + 
"https://api.dm.iss-scms.com/api/test-token", + json=expected_post_body, + headers=expected_headers, + ) + + expected_delete_body = {"friendlyName": "test-api-key-name_01234"} + mock_requests.delete.assert_called_with( + "https://api.dm.iss-scms.com/api/test-token", + json=expected_delete_body, + headers=expected_headers, + ) + + # Assert final value + assert result == "new-iss-token" + +# --------------------- end of Postgres tests --------------------- \ No newline at end of file