From f1949ab1e596477809840f3a55cfed2e7e87ebbb Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 13:48:34 +0530
Subject: [PATCH 01/10] D-39625 New logger

---
 digitalai/release/integration/__init__.py  |  1 +
 digitalai/release/integration/base_task.py | 21 +++++---
 digitalai/release/integration/logger.py    | 20 ++++++++
 .../release/integration/logging_config.py  | 35 -------------
 digitalai/release/integration/watcher.py   | 13 ++---
 digitalai/release/integration/wrapper.py   | 50 ++++++++-----------
 pyproject.toml                             | 13 ++---
 tests/release/integration/hello.py         |  9 ++--
 8 files changed, 74 insertions(+), 88 deletions(-)
 create mode 100644 digitalai/release/integration/logger.py
 delete mode 100644 digitalai/release/integration/logging_config.py

diff --git a/digitalai/release/integration/__init__.py b/digitalai/release/integration/__init__.py
index 8b072b5..c883e33 100644
--- a/digitalai/release/integration/__init__.py
+++ b/digitalai/release/integration/__init__.py
@@ -3,3 +3,4 @@
 from .output_context import OutputContext
 from .exceptions import AbortException
 from .reporting_records import BuildRecord, PlanRecord, ItsmRecord,CodeComplianceRecord, DeploymentRecord
+from .logger import dai_logger
diff --git a/digitalai/release/integration/base_task.py b/digitalai/release/integration/base_task.py
index 7229a75..1243fa1 100644
--- a/digitalai/release/integration/base_task.py
+++ b/digitalai/release/integration/base_task.py
@@ -10,14 +10,20 @@
 from .input_context import AutomatedTaskAsUserContext, ReleaseContext
 from .output_context import OutputContext
 from .exceptions import AbortException
-
-logger = logging.getLogger("Digitalai")
+from .logger import dai_logger
 
 
 class BaseTask(ABC):
     """
     An abstract base class representing a task that can be executed.
     """
 
+    def __init__(self):
+        self.task_id = None
+        self.release_context = None
+        self.release_server_url = None
+        self.input_properties = None
+        self.output_context = None
+
     def execute_task(self) -> None:
         """
         Executes the task by calling the execute method. If an AbortException is raised during execution,
@@ -28,12 +34,13 @@ def execute_task(self) -> None:
             self.output_context = OutputContext(0, "", {}, [])
             self.execute()
         except AbortException:
-            logger.debug("Abort requested")
+            dai_logger.info("Abort requested")
             self.set_exit_code(1)
-            sys.exit(1)
             self.set_error_message("Abort requested")
+            sys.exit(1)
+
         except Exception as e:
-            logger.error("Unexpected error occurred.", exc_info=True)
+            dai_logger.error("Unexpected error occurred.", exc_info=True)
             self.set_exit_code(1)
             self.set_error_message(str(e))
 
@@ -104,13 +111,13 @@ def add_comment(self, comment: str) -> None:
         """
         Logs a comment of the task.
         """
-        logger.debug(f"##[start: comment]{comment}##[end: comment]")
+        dai_logger.debug(f"##[start: comment]{comment}##[end: comment]")
 
     def set_status_line(self, status_line: str) -> None:
         """
         Set the status of the task.
""" - logger.debug(f"##[start: status]{status_line}##[end: status]") + dai_logger.debug(f"##[start: status]{status_line}##[end: status]") def add_reporting_record(self, reporting_record: Any) -> None: """ diff --git a/digitalai/release/integration/logger.py b/digitalai/release/integration/logger.py new file mode 100644 index 0000000..6a67290 --- /dev/null +++ b/digitalai/release/integration/logger.py @@ -0,0 +1,20 @@ +import logging +import sys + +# Define the log format (with milliseconds) and date format +LOG_FORMAT = "%(asctime)s.%(msecs)03d %(levelname)s [%(filename)s:%(lineno)d] - %(message)s" +DATE_FORMAT = "%Y-%m-%d %H:%M:%S" + +# Create a formatter +_formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=DATE_FORMAT) + +# Create a stream handler (to stdout) and attach the formatter +_handler = logging.StreamHandler(sys.stdout) +_handler.setFormatter(_formatter) + +# Get your “dai” logger +dai_logger = logging.getLogger("digital_ai") +dai_logger.setLevel(logging.DEBUG) +dai_logger.propagate = False +if not dai_logger.handlers: + dai_logger.addHandler(_handler) diff --git a/digitalai/release/integration/logging_config.py b/digitalai/release/integration/logging_config.py deleted file mode 100644 index 95daf42..0000000 --- a/digitalai/release/integration/logging_config.py +++ /dev/null @@ -1,35 +0,0 @@ -import sys -LOGGING_CONFIG = ({ - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'standard': { - 'format': '%(levelname)-7s [%(filename)s:%(lineno)d] - %(message)s' - } - }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'level': 'DEBUG', - 'formatter': 'standard', - 'stream': sys.stdout - } - }, - 'loggers': { - 'Digitalai': { - 'level': 'DEBUG', - 'handlers': ['console'], - 'propagate': False - }, - '__main__': { - 'level': 'DEBUG', - 'handlers': ['console'], - 'propagate': False - } - }, - 'root': { - 'level': 'INFO', - 'handlers': ['console'], - 'propagate': False - } -}) diff --git a/digitalai/release/integration/watcher.py b/digitalai/release/integration/watcher.py index 98c0948..a60a7a1 100644 --- a/digitalai/release/integration/watcher.py +++ b/digitalai/release/integration/watcher.py @@ -1,23 +1,20 @@ -import logging import os import threading from kubernetes import watch - +from .logger import dai_logger from digitalai.release.integration import k8s -logger = logging.getLogger("Digitalai") - def start_input_context_watcher(on_input_context_update_func): - logger.debug("Input context watcher started") + dai_logger.info("Input context watcher started") stop = threading.Event() try: start_input_secret_watcher(on_input_context_update_func, stop) except Exception: - logger.error("Unexpected error occurred.", exc_info=True) + dai_logger.error("Unexpected error occurred.", exc_info=True) return # Wait until the watcher is stopped @@ -25,7 +22,7 @@ def start_input_context_watcher(on_input_context_update_func): def start_input_secret_watcher(on_input_context_update_func, stop): - logger.debug("Input secret watcher started") + dai_logger.info("Input secret watcher started") kubernetes_client = k8s.get_client() field_selector = "metadata.name=" + os.getenv("INPUT_CONTEXT_SECRET") @@ -39,7 +36,7 @@ def start_input_secret_watcher(on_input_context_update_func, stop): # Checking if 'session-key' field has changed if old_session_key and old_session_key != new_session_key: - logger.info("Detected input context value change") + dai_logger.info("Detected input context value change") on_input_context_update_func() # Set old session-key value diff --git 
index 12cdbef..3b15731 100644
--- a/digitalai/release/integration/wrapper.py
+++ b/digitalai/release/integration/wrapper.py
@@ -4,7 +4,6 @@
 import base64
 import importlib
 import json
-import logging.config
 import os
 import signal
 import sys
@@ -17,7 +16,7 @@
 from .base_task import BaseTask
 from .input_context import InputContext
 from .job_data_encryptor import AESJobDataEncryptor, NoOpJobDataEncryptor
-from .logging_config import LOGGING_CONFIG
+from .logger import dai_logger
 from .masked_io import MaskedIO
 from .output_context import OutputContext
 
@@ -51,12 +50,6 @@ def get_encryptor():
     return encryptor
 
 
-# Set up logging
-logging.config.dictConfig(LOGGING_CONFIG)
-
-# Get the logger
-logger = logging.getLogger("Digitalai")
-
 # Initialize the global task object
 dai_task_object: BaseTask = None
 
@@ -66,13 +59,13 @@ def abort_handler(signum, frame):
     This function handles the abort request by calling the abort method on the global task object, if it exists.
     If the task object does not exist, it logs a message and exits with a status code of 1.
    """
-    logger.debug("Received SIGTERM to gracefully stop the process")
+    dai_logger.info("Received SIGTERM to gracefully stop the process")
 
     global dai_task_object
     if dai_task_object:
         dai_task_object.abort()
     else:
-        logger.debug("Abort requested")
+        dai_logger.info("Abort requested")
         sys.exit(1)
 
@@ -86,15 +79,16 @@ def get_task_details():
     and parsing the JSON data into an InputContext object. Then, set the secrets for the masked
     standard output and error streams, build the task properties from the InputContext object.
     """
-    logger.debug("Preparing for task properties.")
+    dai_logger.info("Preparing for task properties")
     if input_context_file:
-        logger.debug("Reading input context from file")
+        dai_logger.info("Reading input context from file")
         with open(input_context_file) as data_input:
             input_content = data_input.read()
+        dai_logger.info("Successfully loaded input context from file")
     else:
-        logger.debug("Reading input context from secret")
+        dai_logger.info("Reading input context from secret")
         secret = k8s.get_client().read_namespaced_secret(input_context_secret, runner_namespace)
-
+        dai_logger.info("Successfully loaded input context from secret")
         global base64_session_key, callback_url
         base64_session_key = base64.b64decode(secret.data["session-key"])
         callback_url = base64.b64decode(secret.data["url"])
@@ -111,7 +105,7 @@
         response = requests.get(fetch_url)
         response.raise_for_status()
     except requests.exceptions.RequestException as e:
-        logger.error("Failed to fetch data.", exc_info=True)
+        dai_logger.error("Failed to fetch data.", exc_info=True)
         raise e
 
     if response.status_code != 200:
@@ -122,6 +116,7 @@
     input_content = base64.b64decode(input_content)
 
     decrypted_json = get_encryptor().decrypt(input_content)
+    dai_logger.info("Successfully decrypted input context")
     global input_context
     input_context = InputContext.from_dict(json.loads(decrypted_json))
 
@@ -140,39 +135,38 @@
     dictionary to a JSON string, encrypting the string using the encryptor, and writing the encrypted
     string to the output context file or secret and pushing to callback URL.
""" - logger.debug("Creating output context file") output_content = json.dumps(output_context.to_dict()) encrypted_json = get_encryptor().encrypt(output_content) try: if output_context_file: - logger.debug("Writing output context to file") + dai_logger.info("Writing output context to file") with open(output_context_file, "w") as data_output: data_output.write(encrypted_json) if result_secret_key: - logger.debug("Writing output context to secret") + dai_logger.info("Writing output context to secret") if len(encrypted_json) >= size_of_1Mb: - logger.warning("Result size exceeds 1Mb and is too big to store in secret") + dai_logger.warning("Result size exceeds 1Mb and is too big to store in secret") else: namespace, name, key = k8s.split_secret_resource_data(result_secret_key) secret = k8s.get_client().read_namespaced_secret(name, namespace) secret.data[key] = encrypted_json k8s.get_client().replace_namespaced_secret(name, namespace, secret) if callback_url: - logger.debug("Pushing result using HTTP") + dai_logger.info("Pushing result using HTTP") url = base64.b64decode(callback_url).decode("UTF-8") try: urllib3.PoolManager().request("POST", url, headers={'Content-Type': 'application/json'}, body=encrypted_json) except Exception: if should_retry_callback_request(encrypted_json): - logger.error("Cannot finish Callback request.", exc_info=True) - logger.info("Retry flag was set on Callback request, retrying until successful...") + dai_logger.error("Cannot finish Callback request.", exc_info=True) + dai_logger.info("Retry flag was set on Callback request, retrying until successful...") retry_push_result_infinitely(encrypted_json) else: raise except Exception: - logger.error("Unexpected error occurred.", exc_info=True) + dai_logger.error("Unexpected error occurred.", exc_info=True) def retry_push_result_infinitely(encrypted_json): @@ -197,7 +191,7 @@ def retry_push_result_infinitely(encrypted_json): response = urllib3.PoolManager().request("POST", url, headers={'Content-Type': 'application/json'}, body=encrypted_json) return response except Exception as e: - logger.warning(f"Cannot finish retried Callback request: {e}. Retrying in {retry_delay} seconds...") + dai_logger.warning(f"Cannot finish retried Callback request: {e}. Retrying in {retry_delay} seconds...") time.sleep(retry_delay) retry_delay = min(retry_delay * backoff_factor, max_backoff) @@ -216,13 +210,13 @@ def execute_task(task_object: BaseTask): If an exception is raised during execution, log the error. Finally, update the output context file using the output context of the task object. 
""" + global dai_task_object try: - global dai_task_object dai_task_object = task_object - logger.debug("Starting task execution") + dai_logger.info("Starting task execution") dai_task_object.execute_task() except Exception: - logger.error("Unexpected error occurred.", exc_info=True) + dai_logger.error("Unexpected error occurred.", exc_info=True) finally: update_output_context(dai_task_object.get_output_context()) @@ -261,7 +255,7 @@ def run(): execute_task(task_obj) except Exception as e: # Log the error and update the output context file with exit code 1 if an exception is raised - logger.error("Unexpected error occurred.", exc_info=True) + dai_logger.error("Unexpected error occurred.", exc_info=True) update_output_context(OutputContext(1, str(e), {}, [])) finally: if execution_mode == "daemon": diff --git a/pyproject.toml b/pyproject.toml index fb59d75..b14991f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [tool.hatch.build] exclude = [ - "digitalai/release/v1/docs/*" + "digitalai/release/v1/docs/*", ".gitignore", "tests/*" ] [tool.hatch.build.targets.wheel] @@ -20,11 +20,11 @@ description = "Digital.ai Release SDK" readme = "README.md" requires-python = ">=3.7" dependencies = [ - 'dataclasses-json==0.5.7', - 'pycryptodomex==3.16.0', - 'python_dateutil >= 2.5.3', - 'urllib3 >= 1.25.3', - 'kubernetes==25.3.0' + 'dataclasses-json==0.6.7', + 'pycryptodomex==3.22.0', + 'python-dateutil==2.9.0', + 'urllib3==2.0.7', + 'kubernetes==32.0.1' ] classifiers = [ "Programming Language :: Python :: 3", @@ -34,3 +34,4 @@ classifiers = [ [project.urls] "Homepage" = "https://digital.ai/" +Documentation = "https://docs.digital.ai/release/docs/category/python-sdk" diff --git a/tests/release/integration/hello.py b/tests/release/integration/hello.py index 3b70120..764ceb6 100644 --- a/tests/release/integration/hello.py +++ b/tests/release/integration/hello.py @@ -1,4 +1,5 @@ from digitalai.release.integration import BaseTask +from digitalai.release.integration import dai_logger class Hello(BaseTask): @@ -12,10 +13,10 @@ def execute(self) -> None: greeting = f"Hello {name}" - print(f"get_release_server_url() : {self.get_release_server_url()}") - print(f"get_task_user() : {self.get_task_user()}") - print(f"get_release_id() : {self.get_release_id()}") - print(f"get_task_id() : {self.get_task_id()}") + dai_logger.info(f"get_release_server_url() : {self.get_release_server_url()}") + dai_logger.info(f"get_task_user() : {self.get_task_user()}") + dai_logger.info(f"get_release_id() : {self.get_release_id()}") + dai_logger.info(f"get_task_id() : {self.get_task_id()}") # Add to the comment section of the task in the UI self.add_comment(greeting) From cb14036f88279bf0cb4d4fbe11f630de02897576 Mon Sep 17 00:00:00 2001 From: Balaji Venkatesan Date: Mon, 21 Apr 2025 13:58:38 +0530 Subject: [PATCH 02/10] D-39625 New logger --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b14991f..aa762c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [tool.hatch.build] exclude = [ - "digitalai/release/v1/docs/*", ".gitignore", "tests/*" + "digitalai/release/v1/docs/*", ".gitignore", "tests/*", ".github/*" ] [tool.hatch.build.targets.wheel] @@ -12,7 +12,7 @@ packages = ["digitalai"] [project] name = "digitalai_release_sdk" -version = "24.1.0" +version = "24.1.1b1" authors = [ { name="Digital.ai", email="pypi-devops@digital.ai" }, ] From 
From 43b1000012f41e1e8d64734298f53bd2fe52cfe6 Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 14:19:37 +0530
Subject: [PATCH 03/10] D-39625 New logger

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index aa762c5..f70578e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,7 +23,7 @@ dependencies = [
     'dataclasses-json==0.6.7',
     'pycryptodomex==3.22.0',
     'python-dateutil==2.9.0',
-    'urllib3==2.0.7',
+    'urllib3>=2.0.7',
     'kubernetes==32.0.1'
 ]
 classifiers = [

From 7ca2e6fbd24f3a6c264ed954eb7bf748d9c5a3f3 Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 14:56:55 +0530
Subject: [PATCH 04/10] D-39625 New logger

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index f70578e..3d88b77 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,7 @@ requires-python = ">=3.7"
 dependencies = [
     'dataclasses-json==0.6.7',
     'pycryptodomex==3.22.0',
-    'python-dateutil==2.9.0',
+    'python-dateutil>=2.9.0',
     'urllib3>=2.0.7',
     'kubernetes==32.0.1'
 ]

From dd94e9a159f3c9d7b2aa8943ac0e51de2ebce6c5 Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 15:02:53 +0530
Subject: [PATCH 05/10] D-39625 New logger

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index eeef5af..473c77b 100644
--- a/README.md
+++ b/README.md
@@ -35,4 +35,4 @@ class Hello(BaseTask):
 ```
 
 ## Documentation
-Read more about Digital.ai Release Python SDK [here](https://digital.ai/)
\ No newline at end of file
+Read more about Digital.ai Release Python SDK [here](https://docs.digital.ai/release/docs/category/python-sdk)
\ No newline at end of file

From 6295715cb3319cf874c5abc27c0b47f9de052e70 Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 15:12:57 +0530
Subject: [PATCH 06/10] D-39625 New logger

---
 digitalai/release/integration/k8s.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/digitalai/release/integration/k8s.py b/digitalai/release/integration/k8s.py
index 4a84da9..930512d 100644
--- a/digitalai/release/integration/k8s.py
+++ b/digitalai/release/integration/k8s.py
@@ -1,5 +1,5 @@
 import threading
-
+import warnings
 from kubernetes import client, config
 from kubernetes.client import CoreV1Api
 
@@ -13,6 +13,7 @@ def get_client():
     if not kubernetes_client:
         with lock:
             if not kubernetes_client:
+                warnings.filterwarnings("ignore", message=".*kube_config_path not provided.*")
                 config.load_config()
                 kubernetes_client = client.CoreV1Api()
 

From dbf81b86353d3bf734028b7c01b186bb3e5dec2e Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 15:46:04 +0530
Subject: [PATCH 07/10] D-39625 New logger

---
 digitalai/release/integration/k8s.py | 18 +++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/digitalai/release/integration/k8s.py b/digitalai/release/integration/k8s.py
index 930512d..d56dbe5 100644
--- a/digitalai/release/integration/k8s.py
+++ b/digitalai/release/integration/k8s.py
@@ -1,7 +1,8 @@
 import threading
-import warnings
+
 from kubernetes import client, config
 from kubernetes.client import CoreV1Api
+from .logger import dai_logger
 
 kubernetes_client: CoreV1Api = None
 lock = threading.Lock()
@@ -13,8 +14,19 @@ def get_client():
     if not kubernetes_client:
         with lock:
             if not kubernetes_client:
-                warnings.filterwarnings("ignore", message=".*kube_config_path not provided.*")
-                config.load_config()
+                try:
+                    dai_logger.info("Attempting to load in-cluster config")
+                    config.load_incluster_config()
+                    dai_logger.info("Successfully loaded in-cluster config")
+                except Exception:
+                    dai_logger.warning("In-cluster config failed, attempting default load_config")
+                    try:
+                        config.load_config()
+                        dai_logger.info("Successfully loaded config using load_config")
+                    except Exception as e:
+                        dai_logger.error(f"Failed to load any config", exc_info=True)
+                        raise RuntimeError("Could not configure kubernetes client") from e
+
                 kubernetes_client = client.CoreV1Api()
 
     return kubernetes_client

From d8c9afdf0743938a00c6da154890f01a9cdf7078 Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Mon, 21 Apr 2025 16:19:42 +0530
Subject: [PATCH 08/10] D-39625 New logger

---
 digitalai/release/integration/k8s.py     | 11 ++++++-----
 digitalai/release/integration/wrapper.py |  3 ++-
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/digitalai/release/integration/k8s.py b/digitalai/release/integration/k8s.py
index d56dbe5..b102623 100644
--- a/digitalai/release/integration/k8s.py
+++ b/digitalai/release/integration/k8s.py
@@ -2,6 +2,7 @@
 
 from kubernetes import client, config
 from kubernetes.client import CoreV1Api
+from kubernetes.config.config_exception import ConfigException
 from .logger import dai_logger
 
 kubernetes_client: CoreV1Api = None
@@ -18,16 +19,16 @@ def get_client():
                     dai_logger.info("Attempting to load in-cluster config")
                     config.load_incluster_config()
                     dai_logger.info("Successfully loaded in-cluster config")
-                except Exception:
+                except ConfigException:
                     dai_logger.warning("In-cluster config failed, attempting default load_config")
                     try:
                         config.load_config()
                         dai_logger.info("Successfully loaded config using load_config")
-                    except Exception as e:
-                        dai_logger.error(f"Failed to load any config", exc_info=True)
-                        raise RuntimeError("Could not configure kubernetes client") from e
-
+                    except Exception:
+                        dai_logger.exception("Failed to load any Kubernetes config")
+                        raise RuntimeError("Could not configure Kubernetes client")
                 kubernetes_client = client.CoreV1Api()
+                dai_logger.info("Kubernetes client created successfully")
 
     return kubernetes_client
diff --git a/digitalai/release/integration/wrapper.py b/digitalai/release/integration/wrapper.py
index 3b15731..7370c15 100644
--- a/digitalai/release/integration/wrapper.py
+++ b/digitalai/release/integration/wrapper.py
@@ -86,8 +86,9 @@ def get_task_details():
             input_content = data_input.read()
         dai_logger.info("Successfully loaded input context from file")
     else:
+        k8s_client = k8s.get_client()
         dai_logger.info("Reading input context from secret")
-        secret = k8s.get_client().read_namespaced_secret(input_context_secret, runner_namespace)
+        secret =k8s_client.read_namespaced_secret(input_context_secret, runner_namespace)
         dai_logger.info("Successfully loaded input context from secret")
         global base64_session_key, callback_url
         base64_session_key = base64.b64decode(secret.data["session-key"])

From 874f437b4727af07ef7afde2d0baf1479acaab3f Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Tue, 6 May 2025 14:59:56 +0530
Subject: [PATCH 09/10] D-39625 New logger

---
 digitalai/release/integration/k8s.py     | 10 +++++-----
 digitalai/release/integration/wrapper.py |  6 +++---
 pyproject.toml                           |  6 +++---
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/digitalai/release/integration/k8s.py b/digitalai/release/integration/k8s.py
index b102623..6aff685 100644
--- a/digitalai/release/integration/k8s.py
+++ b/digitalai/release/integration/k8s.py
@@ -16,19 +16,19 @@ def get_client():
         with lock:
             if not kubernetes_client:
                 try:
-                    dai_logger.info("Attempting to load in-cluster config")
+                    #dai_logger.info("Attempting to load in-cluster config")
                     config.load_incluster_config()
-                    dai_logger.info("Successfully loaded in-cluster config")
+                    #dai_logger.info("Successfully loaded in-cluster config")
                 except ConfigException:
-                    dai_logger.warning("In-cluster config failed, attempting default load_config")
+                    #dai_logger.warning("In-cluster config failed, attempting default load_config")
                     try:
                         config.load_config()
-                        dai_logger.info("Successfully loaded config using load_config")
+                        #dai_logger.info("Successfully loaded config using load_config")
                     except Exception:
                         dai_logger.exception("Failed to load any Kubernetes config")
                         raise RuntimeError("Could not configure Kubernetes client")
                 kubernetes_client = client.CoreV1Api()
-                dai_logger.info("Kubernetes client created successfully")
+                #dai_logger.info("Kubernetes client created successfully")
 
     return kubernetes_client
diff --git a/digitalai/release/integration/wrapper.py b/digitalai/release/integration/wrapper.py
index 7370c15..86a0445 100644
--- a/digitalai/release/integration/wrapper.py
+++ b/digitalai/release/integration/wrapper.py
@@ -84,12 +84,12 @@ def get_task_details():
         dai_logger.info("Reading input context from file")
         with open(input_context_file) as data_input:
             input_content = data_input.read()
-        dai_logger.info("Successfully loaded input context from file")
+        #dai_logger.info("Successfully loaded input context from file")
     else:
         k8s_client = k8s.get_client()
         dai_logger.info("Reading input context from secret")
         secret =k8s_client.read_namespaced_secret(input_context_secret, runner_namespace)
-        dai_logger.info("Successfully loaded input context from secret")
+        #dai_logger.info("Successfully loaded input context from secret")
         global base64_session_key, callback_url
         base64_session_key = base64.b64decode(secret.data["session-key"])
         callback_url = base64.b64decode(secret.data["url"])
@@ -117,7 +117,7 @@
     input_content = base64.b64decode(input_content)
 
     decrypted_json = get_encryptor().decrypt(input_content)
-    dai_logger.info("Successfully decrypted input context")
+    #dai_logger.info("Successfully decrypted input context")
     global input_context
     input_context = InputContext.from_dict(json.loads(decrypted_json))
 
diff --git a/pyproject.toml b/pyproject.toml
index 3d88b77..fa02e85 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,11 +20,11 @@ description = "Digital.ai Release SDK"
 readme = "README.md"
 requires-python = ">=3.7"
 dependencies = [
-    'dataclasses-json==0.6.7',
-    'pycryptodomex==3.22.0',
+    'dataclasses-json>=0.6.7',
+    'pycryptodomex>=3.22.0',
     'python-dateutil>=2.9.0',
     'urllib3>=2.0.7',
-    'kubernetes==32.0.1'
+    'kubernetes>=32.0.1'
 ]
 classifiers = [
     "Programming Language :: Python :: 3",

From d24694654f7c45fd62f2cfa0aed4d24b70cb255a Mon Sep 17 00:00:00 2001
From: Balaji Venkatesan
Date: Tue, 6 May 2025 15:01:08 +0530
Subject: [PATCH 10/10] D-39625 New logger

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index fa02e85..ec9edb7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,7 @@ packages = ["digitalai"]
 
 [project]
 name = "digitalai_release_sdk"
-version = "24.1.1b1"
+version = "24.1.1"
 authors = [
   { name="Digital.ai", email="pypi-devops@digital.ai" },
 ]
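Usage sketch for the new logger (not part of the patch series above): a minimal, hypothetical task module modeled on tests/release/integration/hello.py, assuming a digitalai_release_sdk build containing these commits is installed. The log line shown in the comment follows the LOG_FORMAT and DATE_FORMAT defined in logger.py; the module name ping_task.py and the PingTask class are illustrative only.

    # ping_task.py -- hypothetical example task using the exported dai_logger
    from digitalai.release.integration import BaseTask, dai_logger


    class PingTask(BaseTask):

        def execute(self) -> None:
            # Emitted to stdout roughly as:
            # 2025-05-06 15:01:08.123 INFO [ping_task.py:9] - Executing ping task
            dai_logger.info("Executing ping task")
            # Shows up in the task's comment section in the Release UI
            self.add_comment("pong")


    if __name__ == "__main__":
        # Outside the Release runner the logger can still be exercised directly
        dai_logger.debug("dai_logger is wired to stdout at DEBUG level")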