diff --git a/dffml/df/kubernetes.py b/dffml/df/kubernetes.py
index 760168edf8..9f25d76b84 100644
--- a/dffml/df/kubernetes.py
+++ b/dffml/df/kubernetes.py
@@ -100,14 +100,7 @@
     Subprocess,
 )
 from ..util.internal import load_dataflow_from_configloader
-
-# TODO Use importlib.resources instead of reading via pathlib
-python_code: str = pathlib.Path(__file__).parent.joinpath(
-    "kubernetes_execute_pickled_dataflow_with_inputs.py"
-).read_text()
-output_server: str = pathlib.Path(__file__).parent.joinpath(
-    "kubernetes_output_server.py"
-).read_text()
+from .kubernetes_output_server import PYTHON_CODE, OUTPUT_SERVER


 class JobKubernetesOrchestratorPreApplyDefinitions(enum.Enum):
@@ -278,12 +271,12 @@ async def run_operations_for_ctx(
             execute_pickled_dataflow_with_inputs_path = tempdir_path.joinpath(
                 "execute_pickled_dataflow_with_inputs.py"
             )
-            execute_pickled_dataflow_with_inputs_path.write_text(python_code)
+            execute_pickled_dataflow_with_inputs_path.write_text(PYTHON_CODE)
             # Write out the Python code to execute the dataflow
             kubernetes_output_server_path = tempdir_path.joinpath(
                 "kubernetes_output_server.py"
             )
-            kubernetes_output_server_path.write_text(output_server)
+            kubernetes_output_server_path.write_text(OUTPUT_SERVER)
             # Write out the prerun dataflow (secret)
             prerun_dataflow_path = tempdir_path.joinpath(
                 "prerun-dataflow.json"
diff --git a/dffml/df/kubernetes_output_server.py b/dffml/df/kubernetes_output_server.py
index fb7046ba29..247eae8f9a 100644
--- a/dffml/df/kubernetes_output_server.py
+++ b/dffml/df/kubernetes_output_server.py
@@ -21,6 +21,12 @@

 LOGGER = logging.getLogger(pathlib.Path(__file__).stem)

+# TODO Use importlib.resources instead of reading via pathlib
+PYTHON_CODE: str = pathlib.Path(__file__).parent.joinpath(
+    "kubernetes_execute_pickled_dataflow_with_inputs.py"
+).read_text()
+OUTPUT_SERVER: str = pathlib.Path(__file__).read_text()
+

 async def concurrently(
     work: Dict[asyncio.Task, Any],
diff --git a/dffml/df/ssh.py b/dffml/df/ssh.py
index de2dafbaf8..3e0b5be41c 100644
--- a/dffml/df/ssh.py
+++ b/dffml/df/ssh.py
@@ -82,7 +82,7 @@
     MemoryRedundancyChecker,
     MEMORYORCHESTRATORCONFIG_MAX_CTXS,
 )
-from .kubernetes_output_server import server_socket_unix_stream, read_messages
+from .kubernetes_output_server import server_socket_unix_stream, read_messages, PYTHON_CODE, OUTPUT_SERVER
 from ..operation.output import GetSingle, get_single_spec
 from ..base import config, field
 from ..util.crypto import secure_hash
@@ -98,14 +98,6 @@
 )
 from ..util.internal import load_dataflow_from_configloader

-# TODO Use importlib.resources instead of reading via pathlib
-python_code: str = pathlib.Path(__file__).parent.joinpath(
-    "kubernetes_execute_pickled_dataflow_with_inputs.py"
-).read_text()
-output_server: str = pathlib.Path(__file__).parent.joinpath(
-    "kubernetes_output_server.py"
-).read_text()
-

 @config
 class SSHOrchestratorConfig:
@@ -300,13 +292,13 @@ async def run_operations_for_ctx(
                 "execute_pickled_dataflow_with_inputs.py"
             )
             execute_pickled_dataflow_with_inputs_path.write_text(
-                python_code
+                PYTHON_CODE
             )
             # Write out the Python code to execute the dataflow
             kubernetes_output_server_path = tempdir_path.joinpath(
                 "kubernetes_output_server.py"
             )
-            kubernetes_output_server_path.write_text(output_server)
+            kubernetes_output_server_path.write_text(OUTPUT_SERVER)
             # Write out the prerun dataflow
             prerun_dataflow_path = tempdir_path.joinpath(
                 "prerun-dataflow.json"
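Note: the relocated block keeps the "TODO Use importlib.resources instead of reading via pathlib" comment. If that TODO were addressed in a follow-up, one possible shape (a minimal sketch, not part of this patch, assuming Python 3.9+ and that both scripts remain packaged as resources inside dffml.df) would be:

    # Hypothetical replacement for the pathlib-based reads above.
    import importlib.resources

    _df_package = importlib.resources.files("dffml.df")
    # Read the dataflow execution script shipped alongside this module
    PYTHON_CODE: str = _df_package.joinpath(
        "kubernetes_execute_pickled_dataflow_with_inputs.py"
    ).read_text()
    # Read this module's own source so it can be copied to remote hosts
    OUTPUT_SERVER: str = _df_package.joinpath(
        "kubernetes_output_server.py"
    ).read_text()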