diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py index d22f0f415bb..8575980707a 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -996,6 +996,136 @@ def get_script_common_text(this: str, example: Optional[str] = None): can be explicitly configured to provide or override default settings for all tasks in the workflow. '''): + Conf('completion', VDR.V_STRING, desc=''' + Define the condition for task completion. + + The completion condition is evaluated when a task is finished. + It is a validation check which confirms that the task has + generated the outputs it was expected to. + + If the task fails this check it is considered + :term:`incomplete` and may cause the workflow to + :term:`stall`, alerting you that something has gone wrong which + requires investigation. + + By default, the completion condition ensures that all required + outputs, i.e. outputs which appear in the graph but are not + marked as optional with the ``?`` character, are completed. + + E.g., in this example, the task ``foo`` must generate the + required outputs ``succeeded`` and ``x``, it may or may not + generate the optional output ``y``: + + .. code-block:: cylc-graph + + foo => bar + foo:x => x + foo:y? => y + + In Python syntax that condition looks like this: + + .. code-block:: python + + # the task must succeed and generate the custom output "x" + succeeded and x + + The ``completion`` configuration allows you to override the + default completion to suit your needs. + + E.g., in this example, the task ``foo`` has three optional + outputs, ``x``, ``y`` and ``z``: + + .. code-block:: cylc-graph + + foo:x? => x + foo:y? => y + foo:z? => z + x | y | z => bar + + Because all three of these outputs are optional, if none of + them are generated, the task will still be marked as complete. + + If you wanted to require that at least one of these outputs is + generated you could configure the completion condition like so: + + .. 
code-block:: python + + # the task must succeed and generate at least one of the + # outputs "x" or "y" or "z": + succeeded and (x or y or z) + + .. note:: + + For the completion expression, hyphens in task outputs + are converted into underscores e.g: + + .. code-block:: cylc + + [runtime] + [[foo]] + completion = succeeded and my_output # underscore + [[[outputs]]] + my-output = 'my custom task output' # hyphen + + .. note:: + + In some cases the succeeded output might not explicitly + appear in the graph, e.g: + + .. code-block:: cylc-graph + + foo:x? => x + + In these cases success is presumed to be required unless + explicitly stated otherwise, either in the graph: + + .. code-block:: cylc-graph + + foo? + foo:x? => x + + Or in the completion expression: + + .. code-block:: cylc + + completion = x # no reference to succeeded here + + + .. hint:: + + If task outputs are optional in the graph they must also + be optional in the completion condition and vice versa. + + .. code-block:: cylc + + [scheduling] + [[graph]] + R1 = """ + # ERROR: this should be "a? => b" + a => b + """ + [runtime] + [[a]] + # this completion condition implies that the + # succeeded output is optional + completion = succeeded or failed + + .. rubric:: Examples + + ``succeeded`` + The task must succeed. + ``succeeded or (failed and my_error)`` + The task can fail, but only if it also yields the custom + output ``my_error``. + ``succeeded and (x or y or z)`` + The task must succeed and yield at least one of the + custom outputs, x, y or z. + ``(a and b) or (c and d)`` + One pair of these outputs must be yielded for the task + to be complete. + + .. 
versionadded:: 8.3.0 + ''') Conf('platform', VDR.V_STRING, desc=''' The name of a compute resource defined in :cylc:conf:`global.cylc[platforms]` or diff --git a/cylc/flow/config.py b/cylc/flow/config.py index ad87315c4c1..cfaa4b1df30 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -90,7 +90,11 @@ from cylc.flow.task_id import TaskID from cylc.flow.task_outputs import ( TASK_OUTPUT_SUCCEEDED, - TaskOutputs + TASK_OUTPUT_FINISHED, + TaskOutputs, + get_completion_expression, + get_optional_outputs, + trigger_to_completion_variable, ) from cylc.flow.task_trigger import TaskTrigger, Dependency from cylc.flow.taskdef import TaskDef @@ -519,6 +523,8 @@ def __init__( self.load_graph() self.mem_log("config.py: after load_graph()") + self._set_completion_expressions() + self.process_runahead_limit() if self.run_mode('simulation', 'dummy'): @@ -1007,6 +1013,197 @@ def _check_sequence_bounds(self): ) LOG.warning(msg) + def _set_completion_expressions(self): + """Sets and checks completion expressions for each task. + + If a task does not have a user-defined completion expression, then set + one according to the default rules. + + If a task does have a user-defined completion expression, then ensure + it is consistent with the use of outputs in the graph. + """ + for name, taskdef in self.taskdefs.items(): + expr = taskdef.rtconfig['completion'] + if expr: + # check the user-defined expression + self._check_completion_expression(name, expr) + else: + # derive a completion expression for this taskdef + expr = get_completion_expression(taskdef) + + if name not in self.taskdefs: + # this is a family -> nothing more to do here + continue + + # update both the sparse and dense configs to make these values + # visible to "cylc config" to make the completion expression more + # transparent to users. 
+ # NOTE: we have to update both because we are setting this value + # late on in the process after the dense copy has been made + self.pcfg.sparse.setdefault( + 'runtime', {} + ).setdefault( + name, {} + )['completion'] = expr + self.pcfg.dense['runtime'][name]['completion'] = expr + + # update the task's runtime config to make this value visible to + # the data store + # NOTE: we have to do this because we are setting this value late + # on after the TaskDef has been created + taskdef.rtconfig['completion'] = expr + + def _check_completion_expression(self, task_name: str, expr: str) -> None: + """Checks a user-defined completion expression. + + Args: + task_name: + The name of the task we are checking. + expr: + The completion expression as defined in the config. + + """ + # check completion expressions are not being used in compat mode + if cylc.flow.flags.cylc7_back_compat: + raise WorkflowConfigError( + '[runtime][<namespace>]completion cannot be used' + ' in Cylc 7 compatibility mode.' + ) + + # check for invalid triggers in the expression + if 'submit-failed' in expr: + raise WorkflowConfigError( + f'Error in [runtime][{task_name}]completion:' + f'\nUse "submit_failed" rather than "submit-failed"' + ' in completion expressions.' + ) + elif '-' in expr: + raise WorkflowConfigError( + f'Error in [runtime][{task_name}]completion:' + f'\n {expr}' + '\nReplace hyphens with underscores in task outputs when' + ' used in completion expressions.' 
+ ) + + # get the outputs and completion expression for this task + try: + outputs = self.taskdefs[task_name].outputs + except KeyError: + # this is a family -> we'll check integrity for each task that + # inherits from it + return + + # get the optional/required outputs defined in the graph + graph_optionals = { + # completion_variable: is_optional + trigger_to_completion_variable(output): ( + None if is_required is None else not is_required + ) + for output, (_, is_required) + in outputs.items() + } + + # get the optional/required outputs defined in the expression + try: + # this involves running the expression which also validates it + expression_optionals = get_optional_outputs(expr, outputs) + except NameError as exc: + # expression references an output which has not been registered + error = exc.args[0][5:] + + if f"'{TASK_OUTPUT_FINISHED}'" in error: + # the finished output cannot be used in completion expressions + # see proposal point 5:: + # https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + raise WorkflowConfigError( + f'Error in [runtime][{task_name}]completion:' + f'\n {expr}' + '\nThe "finished" output cannot be used in completion' + ' expressions, use "succeeded or failed".' + ) + + raise WorkflowConfigError( + # NOTE: str(exc) == "name 'x' is not defined" tested in + # tests/integration/test_optional_outputs.py + f'Error in [runtime][{task_name}]completion:' + f'\nInput {error}' + ) + except Exception as exc: # includes InvalidCompletionExpression + # expression contains non-whitelisted syntax or any other error in + # the expression e.g. SyntaxError + raise WorkflowConfigError( + f'Error in [runtime][{task_name}]completion:' + f'\n{str(exc)}' + ) + + # ensure consistency between the graph and the completion expression + for compvar in ( + { + *graph_optionals, + *expression_optionals + } + ): + # is the output optional in the graph? 
+ graph_opt = graph_optionals.get(compvar) + # is the output optional in the completion expression? + expr_opt = expression_optionals.get(compvar) + + # True = is optional + # False = is required + # None = is not referenced + + # graph_opt expr_opt + # True True ok + # True False not ok + # True None not ok [1] + # False True not ok [1] + # False False ok + # False None not ok + # None True ok + # None False ok + # None None ok + + # [1] applies only to "submit-failed" and "expired" + + output = compvar # TODO + + if graph_opt is True and expr_opt is False: + raise WorkflowConfigError( + f'{task_name}:{output} is optional in the graph' + ' (? symbol), but required in the completion' + f' expression:\n{expr}' + ) + + if graph_opt is False and expr_opt is None: + raise WorkflowConfigError( + f'{task_name}:{output} is required in the graph,' + ' but not referenced in the completion' + f' expression\n{expr}' + ) + + if ( + graph_opt is True + and expr_opt is None + and compvar in {'submit_failed', 'expired'} + ): + raise WorkflowConfigError( + f'{task_name}:{output} is permitted in the graph' + ' but is not referenced in the completion' + ' expression (so is not permitted by it).' 
+ f'\nTry: completion = "{expr} or {output}"' + ) + + if ( + graph_opt is False + and expr_opt is True + and compvar not in {'submit_failed', 'expired'} + ): + raise WorkflowConfigError( + f'{task_name}:{output} is required in the graph,' + ' but optional in the completion expression' + f'\n{expr}' + ) + def _expand_name_list(self, orig_names): """Expand any parameters in lists of names.""" name_expander = NameExpander(self.parameters) diff --git a/cylc/flow/data_messages.proto b/cylc/flow/data_messages.proto index 6068bb1c5df..bc0355e6d4c 100644 --- a/cylc/flow/data_messages.proto +++ b/cylc/flow/data_messages.proto @@ -127,6 +127,7 @@ message PbRuntime { optional string directives = 15; optional string environment = 16; optional string outputs = 17; + optional string completion = 18; } diff --git a/cylc/flow/data_messages_pb2.py b/cylc/flow/data_messages_pb2.py index 82c620bcacf..5ecb96fc122 100644 --- a/cylc/flow/data_messages_pb2.py +++ b/cylc/flow/data_messages_pb2.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: data_messages.proto +# Protobuf Python Version: 4.25.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -14,7 +15,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b \x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 \x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 
\x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 \x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! \x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 
\x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\xb9\x06\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b \x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 
\x01(\tH\x10\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputs\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e \x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! 
\x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 \x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 
\x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 
\x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 
\x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 
\x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b \x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 
\x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 \x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 \x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! \x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 
\x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\xe1\x06\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b \x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 \x01(\tH\x10\x88\x01\x01\x12\x17\n\ncompletion\x18\x12 
\x01(\tH\x11\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputsB\r\n\x0b_completion\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e \x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! 
\x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 \x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 
\x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 
\x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 
\x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 
\x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -46,55 +47,55 @@ _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_start=1493 _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_end=1566 _globals['_PBRUNTIME']._serialized_start=2014 - _globals['_PBRUNTIME']._serialized_end=2839 - _globals['_PBJOB']._serialized_start=2842 - _globals['_PBJOB']._serialized_end=3511 - _globals['_PBTASK']._serialized_start=3514 - _globals['_PBTASK']._serialized_end=3868 - _globals['_PBPOLLTASK']._serialized_start=3871 - _globals['_PBPOLLTASK']._serialized_end=4087 - _globals['_PBCONDITION']._serialized_start=4090 - _globals['_PBCONDITION']._serialized_end=4293 - _globals['_PBPREREQUISITE']._serialized_start=4296 - _globals['_PBPREREQUISITE']._serialized_end=4446 - _globals['_PBOUTPUT']._serialized_start=4449 - _globals['_PBOUTPUT']._serialized_end=4589 - _globals['_PBTRIGGER']._serialized_start=4592 - _globals['_PBTRIGGER']._serialized_end=4757 - _globals['_PBTASKPROXY']._serialized_start=4760 - _globals['_PBTASKPROXY']._serialized_end=5801 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5411 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5468 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5470 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5537 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5539 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5599 - _globals['_PBFAMILY']._serialized_start=5804 - _globals['_PBFAMILY']._serialized_end=6132 - _globals['_PBFAMILYPROXY']._serialized_start=6135 - _globals['_PBFAMILYPROXY']._serialized_end=6949 + 
_globals['_PBRUNTIME']._serialized_end=2879 + _globals['_PBJOB']._serialized_start=2882 + _globals['_PBJOB']._serialized_end=3551 + _globals['_PBTASK']._serialized_start=3554 + _globals['_PBTASK']._serialized_end=3908 + _globals['_PBPOLLTASK']._serialized_start=3911 + _globals['_PBPOLLTASK']._serialized_end=4127 + _globals['_PBCONDITION']._serialized_start=4130 + _globals['_PBCONDITION']._serialized_end=4333 + _globals['_PBPREREQUISITE']._serialized_start=4336 + _globals['_PBPREREQUISITE']._serialized_end=4486 + _globals['_PBOUTPUT']._serialized_start=4489 + _globals['_PBOUTPUT']._serialized_end=4629 + _globals['_PBTRIGGER']._serialized_start=4632 + _globals['_PBTRIGGER']._serialized_end=4797 + _globals['_PBTASKPROXY']._serialized_start=4800 + _globals['_PBTASKPROXY']._serialized_end=5841 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5451 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5508 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5510 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5577 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5579 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5639 + _globals['_PBFAMILY']._serialized_start=5844 + _globals['_PBFAMILY']._serialized_end=6172 + _globals['_PBFAMILYPROXY']._serialized_start=6175 + _globals['_PBFAMILYPROXY']._serialized_end=6989 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_start=1441 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_end=1491 - _globals['_PBEDGE']._serialized_start=6952 - _globals['_PBEDGE']._serialized_end=7140 - _globals['_PBEDGES']._serialized_start=7142 - _globals['_PBEDGES']._serialized_end=7265 - _globals['_PBENTIREWORKFLOW']._serialized_start=7268 - _globals['_PBENTIREWORKFLOW']._serialized_end=7510 - _globals['_EDELTAS']._serialized_start=7513 - _globals['_EDELTAS']._serialized_end=7688 - _globals['_FDELTAS']._serialized_start=7691 - _globals['_FDELTAS']._serialized_end=7870 - 
_globals['_FPDELTAS']._serialized_start=7873 - _globals['_FPDELTAS']._serialized_end=8063 - _globals['_JDELTAS']._serialized_start=8066 - _globals['_JDELTAS']._serialized_end=8239 - _globals['_TDELTAS']._serialized_start=8242 - _globals['_TDELTAS']._serialized_end=8417 - _globals['_TPDELTAS']._serialized_start=8420 - _globals['_TPDELTAS']._serialized_end=8606 - _globals['_WDELTAS']._serialized_start=8609 - _globals['_WDELTAS']._serialized_end=8804 - _globals['_ALLDELTAS']._serialized_start=8807 - _globals['_ALLDELTAS']._serialized_end=9016 + _globals['_PBEDGE']._serialized_start=6992 + _globals['_PBEDGE']._serialized_end=7180 + _globals['_PBEDGES']._serialized_start=7182 + _globals['_PBEDGES']._serialized_end=7305 + _globals['_PBENTIREWORKFLOW']._serialized_start=7308 + _globals['_PBENTIREWORKFLOW']._serialized_end=7550 + _globals['_EDELTAS']._serialized_start=7553 + _globals['_EDELTAS']._serialized_end=7728 + _globals['_FDELTAS']._serialized_start=7731 + _globals['_FDELTAS']._serialized_end=7910 + _globals['_FPDELTAS']._serialized_start=7913 + _globals['_FPDELTAS']._serialized_end=8103 + _globals['_JDELTAS']._serialized_start=8106 + _globals['_JDELTAS']._serialized_end=8279 + _globals['_TDELTAS']._serialized_start=8282 + _globals['_TDELTAS']._serialized_end=8457 + _globals['_TPDELTAS']._serialized_start=8460 + _globals['_TPDELTAS']._serialized_end=8646 + _globals['_WDELTAS']._serialized_start=8649 + _globals['_WDELTAS']._serialized_end=8844 + _globals['_ALLDELTAS']._serialized_start=8847 + _globals['_ALLDELTAS']._serialized_end=9056 # @@protoc_insertion_point(module_scope) diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index 744daeb4dda..f49c5bd9eaa 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -247,6 +247,7 @@ def runtime_from_config(rtconfig): return PbRuntime( platform=platform, script=rtconfig['script'], + completion=rtconfig['completion'], init_script=rtconfig['init-script'], 
env_script=rtconfig['env-script'], err_script=rtconfig['err-script'], @@ -1440,7 +1441,7 @@ def apply_task_proxy_db_history(self): ) ): for message in json.loads(outputs_str): - itask.state.outputs.set_completion(message, True) + itask.state.outputs.set_message_complete(message) # Gather tasks with flow id. prereq_ids.add(f'{relative_id}/{flow_nums_str}') @@ -1502,7 +1503,7 @@ def _process_internal_task_proxy(self, itask, tproxy): del tproxy.prerequisites[:] tproxy.prerequisites.extend(prereq_list) - for label, message, satisfied in itask.state.outputs.get_all(): + for label, message, satisfied in itask.state.outputs: output = tproxy.outputs[label] output.label = label output.message = message @@ -2393,10 +2394,8 @@ def delta_task_output( tp_id, tproxy = self.store_node_fetcher(itask.tokens) if not tproxy: return - item = itask.state.outputs.get_item(message) - if item is None: - return - label, _, satisfied = item + outputs = itask.state.outputs + label = outputs.get_trigger(message) # update task instance update_time = time() tp_delta = self.updated[TASK_PROXIES].setdefault( @@ -2405,7 +2404,7 @@ def delta_task_output( output = tp_delta.outputs[label] output.label = label output.message = message - output.satisfied = satisfied + output.satisfied = outputs.is_message_complete(message) output.time = update_time self.updates_pending = True @@ -2425,9 +2424,10 @@ def delta_task_outputs(self, itask: TaskProxy) -> None: tp_delta = self.updated[TASK_PROXIES].setdefault( tp_id, PbTaskProxy(id=tp_id)) tp_delta.stamp = f'{tp_id}@{update_time}' - for label, _, satisfied in itask.state.outputs.get_all(): - output = tp_delta.outputs[label] - output.label = label + for trigger, message, satisfied in itask.state.outputs: + output = tp_delta.outputs[trigger] + output.label = trigger + output.message = message output.satisfied = satisfied output.time = update_time diff --git a/cylc/flow/exceptions.py b/cylc/flow/exceptions.py index 0800a914888..9881631484b 100644 --- 
a/cylc/flow/exceptions.py +++ b/cylc/flow/exceptions.py @@ -487,3 +487,16 @@ def __str__(self): ) else: return "Installed workflow is not compatible with Cylc 8." + + +class InvalidCompletionExpression(CylcError): + """For the [runtime][]completion configuration. + + Raised when non-whitelisted syntax is present. + """ + def __init__(self, message, expr=None): + self.message = message + self.expr = expr + + def __str__(self): + return self.message diff --git a/cylc/flow/graph_parser.py b/cylc/flow/graph_parser.py index 3dbab9261b7..ac8968dc213 100644 --- a/cylc/flow/graph_parser.py +++ b/cylc/flow/graph_parser.py @@ -745,9 +745,14 @@ def _set_output_opt( if suicide: return - if output == TASK_OUTPUT_EXPIRED and not optional: - raise GraphParseError( - f"Expired-output {name}:{output} must be optional") + if ( + output in {TASK_OUTPUT_EXPIRED, TASK_OUTPUT_SUBMIT_FAILED} + and not optional + ): + # ":expire" and ":submit-fail" cannot be required + # proposal point 4: + # https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + raise GraphParseError(f"{name}:{output} must be optional") if output == TASK_OUTPUT_FINISHED: # Interpret :finish pseudo-output diff --git a/cylc/flow/loggingutil.py b/cylc/flow/loggingutil.py index 35884729ea0..e3f1b82d47b 100644 --- a/cylc/flow/loggingutil.py +++ b/cylc/flow/loggingutil.py @@ -53,10 +53,10 @@ class CylcLogFormatter(logging.Formatter): """ COLORS = { - 'CRITICAL': cparse('{0}'), - 'ERROR': cparse('{0}'), - 'WARNING': cparse('{0}'), - 'DEBUG': cparse('{0}') + 'CRITICAL': '{0}', + 'ERROR': '{0}', + 'WARNING': '{0}', + 'DEBUG': '{0}' } # default hard-coded max width for log entries @@ -99,7 +99,7 @@ def format(self, record): # noqa: A003 (method name not local) if not self.timestamp: _, text = text.split(' ', 1) # ISO8601 time points have no spaces if self.color and record.levelname in self.COLORS: - text = self.COLORS[record.levelname].format(text) + text = 
cparse(self.COLORS[record.levelname].format(text)) if self.max_width: return '\n'.join( line @@ -329,7 +329,7 @@ def _filter(self, record): def re_formatter(log_string): """Read in an uncoloured log_string file and apply colour formatting.""" for sub, repl in LOG_LEVEL_REGEXES: - log_string = sub.sub(repl, log_string) + log_string = cparse(sub.sub(repl, log_string)) return log_string diff --git a/cylc/flow/network/schema.py b/cylc/flow/network/schema.py index a1ae2ea26f7..e2e0d8f9422 100644 --- a/cylc/flow/network/schema.py +++ b/cylc/flow/network/schema.py @@ -807,6 +807,7 @@ class Meta: """) platform = String(default_value=None) script = String(default_value=None) + completion = String(default_value=None) init_script = String(default_value=None) env_script = String(default_value=None) err_script = String(default_value=None) diff --git a/cylc/flow/scripts/show.py b/cylc/flow/scripts/show.py index dae42f637b8..80180964cc5 100755 --- a/cylc/flow/scripts/show.py +++ b/cylc/flow/scripts/show.py @@ -40,6 +40,7 @@ import re import json import sys +from textwrap import indent from typing import Any, Dict, TYPE_CHECKING from ansimarkup import ansiprint @@ -51,6 +52,7 @@ from cylc.flow.id import Tokens from cylc.flow.id_cli import parse_ids from cylc.flow.network.client_factory import get_client +from cylc.flow.task_outputs import TaskOutputs from cylc.flow.task_state import ( TASK_STATUSES_ORDERED, TASK_STATUS_RUNNING @@ -60,6 +62,7 @@ ID_MULTI_ARG_DOC, ) from cylc.flow.terminal import cli_function +from cylc.flow.util import BOOL_SYMBOLS if TYPE_CHECKING: @@ -135,16 +138,39 @@ label satisfied } + runtime { + completion + } } } ''' +SATISFIED = BOOL_SYMBOLS[True] +UNSATISFIED = BOOL_SYMBOLS[False] + + def print_msg_state(msg, state): if state: - ansiprint(f' + {msg}') + ansiprint(f' {SATISFIED} {msg}') else: - ansiprint(f' - {msg}') + ansiprint(f' {UNSATISFIED} {msg}') + + +def print_completion_state(t_proxy): + # create task outputs object + outputs = 
TaskOutputs(t_proxy["runtime"]["completion"]) + + for output in t_proxy['outputs']: + outputs.add(output['label'], output['message']) + if output['satisfied']: + outputs.set_message_complete(output['message']) + + ansiprint( + f'completion:' + f' {"complete" if outputs.is_complete() else "incomplete"}' + f'\n{indent(outputs.format_completion_status(ansimarkup=2), " ")}' + ) def flatten_data(data, flat_data=None): @@ -316,14 +342,16 @@ async def prereqs_and_outputs_query( ansiprint(f"{pre_txt} (n/a for past tasks)") else: ansiprint( - f"{pre_txt} ('-': not satisfied)") + f"{pre_txt}" + f"('{UNSATISFIED}': not satisfied)" + ) for _, prefix, msg, state in prereqs: print_msg_state(f'{prefix}{msg}', state) # outputs ansiprint( 'outputs:' - " ('-': not completed)") + f" ('{UNSATISFIED}': not completed)") if not t_proxy['outputs']: # (Not possible - standard outputs) print(' (None)') for output in t_proxy['outputs']: @@ -334,7 +362,9 @@ async def prereqs_and_outputs_query( or t_proxy['xtriggers'] ): ansiprint( - "other: ('-': not satisfied)") + "other:" + f"('{UNSATISFIED}': not satisfied)" + ) for ext_trig in t_proxy['externalTriggers']: state = ext_trig['satisfied'] print_msg_state( @@ -346,6 +376,9 @@ async def prereqs_and_outputs_query( print_msg_state( f'xtrigger "{xtrig["label"]} = {label}"', state) + + print_completion_state(t_proxy) + if not results['taskProxies']: ansiprint( f"No matching active tasks found: {', '.join(ids_list)}", diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index a7f16e04498..70342d4ab59 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -662,6 +662,7 @@ def process_message( True: if polling is required to confirm a reversal of status. 
""" + # Log messages if event_time is None: event_time = get_current_time_string() @@ -699,8 +700,7 @@ def process_message( completed_output = None if msg0 not in [TASK_OUTPUT_SUBMIT_FAILED, TASK_OUTPUT_FAILED]: - completed_output = itask.state.outputs.set_msg_trg_completion( - message=msg0, is_completed=True) + completed_output = itask.state.outputs.set_message_complete(msg0) if completed_output: self.data_store_mgr.delta_task_output(itask, msg0) @@ -1315,8 +1315,7 @@ def _process_message_failed(self, itask, event_time, message, forced): if itask.state_reset(TASK_STATUS_FAILED, forced=forced): self.setup_event_handlers(itask, self.EVENT_FAILED, message) self.data_store_mgr.delta_task_state(itask) - itask.state.outputs.set_msg_trg_completion( - message=TASK_OUTPUT_FAILED, is_completed=True) + itask.state.outputs.set_message_complete(TASK_OUTPUT_FAILED) self.data_store_mgr.delta_task_output( itask, TASK_OUTPUT_FAILED) self.data_store_mgr.delta_task_state(itask) @@ -1417,8 +1416,9 @@ def _process_message_submit_failed( self.setup_event_handlers( itask, self.EVENT_SUBMIT_FAILED, f'job {self.EVENT_SUBMIT_FAILED}') - itask.state.outputs.set_msg_trg_completion( - message=TASK_OUTPUT_SUBMIT_FAILED, is_completed=True) + itask.state.outputs.set_message_complete( + TASK_OUTPUT_SUBMIT_FAILED + ) self.data_store_mgr.delta_task_output( itask, TASK_OUTPUT_SUBMIT_FAILED) self.data_store_mgr.delta_task_state(itask) @@ -1462,7 +1462,7 @@ def _process_message_submitted( itask.set_summary_time('started', event_time) if itask.state_reset(TASK_STATUS_RUNNING, forced=forced): self.data_store_mgr.delta_task_state(itask) - itask.state.outputs.set_completion(TASK_OUTPUT_STARTED, True) + itask.state.outputs.set_message_complete(TASK_OUTPUT_STARTED) self.data_store_mgr.delta_task_output(itask, TASK_OUTPUT_STARTED) else: diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index 644b7b0dd3c..0ca7a2345d6 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ 
-15,7 +15,29 @@ # along with this program. If not, see . """Task output message manager and constants.""" -from typing import List +import ast +import re +from typing import ( + Dict, + Iterable, + Iterator, + List, + Optional, + TYPE_CHECKING, + Tuple, + Union, +) + +from cylc.flow.exceptions import InvalidCompletionExpression +from cylc.flow.util import ( + BOOL_SYMBOLS, + get_variable_names, + restricted_evaluator, +) + +if TYPE_CHECKING: + from cylc.flow.taskdef import TaskDef + # Standard task output strings, used for triggering. TASK_OUTPUT_EXPIRED = "expired" @@ -32,7 +54,8 @@ TASK_OUTPUT_SUBMIT_FAILED, TASK_OUTPUT_STARTED, TASK_OUTPUT_SUCCEEDED, - TASK_OUTPUT_FAILED) + TASK_OUTPUT_FAILED, +) TASK_OUTPUTS = ( TASK_OUTPUT_EXPIRED, @@ -44,226 +67,305 @@ TASK_OUTPUT_FINISHED, ) -_TRIGGER = 0 -_MESSAGE = 1 -_IS_COMPLETED = 2 +# this evaluates task completion expressions +CompletionEvaluator = restricted_evaluator( + # expressions + ast.Expression, + # variables + ast.Name, ast.Load, + # operations + ast.BoolOp, ast.And, ast.Or, ast.BinOp, + error_class=InvalidCompletionExpression, +) +# regex for splitting expressions into individual parts for formatting +RE_EXPR_SPLIT = re.compile(r'([\(\) ])') -class TaskOutputs: - """Task output message manager. - Manage standard task outputs and custom outputs, e.g.: - [scheduling] - [[graph]] - R1 = t1:trigger1 => t2 - [runtime] - [[t1]] - [[[outputs]]] - trigger1 = message 1 +def trigger_to_completion_variable(output: str) -> str: + """Turn a trigger into something that can be used in an expression. - Can search item by message string or by trigger string. - """ + Examples: + >>> trigger_to_completion_variable('succeeded') + 'succeeded' + >>> trigger_to_completion_variable('submit-failed') + 'submit_failed' - # Memory optimization - constrain possible attributes to this list. 
- __slots__ = ["_by_message", "_by_trigger", "_required"] + """ + return output.replace('-', '_') - def __init__(self, tdef): - self._by_message = {} - self._by_trigger = {} - self._required = {} # trigger: message - # Add outputs from task def. - for trigger, (message, required) in tdef.outputs.items(): - self._add(message, trigger, required=required) +def get_completion_expression(tdef: 'TaskDef') -> str: + """Return a completion expression for this task definition. - # Handle implicit submit requirement - if ( - # "submitted" is not declared as optional/required - tdef.outputs[TASK_OUTPUT_SUBMITTED][1] is None - # and "submit-failed" is not declared as optional/required - and tdef.outputs[TASK_OUTPUT_SUBMIT_FAILED][1] is None - ): - self._add( - TASK_OUTPUT_SUBMITTED, - TASK_OUTPUT_SUBMITTED, - required=True, - ) + If there is *not* a user provided completion statement: - def _add(self, message, trigger, is_completed=False, required=False): - """Add a new output message""" - self._by_message[message] = [trigger, message, is_completed] - self._by_trigger[trigger] = self._by_message[message] - if required: - self._required[trigger] = message - - def set_completed_by_msg(self, message): - """For flow trigger --wait: set completed outputs from the DB.""" - for trig, msg, _ in self._by_trigger.values(): - if message == msg: - self._add(message, trig, True, trig in self._required) - break - - def all_completed(self): - """Return True if all all outputs completed.""" - return all(val[_IS_COMPLETED] for val in self._by_message.values()) - - def exists(self, message=None, trigger=None): - """Return True if message/trigger is identified as an output.""" - try: - return self._get_item(message, trigger) is not None - except KeyError: - return False - - def get_all(self): - """Return an iterator for all output messages.""" - return sorted(self._by_message.values(), key=self.msg_sort_key) - - def get_completed(self): - """Return all completed output messages.""" - ret = [] - 
for value in self.get_all(): - if value[_IS_COMPLETED]: - ret.append(value[_MESSAGE]) - return ret - - def get_completed_all(self): - """Return all completed outputs. - - Return a list in this form: [(trigger1, message1), ...] - """ - ret = [] - for value in self.get_all(): - if value[_IS_COMPLETED]: - ret.append((value[_TRIGGER], value[_MESSAGE])) - return ret - - def has_custom_triggers(self): - """Return True if it has any custom triggers.""" - return any(key not in SORT_ORDERS for key in self._by_trigger) - - def _get_custom_triggers(self, required: bool = False) -> List[str]: - """Return list of all, or required, custom trigger messages.""" - custom = [ - out[1] for trg, out in self._by_trigger.items() - if trg not in SORT_ORDERS - ] + 1. Create a completion expression that ensures all required ouputs are + completed. + 2. If succeess is optional add "or succeeded or failed" onto the end. + 3. If submission is optional add "or submit-failed" onto the end of it. + 4. If expiry is optional add "or expired" onto the end of it. 
+ """ + # check if there is a user-configured completion expression + completion = tdef.rtconfig.get('completion') + if completion: + # completion expression is defined in the runtime -> return it + return completion + + # (1) start with an expression that ensures all required outputs are + # generated (if the task runs) + required = { + trigger_to_completion_variable(trigger) + for trigger, (_message, required) in tdef.outputs.items() + if required + } + parts = [] + if required: + _part = ' and '.join(sorted(required)) + if len(required) > 1: + # wrap the expression in brackets for clarity + parts.append(f'({_part})') + else: + parts.append(_part) + + # (2) handle optional success + if ( + tdef.outputs[TASK_OUTPUT_SUCCEEDED][1] is False + or tdef.outputs[TASK_OUTPUT_FAILED][1] is False + ): + # failure is tolerated -> ensure the task succeess OR fails if required: - custom = [out for out in custom if out in self._required.values()] - return custom - - def get_not_completed(self): - """Return all not-completed output messages.""" - ret = [] - for value in self.get_all(): - if not value[_IS_COMPLETED]: - ret.append(value[_MESSAGE]) - return ret - - def is_completed(self, message=None, trigger=None): - """Return True if output of message is completed.""" - try: - return self._get_item(message, trigger)[_IS_COMPLETED] - except KeyError: - return False - - def remove(self, message=None, trigger=None): - """Remove an output by message, if it exists.""" - try: - trigger, message = self._get_item(message, trigger)[:2] - except KeyError: - pass + # required outputs are required only if the task actually runs + parts = [ + f'({parts[0]} and {TASK_OUTPUT_SUCCEEDED})' + f' or {TASK_OUTPUT_FAILED}' + ] else: - del self._by_message[message] - del self._by_trigger[trigger] + parts.append( + f'{TASK_OUTPUT_SUCCEEDED} or {TASK_OUTPUT_FAILED}' + ) - def set_all_completed(self): - """Set all outputs to complete.""" - for value in self._by_message.values(): - value[_IS_COMPLETED] = 
True + # (3) handle optional submission + if ( + tdef.outputs[TASK_OUTPUT_SUBMITTED][1] is False + or tdef.outputs[TASK_OUTPUT_SUBMIT_FAILED][1] is False + ): + # submit-fail tolerated -> ensure the task executes OR submit-fails + parts.append( + trigger_to_completion_variable(TASK_OUTPUT_SUBMIT_FAILED) + ) - def set_all_incomplete(self): - """Set all outputs to incomplete.""" - for value in self._by_message.values(): - value[_IS_COMPLETED] = False + # (4) handle optional expiry + if tdef.outputs[TASK_OUTPUT_EXPIRED][1] is False: + # expiry tolerated -> ensure the task executes OR expires + parts.append(TASK_OUTPUT_EXPIRED) + + return ' or '.join(parts) + + +def get_optional_outputs( + expression: str, + outputs: Iterable[str], +) -> Dict[str, Optional[bool]]: + """Determine which outputs in an expression are optional. + + Args: + expression: + The completion expression. + outputs: + All outputs that apply to this task. + + Returns: + dict: compvar: is_optional + + compvar: + The completion variable, i.e. the trigger as used in the completion + expression. + is_optional: + * True if var is optional. + * False if var is required. + * None if var is not referenced. + + Examples: + >>> sorted(get_optional_outputs( + ... '(succeeded and (x or y)) or failed', + ... {'succeeded', 'x', 'y', 'failed', 'expired'} + ... ).items()) + [('expired', None), ('failed', True), ('succeeded', True), + ('x', True), ('y', True)] + + >>> sorted(get_optional_outputs( + ... '(succeeded and x and y) or expired', + ... {'succeeded', 'x', 'y', 'failed', 'expired'} + ... 
).items()) + [('expired', True), ('failed', None), ('succeeded', False), + ('x', False), ('y', False)] - def set_completion(self, message, is_completed): - """Set output message completion status to is_completed (bool).""" - if message in self._by_message: - self._by_message[message][_IS_COMPLETED] = is_completed + """ + # determine which triggers are used in the expression + used_compvars = get_variable_names(expression) + + # all completion vairables which could appear in the expression + all_compvars = {trigger_to_completion_variable(out) for out in outputs} + + return { # output: is_optional + # the outputs that are used in the expression + **{ + output: CompletionEvaluator( + expression, + **{ + **{out: out != output for out in all_compvars}, + # don't consider pre-execution conditions as optional + # (pre-conditions are considered separately) + 'expired': False, + 'submit_failed': False, + }, + ) + for output in used_compvars + }, + # the outputs that are not used in the expression + **{ + output: None + for output in all_compvars - used_compvars + }, + } - def set_msg_trg_completion(self, message=None, trigger=None, - is_completed=True): - """Set the output identified by message/trigger to is_completed. - Return: - - Value of trigger (True) if completion flag is changed, - - False if completion is unchanged, or - - None if message/trigger is not found. +class TaskOutputs: + """Represents a collection of outputs for a task. 
- """ - try: - item = self._get_item(message, trigger) - old_is_completed = item[_IS_COMPLETED] - item[_IS_COMPLETED] = is_completed - except KeyError: - return None - else: - if bool(old_is_completed) == bool(is_completed): - return False - else: - return item[_TRIGGER] - - def is_incomplete(self): - """Return True if any required outputs are not complete.""" - return any( - not completed - and trigger in self._required - for trigger, (_, _, completed) in self._by_trigger.items() - ) + Task outputs have a trigger and a message: + * The trigger is used in the graph and with "cylc set". + * Messages map onto triggers and are used with "cylc message", they can + provide additional context to an output which will appear in the workflow + log. - def get_incomplete(self): - """Return a list of required outputs that are not complete. + [scheduling] + [[graph]] + R1 = t1:trigger1 => t2 + [runtime] + [[t1]] + [[[outputs]]] + trigger1 = message 1 - A task is incomplete if: + Args: + tdef: + The task definition for the task these outputs represent. - * it finished executing without completing all required outputs - * or if job submission failed and the :submit output was not optional + For use outside of the scheduler, this argument can be completion + expression string. - https://github.com/cylc/cylc-admin/blob/master/docs/proposal-new-output-syntax.md#output-syntax + """ + __slots__ = ( + "_message_to_trigger", + "_message_to_compvar", + "_completed", + "_completion_expression", + ) + + _message_to_trigger: Dict[str, str] # message: trigger + _message_to_compvar: Dict[str, str] # message: completion variable + _completed: Dict[str, bool] # message: is_complete + _completion_expression: str + + def __init__(self, tdef: 'Union[TaskDef, str]'): + self._message_to_trigger = {} + self._message_to_compvar = {} + self._completed = {} + + if isinstance(tdef, str): + # abnormal use e.g. from the "cylc show" command + self._completion_expression = tdef + else: + # normal use e.g. 
from within the scheduler + self._completion_expression = get_completion_expression(tdef) + for trigger, (message, _required) in tdef.outputs.items(): + self.add(trigger, message) + + def add(self, trigger: str, message: str) -> None: + """Register a new output. + + Note, normally outputs are listed automatically from the provided + TaskDef so there is no need to call this interface. It exists for cases + where TaskOutputs are used outside of the scheduler where there is no + TaskDef object handy so outputs must be listed manually. """ - return [ + self._message_to_trigger[message] = trigger + self._message_to_compvar[message] = trigger_to_completion_variable( trigger - for trigger, (_, _, is_completed) in self._by_trigger.items() - if not is_completed and trigger in self._required - ] + ) + self._completed[message] = False - def get_item(self, message): - """Return output item by message. + def get_trigger(self, message: str) -> str: + """Return the trigger associated with this message.""" + return self._message_to_trigger[message] - Args: - message (str): Output message. + def set_message_complete(self, message): + """Set the provided task message as complete.""" + if ( + # Note, this interface sometimes gets called erroneously, so we + # have to check that the message being set actually applies to + # these outputs before setting it + message in self._completed + and self._completed[message] is False + ): + self._completed[message] = True + return True + return False + + def is_message_complete(self, message: str) -> Optional[bool]: + """Return True if this messages is complete. Returns: - item (tuple): - label (str), message (str), satisfied (bool) + * True if the message is complete. + * False if the message is not complete. + * None if the message does not apply to these outputs. 
+ """ + if message in self._completed: + return self._completed[message] + return None + + def iter_completed_messages(self) -> Iterator[str]: + """A generator that yields completed messages. + + Yields: + message: A completed task message. """ - if message in self._by_message: - return self._by_message[message] + for message, is_completed in self._completed.items(): + if is_completed: + yield message + + def __iter__(self) -> Iterator[Tuple[str, str, bool]]: + """A generator that yields all outputs. + + Yields: + (trigger, message, is_complete) - def _get_item(self, message, trigger): - """Return self._by_trigger[trigger] or self._by_message[message]. + trigger: + The output trigger. + message: + The output message. + is_complete: + True if the output is complete, else False. - whichever is relevant. """ - if message is None: - return self._by_trigger[trigger] - else: - return self._by_message[message] + for message, is_complete in self._completed.items(): + yield self._message_to_trigger[message], message, is_complete + + def is_complete(self) -> bool: + """Return True if the outputs are complete.""" + return CompletionEvaluator( + self._completion_expression, + **{ + self._message_to_compvar[message]: completed + for message, completed in self._completed.items() + }, + ) - def get_incomplete_implied(self, output: str) -> List[str]: - """Return an ordered list of incomplete implied outputs. + def get_incomplete_implied(self, message: str) -> List[str]: + """Return an ordered list of incomplete implied messages. Use to determined implied outputs to complete automatically. @@ -276,41 +378,141 @@ def get_incomplete_implied(self, output: str) -> List[str]: """ implied: List[str] = [] - if output in [TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED]: + if message in [TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED]: # Finished, so it must have submitted and started. 
implied = [TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED] - - elif output == TASK_OUTPUT_STARTED: + elif message == TASK_OUTPUT_STARTED: # It must have submitted. implied = [TASK_OUTPUT_SUBMITTED] - return [out for out in implied if not self.is_completed(out)] + return [ + message + for message in implied + if not self.is_message_complete(message) + ] + + def format_completion_status( + self, + indent: int = 2, + gutter: int = 2, + ansimarkup: int = 0, + ) -> str: + """Return a text representation of the status of these outputs. + + Returns a multiline string representing the status of each output used + in the expression within the context of the expression itself. + + Args: + indent: + Number of spaces to indent each level of the expression. + gutter: + Number of spaces to pad the left column from the expression. + ansimarkup: + Turns on colour coding using ansimarkup tags. These will need + to be parsed before display. There are three options + + 0: + No colour coding. + 1: + Only success colours will be used. This is easier to read + in colour coded logs. + 2: + Both success and fail colours will be used. + + Returns: + A multiline textural representation of the completion status. 
+ + """ + indent_space: str = ' ' * indent + _gutter: str = ' ' * gutter + + def color_wrap(string, is_complete): + nonlocal ansimarkup + if ansimarkup == 0: + return string + if is_complete: + return f'{string}' + if ansimarkup == 2: + return f'{string}' + return string + + ret: List[str] = [] + indent_level: int = 0 + op: Optional[str] = None + for part in RE_EXPR_SPLIT.split(self._completion_expression): + if not part.strip(): + continue + + if part in {'and', 'or'}: + op = part + continue + + elif part == '(': + if op: + ret.append( + f' |{_gutter}{op}' + f' {(indent_space * indent_level)}{part}' + ) + else: + ret.append( + f' |{_gutter}{(indent_space * indent_level)}{part}' + ) + indent_level += 1 + elif part == ')': + indent_level -= 1 + ret.append( + f' |{_gutter}{(indent_space * indent_level)}{part}' + ) + + else: + _symbol = BOOL_SYMBOLS[ + self._is_compvar_complete(part) # type: ignore + ] + is_complete = self._is_compvar_complete(part) + _pre = ( + f'{color_wrap(_symbol, is_complete)} |' + f'{_gutter}{(indent_space * indent_level)}' + ) + if op: + ret.append(f'{_pre}{op} {color_wrap(part, is_complete)}') + else: + ret.append(f'{_pre}{color_wrap(part, is_complete)}') + + op = None + + return '\n'.join(ret) @staticmethod - def is_valid_std_name(name): + def is_valid_std_name(name: str) -> bool: """Check name is a valid standard output name.""" return name in SORT_ORDERS @staticmethod - def msg_sort_key(item): - """Compare by _MESSAGE.""" - try: - ind = SORT_ORDERS.index(item[_MESSAGE]) - except ValueError: - ind = 999 - return (ind, item[_MESSAGE] or '') - - @staticmethod - def output_sort_key(item): + def output_sort_key(item: Iterable[str]) -> float: """Compare by output order. Examples: - >>> this = TaskOutputs.output_sort_key >>> sorted(['finished', 'started', 'custom'], key=this) ['started', 'custom', 'finished'] + """ if item in TASK_OUTPUTS: return TASK_OUTPUTS.index(item) # Sort custom outputs after started. 
return TASK_OUTPUTS.index(TASK_OUTPUT_STARTED) + .5 + + def _is_compvar_complete(self, compvar: str) -> Optional[bool]: + """Return True if the completion variable is complete. + + Returns: + * True if var is optional. + * False if var is required. + * None if var is not referenced. + + """ + for message, _compvar in self._message_to_compvar.items(): + if _compvar == compvar: + return self.is_message_complete(message) + else: + raise KeyError(compvar) diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index 2b214d70943..cb245d800b3 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -19,6 +19,7 @@ from contextlib import suppress from collections import Counter import json +from textwrap import indent from typing import ( Dict, Iterable, @@ -526,7 +527,7 @@ def load_db_task_pool_for_restart(self, row_idx, row): TASK_STATUS_SUCCEEDED ): for message in json.loads(outputs_str): - itask.state.outputs.set_completion(message, True) + itask.state.outputs.set_message_complete(message) self.data_store_mgr.delta_task_output(itask, message) if platform_name and status != TASK_STATUS_WAITING: @@ -1146,15 +1147,22 @@ def log_incomplete_tasks(self) -> bool: for itask in self.get_tasks(): if not itask.state(*TASK_STATUSES_FINAL): continue - outputs = itask.state.outputs.get_incomplete() - if outputs: - incomplete.append((itask.identity, outputs)) + if not itask.state.outputs.is_complete(): + incomplete.append( + ( + itask.identity, + itask.state.outputs.format_completion_status( + ansimarkup=1 + ), + ) + ) if incomplete: LOG.error( "Incomplete tasks:\n" + "\n".join( - f" * {id_} did not complete required outputs: {outputs}" + f"* {id_} did not complete the required outputs:" + f"\n{indent(outputs, ' ')}" for id_, outputs in incomplete ) ) @@ -1441,22 +1449,17 @@ def remove_if_complete( self.release_runahead_tasks() return ret - if itask.state(TASK_STATUS_EXPIRED): - self.remove(itask, "expired") - if self.compute_runahead(): - 
self.release_runahead_tasks() - return True - - incomplete = itask.state.outputs.get_incomplete() - if incomplete: + if not itask.state.outputs.is_complete(): # Keep incomplete tasks in the pool. if output in TASK_STATUSES_FINAL: # Log based on the output, not the state, to avoid warnings # due to use of "cylc set" to set internal outputs on an # already-finished task. LOG.warning( - f"[{itask}] did not complete required outputs:" - f" {incomplete}" + f"[{itask}] did not complete the required outputs:\n" + + itask.state.outputs.format_completion_status( + ansimarkup=1 + ) ) return False @@ -1482,14 +1485,12 @@ def spawn_on_all_outputs( """ if not itask.flow_nums: return - if completed_only: - outputs = itask.state.outputs.get_completed() - else: - outputs = itask.state.outputs._by_message - for output in outputs: + for _trigger, message, is_completed in itask.state.outputs: + if completed_only and not is_completed: + continue try: - children = itask.graph_children[output] + children = itask.graph_children[message] except KeyError: continue @@ -1509,7 +1510,7 @@ def spawn_on_all_outputs( continue if completed_only: c_task.satisfy_me( - [itask.tokens.duplicate(task_sel=output)] + [itask.tokens.duplicate(task_sel=message)] ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) @@ -1595,7 +1596,7 @@ def _load_historical_outputs(self, itask): for outputs_str, fnums in info.items(): if itask.flow_nums.intersection(fnums): for msg in json.loads(outputs_str): - itask.state.outputs.set_completed_by_msg(msg) + itask.state.outputs.set_message_complete(msg) def spawn_task( self, @@ -1744,7 +1745,7 @@ def _get_task_proxy_db_outputs( for outputs_str, fnums in info.items(): if flow_nums.intersection(fnums): for msg in json.loads(outputs_str): - itask.state.outputs.set_completed_by_msg(msg) + itask.state.outputs.set_message_complete(msg) return itask def _standardise_prereqs( @@ -1887,7 +1888,6 @@ def _set_outputs_itask( outputs: List[str], ) -> None: 
"""Set requested outputs on a task proxy and spawn children.""" - if not outputs: outputs = itask.tdef.get_required_output_messages() else: @@ -1896,7 +1896,7 @@ def _set_outputs_itask( outputs = sorted(outputs, key=itask.state.outputs.output_sort_key) for output in outputs: - if itask.state.outputs.is_completed(output): + if itask.state.outputs.is_message_complete(output): LOG.info(f"output {itask.identity}:{output} completed already") continue self.task_events_mgr.process_message( @@ -2410,7 +2410,7 @@ def merge_flows(self, itask: TaskProxy, flow_nums: 'FlowNums') -> None: if ( itask.state(*TASK_STATUSES_FINAL) - and itask.state.outputs.get_incomplete() + and not itask.state.outputs.is_complete() ): # Re-queue incomplete task to run again in the merged flow. LOG.info(f"[{itask}] incomplete task absorbed by new flow.") diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 898017c8da1..dbb6efc6db9 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -38,19 +38,11 @@ from cylc.flow.flow_mgr import stringify_flow_nums from cylc.flow.platforms import get_platform from cylc.flow.task_action_timer import TimerFlags -from cylc.flow.task_outputs import ( - TASK_OUTPUT_FAILED, - TASK_OUTPUT_EXPIRED, - TASK_OUTPUT_SUCCEEDED, - TASK_OUTPUT_SUBMIT_FAILED -) from cylc.flow.task_state import ( TaskState, TASK_STATUS_WAITING, TASK_STATUS_EXPIRED, - TASK_STATUS_SUCCEEDED, - TASK_STATUS_SUBMIT_FAILED, - TASK_STATUS_FAILED + TASK_STATUSES_FINAL, ) from cylc.flow.taskdef import generate_graph_children from cylc.flow.wallclock import get_unix_time_from_time_string as str2time @@ -578,32 +570,8 @@ def clock_expire(self) -> bool: def is_finished(self) -> bool: """Return True if a final state achieved.""" - return ( - self.state( - TASK_STATUS_EXPIRED, - TASK_STATUS_SUBMIT_FAILED, - TASK_STATUS_FAILED, - TASK_STATUS_SUCCEEDED - ) - ) + return self.state(*TASK_STATUSES_FINAL) def is_complete(self) -> bool: """Return True if complete or expired.""" - return 
( - self.state(TASK_STATUS_EXPIRED) - or not self.state.outputs.is_incomplete() - ) - - def set_state_by_outputs(self) -> None: - """Set state according to which final output is completed.""" - for output in ( - TASK_OUTPUT_EXPIRED, TASK_OUTPUT_SUBMIT_FAILED, - TASK_OUTPUT_FAILED, TASK_OUTPUT_SUCCEEDED - ): - if self.state.outputs.is_completed(output, output): - # This assumes status and output strings are the same: - self.state_reset( - status=output, - silent=True, is_queued=False, is_runahead=False - ) - break + return self.state.outputs.is_complete() diff --git a/cylc/flow/taskdef.py b/cylc/flow/taskdef.py index 1da5101306b..22315b0e78e 100644 --- a/cylc/flow/taskdef.py +++ b/cylc/flow/taskdef.py @@ -23,9 +23,7 @@ from cylc.flow.exceptions import TaskDefError from cylc.flow.task_id import TaskID from cylc.flow.task_outputs import ( - TASK_OUTPUT_EXPIRED, TASK_OUTPUT_SUBMITTED, - TASK_OUTPUT_SUBMIT_FAILED, TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED, SORT_ORDERS @@ -75,7 +73,7 @@ def generate_graph_children(tdef, point): def generate_graph_parents(tdef, point, taskdefs): - """Determine concrent graph parents of task tdef at point. + """Determine concrete graph parents of task tdef at point. Infer parents be reversing upstream triggers that lead to point/task. """ @@ -204,21 +202,13 @@ def get_required_output_messages(self): def tweak_outputs(self): """Output consistency checking and tweaking.""" - # If :succeed or :fail not set, assume success is required. - # Unless submit (and submit-fail) is optional (don't stall - # because of missing succeed if submit is optional). 
if ( self.outputs[TASK_OUTPUT_SUCCEEDED][1] is None and self.outputs[TASK_OUTPUT_FAILED][1] is None - and self.outputs[TASK_OUTPUT_SUBMITTED][1] is not False - and self.outputs[TASK_OUTPUT_SUBMIT_FAILED][1] is not False ): self.set_required_output(TASK_OUTPUT_SUCCEEDED, True) - # Expired must be optional - self.set_required_output(TASK_OUTPUT_EXPIRED, False) - # In Cylc 7 back compat mode, make all success outputs required. if cylc.flow.flags.cylc7_back_compat: for output in [ diff --git a/cylc/flow/util.py b/cylc/flow/util.py index b167cb33a84..ac10bab9b54 100644 --- a/cylc/flow/util.py +++ b/cylc/flow/util.py @@ -25,9 +25,12 @@ Callable, List, Sequence, + Tuple, ) +BOOL_SYMBOLS: List[str] = ['x', '✓'] + _NAT_SORT_SPLIT = re.compile(r'([\d\.]+)') @@ -336,3 +339,35 @@ def _get_exception( } return error_class(message, **context) + + +class NameWalker(ast.NodeVisitor): + """AST node visitor which records all variable names in an expression. + + Examples: + >>> tree = ast.parse('(foo and bar) or baz or qux') + >>> walker = NameWalker() + >>> walker.visit(tree) + >>> sorted(walker.names) + ['bar', 'baz', 'foo', 'qux'] + + """ + + def __init__(self): + super().__init__() + self._names = set() + + def visit(self, node): + if isinstance(node, ast.Name): + self._names.add(node.id) + return super().visit(node) + + @property + def names(self): + return self._names + + +def get_variable_names(expression): + walker = NameWalker() + walker.visit(ast.parse(expression)) + return walker.names diff --git a/cylc/flow/workflow_db_mgr.py b/cylc/flow/workflow_db_mgr.py index 128cfd45126..0a92e7312bf 100644 --- a/cylc/flow/workflow_db_mgr.py +++ b/cylc/flow/workflow_db_mgr.py @@ -630,11 +630,10 @@ def put_update_task_jobs(self, itask, set_args): def put_update_task_outputs(self, itask): """Put UPDATE statement for task_outputs table.""" - outputs = [] - for _, message in itask.state.outputs.get_completed_all(): - outputs.append(message) set_args = { - "outputs": json.dumps(outputs) + 
"outputs": json.dumps( + list(itask.state.outputs.iter_completed_messages()) + ) } where_args = { "cycle": str(itask.point), diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index d575f2a6af2..edfe56e2a1f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -360,6 +360,28 @@ def _validate(id_: Union[str, Path], **kwargs) -> WorkflowConfig: return _validate +@pytest.fixture(scope='module') +def mod_validate(run_dir): + """Provides a function for validating workflow configurations. + + Attempts to load the configuration, will raise exceptions if there are + errors. + + Args: + id_ - The flow to validate + kwargs - Arguments to pass to ValidateOptions + """ + def _validate(id_: Union[str, Path], **kwargs) -> WorkflowConfig: + id_ = str(id_) + return WorkflowConfig( + id_, + str(Path(run_dir, id_, 'flow.cylc')), + ValidateOptions(**kwargs) + ) + + return _validate + + @pytest.fixture def capture_submission(): """Suppress job submission and capture submitted tasks. diff --git a/tests/integration/test_optional_outputs.py b/tests/integration/test_optional_outputs.py new file mode 100644 index 00000000000..d897fc4ce6d --- /dev/null +++ b/tests/integration/test_optional_outputs.py @@ -0,0 +1,380 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Tests optional output and task completion logic. + +This functionality is defined by the "optional-output-extension" proposal: + +https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal +""" + +from itertools import combinations +from typing import TYPE_CHECKING + +import pytest + +from cylc.flow.cycling.integer import IntegerPoint +from cylc.flow.cycling.iso8601 import ISO8601Point +from cylc.flow.network.resolvers import TaskMsg +from cylc.flow.scheduler import Scheduler +from cylc.flow.task_events_mgr import ( + TaskEventsManager, +) +from cylc.flow.task_outputs import ( + TASK_OUTPUTS, + TASK_OUTPUT_EXPIRED, + TASK_OUTPUT_FINISHED, + TASK_OUTPUT_SUCCEEDED, + get_completion_expression, +) +from cylc.flow.task_state import ( + TASK_STATUS_EXPIRED, + TASK_STATUS_PREPARING, + TASK_STATUS_WAITING, +) + +if TYPE_CHECKING: + from cylc.flow.task_proxy import TaskProxy + + +def reset_outputs(itask: 'TaskProxy'): + """Undo the consequences of setting task outputs. + + This assumes you haven't completed the task. + """ + itask.state.outputs._completed = { + message: False + for message in itask.state.outputs._completed + } + itask.state_reset( + TASK_STATUS_WAITING, + is_queued=False, + is_held=False, + is_runahead=False, + ) + + +@pytest.mark.parametrize( + 'graph, completion_outputs', + [ + pytest.param( + 'a:x', + [{TASK_OUTPUT_SUCCEEDED, 'x'}], + id='1', + ), + pytest.param( + 'a\na:x\na:expired?', + [{TASK_OUTPUT_SUCCEEDED, 'x'}, {TASK_OUTPUT_EXPIRED}], + id='2', + ), + ], +) +async def test_task_completion( + flow, + scheduler, + start, + graph, + completion_outputs, + capcall, +): + """Ensure that task completion is watertight. + + Run through every possible permutation of outputs MINUS the ones that would + actually complete a task to ensure that task completion is correctly + handled. 
+ + Note, the building and evaluation of completion expressions is also tested, + this is more of an end-to-end test to ensure everything is connected + properly. + """ + # prevent tasks from being removed from the pool when complete + capcall( + 'cylc.flow.task_pool.TaskPool.remove_if_complete' + ) + id_ = flow({ + 'scheduling': { + 'graph': {'R1': graph}, + }, + 'runtime': { + 'a': { + 'outputs': { + 'x': 'xxx', + }, + }, + }, + }) + schd = scheduler(id_) + all_outputs = { + # all built-in outputs + *TASK_OUTPUTS, + # all registered custom outputs + 'x' + # but not the finished psudo output + } - {TASK_OUTPUT_FINISHED} + + async with start(schd): + a1 = schd.pool.get_task(IntegerPoint('1'), 'a') + + # try every set of outputs that *shouldn't* complete the task + for combination in { + comb + # every possible combination of outputs + for _length in range(1, len(all_outputs)) + for comb in combinations(all_outputs, _length) + # that doesn't contain the outputs that would satisfy the task + if not any( + set(comb) & output_set == output_set + for output_set in completion_outputs + ) + }: + # set the combination of outputs + schd.pool.set_prereqs_and_outputs( + ['1/a'], + combination, + [], + ['1'], + ) + + # ensure these outputs do *not* complete the task + assert not a1.state.outputs.is_complete() + + # reset any changes + reset_outputs(a1) + + # now try the outputs that *should* satisfy the task + for combination in completion_outputs: + # set the combination of outputs + schd.pool.set_prereqs_and_outputs( + ['1/a'], + combination, + [], + ['1'], + ) + + # ensure the task *is* completed + assert a1.state.outputs.is_complete() + + # reset any changes + reset_outputs(a1) + + +async def test_expire_orthogonality(flow, scheduler, start): + """Ensure "expired?" does not infer "succeeded?". 
+ + Asserts proposal point 2: + https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + """ + id_ = flow({ + 'scheduling': { + 'graph': { + 'R1': 'a:expire? => e' + }, + }, + }) + schd: 'Scheduler' = scheduler(id_, paused_start=False) + async with start(schd): + a_1 = schd.pool.get_task(IntegerPoint('1'), 'a') + + # wait for the task to submit + while not a_1.state(TASK_STATUS_WAITING, TASK_STATUS_PREPARING): + schd.release_queued_tasks() + + # NOTE: The submit number isn't presently incremented via this code + # pathway so we have to hack it here. If the task messages in this test + # get ignored because of some future change, then you can safely remove + # this line (it's not what this test is testing). + a_1.submit_num += 1 + + # tell the scheduler that the task *submit-failed* + schd.message_queue.put( + TaskMsg( + '1/a/01', + '2000-01-01T00:00:00+00', + 'INFO', + TaskEventsManager.EVENT_SUBMIT_FAILED + ), + ) + schd.process_queued_task_messages() + # ensure that the scheduler is stalled + assert not a_1.state.outputs.is_complete() + assert schd.pool.is_stalled() + + # tell the scheduler that the task *failed* + schd.message_queue.put( + TaskMsg( + '1/a/01', + '2000-01-01T00:00:00+00', + 'INFO', + TaskEventsManager.EVENT_FAILED, + ), + ) + schd.process_queued_task_messages() + # ensure that the scheduler is stalled + assert not a_1.state.outputs.is_complete() + assert schd.pool.is_stalled() + + # tell the scheduler that the task *expired* + schd.message_queue.put( + TaskMsg( + '1/a/01', + '2000-01-01T00:00:00+00', + 'INFO', + TaskEventsManager.EVENT_EXPIRED, + ), + ) + schd.process_queued_task_messages() + # ensure that the scheduler is *not* stalled + assert a_1.state.outputs.is_complete() + assert not schd.pool.is_stalled() + + +@pytest.fixture(scope='module') +def implicit_completion_config(mod_flow, mod_validate): + id_ = mod_flow({ + 'scheduling': { + 'graph': { + 'R1': ''' + a + + b? + + c:x + + d:x? + d:y? + d:z? 
+ + e:x + e:y + e:z + + f? + f:x + + g:expired? + + h:succeeded? + h:expired? + + i:expired? + i:submitted + + j:expired? + j:submitted? + + k:submit-failed? + k:succeeded? + + l:expired? + l:submit-failed? + l:succeeded? + ''' + } + }, + 'runtime': { + 'root': { + 'outputs': { + 'x': 'xxx', + 'y': 'yyy', + 'z': 'zzz', + } + } + } + }) + return mod_validate(id_) + + +@pytest.mark.parametrize( + 'task, condition', + [ + pytest.param('a', 'succeeded', id='a'), + pytest.param('b', 'succeeded or failed', id='b'), + pytest.param('c', '(succeeded and x)', id='c'), + pytest.param('d', 'succeeded', id='d'), + pytest.param('e', '(succeeded and x and y and z)', id='e'), + pytest.param('f', '(x and succeeded) or failed', id='f'), + pytest.param('g', 'succeeded or expired', id='h'), + pytest.param('h', 'succeeded or failed or expired', id='h'), + pytest.param('i', '(submitted and succeeded) or expired', id='i'), + pytest.param('j', 'succeeded or submit_failed or expired', id='j'), + pytest.param('k', 'succeeded or failed or submit_failed', id='k'), + pytest.param( + 'l', 'succeeded or failed or submit_failed or expired', id='l' + ), + ], +) +async def test_implicit_completion_expression( + implicit_completion_config, + task, + condition, +): + """It should generate a completion expression from the graph. + + If no completion expression is provided in the runtime section, then it + should auto generate one inferring whether outputs are required or not from + the graph. + """ + completion_expression = get_completion_expression( + implicit_completion_config.taskdefs[task] + ) + assert completion_expression == condition + + +async def test_clock_expire_partially_satisfied_task( + flow, + scheduler, + start, +): + """Clock expire should take effect on a partially satisfied task. 
+ + Tests proposal point 8: + https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + """ + id_ = flow({ + 'scheduling': { + 'initial cycle point': '2000', + 'runahead limit': 'P0', + 'special tasks': { + 'clock-expire': 'e', + }, + 'graph': { + 'P1D': ''' + # this prerequisite we will satisfy + a => e + + # this prerequisite we will leave unsatisfied creating a + # partially-satisfied task + b => e + ''' + }, + }, + }) + schd = scheduler(id_) + async with start(schd): + # satisfy one of the prerequisites + a = schd.pool.get_task(ISO8601Point('20000101T0000Z'), 'a') + assert a + schd.pool.spawn_on_output(a, TASK_OUTPUT_SUCCEEDED) + + # the task "e" should now be spawned + e = schd.pool.get_task(ISO8601Point('20000101T0000Z'), 'e') + assert e + + # check for clock-expired tasks + schd.pool.clock_expire_tasks() + + # the task should now be in the expired state + assert e.state(TASK_STATUS_EXPIRED) diff --git a/tests/integration/test_simulation.py b/tests/integration/test_simulation.py index 72cf23996a4..66842fade25 100644 --- a/tests/integration/test_simulation.py +++ b/tests/integration/test_simulation.py @@ -184,11 +184,10 @@ def test_task_finishes(sim_time_check_setup, monkeytime, caplog): # After simulation time is up it Fails and records custom outputs: assert sim_time_check(schd.task_events_mgr, [fail_all_1066], '') is True - outputs = { - o[0]: (o[1], o[2]) for o in fail_all_1066.state.outputs.get_all()} - assert outputs['succeeded'] == ('succeeded', False) - assert outputs['foo'] == ('bar', True) - assert outputs['failed'] == ('failed', True) + outputs = fail_all_1066.state.outputs + assert outputs.is_message_complete('succeeded') is False + assert outputs.is_message_complete('bar') is True + assert outputs.is_message_complete('failed') is True def test_task_sped_up(sim_time_check_setup, monkeytime): @@ -334,7 +333,7 @@ async def test_settings_reload( one_1066 = schd.pool.get_tasks()[0] itask = run_simjob(schd, one_1066.point, 
'one') - assert ['failed', 'failed', False] in itask.state.outputs.get_all() + assert itask.state.outputs.is_message_complete('failed') is False # Modify config as if reinstall had taken place: conf_file = Path(schd.workflow_run_dir) / 'flow.cylc' @@ -346,8 +345,7 @@ async def test_settings_reload( # Submit second psuedo-job and "run" to success: itask = run_simjob(schd, one_1066.point, 'one') - assert [ - 'succeeded', 'succeeded', True] in itask.state.outputs.get_all() + assert itask.state.outputs.is_message_complete('succeeded') is True async def test_settings_broadcast( diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py index 3d75074cf15..7623ba0a797 100644 --- a/tests/integration/test_task_pool.py +++ b/tests/integration/test_task_pool.py @@ -1178,9 +1178,10 @@ async def test_detect_incomplete_tasks( start, log_filter, ): - """Finished but incomplete tasks should be retains as incomplete.""" - - final_task_states = { + """Finished but incomplete tasks should be retained as incomplete.""" + incomplete_final_task_states = { + # final task states that would leave a task with + # completion=succeeded incomplete TASK_STATUS_FAILED: TaskEventsManager.EVENT_FAILED, TASK_STATUS_EXPIRED: TaskEventsManager.EVENT_EXPIRED, TASK_STATUS_SUBMIT_FAILED: TaskEventsManager.EVENT_SUBMIT_FAILED @@ -1192,7 +1193,7 @@ async def test_detect_incomplete_tasks( 'scheduling': { 'graph': { # a workflow with one task for each of the final task states - 'R1': '\n'.join(final_task_states.keys()) + 'R1': '\n'.join(incomplete_final_task_states.keys()) } } }) @@ -1204,28 +1205,18 @@ async def test_detect_incomplete_tasks( # spawn the output corresponding to the task schd.pool.task_events_mgr.process_message( itask, 1, - final_task_states[itask.tdef.name] + incomplete_final_task_states[itask.tdef.name] ) # ensure that it is correctly identified as incomplete - assert itask.state.outputs.get_incomplete() - assert itask.state.outputs.is_incomplete() - if 
itask.tdef.name == TASK_STATUS_EXPIRED: - assert log_filter( - log, - contains=f"[{itask}] removed (expired)" - ) - # the task should have been removed - assert itask not in schd.pool.get_tasks() - else: - assert log_filter( - log, - contains=( - f"[{itask}] did not complete " - "required outputs:" - ) - ) - # the task should not have been removed - assert itask in schd.pool.get_tasks() + assert not itask.state.outputs.is_complete() + assert log_filter( + log, + contains=( + f"[{itask}] did not complete the required outputs:" + ), + ) + # the task should not have been removed + assert itask in schd.pool.get_tasks() async def test_future_trigger_final_point( diff --git a/tests/integration/tui/screenshots/test_show.success.html b/tests/integration/tui/screenshots/test_show.success.html index 5f9c192b04b..8982956d523 100644 --- a/tests/integration/tui/screenshots/test_show.success.html +++ b/tests/integration/tui/screenshots/test_show.success.html @@ -8,7 +8,6 @@ - ──────────────────────────────────────────────── title: Foo description: The first metasyntactic @@ -16,13 +15,15 @@ URL: (not given) state: waiting prerequisites: (None) - outputs: ('-': not completed) - - 1/foo expired - - 1/foo submitted - - 1/foo submit-failed - - 1/foo started - - 1/foo succeeded - - 1/foo failed + outputs: ('x': not completed) + x 1/foo expired + x 1/foo submitted + x 1/foo submit-failed + x 1/foo started + x 1/foo succeeded + x 1/foo failed + completion: incomplete + x | succeeded q to close @@ -35,7 +36,6 @@ - quit: q help: h context: enter tree: - ← + → navigation: ↑ ↓ ↥ ↧ Home End filter tasks: T f s r R filter workflows: W E p \ No newline at end of file diff --git a/tests/integration/validate/test_outputs.py b/tests/integration/validate/test_outputs.py index 5675372a09f..8bfd9b546ee 100644 --- a/tests/integration/validate/test_outputs.py +++ b/tests/integration/validate/test_outputs.py @@ -32,7 +32,6 @@ 'foo', 'foo-bar', 'foo_bar', - 'foo.bar', '0foo0', '123', ], @@ -152,7 
+151,7 @@ def test_messages(messages, valid, flow, validate): 'runtime': { 'foo': { 'outputs': { - str(random()): message + str(random())[2:]: message for message in messages } } @@ -164,3 +163,111 @@ def test_messages(messages, valid, flow, validate): else: with pytest.raises(WorkflowConfigError): val() + + +@pytest.mark.parametrize( + 'graph, expression, message', [ + pytest.param( + 'foo:x', + 'succeeded and (x or y)', + r'foo:x is required in the graph.*' + r' but optional in the completion expression', + id='required-in-graph-optional-in-completion', + ), + pytest.param( + 'foo:x?', + 'succeeded and x', + r'foo:x is optional in the graph.*' + r' but required in the completion expression', + id='optional-in-graph-required-in-completion', + ), + pytest.param( + 'foo:x', + 'succeeded', + 'foo:x is required in the graph.*' + 'but not referenced in the completion expression', + id='required-in-graph-not-referenced-in-completion', + ), + pytest.param( + # tests proposal point 4: + # https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + 'foo:expired', + 'succeeded', + 'foo:expired must be optional', + id='expire-required-in-graph', + ), + pytest.param( + 'foo:expired?', + 'succeeded', + 'foo:expired is permitted in the graph.*' + '\nTry: completion = "succeeded or expired"', + id='expire-optional-in-graph-but-not-used-in-completion' + ), + pytest.param( + # tests part of proposal point 5: + # https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + 'foo', + 'finished and x', + '"finished" output cannot be used in completion expressions', + id='finished-output-used-in-completion-expression', + ), + ] +) +def test_completion_expression_invalid( + flow, + validate, + graph, + expression, + message, +): + """It should ensure the completion is logically consistent with the graph. 
+ + Tests proposal point 5: + https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + """ + id_ = flow({ + 'scheduling': { + 'graph': {'R1': graph}, + }, + 'runtime': { + 'foo': { + 'completion': expression, + 'outputs': { + 'x': 'xxx', + 'y': 'yyy', + }, + }, + }, + }) + with pytest.raises(WorkflowConfigError, match=message): + validate(id_) + + +@pytest.mark.parametrize( + 'graph, expression', [ + ('foo', 'succeeded and (x or y or z)'), + ('foo?', 'succeeded and (x or y or z) or failed or expired'), + ('foo', '(succeeded and x) or (expired and y)'), + ] +) +def test_completion_expression_valid( + flow, + validate, + graph, + expression, +): + id_ = flow({ + 'scheduling': { + 'graph': {'R1': graph}, + }, + 'runtime': { + 'foo': { + 'completion': expression, + 'outputs': { + 'x': 'xxx', + 'y': 'yyy', + }, + }, + }, + }) + validate(id_) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 98d9fc2f4ce..a69ab176412 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -1317,28 +1317,6 @@ def test_implicit_success_required(tmp_flow_config, graph): assert cfg.taskdefs['foo'].outputs[TASK_OUTPUT_SUCCEEDED][1] -@pytest.mark.parametrize( - 'graph', - [ - "foo:submit? => bar", - "foo:submit-fail? 
=> bar",
-    ]
-)
-def test_success_after_optional_submit(tmp_flow_config, graph):
-    """Check foo:succeed is not required if foo:submit is optional."""
-    id_ = 'blargh'
-    flow_file = tmp_flow_config(id_, f"""
-    [scheduling]
-        [[graph]]
-            R1 = {graph}
-    [runtime]
-        [[bar]]
-        [[foo]]
-    """)
-    cfg = WorkflowConfig(workflow=id_, fpath=flow_file, options=None)
-    assert not cfg.taskdefs['foo'].outputs[TASK_OUTPUT_SUCCEEDED][1]
-
-
 @pytest.mark.parametrize(
     'allow_implicit_tasks',
     [
diff --git a/tests/unit/test_graph_parser.py b/tests/unit/test_graph_parser.py
index ca19b2060b4..bbd4f85a001 100644
--- a/tests/unit/test_graph_parser.py
+++ b/tests/unit/test_graph_parser.py
@@ -720,7 +720,6 @@ def test_task_optional_outputs():
         ('succeed', TASK_OUTPUT_SUCCEEDED),
         ('fail', TASK_OUTPUT_FAILED),
         ('submit', TASK_OUTPUT_SUBMITTED),
-        ('submit-fail', TASK_OUTPUT_SUBMIT_FAILED),
     ]
 )
 def test_family_optional_outputs(qual, task_output):
@@ -751,6 +750,26 @@ def test_family_optional_outputs(qual, task_output):
         assert gp.task_output_opt[(member, task_output)][0] == optional
 
 
+def test_cannot_be_required():
+    """It should not allow :expired or :submit-failed to be required.
+
+    See proposal point 4:
+    https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal
+    """
+    gp = GraphParser({})
+
+    # outputs can be optional
+    gp.parse_graph('a:expired? => b')
+    gp.parse_graph('a:submit-failed?
=> b') + + # but cannot be required + with pytest.raises(GraphParseError, match='must be optional'): + gp.parse_graph('a:expired => b') + with pytest.raises(GraphParseError, match='must be optional'): + gp.parse_graph('a:submit-failed => b') + + + @pytest.mark.parametrize( 'graph, error', [ diff --git a/tests/unit/test_subprocpool.py b/tests/unit/test_subprocpool.py index c72ffc4d094..7da14e9e73c 100644 --- a/tests/unit/test_subprocpool.py +++ b/tests/unit/test_subprocpool.py @@ -30,6 +30,13 @@ from cylc.flow.task_events_mgr import TaskJobLogsRetrieveContext from cylc.flow.subprocctx import SubProcContext from cylc.flow.subprocpool import SubProcPool, _XTRIG_FUNC_CACHE, _XTRIG_MOD_CACHE, get_xtrig_func +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUBMITTED, + TASK_OUTPUT_SUBMIT_FAILED, + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, + TASK_OUTPUT_EXPIRED, +) from cylc.flow.task_proxy import TaskProxy @@ -316,8 +323,7 @@ def test__run_command_exit_add_to_badhosts(mock_ctx): def test__run_command_exit_add_to_badhosts_log(caplog, mock_ctx): - """It gets platform name from the callback args. 
- """ + """It gets platform name from the callback args.""" badhosts = {'foo', 'bar'} SubProcPool._run_command_exit( mock_ctx(cmd=['ssh']), @@ -330,7 +336,11 @@ def test__run_command_exit_add_to_badhosts_log(caplog, mock_ctx): external_triggers=[], xtrig_labels={}, expiration_offset=None, outputs={ - 'submitted': [None, None], 'submit-failed': [None, None] + TASK_OUTPUT_SUBMITTED: [None, None], + TASK_OUTPUT_SUBMIT_FAILED: [None, None], + TASK_OUTPUT_SUCCEEDED: [None, None], + TASK_OUTPUT_FAILED: [None, None], + TASK_OUTPUT_EXPIRED: [None, None], }, graph_children={}, rtconfig={'platform': 'foo'} diff --git a/tests/unit/test_task_outputs.py b/tests/unit/test_task_outputs.py index 4f61e696fbc..34bd1fde09f 100644 --- a/tests/unit/test_task_outputs.py +++ b/tests/unit/test_task_outputs.py @@ -13,30 +13,247 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import random -import unittest - -from cylc.flow.task_outputs import TaskOutputs - - -class TestMessageSorting(unittest.TestCase): - - TEST_MESSAGES = [ - ['expired', 'expired', False], - ['submitted', 'submitted', False], - ['submit-failed', 'submit-failed', False], - ['started', 'started', False], - ['succeeded', 'succeeded', False], - ['failed', 'failed', False], - [None, None, False], - ['foo', 'bar', False], - ['foot', 'bart', False], - # NOTE: [None, 'bar', False] is unstable under Python2 - ] - - def test_sorting(self): - messages = list(self.TEST_MESSAGES) - for _ in range(5): - random.shuffle(messages) - output = sorted(messages, key=TaskOutputs.msg_sort_key) - self.assertEqual(output, self.TEST_MESSAGES, output) + +from textwrap import dedent +from types import SimpleNamespace + +import pytest + +from cylc.flow.task_outputs import ( + TASK_OUTPUTS, + TASK_OUTPUT_EXPIRED, + TASK_OUTPUT_FAILED, + TASK_OUTPUT_SUBMITTED, + TASK_OUTPUT_SUBMIT_FAILED, + TASK_OUTPUT_SUCCEEDED, + TaskOutputs, + get_completion_expression, +) + + +def tdef(required, 
optional, completion=None):
+    """Stub a task definition.
+
+    Args:
+        required: Collection of required outputs.
+        optional: Collection of optional outputs.
+        completion: User defined execution completion expression.
+
+    """
+    return SimpleNamespace(
+        rtconfig={
+            'completion': completion,
+        },
+        outputs={
+            output: (
+                output,
+                (
+                    # output is required:
+                    True if output in required
+                    # output is optional:
+                    else False if output in optional
+                    # output is ambiguous (i.e. not referenced in graph):
+                    else None
+                )
+            )
+            for output in set(TASK_OUTPUTS) | set(required) | set(optional)
+        },
+    )
+
+
+def test_completion_implicit():
+    """It should generate a completion expression when none is provided.
+
+    The outputs should be considered "complete" according to the logic in
+    proposal point 5:
+    https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal
+    """
+    # one required output - succeeded
+    outputs = TaskOutputs(tdef([TASK_OUTPUT_SUCCEEDED], []))
+
+    # the completion expression should only contain the one required output
+    assert outputs._completion_expression == 'succeeded'
+    # the outputs should be incomplete - it hasn't run yet
+    assert outputs.is_complete() is False
+
+    # set the submit-failed output
+    outputs.set_message_complete(TASK_OUTPUT_SUBMIT_FAILED)
+    # the outputs should be incomplete - submit-failed is a "final" output
+    assert outputs.is_complete() is False
+
+    # set the submitted and succeeded outputs
+    outputs.set_message_complete(TASK_OUTPUT_SUBMITTED)
+    outputs.set_message_complete(TASK_OUTPUT_SUCCEEDED)
+    # the outputs should be complete - it has run and succeeded
+    assert outputs.is_complete() is True
+
+    # set the expired output
+    outputs.set_message_complete(TASK_OUTPUT_EXPIRED)
+    # the outputs should still be complete - it has run and succeeded
+    assert outputs.is_complete() is True
+
+
+def test_completion_explicit():
+    """It should use the provided completion expression.
+ + The outputs should be considered "complete" according to the logic in + proposal point 5: + https://cylc.github.io/cylc-admin/proposal-optional-output-extension.html#proposal + """ + outputs = TaskOutputs(tdef( + # no required outputs + [], + # four optional outputs + [ + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, + 'x', + 'y', + ], + # one pair must be satisfied for the outputs to be complete + completion='(succeeded and x) or (failed and y)', + )) + + # the outputs should be incomplete - it hasn't run yet + assert outputs.is_complete() is False + + # set the succeeded and failed outputs + outputs.set_message_complete(TASK_OUTPUT_SUCCEEDED) + outputs.set_message_complete(TASK_OUTPUT_FAILED) + + # the task should be incomplete - it has executed but the completion + # expression is not satisfied + assert outputs.is_complete() is False + + # satisfy the (failed and y) pair + outputs.set_message_complete('y') + assert outputs.is_complete() is True + + # satisfy the (succeeded and x) pair + outputs._completed['y'] = False + outputs.set_message_complete('x') + assert outputs.is_complete() is True + + +@pytest.mark.parametrize( + 'required, optional, expression', [ + pytest.param( + {TASK_OUTPUT_SUCCEEDED}, + [], + 'succeeded', + id='0', + ), + pytest.param( + {TASK_OUTPUT_SUCCEEDED, 'x'}, + [], + '(succeeded and x)', + id='1', + ), + pytest.param( + [], + {TASK_OUTPUT_SUCCEEDED}, + 'succeeded or failed', + id='2', + ), + pytest.param( + {TASK_OUTPUT_SUCCEEDED}, + {TASK_OUTPUT_EXPIRED}, + 'succeeded or expired', + id='3', + ), + pytest.param( + [], + {TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_EXPIRED}, + 'succeeded or failed or expired', + id='4', + ), + pytest.param( + {TASK_OUTPUT_SUCCEEDED}, + {TASK_OUTPUT_EXPIRED, TASK_OUTPUT_SUBMITTED}, + 'succeeded or submit_failed or expired', + id='5', + ), + pytest.param( + {TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_SUBMITTED}, + {TASK_OUTPUT_EXPIRED}, + '(submitted and succeeded) or expired', + id='6', + ), + pytest.param( + [], + 
{TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_SUBMIT_FAILED}, + 'succeeded or failed or submit_failed', + id='7', + ), + pytest.param( + {'x'}, + { + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_SUBMIT_FAILED, + TASK_OUTPUT_EXPIRED, + }, + '(x and succeeded) or failed or submit_failed or expired', + id='8', + ), + ], +) +def test_get_completion_expression_implicit(required, optional, expression): + """It should generate a completion expression if none is provided.""" + assert get_completion_expression(tdef(required, optional)) == expression + + +def test_get_completion_expression_explicit(): + """If a completion expression is used, it should be used unmodified.""" + assert get_completion_expression(tdef( + {'x', 'y'}, + {TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED, TASK_OUTPUT_EXPIRED}, + '((failed and x) or (succeeded and y)) or expired' + )) == '((failed and x) or (succeeded and y)) or expired' + + +def sstrip(text): # TODO + """Simple function to dedent and strip text. + + Examples: + >>> print(sstrip(''' + ... foo + ... bar + ... baz + ... 
''')) + foo + bar + baz + + """ + return dedent(text).strip('\n') + + +def test_format_completion_status(): + outputs = TaskOutputs( + tdef( + {TASK_OUTPUT_SUCCEEDED, 'x', 'y'}, + {TASK_OUTPUT_EXPIRED}, + ) + ) + assert outputs.format_completion_status(indent=2, gutter=2) == sstrip( + ''' + | ( + x | succeeded + x | and x + x | and y + | ) + x | or expired + ''' + ) + outputs.set_message_complete('succeeded') + outputs.set_message_complete('x') + assert outputs.format_completion_status(indent=2, gutter=2) == sstrip( + ''' + | ( + ✓ | succeeded + ✓ | and x + x | and y + | ) + x | or expired + ''' + ) diff --git a/tests/unit/test_xtrigger_mgr.py b/tests/unit/test_xtrigger_mgr.py index 3cfee363d15..5192a7d3dd8 100644 --- a/tests/unit/test_xtrigger_mgr.py +++ b/tests/unit/test_xtrigger_mgr.py @@ -154,10 +154,10 @@ def test_housekeeping_with_xtrigger_satisfied(xtrigger_mgr): xtrig.out = "[\"True\", {\"name\": \"Yossarian\"}]" tdef = TaskDef( name="foo", - rtcfg=None, + rtcfg={'completion': None}, run_mode="live", start_point=1, - initial_point=1 + initial_point=1, ) init() sequence = ISO8601Sequence('P1D', '2019') @@ -197,7 +197,7 @@ def test__call_xtriggers_async(xtrigger_mgr): # create a task tdef = TaskDef( name="foo", - rtcfg=None, + rtcfg={'completion': None}, run_mode="live", start_point=1, initial_point=1