From cd5e28c3011134bccb04e8b1bc57755aad3bebbe Mon Sep 17 00:00:00 2001 From: WXTIM <26465611+wxtim@users.noreply.github.com> Date: Thu, 25 Apr 2024 13:56:33 +0100 Subject: [PATCH] Implement Skip Mode * Add `[runtime][]run mode` and `[runtime][][skip]`. * Spin run mode functionality into separate modules. * Run sim mode check with every main loop - we don't know if any tasks are in sim mode from the scheduler, but it doesn't cost much to check if none are. * Implemented separate job "submission" pathway switching. * Implemented skip mode, including output control logic. * Add a linter and a validation check for tasks in nonlive modes, and for combinations of outputs * Enabled setting outputs as if task ran in skip mode using `cylc set --out skip`. * Testing for the above. Schema: use `Enum` for task run mode instead of `String` (#61) * Schema: use `Enum` for task run mode instead of `String` * Tidy fixup merge fix broken functional test Improve cylc set --out skip * Improve documentation of feature in cylc set --help * Allow cylc set --out=skip,optional_output * Test the above Remove test: We don't want users opting out of validating [runtime][ns][simulation/skip] because we can now changes these in a running workflow. stop users opting out of validating workflows without validating ski/simulation taskdef sections added tests for db entries in nonlive mode ensure db entries for all four modes are correct. move the change file toi the correct name get localhost of platforms 'simulation' 'skip' or 'dummy' not defined. (They probably shouldn't be, but that's a site specific choice...) 
--- changes.d/6039.feat.md | 1 + cylc/flow/cfgspec/workflow.py | 63 +- cylc/flow/commands.py | 6 +- cylc/flow/config.py | 10 +- cylc/flow/data_messages.proto | 1 + cylc/flow/data_messages_pb2.py | 100 +- cylc/flow/data_messages_pb2.pyi | 68 +- cylc/flow/data_store_mgr.py | 2 + .../etc/examples/extending-workflow/.validate | 20 +- cylc/flow/network/schema.py | 59 +- cylc/flow/platforms.py | 7 + cylc/flow/prerequisite.py | 25 +- cylc/flow/run_modes/dummy.py | 121 ++ cylc/flow/run_modes/nonlive.py | 61 + cylc/flow/{ => run_modes}/simulation.py | 190 ++- cylc/flow/run_modes/skip.py | 166 ++ cylc/flow/scheduler.py | 29 +- cylc/flow/scheduler_cli.py | 12 +- cylc/flow/scripts/lint.py | 49 +- cylc/flow/scripts/set.py | 9 +- cylc/flow/scripts/validate.py | 6 +- cylc/flow/task_events_mgr.py | 16 +- cylc/flow/task_job_mgr.py | 110 +- cylc/flow/task_pool.py | 23 +- cylc/flow/task_proxy.py | 7 +- cylc/flow/task_state.py | 68 +- cylc/flow/unicode_rules.py | 4 +- .../04-dummy-mode-output/flow.cylc | 2 +- .../events/05-timeout-ref-dummy/flow.cylc | 2 +- .../modes/03-dummy-env/flow.cylc | 2 +- .../cylc-config/00-simple/section2.stdout | 1510 +++++++++-------- .../cylc-kill/03-simulation/flow.cylc | 2 +- tests/functional/cylc-set/09-set-skip.t | 28 + .../functional/cylc-set/09-set-skip/flow.cylc | 50 + .../cylc-set/09-set-skip/reference.log | 8 + .../{modes => run_modes}/01-dummy.t | 0 .../{modes => run_modes}/01-dummy/flow.cylc | 0 .../01-dummy/reference.log | 0 .../02-dummy-message-outputs.t | 0 .../02-dummy-message-outputs/flow.cylc | 2 +- .../02-dummy-message-outputs/reference.log | 0 .../{modes => run_modes}/03-simulation.t | 0 .../03-simulation/flow.cylc | 0 .../03-simulation/reference.log | 0 .../04-simulation-runtime.t | 0 .../04-simulation-runtime/flow.cylc | 0 .../04-simulation-runtime/reference.log | 0 .../{modes => run_modes}/05-sim-trigger.t | 0 .../05-sim-trigger/flow.cylc | 2 +- .../05-sim-trigger/reference.log | 0 .../run_modes/06-run-mode-overrides.t | 72 + 
.../run_modes/06-run-mode-overrides/flow.cylc | 29 + .../{modes => run_modes}/test_header | 0 .../run_modes/test_mode_overrides.py | 109 ++ tests/integration/run_modes/test_nonlive.py | 110 ++ .../{ => run_modes}/test_simulation.py | 55 +- tests/integration/test_config.py | 51 +- tests/integration/test_task_events_mgr.py | 2 +- tests/integration/test_task_pool.py | 87 +- ...uler_logs.workflow-configuration-file.html | 2 +- tests/unit/run_modes/test_dummy.py | 40 + tests/unit/run_modes/test_nonlive.py | 46 + tests/unit/{ => run_modes}/test_simulation.py | 26 +- tests/unit/run_modes/test_skip.py | 105 ++ tests/unit/scripts/test_lint.py | 3 + tests/unit/test_config.py | 36 +- tests/unit/test_task_state.py | 28 + 67 files changed, 2535 insertions(+), 1107 deletions(-) create mode 100644 changes.d/6039.feat.md create mode 100644 cylc/flow/run_modes/dummy.py create mode 100644 cylc/flow/run_modes/nonlive.py rename cylc/flow/{ => run_modes}/simulation.py (64%) create mode 100644 cylc/flow/run_modes/skip.py create mode 100644 tests/functional/cylc-set/09-set-skip.t create mode 100644 tests/functional/cylc-set/09-set-skip/flow.cylc create mode 100644 tests/functional/cylc-set/09-set-skip/reference.log rename tests/functional/{modes => run_modes}/01-dummy.t (100%) rename tests/functional/{modes => run_modes}/01-dummy/flow.cylc (100%) rename tests/functional/{modes => run_modes}/01-dummy/reference.log (100%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs.t (100%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs/flow.cylc (92%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs/reference.log (100%) rename tests/functional/{modes => run_modes}/03-simulation.t (100%) rename tests/functional/{modes => run_modes}/03-simulation/flow.cylc (100%) rename tests/functional/{modes => run_modes}/03-simulation/reference.log (100%) rename tests/functional/{modes => run_modes}/04-simulation-runtime.t (100%) rename 
tests/functional/{modes => run_modes}/04-simulation-runtime/flow.cylc (100%) rename tests/functional/{modes => run_modes}/04-simulation-runtime/reference.log (100%) rename tests/functional/{modes => run_modes}/05-sim-trigger.t (100%) rename tests/functional/{modes => run_modes}/05-sim-trigger/flow.cylc (84%) rename tests/functional/{modes => run_modes}/05-sim-trigger/reference.log (100%) create mode 100644 tests/functional/run_modes/06-run-mode-overrides.t create mode 100644 tests/functional/run_modes/06-run-mode-overrides/flow.cylc rename tests/functional/{modes => run_modes}/test_header (100%) create mode 100644 tests/integration/run_modes/test_mode_overrides.py create mode 100644 tests/integration/run_modes/test_nonlive.py rename tests/integration/{ => run_modes}/test_simulation.py (90%) create mode 100644 tests/unit/run_modes/test_dummy.py create mode 100644 tests/unit/run_modes/test_nonlive.py rename tests/unit/{ => run_modes}/test_simulation.py (86%) create mode 100644 tests/unit/run_modes/test_skip.py diff --git a/changes.d/6039.feat.md b/changes.d/6039.feat.md new file mode 100644 index 00000000000..6b951fd7076 --- /dev/null +++ b/changes.d/6039.feat.md @@ -0,0 +1 @@ +Allow setting of run mode on a task by task basis. Add a new mode "skip". \ No newline at end of file diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py index 934897bdbb4..108c5efe900 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -1334,6 +1334,22 @@ def get_script_common_text(this: str, example: Optional[str] = None): "[platforms][]submission retry delays" ) ) + Conf( + 'run mode', VDR.V_STRING, + options=['workflow', 'simulation', 'dummy', 'live', 'skip'], + default='workflow', + desc=''' + Override the workflow's run mode. + + By default workflows run in "live mode" - tasks run + in the way defined by the runtime config. + This setting allows individual tasks to be run using + a different run mode. + + .. 
TODO: Reference updated documention. + + .. versionadded:: 8.4.0 + ''') with Conf('meta', desc=r''' Metadata for the task or task family. @@ -1406,9 +1422,44 @@ def get_script_common_text(this: str, example: Optional[str] = None): determine how an event handler responds to task failure events. ''') + with Conf('skip', desc=''' + Task configuration for task :ref:`SkipMode`. + + For a full description of skip run mode see + :ref:`SkipMode`. + + .. versionadded:: 8.4.0 + '''): + Conf( + 'outputs', + VDR.V_STRING_LIST, + desc=''' + Outputs to be emitted by a task in skip mode. + + By default started, submitted, succeeded and all + required outputs will be emitted. + If outputs are specified, but neither succeeded or + failed are specified, succeeded will automatically be + emitted. + + .. versionadded:: 8.4.0 + ''' + ) + Conf( + 'disable task event handlers', + VDR.V_BOOLEAN, + default=True, + desc=''' + Task event handlers are turned off by default for + skip mode tasks. Changing this setting to ``False`` + will re-enable task event handlers. + + .. versionadded:: 8.4.0 + ''' + ) with Conf('simulation', desc=''' - Task configuration for workflow *simulation* and *dummy* run + Task configuration for *simulation* and *dummy* run modes. For a full description of simulation and dummy run modes see @@ -1416,20 +1467,18 @@ def get_script_common_text(this: str, example: Optional[str] = None): '''): Conf('default run length', VDR.V_INTERVAL, DurationFloat(10), desc=''' - The default simulated job run length. - Used if :cylc:conf:`flow.cylc[runtime][] execution time limit` **and** :cylc:conf:`flow.cylc[runtime][][simulation] speedup factor` are not set. ''') Conf('speedup factor', VDR.V_FLOAT, desc=''' - Simulated run length = speedup factor * execution time + Simulated run time = speedup factor * execution time limit. 
If :cylc:conf:`flow.cylc[runtime][] execution time limit` is set, the task - simulated run length is computed by dividing it by this + simulated run time is computed by dividing it by this factor. ''') Conf('time limit buffer', VDR.V_INTERVAL, DurationFloat(30), @@ -2118,6 +2167,10 @@ def upg(cfg, descr): ['runtime', '__MANY__', 'events', f"{old}s"], silent=cylc.flow.flags.cylc7_back_compat, ) + u.deprecate( + '8.4.0', + ['runtime', '__MANY__', 'simulation', 'default run length'], + ['runtime', '__MANY__', 'simulation', 'default run length']) u.obsolete('8.0.0', ['cylc', 'events', 'abort on stalled']) u.obsolete('8.0.0', ['cylc', 'events', 'abort if startup handler fails']) diff --git a/cylc/flow/commands.py b/cylc/flow/commands.py index 173984f17e0..e565737636d 100644 --- a/cylc/flow/commands.py +++ b/cylc/flow/commands.py @@ -77,14 +77,14 @@ from cylc.flow.network.schema import WorkflowStopMode from cylc.flow.parsec.exceptions import ParsecError from cylc.flow.task_id import TaskID -from cylc.flow.task_state import TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED -from cylc.flow.workflow_status import RunMode, StopMode +from cylc.flow.task_state import ( + TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED, RunMode) +from cylc.flow.workflow_status import StopMode from metomi.isodatetime.parsers import TimePointParser if TYPE_CHECKING: from cylc.flow.scheduler import Scheduler - # define a type for command implementations Command = Callable[ ..., diff --git a/cylc/flow/config.py b/cylc/flow/config.py index cb987ac6d4c..b7d199ed9c1 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -82,7 +82,7 @@ ) from cylc.flow.print_tree import print_tree from cylc.flow.task_qualifiers import ALT_QUALIFIERS -from cylc.flow.simulation import configure_sim_modes +from cylc.flow.run_modes.nonlive import mode_validate_checks from cylc.flow.subprocctx import SubFuncContext from cylc.flow.task_events_mgr import ( EventData, @@ -114,8 +114,8 @@ WorkflowFiles, check_deprecation, ) -from 
cylc.flow.workflow_status import RunMode from cylc.flow.xtrigger_mgr import XtriggerCollator +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from optparse import Values @@ -513,10 +513,6 @@ def __init__( self.process_runahead_limit() - run_mode = self.run_mode() - if run_mode in {RunMode.SIMULATION, RunMode.DUMMY}: - configure_sim_modes(self.taskdefs.values(), run_mode) - self.configure_workflow_state_polling_tasks() self._check_task_event_handlers() @@ -567,6 +563,8 @@ def __init__( self.mem_log("config.py: end init config") + mode_validate_checks(self.taskdefs) + @staticmethod def _warn_if_queues_have_implicit_tasks( config, taskdefs, max_warning_lines diff --git a/cylc/flow/data_messages.proto b/cylc/flow/data_messages.proto index c0af5094c0d..f259a735f0a 100644 --- a/cylc/flow/data_messages.proto +++ b/cylc/flow/data_messages.proto @@ -128,6 +128,7 @@ message PbRuntime { optional string environment = 16; optional string outputs = 17; optional string completion = 18; + optional string run_mode = 19; } diff --git a/cylc/flow/data_messages_pb2.py b/cylc/flow/data_messages_pb2.py index 7fb5ae84d24..0f16888d6bd 100644 --- a/cylc/flow/data_messages_pb2.py +++ b/cylc/flow/data_messages_pb2.py @@ -14,7 +14,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 
\x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b \x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 \x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 \x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 \x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! 
\x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 \x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\xe1\x06\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b 
\x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 \x01(\tH\x10\x88\x01\x01\x12\x17\n\ncompletion\x18\x12 \x01(\tH\x11\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputsB\r\n\x0b_completion\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e \x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! 
\x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 \x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 
\x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 
\x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 
\x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 
\x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b \x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 
\x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 \x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 \x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! \x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 
\x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\x85\x07\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b \x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 \x01(\tH\x10\x88\x01\x01\x12\x17\n\ncompletion\x18\x12 \x01(\tH\x11\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 
\x01(\tH\x12\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputsB\r\n\x0b_completionB\x0b\n\t_run_mode\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e \x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! 
\x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 \x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 
\x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 
\x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 
\x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 
\x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -46,55 +46,55 @@ _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_start=1493 _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_end=1566 _globals['_PBRUNTIME']._serialized_start=2014 - _globals['_PBRUNTIME']._serialized_end=2879 - _globals['_PBJOB']._serialized_start=2882 - _globals['_PBJOB']._serialized_end=3551 - _globals['_PBTASK']._serialized_start=3554 - _globals['_PBTASK']._serialized_end=3908 - _globals['_PBPOLLTASK']._serialized_start=3911 - _globals['_PBPOLLTASK']._serialized_end=4127 - _globals['_PBCONDITION']._serialized_start=4130 - _globals['_PBCONDITION']._serialized_end=4333 - _globals['_PBPREREQUISITE']._serialized_start=4336 - _globals['_PBPREREQUISITE']._serialized_end=4486 - _globals['_PBOUTPUT']._serialized_start=4489 - _globals['_PBOUTPUT']._serialized_end=4629 - _globals['_PBTRIGGER']._serialized_start=4632 - _globals['_PBTRIGGER']._serialized_end=4797 - _globals['_PBTASKPROXY']._serialized_start=4800 - _globals['_PBTASKPROXY']._serialized_end=5841 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5451 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5508 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5510 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5577 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5579 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5639 - _globals['_PBFAMILY']._serialized_start=5844 - _globals['_PBFAMILY']._serialized_end=6172 - _globals['_PBFAMILYPROXY']._serialized_start=6175 - _globals['_PBFAMILYPROXY']._serialized_end=6989 + 
_globals['_PBRUNTIME']._serialized_end=2915 + _globals['_PBJOB']._serialized_start=2918 + _globals['_PBJOB']._serialized_end=3587 + _globals['_PBTASK']._serialized_start=3590 + _globals['_PBTASK']._serialized_end=3944 + _globals['_PBPOLLTASK']._serialized_start=3947 + _globals['_PBPOLLTASK']._serialized_end=4163 + _globals['_PBCONDITION']._serialized_start=4166 + _globals['_PBCONDITION']._serialized_end=4369 + _globals['_PBPREREQUISITE']._serialized_start=4372 + _globals['_PBPREREQUISITE']._serialized_end=4522 + _globals['_PBOUTPUT']._serialized_start=4525 + _globals['_PBOUTPUT']._serialized_end=4665 + _globals['_PBTRIGGER']._serialized_start=4668 + _globals['_PBTRIGGER']._serialized_end=4833 + _globals['_PBTASKPROXY']._serialized_start=4836 + _globals['_PBTASKPROXY']._serialized_end=5877 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5487 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5544 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5546 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5613 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5615 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5675 + _globals['_PBFAMILY']._serialized_start=5880 + _globals['_PBFAMILY']._serialized_end=6208 + _globals['_PBFAMILYPROXY']._serialized_start=6211 + _globals['_PBFAMILYPROXY']._serialized_end=7025 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_start=1441 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_end=1491 - _globals['_PBEDGE']._serialized_start=6992 - _globals['_PBEDGE']._serialized_end=7180 - _globals['_PBEDGES']._serialized_start=7182 - _globals['_PBEDGES']._serialized_end=7305 - _globals['_PBENTIREWORKFLOW']._serialized_start=7308 - _globals['_PBENTIREWORKFLOW']._serialized_end=7550 - _globals['_EDELTAS']._serialized_start=7553 - _globals['_EDELTAS']._serialized_end=7728 - _globals['_FDELTAS']._serialized_start=7731 - _globals['_FDELTAS']._serialized_end=7910 - 
_globals['_FPDELTAS']._serialized_start=7913 - _globals['_FPDELTAS']._serialized_end=8103 - _globals['_JDELTAS']._serialized_start=8106 - _globals['_JDELTAS']._serialized_end=8279 - _globals['_TDELTAS']._serialized_start=8282 - _globals['_TDELTAS']._serialized_end=8457 - _globals['_TPDELTAS']._serialized_start=8460 - _globals['_TPDELTAS']._serialized_end=8646 - _globals['_WDELTAS']._serialized_start=8649 - _globals['_WDELTAS']._serialized_end=8844 - _globals['_ALLDELTAS']._serialized_start=8847 - _globals['_ALLDELTAS']._serialized_end=9056 + _globals['_PBEDGE']._serialized_start=7028 + _globals['_PBEDGE']._serialized_end=7216 + _globals['_PBEDGES']._serialized_start=7218 + _globals['_PBEDGES']._serialized_end=7341 + _globals['_PBENTIREWORKFLOW']._serialized_start=7344 + _globals['_PBENTIREWORKFLOW']._serialized_end=7586 + _globals['_EDELTAS']._serialized_start=7589 + _globals['_EDELTAS']._serialized_end=7764 + _globals['_FDELTAS']._serialized_start=7767 + _globals['_FDELTAS']._serialized_end=7946 + _globals['_FPDELTAS']._serialized_start=7949 + _globals['_FPDELTAS']._serialized_end=8139 + _globals['_JDELTAS']._serialized_start=8142 + _globals['_JDELTAS']._serialized_end=8315 + _globals['_TDELTAS']._serialized_start=8318 + _globals['_TDELTAS']._serialized_end=8493 + _globals['_TPDELTAS']._serialized_start=8496 + _globals['_TPDELTAS']._serialized_end=8682 + _globals['_WDELTAS']._serialized_start=8685 + _globals['_WDELTAS']._serialized_end=8880 + _globals['_ALLDELTAS']._serialized_start=8883 + _globals['_ALLDELTAS']._serialized_end=9092 # @@protoc_insertion_point(module_scope) diff --git a/cylc/flow/data_messages_pb2.pyi b/cylc/flow/data_messages_pb2.pyi index 4e96c6ed2da..8c80f7f8f10 100644 --- a/cylc/flow/data_messages_pb2.pyi +++ b/cylc/flow/data_messages_pb2.pyi @@ -6,7 +6,7 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class PbMeta(_message.Message): - __slots__ = ["title", 
"description", "URL", "user_defined"] + __slots__ = ("title", "description", "URL", "user_defined") TITLE_FIELD_NUMBER: _ClassVar[int] DESCRIPTION_FIELD_NUMBER: _ClassVar[int] URL_FIELD_NUMBER: _ClassVar[int] @@ -18,7 +18,7 @@ class PbMeta(_message.Message): def __init__(self, title: _Optional[str] = ..., description: _Optional[str] = ..., URL: _Optional[str] = ..., user_defined: _Optional[str] = ...) -> None: ... class PbTimeZone(_message.Message): - __slots__ = ["hours", "minutes", "string_basic", "string_extended"] + __slots__ = ("hours", "minutes", "string_basic", "string_extended") HOURS_FIELD_NUMBER: _ClassVar[int] MINUTES_FIELD_NUMBER: _ClassVar[int] STRING_BASIC_FIELD_NUMBER: _ClassVar[int] @@ -30,22 +30,22 @@ class PbTimeZone(_message.Message): def __init__(self, hours: _Optional[int] = ..., minutes: _Optional[int] = ..., string_basic: _Optional[str] = ..., string_extended: _Optional[str] = ...) -> None: ... class PbTaskProxyRefs(_message.Message): - __slots__ = ["task_proxies"] + __slots__ = ("task_proxies",) TASK_PROXIES_FIELD_NUMBER: _ClassVar[int] task_proxies: _containers.RepeatedScalarFieldContainer[str] def __init__(self, task_proxies: _Optional[_Iterable[str]] = ...) -> None: ... 
class PbWorkflow(_message.Message): - __slots__ = ["stamp", "id", "name", "status", "host", "port", "owner", "tasks", "families", "edges", "api_version", "cylc_version", "last_updated", "meta", "newest_active_cycle_point", "oldest_active_cycle_point", "reloaded", "run_mode", "cycling_mode", "state_totals", "workflow_log_dir", "time_zone_info", "tree_depth", "job_log_names", "ns_def_order", "states", "task_proxies", "family_proxies", "status_msg", "is_held_total", "jobs", "pub_port", "broadcasts", "is_queued_total", "latest_state_tasks", "pruned", "is_runahead_total", "states_updated", "n_edge_distance"] + __slots__ = ("stamp", "id", "name", "status", "host", "port", "owner", "tasks", "families", "edges", "api_version", "cylc_version", "last_updated", "meta", "newest_active_cycle_point", "oldest_active_cycle_point", "reloaded", "run_mode", "cycling_mode", "state_totals", "workflow_log_dir", "time_zone_info", "tree_depth", "job_log_names", "ns_def_order", "states", "task_proxies", "family_proxies", "status_msg", "is_held_total", "jobs", "pub_port", "broadcasts", "is_queued_total", "latest_state_tasks", "pruned", "is_runahead_total", "states_updated", "n_edge_distance") class StateTotalsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: int def __init__(self, key: _Optional[str] = ..., value: _Optional[int] = ...) -> None: ... 
class LatestStateTasksEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -132,7 +132,7 @@ class PbWorkflow(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., status: _Optional[str] = ..., host: _Optional[str] = ..., port: _Optional[int] = ..., owner: _Optional[str] = ..., tasks: _Optional[_Iterable[str]] = ..., families: _Optional[_Iterable[str]] = ..., edges: _Optional[_Union[PbEdges, _Mapping]] = ..., api_version: _Optional[int] = ..., cylc_version: _Optional[str] = ..., last_updated: _Optional[float] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., newest_active_cycle_point: _Optional[str] = ..., oldest_active_cycle_point: _Optional[str] = ..., reloaded: bool = ..., run_mode: _Optional[str] = ..., cycling_mode: _Optional[str] = ..., state_totals: _Optional[_Mapping[str, int]] = ..., workflow_log_dir: _Optional[str] = ..., time_zone_info: _Optional[_Union[PbTimeZone, _Mapping]] = ..., tree_depth: _Optional[int] = ..., job_log_names: _Optional[_Iterable[str]] = ..., ns_def_order: _Optional[_Iterable[str]] = ..., states: _Optional[_Iterable[str]] = ..., task_proxies: _Optional[_Iterable[str]] = ..., family_proxies: _Optional[_Iterable[str]] = ..., status_msg: _Optional[str] = ..., is_held_total: _Optional[int] = ..., jobs: _Optional[_Iterable[str]] = ..., pub_port: _Optional[int] = ..., broadcasts: _Optional[str] = ..., is_queued_total: _Optional[int] = ..., latest_state_tasks: _Optional[_Mapping[str, PbTaskProxyRefs]] = ..., pruned: bool = ..., is_runahead_total: _Optional[int] = ..., states_updated: bool = ..., n_edge_distance: _Optional[int] = ...) -> None: ... 
class PbRuntime(_message.Message): - __slots__ = ["platform", "script", "init_script", "env_script", "err_script", "exit_script", "pre_script", "post_script", "work_sub_dir", "execution_polling_intervals", "execution_retry_delays", "execution_time_limit", "submission_polling_intervals", "submission_retry_delays", "directives", "environment", "outputs", "completion"] + __slots__ = ("platform", "script", "init_script", "env_script", "err_script", "exit_script", "pre_script", "post_script", "work_sub_dir", "execution_polling_intervals", "execution_retry_delays", "execution_time_limit", "submission_polling_intervals", "submission_retry_delays", "directives", "environment", "outputs", "completion", "run_mode") PLATFORM_FIELD_NUMBER: _ClassVar[int] SCRIPT_FIELD_NUMBER: _ClassVar[int] INIT_SCRIPT_FIELD_NUMBER: _ClassVar[int] @@ -151,6 +151,7 @@ class PbRuntime(_message.Message): ENVIRONMENT_FIELD_NUMBER: _ClassVar[int] OUTPUTS_FIELD_NUMBER: _ClassVar[int] COMPLETION_FIELD_NUMBER: _ClassVar[int] + RUN_MODE_FIELD_NUMBER: _ClassVar[int] platform: str script: str init_script: str @@ -169,10 +170,11 @@ class PbRuntime(_message.Message): environment: str outputs: str completion: str - def __init__(self, platform: _Optional[str] = ..., script: _Optional[str] = ..., init_script: _Optional[str] = ..., env_script: _Optional[str] = ..., err_script: _Optional[str] = ..., exit_script: _Optional[str] = ..., pre_script: _Optional[str] = ..., post_script: _Optional[str] = ..., work_sub_dir: _Optional[str] = ..., execution_polling_intervals: _Optional[str] = ..., execution_retry_delays: _Optional[str] = ..., execution_time_limit: _Optional[str] = ..., submission_polling_intervals: _Optional[str] = ..., submission_retry_delays: _Optional[str] = ..., directives: _Optional[str] = ..., environment: _Optional[str] = ..., outputs: _Optional[str] = ..., completion: _Optional[str] = ...) -> None: ... 
+ run_mode: str + def __init__(self, platform: _Optional[str] = ..., script: _Optional[str] = ..., init_script: _Optional[str] = ..., env_script: _Optional[str] = ..., err_script: _Optional[str] = ..., exit_script: _Optional[str] = ..., pre_script: _Optional[str] = ..., post_script: _Optional[str] = ..., work_sub_dir: _Optional[str] = ..., execution_polling_intervals: _Optional[str] = ..., execution_retry_delays: _Optional[str] = ..., execution_time_limit: _Optional[str] = ..., submission_polling_intervals: _Optional[str] = ..., submission_retry_delays: _Optional[str] = ..., directives: _Optional[str] = ..., environment: _Optional[str] = ..., outputs: _Optional[str] = ..., completion: _Optional[str] = ..., run_mode: _Optional[str] = ...) -> None: ... class PbJob(_message.Message): - __slots__ = ["stamp", "id", "submit_num", "state", "task_proxy", "submitted_time", "started_time", "finished_time", "job_id", "job_runner_name", "execution_time_limit", "platform", "job_log_dir", "name", "cycle_point", "messages", "runtime"] + __slots__ = ("stamp", "id", "submit_num", "state", "task_proxy", "submitted_time", "started_time", "finished_time", "job_id", "job_runner_name", "execution_time_limit", "platform", "job_log_dir", "name", "cycle_point", "messages", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] SUBMIT_NUM_FIELD_NUMBER: _ClassVar[int] @@ -210,7 +212,7 @@ class PbJob(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., submit_num: _Optional[int] = ..., state: _Optional[str] = ..., task_proxy: _Optional[str] = ..., submitted_time: _Optional[str] = ..., started_time: _Optional[str] = ..., finished_time: _Optional[str] = ..., job_id: _Optional[str] = ..., job_runner_name: _Optional[str] = ..., execution_time_limit: _Optional[float] = ..., platform: _Optional[str] = ..., job_log_dir: _Optional[str] = ..., name: _Optional[str] = ..., cycle_point: _Optional[str] = ..., messages: 
_Optional[_Iterable[str]] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbTask(_message.Message): - __slots__ = ["stamp", "id", "name", "meta", "mean_elapsed_time", "depth", "proxies", "namespace", "parents", "first_parent", "runtime"] + __slots__ = ("stamp", "id", "name", "meta", "mean_elapsed_time", "depth", "proxies", "namespace", "parents", "first_parent", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] @@ -236,7 +238,7 @@ class PbTask(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., mean_elapsed_time: _Optional[float] = ..., depth: _Optional[int] = ..., proxies: _Optional[_Iterable[str]] = ..., namespace: _Optional[_Iterable[str]] = ..., parents: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbPollTask(_message.Message): - __slots__ = ["local_proxy", "workflow", "remote_proxy", "req_state", "graph_string"] + __slots__ = ("local_proxy", "workflow", "remote_proxy", "req_state", "graph_string") LOCAL_PROXY_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] REMOTE_PROXY_FIELD_NUMBER: _ClassVar[int] @@ -250,7 +252,7 @@ class PbPollTask(_message.Message): def __init__(self, local_proxy: _Optional[str] = ..., workflow: _Optional[str] = ..., remote_proxy: _Optional[str] = ..., req_state: _Optional[str] = ..., graph_string: _Optional[str] = ...) -> None: ... 
class PbCondition(_message.Message): - __slots__ = ["task_proxy", "expr_alias", "req_state", "satisfied", "message"] + __slots__ = ("task_proxy", "expr_alias", "req_state", "satisfied", "message") TASK_PROXY_FIELD_NUMBER: _ClassVar[int] EXPR_ALIAS_FIELD_NUMBER: _ClassVar[int] REQ_STATE_FIELD_NUMBER: _ClassVar[int] @@ -264,7 +266,7 @@ class PbCondition(_message.Message): def __init__(self, task_proxy: _Optional[str] = ..., expr_alias: _Optional[str] = ..., req_state: _Optional[str] = ..., satisfied: bool = ..., message: _Optional[str] = ...) -> None: ... class PbPrerequisite(_message.Message): - __slots__ = ["expression", "conditions", "cycle_points", "satisfied"] + __slots__ = ("expression", "conditions", "cycle_points", "satisfied") EXPRESSION_FIELD_NUMBER: _ClassVar[int] CONDITIONS_FIELD_NUMBER: _ClassVar[int] CYCLE_POINTS_FIELD_NUMBER: _ClassVar[int] @@ -276,7 +278,7 @@ class PbPrerequisite(_message.Message): def __init__(self, expression: _Optional[str] = ..., conditions: _Optional[_Iterable[_Union[PbCondition, _Mapping]]] = ..., cycle_points: _Optional[_Iterable[str]] = ..., satisfied: bool = ...) -> None: ... class PbOutput(_message.Message): - __slots__ = ["label", "message", "satisfied", "time"] + __slots__ = ("label", "message", "satisfied", "time") LABEL_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] SATISFIED_FIELD_NUMBER: _ClassVar[int] @@ -288,7 +290,7 @@ class PbOutput(_message.Message): def __init__(self, label: _Optional[str] = ..., message: _Optional[str] = ..., satisfied: bool = ..., time: _Optional[float] = ...) -> None: ... 
class PbTrigger(_message.Message): - __slots__ = ["id", "label", "message", "satisfied", "time"] + __slots__ = ("id", "label", "message", "satisfied", "time") ID_FIELD_NUMBER: _ClassVar[int] LABEL_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] @@ -302,23 +304,23 @@ class PbTrigger(_message.Message): def __init__(self, id: _Optional[str] = ..., label: _Optional[str] = ..., message: _Optional[str] = ..., satisfied: bool = ..., time: _Optional[float] = ...) -> None: ... class PbTaskProxy(_message.Message): - __slots__ = ["stamp", "id", "task", "state", "cycle_point", "depth", "job_submits", "outputs", "namespace", "prerequisites", "jobs", "first_parent", "name", "is_held", "edges", "ancestors", "flow_nums", "external_triggers", "xtriggers", "is_queued", "is_runahead", "flow_wait", "runtime", "graph_depth"] + __slots__ = ("stamp", "id", "task", "state", "cycle_point", "depth", "job_submits", "outputs", "namespace", "prerequisites", "jobs", "first_parent", "name", "is_held", "edges", "ancestors", "flow_nums", "external_triggers", "xtriggers", "is_queued", "is_runahead", "flow_wait", "runtime", "graph_depth") class OutputsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: PbOutput def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[PbOutput, _Mapping]] = ...) -> None: ... class ExternalTriggersEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: PbTrigger def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[PbTrigger, _Mapping]] = ...) -> None: ... 
class XtriggersEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -375,7 +377,7 @@ class PbTaskProxy(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., task: _Optional[str] = ..., state: _Optional[str] = ..., cycle_point: _Optional[str] = ..., depth: _Optional[int] = ..., job_submits: _Optional[int] = ..., outputs: _Optional[_Mapping[str, PbOutput]] = ..., namespace: _Optional[_Iterable[str]] = ..., prerequisites: _Optional[_Iterable[_Union[PbPrerequisite, _Mapping]]] = ..., jobs: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., name: _Optional[str] = ..., is_held: bool = ..., edges: _Optional[_Iterable[str]] = ..., ancestors: _Optional[_Iterable[str]] = ..., flow_nums: _Optional[str] = ..., external_triggers: _Optional[_Mapping[str, PbTrigger]] = ..., xtriggers: _Optional[_Mapping[str, PbTrigger]] = ..., is_queued: bool = ..., is_runahead: bool = ..., flow_wait: bool = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ..., graph_depth: _Optional[int] = ...) -> None: ... 
class PbFamily(_message.Message): - __slots__ = ["stamp", "id", "name", "meta", "depth", "proxies", "parents", "child_tasks", "child_families", "first_parent", "runtime"] + __slots__ = ("stamp", "id", "name", "meta", "depth", "proxies", "parents", "child_tasks", "child_families", "first_parent", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] @@ -401,9 +403,9 @@ class PbFamily(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., depth: _Optional[int] = ..., proxies: _Optional[_Iterable[str]] = ..., parents: _Optional[_Iterable[str]] = ..., child_tasks: _Optional[_Iterable[str]] = ..., child_families: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbFamilyProxy(_message.Message): - __slots__ = ["stamp", "id", "cycle_point", "name", "family", "state", "depth", "first_parent", "child_tasks", "child_families", "is_held", "ancestors", "states", "state_totals", "is_held_total", "is_queued", "is_queued_total", "is_runahead", "is_runahead_total", "runtime", "graph_depth"] + __slots__ = ("stamp", "id", "cycle_point", "name", "family", "state", "depth", "first_parent", "child_tasks", "child_families", "is_held", "ancestors", "states", "state_totals", "is_held_total", "is_queued", "is_queued_total", "is_runahead", "is_runahead_total", "runtime", "graph_depth") class StateTotalsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -454,7 +456,7 @@ class PbFamilyProxy(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., cycle_point: _Optional[str] = ..., name: _Optional[str] = ..., family: _Optional[str] = ..., state: _Optional[str] = ..., depth: 
_Optional[int] = ..., first_parent: _Optional[str] = ..., child_tasks: _Optional[_Iterable[str]] = ..., child_families: _Optional[_Iterable[str]] = ..., is_held: bool = ..., ancestors: _Optional[_Iterable[str]] = ..., states: _Optional[_Iterable[str]] = ..., state_totals: _Optional[_Mapping[str, int]] = ..., is_held_total: _Optional[int] = ..., is_queued: bool = ..., is_queued_total: _Optional[int] = ..., is_runahead: bool = ..., is_runahead_total: _Optional[int] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ..., graph_depth: _Optional[int] = ...) -> None: ... class PbEdge(_message.Message): - __slots__ = ["stamp", "id", "source", "target", "suicide", "cond"] + __slots__ = ("stamp", "id", "source", "target", "suicide", "cond") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] SOURCE_FIELD_NUMBER: _ClassVar[int] @@ -470,7 +472,7 @@ class PbEdge(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., source: _Optional[str] = ..., target: _Optional[str] = ..., suicide: bool = ..., cond: bool = ...) -> None: ... class PbEdges(_message.Message): - __slots__ = ["id", "edges", "workflow_polling_tasks", "leaves", "feet"] + __slots__ = ("id", "edges", "workflow_polling_tasks", "leaves", "feet") ID_FIELD_NUMBER: _ClassVar[int] EDGES_FIELD_NUMBER: _ClassVar[int] WORKFLOW_POLLING_TASKS_FIELD_NUMBER: _ClassVar[int] @@ -484,7 +486,7 @@ class PbEdges(_message.Message): def __init__(self, id: _Optional[str] = ..., edges: _Optional[_Iterable[str]] = ..., workflow_polling_tasks: _Optional[_Iterable[_Union[PbPollTask, _Mapping]]] = ..., leaves: _Optional[_Iterable[str]] = ..., feet: _Optional[_Iterable[str]] = ...) -> None: ... 
class PbEntireWorkflow(_message.Message): - __slots__ = ["workflow", "tasks", "task_proxies", "jobs", "families", "family_proxies", "edges"] + __slots__ = ("workflow", "tasks", "task_proxies", "jobs", "families", "family_proxies", "edges") WORKFLOW_FIELD_NUMBER: _ClassVar[int] TASKS_FIELD_NUMBER: _ClassVar[int] TASK_PROXIES_FIELD_NUMBER: _ClassVar[int] @@ -502,7 +504,7 @@ class PbEntireWorkflow(_message.Message): def __init__(self, workflow: _Optional[_Union[PbWorkflow, _Mapping]] = ..., tasks: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., task_proxies: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., families: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., family_proxies: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., edges: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ...) -> None: ... class EDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -518,7 +520,7 @@ class EDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... 
class FDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -534,7 +536,7 @@ class FDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class FPDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -550,7 +552,7 @@ class FPDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class JDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -566,7 +568,7 @@ class JDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... 
class TDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -582,7 +584,7 @@ class TDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class TPDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -598,7 +600,7 @@ class TPDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class WDeltas(_message.Message): - __slots__ = ["time", "added", "updated", "reloaded", "pruned"] + __slots__ = ("time", "added", "updated", "reloaded", "pruned") TIME_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] UPDATED_FIELD_NUMBER: _ClassVar[int] @@ -612,7 +614,7 @@ class WDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., added: _Optional[_Union[PbWorkflow, _Mapping]] = ..., updated: _Optional[_Union[PbWorkflow, _Mapping]] = ..., reloaded: bool = ..., pruned: _Optional[str] = ...) -> None: ... 
class AllDeltas(_message.Message): - __slots__ = ["families", "family_proxies", "jobs", "tasks", "task_proxies", "edges", "workflow"] + __slots__ = ("families", "family_proxies", "jobs", "tasks", "task_proxies", "edges", "workflow") FAMILIES_FIELD_NUMBER: _ClassVar[int] FAMILY_PROXIES_FIELD_NUMBER: _ClassVar[int] JOBS_FIELD_NUMBER: _ClassVar[int] diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index b98a055f882..592022fc688 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -258,6 +258,7 @@ def runtime_from_config(rtconfig): pre_script=rtconfig['pre-script'], post_script=rtconfig['post-script'], work_sub_dir=rtconfig['work sub-directory'], + run_mode=rtconfig['run mode'], execution_time_limit=str(rtconfig['execution time limit'] or ''), execution_polling_intervals=listjoin( rtconfig['execution polling intervals'] @@ -1227,6 +1228,7 @@ def generate_ghost_task( graph_depth=n_depth, name=name, ) + self.all_n_window_nodes.add(tp_id) self.n_window_depths.setdefault(n_depth, set()).add(tp_id) diff --git a/cylc/flow/etc/examples/extending-workflow/.validate b/cylc/flow/etc/examples/extending-workflow/.validate index 43c810372ce..bb89574deca 100755 --- a/cylc/flow/etc/examples/extending-workflow/.validate +++ b/cylc/flow/etc/examples/extending-workflow/.validate @@ -20,15 +20,15 @@ set -eux test_simple () { local ID ID="$(< /dev/urandom tr -dc A-Za-z | head -c6)" - + # lint cylc lint ./simple - + # copy into a temp directory local SRC_DIR SRC_DIR="$(mktemp -d)" cp simple/flow.cylc "$SRC_DIR" - + # speed things up with simulation mode cat >>"${SRC_DIR}/flow.cylc" <<__HERE__ [runtime] @@ -36,7 +36,7 @@ test_simple () { [[[simulation]]] default run length = PT0S __HERE__ - + # start the workflow cylc vip \ --check-circular \ @@ -45,34 +45,34 @@ __HERE__ --workflow-name "$ID" \ --mode=simulation \ "$SRC_DIR" - + # it should have reached the 2002 cycle grep '2002/a' "${HOME}/cylc-run/${ID}/log/scheduler/log" if grep '2003/a' 
"${HOME}/cylc-run/${ID}/log/scheduler/log"; then exit 1 fi - + # edit the "stop after cycle point" sed -i \ 's/stop after cycle point.*/stop after cycle point = 2004/' \ "${SRC_DIR}/flow.cylc" - + # continue the run cylc vr \ --no-detach \ --mode=simulation \ --yes \ "$ID" - + # it should have reached the 2004 cycle grep '2004/a' "${HOME}/cylc-run/${ID}/log/scheduler/log" if grep '2005/a' "${HOME}/cylc-run/${ID}/log/scheduler/log"; then exit 1 fi - + # clean up cylc clean "$ID" - + rm -r "${SRC_DIR}" } diff --git a/cylc/flow/network/schema.py b/cylc/flow/network/schema.py index b962b582b70..6009eeb1c85 100644 --- a/cylc/flow/network/schema.py +++ b/cylc/flow/network/schema.py @@ -51,6 +51,7 @@ from cylc.flow.id import Tokens from cylc.flow.task_outputs import SORT_ORDERS from cylc.flow.task_state import ( + RunMode, TASK_STATUSES_ORDERED, TASK_STATUS_DESC, TASK_STATUS_WAITING, @@ -66,6 +67,7 @@ from cylc.flow.workflow_status import StopMode if TYPE_CHECKING: + from enum import Enum from graphql import ResolveInfo from graphql.type.definition import ( GraphQLNamedType, @@ -595,6 +597,46 @@ class Meta: string_extended = String() +def describe_run_mode(run_mode: Optional['Enum']) -> str: + """Returns description for a workflow/task run mode.""" + if not run_mode: + return "" + mode = run_mode.value + if mode == RunMode.WORKFLOW: + return "Default to the workflow's run mode." + if mode == RunMode.LIVE: + return "Tasks will run normally." + if mode == RunMode.SIMULATION: + return ( + "Simulates job submission with configurable execution time " + "(does not submit real jobs)." + ) + if mode == RunMode.DUMMY: + return "Submits real jobs with empty scripts." + if mode == RunMode.SKIP: + return ( + "Skips job submission; sets required outputs (by default) or " + "configured outputs." 
+ ) + return "" + + +WorkflowRunMode = graphene.Enum( + 'WorkflowRunMode', + [(m.capitalize(), m) for m in RunMode.WORKFLOW_MODES], + description=describe_run_mode, +) +"""The run mode for the workflow.""" + + +TaskRunMode = graphene.Enum( + 'TaskRunMode', + [(m.capitalize(), m) for m in (RunMode.WORKFLOW, *RunMode.WORKFLOW_MODES)], + description=describe_run_mode, +) +"""The run mode for tasks.""" + + class Workflow(ObjectType): class Meta: description = """Global workflow info.""" @@ -821,6 +863,7 @@ class Meta: directives = graphene.List(RuntimeSetting, resolver=resolve_json_dump) environment = graphene.List(RuntimeSetting, resolver=resolve_json_dump) outputs = graphene.List(RuntimeSetting, resolver=resolve_json_dump) + run_mode = TaskRunMode(default_value=TaskRunMode.Workflow.name) RUNTIME_FIELD_TO_CFG_MAP = { @@ -1499,9 +1542,9 @@ class RuntimeConfiguration(String): class BroadcastMode(graphene.Enum): - Set = 'put_broadcast' - Clear = 'clear_broadcast' - Expire = 'expire_broadcast' + Set = cast('Enum', 'put_broadcast') + Clear = cast('Enum', 'clear_broadcast') + Expire = cast('Enum', 'expire_broadcast') @property def description(self): @@ -1626,10 +1669,10 @@ class WorkflowStopMode(graphene.Enum): # * Graphene requires special enums. # * We only want to offer a subset of stop modes (REQUEST_* only). 
- Clean = StopMode.REQUEST_CLEAN.value # type: graphene.Enum - Kill = StopMode.REQUEST_KILL.value # type: graphene.Enum - Now = StopMode.REQUEST_NOW.value # type: graphene.Enum - NowNow = StopMode.REQUEST_NOW_NOW.value # type: graphene.Enum + Clean = cast('Enum', StopMode.REQUEST_CLEAN.value) + Kill = cast('Enum', StopMode.REQUEST_KILL.value) + Now = cast('Enum', StopMode.REQUEST_NOW.value) + NowNow = cast('Enum', StopMode.REQUEST_NOW_NOW.value) @property def description(self): @@ -1686,7 +1729,7 @@ class Arguments: mode = BroadcastMode( # use the enum name as the default value # https://github.com/graphql-python/graphql-core-legacy/issues/166 - default_value=BroadcastMode.Set.name, # type: ignore + default_value=BroadcastMode.Set.name, description='What type of broadcast is this?', required=True ) diff --git a/cylc/flow/platforms.py b/cylc/flow/platforms.py index d06c84ade92..fb3f2b3f22d 100644 --- a/cylc/flow/platforms.py +++ b/cylc/flow/platforms.py @@ -31,6 +31,7 @@ PlatformLookupError, CylcError, NoHostsError, NoPlatformsError) from cylc.flow.cfgspec.glbl_cfg import glbl_cfg from cylc.flow.hostuserutil import is_remote_host +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.parsec.OrderedDict import OrderedDictWithDefaults @@ -265,6 +266,12 @@ def platform_from_name( platform_data['name'] = platform_name return platform_data + # If platform name in run mode and not otherwise defined: + if platform_name in RunMode.WORKFLOW_MODES: + platform_data = deepcopy(platforms['localhost']) + platform_data['name'] = platform_name + return platform_data + raise PlatformLookupError( f"No matching platform \"{platform_name}\" found") diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 449a5009656..b2952087502 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -57,6 +57,7 @@ class Prerequisite: MESSAGE_TEMPLATE = r'%s/%s %s' DEP_STATE_SATISFIED = 'satisfied naturally' + DEP_STATE_ARTIFICIAL = 'Artificially 
satisfied' DEP_STATE_OVERRIDDEN = 'force satisfied' DEP_STATE_UNSATISFIED = False @@ -199,20 +200,26 @@ def _conditional_is_satisfied(self): ) from None return res - def satisfy_me(self, outputs: Iterable['Tokens']) -> 'Set[Tokens]': + def satisfy_me( + self, outputs: Iterable['Tokens'], mode: str = 'live' + ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. Updates cache with the result. Return outputs that match. """ + if mode != 'live': + satisfied_message = self.DEP_STATE_ARTIFICIAL + f' by {mode} mode' + else: + satisfied_message = self.DEP_STATE_SATISFIED valid = set() for output in outputs: prereq = (output['cycle'], output['task'], output['task_sel']) if prereq not in self.satisfied: continue valid.add(output) - self.satisfied[prereq] = self.DEP_STATE_SATISFIED + self.satisfied[prereq] = satisfied_message if self.conditional_expression is None: self._all_satisfied = all(self.satisfied.values()) else: @@ -293,6 +300,14 @@ def get_resolved_dependencies(self): E.G: ['1/foo', '2/bar'] """ - return [f'{point}/{name}' for - (point, name, _), satisfied in self.satisfied.items() if - satisfied == self.DEP_STATE_SATISFIED] + return [ + f'{point}/{name}' for + (point, name, _), satisfied in self.satisfied.items() + if ( + satisfied == self.DEP_STATE_SATISFIED + or ( + isinstance(satisfied, str) + and satisfied.startswith(self.DEP_STATE_ARTIFICIAL) + ) + ) + ] diff --git a/cylc/flow/run_modes/dummy.py b/cylc/flow/run_modes/dummy.py new file mode 100644 index 00000000000..56d99b2c626 --- /dev/null +++ b/cylc/flow/run_modes/dummy.py @@ -0,0 +1,121 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Utilities supporting dummy mode. +""" + +from logging import INFO +from typing import TYPE_CHECKING, Any, Dict, Tuple + +from cylc.flow.task_outputs import TASK_OUTPUT_SUBMITTED +from cylc.flow.run_modes.simulation import ( + ModeSettings, + disable_platforms, + get_simulated_run_len, + parse_fail_cycle_points +) +from cylc.flow.task_state import RunMode +from cylc.flow.platforms import get_platform + + +if TYPE_CHECKING: + from cylc.flow.task_job_mgr import TaskJobManager + from cylc.flow.task_proxy import TaskProxy + from typing_extensions import Literal + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + rtconfig: Dict[str, Any], + workflow: str, + now: Tuple[float, str] +) -> 'Literal[False]': + """Submit a task in dummy mode. + + Returns: + False - indicating that TaskJobManager needs to continue running the + live mode path. 
+ """ + configure_dummy_mode( + rtconfig, itask.tdef.rtconfig['simulation']['fail cycle points']) + + itask.summary['started_time'] = now[0] + task_job_mgr._set_retry_timers(itask, rtconfig) + itask.mode_settings = ModeSettings( + itask, + task_job_mgr.workflow_db_mgr, + rtconfig + ) + + itask.waiting_on_job_prep = False + itask.submit_num += 1 + + itask.platform = get_platform() + itask.platform['name'] = RunMode.DUMMY + itask.summary['job_runner_name'] = RunMode.DUMMY + itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( + itask.mode_settings.simulated_run_length) + itask.jobs.append( + task_job_mgr.get_simulation_job_conf(itask, workflow)) + task_job_mgr.task_events_mgr.process_message( + itask, INFO, TASK_OUTPUT_SUBMITTED) + task_job_mgr.workflow_db_mgr.put_insert_task_jobs( + itask, { + 'time_submit': now[1], + 'try_num': itask.get_try_num(), + } + ) + return False + + +def configure_dummy_mode(rtc, fallback): + """Adjust task defs for simulation and dummy mode. + + """ + rtc['submission retry delays'] = [1] + # Generate dummy scripting. + rtc['init-script'] = "" + rtc['env-script'] = "" + rtc['pre-script'] = "" + rtc['post-script'] = "" + rtc['script'] = build_dummy_script( + rtc, get_simulated_run_len(rtc)) + disable_platforms(rtc) + # Disable environment, in case it depends on env-script. + rtc['environment'] = {} + rtc["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtc["simulation"]["fail cycle points"], fallback + ) + + +def build_dummy_script(rtc: Dict[str, Any], sleep_sec: int) -> str: + """Create fake scripting for dummy mode. + + This is for Dummy mode only. + """ + script = "sleep %d" % sleep_sec + # Dummy message outputs. 
+ for msg in rtc['outputs'].values(): + script += "\ncylc message '%s'" % msg + if rtc['simulation']['fail try 1 only']: + arg1 = "true" + else: + arg1 = "false" + arg2 = " ".join(rtc['simulation']['fail cycle points']) + script += "\ncylc__job__dummy_result %s %s || exit 1" % (arg1, arg2) + return script diff --git a/cylc/flow/run_modes/nonlive.py b/cylc/flow/run_modes/nonlive.py new file mode 100644 index 00000000000..af9567527d6 --- /dev/null +++ b/cylc/flow/run_modes/nonlive.py @@ -0,0 +1,61 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Utilities supporting all nonlive modes +""" +from typing import TYPE_CHECKING, Dict, List + +from cylc.flow import LOG +from cylc.flow.run_modes.skip import check_task_skip_config +from cylc.flow.task_state import RunMode + +if TYPE_CHECKING: + from cylc.flow.taskdef import TaskDef + + +def mode_validate_checks(taskdefs: 'Dict[str, TaskDef]'): + """Warn user if any tasks have "run mode" set to simulation or + dummy in flow.cylc. + + Setting run mode to simulation/dummy in the config should only + be done during development of a workflow. + + Additionally, run specific checks for each mode's config settings. 
+ """ + warn_nonlive: Dict[str, List[str]] = { + RunMode.SIMULATION: [], + RunMode.DUMMY: [], + } + + # Run through taskdefs looking for those with nonlive modes + for taskdef in taskdefs.values(): + # Add to list of tasks to be run in non-live modes: + if ( + taskdef.rtconfig.get('run mode', None) + in {RunMode.SIMULATION, RunMode.DUMMY} + ): + warn_nonlive[taskdef.rtconfig['run mode']].append(taskdef.name) + + # Run any mode specific validation checks: + check_task_skip_config(taskdef) + + if any(warn_nonlive.values()): + message = 'The following tasks are set to run in non-live mode:' + for mode, tasknames in warn_nonlive.items(): + if tasknames: + message += f'\n{mode} mode:' + for taskname in tasknames: + message += f'\n * {taskname}' + LOG.warning(message) diff --git a/cylc/flow/simulation.py b/cylc/flow/run_modes/simulation.py similarity index 64% rename from cylc/flow/simulation.py rename to cylc/flow/run_modes/simulation.py index 8ec4d279cb9..5280e512d81 100644 --- a/cylc/flow/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -13,33 +13,90 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-"""Utilities supporting simulation and skip modes +"""Utilities supporting simulation mode """ from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from logging import INFO +from typing import ( + TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union) from time import time from metomi.isodatetime.parsers import DurationParser from cylc.flow import LOG +from cylc.flow.cycling import PointBase from cylc.flow.cycling.loader import get_point from cylc.flow.exceptions import PointParsingError -from cylc.flow.platforms import FORBIDDEN_WITH_PLATFORM +from cylc.flow.platforms import FORBIDDEN_WITH_PLATFORM, get_platform +from cylc.flow.task_outputs import TASK_OUTPUT_SUBMITTED from cylc.flow.task_state import ( TASK_STATUS_RUNNING, TASK_STATUS_FAILED, TASK_STATUS_SUCCEEDED, ) from cylc.flow.wallclock import get_unix_time_from_time_string -from cylc.flow.workflow_status import RunMode +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.task_events_mgr import TaskEventsManager + from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_proxy import TaskProxy from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager - from cylc.flow.cycling import PointBase + from typing_extensions import Literal + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + rtconfig: Dict[str, Any], + workflow: str, + now: Tuple[float, str] +) -> 'Literal[True]': + """Submit a task in simulation mode. + + Returns: + True - indicating that TaskJobManager need take no further action. 
+ """ + configure_sim_mode( + rtconfig, + itask.tdef.rtconfig['simulation']['fail cycle points']) + itask.summary['started_time'] = now[0] + task_job_mgr._set_retry_timers(itask, rtconfig) + itask.mode_settings = ModeSettings( + itask, + task_job_mgr.workflow_db_mgr, + rtconfig + ) + itask.waiting_on_job_prep = False + itask.submit_num += 1 + + itask.platform = get_platform() + itask.platform['name'] = RunMode.SIMULATION + itask.summary['job_runner_name'] = RunMode.SIMULATION + itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( + itask.mode_settings.simulated_run_length + ) + itask.jobs.append( + task_job_mgr.get_simulation_job_conf(itask, workflow) + ) + task_job_mgr.task_events_mgr.process_message( + itask, INFO, TASK_OUTPUT_SUBMITTED, + ) + task_job_mgr.workflow_db_mgr.put_insert_task_jobs( + itask, { + 'time_submit': now[1], + 'try_num': itask.get_try_num(), + 'flow_nums': str(list(itask.flow_nums)), + 'is_manual_submit': itask.is_manual_submit, + 'job_runner_name': RunMode.SIMULATION, + 'platform_name': RunMode.SIMULATION, + 'submit_status': 0 # Submission has succeeded + } + ) + itask.state.status = TASK_STATUS_RUNNING + return True @dataclass @@ -79,7 +136,6 @@ def __init__( db_mgr: 'WorkflowDatabaseManager', rtconfig: Dict[str, Any] ): - # itask.summary['started_time'] and mode_settings.timeout need # repopulating from the DB on workflow restart: started_time = itask.summary['started_time'] @@ -104,22 +160,15 @@ def __init__( try_num = db_info["try_num"] # Parse fail cycle points: - if rtconfig != itask.tdef.rtconfig: - try: - rtconfig["simulation"][ - "fail cycle points" - ] = parse_fail_cycle_points( - rtconfig["simulation"]["fail cycle points"] - ) - except PointParsingError as exc: - # Broadcast Fail CP didn't parse - LOG.warning( - 'Broadcast fail cycle point was invalid:\n' - f' {exc.args[0]}' - ) - rtconfig['simulation'][ - 'fail cycle points' - ] = itask.tdef.rtconfig['simulation']['fail cycle points'] + if not rtconfig: + rtconfig = 
itask.tdef.rtconfig + if rtconfig and rtconfig != itask.tdef.rtconfig: + rtconfig["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtconfig["simulation"]["fail cycle points"], + itask.tdef.rtconfig['simulation']['fail cycle points'] + ) # Calculate simulation info: self.simulated_run_length = ( @@ -132,37 +181,39 @@ def __init__( self.timeout = started_time + self.simulated_run_length -def configure_sim_modes(taskdefs, sim_mode): +def configure_sim_mode(rtc, fallback): """Adjust task defs for simulation and dummy mode. + Example: + >>> this = configure_sim_mode + >>> rtc = { + ... 'submission retry delays': [42, 24, 23], + ... 'environment': {'DoNot': '"WantThis"'}, + ... 'simulation': {'fail cycle points': ['all']} + ... } + >>> this(rtc, [53]) + >>> rtc['submission retry delays'] + [1] + >>> rtc['environment'] + {} + >>> rtc['simulation'] + {'fail cycle points': None} + >>> rtc['platform'] + 'localhost' """ - dummy_mode = (sim_mode == RunMode.DUMMY) - - for tdef in taskdefs: - # Compute simulated run time by scaling the execution limit. - rtc = tdef.rtconfig - - rtc['submission retry delays'] = [1] + rtc['submission retry delays'] = [1] - if dummy_mode: - # Generate dummy scripting. - rtc['init-script'] = "" - rtc['env-script'] = "" - rtc['pre-script'] = "" - rtc['post-script'] = "" - rtc['script'] = build_dummy_script( - rtc, get_simulated_run_len(rtc)) + disable_platforms(rtc) - disable_platforms(rtc) + # Disable environment, in case it depends on env-script. + rtc['environment'] = {} - # Disable environment, in case it depends on env-script. 
- rtc['environment'] = {} - - rtc["simulation"][ - "fail cycle points" - ] = parse_fail_cycle_points( - rtc["simulation"]["fail cycle points"] - ) + rtc["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtc["simulation"]["fail cycle points"], + fallback + ) def get_simulated_run_len(rtc: Dict[str, Any]) -> int: @@ -184,24 +235,6 @@ def get_simulated_run_len(rtc: Dict[str, Any]) -> int: return sleep_sec -def build_dummy_script(rtc: Dict[str, Any], sleep_sec: int) -> str: - """Create fake scripting for dummy mode. - - This is for Dummy mode only. - """ - script = "sleep %d" % sleep_sec - # Dummy message outputs. - for msg in rtc['outputs'].values(): - script += "\ncylc message '%s'" % msg - if rtc['simulation']['fail try 1 only']: - arg1 = "true" - else: - arg1 = "false" - arg2 = " ".join(rtc['simulation']['fail cycle points']) - script += "\ncylc__job__dummy_result %s %s || exit 1" % (arg1, arg2) - return script - - def disable_platforms( rtc: Dict[str, Any] ) -> None: @@ -222,7 +255,7 @@ def disable_platforms( def parse_fail_cycle_points( - f_pts_orig: List[str] + f_pts_orig: List[str], fallback ) -> 'Union[None, List[PointBase]]': """Parse `[simulation][fail cycle points]`. 
@@ -231,11 +264,11 @@ def parse_fail_cycle_points( Examples: >>> this = parse_fail_cycle_points - >>> this(['all']) is None + >>> this(['all'], ['42']) is None True - >>> this([]) + >>> this([], ['42']) [] - >>> this(None) is None + >>> this(None, ['42']) is None True """ f_pts: 'Optional[List[PointBase]]' = [] @@ -247,7 +280,16 @@ def parse_fail_cycle_points( elif f_pts_orig: f_pts = [] for point_str in f_pts_orig: - f_pts.append(get_point(point_str).standardise()) + if isinstance(point_str, PointBase): + f_pts.append(point_str) + else: + try: + f_pts.append(get_point(point_str).standardise()) + except PointParsingError: + LOG.warning( + f'Invalid ISO 8601 date representation: {point_str}' + ) + return fallback return f_pts @@ -266,13 +308,19 @@ def sim_time_check( now = time() sim_task_state_changed: bool = False for itask in itasks: - if itask.state.status != TASK_STATUS_RUNNING: + if ( + itask.state.status != TASK_STATUS_RUNNING + or itask.tdef.run_mode != RunMode.SIMULATION + ): continue # This occurs if the workflow has been restarted. if itask.mode_settings is None: rtconfig = task_events_manager.broadcast_mgr.get_updated_rtconfig( itask) + rtconfig = configure_sim_mode( + rtconfig, + itask.tdef.rtconfig['simulation']['fail cycle points']) itask.mode_settings = ModeSettings( itask, db_mgr, diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py new file mode 100644 index 00000000000..8a58a1bf983 --- /dev/null +++ b/cylc/flow/run_modes/skip.py @@ -0,0 +1,166 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Utilities supporting skip modes +""" +from logging import INFO +from typing import ( + TYPE_CHECKING, List, Set, Tuple) + +from cylc.flow import LOG +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.platforms import get_platform +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUBMITTED, + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, + TASK_OUTPUT_STARTED +) +from cylc.flow.task_state import RunMode + +if TYPE_CHECKING: + from cylc.flow.taskdef import TaskDef + from cylc.flow.task_job_mgr import TaskJobManager + from cylc.flow.task_proxy import TaskProxy + from typing_extensions import Literal + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + now: Tuple[float, str] +) -> 'Literal[True]': + """Submit a task in skip mode. + + Returns: + True - indicating that TaskJobManager need take no further action. 
+ """ + task_job_mgr._set_retry_timers(itask, itask.tdef.rtconfig) + itask.summary['started_time'] = now[0] + itask.waiting_on_job_prep = False + itask.submit_num += 1 + + itask.platform = get_platform() + itask.platform['name'] = RunMode.SKIP + itask.summary['job_runner_name'] = RunMode.SKIP + itask.tdef.run_mode = RunMode.SKIP + task_job_mgr.task_events_mgr.process_message( + itask, INFO, TASK_OUTPUT_SUBMITTED, + ) + task_job_mgr.workflow_db_mgr.put_insert_task_jobs( + itask, { + 'time_submit': now[1], + 'try_num': itask.get_try_num(), + 'flow_nums': str(list(itask.flow_nums)), + 'is_manual_submit': itask.is_manual_submit, + 'job_runner_name': RunMode.SIMULATION, + 'platform_name': RunMode.SIMULATION, + 'submit_status': 0 # Submission has succeeded + } + ) + for output in process_outputs(itask): + task_job_mgr.task_events_mgr.process_message(itask, INFO, output) + + return True + + +def process_outputs(itask: 'TaskProxy') -> List[str]: + """Process Skip Mode Outputs: + + * By default, all required outputs will be generated plus succeeded + if success is optional. + * The outputs submitted and started are always produced and do not + need to be defined in outputs. + * If outputs is specified and does not include either + succeeded or failed then succeeded will be produced. + + Return: + A list of outputs to emit. + """ + result: List[str] = [] + conf_outputs = itask.tdef.rtconfig['skip']['outputs'] + + # Remove started or submitted from our list of outputs: + for out in get_unecessary_outputs(conf_outputs): + conf_outputs.remove(out) + + # Always produce `submitted` output: + result.append(TASK_OUTPUT_SUBMITTED) + # (No need to produce `started` as this is automatically handled by + # task event manager for jobless modes) + + # Send the rest of our outputs, unless they are succeed or failed, + # which we hold back, to prevent warnings about pre-requisites being + # unmet being shown because a "finished" output happens to come first. 
+    for message in itask.state.outputs.iter_required_messages():
+        trigger = itask.state.outputs._message_to_trigger[message]
+        # Send message unless it is succeeded/failed.
+        if trigger in [TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED]:
+            continue
+        if not conf_outputs or trigger in conf_outputs:
+            result.append(message)
+
+    # Send succeeded/failed last.
+    if TASK_OUTPUT_FAILED in conf_outputs:
+        result.append(TASK_OUTPUT_FAILED)
+    else:
+        result.append(TASK_OUTPUT_SUCCEEDED)
+
+    return result
+
+
+def check_task_skip_config(tdef: 'TaskDef') -> None:
+    """Ensure that skip mode configurations are sane at validation time:
+
+    Args:
+        tdef: definition of the task to check.
+
+    Logs:
+        * Warn that outputs need not include started and submitted as these
+          are always emitted.
+
+    Raises:
+        * Error if outputs include succeeded and failed.
+    """
+    skip_config = tdef.rtconfig.get('skip', {})
+    if not skip_config:
+        return
+    skip_outputs = skip_config.get('outputs', {})
+    if not skip_outputs:
+        return
+
+    # Error if outputs include succeeded and failed:
+    if (
+        TASK_OUTPUT_SUCCEEDED in skip_outputs
+        and TASK_OUTPUT_FAILED in skip_outputs
+    ):
+        raise WorkflowConfigError(
+            f'Skip mode settings for task {tdef.name} has'
+            ' mutually exclusive outputs: succeeded AND failed.')
+    LOG.info(f'Task {tdef.name} will be run in skip mode.')
+
+
+def get_unecessary_outputs(skip_outputs: List[str]) -> Set[str]:
+    """Get a list of outputs which we will always run, and don't need
+    setting config.
+ + Examples: + >>> this = get_unecessary_outputs + >>> this(['foo', 'started', 'succeeded']) + {'started'} + """ + return {TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED}.intersection( + skip_outputs + ) diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 92702b0b55e..9f6b0b33dfc 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -107,8 +107,13 @@ ) from cylc.flow.profiler import Profiler from cylc.flow.resources import get_resources -from cylc.flow.simulation import sim_time_check +from cylc.flow.run_modes.simulation import sim_time_check from cylc.flow.subprocpool import SubProcPool +from cylc.flow.templatevars import eval_var +from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager +from cylc.flow.workflow_events import WorkflowEventHandler +from cylc.flow.workflow_status import StopMode, AutoRestartMode +from cylc.flow.taskdef import TaskDef from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_pool import TaskPool @@ -127,9 +132,7 @@ TASK_STATUS_RUNNING, TASK_STATUS_SUBMITTED, TASK_STATUS_WAITING, -) -from cylc.flow.taskdef import TaskDef -from cylc.flow.templatevars import eval_var + RunMode) from cylc.flow.templatevars import get_template_vars from cylc.flow.timer import Timer from cylc.flow.util import cli_format @@ -138,9 +141,6 @@ get_time_string_from_unix_time as time2str, get_utc_mode, ) -from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager -from cylc.flow.workflow_events import WorkflowEventHandler -from cylc.flow.workflow_status import AutoRestartMode, RunMode, StopMode from cylc.flow.xtrigger_mgr import XtriggerManager if TYPE_CHECKING: @@ -1269,7 +1269,7 @@ def release_queued_tasks(self) -> bool: pre_prep_tasks, self.server.curve_auth, self.server.client_pub_key_dir, - is_simulation=(self.get_run_mode() == RunMode.SIMULATION) + run_mode=self.get_run_mode() ): if itask.flow_nums: flow = ','.join(str(i) for i in itask.flow_nums) @@ 
-1516,17 +1516,12 @@ async def _main_loop(self) -> None: if self.xtrigger_mgr.do_housekeeping: self.xtrigger_mgr.housekeep(self.pool.get_tasks()) - self.pool.clock_expire_tasks() self.release_queued_tasks() - - if ( - self.get_run_mode() == RunMode.SIMULATION - and sim_time_check( - self.task_events_mgr, - self.pool.get_tasks(), - self.workflow_db_mgr, - ) + if sim_time_check( + self.task_events_mgr, + self.pool.get_tasks(), + self.workflow_db_mgr, ): # A simulated task state change occurred. self.reset_inactivity_timer() diff --git a/cylc/flow/scheduler_cli.py b/cylc/flow/scheduler_cli.py index 0594820685a..017cfa23b54 100644 --- a/cylc/flow/scheduler_cli.py +++ b/cylc/flow/scheduler_cli.py @@ -54,6 +54,7 @@ from cylc.flow.remote import cylc_server_cmd from cylc.flow.scheduler import Scheduler, SchedulerError from cylc.flow.scripts.common import cylc_header +from cylc.flow.task_state import RunMode from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager from cylc.flow.workflow_files import ( SUITERC_DEPR_MSG, @@ -65,7 +66,6 @@ is_terminal, prompt, ) -from cylc.flow.workflow_status import RunMode if TYPE_CHECKING: from optparse import Values @@ -129,9 +129,15 @@ RUN_MODE = OptionSettings( ["-m", "--mode"], - help="Run mode: live, dummy, simulation (default live).", + help=( + f"Run mode: {RunMode.WORKFLOW_MODES} (default live)." + " Live mode executes the tasks as defined in the runtime section." + " Simulation, skip and dummy modes ignore part of tasks'" + " runtime configurations. Simulation and dummy modes are" + " designed for testing, and skip mode is for flow control." 
+ ), metavar="STRING", action='store', dest="run_mode", - choices=[RunMode.LIVE, RunMode.DUMMY, RunMode.SIMULATION], + choices=list(RunMode.WORKFLOW_MODES), ) PLAY_RUN_MODE = deepcopy(RUN_MODE) diff --git a/cylc/flow/scripts/lint.py b/cylc/flow/scripts/lint.py index c8aa7279590..eb9fb0a2f96 100755 --- a/cylc/flow/scripts/lint.py +++ b/cylc/flow/scripts/lint.py @@ -89,9 +89,15 @@ from cylc.flow.cfgspec.workflow import upg, SPEC from cylc.flow.id_cli import parse_id from cylc.flow.parsec.config import ParsecConfig +from cylc.flow.run_modes.skip import get_unecessary_outputs from cylc.flow.scripts.cylc import DEAD_ENDS +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, +) from cylc.flow.terminal import cli_function + if TYPE_CHECKING: # BACK COMPAT: typing_extensions.Literal # FROM: Python 3.7 @@ -350,6 +356,39 @@ def check_for_deprecated_task_event_template_vars( return None +BAD_SKIP_OUTS = re.compile(r'outputs\s*=\s*(.*)') + + +def check_skip_mode_outputs(line: str) -> Dict: + """Ensure skip mode output setting doesn't include: + + * succeeded _and_ failed: Mutually exclusive. + * submitted and started: These are emitted by skip mode anyway. + + n.b. + + This should be separable from ``[[outputs]]`` because it's a key + value pair not a section heading. 
+ """ + + outputs = BAD_SKIP_OUTS.findall(line) + if outputs: + outputs = [i.strip() for i in outputs[0].split(',')] + if TASK_OUTPUT_FAILED in outputs and TASK_OUTPUT_SUCCEEDED in outputs: + return { + 'description': + 'are mutually exclusive and cannot be used together', + 'outputs': f'{TASK_OUTPUT_FAILED} and {TASK_OUTPUT_SUCCEEDED}' + } + pointless_outputs = get_unecessary_outputs(outputs) + if pointless_outputs: + return { + 'description': 'are not required, they will be emitted anyway', + 'outputs': f'{pointless_outputs}' + } + return {} + + INDENTATION = re.compile(r'^(\s*)(.*)') @@ -588,7 +627,15 @@ def list_wrapper(line: str, check: Callable) -> Optional[Dict[str, str]]: ' directive can make your workflow more portable.' ), FUNCTION: check_wallclock_directives, - } + }, + 'S015': { + 'short': 'Task outputs {outputs}: {description}.', + FUNCTION: check_skip_mode_outputs + }, + 'S016': { + 'short': 'Run mode is not live: This task will only appear to run.', + FUNCTION: re.compile(r'run mode\s*=\s*[^l][^i][^v][^e]$').findall + }, } # Subset of deprecations which are tricky (impossible?) to scrape from the # upgrader. diff --git a/cylc/flow/scripts/set.py b/cylc/flow/scripts/set.py index b64cf74aba0..888ba20890e 100755 --- a/cylc/flow/scripts/set.py +++ b/cylc/flow/scripts/set.py @@ -65,6 +65,9 @@ # complete the succeeded output of 3/bar: $ cylc set --out=succeeded my_workflow//3/bar + # complete the outputs defined in [runtime][task][skip] + $ cylc set --out=skip my_workflow//3/bar + # satisfy the 3/foo:succeeded prerequisite of 3/bar: $ cylc set --pre=3/foo my_workflow//3/bar # or: @@ -154,8 +157,10 @@ def get_option_parser() -> COP: "-o", "--out", "--output", metavar="OUTPUT(s)", help=( "Complete task outputs. For multiple outputs re-use the" - " option, or give a comma-separated list of outputs, or" - ' use "--out=required" to complete all required outputs.' + " option, or give a comma-separated list of outputs." 
+ ' Use "--out=required" to complete all required outputs.' + ' Use "--out=skip" to complete outputs defined in the tasks.' + ' [skip] configuration.' " OUTPUT format: trigger names as used in the graph." ), action="append", default=None, dest="outputs" diff --git a/cylc/flow/scripts/validate.py b/cylc/flow/scripts/validate.py index bd3e6098906..63460a81458 100755 --- a/cylc/flow/scripts/validate.py +++ b/cylc/flow/scripts/validate.py @@ -54,15 +54,12 @@ from cylc.flow.task_proxy import TaskProxy from cylc.flow.templatevars import get_template_vars from cylc.flow.terminal import cli_function -from cylc.flow.scheduler_cli import RUN_MODE -from cylc.flow.workflow_status import RunMode +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.option_parsers import Values -VALIDATE_RUN_MODE = deepcopy(RUN_MODE) -VALIDATE_RUN_MODE.sources = {'validate'} VALIDATE_ICP_OPTION = deepcopy(ICP_OPTION) VALIDATE_ICP_OPTION.sources = {'validate'} VALIDATE_AGAINST_SOURCE_OPTION = deepcopy(AGAINST_SOURCE_OPTION) @@ -98,7 +95,6 @@ dest="profile_mode", sources={'validate'} ), - VALIDATE_RUN_MODE, VALIDATE_ICP_OPTION, ] diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index bf9c2ba3a9b..3737ed88bad 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -78,7 +78,8 @@ TASK_STATUS_FAILED, TASK_STATUS_EXPIRED, TASK_STATUS_SUCCEEDED, - TASK_STATUS_WAITING + TASK_STATUS_WAITING, + RunMode, ) from cylc.flow.task_outputs import ( TASK_OUTPUT_EXPIRED, @@ -98,7 +99,6 @@ get_template_variables as get_workflow_template_variables, process_mail_footer, ) -from cylc.flow.workflow_status import RunMode if TYPE_CHECKING: @@ -770,7 +770,7 @@ def process_message( # ... but either way update the job ID in the job proxy (it only # comes in via the submission message). 
- if itask.tdef.run_mode != RunMode.SIMULATION: + if itask.tdef.run_mode not in RunMode.JOBLESS_MODES: job_tokens = itask.tokens.duplicate( job=str(itask.submit_num) ) @@ -889,7 +889,7 @@ def _process_message_check( if ( itask.state(TASK_STATUS_WAITING) - # Polling in live mode only: + # Polling in live mode only. and itask.tdef.run_mode == RunMode.LIVE and ( ( @@ -935,7 +935,7 @@ def _process_message_check( def setup_event_handlers(self, itask, event, message): """Set up handlers for a task event.""" - if itask.tdef.run_mode != RunMode.LIVE: + if RunMode.disable_task_event_handlers(itask): return msg = "" if message != f"job {event}": @@ -1460,7 +1460,7 @@ def _process_message_submitted( ) itask.set_summary_time('submitted', event_time) - if itask.tdef.run_mode == RunMode.SIMULATION: + if itask.tdef.run_mode in RunMode.JOBLESS_MODES: # Simulate job started as well. itask.set_summary_time('started', event_time) if itask.state_reset(TASK_STATUS_RUNNING, forced=forced): @@ -1497,7 +1497,7 @@ def _process_message_submitted( 'submitted', event_time, ) - if itask.tdef.run_mode == RunMode.SIMULATION: + if itask.tdef.run_mode in RunMode.JOBLESS_MODES: # Simulate job started as well. self.data_store_mgr.delta_job_time( job_tokens, @@ -1530,7 +1530,7 @@ def _insert_task_job( # not see previous submissions (so can't use itask.jobs[submit_num-1]). # And transient tasks, used for setting outputs and spawning children, # do not submit jobs. 
- if (itask.tdef.run_mode == RunMode.SIMULATION) or forced: + if itask.tdef.run_mode in RunMode.JOBLESS_MODES or forced: job_conf = {"submit_num": itask.submit_num} else: job_conf = itask.jobs[-1] diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 185966ff12d..89553ac1e13 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -35,7 +35,7 @@ ) from shutil import rmtree from time import time -from typing import TYPE_CHECKING, Any, Union, Optional +from typing import TYPE_CHECKING, Any, List, Tuple, Union, Optional from cylc.flow import LOG from cylc.flow.job_runner_mgr import JobPollContext @@ -63,7 +63,12 @@ get_platform, ) from cylc.flow.remote import construct_ssh_cmd -from cylc.flow.simulation import ModeSettings +from cylc.flow.run_modes.simulation import ( + submit_task_job as simulation_submit_task_job) +from cylc.flow.run_modes.skip import ( + submit_task_job as skip_submit_task_job) +from cylc.flow.run_modes.dummy import ( + submit_task_job as dummy_submit_task_job) from cylc.flow.subprocctx import SubProcContext from cylc.flow.subprocpool import SubProcPool from cylc.flow.task_action_timer import ( @@ -103,7 +108,8 @@ TASK_STATUS_SUBMITTED, TASK_STATUS_RUNNING, TASK_STATUS_WAITING, - TASK_STATUSES_ACTIVE + TASK_STATUSES_ACTIVE, + RunMode ) from cylc.flow.wallclock import ( get_current_time_string, @@ -247,7 +253,7 @@ def prep_submit_task_jobs(self, workflow, itasks, check_syntax=True): return [prepared_tasks, bad_tasks] def submit_task_jobs(self, workflow, itasks, curve_auth, - client_pub_key_dir, is_simulation=False): + client_pub_key_dir, run_mode='live'): """Prepare for job submission and submit task jobs. Preparation (host selection, remote host init, and remote install) @@ -262,8 +268,8 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, Return (list): list of tasks that attempted submission. 
""" - if is_simulation: - return self._simulation_submit_task_jobs(itasks, workflow) + itasks, jobless_tasks = self._nonlive_submit_task_jobs( + itasks, workflow, run_mode) # Prepare tasks for job submission prepared_tasks, bad_tasks = self.prep_submit_task_jobs( @@ -272,9 +278,10 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, # Reset consumed host selection results self.task_remote_mgr.subshell_eval_reset() - if not prepared_tasks: + if not prepared_tasks and not jobless_tasks: return bad_tasks - + elif not prepared_tasks: + return jobless_tasks auth_itasks = {} # {platform: [itask, ...], ...} for itask in prepared_tasks: @@ -282,8 +289,7 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, auth_itasks.setdefault(platform_name, []) auth_itasks[platform_name].append(itask) # Submit task jobs for each platform - # Non-prepared tasks can be considered done for now: - done_tasks = bad_tasks + done_tasks = bad_tasks + jobless_tasks for _, itasks in sorted(auth_itasks.items()): # Find the first platform where >1 host has not been tried and @@ -997,44 +1003,64 @@ def _set_retry_timers( except KeyError: itask.try_timers[key] = TaskActionTimer(delays=delays) - def _simulation_submit_task_jobs(self, itasks, workflow): - """Simulation mode task jobs submission.""" + def _nonlive_submit_task_jobs( + self: 'TaskJobManager', + itasks: 'List[TaskProxy]', + workflow: str, + workflow_run_mode: str, + ) -> 'Tuple[List[TaskProxy], List[TaskProxy]]': + """Simulation mode task jobs submission. + + Returns: + lively_tasks: + A list of tasks which require subsequent + processing **as if** they were live mode tasks. + (This includes live and dummy mode tasks) + ghostly_tasks: + A list of tasks which require no further processing + because their apparent execution is done entirely inside + the scheduler. (This includes skip and simulation mode tasks). 
+ """ + lively_tasks: 'List[TaskProxy]' = [] + jobless_tasks: 'List[TaskProxy]' = [] now = time() - now_str = get_time_string_from_unix_time(now) + now = (now, get_time_string_from_unix_time(now)) + for itask in itasks: - # Handle broadcasts + # Handle broadcasts: rtconfig = self.task_events_mgr.broadcast_mgr.get_updated_rtconfig( itask) - itask.summary['started_time'] = now - self._set_retry_timers(itask, rtconfig) - itask.mode_settings = ModeSettings( - itask, - self.workflow_db_mgr, - rtconfig - ) - - itask.waiting_on_job_prep = False - itask.submit_num += 1 + # Apply task run mode - itask.platform = {'name': 'SIMULATION'} - itask.summary['job_runner_name'] = 'SIMULATION' - itask.summary[self.KEY_EXECUTE_TIME_LIMIT] = ( - itask.mode_settings.simulated_run_length - ) - itask.jobs.append( - self.get_simulation_job_conf(itask, workflow) - ) - self.task_events_mgr.process_message( - itask, INFO, TASK_OUTPUT_SUBMITTED, - ) - self.workflow_db_mgr.put_insert_task_jobs( - itask, { - 'time_submit': now_str, - 'try_num': itask.get_try_num(), - } - ) - return itasks + if rtconfig['run mode'] == 'workflow': + run_mode = workflow_run_mode + else: + if rtconfig['run mode'] != workflow_run_mode: + LOG.info( + f'[{itask.identity}] run mode set by task settings' + f' to: {rtconfig["run mode"]} mode.') + run_mode = rtconfig['run mode'] + itask.tdef.run_mode = run_mode + + # Submit ghost tasks, or add live-like tasks to list + # of tasks to put through live submission pipeline: + is_done = False + if run_mode == RunMode.DUMMY: + is_done = dummy_submit_task_job( + self, itask, rtconfig, workflow, now) + elif run_mode == RunMode.SIMULATION: + is_done = simulation_submit_task_job( + self, itask, rtconfig, workflow, now) + elif run_mode == RunMode.SKIP: + is_done = skip_submit_task_job( + self, itask, now) + # Assign task to list: + if is_done: + jobless_tasks.append(itask) + else: + lively_tasks.append(itask) + return lively_tasks, jobless_tasks def _submit_task_jobs_callback(self, 
ctx, workflow, itasks): """Callback when submit task jobs command exits.""" diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index 72e8065d4ca..b28c5f0250c 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -53,6 +53,7 @@ from cylc.flow.task_id import TaskID from cylc.flow.task_proxy import TaskProxy from cylc.flow.task_state import ( + RunMode, TASK_STATUSES_ACTIVE, TASK_STATUSES_FINAL, TASK_STATUS_WAITING, @@ -70,6 +71,8 @@ ) from cylc.flow.wallclock import get_current_time_string from cylc.flow.platforms import get_platform +from cylc.flow.run_modes.skip import ( + process_outputs as get_skip_mode_outputs) from cylc.flow.task_outputs import ( TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_EXPIRED, @@ -1414,7 +1417,10 @@ def spawn_on_output(self, itask, output, forced=False): tasks = [c_task] for t in tasks: - t.satisfy_me([itask.tokens.duplicate(task_sel=output)]) + t.satisfy_me( + [itask.tokens.duplicate(task_sel=output)], + getattr(itask.tdef, 'run_mode', RunMode.LIVE) + ) self.data_store_mgr.delta_task_prerequisite(t) if not in_pool: self.add_to_pool(t) @@ -1538,7 +1544,8 @@ def spawn_on_all_outputs( continue if completed_only: c_task.satisfy_me( - [itask.tokens.duplicate(task_sel=message)] + [itask.tokens.duplicate(task_sel=message)], + itask.tdef.run_mode ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) @@ -1857,7 +1864,8 @@ def _standardise_outputs( try: msg = tdef.outputs[output][0] except KeyError: - LOG.warning(f"output {point}/{tdef.name}:{output} not found") + LOG.warning( + f"output {point}/{tdef.name}:{output} not found") continue _outputs.append(msg) return _outputs @@ -1953,9 +1961,14 @@ def _set_outputs_itask( if not outputs: outputs = list(itask.state.outputs.iter_required_messages()) else: + skips = [] + if RunMode.SKIP in outputs: + # Get outputs defined in skip mode config: + skips = get_skip_mode_outputs(itask) + outputs.remove(RunMode.SKIP) outputs = self._standardise_outputs( - itask.point, 
itask.tdef, outputs - ) + itask.point, itask.tdef, outputs) + outputs = list(set(outputs + skips)) for output in sorted(outputs, key=itask.state.outputs.output_sort_key): if itask.state.outputs.is_message_complete(output): diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 4e7b60d6e0a..80fe210fd80 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -39,6 +39,7 @@ from cylc.flow.platforms import get_platform from cylc.flow.task_action_timer import TimerFlags from cylc.flow.task_state import ( + RunMode, TaskState, TASK_STATUS_WAITING, TASK_STATUS_EXPIRED, @@ -53,7 +54,7 @@ if TYPE_CHECKING: from cylc.flow.cycling import PointBase - from cylc.flow.simulation import ModeSettings + from cylc.flow.run_modes.simulation import ModeSettings from cylc.flow.task_action_timer import TaskActionTimer from cylc.flow.taskdef import TaskDef from cylc.flow.id import Tokens @@ -544,7 +545,7 @@ def state_reset( return False def satisfy_me( - self, task_messages: 'List[Tokens]' + self, task_messages: 'List[Tokens]', mode=RunMode.LIVE ) -> 'Set[Tokens]': """Try to satisfy my prerequisites with given output messages. @@ -554,7 +555,7 @@ def satisfy_me( Return a set of unmatched task messages. 
""" - used = self.state.satisfy_me(task_messages) + used = self.state.satisfy_me(task_messages, mode) return set(task_messages) - used def clock_expire(self) -> bool: diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py index a4adb53d374..6166b8502a8 100644 --- a/cylc/flow/task_state.py +++ b/cylc/flow/task_state.py @@ -23,6 +23,7 @@ from cylc.flow.wallclock import get_current_time_string if TYPE_CHECKING: + from cylc.flow.option_parsers import Values from cylc.flow.id import Tokens @@ -154,6 +155,61 @@ } +class RunMode: + """The possible run modes of a task/workflow.""" + + LIVE = 'live' + """Task will run normally.""" + + SIMULATION = 'simulation' + """Task will run in simulation mode.""" + + DUMMY = 'dummy' + """Task will run in dummy mode.""" + + SKIP = 'skip' + """Task will run in skip mode.""" + + WORKFLOW = 'workflow' + """Default to workflow run mode""" + + MODES = frozenset({LIVE, SIMULATION, DUMMY, SKIP, WORKFLOW}) + + WORKFLOW_MODES = (LIVE, DUMMY, SIMULATION, SKIP) + """Workflow mode not sensible mode for workflow. + + n.b. not using a set to ensure ordering in CLI + """ + + JOB_MODES = frozenset({LIVE, DUMMY}) + """Modes which need to have real jobs submitted.""" + + JOBLESS_MODES = frozenset({SKIP, SIMULATION}) + """Modes which completely ignore the standard submission path.""" + + @staticmethod + def get(options: 'Values') -> str: + """Return the workflow run mode from the options.""" + return getattr(options, 'run_mode', None) or RunMode.LIVE + + @staticmethod + def disable_task_event_handlers(itask): + """Should we disable event handlers for this task? 
+ + No event handlers in simulation mode, or in skip mode + if we don't deliberately enable them: + """ + mode = itask.tdef.run_mode + return ( + mode == RunMode.SIMULATION + or ( + mode == RunMode.SKIP + and itask.tdef.rtconfig['skip'][ + 'disable task event handlers'] is True + ) + ) + + def status_leq(status_a, status_b): """"Return True if status_a <= status_b""" return (TASK_STATUSES_ORDERED.index(status_a) <= @@ -301,7 +357,8 @@ def __call__( def satisfy_me( self, - outputs: Iterable['Tokens'] + outputs: Iterable['Tokens'], + mode, ) -> Set['Tokens']: """Try to satisfy my prerequisites with given outputs. @@ -309,9 +366,12 @@ def satisfy_me( """ valid: Set[Tokens] = set() for prereq in (*self.prerequisites, *self.suicide_prerequisites): - valid.update( - prereq.satisfy_me(outputs) - ) + yep = prereq.satisfy_me(outputs, mode) + if yep: + valid = valid.union(yep) + continue + self._is_satisfied = None + self._suicide_is_satisfied = None return valid def xtriggers_all_satisfied(self): diff --git a/cylc/flow/unicode_rules.py b/cylc/flow/unicode_rules.py index a6974888248..4e29e4eff07 100644 --- a/cylc/flow/unicode_rules.py +++ b/cylc/flow/unicode_rules.py @@ -23,7 +23,7 @@ _TASK_NAME_PREFIX, ) from cylc.flow.task_qualifiers import TASK_QUALIFIERS -from cylc.flow.task_state import TASK_STATUSES_ORDERED +from cylc.flow.task_state import TASK_STATUSES_ORDERED, RunMode ENGLISH_REGEX_MAP = { r'\w': 'alphanumeric', @@ -351,6 +351,8 @@ class TaskOutputValidator(UnicodeRuleChecker): not_starts_with('_cylc'), # blacklist keywords not_equals('required', 'optional', 'all', 'and', 'or'), + # blacklist Run Modes: + not_equals(*RunMode.MODES), # blacklist built-in task qualifiers and statuses (e.g. 
"waiting") not_equals(*sorted({*TASK_QUALIFIERS, *TASK_STATUSES_ORDERED})), ] diff --git a/tests/flakyfunctional/cylc-get-config/04-dummy-mode-output/flow.cylc b/tests/flakyfunctional/cylc-get-config/04-dummy-mode-output/flow.cylc index 4db81ada9ee..052b00c9387 100644 --- a/tests/flakyfunctional/cylc-get-config/04-dummy-mode-output/flow.cylc +++ b/tests/flakyfunctional/cylc-get-config/04-dummy-mode-output/flow.cylc @@ -19,7 +19,7 @@ [[root]] script = true [[[simulation]]] - default run length = PT0S + default run time = PT0S [[foo]] script = true [[[outputs]]] diff --git a/tests/flakyfunctional/events/05-timeout-ref-dummy/flow.cylc b/tests/flakyfunctional/events/05-timeout-ref-dummy/flow.cylc index 52d723fead1..5b150eecc10 100644 --- a/tests/flakyfunctional/events/05-timeout-ref-dummy/flow.cylc +++ b/tests/flakyfunctional/events/05-timeout-ref-dummy/flow.cylc @@ -11,4 +11,4 @@ script = "false" [[[simulation]]] fail cycle points = 1 - default run length = PT0S + default run time = PT0S diff --git a/tests/flakyfunctional/modes/03-dummy-env/flow.cylc b/tests/flakyfunctional/modes/03-dummy-env/flow.cylc index e4a5d678712..d94938d2988 100644 --- a/tests/flakyfunctional/modes/03-dummy-env/flow.cylc +++ b/tests/flakyfunctional/modes/03-dummy-env/flow.cylc @@ -4,7 +4,7 @@ [runtime] [[root]] [[[simulation]]] - default run length = PT0S + default run time = PT0S [[oxygas]] pre-script = echo "[MY-PRE-SCRIPT] \${CYLC_TASK_NAME} is ${CYLC_TASK_NAME}" script = """ diff --git a/tests/functional/cylc-config/00-simple/section2.stdout b/tests/functional/cylc-config/00-simple/section2.stdout index 3d83ac15278..e43206e1409 100644 --- a/tests/functional/cylc-config/00-simple/section2.stdout +++ b/tests/functional/cylc-config/00-simple/section2.stdout @@ -1,377 +1,397 @@ [[root]] - completion = - platform = - inherit = - script = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution 
retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + completion = + platform = + inherit = + script = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical 
handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[directives]]] [[[outputs]]] [[[parameter environment templates]]] [[OPS]] script = echo "RUN: run-ops.sh" - completion = - platform = - inherit = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + completion = + platform = + inherit = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - 
submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[directives]]] [[[outputs]]] [[[parameter environment templates]]] [[VAR]] script = echo "RUN: run-var.sh" - completion = - platform = - inherit = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + completion = + platform = + inherit = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - 
speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[directives]]] [[[outputs]]] [[[parameter environment templates]]] [[SERIAL]] - completion = - platform = - inherit = - script = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work 
sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + completion = + platform = + inherit = + script = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = serial [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + 
succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] [[PARALLEL]] - completion = - platform = - inherit = - script = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + completion = + platform = + inherit = + script = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = parallel [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays 
= [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -379,75 +399,79 @@ script = echo "RUN: run-ops.sh" inherit = OPS, SERIAL completion = succeeded or failed - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = serial [[[meta]]] - title = - description = - URL 
= + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -455,75 +479,79 @@ script = echo 
"RUN: run-ops.sh" inherit = OPS, SERIAL completion = succeeded or failed - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = serial [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = 
+ mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -531,75 +559,79 @@ script = echo "RUN: run-ops.sh" inherit = OPS, PARALLEL completion = succeeded or failed - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = parallel [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - 
retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -607,75 +639,79 @@ script = echo "RUN: run-ops.sh" inherit = OPS, PARALLEL completion = succeeded or failed - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution 
time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = parallel [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + 
interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -683,75 +719,79 @@ script = echo "RUN: run-var.sh" inherit = VAR, SERIAL completion = succeeded - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = serial [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry 
handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -759,75 +799,79 @@ script = echo "RUN: run-var.sh" inherit = VAR, SERIAL completion = succeeded - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = serial [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - 
batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -835,75 +879,79 @@ script = echo "RUN: run-var.sh" inherit = VAR, PARALLEL completion = succeeded - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script 
= + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = parallel [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S - speedup factor = + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - 
from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] @@ -911,75 +959,79 @@ script = echo "RUN: run-var.sh" inherit = VAR, PARALLEL completion = succeeded - platform = - init-script = - env-script = - err-script = - exit-script = - pre-script = - post-script = - work sub-directory = - execution polling intervals = - execution retry delays = - execution time limit = - submission polling intervals = - submission retry delays = + platform = + init-script = + env-script = + err-script = + exit-script = + pre-script = + post-script = + work sub-directory = + execution polling intervals = + execution retry delays = + execution time limit = + submission polling intervals = + submission retry delays = + run mode = workflow [[[directives]]] job_type = parallel [[[meta]]] - title = - description = - URL = + title = + description = + URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] - default run length = PT10S - speedup factor = + default run time = PT10S + speedup factor = time limit buffer = PT30S - fail cycle points = + fail cycle points = fail try 1 only = True disable task event handlers = True [[[environment filter]]] - include = - exclude = + include = + exclude = [[[job]]] - batch system = - batch submit command template = + batch system = + batch submit command template = [[[remote]]] - host = - owner = - retrieve job logs = - retrieve job logs max size = - retrieve job logs retry delays = + host = + owner = + retrieve job logs = + retrieve job logs max size = + retrieve job logs retry delays = [[[events]]] - execution timeout = - handlers = - handler events = - handler retry delays = - mail events = - submission timeout = - expired handlers = - late offset = - late handlers = - submitted handlers = - 
started handlers = - succeeded handlers = - failed handlers = - submission failed handlers = - warning handlers = - critical handlers = - retry handlers = - submission retry handlers = - execution timeout handlers = - submission timeout handlers = - custom handlers = + execution timeout = + handlers = + handler events = + handler retry delays = + mail events = + submission timeout = + expired handlers = + late offset = + late handlers = + submitted handlers = + started handlers = + succeeded handlers = + failed handlers = + submission failed handlers = + warning handlers = + critical handlers = + retry handlers = + submission retry handlers = + execution timeout handlers = + submission timeout handlers = + custom handlers = [[[mail]]] - from = - to = + from = + to = [[[workflow state polling]]] - interval = - max-polls = - message = - alt-cylc-run-dir = - verbose mode = + interval = + max-polls = + message = + alt-cylc-run-dir = + verbose mode = [[[environment]]] [[[outputs]]] [[[parameter environment templates]]] diff --git a/tests/functional/cylc-kill/03-simulation/flow.cylc b/tests/functional/cylc-kill/03-simulation/flow.cylc index 03b6249e962..2a0a288a040 100644 --- a/tests/functional/cylc-kill/03-simulation/flow.cylc +++ b/tests/functional/cylc-kill/03-simulation/flow.cylc @@ -8,5 +8,5 @@ [runtime] [[root]] [[[simulation]]] - default run length = PT30S + default run time = PT30S [[foo]] diff --git a/tests/functional/cylc-set/09-set-skip.t b/tests/functional/cylc-set/09-set-skip.t new file mode 100644 index 00000000000..dd314283700 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip.t @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- +# +# Skip Mode proposal example: +# https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md +# The cylc set --out option should accept the skip value +# which should set the outputs defined in +# [runtime][][skip]outputs. + +. "$(dirname "$0")/test_header" +set_test_number 2 +reftest +exit diff --git a/tests/functional/cylc-set/09-set-skip/flow.cylc b/tests/functional/cylc-set/09-set-skip/flow.cylc new file mode 100644 index 00000000000..ef74c362773 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip/flow.cylc @@ -0,0 +1,50 @@ +[meta] + test_description = """ + Test that cylc set --out skip satisfies + all outputs which are required by the graph. + """ + proposal url = https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + +[scheduler] + allow implicit tasks = true + [[events]] + expected task failures = 1/bar + +[scheduling] + [[graph]] + R1 = """ + # Optional out not created by set --out skip + foo:no? => not_this_task? 
+ + # set --out skip creates required, started, submitted + # and succeeded (unless failed is set): + foo:yes => require_this_task + foo:submitted => submitted_emitted + foo:succeeded => succeeded_emitted + foo:started => skip_foo + + # set --out skip creates failed if that is required + # by skip mode settings: + bar:started => skip_bar + bar:failed? => bar_failed + """ + +[runtime] + [[foo]] + script = sleep 100 + [[[skip]]] + outputs = yes + [[[outputs]]] + no = 'Don\'t require this task' + yes = 'Require this task' + + [[bar]] + script = sleep 100 + [[[skip]]] + outputs = failed + + [[skip_foo]] + script = cylc set ${CYLC_WORKFLOW_ID}//1/foo --out skip + + [[skip_bar]] + script = cylc set ${CYLC_WORKFLOW_ID}//1/bar --out skip diff --git a/tests/functional/cylc-set/09-set-skip/reference.log b/tests/functional/cylc-set/09-set-skip/reference.log new file mode 100644 index 00000000000..6e7b636f540 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip/reference.log @@ -0,0 +1,8 @@ +1/bar -triggered off [] in flow 1 +1/foo -triggered off [] in flow 1 +1/submitted_emitted -triggered off ['1/foo'] in flow 1 +1/skip_bar -triggered off ['1/bar'] in flow 1 +1/skip_foo -triggered off ['1/foo'] in flow 1 +1/succeeded_emitted -triggered off ['1/foo'] in flow 1 +1/bar_failed -triggered off ['1/bar'] in flow 1 +1/require_this_task -triggered off ['1/foo'] in flow 1 diff --git a/tests/functional/modes/01-dummy.t b/tests/functional/run_modes/01-dummy.t similarity index 100% rename from tests/functional/modes/01-dummy.t rename to tests/functional/run_modes/01-dummy.t diff --git a/tests/functional/modes/01-dummy/flow.cylc b/tests/functional/run_modes/01-dummy/flow.cylc similarity index 100% rename from tests/functional/modes/01-dummy/flow.cylc rename to tests/functional/run_modes/01-dummy/flow.cylc diff --git a/tests/functional/modes/01-dummy/reference.log b/tests/functional/run_modes/01-dummy/reference.log similarity index 100% rename from 
tests/functional/modes/01-dummy/reference.log rename to tests/functional/run_modes/01-dummy/reference.log diff --git a/tests/functional/modes/02-dummy-message-outputs.t b/tests/functional/run_modes/02-dummy-message-outputs.t similarity index 100% rename from tests/functional/modes/02-dummy-message-outputs.t rename to tests/functional/run_modes/02-dummy-message-outputs.t diff --git a/tests/functional/modes/02-dummy-message-outputs/flow.cylc b/tests/functional/run_modes/02-dummy-message-outputs/flow.cylc similarity index 92% rename from tests/functional/modes/02-dummy-message-outputs/flow.cylc rename to tests/functional/run_modes/02-dummy-message-outputs/flow.cylc index 6b3817508a2..17db8085abd 100644 --- a/tests/functional/modes/02-dummy-message-outputs/flow.cylc +++ b/tests/functional/run_modes/02-dummy-message-outputs/flow.cylc @@ -12,7 +12,7 @@ [runtime] [[root]] [[[simulation]]] - default run length = PT0S + default run time = PT0S time limit buffer = PT1M [[bar]] [[foo]] diff --git a/tests/functional/modes/02-dummy-message-outputs/reference.log b/tests/functional/run_modes/02-dummy-message-outputs/reference.log similarity index 100% rename from tests/functional/modes/02-dummy-message-outputs/reference.log rename to tests/functional/run_modes/02-dummy-message-outputs/reference.log diff --git a/tests/functional/modes/03-simulation.t b/tests/functional/run_modes/03-simulation.t similarity index 100% rename from tests/functional/modes/03-simulation.t rename to tests/functional/run_modes/03-simulation.t diff --git a/tests/functional/modes/03-simulation/flow.cylc b/tests/functional/run_modes/03-simulation/flow.cylc similarity index 100% rename from tests/functional/modes/03-simulation/flow.cylc rename to tests/functional/run_modes/03-simulation/flow.cylc diff --git a/tests/functional/modes/03-simulation/reference.log b/tests/functional/run_modes/03-simulation/reference.log similarity index 100% rename from tests/functional/modes/03-simulation/reference.log rename to 
tests/functional/run_modes/03-simulation/reference.log diff --git a/tests/functional/modes/04-simulation-runtime.t b/tests/functional/run_modes/04-simulation-runtime.t similarity index 100% rename from tests/functional/modes/04-simulation-runtime.t rename to tests/functional/run_modes/04-simulation-runtime.t diff --git a/tests/functional/modes/04-simulation-runtime/flow.cylc b/tests/functional/run_modes/04-simulation-runtime/flow.cylc similarity index 100% rename from tests/functional/modes/04-simulation-runtime/flow.cylc rename to tests/functional/run_modes/04-simulation-runtime/flow.cylc diff --git a/tests/functional/modes/04-simulation-runtime/reference.log b/tests/functional/run_modes/04-simulation-runtime/reference.log similarity index 100% rename from tests/functional/modes/04-simulation-runtime/reference.log rename to tests/functional/run_modes/04-simulation-runtime/reference.log diff --git a/tests/functional/modes/05-sim-trigger.t b/tests/functional/run_modes/05-sim-trigger.t similarity index 100% rename from tests/functional/modes/05-sim-trigger.t rename to tests/functional/run_modes/05-sim-trigger.t diff --git a/tests/functional/modes/05-sim-trigger/flow.cylc b/tests/functional/run_modes/05-sim-trigger/flow.cylc similarity index 84% rename from tests/functional/modes/05-sim-trigger/flow.cylc rename to tests/functional/run_modes/05-sim-trigger/flow.cylc index 2220e958e00..be80287eca2 100644 --- a/tests/functional/modes/05-sim-trigger/flow.cylc +++ b/tests/functional/run_modes/05-sim-trigger/flow.cylc @@ -5,7 +5,7 @@ [runtime] [[root, bar]] [[[simulation]]] - default run length = PT0S + default run time = PT0S [[fail_fail_fail]] [[[simulation]]] fail cycle points = all diff --git a/tests/functional/modes/05-sim-trigger/reference.log b/tests/functional/run_modes/05-sim-trigger/reference.log similarity index 100% rename from tests/functional/modes/05-sim-trigger/reference.log rename to tests/functional/run_modes/05-sim-trigger/reference.log diff --git 
a/tests/functional/run_modes/06-run-mode-overrides.t b/tests/functional/run_modes/06-run-mode-overrides.t new file mode 100644 index 00000000000..2e90deaa460 --- /dev/null +++ b/tests/functional/run_modes/06-run-mode-overrides.t @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# Test that broadcasting a change in [runtime][]run mode +# Leads to the next submission from that task to be in the updated +# mode. + +. 
"$(dirname "$0")/test_header" +set_test_number 15 + +install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" +run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" +workflow_run_ok "${TEST_NAME_BASE}-run" \ + cylc play "${WORKFLOW_NAME}" \ + --no-detach + +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" + +# Ghost modes do not leave log folders: +for MODE in simulation skip; do + run_fail "${TEST_NAME_BASE}-no-${MODE}-task-folder" ls "${JOB_LOGS}/${MODE}_" +done + +# Live modes leave log folders: +for MODE in default live dummy; do + run_ok "${TEST_NAME_BASE}-${MODE}-task-folder" ls "${JOB_LOGS}/${MODE}_" +done + +# Default defaults to live, and live is live: +for MODE in default live; do + named_grep_ok "${TEST_NAME_BASE}-default-task-live" "===.*===" "${JOB_LOGS}/${MODE}_/NN/job.out" +done + +# Dummy produces a job.out, containing dummy message: +named_grep_ok "${TEST_NAME_BASE}-default-task-live" "dummy job succeed" "${JOB_LOGS}/dummy_/NN/job.out" + +purge + +# Do it again with a workflow in simulation. 
+install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" +run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" +workflow_run_ok "${TEST_NAME_BASE}-run" \ + cylc play "${WORKFLOW_NAME}" \ + --no-detach \ + --mode simulation + +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" + +# Live modes leave log folders: +for MODE in live dummy; do + run_ok "${TEST_NAME_BASE}-${MODE}-task-folder" ls "${JOB_LOGS}/${MODE}_" +done + +# Ghost modes do not leave log folders: +run_fail "${TEST_NAME_BASE}-no-default-task-folder" ls "${JOB_LOGS}/default_" + +purge +exit 0 diff --git a/tests/functional/run_modes/06-run-mode-overrides/flow.cylc b/tests/functional/run_modes/06-run-mode-overrides/flow.cylc new file mode 100644 index 00000000000..b7693232149 --- /dev/null +++ b/tests/functional/run_modes/06-run-mode-overrides/flow.cylc @@ -0,0 +1,29 @@ +[scheduler] + cycle point format = %Y + +[scheduling] + initial cycle point = 1000 + final cycle point = 1000 + [[graph]] + P1Y = """ + default_ + live_ + dummy_ + simulation_ + skip_ + """ + +[runtime] + [[root]] + script = echo "=== this task ran in live mode ===" + [[[simulation]]] + default run length = PT0S + [[default_]] + [[live_]] + run mode = live + [[dummy_]] + run mode = dummy + [[simulation_]] + run mode = simulation + [[skip_]] + run mode = skip diff --git a/tests/functional/modes/test_header b/tests/functional/run_modes/test_header similarity index 100% rename from tests/functional/modes/test_header rename to tests/functional/run_modes/test_header diff --git a/tests/integration/run_modes/test_mode_overrides.py b/tests/integration/run_modes/test_mode_overrides.py new file mode 100644 index 00000000000..209837ebb7e --- /dev/null +++ b/tests/integration/run_modes/test_mode_overrides.py @@ -0,0 +1,109 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Test that using [runtime][TASK]run mode works in each mode. + +TODO: This is pretty much a functional test and +probably ought to be labelled as such, but uses the +integration test framework. +""" + +import pytest + + +@pytest.mark.parametrize( + 'workflow_run_mode', [('live'), ('simulation'), ('dummy')]) #, ('skip')]) +async def test_run_mode_override( + workflow_run_mode, flow, scheduler, run, complete, log_filter +): + """Test that ``[runtime][TASK]run mode`` overrides workflow modes. + + Can only be run for tasks which run in ghost modes. + """ + default_ = ( + '\ndefault_' if workflow_run_mode in ['simulation', 'skip'] else '') + + cfg = { + "scheduler": {"cycle point format": "%Y"}, + "scheduling": { + "initial cycle point": "1000", + "final cycle point": "1000", + "graph": {"P1Y": f"sim_\nskip_{default_}"}}, + "runtime": { + "sim_": { + "run mode": "simulation", + 'simulation': {'default run length': 'PT0S'} + }, + "skip_": {"run mode": "skip"}, + } + } + id_ = flow(cfg) + schd = scheduler(id_, run_mode=workflow_run_mode, paused_start=False) + expect = ('[1000/sim_] run mode set by task settings to: simulation mode.') + + async with run(schd) as log: + await complete(schd) + if workflow_run_mode == 'simulation': + # No message in simulation mode. 
+ assert not log_filter(log, contains=expect) + else: + assert log_filter(log, contains=expect) + + +@pytest.mark.parametrize('mode', (('skip'), ('simulation'), ('dummy'))) +async def test_force_trigger_does_not_override_run_mode( + flow, + scheduler, + start, + mode, +): + """Force-triggering a task will not override the run mode. + + Tasks with run mode = skip will continue to abide by + the is_held flag as normal. + + Taken from spec at + https://github.com/cylc/cylc-admin/blob/master/ + docs/proposal-skip-mode.md#proposal + """ + wid = flow({ + 'scheduling': {'graph': {'R1': 'foo'}}, + 'runtime': {'foo': {'run mode': mode}} + }) + schd = scheduler(wid) + async with start(schd): + # Check that task isn't held at first + foo = schd.pool.get_tasks()[0] + assert foo.state.is_held is False + + # Hold task, check that it's held: + schd.pool.hold_tasks('1/foo') + assert foo.state.is_held is True + + # Trigger task, check that it's _still_ held: + schd.pool.force_trigger_tasks('1/foo', [1]) + assert foo.state.is_held is True + + # run_mode will always be simulation from test + # workflow before submit routine... + assert foo.tdef.run_mode == 'simulation' + + # ... but job submission will always change this to the correct mode: + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [foo], + schd.server.curve_auth, + schd.server.client_pub_key_dir) + assert foo.tdef.run_mode == mode diff --git a/tests/integration/run_modes/test_nonlive.py b/tests/integration/run_modes/test_nonlive.py new file mode 100644 index 00000000000..db5222fd5d6 --- /dev/null +++ b/tests/integration/run_modes/test_nonlive.py @@ -0,0 +1,110 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# Define here to ensure test doesn't just mirror code: +from typing import Any, Dict + + +KGO = { + 'live': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': None, + 'run_signal': None, + 'run_status': None, + 'platform_name': 'localhost', + 'job_runner_name': 'background', + 'job_id': None}, + 'simulation': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': 0, + 'run_signal': None, + 'run_status': None, + 'platform_name': 'simulation', + 'job_runner_name': 'simulation', + 'job_id': None}, + 'skip': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': 0, + 'run_signal': None, + 'run_status': 0, + 'platform_name': 'skip', + 'job_runner_name': 'simulation', + 'job_id': None}, + 'dummy': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': None, + 'run_signal': None, + 'run_status': None, + 'platform_name': 'localhost', + 'job_runner_name': 'background', + 'job_id': None}, +} + + +def not_time(data: Dict[str, Any]): + """Filter out fields containing times to reduce risk of + flakiness""" + return {k: v for k, v in data.items() if 'time' not in k} + + +async def test_task_jobs(flow, scheduler, start): + """Ensure that task job data is added to the database correctly + for each run mode. 
+ """ + schd = scheduler(flow({ + 'scheduling': {'graph': { + 'R1': '&'.join(KGO)}}, + 'runtime': { + mode: {'run mode': mode} for mode in KGO} + })) + async with start(schd): + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + schd.workflow_db_mgr.process_queued_ops() + + for mode, kgo in KGO.items(): + taskdata = not_time( + schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) + assert taskdata == kgo, ( + f'Mode {mode}: incorrect db entries.') + + schd.pool.set_prereqs_and_outputs('*', ['failed'], [], []) + + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + schd.workflow_db_mgr.process_queued_ops() + + for mode, kgo in KGO.items(): + taskdata = not_time( + schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) + assert taskdata == kgo, ( + f'Mode {mode}: incorrect db entries.') diff --git a/tests/integration/test_simulation.py b/tests/integration/run_modes/test_simulation.py similarity index 90% rename from tests/integration/test_simulation.py rename to tests/integration/run_modes/test_simulation.py index c7e1b42fe27..03006220f2c 100644 --- a/tests/integration/test_simulation.py +++ b/tests/integration/run_modes/test_simulation.py @@ -20,7 +20,7 @@ from cylc.flow import commands from cylc.flow.cycling.iso8601 import ISO8601Point -from cylc.flow.simulation import sim_time_check +from cylc.flow.run_modes.simulation import sim_time_check @pytest.fixture @@ -28,7 +28,8 @@ def monkeytime(monkeypatch): """Convenience function monkeypatching time.""" def _inner(time_: int): monkeypatch.setattr('cylc.flow.task_job_mgr.time', lambda: time_) - monkeypatch.setattr('cylc.flow.simulation.time', lambda: time_) + monkeypatch.setattr( + 'cylc.flow.run_modes.simulation.time', lambda: time_) return _inner @@ -42,8 +43,8 @@ def _run_simjob(schd, point, task): itask = schd.pool.get_task(point, 
task) itask.state.is_queued = False monkeytime(0) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') monkeytime(itask.mode_settings.timeout + 1) # Run Time Check @@ -150,8 +151,8 @@ def test_fail_once(sim_time_check_setup, itask, point, results, monkeypatch): for i, result in enumerate(results): itask.try_timers['execution-retry'].num = i - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert itask.mode_settings.sim_task_fails is result @@ -170,11 +171,11 @@ def test_task_finishes(sim_time_check_setup, monkeytime, caplog): fail_all_1066 = schd.pool.get_task(ISO8601Point('1066'), 'fail_all') fail_all_1066.state.status = 'running' fail_all_1066.state.is_queued = False - schd.task_job_mgr._simulation_submit_task_jobs( - [fail_all_1066], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [fail_all_1066], schd.workflow, 'simulation') # For the purpose of the test delete the started time set by - # _simulation_submit_task_jobs. + # _nonlive_submit_task_jobs. 
fail_all_1066.summary['started_time'] = 0 # Before simulation time is up: @@ -200,8 +201,8 @@ def test_task_sped_up(sim_time_check_setup, monkeytime): # Run the job submission method: monkeytime(0) - schd.task_job_mgr._simulation_submit_task_jobs( - [fast_forward_1066], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [fast_forward_1066], schd.workflow, 'simulation') fast_forward_1066.state.is_queued = False result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') @@ -254,8 +255,8 @@ async def test_settings_restart( async with start(schd): og_timeouts = {} for itask in schd.pool.get_tasks(): - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') og_timeouts[itask.identity] = itask.mode_settings.timeout @@ -379,8 +380,8 @@ async def test_settings_broadcast( itask.state.is_queued = False # Submit the first - the sim task will fail: - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert itask.mode_settings.sim_task_fails is True # Let task finish. 
@@ -398,14 +399,14 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': ''} }]) # Submit again - result is different: - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert itask.mode_settings.sim_task_fails is False # Assert Clearing the broadcast works schd.broadcast_mgr.clear_broadcast() - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert itask.mode_settings.sim_task_fails is True # Assert that list of broadcasts doesn't change if we submit @@ -415,18 +416,22 @@ async def test_settings_broadcast( ['1066'], ['one'], [{ 'simulation': {'fail cycle points': 'higadfuhasgiurguj'} }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert ( 'Invalid ISO 8601 date representation: higadfuhasgiurguj' in log.messages[-1]) + # Check that the invalid broadcast hasn't + # changed the itask sim mode settings: + assert itask.mode_settings.sim_task_fails is True + schd.broadcast_mgr.put_broadcast( ['1066'], ['one'], [{ 'simulation': {'fail cycle points': '1'} }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert ( 'Invalid ISO 8601 date representation: 1' in log.messages[-1]) @@ -437,8 +442,8 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': '1945, 1977, 1066'}, 'execution retry delays': '3*PT2S' }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') assert itask.mode_settings.sim_task_fails is True assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 
2.0] # n.b. rtconfig should remain unchanged, lest we cancel broadcasts: diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index c75797e9cbb..eba65929bc5 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -16,7 +16,9 @@ import logging from pathlib import Path +import re import sqlite3 +from textwrap import dedent from typing import Any import pytest @@ -596,25 +598,44 @@ def _inner(*args, **kwargs): assert get_platforms(glbl_cfg()) == {'localhost', 'foo', 'bar'} -def test_validate_run_mode(flow: Fixture, validate: Fixture): - """Test that Cylc validate will only check simulation mode settings - if validate --mode simulation or dummy. - - Discovered in: - https://github.com/cylc/cylc-flow/pull/6213#issuecomment-2225365825 +def test_nonlive_mode_validation(flow, validate, caplog): + """Nonlive tasks return a warning at validation. """ + msg1 = dedent( + ''' The following tasks are set to run in non-live mode: + simulation mode: + * simulation + dummy mode: + * dummy''') + wid = flow({ - 'scheduling': {'graph': {'R1': 'mytask'}}, - 'runtime': {'mytask': {'simulation': {'fail cycle points': 'alll'}}} + 'scheduling': { + 'graph': { + 'R1': 'live => skip => simulation => dummy => default' + } + }, + 'runtime': { + 'default': {}, + 'live': {'run mode': 'live'}, + 'simulation': {'run mode': 'simulation'}, + 'dummy': {'run mode': 'dummy'}, + 'skip': { + 'run mode': 'skip', + 'skip': {'outputs': 'started, submitted'} + }, + }, }) - # It's fine with run mode live validate(wid) + assert msg1 in caplog.messages - # It fails with run mode simulation: - with pytest.raises(PointParsingError, match='Incompatible value'): - validate(wid, run_mode='simulation') - # It fails with run mode dummy: - with pytest.raises(PointParsingError, match='Incompatible value'): - validate(wid, run_mode='dummy') +@pytest.mark.parametrize('mode', (('simulation'), ('skip'), ('dummy'))) +def test_nonlive_mode_forbidden_as_outputs(flow, 
validate, mode): + """Run mode names are forbidden as task output names.""" + wid = flow({ + 'scheduling': {'graph': {'R1': 'task'}}, + 'runtime': {'task': {'outputs': {mode: f'message for {mode}'}}} + }) + with pytest.raises(WorkflowConfigError, match=f'message for {mode}'): + validate(wid) diff --git a/tests/integration/test_task_events_mgr.py b/tests/integration/test_task_events_mgr.py index 7ac12274d7b..08ed816414d 100644 --- a/tests/integration/test_task_events_mgr.py +++ b/tests/integration/test_task_events_mgr.py @@ -152,7 +152,7 @@ async def test__always_insert_task_job( schd.pool.get_tasks(), schd.server.curve_auth, schd.server.client_pub_key_dir, - is_simulation=False + run_mode='live' ) # Both tasks are in a waiting state: diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py index beba9075bd3..8d53a4b9192 100644 --- a/tests/integration/test_task_pool.py +++ b/tests/integration/test_task_pool.py @@ -644,7 +644,8 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'Artificially satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -672,7 +673,8 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'Artificially satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, ], id='removed' @@ -767,7 +769,8 @@ async def test_restart_prereqs( ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'Artificially satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -795,7 +798,8 @@ async def test_restart_prereqs( ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'Artificially satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, ], 
id='removed' @@ -893,7 +897,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c.state.prerequisites for key, satisfied in prereq.satisfied.items() ) == [ - ('1', 'a', 'succeeded', 'satisfied naturally'), + ('1', 'a', 'succeeded', 'Artificially satisfied by simulation mode'), ('1', 'b', 'succeeded', 'satisfied from database') ] @@ -910,7 +914,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c_prereqs for condition in prereq.conditions ) == [ - ('1/a', True, 'satisfied naturally'), + ('1/a', True, 'Artificially satisfied by simulation mode'), ('1/b', True, 'satisfied from database'), ] @@ -1584,6 +1588,75 @@ async def test_set_outputs_future( assert log_filter(log, contains="completed output y") +async def test_set_outputs_from_skip_settings( + flow, + scheduler, + start, + log_filter, + validate +): + """Check working of ``cylc set --out=skip``: + + 1. --out=skip can be used to set all required outputs. + 2. --out=skip,other_output can be used to set other outputs. + + """ + id_ = flow( + { + 'scheduler': { + 'allow implicit tasks': 'True', + }, + 'scheduling': { + 'cycling mode': 'integer', + 'initial cycle point': 1, + 'final cycle point': 2, + 'graph': { + 'P1': """ + a => after_asucceeded + a:x => after_ax + a:y? => after_ay + """ + } + }, + 'runtime': { + 'a': { + 'outputs': { + 'x': 'xebec', + 'y': 'yacht' + }, + 'skip': {'outputs': 'x'} + } + } + } + ) + validate(id_) + schd = scheduler(id_) + + async with start(schd) as log: + + # it should start up with just tasks a: + assert pool_get_task_ids(schd.pool) == ['1/a', '2/a'] + + # setting 1/a output to skip should set output x, but not + # y (because y is optional). 
+ schd.pool.set_prereqs_and_outputs( + ['1/a'], ['skip'], None, ['all']) + assert (pool_get_task_ids(schd.pool) == [ + '1/after_asucceeded', + '1/after_ax', + '2/a']) + + # You should be able to set skip as part of a list of outputs: + schd.pool.set_prereqs_and_outputs( + ['2/a'], ['skip', 'y'], None, ['all']) + assert (pool_get_task_ids(schd.pool) == [ + '1/after_asucceeded', + '1/after_ax', + '2/after_asucceeded', + '2/after_ax', + '2/after_ay']) + + async def test_prereq_satisfaction( flow, scheduler, @@ -2085,7 +2158,7 @@ async def test_set_future_flow(flow, scheduler, start, log_filter): # set b:succeeded in flow 2 and check downstream spawning schd.pool.set_prereqs_and_outputs(['1/b'], prereqs=[], outputs=[], flow=[2]) assert schd.pool.get_task(IntegerPoint("1"), "c1") is None, '1/c1 (flow 2) should not be spawned after 1/b:succeeded' - assert schd.pool.get_task(IntegerPoint("1"), "c2") is not None, '1/c2 (flow 2) should be spawned after 1/b:succeeded' + assert schd.pool.get_task(IntegerPoint("1"), "c2") is not None, '1/c2 (flow 2) should be spawned after 1/b:succeeded' async def test_trigger_queue(one, run, db_select, complete): diff --git a/tests/integration/tui/screenshots/test_scheduler_logs.workflow-configuration-file.html b/tests/integration/tui/screenshots/test_scheduler_logs.workflow-configuration-file.html index c7ab1e925ec..cb655245f32 100644 --- a/tests/integration/tui/screenshots/test_scheduler_logs.workflow-configuration-file.html +++ b/tests/integration/tui/screenshots/test_scheduler_logs.workflow-configuration-file.html @@ -10,7 +10,7 @@ [[a]] [[root]] [[[simulation]]] - default run length = PT0S + default run length = PT0S [scheduler] allow implicit tasks = True diff --git a/tests/unit/run_modes/test_dummy.py b/tests/unit/run_modes/test_dummy.py new file mode 100644 index 00000000000..998c13767c9 --- /dev/null +++ b/tests/unit/run_modes/test_dummy.py @@ -0,0 +1,40 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. 
+# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Tests for utilities supporting dummy mode. +""" +import pytest +from cylc.flow.run_modes.dummy import build_dummy_script + + +@pytest.mark.parametrize( + 'fail_one_time_only', (True, False) +) +def test_build_dummy_script(fail_one_time_only): + rtc = { + 'outputs': {'foo': '1', 'bar': '2'}, + 'simulation': { + 'fail try 1 only': fail_one_time_only, + 'fail cycle points': '1', + } + } + result = build_dummy_script(rtc, 60) + assert result.split('\n') == [ + 'sleep 60', + "cylc message '1'", + "cylc message '2'", + f"cylc__job__dummy_result {str(fail_one_time_only).lower()}" + " 1 || exit 1" + ] diff --git a/tests/unit/run_modes/test_nonlive.py b/tests/unit/run_modes/test_nonlive.py new file mode 100644 index 00000000000..6377e315007 --- /dev/null +++ b/tests/unit/run_modes/test_nonlive.py @@ -0,0 +1,46 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Unit tests for utilities supporting all nonlive modes +""" + +from types import SimpleNamespace + +from cylc.flow.run_modes.nonlive import mode_validate_checks + + +def test_mode_validate_checks(monkeypatch, caplog): + """It warns us if we've set a task config to nonlive mode. + + (And not otherwise) + """ + taskdefs = { + f'{run_mode}_task': SimpleNamespace( + rtconfig={'run mode': run_mode}, + name=f'{run_mode}_task' + ) + for run_mode + in ['live', 'workflow', 'dummy', 'simulation', 'skip'] + } + + mode_validate_checks(taskdefs) + + message = caplog.messages[0] + + assert 'skip mode:\n * skip_task' not in message + assert 'simulation mode:\n * simulation_task' in message + assert 'dummy mode:\n * dummy_task' in message + assert ' live mode' not in message # Avoid matching "non-live mode" + assert 'workflow mode' not in message diff --git a/tests/unit/test_simulation.py b/tests/unit/run_modes/test_simulation.py similarity index 86% rename from tests/unit/test_simulation.py rename to tests/unit/run_modes/test_simulation.py index 920a872503a..109174c8b43 100644 --- a/tests/unit/test_simulation.py +++ b/tests/unit/run_modes/test_simulation.py @@ -20,9 +20,8 @@ from cylc.flow.cycling.integer import IntegerPoint from cylc.flow.cycling.iso8601 import ISO8601Point -from cylc.flow.simulation import ( +from cylc.flow.run_modes.simulation import ( parse_fail_cycle_points, - build_dummy_script, disable_platforms, get_simulated_run_len, sim_task_failed, @@ -56,27 +55,6 @@ def test_get_simulated_run_len( assert get_simulated_run_len(rtc) == 3600 -@pytest.mark.parametrize( - 'fail_one_time_only', (True, False) -) -def 
test_set_simulation_script(fail_one_time_only): - rtc = { - 'outputs': {'foo': '1', 'bar': '2'}, - 'simulation': { - 'fail try 1 only': fail_one_time_only, - 'fail cycle points': '1', - } - } - result = build_dummy_script(rtc, 60) - assert result.split('\n') == [ - 'sleep 60', - "cylc message '1'", - "cylc message '2'", - f"cylc__job__dummy_result {str(fail_one_time_only).lower()}" - " 1 || exit 1" - ] - - @pytest.mark.parametrize( 'rtc, expect', ( ({'platform': 'skarloey'}, 'localhost'), @@ -100,7 +78,7 @@ def test_disable_platforms(rtc, expect): def test_parse_fail_cycle_points(set_cycling_type): before = ['2', '4'] set_cycling_type() - assert parse_fail_cycle_points(before) == [ + assert parse_fail_cycle_points(before, ['']) == [ IntegerPoint(i) for i in before ] diff --git a/tests/unit/run_modes/test_skip.py b/tests/unit/run_modes/test_skip.py new file mode 100644 index 00000000000..1de6e79a493 --- /dev/null +++ b/tests/unit/run_modes/test_skip.py @@ -0,0 +1,105 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+"""Unit tests for utilities supporting skip modes +""" +import pytest +from pytest import param, raises +from types import SimpleNamespace + +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.run_modes.skip import check_task_skip_config, process_outputs + + +@pytest.mark.parametrize( + 'conf', + ( + param({}, id='no-skip-config'), + param({'skip': {'outputs': []}}, id='no-skip-outputs'), + param({'skip': {'outputs': ['foo1', 'failed']}}, id='ok-skip-outputs'), + ) +) +def test_good_check_task_skip_config(conf): + """It returns none if the problems this function checks are not present. + """ + tdef = SimpleNamespace(rtconfig=conf) + tdef.name = 'foo' + assert check_task_skip_config(tdef) is None + + +def test_raises_check_task_skip_config(): + """It raises an error if succeeded and failed are set. + """ + tdef = SimpleNamespace( + rtconfig={'skip': {'outputs': ['foo1', 'failed', 'succeeded']}} + ) + tdef.name = 'foo' + with raises(WorkflowConfigError, match='succeeded AND failed'): + check_task_skip_config(tdef) + + +@pytest.mark.parametrize( + 'outputs, required, expect', + ( + param([], [], ['succeeded'], id='implicit-succeded'), + param( + ['succeeded'], ['succeeded'], ['succeeded'], + id='explicit-succeded' + ), + param(['submitted'], [], ['succeeded'], id='only-1-submit'), + param( + ['foo', 'bar', 'baz', 'qux'], + ['bar', 'qux'], + ['bar', 'qux', 'succeeded'], + id='required-only' + ), + param( + ['foo', 'baz'], + ['bar', 'qux'], + ['succeeded'], + id='no-required' + ), + param( + ['failed'], + [], + ['failed'], + id='explicit-failed' + ), + ) +) +def test_process_outputs(outputs, required, expect): + """Check that skip outputs: + + 1. Doesn't send submitted twice. + 2. Sends every required output. + 3. If failed is set send failed + 4. If failed in not set send succeeded. + + n.b: The real process message function sends the TASK_OUTPUT_STARTED + message for free, so there is no reference to that here. 
+ """ + + + # Create a mocked up task-proxy: + itask = SimpleNamespace( + tdef=SimpleNamespace( + rtconfig={'skip': {'outputs': outputs}}), + state=SimpleNamespace( + outputs=SimpleNamespace( + iter_required_messages=lambda: iter(required), + _message_to_trigger={v: v for v in required} + ))) + + assert process_outputs(itask) == ['submitted'] + expect diff --git a/tests/unit/scripts/test_lint.py b/tests/unit/scripts/test_lint.py index 527a33bdfb5..dbb2e014071 100644 --- a/tests/unit/scripts/test_lint.py +++ b/tests/unit/scripts/test_lint.py @@ -181,7 +181,10 @@ [[[directives]]] -l walltime = 666 [[baz]] + run mode = skip platform = `no backticks` + [[[skip]]] + outputs = succeeded, failed """ + ( '\nscript = the quick brown fox jumps over the lazy dog until it becomes ' 'clear that this line is longer than the default 130 character limit.' diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 9cdcee89003..b830228103a 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -17,8 +17,8 @@ import os import sys from optparse import Values -from typing import Any, Callable, Dict, List, Optional, Tuple, Type -from pathlib import Path +from typing import ( + TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type) import pytest import logging from types import SimpleNamespace @@ -47,8 +47,9 @@ from cylc.flow.cycling.iso8601 import ISO8601Point - -Fixture = Any +if TYPE_CHECKING: + from pathlib import Path + Fixture = Any def _tmp_flow_config(tmp_run_dir: Callable): @@ -60,8 +61,8 @@ def _tmp_flow_config(tmp_run_dir: Callable): Returns the path to the flow file. 
""" - def __tmp_flow_config(id_: str, config: str) -> Path: - run_dir: Path = tmp_run_dir(id_) + def __tmp_flow_config(id_: str, config: str) -> 'Path': + run_dir: 'Path' = tmp_run_dir(id_) flow_file = run_dir / WorkflowFiles.FLOW_FILE flow_file.write_text(config) return flow_file @@ -82,7 +83,7 @@ class TestWorkflowConfig: """Test class for the Cylc WorkflowConfig object.""" def test_xfunction_imports( - self, mock_glbl_cfg: Fixture, tmp_path: Path): + self, mock_glbl_cfg: 'Fixture', tmp_path: 'Path'): """Test for a workflow configuration with valid xtriggers""" mock_glbl_cfg( 'cylc.flow.platforms.glbl_cfg', @@ -175,7 +176,8 @@ def test_xfunction_attribute_error(self, mock_glbl_cfg, tmp_path): with pytest.raises(XtriggerConfigError) as excinfo: WorkflowConfig(workflow="capybara_workflow", fpath=flow_file, options=SimpleNamespace()) - assert "module 'capybara' has no attribute 'capybara'" in str(excinfo.value) + assert "module 'capybara' has no attribute 'capybara'" in str( + excinfo.value) def test_xfunction_not_callable(self, mock_glbl_cfg, tmp_path): """Test for error when a xtrigger function is not callable.""" @@ -358,7 +360,7 @@ def test_process_icp( expected_icp: Optional[str], expected_opt_icp: Optional[str], expected_err: Optional[Tuple[Type[Exception], str]], - monkeypatch: pytest.MonkeyPatch, set_cycling_type: Fixture + monkeypatch: pytest.MonkeyPatch, set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_initial_cycle_point(). @@ -445,7 +447,7 @@ def test_process_startcp( starttask: Optional[str], expected: str, expected_err: Optional[Tuple[Type[Exception], str]], - monkeypatch: pytest.MonkeyPatch, set_cycling_type: Fixture + monkeypatch: pytest.MonkeyPatch, set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_start_cycle_point(). 
@@ -648,7 +650,7 @@ def test_process_fcp( options_fcp: Optional[str], expected_fcp: Optional[str], expected_err: Optional[Tuple[Type[Exception], str]], - set_cycling_type: Fixture + set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_final_cycle_point(). @@ -671,7 +673,7 @@ def test_process_fcp( initial_point=loader.get_point( scheduling_cfg['initial cycle point'] ).standardise(), - final_point = None, + final_point=None, options=SimpleNamespace(fcp=options_fcp), ) @@ -812,7 +814,7 @@ def test_stopcp_after_fcp( cycle point is handled correctly.""" caplog.set_level(logging.WARNING, CYLC_LOG) id_ = 'cassini' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] allow implicit tasks = True [scheduling] @@ -1366,7 +1368,7 @@ def test_implicit_tasks( """ # Setup id_ = 'rincewind' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] { f'allow implicit tasks = {allow_implicit_tasks}' @@ -1470,7 +1472,7 @@ def test_zero_interval( """Test that a zero-duration recurrence with >1 repetition gets an appropriate warning.""" id_ = 'ordinary' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] UTC mode = True allow implicit tasks = True @@ -1514,7 +1516,7 @@ def test_chain_expr( Note the order matters when "nominal" units (years, months) are used. 
""" id_ = 'osgiliath' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] UTC mode = True allow implicit tasks = True @@ -1693,7 +1695,7 @@ def test__warn_if_queues_have_implicit_tasks(caplog): ] ) def test_cylc_env_at_parsing( - tmp_path: Path, + tmp_path: 'Path', monkeypatch: pytest.MonkeyPatch, installed, run_dir, diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index e655c74b7bb..106f4f8e2de 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -15,11 +15,13 @@ # along with this program. If not, see . import pytest +from types import SimpleNamespace from cylc.flow.taskdef import TaskDef from cylc.flow.cycling.integer import IntegerSequence, IntegerPoint from cylc.flow.task_trigger import Dependency, TaskTrigger from cylc.flow.task_state import ( + RunMode, TaskState, TASK_STATUS_PREPARING, TASK_STATUS_SUBMIT_FAILED, @@ -119,3 +121,29 @@ def test_task_state_order(): assert not tstate.is_gt(TASK_STATUS_RUNNING) assert not tstate.is_gte(TASK_STATUS_RUNNING) + +@pytest.mark.parametrize( + 'itask_run_mode, disable_handlers, expect', + ( + ('live', True, False), + ('live', False, False), + ('dummy', True, False), + ('dummy', False, False), + ('simulation', True, True), + ('simulation', False, True), + ('skip', True, True), + ('skip', False, False), + ) +) +def test_disable_task_event_handlers(itask_run_mode, disable_handlers, expect): + """Conditions under which task event handlers should not be used. + """ + # Construct a fake itask object: + itask = SimpleNamespace( + tdef=SimpleNamespace( + run_mode=itask_run_mode, + rtconfig={ + 'skip': {'disable task event handlers': disable_handlers}}) + ) + # Check method: + assert RunMode.disable_task_event_handlers(itask) is expect