
Commit

Merge remote-tracking branch 'upstream/master' into 8.3.x-sync
MetRonnie committed Jul 23, 2024
2 parents f68e8b9 + b7f52ee commit 2b93514
Showing 69 changed files with 470 additions and 168 deletions.
4 changes: 2 additions & 2 deletions CHANGES.md
@@ -29,8 +29,6 @@ $ towncrier create <PR-number>.<break|feat|fix>.md --content "Short description"

### 🔧 Fixes

[#6178](https://github.com/cylc/cylc-flow/pull/6178) - Fix an issue where Tui could hang when closing.

[#6186](https://github.com/cylc/cylc-flow/pull/6186) - Fixed bug where using flow numbers with `cylc set` would not work correctly.

[#6200](https://github.com/cylc/cylc-flow/pull/6200) - Fixed bug where a stalled paused workflow would be incorrectly reported as running, not paused
@@ -47,6 +45,8 @@ $ towncrier create <PR-number>.<break|feat|fix>.md --content "Short description"

[#6176](https://github.com/cylc/cylc-flow/pull/6176) - Fix bug where jobs which fail to submit are not shown in GUI/TUI if submission retries are set.

[#6178](https://github.com/cylc/cylc-flow/pull/6178) - Fix an issue where Tui could hang when closing.

## __cylc-8.3.0 (Released 2024-06-18)__

### ⚠ Breaking Changes
Expand Down
1 change: 1 addition & 0 deletions changes.d/6137.feat.md
@@ -0,0 +1 @@
New Cylc lint rule: S014: Don't use job runner specific execution time limit directives, use execution time limit.
5 changes: 2 additions & 3 deletions cylc/flow/__init__.py
@@ -15,9 +15,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Set up the cylc environment."""

import os
import logging

import os

CYLC_LOG = 'cylc'

@@ -53,7 +52,7 @@ def environ_init():

environ_init()

__version__ = '8.3.4.dev'
__version__ = '8.4.0.dev'


def iter_entry_points(entry_point_name):
2 changes: 1 addition & 1 deletion cylc/flow/broadcast_report.py
@@ -72,7 +72,7 @@ def get_broadcast_change_iter(modified_settings, is_cancel=False):
value = setting
keys_str = ""
while isinstance(value, dict):
key, value = list(value.items())[0]
key, value = next(iter(value.items()))
if isinstance(value, dict):
keys_str += "[" + key + "]"
else:
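The hunk above swaps `list(value.items())[0]` for `next(iter(value.items()))`: both return the first key/value pair of a dict, but the latter stops after one item instead of materialising the whole item list. A minimal, self-contained sketch of the equivalence (the dict here is illustrative, not taken from the Cylc code):

```python
d = {"environment": {"FOO": "bar"}}

first_via_list = list(d.items())[0]     # builds every (key, value) pair, then indexes
first_via_iter = next(iter(d.items()))  # pulls only the first pair from the iterator

assert first_via_list == first_via_iter == ("environment", {"FOO": "bar"})
```

One behavioural difference worth noting: on an empty dict the first form raises `IndexError` while the second raises `StopIteration`.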
4 changes: 3 additions & 1 deletion cylc/flow/cfgspec/globalcfg.py
@@ -1311,7 +1311,7 @@ def default_for(
The means by which task progress messages are reported back to
the running workflow.
Options:
.. rubric:: Options:
zmq
Direct client-server TCP communication via network ports
@@ -1320,6 +1320,8 @@
ssh
Use non-interactive ssh for task communications
For more information, see :ref:`TaskComms`.
.. versionchanged:: 8.0.0
{REPLACES}``global.rc[hosts][<host>]task communication
9 changes: 5 additions & 4 deletions cylc/flow/clean.py
@@ -129,7 +129,7 @@ def _clean_check(opts: 'Values', id_: str, run_dir: Path) -> None:
except ContactFileExists as exc:
raise ServiceFileError(
f"Cannot clean running workflow {id_}.\n\n{exc}"
)
) from None


def init_clean(id_: str, opts: 'Values') -> None:
@@ -173,7 +173,7 @@ def init_clean(id_: str, opts: 'Values') -> None:
try:
platform_names = get_platforms_from_db(local_run_dir)
except ServiceFileError as exc:
raise ServiceFileError(f"Cannot clean {id_} - {exc}")
raise ServiceFileError(f"Cannot clean {id_} - {exc}") from None
except sqlite3.OperationalError as exc:
# something went wrong with the query
# e.g. the table/field we need isn't there
@@ -186,7 +186,7 @@ def init_clean(id_: str, opts: 'Values') -> None:
' local files (you may need to remove files on other'
' platforms manually).'
)
raise ServiceFileError(f"Cannot clean {id_} - {exc}")
raise ServiceFileError(f"Cannot clean {id_} - {exc}") from exc

if platform_names and platform_names != {'localhost'}:
remote_clean(
@@ -361,7 +361,8 @@ def remote_clean(
except PlatformLookupError as exc:
raise PlatformLookupError(
f"Cannot clean {id_} on remote platforms as the workflow database "
f"is out of date/inconsistent with the global config - {exc}")
f"is out of date/inconsistent with the global config - {exc}"
) from None

queue: Deque[RemoteCleanQueueTuple] = deque()
remote_clean_cmd = partial(
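Many of the hunks in this file and in the files that follow append `from None` or `from exc` to `raise` statements that convert a low-level exception into a user-facing Cylc error. A generic sketch of what the two suffixes change about the traceback (the error class and functions below are stand-ins, not Cylc code):

```python
class UserFacingError(Exception):
    """Stand-in for errors such as ServiceFileError or WorkflowConfigError."""


def parse_quietly(text: str) -> int:
    try:
        return int(text)
    except ValueError:
        # `from None` suppresses implicit exception chaining: the traceback
        # shows only UserFacingError, hiding the underlying ValueError.
        raise UserFacingError(f"invalid value: {text!r}") from None


def parse_with_cause(text: str) -> int:
    try:
        return int(text)
    except ValueError as exc:
        # `from exc` records the ValueError as the explicit cause: both
        # tracebacks are shown, joined by "...was the direct cause of...".
        raise UserFacingError(f"invalid value: {text!r}") from exc
```

A plain `raise UserFacingError(...)` with neither suffix still chains the exceptions implicitly ("During handling of the above exception, another exception occurred"), so the choice between the three forms controls how much of the original traceback the user sees.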
2 changes: 1 addition & 1 deletion cylc/flow/command_validation.py
@@ -70,7 +70,7 @@ def flow_opts(flows: List[str], flow_wait: bool) -> None:
try:
int(val)
except ValueError:
raise InputError(ERR_OPT_FLOW_VAL.format(val))
raise InputError(ERR_OPT_FLOW_VAL.format(val)) from None

if flow_wait and flows[0] in [FLOW_NEW, FLOW_NONE]:
raise InputError(ERR_OPT_FLOW_WAIT)
4 changes: 2 additions & 2 deletions cylc/flow/commands.py
@@ -211,7 +211,7 @@ async def stop(
try:
mode = StopMode(mode)
except ValueError:
raise CommandFailedError(f"Invalid stop mode: '{mode}'")
raise CommandFailedError(f"Invalid stop mode: '{mode}'") from None
schd._set_stop(mode)
if mode is StopMode.REQUEST_KILL:
schd.time_next_kill = time()
@@ -309,7 +309,7 @@ async def set_verbosity(schd: 'Scheduler', level: Union[int, str]):
lvl = int(level)
LOG.setLevel(lvl)
except (TypeError, ValueError) as exc:
raise CommandFailedError(exc)
raise CommandFailedError(exc) from None
cylc.flow.flags.verbosity = log_level_to_verbosity(lvl)


55 changes: 32 additions & 23 deletions cylc/flow/config.py
@@ -199,11 +199,11 @@ def interpolate_template(tmpl, params_dict):
try:
return tmpl % params_dict
except KeyError:
raise ParamExpandError('bad parameter')
raise ParamExpandError('bad parameter') from None
except TypeError:
raise ParamExpandError('wrong data type for parameter')
raise ParamExpandError('wrong data type for parameter') from None
except ValueError:
raise ParamExpandError('bad template syntax')
raise ParamExpandError('bad template syntax') from None


class WorkflowConfig:
@@ -480,8 +480,8 @@ def __init__(
get_interval(offset_string).standardise())
except IntervalParsingError:
raise WorkflowConfigError(
"Illegal %s spec: %s" % (
s_type, offset_string))
"Illegal %s spec: %s" % (s_type, offset_string)
) from None
extn = "(" + offset_string + ")"

# Replace family names with members.
@@ -709,7 +709,7 @@ def process_initial_cycle_point(self) -> None:
try:
icp = ingest_time(orig_icp, get_current_time_string())
except IsodatetimeError as exc:
raise WorkflowConfigError(str(exc))
raise WorkflowConfigError(str(exc)) from None
if orig_icp != icp:
# now/next()/previous() was used, need to store
# evaluated point in DB
@@ -761,7 +761,7 @@ def process_start_cycle_point(self) -> None:
for taskid in self.options.starttask
]
except ValueError as exc:
raise InputError(str(exc))
raise InputError(str(exc)) from None
self.start_point = min(
get_point(cycle).standardise()
for cycle in cycle_points if cycle
@@ -1114,7 +1114,7 @@ def _check_completion_expression(self, task_name: str, expr: str) -> None:
f'\n {expr}'
'\nThe "finished" output cannot be used in completion'
' expressions, use "succeeded or failed".'
)
) from None

for alt_qualifier, qualifier in ALT_QUALIFIERS.items():
_alt_compvar = trigger_to_completion_variable(alt_qualifier)
@@ -1125,21 +1125,21 @@ def _check_completion_expression(self, task_name: str, expr: str) -> None:
f'\n {expr}'
f'\nUse "{_compvar}" not "{_alt_compvar}" '
'in completion expressions.'
)
) from None

raise WorkflowConfigError(
# NOTE: str(exc) == "name 'x' is not defined" tested in
# tests/integration/test_optional_outputs.py
f'Error in [runtime][{task_name}]completion:'
f'\n{error}'
)
) from None
except Exception as exc: # includes InvalidCompletionExpression
# expression contains non-whitelisted syntax or any other error in
# the expression e.g. SyntaxError
raise WorkflowConfigError(
f'Error in [runtime][{task_name}]completion:'
f'\n{str(exc)}'
)
) from None

# ensure consistency between the graph and the completion expression
for compvar in (
@@ -1415,11 +1415,12 @@ def compute_family_tree(self):
c3_single.mro(name))
except RecursionError:
raise WorkflowConfigError(
"circular [runtime] inheritance?")
"circular [runtime] inheritance?"
) from None
except Exception as exc:
# catch inheritance errors
# TODO - specialise MRO exceptions
raise WorkflowConfigError(str(exc))
raise WorkflowConfigError(str(exc)) from None

for name in self.cfg['runtime']:
ancestors = self.runtime['linearized ancestors'][name]
@@ -1771,7 +1772,7 @@ def _check_task_event_handlers(self):
f' {taskdef.name}:'
f' {handler_template}:'
f' {repr(exc)}'
)
) from None

def _check_special_tasks(self):
"""Check declared special tasks are valid, and detect special
@@ -1878,7 +1879,9 @@ def generate_triggers(self, lexpression, left_nodes, right, seq,
try:
expr_list = listify(lexpression)
except SyntaxError:
raise WorkflowConfigError('Error in expression "%s"' % lexpression)
raise WorkflowConfigError(
'Error in expression "%s"' % lexpression
) from None

triggers = {}
xtrig_labels = set()
@@ -1955,7 +1958,9 @@ def generate_triggers(self, lexpression, left_nodes, right, seq,
xtrig = xtrigs[label]
except KeyError:
if label != 'wall_clock':
raise WorkflowConfigError(f"xtrigger not defined: {label}")
raise WorkflowConfigError(
f"xtrigger not defined: {label}"
) from None
else:
# Allow "@wall_clock" in graph as implicit zero-offset.
xtrig = SubFuncContext('wall_clock', 'wall_clock', [], {})
@@ -2289,7 +2294,7 @@ def load_graph(self):
msg += ' (final cycle point=%s)' % fcp
if isinstance(exc, CylcError):
msg += ' %s' % exc.args[0]
raise WorkflowConfigError(msg)
raise WorkflowConfigError(msg) from None
self.sequences.append(seq)
parser = GraphParser(
family_map,
@@ -2444,7 +2449,7 @@ def get_taskdef(
except TaskDefError as exc:
if orig_expr:
LOG.error(orig_expr)
raise WorkflowConfigError(str(exc))
raise WorkflowConfigError(str(exc)) from None
else:
# Record custom message outputs from [runtime].
messages = set(self.cfg['runtime'][name]['outputs'].values())
@@ -2465,14 +2470,14 @@ def get_taskdef(
f'Invalid task output "'
f'[runtime][{name}][outputs]'
f'{output} = {message}" - {msg}'
)
) from None
valid, msg = TaskMessageValidator.validate(message)
if not valid:
raise WorkflowConfigError(
f'Invalid task message "'
f'[runtime][{name}][outputs]'
f'{output} = {message}" - {msg}'
)
) from None
self.taskdefs[name].add_output(output, message)

return self.taskdefs[name]
@@ -2484,7 +2489,7 @@ def _get_taskdef(self, name: str) -> TaskDef:
try:
rtcfg = self.cfg['runtime'][name]
except KeyError:
raise WorkflowConfigError("Task not defined: %s" % name)
raise WorkflowConfigError("Task not defined: %s" % name) from None
# We may want to put in some handling for cases of changing the
# initial cycle via restart (accidentally or otherwise).

@@ -2576,7 +2581,9 @@ def process_metadata_urls(self):
'workflow': self.workflow,
}
except (KeyError, ValueError):
raise InputError(f'Invalid template [meta]URL: {url}')
raise InputError(
f'Invalid template [meta]URL: {url}'
) from None
else:
LOG.warning(
'Detected deprecated template variables in [meta]URL.'
@@ -2612,7 +2619,9 @@ def process_metadata_urls(self):
'task': name,
}
except (KeyError, ValueError):
raise InputError(f'Invalid template [meta]URL: {url}')
raise InputError(
f'Invalid template [meta]URL: {url}'
) from None
else:
LOG.warning(
'Detected deprecated template variables in'
2 changes: 1 addition & 1 deletion cylc/flow/cycling/integer.py
@@ -150,7 +150,7 @@ def standardise(self):
try:
self.value = str(int(self))
except (TypeError, ValueError) as exc:
raise PointParsingError(type(self), self.value, exc)
raise PointParsingError(type(self), self.value, exc) from None
return self

def __int__(self):
6 changes: 3 additions & 3 deletions cylc/flow/cycling/iso8601.py
@@ -102,7 +102,7 @@ def standardise(self):
WorkflowSpecifics.NUM_EXPANDED_YEAR_DIGITS)
else:
message = str(exc)
raise PointParsingError(type(self), self.value, message)
raise PointParsingError(type(self), self.value, message) from None
return self

def sub(self, other):
@@ -176,7 +176,7 @@ def standardise(self):
try:
self.value = str(interval_parse(self.value))
except IsodatetimeError:
raise IntervalParsingError(type(self), self.value)
raise IntervalParsingError(type(self), self.value) from None
return self

def add(self, other):
@@ -782,7 +782,7 @@ def prev_next(
raise WorkflowConfigError(
f'Invalid offset: {my_time}:'
f' Offset lists are semicolon separated, try {suggest}'
)
) from None

timepoints.append(parsed_point + now)

6 changes: 3 additions & 3 deletions cylc/flow/dbstatecheck.py
@@ -78,15 +78,15 @@ def __init__(self, rund, workflow, db_path=None):
try:
self.db_point_fmt = self._get_db_point_format()
self.c7_back_compat_mode = False
except sqlite3.OperationalError as exc:
except sqlite3.OperationalError:
# BACK COMPAT: Cylc 7 DB (see method below).
try:
self.db_point_fmt = self._get_db_point_format_compat()
self.c7_back_compat_mode = True
except sqlite3.OperationalError:
with suppress(Exception):
self.conn.close()
raise exc # original error
raise

def __enter__(self):
return self
@@ -137,7 +137,7 @@ def adjust_point_to_db(self, cycle, offset):
raise InputError(
f'Cycle point "{cycle}" is not compatible'
f' with DB point format "{self.db_point_fmt}"'
)
) from None
return cycle

@staticmethod
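The `dbstatecheck.py` hunk above also drops the `as exc` capture and replaces `raise exc` with a bare `raise`. A bare `raise` inside an `except` block re-raises the exception currently being handled, keeping its original traceback, while the earlier failure remains attached as its implicit `__context__`. A minimal sketch of the same shape (the helper callables and connection are placeholders, not the Cylc API):

```python
import sqlite3
from contextlib import suppress


def detect_format(query_new, query_compat, conn):
    """Try the current schema first, then fall back to a back-compat query."""
    try:
        return query_new()
    except sqlite3.OperationalError:
        try:
            return query_compat()
        except sqlite3.OperationalError:
            with suppress(Exception):
                conn.close()
            # Bare `raise` re-raises the in-flight back-compat error with its
            # traceback intact; the first failure is still visible via the
            # exception's implicit __context__.
            raise
```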