From b093a37e2f647374a9490ae48f3ac706d713818c Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 22 Jan 2025 21:33:36 +0000
Subject: [PATCH] style: pre-commit.ci auto fixes [...]

---
 .../transforms/firefoxci_artifact.py          | 11 ++++--
 .../transforms/integration_test.py            | 35 +++++++++++++++----
 .../fxci_config_taskgraph/util/integration.py | 13 +++----
 3 files changed, 45 insertions(+), 14 deletions(-)

diff --git a/taskcluster/fxci_config_taskgraph/transforms/firefoxci_artifact.py b/taskcluster/fxci_config_taskgraph/transforms/firefoxci_artifact.py
index adc5b7fd..5ef12f4e 100644
--- a/taskcluster/fxci_config_taskgraph/transforms/firefoxci_artifact.py
+++ b/taskcluster/fxci_config_taskgraph/transforms/firefoxci_artifact.py
@@ -32,7 +32,9 @@ def make_firefoxci_artifact_tasks(config, tasks):
         # turns out to be necessary when you we're running integration tests
         # on tasks that have fetches from a non-mirrored task in the firefox ci
         # cluster as well as a mirrored task in the staging cluster.
-        mirror_public_artifacts = [re.compile(r) for r in task.pop("mirror-public-artifacts", [])]
+        mirror_public_artifacts = [
+            re.compile(r) for r in task.pop("mirror-public-artifacts", [])
+        ]
         for decision_index_path in task.pop("decision-index-paths"):
             for _, task_def in find_tasks(
                 decision_index_path,
@@ -55,7 +57,12 @@ def make_firefoxci_artifact_tasks(config, tasks):
                 )
                 for fetch in fetches:
                     if fetch["artifact"].startswith("public"):
-                        if not any([pat.match(task_def["metadata"]["name"]) for pat in mirror_public_artifacts]):
+                        if not any(
+                            [
+                                pat.match(task_def["metadata"]["name"])
+                                for pat in mirror_public_artifacts
+                            ]
+                        ):
                             continue
 
                         task_id = fetch["task"]
diff --git a/taskcluster/fxci_config_taskgraph/transforms/integration_test.py b/taskcluster/fxci_config_taskgraph/transforms/integration_test.py
index 3fad8232..a425f446 100644
--- a/taskcluster/fxci_config_taskgraph/transforms/integration_test.py
+++ b/taskcluster/fxci_config_taskgraph/transforms/integration_test.py
@@ -234,7 +234,10 @@ def rewrite_mirrored_dependencies(
         # Others may be `firefoxci-artifact` tasks that have mirrored artifacts
         # from firefox ci tasks into this cluster.
         artifact_task_label = f"firefoxci-artifact-{prefix}-{upstream_task_id}"
-        if artifact_task_label in artifact_tasks and artifact_task_label not in taskdesc["dependencies"].values():
+        if (
+            artifact_task_label in artifact_tasks
+            and artifact_task_label not in taskdesc["dependencies"].values()
+        ):
             artifact_deps.add(upstream_task_id)
             taskdesc["dependencies"][artifact_task_label] = artifact_task_label
 
@@ -317,7 +320,12 @@ def make_integration_test_description(
     rewrite_docker_image(taskdesc)
     rewrite_private_fetches(taskdesc)
     rewrite_mirrored_dependencies(
-        taskdesc, name_prefix, orig_dependencies, mirrored_tasks, include_deps, artifact_tasks,
+        taskdesc,
+        name_prefix,
+        orig_dependencies,
+        mirrored_tasks,
+        include_deps,
+        artifact_tasks,
     )
     # Tasks may only have 1 root url set, which is primarily used to decide
     # where to find `MOZ_FETCHES`. When all of our fetches are known to be
@@ -325,7 +333,12 @@ def make_integration_test_description(
     # If they're all running in production, we must patch it. If we have a mix
    # of both, we cannot proceed, as either the stage or production ones would
     # result in 404s at runtime.
-    fetches = json.loads(task_def.get("payload", {}).get("env", {}).get("MOZ_FETCHES", {}).get("task-reference", "{}"))
+    fetches = json.loads(
+        task_def.get("payload", {})
+        .get("env", {})
+        .get("MOZ_FETCHES", {})
+        .get("task-reference", "{}")
+    )
     task_locations = set()
     for f in fetches:
         name = f["task"].strip("<>")
@@ -339,7 +352,9 @@ def make_integration_test_description(
             task_locations.add("prod")
 
     if len(task_locations) == 2:
-        raise Exception("Cannot run a task with fetches from stage and production clusters.")
+        raise Exception(
+            "Cannot run a task with fetches from stage and production clusters."
+        )
 
     if "prod" in task_locations:
         patch_root_url(task_def)
@@ -355,7 +370,11 @@ def schedule_tasks_at_index(config, tasks):
     if os.environ["TASKCLUSTER_ROOT_URL"] != STAGING_ROOT_URL:
         return
 
-    artifact_tasks = {k: v for k, v in config.kind_dependencies_tasks.items() if k.startswith("firefoxci-artifact")}
+    artifact_tasks = {
+        k: v
+        for k, v in config.kind_dependencies_tasks.items()
+        if k.startswith("firefoxci-artifact")
+    }
     for task in tasks:
         include_attrs = task.pop("include-attrs", {})
         exclude_attrs = task.pop("exclude-attrs", {})
@@ -378,7 +397,11 @@ def schedule_tasks_at_index(config, tasks):
             # task_def is copied to avoid modifying the version in `tasks`, which
             # may be used to modify parts of the new task description
             yield make_integration_test_description(
-                copy.deepcopy(task_def), task["name"], found_tasks, include_deps, artifact_tasks
+                copy.deepcopy(task_def),
+                task["name"],
+                found_tasks,
+                include_deps,
+                artifact_tasks,
             )
 
             created_tasks.add(task_def["metadata"]["name"])
diff --git a/taskcluster/fxci_config_taskgraph/util/integration.py b/taskcluster/fxci_config_taskgraph/util/integration.py
index 3d5be55e..6ccc78c7 100644
--- a/taskcluster/fxci_config_taskgraph/util/integration.py
+++ b/taskcluster/fxci_config_taskgraph/util/integration.py
@@ -5,9 +5,8 @@
 import copy
 import os
 import re
-import functools
 from functools import cache
-from typing import Any, Dict, List, Union
+from typing import Any
 
 import requests
 import taskcluster
@@ -17,7 +16,7 @@
 from fxci_config_taskgraph.util.constants import FIREFOXCI_ROOT_URL
 
 
-@functools.lru_cache(maxsize=None)
+@cache
 def _get_deps(task_ids, use_proxy):
     upstream_tasks = {}
     for task_id in task_ids:
@@ -36,8 +35,8 @@ def _get_deps(task_ids, use_proxy):
 
 
 def taskgraph_get_ancestors(
-    task_ids: Union[List[str], str], use_proxy: bool = False
-) -> Dict[str, str]:
+    task_ids: list[str] | str, use_proxy: bool = False
+) -> dict[str, str]:
     """Gets the ancestor tasks of the given task_ids as a dictionary of taskid -> label.
 
     Args:
@@ -47,7 +46,7 @@ def taskgraph_get_ancestors(
     Returns:
         dict: A dict whose keys are task ids and values are task labels.
     """
-    upstream_tasks: Dict[str, str] = {}
+    upstream_tasks: dict[str, str] = {}
     if isinstance(task_ids, str):
         task_ids = [task_ids]
 
@@ -66,6 +65,8 @@ def taskgraph_get_ancestors(
         upstream_tasks.update(_get_deps(tuple(task_def["dependencies"]), use_proxy))
 
     return copy.deepcopy(upstream_tasks)
+
+
 def get_ancestors(task_ids: list[str] | str) -> dict[str, str]:
     # This is not ideal, but at the moment we don't have a better way
     # to ensure that the upstream get_ancestors talks to the correct taskcluster