From 5ed420e2de99e6ec8a42b83f31cc547b793e2584 Mon Sep 17 00:00:00 2001 From: crayon <873217631@qq.com> Date: Wed, 13 Sep 2023 10:06:51 +0800 Subject: [PATCH 01/26] =?UTF-8?q?feature:=20=E6=89=A7=E8=A1=8C=E4=B8=AD?= =?UTF-8?q?=E4=BB=BB=E5=8A=A1=E7=8A=B6=E6=80=81=E5=B1=95=E7=A4=BA=E5=8F=8A?= =?UTF-8?q?=E8=BF=87=E6=BB=A4=E4=BC=98=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- gcloud/constants.py | 13 +++ gcloud/core/apis/drf/viewsets/taskflow.py | 110 +++++++++++------- gcloud/taskflow3/domains/dispatchers/task.py | 114 ++++++++++++++++++- gcloud/taskflow3/signals/handlers.py | 35 +++--- gcloud/taskflow3/utils.py | 51 ++++++--- gcloud/utils/concurrent.py | 56 +++++++++ 6 files changed, 307 insertions(+), 72 deletions(-) create mode 100644 gcloud/utils/concurrent.py diff --git a/gcloud/constants.py b/gcloud/constants.py index 2a812b6e28..542cf6ba9f 100644 --- a/gcloud/constants.py +++ b/gcloud/constants.py @@ -257,3 +257,16 @@ class JobBizScopeType(Enum): class GseAgentStatus(Enum): OFFLINE = 0 ONlINE = 1 + + +class TaskExtraStatus(Enum): + """独立于 Bamboo 的额外业务级别任务状态""" + + # 节点暂停 + NODE_SUSPENDED = "NODE_SUSPENDED" + # 等待审批 + PENDING_APPROVAL = "PENDING_APPROVAL" + # 等待确认 + PENDING_CONFIRMATION = "PENDING_CONFIRMATION" + # 等待处理 + PENDING_PROCESSING = "PENDING_PROCESSING" diff --git a/gcloud/core/apis/drf/viewsets/taskflow.py b/gcloud/core/apis/drf/viewsets/taskflow.py index ebac80c701..58e9736a8d 100644 --- a/gcloud/core/apis/drf/viewsets/taskflow.py +++ b/gcloud/core/apis/drf/viewsets/taskflow.py @@ -12,12 +12,13 @@ """ import logging import re +import typing from datetime import datetime, timedelta from bamboo_engine import states from django.conf import settings from django.db import transaction -from django.db.models import Q +from django.db.models import Q, QuerySet from django.utils.translation import ugettext_lazy as _ from django_filters import FilterSet from drf_yasg.utils import swagger_auto_schema @@ -32,7 +33,7 @@ from gcloud import err_code from gcloud.analysis_statistics.models import TaskflowExecutedNodeStatistics from gcloud.common_template.models import CommonTemplate -from gcloud.constants import TASK_NAME_MAX_LENGTH, TaskCreateMethod +from gcloud.constants import TASK_NAME_MAX_LENGTH, TaskCreateMethod, TaskExtraStatus from gcloud.contrib.appmaker.models import AppMaker from gcloud.contrib.function.models import FunctionTask from gcloud.contrib.operate_record.constants import OperateSource, OperateType, RecordType @@ -67,8 +68,10 @@ from gcloud.iam_auth.conf import TASK_ACTIONS from gcloud.iam_auth.utils import get_common_flow_allowed_actions_for_user, get_flow_allowed_actions_for_user from gcloud.taskflow3.domains.auto_retry import AutoRetryNodeStrategyCreator +from gcloud.taskflow3.domains.dispatchers import TaskCommandDispatcher from gcloud.taskflow3.models import TaskConfig, TaskFlowInstance, TaskFlowRelation, TimeoutNodeConfig from gcloud.tasktmpl3.models import TaskTemplate +from gcloud.utils import concurrent from gcloud.utils.strings import standardize_name, standardize_pipeline_node_name logger = logging.getLogger("root") @@ -80,6 +83,7 @@ class TaskFLowStatusFilterHandler: FAILED = "failed" PAUSE = "pause" RUNNING = "running" + PENDING_PROCESSING = "pending_processing" def __init__(self, status, queryset): """ @@ -102,6 +106,8 @@ def get_queryset(self): return self._filter_pipeline_pause() elif self.status == self.RUNNING: return self._filter_running() + elif self.status == self.PENDING_PROCESSING: + return 
self._filter_pending_process() else: return self.queryset @@ -118,42 +124,62 @@ def _get_pipeline_id_list(self): return pipeline_id_list + def _fetch_pipeline_instance_ids(self, statuses: typing.List[str], by_root: bool = True) -> QuerySet: + pipeline_id_list = self._get_pipeline_id_list() + # 暂停是针对于流程的暂停 + query_kwargs: typing.Dict[str, typing.Any] = {"name__in": statuses} + if by_root: + query_kwargs["root_id__in"] = pipeline_id_list + else: + query_kwargs["node_id__in"] = pipeline_id_list + pipeline_pause_root_id_list = State.objects.filter(**query_kwargs).values("root_id").distinct() + return PipelineInstance.objects.filter(instance_id__in=pipeline_pause_root_id_list).values_list("id", flat=True) + + def _fetch_pending_process_taskflow_ids( + self, taskflow_instances: typing.List[TaskFlowInstance] + ) -> typing.List[int]: + def _get_task_status(taskflow_instance: TaskFlowInstance) -> typing.Dict[str, typing.Any]: + dispatcher = TaskCommandDispatcher( + engine_ver=taskflow_instance.engine_ver, + taskflow_id=taskflow_instance.id, + pipeline_instance=taskflow_instance.pipeline_instance, + project_id=taskflow_instance.project_id, + ) + get_task_status_result: typing.Dict[str, typing.Any] = dispatcher.get_task_status(with_ex_data=False) + if get_task_status_result.get("result"): + return {"id": taskflow_instance.id, "state": get_task_status_result["data"]["state"]} + else: + return {"id": taskflow_instance.id, "state": None} + + task_status_infos: typing.List[typing.Dict[str, typing.Any]] = concurrent.batch_call( + _get_task_status, + params_list=[{"taskflow_instance": taskflow_instance} for taskflow_instance in taskflow_instances], + ) + + pending_process_taskflow_ids: typing.List[int] = [] + for task_status_info in task_status_infos: + if task_status_info["state"] == TaskExtraStatus.PENDING_PROCESSING.value: + pending_process_taskflow_ids.append(task_status_info["id"]) + return pending_process_taskflow_ids + def _filter_failed(self): """ 获取所有失败的任务,当任务失败时,任务的State的name会为Failed,去重可以获得当前存在失败节点的pipeline instance @return: """ - pipeline_id_list = self._get_pipeline_id_list() - # 获取存在异常任务状态的pipline task - pipeline_failed_root_id_list = ( - State.objects.filter(name=states.FAILED, root_id__in=pipeline_id_list).values("root_id").distinct() + # root_id + return self.queryset.filter( + pipeline_instance_id__in=self._fetch_pipeline_instance_ids(statuses=[states.FAILED]) ) - failed_pipeline_instance_id_list = PipelineInstance.objects.filter( - instance_id__in=pipeline_failed_root_id_list - ).values_list("id", flat=True) - queryset = self.queryset.filter(pipeline_instance_id__in=failed_pipeline_instance_id_list) - - return queryset def _filter_pipeline_pause(self): """ 获取所有暂停的任务,当任务暂停时,pipeline 的状态会变成暂停, return: """ - # 获取正在暂停的pipeline任务 - pipeline_id_list = self._get_pipeline_id_list() - # 暂停是针对于流程的暂停 - pipeline_pause_root_id_list = ( - State.objects.filter(name=states.SUSPENDED, node_id__in=pipeline_id_list).values("root_id").distinct() + return self.queryset.filter( + pipeline_instance_id__in=self._fetch_pipeline_instance_ids(statuses=[states.SUSPENDED], by_root=False) ) - # 获取 - pause_pipeline_instance_id_list = PipelineInstance.objects.filter( - instance_id__in=pipeline_pause_root_id_list - ).values_list("id", flat=True) - - queryset = self.queryset.filter(pipeline_instance_id__in=pause_pipeline_instance_id_list) - - return queryset def _filter_running(self): """ @@ -161,21 +187,29 @@ def _filter_running(self): @return: """ - pipeline_id_list = self._get_pipeline_id_list() - - # 这里统一使用 root_id 
进行查询,可以避免进行失败和暂停两次查询 - pipeline_failed_and_pause_root_id_list = ( - State.objects.filter(name__in=[states.SUSPENDED, states.FAILED], root_id__in=pipeline_id_list) - .values("root_id") - .distinct() + return self.queryset.exclude( + pipeline_instance_id__in=self._fetch_pipeline_instance_ids(statuses=[states.FAILED, states.SUSPENDED]) ) - pipeline_failed_and_pause_id_list = PipelineInstance.objects.filter( - instance_id__in=pipeline_failed_and_pause_root_id_list - ).values_list("id", flat=True) - - queryset = self.queryset.exclude(pipeline_instance_id__in=pipeline_failed_and_pause_id_list) - return queryset + def _filter_pending_process(self): + """ + 过滤出等待处理的流程 + :return: + """ + # 找到所有正在执行中的流程 + # selected_related 只能在 objects 最前面加,此处先查处 ID 列表 + taskflow_instance_ids: typing.List[int] = list( + self.queryset.exclude( + pipeline_instance_id__in=self._fetch_pipeline_instance_ids(statuses=[states.FAILED]) + ).values_list("id", flat=True) + ) + # selected_related 提前查取刚需的关联数据,避免 n+1 查询 + taskflow_instances: typing.List[TaskFlowInstance] = TaskFlowInstance.objects.select_related( + "pipeline_instance" + ).filter(id__in=taskflow_instance_ids) + # 并行找到全部的等待执行任务 + pending_process_taskflow_ids: typing.List[int] = self._fetch_pending_process_taskflow_ids(taskflow_instances) + return self.queryset.filter(id__in=pending_process_taskflow_ids) class TaskFlowFilterSet(FilterSet): diff --git a/gcloud/taskflow3/domains/dispatchers/task.py b/gcloud/taskflow3/domains/dispatchers/task.py index 5b24089be4..c9db907c0b 100644 --- a/gcloud/taskflow3/domains/dispatchers/task.py +++ b/gcloud/taskflow3/domains/dispatchers/task.py @@ -15,6 +15,7 @@ import json import logging import traceback +import typing from typing import Optional from bamboo_engine import api as bamboo_engine_api @@ -39,10 +40,11 @@ from engine_pickle_obj.context import SystemObject from gcloud import err_code +from gcloud.constants import TaskExtraStatus from gcloud.project_constants.domains.context import get_project_constants_context from gcloud.taskflow3.domains.context import TaskContext from gcloud.taskflow3.signals import pre_taskflow_start, taskflow_started -from gcloud.taskflow3.utils import format_bamboo_engine_status, format_pipeline_status +from gcloud.taskflow3.utils import _format_status_time, find_nodes_from_pipeline_tree, format_pipeline_status from pipeline_web.parser.format import classify_constants, format_web_data_to_pipeline from .base import EngineCommandDispatcher, ensure_return_is_dict @@ -191,7 +193,9 @@ def start_v2(self, executor: str) -> dict: except Exception as e: logger.exception("run pipeline failed") PipelineInstance.objects.filter(instance_id=self.pipeline_instance.instance_id, is_started=True).update( - start_time=None, is_started=False, executor="", + start_time=None, + is_started=False, + executor="", ) message = _(f"任务启动失败: 引擎启动失败, 请重试. 如持续失败可联系管理员处理. 
{e} | start_v2") logger.error(message) @@ -203,7 +207,9 @@ def start_v2(self, executor: str) -> dict: if not result.result: PipelineInstance.objects.filter(instance_id=self.pipeline_instance.instance_id, is_started=True).update( - start_time=None, is_started=False, executor="", + start_time=None, + is_started=False, + executor="", ) logger.error("run_pipeline fail: {}, exception: {}".format(result.message, result.exc_trace)) else: @@ -520,6 +526,7 @@ def get_task_status_v2(self, subprocess_id: Optional[str], with_ex_data: bool) - status_result = bamboo_engine_api.get_pipeline_states( runtime=runtime, root_id=self.pipeline_instance.instance_id, flat_children=False ) + if not status_result: logger.exception("bamboo_engine_api.get_pipeline_states fail") return { @@ -553,7 +560,23 @@ def get_subprocess_status(task_status: dict, subprocess_id: str) -> dict: # subprocess not been executed task_status = task_status or self.CREATED_STATUS - format_bamboo_engine_status(task_status) + # 遍历树,获取需要进行状态优化的节点,标记哪些节点具有独立子流程 + # 遍历状态树,hit -> 状态优化,独立子流程 -> 递归 + node_infos_gby_code: typing.Dict[ + str, typing.List[typing.Dict[str, typing.Any]] + ] = find_nodes_from_pipeline_tree( + self.pipeline_instance.execution_data, codes=["pause_node", "bk_approve", "subprocess_plugin"] + ) + + node_ids_gby_code: typing.Dict[str, typing.Set[str]] = {} + for code, node_infos in node_infos_gby_code.items(): + node_ids_gby_code[code] = {node_info["act_id"] for node_info in node_infos} + + code__status_map: typing.Dict[str, str] = { + "pause_node": TaskExtraStatus.PENDING_CONFIRMATION.value, + "bk_approve": TaskExtraStatus.PENDING_APPROVAL.value, + } + self.format_bamboo_engine_status(task_status, node_ids_gby_code, code__status_map) # 返回失败节点和对应调试信息 if with_ex_data and task_status["state"] == bamboo_engine_states.FAILED: @@ -642,3 +665,86 @@ def render_current_constants_v2(self): data.append({"key": key, "value": value}) return {"result": True, "data": data, "code": err_code.SUCCESS.code, "message": ""} + + def format_bamboo_engine_status( + self, + status_tree: typing.Dict[str, typing.Any], + node_ids_gby_code: typing.Dict[str, typing.Set[str]], + code__status_map: typing.Dict[str, str], + ): + """ + 格式化 bamboo 状态树 + :param status_tree: 状态树 + :param node_ids_gby_code: 按组件 Code 聚合节点 ID + :param code__status_map: 状态映射关系 + :return: + """ + + from gcloud.taskflow3.models import TaskFlowInstance + + _format_status_time(status_tree) + + # 处理状态映射 + if status_tree["state"] in [bamboo_engine_states.SUSPENDED, TaskExtraStatus.NODE_SUSPENDED.value]: + status_tree["state"] = TaskExtraStatus.PENDING_PROCESSING.value + elif status_tree["state"] == bamboo_engine_states.RUNNING: + # 独立子流程下钻 + if status_tree["id"] in node_ids_gby_code.get("subprocess_plugin", set()): + try: + # 尝试从独立子流程组件输出中获取 TaskID + task: TaskFlowInstance = TaskFlowInstance.objects.get( + pk=self.taskflow_id, project_id=self.project_id + ) + node_outputs: typing.List[typing.Dict[str, typing.Any]] = task.get_node_data( + status_tree["id"], "admin", "subprocess_plugin" + )["data"]["outputs"] + + # Raise StopIteration if not found + task_id: typing.Optional[int] = next( + (node_output["value"] for node_output in node_outputs if node_output["key"] == "task_id") + ) + sub_task: TaskFlowInstance = TaskFlowInstance.objects.get(pk=task_id, project_id=self.project_id) + except Exception: + # 非核心逻辑,记录排查日志并跳过 + logger.exception( + f"[format_bamboo_engine_status] get subprocess_plugin task_id failed, " + f"project_id -> {self.project_id}, taskflow_id -> {self.taskflow_id}, " + 
f"node -> {status_tree['id']}" + ) + pass + else: + dispatcher = TaskCommandDispatcher( + engine_ver=sub_task.engine_ver, + taskflow_id=sub_task.id, + pipeline_instance=sub_task.pipeline_instance, + project_id=self.project_id, + ) + get_task_status_result: typing.Dict[str, typing.Any] = dispatcher.get_task_status( + with_ex_data=False + ) + if get_task_status_result.get("result"): + status_tree["state"] = get_task_status_result["data"]["state"] + + else: + # 状态转换 + for code, node_ids in node_ids_gby_code.items(): + # 短路原则:code in code__status_map 处理效率高于后者,先行过滤不需要转换的 code + if code in code__status_map and status_tree["id"] in node_ids: + status_tree["state"] = code__status_map[code] + + child_status: typing.Set[str] = set() + for identifier_code, child_tree in list(status_tree["children"].items()): + self.format_bamboo_engine_status(child_tree, node_ids_gby_code, code__status_map) + child_status.add(child_tree["state"]) + + if status_tree["state"] == bamboo_engine_states.RUNNING: + if bamboo_engine_states.FAILED in child_status: + # 失败优先级最高 + status_tree["state"] = bamboo_engine_states.FAILED + elif { + TaskExtraStatus.PENDING_APPROVAL.value, + TaskExtraStatus.PENDING_CONFIRMATION.value, + TaskExtraStatus.PENDING_PROCESSING.value, + } & child_status: + # 存在其中一个状态,父级状态扭转为等待处理(PENDING_PROCESSING) + status_tree["state"] = TaskExtraStatus.PENDING_PROCESSING.value diff --git a/gcloud/taskflow3/signals/handlers.py b/gcloud/taskflow3/signals/handlers.py index bcbd5e1e01..346b4f53fa 100644 --- a/gcloud/taskflow3/signals/handlers.py +++ b/gcloud/taskflow3/signals/handlers.py @@ -14,12 +14,23 @@ import datetime import logging +from bamboo_engine import states as bamboo_engine_states from bk_monitor_report.reporter import MonitorReporter from django.conf import settings from django.dispatch import receiver +from pipeline.core.pipeline import Pipeline +from pipeline.engine.signals import activity_failed, pipeline_end, pipeline_revoke +from pipeline.eri.signals import ( + execute_interrupt, + post_set_state, + pre_service_execute, + pre_service_schedule, + schedule_interrupt, +) +from pipeline.models import PipelineInstance +from pipeline.signals import post_pipeline_finish, post_pipeline_revoke import env -from bamboo_engine import states as bamboo_engine_states from gcloud.shortcuts.message import ATOM_FAILED, TASK_FINISHED from gcloud.taskflow3.celery.tasks import auto_retry_node, send_taskflow_message, task_callback from gcloud.taskflow3.models import ( @@ -30,17 +41,6 @@ TimeoutNodeConfig, ) from gcloud.taskflow3.signals import taskflow_finished, taskflow_revoked -from pipeline.core.pipeline import Pipeline -from pipeline.engine.signals import activity_failed, pipeline_end, pipeline_revoke -from pipeline.eri.signals import ( - execute_interrupt, - post_set_state, - pre_service_execute, - pre_service_schedule, - schedule_interrupt, -) -from pipeline.models import PipelineInstance -from pipeline.signals import post_pipeline_finish, post_pipeline_revoke logger = logging.getLogger("celery") @@ -90,7 +90,9 @@ def _check_and_callback(taskflow_id, *args, **kwargs): return try: task_callback.apply_async( - kwargs=dict(task_id=taskflow_id, **kwargs), queue="task_callback", routing_key="task_callback", + kwargs=dict(task_id=taskflow_id, **kwargs), + queue="task_callback", + routing_key="task_callback", ) except Exception as e: logger.exception(f"[_check_and_callback] task_callback delay error: {e}") @@ -189,6 +191,13 @@ def bamboo_engine_eri_post_set_state_handler(sender, node_id, to_state, version, 
_finish_taskflow_and_send_signal(root_id, taskflow_finished, True) + elif to_state == bamboo_engine_states.SUSPENDED and node_id == root_id: + # TODO 发送通知,向上找到根流程,发送通知 + # 问题1:独立子流程场景,暂停后应该是由父流程决定是否通知,如何将消息通知给 Root Taskflow? + # 问题2:等待确认 / 等待审批场景也需要通知到父流程 -> 向上找到根流程 + # 问题3:子流程、父流程都有通知,以哪一方为准?(继承父流程配置?) + pass + try: _node_timeout_info_update(settings.redis_inst, to_state, node_id, version) except Exception: diff --git a/gcloud/taskflow3/utils.py b/gcloud/taskflow3/utils.py index 855dba29dc..a65fd115fc 100644 --- a/gcloud/taskflow3/utils.py +++ b/gcloud/taskflow3/utils.py @@ -13,14 +13,16 @@ import logging from typing import Any, Dict, List, Optional +import typing +from collections import defaultdict -from bamboo_engine import states as bamboo_engine_states from django.apps import apps from django.utils.translation import ugettext_lazy as _ from pipeline.core import constants as pipeline_constants from pipeline.engine import states as pipeline_states from pipeline.engine.utils import calculate_elapsed_time +from gcloud.constants import TaskExtraStatus from gcloud.utils.dates import format_datetime logger = logging.getLogger("root") @@ -59,29 +61,44 @@ def format_pipeline_status(status_tree): status_tree["state"] = pipeline_states.RUNNING elif pipeline_states.FAILED in child_status: status_tree["state"] = pipeline_states.FAILED - elif pipeline_states.SUSPENDED in child_status or "NODE_SUSPENDED" in child_status: - status_tree["state"] = "NODE_SUSPENDED" + elif pipeline_states.SUSPENDED in child_status or TaskExtraStatus.NODE_SUSPENDED.value in child_status: + status_tree["state"] = TaskExtraStatus.NODE_SUSPENDED.value # 子流程 BLOCKED 状态表示子节点失败 elif not child_status: status_tree["state"] = pipeline_states.FAILED -def format_bamboo_engine_status(status_tree): +def find_nodes_from_pipeline_tree( + pipeline_tree: typing.Dict[str, typing.Any], codes: typing.Iterable[str] +) -> typing.Dict[str, typing.List[typing.Dict[str, typing.Any]]]: """ - @summary: 转换通过 bamboo engine api 获取的任务状态格式 - @return: + 在 pipeline tree 查找指定的 Component codes + :param pipeline_tree: + :param codes: + :return: """ - _format_status_time(status_tree) - child_status = set() - for identifier_code, child_tree in list(status_tree["children"].items()): - format_bamboo_engine_status(child_tree) - child_status.add(child_tree["state"]) - - if status_tree["state"] == bamboo_engine_states.RUNNING: - if bamboo_engine_states.FAILED in child_status: - status_tree["state"] = bamboo_engine_states.FAILED - elif bamboo_engine_states.SUSPENDED in child_status or "NODE_SUSPENDED" in child_status: - status_tree["state"] = "NODE_SUSPENDED" + # 转 set 去重,提高 in 查找效率 + codes: typing.Set[str] = set(codes) + node_infos_gby_code: typing.Dict[str, typing.List[typing.Dict[str, typing.Any]]] = defaultdict(list) + if not codes: + raise ValueError("Empty codes") + for act_id, act in pipeline_tree[pipeline_constants.PE.activities].items(): + if act["type"] == pipeline_constants.PE.SubProcess: + # 非独立子流程继续递归查找 + child_node_infos_gby_code: typing.Dict[ + str, typing.List[typing.Dict[str, typing.Any]] + ] = find_nodes_from_pipeline_tree(act[pipeline_constants.PE.pipeline], codes) + # 子树查找结果同父流程合并 + for code in codes: + node_infos_gby_code[code].extend(child_node_infos_gby_code.get(code) or []) + elif act["type"] == pipeline_constants.PE.ServiceActivity: + code_or_none: typing.Optional[str] = act.get(pipeline_constants.PE.component, {}).get( + pipeline_constants.PE.code + ) + if code_or_none and code_or_none in codes: + # 使用 Dict 结构,便于后续扩展更多需要的字段 + 
node_infos_gby_code[code_or_none].append({"act_id": act_id}) + return node_infos_gby_code def add_node_name_to_status_tree(pipeline_tree, status_tree_children): diff --git a/gcloud/utils/concurrent.py b/gcloud/utils/concurrent.py new file mode 100644 index 0000000000..ebece4838e --- /dev/null +++ b/gcloud/utils/concurrent.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import time +from concurrent.futures import as_completed +from concurrent.futures.thread import ThreadPoolExecutor +from typing import Callable, Dict, List + + +def batch_call( + func: Callable, + params_list: List[Dict], + get_data=lambda x: x, + extend_result: bool = False, + interval: float = 0, + **kwargs +) -> List: + """ + # TODO 后续 batch_call 支持 *args 类参数 + 并发请求接口,每次按不同参数请求最后叠加请求结果 + :param func: 请求方法 + :param params_list: 参数列表 + :param get_data: 获取数据函数 + :param extend_result: 是否展开结果 + :param interval: 任务提交间隔 + :return: 请求结果累计 + """ + + result = [] + + # 不存在参数列表,直接返回 + if not params_list: + return result + + with ThreadPoolExecutor(max_workers=50) as ex: + tasks = [] + for idx, params in enumerate(params_list): + if idx != 0: + time.sleep(interval) + tasks.append(ex.submit(func, **params)) + + for future in as_completed(tasks): + if extend_result: + result.extend(get_data(future.result())) + else: + result.append(get_data(future.result())) + return result From 1854e11f3a4a490add90d9bee69d22879253a993 Mon Sep 17 00:00:00 2001 From: crayon <873217631@qq.com> Date: Tue, 10 Oct 2023 21:12:06 +0800 Subject: [PATCH 02/26] =?UTF-8?q?feature:=20=E7=AD=89=E5=BE=85=E5=A4=84?= =?UTF-8?q?=E7=90=86=E9=80=9A=E7=9F=A5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- gcloud/clocked_task/models.py | 13 +- gcloud/core/apis/drf/serilaziers/template.py | 8 +- gcloud/periodictask/models.py | 6 +- gcloud/shortcuts/message/__init__.py | 18 ++- gcloud/shortcuts/message/common.py | 14 +++ gcloud/taskflow3/celery/tasks.py | 34 +++++- gcloud/taskflow3/models.py | 6 +- gcloud/tests/mock_settings.py | 5 + .../tasks/test_send_taskflow_message.py | 113 ++++++++++++++++++ .../components/collections/controller.py | 30 +++-- .../collections/sites/open/bk/approve/v1_0.py | 19 ++- 11 files changed, 239 insertions(+), 27 deletions(-) create mode 100644 gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py diff --git a/gcloud/clocked_task/models.py b/gcloud/clocked_task/models.py index 4c24cdaf0c..24c0684c46 100644 --- a/gcloud/clocked_task/models.py +++ b/gcloud/clocked_task/models.py @@ -18,12 +18,7 @@ from django_celery_beat.models import ClockedSchedule as DjangoCeleryBeatClockedSchedule from django_celery_beat.models import PeriodicTask as DjangoCeleryBeatPeriodicTask -from gcloud.constants import ( - CLOCKED_TASK_NOT_STARTED, - CLOCKED_TASK_STATE, - PROJECT, - TEMPLATE_SOURCE, -) +from 
gcloud.constants import CLOCKED_TASK_NOT_STARTED, CLOCKED_TASK_STATE, PROJECT, TEMPLATE_SOURCE from gcloud.core.models import Project, StaffGroupSet from gcloud.shortcuts.cmdb import get_business_group_members from gcloud.utils.unique import uniqid @@ -134,7 +129,11 @@ def get_notify_type(self): else: notify_type = json.loads(self.notify_type) logger.info(f"[clocked_task get_notify_type] success: {notify_type}") - return notify_type if isinstance(notify_type, dict) else {"success": notify_type, "fail": notify_type} + return ( + notify_type + if isinstance(notify_type, dict) + else {"success": notify_type, "fail": notify_type, "pending_processing": notify_type} + ) def get_stakeholders(self): # 如果没有配置,则使用模版中的配置 diff --git a/gcloud/core/apis/drf/serilaziers/template.py b/gcloud/core/apis/drf/serilaziers/template.py index 292559c089..fa42fa36b4 100644 --- a/gcloud/core/apis/drf/serilaziers/template.py +++ b/gcloud/core/apis/drf/serilaziers/template.py @@ -25,11 +25,15 @@ class BaseTemplateSerializer(serializers.ModelSerializer): notify_type = ReadWriteSerializerMethodField(help_text="通知类型") def get_notify_type(self, obj): - default_notify_type = {"success": [], "fail": []} + default_notify_type = {"success": [], "fail": [], "pending_processing": []} try: notify_type = json.loads(obj.notify_type) # 对于旧数据中解析出来为[]的情况,返回默认格式 - return notify_type if isinstance(notify_type, dict) else {"success": notify_type, "fail": notify_type} + return ( + notify_type + if isinstance(notify_type, dict) + else {"success": notify_type, "fail": notify_type, "pending_processing": []} + ) except Exception as e: logger.exception(f"[get_notify_type] error: {e}") return default_notify_type diff --git a/gcloud/periodictask/models.py b/gcloud/periodictask/models.py index 9b42d975c4..0018064eef 100644 --- a/gcloud/periodictask/models.py +++ b/gcloud/periodictask/models.py @@ -293,7 +293,11 @@ def get_stakeholders(self): def get_notify_type(self): notify_type = json.loads(self.template.notify_type) - return notify_type if isinstance(notify_type, dict) else {"success": notify_type, "fail": notify_type} + return ( + notify_type + if isinstance(notify_type, dict) + else {"success": notify_type, "fail": notify_type, "pending_processing": notify_type} + ) class PeriodicTaskHistoryManager(models.Manager): diff --git a/gcloud/shortcuts/message/__init__.py b/gcloud/shortcuts/message/__init__.py index 5575d6972d..357788e4ba 100644 --- a/gcloud/shortcuts/message/__init__.py +++ b/gcloud/shortcuts/message/__init__.py @@ -13,38 +13,48 @@ import logging +from gcloud.periodictask.models import PeriodicTask from gcloud.shortcuts.message.common import ( title_and_content_for_atom_failed, + title_and_content_for_clocked_task_create_fail, title_and_content_for_flow_finished, + title_and_content_for_pending_processing, title_and_content_for_periodic_task_start_fail, - title_and_content_for_clocked_task_create_fail, ) from gcloud.shortcuts.message.send_msg import send_message -from gcloud.periodictask.models import PeriodicTask logger = logging.getLogger("root") ATOM_FAILED = "atom_failed" TASK_FINISHED = "task_finished" +PENDING_PROCESSING = "pending_processing" def send_task_flow_message(taskflow, msg_type, node_name=""): + # {"success": ["weixin"], "fail": ["weixin"]} + # [] notify_types = taskflow.get_notify_type() receivers_list = taskflow.get_stakeholders() receivers = ",".join(receivers_list) executor = taskflow.executor - if msg_type == "atom_failed": + if msg_type == ATOM_FAILED: title, content, email_content = 
title_and_content_for_atom_failed( taskflow, taskflow.pipeline_instance, node_name, executor ) notify_type = notify_types.get("fail", []) - elif msg_type == "task_finished": + elif msg_type == TASK_FINISHED: title, content, email_content = title_and_content_for_flow_finished( taskflow, taskflow.pipeline_instance, node_name, executor ) notify_type = notify_types.get("success", []) + elif msg_type == PENDING_PROCESSING: + title, content, email_content = title_and_content_for_pending_processing( + taskflow, taskflow.pipeline_instance, node_name, executor + ) + notify_type = notify_types.get("pending_processing", []) + else: return False diff --git a/gcloud/shortcuts/message/common.py b/gcloud/shortcuts/message/common.py index db650ba887..ac24963a3c 100644 --- a/gcloud/shortcuts/message/common.py +++ b/gcloud/shortcuts/message/common.py @@ -42,6 +42,20 @@ def title_and_content_for_flow_finished(taskflow, pipeline_inst, node_name, exec return title, content, email_content +def title_and_content_for_pending_processing(taskflow, pipeline_inst, node_name, executor): + title = _("【标准运维APP通知】等待处理") + base_content = _("您在【{cc_name}】业务中的任务【{task_name}】等待处理中,操作员是【{executor}】," "请前往标准运维APP{url}查看详情!").format( + cc_name=taskflow.project.name, + task_name=pipeline_inst.name, + executor=executor, + url="{url}", + ) + + content = base_content.format(url="( {} )".format(taskflow.url)) + email_content = base_content.format(url="( {} )".format(taskflow.url, taskflow.url)) + return title, content, email_content + + def title_and_content_for_periodic_task_start_fail(periodic_task, history): title = _("【标准运维APP通知】周期任务启动失败") content = _("您在【{cc_name}】业务中计划于【{start_time}】执行的周期任务【{task_name}】启动失败," "错误信息:【{ex_data}】").format( diff --git a/gcloud/taskflow3/celery/tasks.py b/gcloud/taskflow3/celery/tasks.py index 4c70bdaa74..70689205a0 100644 --- a/gcloud/taskflow3/celery/tasks.py +++ b/gcloud/taskflow3/celery/tasks.py @@ -26,12 +26,14 @@ from gcloud.constants import CallbackStatus from gcloud.shortcuts.message import send_task_flow_message from gcloud.taskflow3.domains.callback import TaskCallBacker +from gcloud.taskflow3.domains.dispatchers import TaskCommandDispatcher from gcloud.taskflow3.domains.dispatchers.node import NodeCommandDispatcher from gcloud.taskflow3.domains.node_timeout_strategy import node_timeout_handler from gcloud.taskflow3.models import ( AutoRetryNodeStrategy, EngineConfig, TaskFlowInstance, + TaskFlowRelation, TimeoutNodeConfig, TimeoutNodesRecord, ) @@ -42,9 +44,39 @@ @task -def send_taskflow_message(task_id, msg_type, node_name=""): +def send_taskflow_message(task_id, msg_type, node_name="", skip_if_not_status="", use_root=False): try: taskflow = TaskFlowInstance.objects.get(id=task_id) + if use_root and taskflow.is_child_taskflow: + logger.info("send_task_flow_message[taskflow_id=%s] is_child_taskflow, try to find root taskflow.", task_id) + root_task_id = TaskFlowRelation.objects.get(task_id=task_id).root_task_id + taskflow = TaskFlowInstance.objects.get(id=root_task_id) + logger.info( + "send_task_flow_message[taskflow_id=%s] use root taskflow[id=%s] to send message", task_id, root_task_id + ) + + if skip_if_not_status: + # 满足某个具体状态才发通知 + dispatcher = TaskCommandDispatcher( + engine_ver=taskflow.engine_ver, + taskflow_id=taskflow.id, + pipeline_instance=taskflow.pipeline_instance, + project_id=taskflow.project_id, + ) + get_task_status_result = dispatcher.get_task_status(with_ex_data=False) + if get_task_status_result.get("result") and get_task_status_result["data"]["state"] == 
skip_if_not_status: + logger.info( + "send_task_flow_message[taskflow_id=%s] taskflow[id=%s] check status -> %s success.", + task_id, + taskflow.id, + skip_if_not_status, + ) + else: + raise ValueError( + f"taskflow[id={taskflow.id}] status not match: actual -> {get_task_status_result}, " + f"expect -> {skip_if_not_status}", + ) + send_task_flow_message(taskflow, msg_type, node_name) except Exception as e: logger.exception("send_task_flow_message[taskflow_id=%s] send message error: %s" % (task_id, e)) diff --git a/gcloud/taskflow3/models.py b/gcloud/taskflow3/models.py index a4bbbcf99f..51d438dbff 100644 --- a/gcloud/taskflow3/models.py +++ b/gcloud/taskflow3/models.py @@ -1252,7 +1252,11 @@ def get_stakeholders(self): def get_notify_type(self): notify_type = json.loads(self.template.notify_type) - return notify_type if isinstance(notify_type, dict) else {"success": notify_type, "fail": notify_type} + return ( + notify_type + if isinstance(notify_type, dict) + else {"success": notify_type, "fail": notify_type, "pending_processing": notify_type} + ) def record_and_get_executor_proxy(self, operator): if self.recorded_executor_proxy is None: diff --git a/gcloud/tests/mock_settings.py b/gcloud/tests/mock_settings.py index 53691cc99b..7e84d532f2 100644 --- a/gcloud/tests/mock_settings.py +++ b/gcloud/tests/mock_settings.py @@ -52,6 +52,11 @@ TASKFLOW_CONTEXT_PROJECT_CONFIG = "gcloud.taskflow3.domains.context.ProjectConfig" TASKFLOW_TASKS_TASKFLOW_INSTANCE = "gcloud.taskflow3.celery.tasks.TaskFlowInstance" +TASKFLOW_TASKS_TASKFLOW_RELATION = "gcloud.taskflow3.celery.tasks.TaskFlowRelation" +TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS = ( + "gcloud.taskflow3.celery.tasks.TaskCommandDispatcher." "get_task_status" +) +TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE = "gcloud.taskflow3.celery.tasks.send_task_flow_message" TASKFLOW_DISPATCHERS_NODE_PIPELINE_API = "gcloud.taskflow3.domains.dispatchers.node.pipeline_api" TASKFLOW_DISPATCHERS_NODE_BAMBOO_API = "gcloud.taskflow3.domains.dispatchers.node.bamboo_engine_api" diff --git a/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py new file mode 100644 index 0000000000..7b3eca93af --- /dev/null +++ b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase +from mock import MagicMock, call, patch + +from gcloud.constants import TaskExtraStatus +from gcloud.shortcuts.message import ATOM_FAILED +from gcloud.taskflow3.celery.tasks import send_taskflow_message +from gcloud.tests.mock_settings import * # noqa + + +class SendTaskflowMessageTaskTestCase(TestCase): + @classmethod + def generate_taskflow(cls): + root_taskflow = MagicMock() + root_taskflow.id = 1 + root_taskflow.engine_ver = 1 + root_taskflow.pipeline_instance = {} + root_taskflow.project_id = 1 + + child_taskflow = MagicMock() + child_taskflow.id = 2 + child_taskflow.is_child_taskflow = True + + return root_taskflow, child_taskflow + + def test_send_taskflow_message(self): + + get_task_status = MagicMock() + taskflow = MagicMock() + taskflow.id = 1 + taskflow_model = MagicMock() + taskflow_model.objects.get = MagicMock(return_value=taskflow) + + send_task_flow_message = MagicMock() + with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): + with patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): + with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): + send_taskflow_message(taskflow.id, ATOM_FAILED, node_name="test") + + send_task_flow_message.assert_called_once_with(taskflow, ATOM_FAILED, "test") + get_task_status.assert_not_called() + taskflow_model.objects.get.assert_called_once_with(id=taskflow.id) + + def test_send_taskflow_message__use_root(self): + + root_taskflow, child_taskflow = self.generate_taskflow() + taskflow_model = MagicMock() + taskflow_model.objects.get = MagicMock(side_effect=lambda id: {1: root_taskflow, 2: child_taskflow}[id]) + + taskflow_relation = MagicMock() + taskflow_relation.root_task_id = root_taskflow.id + taskflow_relation_model = MagicMock() + taskflow_relation_model.objects.get = MagicMock(return_value=taskflow_relation) + send_task_flow_message = MagicMock() + get_task_status = MagicMock( + return_value={"result": True, "data": {"state": TaskExtraStatus.PENDING_PROCESSING.value}} + ) + with patch(TASKFLOW_TASKS_TASKFLOW_RELATION, taskflow_relation_model): + with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): + with patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): + with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): + send_taskflow_message( + child_taskflow.id, + ATOM_FAILED, + node_name="test", + skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, + use_root=True, + ) + + get_task_status.assert_called_once() + send_task_flow_message.assert_called_once_with(root_taskflow, ATOM_FAILED, "test") + taskflow_model.objects.get.assert_has_calls(calls=[call(id=child_taskflow.id), call(id=root_taskflow.id)]) + + def test_send_taskflow_message__skip_if_status(self): + + root_taskflow, child_taskflow = self.generate_taskflow() + taskflow_model = MagicMock() + taskflow_model.objects.get = MagicMock(side_effect=lambda id: {1: root_taskflow, 2: child_taskflow}[id]) + + taskflow_relation = MagicMock() + taskflow_relation.root_task_id = root_taskflow.id + taskflow_relation_model = MagicMock() + taskflow_relation_model.objects.get = MagicMock(return_value=taskflow_relation) + send_task_flow_message = MagicMock() + # 实际状态是 FAILD + get_task_status = MagicMock(return_value={"result": True, "data": {"state": "FAILED"}}) + with patch(TASKFLOW_TASKS_TASKFLOW_RELATION, taskflow_relation_model): + with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): + with 
patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): + with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): + send_taskflow_message( + child_taskflow.id, + ATOM_FAILED, + node_name="test", + skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, + use_root=True, + ) + + get_task_status.assert_called_once() + send_task_flow_message.assert_not_called() + taskflow_model.objects.get.assert_has_calls(calls=[call(id=child_taskflow.id), call(id=root_taskflow.id)]) diff --git a/pipeline_plugins/components/collections/controller.py b/pipeline_plugins/components/collections/controller.py index 30f520d94b..516d03e535 100644 --- a/pipeline_plugins/components/collections/controller.py +++ b/pipeline_plugins/components/collections/controller.py @@ -12,18 +12,21 @@ """ import datetime +import logging import os import re -import logging from django.conf import settings -from django.utils import translation, timezone +from django.utils import timezone, translation from django.utils.translation import ugettext_lazy as _ - -from pipeline.core.flow.activity import Service, StaticIntervalGenerator -from pipeline.core.flow.io import StringItemSchema, ObjectItemSchema from pipeline.component_framework.component import Component +from pipeline.core.flow.activity import Service, StaticIntervalGenerator +from pipeline.core.flow.io import ObjectItemSchema, StringItemSchema + +from gcloud.constants import TaskExtraStatus from gcloud.core.models import Project +from gcloud.shortcuts.message import PENDING_PROCESSING +from gcloud.taskflow3.celery.tasks import send_taskflow_message __group_name__ = _("蓝鲸服务(BK)") @@ -34,6 +37,13 @@ class PauseService(Service): __need_schedule__ = True def execute(self, data, parent_data): + task_id: int = parent_data.get_one_of_inputs("task_id") + send_taskflow_message.delay( + task_id=task_id, + msg_type=PENDING_PROCESSING, + skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, + use_root=True, + ) return True def schedule(self, data, parent_data, callback_data=None): @@ -45,7 +55,10 @@ def schedule(self, data, parent_data, callback_data=None): def inputs_format(self): return [ self.InputItem( - name=_("描述"), key="description", type="string", schema=StringItemSchema(description=_("描述")), + name=_("描述"), + key="description", + type="string", + schema=StringItemSchema(description=_("描述")), ) ] @@ -55,7 +68,10 @@ def outputs_format(self): name=_("API回调数据"), key="callback_data", type="object", - schema=ObjectItemSchema(description=_("通过node_callback API接口回调并传入数据,支持dict数据"), property_schemas={},), + schema=ObjectItemSchema( + description=_("通过node_callback API接口回调并传入数据,支持dict数据"), + property_schemas={}, + ), ), ] diff --git a/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py b/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py index 084fa83728..9eb70ab0de 100644 --- a/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py +++ b/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py @@ -15,14 +15,16 @@ import traceback from django.utils.translation import ugettext_lazy as _ - -from api.collections.itsm import BKItsmClient +from pipeline.component_framework.component import Component from pipeline.core.flow.activity import Service from pipeline.core.flow.io import StringItemSchema -from pipeline.component_framework.component import Component -from gcloud.utils.handlers import handle_api_error +from api.collections.itsm import BKItsmClient from gcloud.conf import 
settings +from gcloud.constants import TaskExtraStatus +from gcloud.shortcuts.message import PENDING_PROCESSING +from gcloud.taskflow3.celery.tasks import send_taskflow_message +from gcloud.utils.handlers import handle_api_error from pipeline_plugins.components.utils import get_node_callback_url __group_name__ = _("蓝鲸服务(BK)") @@ -83,6 +85,15 @@ def execute(self, data, parent_data): return False data.outputs.sn = result["data"]["sn"] + + task_id: int = parent_data.get_one_of_inputs("task_id") + send_taskflow_message.delay( + task_id=task_id, + msg_type=PENDING_PROCESSING, + skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, + use_root=True, + ) + return True def schedule(self, data, parent_data, callback_data=None): From b2d94acdf8f1d12b788a945806ae860faa4f03d7 Mon Sep 17 00:00:00 2001 From: crayon <873217631@qq.com> Date: Wed, 11 Oct 2023 10:08:34 +0800 Subject: [PATCH 03/26] minor: unit test fix --- .../sites/open/bk_test/approve/test_v1_0.py | 48 +++++++++++++++---- 1 file changed, 39 insertions(+), 9 deletions(-) diff --git a/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py b/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py index d0d63767de..7e15169c86 100644 --- a/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py +++ b/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py @@ -13,16 +13,18 @@ from django.test import TestCase from mock import MagicMock - from pipeline.component_framework.test import ( - ComponentTestMixin, - ComponentTestCase, + Call, CallAssertion, + ComponentTestCase, + ComponentTestMixin, ExecuteAssertion, - ScheduleAssertion, - Call, Patcher, + ScheduleAssertion, ) + +from gcloud.constants import TaskExtraStatus +from gcloud.shortcuts.message import PENDING_PROCESSING from pipeline_plugins.components.collections.sites.open.bk.approve.v1_0 import ApproveComponent @@ -46,8 +48,9 @@ def __init__(self, create_ticket=None): GET_CLIENT_BY_USER = "pipeline_plugins.components.collections.sites.open.bk.approve.v1_0.BKItsmClient" GET_NODE_CALLBACK_URL = "pipeline_plugins.components.collections.sites.open.bk.approve.v1_0.get_node_callback_url" BK_HANDLE_API_ERROR = "pipeline_plugins.components.collections.sites.open.bk.approve.v1_0.handle_api_error" +SEND_TASKFLOW_MESSAGE = "pipeline_plugins.components.collections.sites.open.bk.approve.v1_0.send_taskflow_message.delay" -COMMON_PARENT = {"executor": "admin", "biz_cc_id": 2, "biz_supplier_account": 0} +COMMON_PARENT = {"executor": "admin", "biz_cc_id": 2, "biz_supplier_account": 0, "task_id": 1} CREAT_TICKET_FAIL_RETURN = {"result": False, "message": "create ticket fail"} @@ -77,6 +80,7 @@ def __init__(self, create_ticket=None): CREAT_TICKET_SUCCESS_CLIENT = MockClient(create_ticket=CREAT_TICKET_SUCCESS_RETURN) CREAT_TICKET_FAIL_RETURN_CLIENT = MockClient(create_ticket=CREAT_TICKET_FAIL_RETURN) +SEND_TASKFLOW_MESSAGE_MOCK_FUNC = MagicMock(return_value=2) CREAT_TICKET_CALL = { "creator": "admin", @@ -88,6 +92,12 @@ def __init__(self, create_ticket=None): "fast_approval": True, "meta": {"callback_url": "callback_url"}, } +SEND_TASKFLOW_MESSAGE_CALL = { + "task_id": COMMON_PARENT["task_id"], + "msg_type": PENDING_PROCESSING, + "skip_if_not_status": TaskExtraStatus.PENDING_PROCESSING.value, + "use_root": True, +} INPUTS = { "bk_verifier": "tester, tester1", "bk_approve_title": "this is a test", @@ -101,13 +111,17 @@ def __init__(self, create_ticket=None): parent_data=COMMON_PARENT, 
execute_assertion=ExecuteAssertion(success=False, outputs={"ex_data": "create ticket fail"}), execute_call_assertion=[ - CallAssertion(func=CREAT_TICKET_FAIL_RETURN_CLIENT.create_ticket, calls=[Call(**CREAT_TICKET_CALL)],) + CallAssertion( + func=CREAT_TICKET_FAIL_RETURN_CLIENT.create_ticket, + calls=[Call(**CREAT_TICKET_CALL)], + ), ], schedule_assertion=None, patchers=[ Patcher(target=GET_CLIENT_BY_USER, return_value=CREAT_TICKET_FAIL_RETURN_CLIENT), Patcher(target=BK_HANDLE_API_ERROR, return_value="create ticket fail"), Patcher(target=GET_NODE_CALLBACK_URL, return_value="callback_url"), + Patcher(target=SEND_TASKFLOW_MESSAGE, side_effect=SEND_TASKFLOW_MESSAGE_MOCK_FUNC), ], ) @@ -117,7 +131,14 @@ def __init__(self, create_ticket=None): parent_data=COMMON_PARENT, execute_assertion=ExecuteAssertion(success=True, outputs={"sn": "NO2019090519542603"}), execute_call_assertion=[ - CallAssertion(func=CREAT_TICKET_SUCCESS_CLIENT.create_ticket, calls=[Call(**CREAT_TICKET_CALL)],) + CallAssertion( + func=CREAT_TICKET_SUCCESS_CLIENT.create_ticket, + calls=[Call(**CREAT_TICKET_CALL)], + ), + CallAssertion( + func=SEND_TASKFLOW_MESSAGE_MOCK_FUNC, + calls=[Call(**SEND_TASKFLOW_MESSAGE_CALL)], + ), ], schedule_assertion=ScheduleAssertion( success=True, @@ -128,6 +149,7 @@ def __init__(self, create_ticket=None): Patcher(target=GET_CLIENT_BY_USER, return_value=CREAT_TICKET_SUCCESS_CLIENT), Patcher(target=BK_HANDLE_API_ERROR, return_value=""), Patcher(target=GET_NODE_CALLBACK_URL, return_value="callback_url"), + Patcher(target=SEND_TASKFLOW_MESSAGE, side_effect=SEND_TASKFLOW_MESSAGE_MOCK_FUNC), ], ) BLOCKED_INPUTS = { @@ -144,7 +166,14 @@ def __init__(self, create_ticket=None): parent_data=COMMON_PARENT, execute_assertion=ExecuteAssertion(success=True, outputs={"sn": "NO2019090519542603"}), execute_call_assertion=[ - CallAssertion(func=CREAT_TICKET_SUCCESS_CLIENT.create_ticket, calls=[Call(**CREAT_TICKET_CALL)],) + CallAssertion( + func=CREAT_TICKET_SUCCESS_CLIENT.create_ticket, + calls=[Call(**CREAT_TICKET_CALL)], + ), + CallAssertion( + func=SEND_TASKFLOW_MESSAGE_MOCK_FUNC, + calls=[Call(**SEND_TASKFLOW_MESSAGE_CALL)], + ), ], schedule_assertion=ScheduleAssertion( success=True, @@ -155,5 +184,6 @@ def __init__(self, create_ticket=None): Patcher(target=GET_CLIENT_BY_USER, return_value=CREAT_TICKET_SUCCESS_CLIENT), Patcher(target=BK_HANDLE_API_ERROR, return_value=""), Patcher(target=GET_NODE_CALLBACK_URL, return_value="callback_url"), + Patcher(target=SEND_TASKFLOW_MESSAGE, side_effect=SEND_TASKFLOW_MESSAGE_MOCK_FUNC), ], ) From e1817daa4a3c807779b362d9b0eb05e564d09c3e Mon Sep 17 00:00:00 2001 From: crayon <873217631@qq.com> Date: Thu, 12 Oct 2023 22:28:48 +0800 Subject: [PATCH 04/26] minor: review fix --- gcloud/core/apis/drf/serilaziers/template.py | 2 +- gcloud/taskflow3/celery/tasks.py | 25 +--------- gcloud/taskflow3/signals/handlers.py | 7 --- .../tasks/test_send_taskflow_message.py | 50 +++---------------- .../components/collections/controller.py | 2 - .../collections/sites/open/bk/approve/v1_0.py | 2 - .../sites/open/bk_test/approve/test_v1_0.py | 2 - 7 files changed, 9 insertions(+), 81 deletions(-) diff --git a/gcloud/core/apis/drf/serilaziers/template.py b/gcloud/core/apis/drf/serilaziers/template.py index fa42fa36b4..e743b3ed61 100644 --- a/gcloud/core/apis/drf/serilaziers/template.py +++ b/gcloud/core/apis/drf/serilaziers/template.py @@ -32,7 +32,7 @@ def get_notify_type(self, obj): return ( notify_type if isinstance(notify_type, dict) - else {"success": notify_type, "fail": notify_type, 
"pending_processing": []} + else {"success": notify_type, "fail": notify_type, "pending_processing": notify_type} ) except Exception as e: logger.exception(f"[get_notify_type] error: {e}") diff --git a/gcloud/taskflow3/celery/tasks.py b/gcloud/taskflow3/celery/tasks.py index 70689205a0..3f6c9de5b9 100644 --- a/gcloud/taskflow3/celery/tasks.py +++ b/gcloud/taskflow3/celery/tasks.py @@ -26,7 +26,6 @@ from gcloud.constants import CallbackStatus from gcloud.shortcuts.message import send_task_flow_message from gcloud.taskflow3.domains.callback import TaskCallBacker -from gcloud.taskflow3.domains.dispatchers import TaskCommandDispatcher from gcloud.taskflow3.domains.dispatchers.node import NodeCommandDispatcher from gcloud.taskflow3.domains.node_timeout_strategy import node_timeout_handler from gcloud.taskflow3.models import ( @@ -44,7 +43,7 @@ @task -def send_taskflow_message(task_id, msg_type, node_name="", skip_if_not_status="", use_root=False): +def send_taskflow_message(task_id, msg_type, node_name="", use_root=False): try: taskflow = TaskFlowInstance.objects.get(id=task_id) if use_root and taskflow.is_child_taskflow: @@ -55,28 +54,6 @@ def send_taskflow_message(task_id, msg_type, node_name="", skip_if_not_status="" "send_task_flow_message[taskflow_id=%s] use root taskflow[id=%s] to send message", task_id, root_task_id ) - if skip_if_not_status: - # 满足某个具体状态才发通知 - dispatcher = TaskCommandDispatcher( - engine_ver=taskflow.engine_ver, - taskflow_id=taskflow.id, - pipeline_instance=taskflow.pipeline_instance, - project_id=taskflow.project_id, - ) - get_task_status_result = dispatcher.get_task_status(with_ex_data=False) - if get_task_status_result.get("result") and get_task_status_result["data"]["state"] == skip_if_not_status: - logger.info( - "send_task_flow_message[taskflow_id=%s] taskflow[id=%s] check status -> %s success.", - task_id, - taskflow.id, - skip_if_not_status, - ) - else: - raise ValueError( - f"taskflow[id={taskflow.id}] status not match: actual -> {get_task_status_result}, " - f"expect -> {skip_if_not_status}", - ) - send_task_flow_message(taskflow, msg_type, node_name) except Exception as e: logger.exception("send_task_flow_message[taskflow_id=%s] send message error: %s" % (task_id, e)) diff --git a/gcloud/taskflow3/signals/handlers.py b/gcloud/taskflow3/signals/handlers.py index 346b4f53fa..38ffe3e127 100644 --- a/gcloud/taskflow3/signals/handlers.py +++ b/gcloud/taskflow3/signals/handlers.py @@ -191,13 +191,6 @@ def bamboo_engine_eri_post_set_state_handler(sender, node_id, to_state, version, _finish_taskflow_and_send_signal(root_id, taskflow_finished, True) - elif to_state == bamboo_engine_states.SUSPENDED and node_id == root_id: - # TODO 发送通知,向上找到根流程,发送通知 - # 问题1:独立子流程场景,暂停后应该是由父流程决定是否通知,如何将消息通知给 Root Taskflow? - # 问题2:等待确认 / 等待审批场景也需要通知到父流程 -> 向上找到根流程 - # 问题3:子流程、父流程都有通知,以哪一方为准?(继承父流程配置?) 
- pass - try: _node_timeout_info_update(settings.redis_inst, to_state, node_id, version) except Exception: diff --git a/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py index 7b3eca93af..f41f8664f9 100644 --- a/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py +++ b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py @@ -14,7 +14,6 @@ from django.test import TestCase from mock import MagicMock, call, patch -from gcloud.constants import TaskExtraStatus from gcloud.shortcuts.message import ATOM_FAILED from gcloud.taskflow3.celery.tasks import send_taskflow_message from gcloud.tests.mock_settings import * # noqa @@ -64,50 +63,15 @@ def test_send_taskflow_message__use_root(self): taskflow_relation_model = MagicMock() taskflow_relation_model.objects.get = MagicMock(return_value=taskflow_relation) send_task_flow_message = MagicMock() - get_task_status = MagicMock( - return_value={"result": True, "data": {"state": TaskExtraStatus.PENDING_PROCESSING.value}} - ) with patch(TASKFLOW_TASKS_TASKFLOW_RELATION, taskflow_relation_model): with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): - with patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): - with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): - send_taskflow_message( - child_taskflow.id, - ATOM_FAILED, - node_name="test", - skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, - use_root=True, - ) + with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): + send_taskflow_message( + child_taskflow.id, + ATOM_FAILED, + node_name="test", + use_root=True, + ) - get_task_status.assert_called_once() send_task_flow_message.assert_called_once_with(root_taskflow, ATOM_FAILED, "test") taskflow_model.objects.get.assert_has_calls(calls=[call(id=child_taskflow.id), call(id=root_taskflow.id)]) - - def test_send_taskflow_message__skip_if_status(self): - - root_taskflow, child_taskflow = self.generate_taskflow() - taskflow_model = MagicMock() - taskflow_model.objects.get = MagicMock(side_effect=lambda id: {1: root_taskflow, 2: child_taskflow}[id]) - - taskflow_relation = MagicMock() - taskflow_relation.root_task_id = root_taskflow.id - taskflow_relation_model = MagicMock() - taskflow_relation_model.objects.get = MagicMock(return_value=taskflow_relation) - send_task_flow_message = MagicMock() - # 实际状态是 FAILD - get_task_status = MagicMock(return_value={"result": True, "data": {"state": "FAILED"}}) - with patch(TASKFLOW_TASKS_TASKFLOW_RELATION, taskflow_relation_model): - with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): - with patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): - with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): - send_taskflow_message( - child_taskflow.id, - ATOM_FAILED, - node_name="test", - skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, - use_root=True, - ) - - get_task_status.assert_called_once() - send_task_flow_message.assert_not_called() - taskflow_model.objects.get.assert_has_calls(calls=[call(id=child_taskflow.id), call(id=root_taskflow.id)]) diff --git a/pipeline_plugins/components/collections/controller.py b/pipeline_plugins/components/collections/controller.py index 516d03e535..2ff9c7a285 100644 --- a/pipeline_plugins/components/collections/controller.py +++ b/pipeline_plugins/components/collections/controller.py @@ -23,7 +23,6 @@ from pipeline.core.flow.activity import Service, 
StaticIntervalGenerator from pipeline.core.flow.io import ObjectItemSchema, StringItemSchema -from gcloud.constants import TaskExtraStatus from gcloud.core.models import Project from gcloud.shortcuts.message import PENDING_PROCESSING from gcloud.taskflow3.celery.tasks import send_taskflow_message @@ -41,7 +40,6 @@ def execute(self, data, parent_data): send_taskflow_message.delay( task_id=task_id, msg_type=PENDING_PROCESSING, - skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, use_root=True, ) return True diff --git a/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py b/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py index 9eb70ab0de..6ae7039640 100644 --- a/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py +++ b/pipeline_plugins/components/collections/sites/open/bk/approve/v1_0.py @@ -21,7 +21,6 @@ from api.collections.itsm import BKItsmClient from gcloud.conf import settings -from gcloud.constants import TaskExtraStatus from gcloud.shortcuts.message import PENDING_PROCESSING from gcloud.taskflow3.celery.tasks import send_taskflow_message from gcloud.utils.handlers import handle_api_error @@ -90,7 +89,6 @@ def execute(self, data, parent_data): send_taskflow_message.delay( task_id=task_id, msg_type=PENDING_PROCESSING, - skip_if_not_status=TaskExtraStatus.PENDING_PROCESSING.value, use_root=True, ) diff --git a/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py b/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py index 7e15169c86..13e89848ce 100644 --- a/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py +++ b/pipeline_plugins/tests/components/collections/sites/open/bk_test/approve/test_v1_0.py @@ -23,7 +23,6 @@ ScheduleAssertion, ) -from gcloud.constants import TaskExtraStatus from gcloud.shortcuts.message import PENDING_PROCESSING from pipeline_plugins.components.collections.sites.open.bk.approve.v1_0 import ApproveComponent @@ -95,7 +94,6 @@ def __init__(self, create_ticket=None): SEND_TASKFLOW_MESSAGE_CALL = { "task_id": COMMON_PARENT["task_id"], "msg_type": PENDING_PROCESSING, - "skip_if_not_status": TaskExtraStatus.PENDING_PROCESSING.value, "use_root": True, } INPUTS = { From b0cdb91147613778551e67517a7facc7556d33ff Mon Sep 17 00:00:00 2001 From: crayon <873217631@qq.com> Date: Thu, 12 Oct 2023 22:33:40 +0800 Subject: [PATCH 05/26] minor: review fix --- gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py index f41f8664f9..1b53912dbd 100644 --- a/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py +++ b/gcloud/tests/taskflow3/tasks/test_send_taskflow_message.py @@ -36,7 +36,6 @@ def generate_taskflow(cls): def test_send_taskflow_message(self): - get_task_status = MagicMock() taskflow = MagicMock() taskflow.id = 1 taskflow_model = MagicMock() @@ -44,12 +43,10 @@ def test_send_taskflow_message(self): send_task_flow_message = MagicMock() with patch(TASKFLOW_TASKS_TASKFLOW_INSTANCE, taskflow_model): - with patch(TASKFLOW_TASKS_TASK_COMMAND_DISPATCHER_GET_STATUS, get_task_status): - with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): - send_taskflow_message(taskflow.id, ATOM_FAILED, node_name="test") + with patch(TASKFLOW_TASKS_SEND_TASK_FLOW_MESSAGE, send_task_flow_message): + 
send_taskflow_message(taskflow.id, ATOM_FAILED, node_name="test") send_task_flow_message.assert_called_once_with(taskflow, ATOM_FAILED, "test") - get_task_status.assert_not_called() taskflow_model.objects.get.assert_called_once_with(id=taskflow.id) def test_send_taskflow_message__use_root(self): From 2a27b2e33e1b8bad932110885a666ff27ad5c2ff Mon Sep 17 00:00:00 2001 From: v_xugzhou <941071842@qq.com> Date: Fri, 27 Oct 2023 11:44:44 +0800 Subject: [PATCH 06/26] =?UTF-8?q?optimization:=20=E8=8A=82=E7=82=B9&?= =?UTF-8?q?=E4=BB=BB=E5=8A=A1=E7=8A=B6=E6=80=81=E4=BC=98=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../desktop/src/assets/fonts/bksops-icon.eot | Bin 40776 -> 41880 bytes .../desktop/src/assets/fonts/bksops-icon.svg | 20 +- .../desktop/src/assets/fonts/bksops-icon.ttf | Bin 40608 -> 41712 bytes .../desktop/src/assets/fonts/bksops-icon.woff | Bin 24000 -> 24656 bytes .../NodeTemplate/NodeRightIconStatus.vue | 17 +- .../TemplateCanvas/NodeTemplate/TaskNode.vue | 26 +- .../TemplateCanvas/NodeTemplate/index.vue | 55 ++- frontend/desktop/src/config/i18n/cn.js | 10 +- frontend/desktop/src/config/i18n/en.js | 10 +- frontend/desktop/src/constants/index.js | 3 + frontend/desktop/src/mixins/task.js | 4 + .../pages/task/TaskExecute/ExecuteInfo.vue | 41 +- .../pages/task/TaskExecute/TaskOperation.vue | 258 +++++++++++- .../task/TaskExecute/TaskOperationHeader.vue | 366 +++++++++--------- .../desktop/src/pages/task/TaskList/index.vue | 4 + .../TemplateSetting/NotifyTypeConfig.vue | 2 +- .../TemplateSetting/TabTemplateConfig.vue | 6 +- frontend/desktop/src/scss/iconfont.scss | 24 +- 18 files changed, 602 insertions(+), 244 deletions(-) diff --git a/frontend/desktop/src/assets/fonts/bksops-icon.eot b/frontend/desktop/src/assets/fonts/bksops-icon.eot index d8c71d9d0fd5dd7bcc870cccdd60cc1173120e0e..a756565cf82cf04584a5a8ac4f0e663289f840cd 100644 GIT binary patch delta 2209 zcmYjSdu$wM5uf??_HnP>+k5QZ!@l$0-gz(1os-jO>7SqN} z1VIgM#aR`NKvDD1f$BD*vilH)Sb#9CXlN5RR87S{MWIL#38fhRNRf#61ET)noSF4G zgd5Gy+|2xDzVF-lWbmaZ~k={10Vy?Rb4JIR z=N%i)4Ym0vepuKLH{`9YrYcpdxE=BcJOM}GyYN$Z8wDZ><3wFV*=;Ifo62}UK7^0p z96o{15-qNs7M?-nBK(Gq^gXx)muc%l^PIHMJSm-aIWx2b`*&q6?z}1;6ahAf;T@7v zo8cl!XPYStY{+f4U(trrX37j3zBW@0*ihRHm)CBoPowwt$O9vgRIGBPpa*qLFZFVz;xJZ91u-aUdPq>v>@HeGs~4?M zoP~li6GOULuJmF+5M>MnH9atlr6Tz;UkH&q7$PXiIO^D$lG{3Gl?Vg$jmGsvr>7%`6_NLb+*|`4ObPk5 z7%w0bWuMd284UP%iFdmk(JsO1;CEl}#Z$U>SBDjJb(-;lE(?+t$wd@N3B^pw7mlYQ zl5()>atbbwEcD?!3Bkn)JteOabTd1>$Ek**yoe}yRF4wZWS=1N9%m0< zaQ4Gyw(6H>{IZSNR&30j^;(TVYY~@af9-Fzp<1m*^T56km${?tWm;tcDgfzTM!jiK zYg2p0@9Na;qE5ZIZKFG3>qTl@CU6y9L@^S@Xn3l(Zlo}kj88l~k*MdUw1JUC#TZPa-V`M;^%GDDDi6;qc)U)+S1Y>NV-a8ehtdS69 zxfhbgRO=fArM<&ej%{%{8f-K=)Piav@y(k|`|L9=yNTS^rak!VxU2)XEVv;?$FG4d zfY=RI83HnjjSy|AU|)E;V+^jN!Sk&f*RQv3@H`q{P3%J4mH50Zxa|MAu2X31clVrj zj6H#Oip61w1`O~HDu)N3oN9KMrkL44%7}ICry+%+Fo({^>a5i zN96#mb$0v^a?2-9EOU5h9B0Y4Ims=wmgRukH9XE1>h%RSKJ0P_K_wP;54Ep64 z<-qy$$q51xbmCi89Om{;`mXB6E_K|0v1HA+AKgyahn7SB{XV7 z2@lEbf(0OK6wRwB@!vtmTl8SXQ)G3j&&{l7Kd39|k(7H^#9|73UOQ%lOE_5$_ z3*eh3@9c7IZ3VxFzo-vUKK5K~dEtT_sk;E)A^>}J<a|4f~=pU=iwY4nBd) z$f$UXP<2ry_iGUUs~jH0qd152xQJ`bE8@#Y+lkKC@=MN^{H7}3rLEf6wdN7!x~$MC z@3l|*dGlHS3y6EoUjy@vMyu887jANCco-(33g_V}(2E;(u{5k}x}h7s3|GjHVW}`2 z6v@!U4z&6Tc{^`s(Dp@{7W6VnGpu64>oQYvLq*P|g*t=jSXWqf z6G2K2DTJ6nC6p~$S|uTk#yT>E>`bqwdd%dMZMZ}6RM8VkWJ|WYb8`HIw?pm>=<@7? 
[Binary delta data omitted: the remainder of the bksops-icon.eot delta, the new glyph entries added to bksops-icon.svg, and the bksops-icon.ttf / bksops-icon.woff deltas are icon-font asset updates; their size changes are listed in the diffstat above.]

diff --git a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/NodeRightIconStatus.vue b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/NodeRightIconStatus.vue
index bd4288ecea..5aefd3dbee 100644
--- a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/NodeRightIconStatus.vue
+++ b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/NodeRightIconStatus.vue
@@ -4,18 +4,19 @@

-
- - {{ node.retry > 99 ? '100+' : node.retry }} + +
+ + +
-
+
- -
- - {{ node.loop > 99 ? '99+' : node.loop }} + +
+
diff --git a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue index a7b95a1e8d..18c6493fb1 100755 --- a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue +++ b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue @@ -41,10 +41,28 @@ @change="onNodeCheckClick"> - AS - MS - MR - AR + +
+ + {{ node.loop > 99 ? '99+' : node.loop }} +
+ + +
diff --git a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/index.vue b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/index.vue index 9c3fa894f8..97dc2770f7 100755 --- a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/index.vue +++ b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/index.vue @@ -366,6 +366,14 @@ @include nodeClick ($blueDark); } } + &.pending_processing, + &.pending_approval, + &.pending_confirmation { + @include taskNodeStyle (#ffb848); + &.actived { + @include nodeClick (#ffb848); + } + } &.running { .node-name { border-color: $blueDark; @@ -447,7 +455,6 @@ position: absolute; top: -20px; left: 0; - overflow: hidden; .bk-form-checkbox, .dark-circle { float: left; @@ -456,17 +463,29 @@ color: #979ba5; } .error-handle-icon { - float: left; - margin-right: 2px; - padding: 0 3px; + display: flex; + align-items: center; + margin: 4px 0 -4px; line-height: 12px; - color: #ffffff; - background: #979ba5; - border-radius: 2px; + font-size: 12px; + transform: scale(0.75); .text { - display: inline-block; - font-size: 12px; - transform: scale(0.8); + padding: 2px 3px; + color: #ffffff; + background: #979ba5; + border-radius: 1px 0 0 1px; + } + .count { + padding: 2px 3px; + color: #636568; + background: #dcdee5; + border-radius: 0px 1px 1px 0; + } + &:last-child { + margin-left: -5px; + } + &:first-child { + margin-left: -4px; } } } @@ -541,6 +560,12 @@ .common-icon-clock { display: inline-block; } + .common-icon-clock, + .common-icon-pending-approval, + .common-icon-pending-confirm { + font-size: 12px; + color: #fff; + } .common-icon-loading { display: inline-block; animation: loading 1.4s infinite linear; @@ -551,19 +576,27 @@ .retry-times { font-size: 12px; } + &.node-pending { + height: 20px; + width: 20px; + box-shadow: none; + } &.task-node-loop { position: relative; + top: 2px; height: 16px; width: 16px; + margin-left: 0; color: #3a84ff; background: #fff !important; > i { position: absolute; - font-size: 14px; + font-size: 16px; } > span { position: relative; top: -0.5px; + left: -0.5px; font-weight: 700; font-size: 18px; transform: scale(.5); diff --git a/frontend/desktop/src/config/i18n/cn.js b/frontend/desktop/src/config/i18n/cn.js index fff8ebb805..4ea6904f83 100644 --- a/frontend/desktop/src/config/i18n/cn.js +++ b/frontend/desktop/src/config/i18n/cn.js @@ -1788,7 +1788,15 @@ const cn = { '清除所有IP': '清除所有IP', '清除异常IP': '清除异常IP', '节点输入型变量仅支持从节点"取消使用变量"来删除': '节点输入型变量仅支持从节点"取消使用变量"来删除', - '节点输出型变量仅支持从节点"取消接收输出"来删除': '节点输出型变量仅支持从节点"取消接收输出"来删除 ' + '节点输出型变量仅支持从节点"取消接收输出"来删除': '节点输出型变量仅支持从节点"取消接收输出"来删除', + '等待处理': '等待处理', + '等待审批': '等待处理', + '等待确认': '等待处理', + '再次执行': '再次执行', + '暂停执行': '暂停执行', + '终止执行': '终止执行', + '状态明细': '状态明细', + '已自动重试 m 次 (最多 10 次),手动重试 n 次': '已自动重试 {m} 次 (最多 10 次),手动重试 {n} 次' } export default cn diff --git a/frontend/desktop/src/config/i18n/en.js b/frontend/desktop/src/config/i18n/en.js index ecff355d91..2d20bb33cb 100644 --- a/frontend/desktop/src/config/i18n/en.js +++ b/frontend/desktop/src/config/i18n/en.js @@ -1822,7 +1822,15 @@ const en = { '清除所有IP': 'Clear all IPs', '清除异常IP': 'Clear abnormal IP', '节点输入型变量仅支持从节点"取消使用变量"来删除': 'Node input variables can only be deleted by the node "Cancel Using variable"', - '节点输出型变量仅支持从节点"取消接收输出"来删除': 'Node output variables can only be deleted by the node "Cancel Receiving Output"' + '节点输出型变量仅支持从节点"取消接收输出"来删除': 'Node output variables can only be deleted by the node "Cancel Receiving Output"', + '等待处理': 'Pending Processing', + '等待审批': 'Pending 
Approval', + '等待确认': 'Pending Confirmation', + '再次执行': 'Redo', + '暂停执行': 'Pause', + '终止执行': 'Terminate', + '状态明细': 'Detail', + '已自动重试 m 次 (最多 10 次),手动重试 n 次': '' } export default en diff --git a/frontend/desktop/src/constants/index.js b/frontend/desktop/src/constants/index.js index e8e8a2f9ff..af9bd416cc 100644 --- a/frontend/desktop/src/constants/index.js +++ b/frontend/desktop/src/constants/index.js @@ -17,6 +17,9 @@ const TASK_STATE_DICT = { 'READY': i18n.t('排队中'), 'SUSPENDED': i18n.t('已暂停'), 'NODE_SUSPENDED': i18n.t('节点暂停'), + 'PENDING_PROCESSING': i18n.t('等待处理'), + 'PENDING_APPROVAL': i18n.t('等待审批'), + 'PENDING_CONFIRMATION': i18n.t('等待确认'), 'FAILED': i18n.t('失败'), 'FINISHED': i18n.t('完成'), 'REVOKED': i18n.t('终止') diff --git a/frontend/desktop/src/mixins/task.js b/frontend/desktop/src/mixins/task.js index 0978f7d054..d002ad9f8c 100644 --- a/frontend/desktop/src/mixins/task.js +++ b/frontend/desktop/src/mixins/task.js @@ -69,6 +69,10 @@ const task = { status.cls = 'failed common-icon-dark-circle-close' status.text = i18n.t('失败') break + case 'PENDING_PROCESSING': + status.cls = 'running common-icon-dark-circle-ellipsis' + status.text = i18n.t('等待处理') + break default: status.text = i18n.t('未知') } diff --git a/frontend/desktop/src/pages/task/TaskExecute/ExecuteInfo.vue b/frontend/desktop/src/pages/task/TaskExecute/ExecuteInfo.vue index d654c1de1a..b86a8bfdc4 100644 --- a/frontend/desktop/src/pages/task/TaskExecute/ExecuteInfo.vue +++ b/frontend/desktop/src/pages/task/TaskExecute/ExecuteInfo.vue @@ -118,6 +118,9 @@ {{$t('次执行')}}
+

+ {{ $t('已自动重试 m 次 (最多 10 次),手动重试 n 次', { m: 1, n: 2 }) }} +

diff --git a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue index 18c6493fb1..2a9eea2720 100755 --- a/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue +++ b/frontend/desktop/src/components/common/TemplateCanvas/NodeTemplate/TaskNode.vue @@ -48,13 +48,13 @@
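
Note: the backend side of these pending states comes down to the pattern visible in controller.py and bk/approve/v1_0.py above. A plugin that wants the task list and canvas to show 等待处理 (Pending Processing) simply queues send_taskflow_message with msg_type=PENDING_PROCESSING, and since the earlier patches removed skip_if_not_status, the message is now queued unconditionally. The sketch below is illustrative only, assuming the import paths shown in the diffs above; the class name, the "task_id" input key and the schedule body are assumptions, not code from this patch series.

from pipeline.core.flow.activity import Service

from gcloud.shortcuts.message import PENDING_PROCESSING
from gcloud.taskflow3.celery.tasks import send_taskflow_message


class ExamplePendingProcessingService(Service):
    """Hypothetical plugin, used only to illustrate the notification pattern above."""

    # Pending-style plugins stay alive until an operator acts, so they are scheduled.
    __need_schedule__ = True

    def execute(self, data, parent_data):
        # Assumption: the task id is exposed to the plugin as an input named "task_id".
        task_id = parent_data.get_one_of_inputs("task_id")
        # As in the diffs above, the notification is queued without skip_if_not_status;
        # the receiver renders whatever state (e.g. PENDING_PROCESSING) the task reports.
        send_taskflow_message.delay(
            task_id=task_id,
            msg_type=PENDING_PROCESSING,
            use_root=True,
        )
        return True

    def schedule(self, data, parent_data, callback_data=None):
        # Illustrative only: finish once a callback arrives (e.g. the operator confirms).
        if callback_data:
            self.finish_schedule()
        return True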