diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/WebMvcConfig.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/WebMvcConfig.java index 0abaa97cc3..504017851e 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/WebMvcConfig.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/WebMvcConfig.java @@ -31,11 +31,14 @@ import org.springframework.http.converter.ResourceHttpMessageConverter; import org.springframework.http.converter.StringHttpMessageConverter; import org.springframework.http.converter.support.AllEncompassingFormHttpMessageConverter; +import org.springframework.web.servlet.LocaleResolver; import org.springframework.web.servlet.config.annotation.CorsRegistry; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; +import org.springframework.web.servlet.i18n.CookieLocaleResolver; import java.util.List; +import java.util.Locale; /** Customize the SpringMVC configuration */ @Configuration @@ -52,6 +55,8 @@ public class WebMvcConfig implements WebMvcConfigurer { HttpMethod.DELETE.name() }; + public static final String LOCALE_LANGUAGE_COOKIE = "language"; + @Override public void extendMessageConverters(List> converters) { converters.add(new ByteArrayHttpMessageConverter()); @@ -95,4 +100,16 @@ public void addInterceptors(InterceptorRegistry registry) { .addInterceptor(uploadFileTypeInterceptor) .addPathPatterns("/flink/app/upload", "/resource/upload"); } + + @Bean(name = "localeResolver") + public LocaleResolver localeResolver() { + CookieLocaleResolver localeResolver = new CookieLocaleResolver(); + localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); + // set default locale + localeResolver.setDefaultLocale(Locale.US); + 
// set language tag compliant + localeResolver.setLanguageTagCompliant(false); + return localeResolver; + } + } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ApplicationMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ApplicationMessageStatus.java new file mode 100644 index 0000000000..69cd3772c1 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ApplicationMessageStatus.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum ApplicationMessageStatus implements Status { + + HANDLER_UPLOAD_FILE_IS_NULL_ERROR(10840, "File to upload can't be null. Upload file failed.", "要上传的文件不能为空,上传文件失败"), + HANDLER_UPLOAD_FILE_TYPE_ILLEGAL_ERROR(10850, + "Illegal file type, Only support standard jar or python files. 
Upload file failed.", + "文件类型非法,仅支持标准jar或python文件 上传文件失败。"), + + APP_CREATE_FAILED(10860, "create application failed.", "创建应用程序失败"), + APP_ID_NOT_EXISTS_ERROR(10870, "The application id={0} can't be found.", "找不到应用程序 id={0}"), + APP_ID_NOT_EXISTS_REVOKE_FAILED(10880, "The application id={0} can't be found, revoke failed.", + "找不到应用程序 id={0}, 撤销失败"), + APP_EXECUTE_MODE_NOT_EXISTS_ERROR(10890, "ExecutionMode can't be null.", "执行模式不能为空"), + APP_EXECUTE_MODE_OPERATION_DISABLE_ERROR(10900, "The FlinkExecutionMode [{0}] can't [{1}]!", + "Flink执行模式[{0}]无法{1}!"), + APP_NOT_EXISTS_ERROR(10920, "[StreamPark] {0} The application doesn't exist.", + "[StreamPark] {0} 应用程序不存在。"), + APP_ACTION_REPEAT_START_ERROR(10930, "[StreamPark] {0} The application cannot be started repeatedly.", + "[StreamPark] {0} 应用程序无法重复启动。"), + APP_ACTION_SAME_TASK_IN_ALREADY_RUN_ERROR(10940, + "[StreamPark] The same task name is already running in the yarn queue", + "[StreamPark] 相同的任务名称已在Yarn队列中运行"), + APP_ACTION_YARN_CLUSTER_STATE_CHECK(10950, "[StreamPark] The yarn cluster service state is {0}, please check it", + "[StreamPark] Yarn 集群服务状态为 {0},请检查一下"), + APP_CONFIG_FILE_TYPE_ILLEGALLY(10960, "application' config error. 
must be (.properties|.yaml|.yml|.conf)", + "应用程序配置错误,必须是(.properties|.yaml|.yml|.conf)"), + + APP_JOB_IS_INVALID(10970, "The job is invalid, or the job cannot be built while it is running", + "作业无效,或者在作业运行时无法生成作业"), + APP_JOB_EXECUTION_MODE_ILLEGALLY(10980, "Job executionMode must be kubernetes-session|kubernetes-application.", + "Job 执行模式必须是 Kubernetes-session 或 Kubernetes-application"), + APP_PY_FLINK_FILE_IS_NULL(10990, "pyflink file can't be null, start application failed.", + "PyFlink 文件不能为空,启动应用程序失败"), + APP_PY_FLINK_FILE_TYPE_ILLEGALLY(101000, + "pyflink format error, must be a \".py\" suffix, start application failed.", + "PyFlink格式错误,必须是 \".py\" 后缀,启动应用程序失败"), + + APP_QUEUE_LABEL_IN_TEAM_ILLEGALLY(101010, + "Queue label [{0}] isn't available for teamId [{1}], please add it into the team first.", + "队列标签 [{0}] 不适用于 teamId [{1}],请先将其添加到团队中。"), + + APP_QUEUE_LABEL_IN_DATABASE_ILLEGALLY(101020, + "Queue label [{0}] isn't available in database, please add it first.", + "队列标签[{0}]在数据库中不可用,请先添加它"), + + APP_NAME_REPEAT_COPY_FAILED(101030, + "Application names can't be repeated, copy application failed.", + "应用程序名称不能重复,复制应用程序失败。"), + + APP_FLINK_CLUSTER_NOT_RUNNING_UPDATE_FAILED(101040, + "update failed, because bind flink cluster not running", + "更新失败,因为绑定Flink集群未运行"), + + APP_BUILD_RESOURCE_GROUP_FAILED(101050, "Parse resource group failed", "分析资源组失败"), + + EXTERNAL_LINK_PARAM_EXISTING_ERROR(101060, "{0}:{1} is already existing.", "{0}:{1}已经存在"), + + API_NOT_SUPPORT(101150, "current api unsupported: {0}", "当前API不受支持: {0}"), + + CATALOG_NOT_EXISTS_ERROR(101160, "Catalog not exist, please check.", "Catalog不存在, 请检查"), + CATALOG_NAME_EXISTS_ERROR(101161, "Catalog already exist, please check.", "Catalog已存在, 请检查"), + CATALOG_NAME_MODIFY_ERROR(101162, "The catalog name cannot be modified.", "目录名称无法修改"), + CATALOG_NAME_VALID_MSG(101163, + "Catalog Name only lowercase letters, numbers, and -,.. 
Symbol composition, cannot end with a symbol.", + "目录名称 仅限小写字母、数字和-,..符号组成,不能以符号结尾。"), + + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/CommonStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/CommonStatus.java new file mode 100644 index 0000000000..0f5dd58fca --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/CommonStatus.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum CommonStatus implements Status { + + SUCCESS(0, "success", "成功"), + UNKNOWN_ERROR(1, "unknown error: {0}", "未知错误: {0}"), + + PROJECT(10, "Project", "项目"), + TEAM(11, "Team", "团队"), + VARIABLE(12, "Variable", "变量"), + APPLICATION(13, "Application", "应用程序"), + FLINK_CLUSTERS(14, "Flink Clusters", "Flink集群"), + + ; + + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/FlinkMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/FlinkMessageStatus.java new file mode 100644 index 0000000000..7678bec375 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/FlinkMessageStatus.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum FlinkMessageStatus implements Status { + + FLINK_ENV_SQL_CLIENT_JAR_NOT_EXIST(10430, "[StreamPark] can't found streampark-flink-sqlclient jar in {0}", + "[StreamPark] 在{0}中找不到 streampark-flink-sqlclient jar"), + FLINK_ENV_SQL_CLIENT_JAR_MULTIPLE_EXIST(10440, "[StreamPark] found multiple streampark-flink-sqlclient jar in {0}", + "[StreamPark] 在 {0} 中发现多个 streampark-flink-sqlclient jar"), + FLINK_ENV_FILE_OR_DIR_NOT_EXIST(10450, "[StreamPark] file or directory [{0}] no exists. please check.", + "[StreamPark] 文件或目录 [{0}] 不存在,请检查"), + FLINK_ENV_FLINK_VERSION_NOT_FOUND(10460, "[StreamPark] can no found flink {0} version", + "[StreamPark] 无法找到Flink {0} 版本"), + FLINK_ENV_FLINK_VERSION_UNSUPPORT(10470, "[StreamPark] Unsupported flink version: {0}", + "[StreamPark] 不支持的Flink版本:{0}"), + FLINK_ENV_HOME_NOT_EXIST(10480, "The flink home does not exist, please check.", "Flink Home 不存在,请检查"), + FLINK_ENV_HOME_EXIST_CLUSTER_USE(10490, "The flink home is still in use by some flink cluster, please check.", + "Flink Home 还在被一些Flink集群使用 请检查"), + FLINK_ENV_HOME_EXIST_APP_USE(10500, "The flink home is still in use by some application, please check.", + "Flink Home 仍在被某些应用程序使用 请检查"), + + FLINK_ENV_HOME_IS_DEFAULT_SET(10510, "The flink home is set as default, please change it first.", + "Flink Home 设置为默认设置,请先更改"), + + FLINK_ENV_CONNECTOR_NULL_ERROR(10520, "The flink connector is null.", + "Flink连接器为空"), + + FLINK_ENV_DIRECTORY_NOT_CONFIG_FILE(10530, "cannot find {0} in flink/conf ", + "在 flink/conf 中找不到{0}"), + + FLINK_CLUSTER_UNAVAILABLE(10540, "[StreamPark] The target cluster is unavailable, please check!, please check it", + "[StreamPark] 目标集群不可用,请检查!"), + FLINK_CLUSTER_NOT_EXIST(10550, "[StreamPark] The flink cluster don't exist, please check it", + "[StreamPark] Flink 集群不存在,请检查"), + FLINK_CLUSTER_NOT_RUNNING(10560, 
"[StreamPark] The flink cluster not running, please start it", + "[StreamPark] Flink集群未运行,请启动它"), + FLINK_CLUSTER_NOT_ACTIVE(10570, "[StreamPark] Current cluster is not active, please check!", + "[StreamPark] 当前集群未处于活动状态,请检查"), + FLINK_CLUSTER_DEPLOY_FAILED(10580, + "[StreamPark] Deploy cluster failed, unknown reason,please check you params or StreamPark error log.", + "[StreamPark] 部署集群失败,原因不明,请检查您的参数或StreamPark错误日志"), + FLINK_CLUSTER_ID_CANNOT_FIND_ERROR(10590, + "The [clusterId={0}] cannot be find, maybe the clusterId is wrong or the cluster has been deleted. Please contact the Admin.", + "找不到[集群ID={0}],可能是集群Id错误或集群已被删除。请联系管理员。"), + FLINK_CLUSTER_ID_EMPTY_ERROR(10600, "[StreamPark] The clusterId can not be empty!", "[StreamPark] 集群Id不能为空!"), + FLINK_CLUSTER_CLOSE_FAILED(10610, "[StreamPark] Shutdown cluster failed: {0}", "[StreamPark] 关闭群集失败: {0}"), + FLINK_CLUSTER_DELETE_RUNNING_CLUSTER_FAILED(10620, + "[StreamPark] Flink cluster is running, cannot be delete, please check.", "[StreamPark] Flink集群正在运行,无法删除 请检查。"), + FLINK_CLUSTER_EXIST_APP_DELETE_FAILED(10630, + "[StreamPark] Some app exist on this cluster, the cluster cannot be delete, please check.", + "[StreamPark] 此集群上存在某些应用程序,无法删除该集群 请检查"), + FLINK_CLUSTER_EXIST_RUN_TASK_CLOSE_FAILED(10640, + "[StreamPark] Some app is running on this cluster, the cluster cannot be shutdown", + "[StreamPark] 某些应用程序正在此集群上运行,无法关闭集群"), + + FLINK_CLUSTER_SHUTDOWN_RESPONSE_FAILED(10650, + "Get shutdown response failed", + "获取关机响应失败"), + + FLINK_GATEWAY_NAME_EXIST(10660, "gateway name already exists", "网关名称已存在"), + FLINK_GATEWAY_GET_VERSION_FAILED(10670, "get gateway version failed", "获取网关版本失败"), + + FLINk_APP_IS_NULL(10671, "Invalid operation, application is null.", "操作无效,应用程序为空"), + FLINk_SQL_APPID_OR_TEAM_ID_IS_NULL(10680, "Permission denied, appId and teamId cannot be null.", + "权限被拒绝,应用Id和团队Id不能为空"), + FLINK_SQL_IS_NULL_UPDATE_FAILED(10690, "Flink sql is null, update flink sql job failed.", + "FlinkSql为空,更新FlinkSQL作业失败"), + 
FLINK_SQL_BACKUP_IS_NULL_ROLLBACK_FAILED(10700, "Application backup can't be null. Rollback flink sql failed.", + "应用程序备份不能为为空,回滚FlinkSql失败"), + + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ProjectMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ProjectMessageStatus.java new file mode 100644 index 0000000000..9c5c8913ac --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ProjectMessageStatus.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum ProjectMessageStatus implements Status { + + PROJECT_MODULE_NULL_ERROR(101070, "Project module can't be null, please check.", "项目模块不能为空,请检查"), + PROJECT_NAME_EXIST(101080, "project name already exists", "项目名称已存在"), + PROJECT_GIT_PASSWORD_DECRYPT_FAILED(101090, "Project Github/Gitlab password decrypt failed", + "项目 Github/Gitlab 密码解密失败"), + PROJECT_TEAM_ID_MODIFY_ERROR(101100, "TeamId can't be changed", "无法更改TeamId"), + PROJECT_BUILDING_STATE(101110, "The project is being built", "该项目正在建设中"), + PROJECT_RUNNING_BUILDING_EXCEED_LIMIT(101120, + "The number of running Build projects exceeds the maximum number: {0}", + "正在运行的Build项目数超过最大数量: {0}"), + + API_NOT_SUPPORT(101150, "current api unsupported: {0}", "当前API不受支持: {0}"), + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ResourceMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ResourceMessageStatus.java new file mode 100644 index 0000000000..bf999dd6de --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/ResourceMessageStatus.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum ResourceMessageStatus implements Status { + + RESOURCE_ALREADY_ERROR(10300, "the resource {0} already exists, please check.", "资源{0}已经存在,请检查"), + RESOURCE_NAME_NULL_FAILED(10310, "The resource name cannot be null", "资源名不能为空"), + RESOURCE_NOT_EXIST_ERROR(10320, "the resource {0} doesn't exists, please check.", "资源{0}不存在,请检查"), + RESOURCE_STILL_USE_DELETE_ERROR(10330, "The resource is still in use, cannot be removed.", "资源仍在使用中,无法删除。"), + RESOURCE_POM_JAR_EMPTY(10340, "Please add pom or jar resource.", "请添加pom或jar资源。"), + RESOURCE_FLINK_APP_JAR_EMPTY_ERROR(10350, "Please upload jar for Flink App resource", "请上传 jar 以获取Flink App资源"), + RESOURCE_MULTI_FILE_ERROR(10360, "Please do not add multi dependency at one time.", "请不要一次添加多个依赖项"), + RESOURCE_NAME_MODIFY_ERROR(10370, "Please make sure the resource name is not changed.", "请确保未更改资源名称"), + RESOURCE_FLINK_JAR_NULL(10380, "flink app jar must exist.", "Flink App Jar 必须存在"), + + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/SparkMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/SparkMessageStatus.java new file mode 100644 index 0000000000..73b385074c --- /dev/null +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/SparkMessageStatus.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum SparkMessageStatus implements Status { + + SPARK_ENV_HOME_NULL_ERROR(10710, "The spark home does not exist, please check.", + "Spark Home不存在,请查验。"), + SPARK_ENV_HOME_IS_DEFAULT_SET(10720, "The spark home is set as default, please change it first.", + "Spark Home 设置为默认设置,请先更改"), + SPARK_ENV_VERSION_NOT_FOUND(10730, "[StreamPark] can no found spark {0} version", + "[StreamPark] 无法找到Spark {0} 版本"), + + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/IllegalFileTypeException.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/Status.java similarity index 57% rename from 
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/IllegalFileTypeException.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/Status.java index 5ed8d929f8..da2bbcdf07 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/IllegalFileTypeException.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/Status.java @@ -15,16 +15,29 @@ * limitations under the License. */ -package org.apache.streampark.console.base.exception; +package org.apache.streampark.console.base.enums; -/** This exception is thrown when there is an error in the file type */ -public class IllegalFileTypeException extends ApiAlertException { +import org.springframework.context.i18n.LocaleContextHolder; - public IllegalFileTypeException(String message) { - super(message); - } +import java.util.Locale; + +public interface Status { + + /** get status code */ + int getCode(); + + /** get english message */ + String getEnMsg(); + + /** get chinese message */ + String getZhMsg(); - public IllegalFileTypeException(String message, Throwable cause) { - super(message, cause); + /** get internationalization message */ + default String getMessage() { + if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) { + return getZhMsg(); + } else { + return getEnMsg(); + } } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/UserMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/UserMessageStatus.java new file mode 100644 index 0000000000..5b318cd064 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/UserMessageStatus.java @@ -0,0 +1,92 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum UserMessageStatus implements Status { + + SYSTEM_USER_LOGIN_TYPE_CONSTRAINTS(10000, "user {0} can only sign in with [{1}]", "用户{0}只能使用 [{1}] 登录"), + SYSTEM_USER_LOGIN_TYPE_NOT_SUPPORT(10010, "The login type [{0}] is not supported", "不支持登录类型[{0}]"), + SYSTEM_USER_LOGIN_TYPE_NULL(10011, "The login type is null", "登录类型为空"), + SYSTEM_USER_ALLOW_LOGIN_TYPE(10020, "user {0} can not login with {1}", "用户{0}无法使用{1}登录"), + SYSTEM_USER_NOT_LOGIN(10030, "Permission denied, please login first.", "权限被拒绝,请先登录"), + SYSTEM_USER_NOT_BELONG_TEAM_LOGIN(10040, + "The current user does not belong to any team, please contact the administrator!", "当前用户不属于任何团队,请联系管理员!"), + SYSTEM_USER_NOT_EXIST(10050, "User {0} does not exist", "用户{0}不存在"), + SYSTEM_USER_ID_NOT_EXIST(10060, "User ID {0} does not exist", "用户ID {0}不存在"), + SYSTEM_USER_CURRENT_LOGIN_NULL_SET_TEAM_FAILED(10070, "Current login user is null, set team failed.", + "当前登录用户为空,设置团队失败"), + + SYSTEM_USER_UPDATE_PASSWORD_FAILED(10080, "Can only update password for user who sign in 
with PASSWORD", + "只能为使用密码登录的用户更新密码"), + SYSTEM_USER_OLD_PASSWORD_INCORRECT_UPDATE_PASSWORD_FAILED(10090, "Old password error. Update password failed.", + "旧密码错误,更新密码失败。"), + SYSTEM_USER_LOGIN_PASSWORD_INCORRECT(10100, "Incorrect password", "密码不正确"), + + SYSTEM_PERMISSION_LOGIN_USER_PERMISSION_MISMATCH(10160, + "Permission denied, operations can only be performed with the permissions of the currently logged-in user.", + "权限被拒绝,只能使用当前登录用户的权限进行操作"), + SYSTEM_PERMISSION_TEAM_NO_PERMISSION(10170, + "Permission denied, only members of this team can access this permission.", "权限被拒绝,只有此团队的成员才能访问此权限"), + SYSTEM_PERMISSION_JOB_OWNER_MISMATCH(10180, + "Permission denied, this job not created by the current user, And the job cannot be found in the current user's team.", + "权限被拒绝,此作业不是由当前用户创建的,并且在当前用户的团队中找不到该作业"), + + SYSTEM_TEAM_ALREADY_EXIST(10190, "The team {0} already exist.", "团队{0}已经存在。"), + SYSTEM_TEAM_NOT_EXIST(10200, "The team {0} doesn't exist.", "团队{0} 不存在。"), + SYSTEM_TEAM_ID_CANNOT_NULL(10210, "The team id is cannot null.", "团队ID不能为空"), + SYSTEM_TEAM_ID_NOT_EXIST(10220, "The team id {0} doesn't exist.", "团队ID {0}不存在"), + SYSTEM_TEAM_NAME_CAN_NOT_CHANGE(10230, "Team name can't be changed. 
Update team failed.", "团队名称不能更改"), + SYSTEM_LDAP_NOT_ENABLE(10240, "ldap is not enabled, Please check the configuration: ldap.enable", + "LDAP未启用,请检查配置:ldap.enable"), + + SYSTEM_TEAM_ID_NULL_ERROR(10250, "Team id mustn't be null.", "团队ID不能为空"), + + SYSTEM_TEAM_EXIST_MODULE_USE_DELETE_ERROR(10260, "Please delete the {1} under the team[{0}] first!", + "请先删除团队[{0}]下的{1}!"), + + SYSTEM_ROLE_NOT_EXIST(10270, "Role {0} not found.", + "角色{0}不存在"), + SYSTEM_ROLE_ID_NOT_EXIST(10280, "Role ID {0} not found.", + "角色ID{0}不存在"), + SYSTEM_ROLE_EXIST_USED_DELETE_ERROR(10290, + "There are some users of role {0}, delete role failed, please unbind it first.", + "有一些用户的角色{0},删除角色失败,请先解绑"), + + MEMBER_USER_TEAM_ALREADY_ERROR(10390, "The user [{0}] has been added the team [{1}], please don't add it again.", + "用户 [{0}] 已添加到团队 [{1}],请不要再次添加"), + MEMBER_ID_NOT_EXIST(10400, "The member id {0} doesn't exist.", + "成员ID {0}不存在"), + MEMBER_TEAM_ID_CHANGE_ERROR(10410, "Team id cannot be changed.", + "团队ID无法更改。"), + MEMBER_USER_ID_CHANGE_ERROR(10420, "User id cannot be changed.", + "用户 ID 无法更改。"), + + SSO_SINGLE_SIGN_NOT_AVAILABLE(101130, + "Single Sign On (SSO) is not available, please contact the administrator to enable", "单点登录(SSO)不可用,请联系管理员以启用"), + SSO_CONFIG_PRINCIPAL_NAME_ERROR(101140, "Please configure the correct Principal Name Attribute", "请配置正确的主体名称属性"), + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/VariableMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/VariableMessageStatus.java new file mode 100644 index 0000000000..9b58f94d1b --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/VariableMessageStatus.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum VariableMessageStatus implements Status { + + SYSTEM_VARIABLE_ID_NULL_FAILED(10110, "The variable id cannot be null.", "变量Id不能为空"), + SYSTEM_VARIABLE_NOT_EXIST(10120, "The variable does not exist.", "变量不存在"), + SYSTEM_VARIABLE_EXIST_USE(10130, "The variable is actually used.", "该变量实际上是在用的"), + SYSTEM_VARIABLE_ALREADY_EXIST(10140, "The variable code already exists", "变量代码已存在"), + SYSTEM_VARIABLE_CODE_MODIFY_FAILED(10150, "The variable code cannot be updated.", "变量代码无法更新"), + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/YarnMessageStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/YarnMessageStatus.java new file mode 100644 index 0000000000..61f76f16e0 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/enums/YarnMessageStatus.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.base.enums; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor +@Getter +public enum YarnMessageStatus implements Status { + + YARN_QUEUE_NOT_EXIST(10740, "The yarn queue doesn't exist.", "Yarn队列不存在"), + YARN_QUEUE_NULL(10750, "Yarn queue mustn't be empty.", "Yarn队列不能为空"), + YARN_QUEUE_ID_NULL(10760, "Yarn queue id mustn't be empty.", "Yarn队列ID不能为空"), + YARN_QUEUE_LABEL_EXIST(10770, "The queue label existed already. 
Try on a new queue label, please.", + "队列标签已存在,请尝试使用新的队列标签。"), + YARN_QUEUE_LABEL_NULL(10780, "Yarn queue label mustn't be empty.", "Yarn队列标签不能为空"), + YARN_QUEUE_LABEL_AVAILABLE(10790, "The queue label is available.", "队列标签可用"), + YARN_QUEUE_LABEL_FORMAT(10800, + "Yarn queue label format should be in format {queue} or {queue}@{label1,label2}", + "Yarn队列标签格式应为格式 {queue} 或 {queue}@{label1,label2}"), + YARN_QUEUE_QUERY_PARAMS_NULL(10810, "Yarn queue query params mustn't be null.", "Yarn队列查询参数不能为空"), + YARN_QUEUE_QUERY_PARAMS_TEAM_ID_NULL(10820, "Team id of yarn queue query params mustn't be null.", + "Yarn队列查询参数的团队ID不能为空"), + YARN_QUEUE_USED_FORMAT(10830, "Please remove the yarn queue for {0} referenced it before {1}.", + "请在{1}之前删除{0}引用的Yarn队列"), + + ; + private final int code; + private final String enMsg; + private final String zhMsg; + +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiAlertException.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiAlertException.java index cd866a9086..2001e96ada 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiAlertException.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiAlertException.java @@ -18,12 +18,16 @@ package org.apache.streampark.console.base.exception; import org.apache.streampark.console.base.domain.ResponseCode; +import org.apache.streampark.console.base.enums.Status; +import org.bouncycastle.util.Arrays; + +import java.text.MessageFormat; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** - * - * *
  * To notify the frontend of an exception message,
  * it is usually a  clear  and  concise  message, e.g:
@@ -34,48 +38,77 @@
  */
 public class ApiAlertException extends AbstractApiException {
 
-    public ApiAlertException(String message) {
+    protected ApiAlertException(String message) {
         super(message, ResponseCode.CODE_FAIL_ALERT);
     }
 
-    public ApiAlertException(Throwable cause) {
+    protected ApiAlertException(Throwable cause) {
         super(cause, ResponseCode.CODE_FAIL_ALERT);
     }
 
-    public ApiAlertException(String message, Throwable cause) {
+    protected ApiAlertException(String message, Throwable cause) {
         super(message, cause, ResponseCode.CODE_FAIL_ALERT);
     }
 
-    public static void throwIfNull(Object object, String errorMsgFmt, Object... args) {
+    public static void throwIfNull(Object object, Status status, Object... args) {
         if (Objects.isNull(object)) {
-            if (args == null || args.length < 1) {
-                throw new ApiAlertException(errorMsgFmt);
-            }
-            throw new ApiAlertException(String.format(errorMsgFmt, args));
+            throwException(status, args);
         }
     }
 
-    public static void throwIfNotNull(Object object, String errorMsgFmt, Object... args) {
-        if (!Objects.isNull(object)) {
-            if (args == null || args.length < 1) {
-                throw new ApiAlertException(errorMsgFmt);
-            }
-            throw new ApiAlertException(String.format(errorMsgFmt, args));
+    public static void throwIfNotNull(Object object, Status status, Object... args) {
+        if (Objects.nonNull(object)) {
+            throwException(status, args);
         }
     }
 
-    public static void throwIfFalse(boolean expression, String errorMessage) {
-        if (!expression) {
-            throw new ApiAlertException(errorMessage);
-        }
+    public static void throwIfFalse(boolean expression, Status status, Object... args) {
+        throwIfTrue(!expression, status, args);
     }
 
-    public static void throwIfTrue(boolean expression, String errorMsgFmt, Object... args) {
+    public static void throwIfTrue(boolean expression, Status status, Object... args) {
         if (expression) {
-            if (args == null || args.length < 1) {
-                throw new ApiAlertException(errorMsgFmt);
+            throwException(status, args);
+        }
+    }
+
+    private static Object[] processArgs(Object[] args) {
+        if (!Arrays.isNullOrEmpty(args)) {
+            for (int i = 0; i < args.length; i++) {
+                Object arg = args[i];
+                if (arg instanceof Status) {
+                    args[i] = ((Status) arg).getMessage();
+                }
             }
-            throw new ApiAlertException(String.format(errorMsgFmt, args));
         }
+        return args;
+    }
+
+    private static final Pattern MESSAGE_BRACES_PATTERN = Pattern.compile("\\{([^{}]+)}");
+
+    private static String formatMessage(Status status, Object... args) {
+        // Use a regular expression to find all contents inside curly braces and check whether they are pure numbers
+        Matcher matcher = MESSAGE_BRACES_PATTERN.matcher(status.getMessage().replaceAll("'", "''"));
+        StringBuffer result = new StringBuffer();
+        while (matcher.find()) {
+            String content = matcher.group(1);
+            // if the content is not a pure number replace it
+            if (!content.matches("\\d+")) {
+                String replacement = "'{" + content + "}'";
+                matcher.appendReplacement(result, replacement);
+            } else {
+                matcher.appendReplacement(result, "{" + content + "}");
+            }
+        }
+        matcher.appendTail(result);
+        return MessageFormat.format(result.toString(), processArgs(args));
+    }
+
+    public static <T> T throwException(Status status, Throwable cause, Object... args) {
+        throw new ApiAlertException(formatMessage(status, args), cause);
+    }
+
+    public static <T> T throwException(Status status, Object... args) {
+        throw new ApiAlertException(formatMessage(status, args));
     }
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiDetailException.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiDetailException.java
index 89854ddd4c..03268252f0 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiDetailException.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApiDetailException.java
@@ -19,10 +19,11 @@
 
 import org.apache.streampark.common.util.ExceptionUtils;
 import org.apache.streampark.console.base.domain.ResponseCode;
+import org.apache.streampark.console.base.enums.Status;
+
+import java.text.MessageFormat;
 
 /**
- *
- *
  * 
  * An exception message that needs to be notified to front-end,
  * is a detailed exception message,such as the stackTrace info,
@@ -32,16 +33,13 @@
  */
 public class ApiDetailException extends AbstractApiException {
 
-    public ApiDetailException(String message) {
-        super(message, ResponseCode.CODE_FAIL_DETAIL);
-    }
-
     public ApiDetailException(Throwable cause) {
         super(ExceptionUtils.stringifyException(cause), ResponseCode.CODE_FAIL_DETAIL);
     }
 
-    public ApiDetailException(String message, Throwable cause) {
-        super(message + ExceptionUtils.stringifyException(cause), ResponseCode.CODE_FAIL_DETAIL);
+    public ApiDetailException(Status status, Throwable cause, Object... args) {
+        super(MessageFormat.format(status.getMessage(), args) + ExceptionUtils.stringifyException(cause),
+            ResponseCode.CODE_FAIL_DETAIL);
     }
 
     @Override
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApplicationException.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApplicationException.java
index 334d5e2455..ede74643aa 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApplicationException.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/exception/ApplicationException.java
@@ -17,14 +17,12 @@
 
 package org.apache.streampark.console.base.exception;
 
-/** Applies to all application exceptions */
+/**
+ * Applies to all application exceptions
+ */
 public class ApplicationException extends ApiAlertException {
 
-    public ApplicationException(String message) {
-        super(message);
-    }
-
-    public ApplicationException(Throwable cause) {
-        super(cause.getMessage());
+    public ApplicationException(String message, Throwable cause) {
+        super(message, cause);
     }
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
index 52dedb5df9..1dfb61c4b0 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
@@ -37,7 +37,12 @@
 import java.io.InputStream;
 import java.util.Map;
 
-/** An interceptor used to handle file uploads */
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.HANDLER_UPLOAD_FILE_IS_NULL_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.HANDLER_UPLOAD_FILE_TYPE_ILLEGAL_ERROR;
+
+/**
+ * An interceptor used to handle file uploads
+ */
 @Component
 public class UploadFileTypeInterceptor implements HandlerInterceptor {
 
@@ -53,13 +58,10 @@ public boolean preHandle(
            Map<String, MultipartFile> files = multipartRequest.getFileMap();
             for (String file : files.keySet()) {
                 MultipartFile multipartFile = multipartRequest.getFile(file);
-                ApiAlertException.throwIfNull(
-                    multipartFile, "File to upload can't be null. Upload file failed.");
+                ApiAlertException.throwIfNull(multipartFile, HANDLER_UPLOAD_FILE_IS_NULL_ERROR);
                 InputStream input = multipartFile.getInputStream();
                 boolean isJarOrPyFile = FileUtils.isJarFileType(input) || isPythonFile(input);
-                ApiAlertException.throwIfFalse(
-                    isJarOrPyFile,
-                    "Illegal file type, Only support standard jar or python files. Upload file failed.");
+                ApiAlertException.throwIfFalse(isJarOrPyFile, HANDLER_UPLOAD_FILE_TYPE_ILLEGAL_ERROR);
             }
         }
         return true;
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/OpenAPIAspect.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/OpenAPIAspect.java
index 2438802f0f..e49070ad60 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/OpenAPIAspect.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/OpenAPIAspect.java
@@ -43,6 +43,8 @@
 import java.util.Date;
 import java.util.TimeZone;
 
+import static org.apache.streampark.console.base.enums.CommonStatus.UNKNOWN_ERROR;
+
 @Slf4j
 @Component
 @Aspect
@@ -66,7 +68,7 @@ public RestResponse openAPI(ProceedingJoinPoint joinPoint) throws Throwable {
             OpenAPI openAPI = methodSignature.getMethod().getAnnotation(OpenAPI.class);
             if (openAPI == null) {
                 String url = request.getRequestURI();
-                throw new ApiAlertException("openapi unsupported: " + url);
+                ApiAlertException.throwException(UNKNOWN_ERROR, "openapi unsupported: " + url);
             } else {
                 Object[] objects = joinPoint.getArgs();
                 for (OpenAPI.Param param : openAPI.param()) {
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/PermissionAspect.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/PermissionAspect.java
index add5f5e012..eaf547f511 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/PermissionAspect.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/PermissionAspect.java
@@ -18,6 +18,7 @@
 package org.apache.streampark.console.core.aspect;
 
 import org.apache.streampark.console.base.domain.RestResponse;
+import org.apache.streampark.console.base.enums.UserMessageStatus;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.core.annotation.Permission;
 import org.apache.streampark.console.core.entity.Application;
@@ -44,6 +45,12 @@
 import org.springframework.expression.spel.support.StandardEvaluationContext;
 import org.springframework.stereotype.Component;
 
+import static org.apache.streampark.console.base.enums.CommonStatus.UNKNOWN_ERROR;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINk_APP_IS_NULL;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_PERMISSION_JOB_OWNER_MISMATCH;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_PERMISSION_LOGIN_USER_PERMISSION_MISMATCH;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_PERMISSION_TEAM_NO_PERMISSION;
+
 @Slf4j
 @Component
 @Aspect
@@ -65,7 +72,7 @@ public RestResponse permissionAction(ProceedingJoinPoint joinPoint) throws Throw
         Permission permission = methodSignature.getMethod().getAnnotation(Permission.class);
 
         User currentUser = ServiceHelper.getLoginUser();
-        ApiAlertException.throwIfNull(currentUser, "Permission denied, please login first.");
+        ApiAlertException.throwIfNull(currentUser, UserMessageStatus.SYSTEM_USER_NOT_LOGIN);
 
         boolean isAdmin = currentUser.getUserType() == UserTypeEnum.ADMIN;
 
@@ -74,27 +81,23 @@ public RestResponse permissionAction(ProceedingJoinPoint joinPoint) throws Throw
             Long userId = getId(joinPoint, methodSignature, permission.user());
             ApiAlertException.throwIfTrue(
                 userId != null && !currentUser.getUserId().equals(userId),
-                "Permission denied, operations can only be performed with the permissions of the currently logged-in user.");
+                SYSTEM_PERMISSION_LOGIN_USER_PERMISSION_MISMATCH);
 
             // 2) check team
             Long teamId = getId(joinPoint, methodSignature, permission.team());
             if (teamId != null) {
                 Member member = memberService.getByTeamIdUserName(teamId, currentUser.getUsername());
-                ApiAlertException.throwIfTrue(
-                    member == null,
-                    "Permission denied, only members of this team can access this permission");
+                ApiAlertException.throwIfTrue(member == null, SYSTEM_PERMISSION_TEAM_NO_PERMISSION);
             }
 
             // 3) check app
             Long appId = getId(joinPoint, methodSignature, permission.app());
             if (appId != null) {
                 Application app = applicationManageService.getById(appId);
-                ApiAlertException.throwIfTrue(app == null, "Invalid operation, application is null");
+                ApiAlertException.throwIfTrue(app == null, FLINk_APP_IS_NULL);
                 if (!currentUser.getUserId().equals(app.getUserId())) {
                     Member member = memberService.getByTeamIdUserName(app.getTeamId(), currentUser.getUsername());
-                    ApiAlertException.throwIfTrue(
-                        member == null,
-                        "Permission denied, this job not created by the current user, And the job cannot be found in the current user's team.");
+                    ApiAlertException.throwIfTrue(member == null, SYSTEM_PERMISSION_JOB_OWNER_MISMATCH);
                 }
             }
         }
@@ -123,7 +126,7 @@ private Long getId(ProceedingJoinPoint joinPoint, MethodSignature methodSignatur
         try {
             return Long.parseLong(value.toString());
         } catch (NumberFormatException e) {
-            throw new ApiAlertException(
-                "Wrong use of annotation on method " + methodSignature.getName(), e);
+            return ApiAlertException.throwException(UNKNOWN_ERROR, e,
+                "Wrong use of annotation on method " + methodSignature.getName());
        }
    }
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkSqlController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkSqlController.java
index 4659d2e451..8e51d46012 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkSqlController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkSqlController.java
@@ -43,6 +43,8 @@
 
 import java.util.List;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINk_SQL_APPID_OR_TEAM_ID_IS_NULL;
+
 @Slf4j
 @Validated
 @RestController
@@ -105,7 +107,7 @@ public RestResponse delete(FlinkSql flinkSql) {
     @Permission(app = "#appId", team = "#teamId")
     public RestResponse get(Long appId, Long teamId, String id) throws InternalException {
         ApiAlertException.throwIfTrue(
-            appId == null || teamId == null, "Permission denied, appId and teamId cannot be null");
+            appId == null || teamId == null, FLINk_SQL_APPID_OR_TEAM_ID_IS_NULL);
         String[] array = id.split(",");
         FlinkSql flinkSql1 = flinkSqlService.getById(array[0]);
         flinkSql1.base64Encode();
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
index 50b33a7a16..285aa00f83 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
@@ -41,6 +41,8 @@
 import java.util.List;
 import java.util.Map;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_NULL_ERROR;
+
 @Slf4j
 @Validated
 @RestController
@@ -55,7 +57,7 @@ public class ProjectController {
     @RequiresPermissions("project:create")
     public RestResponse create(Project project) {
         ApiAlertException.throwIfNull(
-            project.getTeamId(), "The teamId can't be null. Create team failed.");
+            project.getTeamId(), SYSTEM_TEAM_ID_NULL_ERROR);
         return projectService.create(project);
     }
 
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkEnv.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkEnv.java
index c047817619..a543b3b2d2 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkEnv.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkEnv.java
@@ -39,6 +39,8 @@
 import java.util.Map;
 import java.util.Properties;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_DIRECTORY_NOT_CONFIG_FILE;
+
 @Data
 @TableName("t_flink_env")
 public class FlinkEnv implements Serializable {
@@ -79,22 +81,17 @@ public void doSetFlinkConf() throws ApiDetailException {
         float ver = Float.parseFloat(getVersionOfFirst().concat(".").concat(getVersionOfMiddle()));
         if (ver < 1.19f) {
             yaml = new File(this.flinkHome.concat("/conf/flink-conf.yaml"));
-            if (!yaml.exists()) {
-                throw new ApiAlertException("cannot find flink-conf.yaml in flink/conf ");
-            }
+            ApiAlertException.throwIfFalse(yaml.exists(), FLINK_ENV_DIRECTORY_NOT_CONFIG_FILE, "flink-conf.yaml");
         } else if (ver == 1.19f) {
             yaml = new File(this.flinkHome.concat("/conf/flink-conf.yaml"));
             if (!yaml.exists()) {
                 yaml = new File(this.flinkHome.concat("/conf/config.yaml"));
             }
-            if (!yaml.exists()) {
-                throw new ApiAlertException("cannot find config.yaml|flink-conf.yaml in flink/conf ");
-            }
+            ApiAlertException.throwIfFalse(yaml.exists(), FLINK_ENV_DIRECTORY_NOT_CONFIG_FILE,
+                "config.yaml|flink-conf.yaml");
         } else {
             yaml = new File(this.flinkHome.concat("/conf/config.yaml"));
-            if (!yaml.exists()) {
-                throw new ApiAlertException("cannot find config.yaml in flink/conf ");
-            }
+            ApiAlertException.throwIfFalse(yaml.exists(), FLINK_ENV_DIRECTORY_NOT_CONFIG_FILE, "config.yaml");
         }
         try {
             String flinkConf = FileUtils.readFileToString(yaml, StandardCharsets.UTF_8);
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
index e19976dd4d..895d462866 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
@@ -122,6 +122,17 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executor;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_ACTION_REPEAT_START_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_ACTION_SAME_TASK_IN_ALREADY_RUN_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_EXECUTE_MODE_NOT_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_ID_NOT_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_PY_FLINK_FILE_IS_NULL;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_PY_FLINK_FILE_TYPE_ILLEGALLY;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_ID_CANNOT_FIND_ERROR;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_NOT_RUNNING;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FLINK_VERSION_NOT_FOUND;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -189,7 +200,7 @@ public class ApplicationActionServiceImpl extends ServiceImpl getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati
         ApplicationConfig applicationConfig = configService.getEffective(application.getId());
 
         ApiAlertException.throwIfNull(
-            executionModeEnum, "ExecutionMode can't be null, start application failed.");
+            executionModeEnum, APP_EXECUTE_MODE_NOT_EXISTS_ERROR);
 
         String flinkUserJar = null;
         String appConf = null;
@@ -647,14 +656,14 @@ private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati
                 Resource resource = resourceService.findByResourceName(application.getTeamId(), application.getJar());
 
                 ApiAlertException.throwIfNull(
-                    resource, "pyflink file can't be null, start application failed.");
+                    resource, APP_PY_FLINK_FILE_IS_NULL);
 
                 ApiAlertException.throwIfNull(
-                    resource.getFilePath(), "pyflink file can't be null, start application failed.");
+                    resource.getFilePath(), APP_PY_FLINK_FILE_IS_NULL);
 
                 ApiAlertException.throwIfFalse(
                     resource.getFilePath().endsWith(Constant.PYTHON_SUFFIX),
-                    "pyflink format error, must be a \".py\" suffix, start application failed.");
+                    APP_PY_FLINK_FILE_TYPE_ILLEGALLY);
 
                 flinkUserJar = resource.getFilePath();
                 break;
@@ -724,10 +733,8 @@ private Map getProperties(Application application, String runtim
             FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId());
             ApiAlertException.throwIfNull(
                 cluster,
-                String.format(
-                    "The clusterId=%s can't be find, maybe the clusterId is wrong or "
-                        + "the cluster has been deleted. Please contact the Admin.",
-                    application.getFlinkClusterId()));
+                FLINK_CLUSTER_ID_CANNOT_FIND_ERROR,
+                application.getFlinkClusterId());
             URI activeAddress = cluster.getRemoteURI();
             properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost());
             properties.put(RestOptions.PORT.key(), activeAddress.getPort());
@@ -736,10 +743,8 @@ private Map getProperties(Application application, String runtim
                 FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId());
                 ApiAlertException.throwIfNull(
                     cluster,
-                    String.format(
-                        "The yarn session clusterId=%s cannot be find, maybe the clusterId is wrong or "
-                            + "the cluster has been deleted. Please contact the Admin.",
-                        application.getFlinkClusterId()));
+                    FLINK_CLUSTER_ID_CANNOT_FIND_ERROR,
+                    application.getFlinkClusterId());
                 properties.put(ConfigKeys.KEY_YARN_APP_ID(), cluster.getClusterId());
             } else {
                 String yarnQueue = (String) application.getHotParamsMap().get(ConfigKeys.KEY_YARN_APP_QUEUE());
@@ -818,16 +823,16 @@ private String getSavepointPath(Application appParam) {
     /* check flink cluster before job start job */
     private void checkBeforeStart(Application application) {
         FlinkEnv flinkEnv = flinkEnvService.getByAppId(application.getId());
-        ApiAlertException.throwIfNull(flinkEnv, "[StreamPark] can no found flink version");
+        ApiAlertException.throwIfNull(flinkEnv, FLINK_ENV_FLINK_VERSION_NOT_FOUND);
 
         ApiAlertException.throwIfFalse(
             flinkClusterService.existsByFlinkEnvId(flinkEnv.getId()),
-            "[StreamPark] The flink cluster don't exist, please check it");
+            FLINK_CLUSTER_NOT_EXIST);
 
         FlinkCluster flinkCluster = flinkClusterService.getById(application.getFlinkClusterId());
         ApiAlertException.throwIfFalse(
             flinkClusterWatcher.getClusterState(flinkCluster) == ClusterState.RUNNING,
-            "[StreamPark] The flink cluster not running, please start it");
+            FLINK_CLUSTER_NOT_RUNNING);
     }
 
     private Tuple3 getNamespaceClusterId(
@@ -850,9 +855,8 @@ private Tuple3 getNamespaceClusterId(
                 FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId());
                 ApiAlertException.throwIfNull(
                     cluster,
-                    String.format(
-                        "The Kubernetes session clusterId=%s can't found, maybe the clusterId is wrong or the cluster has been deleted. Please contact the Admin.",
-                        application.getFlinkClusterId()));
+                    FLINK_CLUSTER_ID_CANNOT_FIND_ERROR,
+                    application.getFlinkClusterId());
                 clusterId = cluster.getClusterId();
                 k8sNamespace = cluster.getK8sNamespace();
                 exposedType = cluster.getK8sRestExposedTypeEnum();
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java
index 4b87659f53..94840dd409 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java
@@ -83,6 +83,9 @@
 import java.util.stream.Collectors;
 
 import static org.apache.streampark.common.enums.StorageType.LFS;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_ID_NOT_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_JOB_EXECUTION_MODE_ILLEGALLY;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_UNAVAILABLE;
 
 @Slf4j
 @Service
@@ -233,9 +236,7 @@ public boolean checkEnv(Application appParam) throws ApplicationException {
                 || FlinkExecutionMode.REMOTE == application.getFlinkExecutionMode()) {
                 FlinkCluster flinkCluster = flinkClusterService.getById(application.getFlinkClusterId());
                 boolean conned = flinkClusterWatcher.verifyClusterConnection(flinkCluster);
-                if (!conned) {
-                    throw new ApiAlertException("the target cluster is unavailable, please check!");
-                }
+                ApiAlertException.throwIfFalse(conned, FLINK_CLUSTER_UNAVAILABLE);
             }
             return true;
         } catch (Exception e) {
@@ -386,10 +387,10 @@ public List getYarnAppReport(String appName) {
     public String k8sStartLog(Long id, Integer offset, Integer limit) throws Exception {
         Application application = getById(id);
         ApiAlertException.throwIfNull(
-            application, String.format("The application id=%s can't be found.", id));
+            application, APP_ID_NOT_EXISTS_ERROR, id);
         ApiAlertException.throwIfFalse(
             FlinkExecutionMode.isKubernetesMode(application.getFlinkExecutionMode()),
-            "Job executionMode must be kubernetes-session|kubernetes-application.");
+            APP_JOB_EXECUTION_MODE_ILLEGALLY);
 
        CompletableFuture<String> future = CompletableFuture.supplyAsync(
             () -> KubernetesDeploymentHelper.watchDeploymentLog(
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
index 25cdf63446..def3d009a6 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
@@ -91,6 +91,14 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_CREATE_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_FLINK_CLUSTER_NOT_RUNNING_UPDATE_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_NAME_REPEAT_COPY_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_QUEUE_LABEL_IN_TEAM_ILLEGALLY;
+import static org.apache.streampark.console.base.enums.CommonStatus.UNKNOWN_ERROR;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_SQL_IS_NULL_UPDATE_FAILED;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_NULL_ERROR;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -98,9 +106,6 @@ public class ApplicationManageServiceImpl extends ServiceImpl getUserJarAndAppConf(
         ApplicationConfig applicationConfig = configService.getEffective(application.getId());
 
         ApiAlertException.throwIfNull(
-            executionModeEnum, "ExecutionMode can't be null, start application failed.");
+            executionModeEnum, APP_EXECUTE_MODE_NOT_EXISTS_ERROR);
 
         String flinkUserJar = null;
         String appConf = null;
@@ -450,14 +458,14 @@ private Tuple2 getUserJarAndAppConf(
                 Resource resource = resourceService.findByResourceName(application.getTeamId(), application.getJar());
 
                 ApiAlertException.throwIfNull(
-                    resource, "pyflink file can't be null, start application failed.");
+                    resource, APP_PY_FLINK_FILE_IS_NULL);
 
                 ApiAlertException.throwIfNull(
-                    resource.getFilePath(), "pyflink file can't be null, start application failed.");
+                    resource.getFilePath(), APP_PY_FLINK_FILE_IS_NULL);
 
                 ApiAlertException.throwIfFalse(
                     resource.getFilePath().endsWith(Constant.PYTHON_SUFFIX),
-                    "pyflink format error, must be a \".py\" suffix, start application failed.");
+                    APP_PY_FLINK_FILE_TYPE_ILLEGALLY);
 
                 flinkUserJar = resource.getFilePath();
                 break;
@@ -564,9 +572,9 @@ private void checkYarnBeforeStart(SparkApplication application) {
         STATE yarnState = HadoopUtils.yarnClient().getServiceState();
         ApiAlertException.throwIfFalse(
             yarnState == STARTED,
-            "[StreamPark] The yarn cluster service state is " + yarnState.name() + ", please check it");
+            APP_ACTION_YARN_CLUSTER_STATE_CHECK, yarnState.name());
         ApiAlertException.throwIfTrue(
             !applicationInfoService.getYarnAppReport(application.getJobName()).isEmpty(),
-            "[StreamPark] The same task name is already running in the yarn queue");
+            APP_ACTION_SAME_TASK_IN_ALREADY_RUN_ERROR);
     }
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
index 28c4bfc0b6..526a579158 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
@@ -81,6 +81,13 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_CREATE_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_NAME_REPEAT_COPY_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_QUEUE_LABEL_IN_TEAM_ILLEGALLY;
+import static org.apache.streampark.console.base.enums.CommonStatus.UNKNOWN_ERROR;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_SQL_IS_NULL_UPDATE_FAILED;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_NULL_ERROR;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -90,9 +97,6 @@ public class SparkApplicationManageServiceImpl
     implements
         SparkApplicationManageService {
 
-    private static final String ERROR_APP_QUEUE_HINT =
-        "Queue label '%s' isn't available for teamId '%d', please add it into the team first.";
-
     @Autowired
     private ProjectService projectService;
 
@@ -267,7 +271,7 @@ public void changeOwnership(Long userId, Long targetUserId) {
     @Override
     public boolean create(SparkApplication appParam) {
         ApiAlertException.throwIfNull(
-            appParam.getTeamId(), "The teamId can't be null. Create application failed.");
+            appParam.getTeamId(), SYSTEM_TEAM_ID_NULL_ERROR);
         appParam.setUserId(ServiceHelper.getUserId());
         appParam.setState(FlinkAppStateEnum.ADDED.getValue());
         appParam.setRelease(ReleaseStateEnum.NEED_RELEASE.get());
@@ -277,7 +281,7 @@ public boolean create(SparkApplication appParam) {
         boolean success = validateQueueIfNeeded(appParam);
         ApiAlertException.throwIfFalse(
             success,
-            String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId()));
+            APP_QUEUE_LABEL_IN_TEAM_ILLEGALLY, appParam.getYarnQueue(), appParam.getTeamId());
 
         appParam.doSetHotParams();
         if (appParam.isUploadJob()) {
@@ -302,7 +306,7 @@ public boolean create(SparkApplication appParam) {
             }
             return true;
         } else {
-            throw new ApiAlertException("create application failed");
+            return ApiAlertException.throwException(APP_CREATE_FAILED);
         }
     }
 
@@ -318,7 +322,7 @@ public Long copy(SparkApplication appParam) {
         boolean existsByJobName = this.existsByJobName(appParam.getJobName());
         ApiAlertException.throwIfFalse(
             !existsByJobName,
-            "[StreamPark] Application names can't be repeated, copy application failed.");
+            APP_NAME_REPEAT_COPY_FAILED);
 
         SparkApplication oldApp = getById(appParam.getId());
         SparkApplication newApp = new SparkApplication();
@@ -388,7 +392,7 @@ public Long copy(SparkApplication appParam) {
             }
             return newApp.getId();
         } else {
-            throw new ApiAlertException(
+            return ApiAlertException.throwException(UNKNOWN_ERROR,
                 "create application from copy failed, copy source app: " + oldApp.getJobName());
         }
     }
@@ -403,7 +407,7 @@ public boolean update(SparkApplication appParam) {
         boolean success = validateQueueIfNeeded(application, appParam);
         ApiAlertException.throwIfFalse(
             success,
-            String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId()));
+            APP_QUEUE_LABEL_IN_TEAM_ILLEGALLY, appParam.getYarnQueue(), appParam.getTeamId());
 
         application.setRelease(ReleaseStateEnum.NEED_RELEASE.get());
 
@@ -515,7 +519,7 @@ private void updateFlinkSqlJob(SparkApplication application, SparkApplication ap
             // get previous flink sql and decode
             FlinkSql copySourceFlinkSql = flinkSqlService.getById(appParam.getSqlId());
             ApiAlertException.throwIfNull(
-                copySourceFlinkSql, "Flink sql is null, update flink sql job failed.");
+                copySourceFlinkSql, FLINK_SQL_IS_NULL_UPDATE_FAILED);
             copySourceFlinkSql.decode();
 
             // get submit flink sql
@@ -703,7 +707,7 @@ public boolean validateQueueIfNeeded(SparkApplication oldApp, SparkApplication n
      *
      * @param application application entity.
      * @return If the executionMode is (Yarn PerJob or application mode) and the queue label is not
-     *     (empty or default), return true, false else.
+     * (empty or default), return true, false else.
      */
     private boolean isYarnNotDefaultQueue(SparkApplication application) {
         return SparkExecutionMode.isYarnMode(application.getSparkExecutionMode())
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
index e616ce4833..0ad5cbc173 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
@@ -111,6 +111,11 @@
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_BUILD_RESOURCE_GROUP_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_JOB_IS_INVALID;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FILE_OR_DIR_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FLINK_VERSION_NOT_FOUND;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FLINK_VERSION_UNSUPPORT;
 import static org.apache.streampark.console.core.enums.OperationEnum.RELEASE;
 
 @Service
@@ -171,7 +176,7 @@ public class AppBuildPipeServiceImpl
     /**
      * Build application. This is an async call method.
      *
-     * @param appId application id
+     * @param appId      application id
      * @param forceBuild forced start pipeline or not
      * @return Whether the pipeline was successfully started
      */
@@ -284,10 +289,8 @@ public void onStart(PipelineSnapshot snapshot) {
                             for (String jar : app.getDependencyObject().getJar()) {
                                 File localJar = new File(WebUtils.getAppTempDir(), jar);
                                 File uploadJar = new File(localUploads, jar);
-                                if (!localJar.exists() && !uploadJar.exists()) {
-                                    throw new ApiAlertException(
-                                        "Missing file: " + jar + ", please upload again");
-                                }
+                                ApiAlertException.throwIfTrue(!localJar.exists() && !uploadJar.exists(),
+                                    FLINK_ENV_FILE_OR_DIR_NOT_EXIST);
                                 if (localJar.exists()) {
                                     checkOrElseUploadJar(
                                         FsOperator.lfs(), localJar, uploadJar.getAbsolutePath(),
@@ -419,31 +422,31 @@ private ApplicationLog getApplicationLog(Application app) {
     /**
      * check the build environment
      *
-     * @param appId application id
+     * @param appId      application id
      * @param forceBuild forced start pipeline or not
      */
     private void checkBuildEnv(Long appId, boolean forceBuild) {
         Application app = applicationManageService.getById(appId);
 
         // 1) check flink version
-        String checkEnvErrorMessage = "Check flink env failed, please check the flink version of this job";
         FlinkEnv env = flinkEnvService.getByIdOrDefault(app.getVersionId());
-        ApiAlertException.throwIfNull(env, checkEnvErrorMessage);
+        ApiAlertException.throwIfNull(env, FLINK_ENV_FLINK_VERSION_NOT_FOUND);
         boolean checkVersion = env.getFlinkVersion().checkVersion(false);
         ApiAlertException.throwIfFalse(
-            checkVersion, "Unsupported flink version:" + env.getFlinkVersion().version());
+            checkVersion, FLINK_ENV_FLINK_VERSION_UNSUPPORT, env.getFlinkVersion().version());
 
         // 2) check env
         boolean envOk = applicationInfoService.checkEnv(app);
-        ApiAlertException.throwIfFalse(envOk, checkEnvErrorMessage);
+        ApiAlertException.throwIfFalse(envOk, FLINK_ENV_FLINK_VERSION_NOT_FOUND);
 
         // 3) Whether the application can currently start a new building progress
         ApiAlertException.throwIfTrue(
-            !forceBuild && !allowToBuildNow(appId),
-            "The job is invalid, or the job cannot be built while it is running");
+            !forceBuild && !allowToBuildNow(appId), APP_JOB_IS_INVALID);
     }
 
-    /** create building pipeline instance */
+    /**
+     * create building pipeline instance
+     */
     private BuildPipeline createPipelineInstance(@Nonnull Application app) {
         FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(app.getVersionId());
         String flinkUserJar = retrieveFlinkUserJar(flinkEnv, app);
@@ -573,7 +576,9 @@ private FlinkRemotePerJobBuildRequest buildFlinkRemotePerJobBuildRequest(
             getMergedDependencyInfo(app));
     }
 
-    /** copy from {@link ApplicationActionService#start(Application, boolean)} */
+    /**
+     * copy from {@link ApplicationActionService#start(Application, boolean)}
+     */
     private String retrieveFlinkUserJar(FlinkEnv flinkEnv, Application app) {
         switch (app.getDevelopmentMode()) {
             case CUSTOM_CODE:
@@ -718,7 +723,7 @@ private DependencyInfo getMergedDependencyInfo(Application application) {
                                             resourceService.getById(
                                                 resourceIdInGroup)));
                             } catch (JsonProcessingException e) {
-                                throw new ApiAlertException("Parse resource group failed.", e);
+                                ApiAlertException.throwException(APP_BUILD_RESOURCE_GROUP_FAILED, e);
                             }
                         }
                     });
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java
index 9b9434d6fa..3abf442f1c 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java
@@ -46,6 +46,8 @@
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_SQL_BACKUP_IS_NULL_ROLLBACK_FAILED;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -174,7 +176,7 @@ public void rollbackFlinkSql(Application appParam, FlinkSql flinkSqlParam) {
             .eq(ApplicationBackUp::getSqlId, flinkSqlParam.getId());
         ApplicationBackUp backUp = baseMapper.selectOne(queryWrapper);
         ApiAlertException.throwIfNull(
-            backUp, "Application backup can't be null. Rollback flink sql failed.");
+            backUp, FLINK_SQL_BACKUP_IS_NULL_ROLLBACK_FAILED);
         // rollback config and sql
         effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveTypeEnum.CONFIG, backUp.getId());
         effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveTypeEnum.FLINKSQL, backUp.getSqlId());
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java
index 69da15f626..e6a147d9ce 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java
@@ -49,6 +49,8 @@
 import java.util.List;
 import java.util.Scanner;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_CONFIG_FILE_TYPE_ILLEGALLY;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -77,8 +79,7 @@ public synchronized void create(Application appParam, Boolean latest) {
         if (appParam.getFormat() != null) {
             ConfigFileTypeEnum fileType = ConfigFileTypeEnum.of(appParam.getFormat());
             ApiAlertException.throwIfTrue(
-                fileType == null || ConfigFileTypeEnum.UNKNOWN == fileType,
-                "application' config error. must be (.properties|.yaml|.yml |.conf)");
+                fileType == null || ConfigFileTypeEnum.UNKNOWN == fileType, APP_CONFIG_FILE_TYPE_ILLEGALLY);
 
             applicationConfig.setFormat(fileType.getValue());
         }
@@ -174,7 +175,9 @@ private void updateForFlinkSqlJob(
         }
     }
 
-    /** Not running tasks are set to Effective, running tasks are set to Latest */
+    /**
+     * Not running tasks are set to Effective, running tasks are set to Latest
+     */
     @Override
     public void setLatestOrEffective(Boolean latest, Long configId, Long appId) {
         if (latest) {
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/CatalogServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/CatalogServiceImpl.java
index 199220fb06..174d570c82 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/CatalogServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/CatalogServiceImpl.java
@@ -40,7 +40,15 @@
 import java.util.List;
 import java.util.regex.Pattern;
 
-/** catalog manage */
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.CATALOG_NAME_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.CATALOG_NAME_MODIFY_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.CATALOG_NAME_VALID_MSG;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.CATALOG_NOT_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_CANNOT_NULL;
+
+/**
+ * catalog manage
+ */
 @Service
 @Slf4j
 @Transactional(propagation = Propagation.SUPPORTS, rollbackFor = Exception.class)
@@ -53,12 +61,12 @@ public class CatalogServiceImpl extends ServiceImpl
     @Override
     public boolean create(FlinkCatalogParams catalog, Long userId) {
         AlertException.throwIfNull(
-            catalog.getTeamId(), "The teamId can't be null. Create catalog failed.");
+            catalog.getTeamId(), SYSTEM_TEAM_ID_CANNOT_NULL);
         AlertException.throwIfFalse(
             validateCatalogName(catalog.getCatalogName()),
-            "Catalog Name only lowercase letters, numbers, and -,.. Symbol composition, cannot end with a symbol.");
+            CATALOG_NAME_VALID_MSG);
         AlertException.throwIfTrue(
-            existsByCatalogName(catalog.getCatalogName()), "Catalog name  already exists.");
+            existsByCatalogName(catalog.getCatalogName()), CATALOG_NAME_EXISTS_ERROR);
         FlinkCatalog flinkCatalog = FlinkCatalog.of(catalog);
         Date date = new Date();
         flinkCatalog.setCreateTime(date);
@@ -69,14 +77,14 @@ public boolean create(FlinkCatalogParams catalog, Long userId) {
     @Override
     public boolean remove(Long id) {
         FlinkCatalog catalog = getById(id);
-        ApiAlertException.throwIfNull(catalog, "Catalog not exist, please check.");
+        ApiAlertException.throwIfNull(catalog, CATALOG_NOT_EXISTS_ERROR);
         return this.removeById(id);
     }
 
     @Override
     public IPage page(FlinkCatalogParams catalog, RestRequest request) {
         AlertException.throwIfNull(
-            catalog.getTeamId(), "The teamId can't be null. List catalog failed.");
+            catalog.getTeamId(), SYSTEM_TEAM_ID_CANNOT_NULL);
 
         Page page = MybatisPager.getPage(request);
         this.baseMapper.selectPage(page, FlinkCatalog.of(catalog));
@@ -95,12 +103,12 @@ record -> {
     @Override
     public boolean update(FlinkCatalogParams catalogParam, long userId) {
         AlertException.throwIfNull(
-            catalogParam.getTeamId(), "The teamId can't be null. List catalog failed.");
+            catalogParam.getTeamId(), SYSTEM_TEAM_ID_CANNOT_NULL);
         FlinkCatalog catalog = getById(catalogParam.getId());
         FlinkCatalog flinkCatalog = FlinkCatalog.of(catalogParam);
         AlertException.throwIfFalse(
             catalogParam.getCatalogName().equalsIgnoreCase(catalog.getCatalogName()),
-            "The catalog name cannot be modified.");
+            CATALOG_NAME_MODIFY_ERROR);
         log.debug(
             "Catalog {} has modify from {} to {}",
             catalog.getCatalogName(),
@@ -116,7 +124,9 @@ public Boolean existsByCatalogName(String catalogName) {
         return this.baseMapper.existsByCatalogName(catalogName);
     }
 
-    /** validate catalog name */
+    /**
+     * validate catalog name
+     */
     private boolean validateCatalogName(String catalogName) {
         return Pattern.compile(CATALOG_REGEX).matcher(catalogName).matches();
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
index d2eae02af1..90793c59c7 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
@@ -17,7 +17,6 @@
 
 package org.apache.streampark.console.core.service.impl;
 
-import org.apache.streampark.common.util.AssertUtils;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.core.entity.Application;
 import org.apache.streampark.console.core.entity.ExternalLink;
@@ -39,6 +38,9 @@
 import java.util.List;
 import java.util.Map;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_NOT_EXISTS_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.EXTERNAL_LINK_PARAM_EXISTING_ERROR;
+
 @Slf4j
 @Service
 @RequiredArgsConstructor
@@ -74,7 +76,7 @@ public void removeById(Long linkId) {
     @Override
     public List render(Long appId) {
         Application app = applicationManageService.getById(appId);
-        AssertUtils.notNull(app, "Application doesn't exist");
+        ApiAlertException.throwIfNull(app, APP_NOT_EXISTS_ERROR);
         List externalLink = this.list();
         if (externalLink != null && externalLink.size() > 0) {
             // Render the placeholder
@@ -109,9 +111,9 @@ private boolean check(ExternalLink params) {
             return true;
         }
         ApiAlertException.throwIfTrue(result.getBadgeName().equals(params.getBadgeName()),
-            String.format("The name: %s is already existing.", result.getBadgeName()));
+            EXTERNAL_LINK_PARAM_EXISTING_ERROR, "badge name", result.getBadgeName());
         ApiAlertException.throwIfTrue(result.getLinkUrl().equals(params.getLinkUrl()),
-            String.format("The linkUrl: %s is already existing.", result.getLinkUrl()));
+            EXTERNAL_LINK_PARAM_EXISTING_ERROR, "link url", result.getLinkUrl());
 
         return false;
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java
index 00408144ef..833dfb5a68 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java
@@ -38,7 +38,6 @@
 import org.apache.streampark.flink.client.bean.ShutDownResponse;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.exception.ExceptionUtils;
 
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
@@ -64,6 +63,18 @@
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_EXECUTE_MODE_OPERATION_DISABLE_ERROR;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_QUEUE_LABEL_IN_DATABASE_ILLEGALLY;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_CLOSE_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_DELETE_RUNNING_CLUSTER_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_DEPLOY_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_EXIST_APP_DELETE_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_EXIST_RUN_TASK_CLOSE_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_ID_EMPTY_ERROR;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_NOT_RUNNING;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_CLUSTER_SHUTDOWN_RESPONSE_FAILED;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -71,9 +82,6 @@ public class FlinkClusterServiceImpl extends ServiceImpl 1 && flinkEnv.getIsDefault()),
-            "The flink home is set as default, please change it first.");
+            FLINK_ENV_HOME_IS_DEFAULT_SET);
 
         this.baseMapper.deleteById(id);
     }
@@ -162,16 +167,16 @@ public void validity(Long id) {
     private void checkOrElseAlert(FlinkEnv flinkEnv) {
 
         // 1.check exists
-        ApiAlertException.throwIfNull(flinkEnv, "The flink home does not exist, please check.");
+        ApiAlertException.throwIfNull(flinkEnv, FLINK_ENV_HOME_NOT_EXIST);
 
         // 2.check if it is being used by any flink cluster
         ApiAlertException.throwIfTrue(
             flinkClusterService.existsByFlinkEnvId(flinkEnv.getId()),
-            "The flink home is still in use by some flink cluster, please check.");
+            FLINK_ENV_HOME_EXIST_CLUSTER_USE);
 
         // 3.check if it is being used by any application
         ApiAlertException.throwIfTrue(
             applicationInfoService.existsByFlinkEnvId(flinkEnv.getId()),
-            "The flink home is still in use by some application, please check.");
+            FLINK_ENV_HOME_EXIST_APP_USE);
     }
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkGateWayServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkGateWayServiceImpl.java
index c42ad485f2..9e8f539d7c 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkGateWayServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkGateWayServiceImpl.java
@@ -37,6 +37,9 @@
 
 import java.util.concurrent.TimeUnit;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_GATEWAY_GET_VERSION_FAILED;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_GATEWAY_NAME_EXIST;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -47,7 +50,7 @@ public class FlinkGateWayServiceImpl extends ServiceImpl 0, "project name already exists, add project failed");
+        ApiAlertException.throwIfTrue(count > 0, PROJECT_NAME_EXIST);
         if (StringUtils.isNotBlank(project.getPassword())) {
             String salt = ShaHashUtils.getRandomSalt();
             try {
@@ -113,16 +120,11 @@ public RestResponse create(Project project) {
                 project.setSalt(salt);
                 project.setPassword(encrypt);
             } catch (Exception e) {
-                log.error("Project password decrypt failed", e);
-                throw new ApiAlertException("Project github/gitlab password decrypt failed");
+                ApiAlertException.throwException(PROJECT_GIT_PASSWORD_DECRYPT_FAILED, e);
             }
         }
         boolean status = save(project);
-
-        if (status) {
-            return response.message("Add project successfully").data(true);
-        }
-        return response.message("Add project failed").data(false);
+        return response.message("Add project " + (status ? "successfully" : "failed")).data(status);
     }
 
     @Override
@@ -131,10 +133,10 @@ public boolean update(Project projectParam) {
         AssertUtils.notNull(project);
         ApiAlertException.throwIfFalse(
             project.getTeamId().equals(projectParam.getTeamId()),
-            "TeamId can't be changed, update project failed.");
-        ApiAlertException.throwIfFalse(
-            !project.getBuildState().equals(BuildStateEnum.BUILDING.get()),
-            "The project is being built, update project failed.");
+            PROJECT_TEAM_ID_MODIFY_ERROR);
+        ApiAlertException.throwIfTrue(
+            project.getBuildState().equals(BuildStateEnum.BUILDING.get()),
+            PROJECT_BUILDING_STATE);
         updateInternal(projectParam, project);
         if (project.isHttpRepositoryUrl()) {
             if (StringUtils.isBlank(projectParam.getUserName())) {
@@ -150,8 +152,7 @@ public boolean update(Project projectParam) {
                         project.setPassword(encrypt);
                         project.setSalt(salt);
                     } catch (Exception e) {
-                        log.error("The project github/gitlab password encrypt failed");
-                        throw new ApiAlertException(e);
+                        ApiAlertException.throwException(PROJECT_GIT_PASSWORD_DECRYPT_FAILED, e);
                     }
                 }
             }
@@ -232,9 +233,8 @@ public void build(Long id) throws Exception {
         Long currentBuildCount = this.baseMapper.getBuildingCount();
         ApiAlertException.throwIfTrue(
             maxProjectBuildNum > -1 && currentBuildCount > maxProjectBuildNum,
-            String.format(
-                "The number of running Build projects exceeds the maximum number: %d of max-build-num",
-                maxProjectBuildNum));
+            PROJECT_RUNNING_BUILDING_EXCEED_LIMIT,
+            maxProjectBuildNum);
         Project project = getById(id);
         this.baseMapper.updateBuildState(project.getId(), BuildStateEnum.BUILDING.get());
         String logPath = getBuildLogPath(id);
@@ -287,7 +287,7 @@ public List listModules(Long id) {
     @Override
     public List listJars(Project project) {
         ApiAlertException.throwIfNull(
-            project.getModule(), "Project module can't be null, please check.");
+            project.getModule(), PROJECT_MODULE_NULL_ERROR);
         File projectModuleDir = new File(project.getDistHome(), project.getModule());
         return Arrays.stream(Objects.requireNonNull(projectModuleDir.listFiles()))
             .map(File::getName)
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
index 6ca08054e2..89579aee9e 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
@@ -24,6 +24,7 @@
 import org.apache.streampark.common.util.Utils;
 import org.apache.streampark.console.base.domain.RestRequest;
 import org.apache.streampark.console.base.domain.RestResponse;
+import org.apache.streampark.console.base.enums.ResourceMessageStatus;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.base.exception.ApiDetailException;
 import org.apache.streampark.console.base.mybatis.pager.MybatisPager;
@@ -85,6 +86,16 @@
 import java.util.jar.JarFile;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_CONNECTOR_NULL_ERROR;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_FLINK_APP_JAR_EMPTY_ERROR;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_FLINK_JAR_NULL;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_MULTI_FILE_ERROR;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_NAME_MODIFY_ERROR;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_NAME_NULL_FAILED;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_NOT_EXIST_ERROR;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_POM_JAR_EMPTY;
+import static org.apache.streampark.console.base.enums.ResourceMessageStatus.RESOURCE_STILL_USE_DELETE_ERROR;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -124,7 +135,7 @@ public boolean existsByUserId(Long userId) {
     @Override
     public void addResource(Resource resource) throws Exception {
         String resourceStr = resource.getResource();
-        ApiAlertException.throwIfNull(resourceStr, "Please add pom or jar resource.");
+        ApiAlertException.throwIfNull(resourceStr, RESOURCE_POM_JAR_EMPTY);
 
         // check
         Dependency dependency = Dependency.toDependency(resourceStr);
@@ -135,12 +146,12 @@ public void addResource(Resource resource) throws Exception {
         if (resource.getResourceType() == ResourceTypeEnum.CONNECTOR) {
             processConnectorResource(resource);
         } else {
-            ApiAlertException.throwIfNull(resource.getResourceName(), "The resourceName is required.");
+            ApiAlertException.throwIfNull(resource.getResourceName(), RESOURCE_NAME_NULL_FAILED);
         }
 
         ApiAlertException.throwIfNotNull(
             this.findByResourceName(resource.getTeamId(), resource.getResourceName()),
-            "the resource %s already exists, please check.",
+            ResourceMessageStatus.RESOURCE_ALREADY_ERROR,
             resource.getResourceName());
 
         if (!jars.isEmpty()) {
@@ -157,7 +168,7 @@ public void addResource(Resource resource) throws Exception {
 
     private static void processConnectorResource(Resource resource) throws JsonProcessingException {
         String connector = resource.getConnector();
-        ApiAlertException.throwIfNull(connector, "the flink connector is null.");
+        ApiAlertException.throwIfNull(connector, FLINK_ENV_CONNECTOR_NULL_ERROR);
         FlinkConnector connectorResource = JacksonUtils.read(connector, FlinkConnector.class);
         resource.setResourceName(connectorResource.getFactoryIdentifier());
         Optional.ofNullable(connectorResource.getRequiredOptions())
@@ -172,12 +183,12 @@ private static void processConnectorResource(Resource resource) throws JsonProce
 
     private void check(Resource resource, List jars, List poms) {
         ApiAlertException.throwIfTrue(
-            jars.isEmpty() && poms.isEmpty(), "Please add pom or jar resource.");
+            jars.isEmpty() && poms.isEmpty(), RESOURCE_POM_JAR_EMPTY);
         ApiAlertException.throwIfTrue(
             resource.getResourceType() == ResourceTypeEnum.FLINK_APP && jars.isEmpty(),
-            "Please upload jar for Flink_App resource");
+            RESOURCE_FLINK_APP_JAR_EMPTY_ERROR);
         ApiAlertException.throwIfTrue(
-            jars.size() + poms.size() > 1, "Please do not add multi dependency at one time.");
+            jars.size() + poms.size() > 1, RESOURCE_MULTI_FILE_ERROR);
     }
 
     @Override
@@ -197,7 +208,7 @@ public void updateResource(Resource resource) {
         if (resourceName != null) {
             ApiAlertException.throwIfFalse(
                 resourceName.equals(findResource.getResourceName()),
-                "Please make sure the resource name is not changed.");
+                RESOURCE_NAME_MODIFY_ERROR);
 
             Dependency dependency = Dependency.toDependency(resource.getResource());
             if (!dependency.getJar().isEmpty()) {
@@ -241,7 +252,7 @@ public List listByTeamId(Long teamId) {
     /**
      * change resource owner
      *
-     * @param userId original user id
+     * @param userId       original user id
      * @param targetUserId target user id
      */
     @Override
@@ -346,7 +357,7 @@ private RestResponse checkFlinkApp(Resource resourceParam) {
             return buildExceptResponse(e, 1);
         }
         ApiAlertException.throwIfTrue(
-            jarFile == null || !jarFile.exists(), "flink app jar must exist.");
+            jarFile == null || !jarFile.exists(), RESOURCE_FLINK_JAR_NULL);
         Map resp = new HashMap<>(0);
         resp.put(STATE, 0);
         if (jarFile.getName().endsWith(Constant.PYTHON_SUFFIX)) {
@@ -453,16 +464,16 @@ private void transferTeamResource(Long teamId, String resourcePath) {
         File localJar = new File(resourcePath);
         File teamUploadJar = new File(teamUploads, localJar.getName());
         ApiAlertException.throwIfFalse(
-            localJar.exists(), "Missing file: " + resourcePath + ", please upload again");
+            localJar.exists(), RESOURCE_NOT_EXIST_ERROR);
         FsOperator.lfs()
             .upload(localJar.getAbsolutePath(), teamUploadJar.getAbsolutePath(), false, true);
     }
 
     private void checkOrElseAlert(Resource resource) {
-        ApiAlertException.throwIfNull(resource, "The resource does not exist.");
+        ApiAlertException.throwIfNull(resource, RESOURCE_NOT_EXIST_ERROR);
 
         ApiAlertException.throwIfTrue(
-            isDependByApplications(resource), "The resource is still in use, cannot be removed.");
+            isDependByApplications(resource), RESOURCE_STILL_USE_DELETE_ERROR);
     }
 
     private boolean isDependByApplications(Resource resource) {
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavepointServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavepointServiceImpl.java
index af0a3a36ab..09e8b3f8b1 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavepointServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavepointServiceImpl.java
@@ -81,6 +81,7 @@
 import static org.apache.flink.configuration.CheckpointingOptions.MAX_RETAINED_CHECKPOINTS;
 import static org.apache.flink.configuration.CheckpointingOptions.SAVEPOINT_DIRECTORY;
 import static org.apache.streampark.common.util.PropertiesUtils.extractDynamicPropertiesAsJava;
+import static org.apache.streampark.console.base.enums.CommonStatus.UNKNOWN_ERROR;
 import static org.apache.streampark.console.core.enums.CheckPointTypeEnum.CHECKPOINT;
 
 @Slf4j
@@ -276,7 +277,7 @@ private String getFinalSavepointDir(@Nullable String savepointPath, Application
             try {
                 result = this.getSavePointPath(application);
             } catch (Exception e) {
-                throw new ApiAlertException(
+                ApiAlertException.throwException(UNKNOWN_ERROR,
                     "Error in getting savepoint path for triggering savepoint for app "
                         + application.getId(),
                     e);
@@ -387,7 +388,9 @@ public String getSavepointFromDeployLayer(Application application) throws JsonPr
         return config.isEmpty() ? null : config.get(SAVEPOINT_DIRECTORY.key());
     }
 
-    /** Try get the 'state.checkpoints.num-retained' from the dynamic properties. */
+    /**
+     * Try get the 'state.checkpoints.num-retained' from the dynamic properties.
+     */
     private Optional tryGetChkNumRetainedFromDynamicProps(String dynamicProps) {
         String rawCfgValue = extractDynamicPropertiesAsJava(dynamicProps).get(MAX_RETAINED_CHECKPOINTS.key());
         if (StringUtils.isBlank(rawCfgValue)) {
@@ -407,7 +410,9 @@ private Optional tryGetChkNumRetainedFromDynamicProps(String dynamicPro
         return Optional.empty();
     }
 
-    /** Try get the 'state.checkpoints.num-retained' from the flink env. */
+    /**
+     * Try get the 'state.checkpoints.num-retained' from the flink env.
+     */
     private int getChkNumRetainedFromFlinkEnv(
                                               @Nonnull FlinkEnv flinkEnv, @Nonnull Application application) {
         String flinkConfNumRetained = flinkEnv.convertFlinkYamlAsMap().get(MAX_RETAINED_CHECKPOINTS.key());
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SettingServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SettingServiceImpl.java
index 0ee24f259c..633328f149 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SettingServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SettingServiceImpl.java
@@ -167,7 +167,7 @@ public ResponseResult checkDocker(DockerConfig dockerConfig) {
         } catch (Exception e) {
             if (e.getMessage().contains("LastErrorException")) {
                 result.setStatus(400);
-            } else if (e.getMessage().contains("Status 401")) {
+            } else if (e.getMessage().contains("Status 401")) {
                 result.setStatus(500);
                 result.setMsg(
                     "Failed to validate Docker registry, unauthorized: incorrect username or password ");
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
index b9dd40e1ca..828c28871f 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
@@ -91,6 +91,11 @@
 import java.util.concurrent.ExecutorService;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_BUILD_RESOURCE_GROUP_FAILED;
+import static org.apache.streampark.console.base.enums.ApplicationMessageStatus.APP_JOB_IS_INVALID;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FILE_OR_DIR_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FLINK_VERSION_NOT_FOUND;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FLINK_VERSION_UNSUPPORT;
 import static org.apache.streampark.console.core.enums.OperationEnum.RELEASE;
 
 @Service
@@ -136,7 +141,7 @@ public class SparkAppBuildPipeServiceImpl
     /**
      * Build application. This is an async call method.
      *
-     * @param appId application id
+     * @param appId      application id
      * @param forceBuild forced start pipeline or not
      * @return Whether the pipeline was successfully started
      */
@@ -249,10 +254,8 @@ public void onStart(PipelineSnapshot snapshot) {
                             for (String jar : app.getDependencyObject().getJar()) {
                                 File localJar = new File(WebUtils.getAppTempDir(), jar);
                                 File uploadJar = new File(localUploads, jar);
-                                if (!localJar.exists() && !uploadJar.exists()) {
-                                    throw new ApiAlertException(
-                                        "Missing file: " + jar + ", please upload again");
-                                }
+                                ApiAlertException.throwIfTrue(!localJar.exists() && !uploadJar.exists(),
+                                    FLINK_ENV_FILE_OR_DIR_NOT_EXIST, jar);
                                 if (localJar.exists()) {
                                     checkOrElseUploadJar(
                                         FsOperator.lfs(), localJar, uploadJar.getAbsolutePath(),
@@ -336,7 +339,7 @@ public void onFinish(PipelineSnapshot snapshot, BuildResult result) {
     /**
      * check the build environment
      *
-     * @param appId application id
+     * @param appId      application id
      * @param forceBuild forced start pipeline or not
      */
     private void checkBuildEnv(Long appId, boolean forceBuild) {
@@ -346,20 +349,22 @@ private void checkBuildEnv(Long appId, boolean forceBuild) {
         SparkEnv env = sparkEnvService.getById(app.getVersionId());
         boolean checkVersion = env.getSparkVersion().checkVersion(false);
         ApiAlertException.throwIfFalse(
-            checkVersion, "Unsupported flink version:" + env.getSparkVersion().version());
+            checkVersion, FLINK_ENV_FLINK_VERSION_UNSUPPORT, env.getSparkVersion().version());
 
         // 2) check env
         boolean envOk = applicationInfoService.checkEnv(app);
         ApiAlertException.throwIfFalse(
-            envOk, "Check flink env failed, please check the flink version of this job");
+            envOk, FLINK_ENV_FLINK_VERSION_NOT_FOUND);
 
         // 3) Whether the application can currently start a new building progress
         ApiAlertException.throwIfTrue(
             !forceBuild && !allowToBuildNow(appId),
-            "The job is invalid, or the job cannot be built while it is running");
+            APP_JOB_IS_INVALID);
     }
 
-    /** create building pipeline instance */
+    /**
+     * create building pipeline instance
+     */
     private BuildPipeline createPipelineInstance(@Nonnull SparkApplication app) {
         SparkEnv sparkEnv = sparkEnvService.getByIdOrDefault(app.getVersionId());
         String sparkUserJar = retrieveSparkUserJar(sparkEnv, app);
@@ -534,7 +539,7 @@ private DependencyInfo getMergedDependencyInfo(SparkApplication application) {
                                             resourceService.getById(
                                                 resourceIdInGroup)));
                             } catch (JsonProcessingException e) {
-                                throw new ApiAlertException("Parse resource group failed.", e);
+                                ApiAlertException.throwException(APP_BUILD_RESOURCE_GROUP_FAILED, e);
                             }
                         }
                     });
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkEnvServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkEnvServiceImpl.java
index 739c6a6058..27a9318f1b 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkEnvServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkEnvServiceImpl.java
@@ -37,6 +37,9 @@
 import java.io.IOException;
 import java.util.Date;
 
+import static org.apache.streampark.console.base.enums.SparkMessageStatus.SPARK_ENV_HOME_IS_DEFAULT_SET;
+import static org.apache.streampark.console.base.enums.SparkMessageStatus.SPARK_ENV_HOME_NULL_ERROR;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -93,7 +96,7 @@ public void removeById(Long id) {
         Long count = this.baseMapper.selectCount(null);
         ApiAlertException.throwIfFalse(
             !(count > 1 && sparkEnv.getIsDefault()),
-            "The spark home is set as default, please change it first.");
+            SPARK_ENV_HOME_IS_DEFAULT_SET);
 
         this.baseMapper.deleteById(id);
     }
@@ -150,7 +153,7 @@ public void validity(Long id) {
     private void checkOrElseAlert(SparkEnv sparkEnv) {
 
         // 1.check exists
-        ApiAlertException.throwIfNull(sparkEnv, "The spark home does not exist, please check.");
+        ApiAlertException.throwIfNull(sparkEnv, SPARK_ENV_HOME_NULL_ERROR);
 
         // todo : To be developed
         // 2.check if it is being used by any spark cluster
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java
index 4264aa3555..7e767c5f79 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java
@@ -52,6 +52,12 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.VariableMessageStatus.SYSTEM_VARIABLE_ALREADY_EXIST;
+import static org.apache.streampark.console.base.enums.VariableMessageStatus.SYSTEM_VARIABLE_CODE_MODIFY_FAILED;
+import static org.apache.streampark.console.base.enums.VariableMessageStatus.SYSTEM_VARIABLE_EXIST_USE;
+import static org.apache.streampark.console.base.enums.VariableMessageStatus.SYSTEM_VARIABLE_ID_NULL_FAILED;
+import static org.apache.streampark.console.base.enums.VariableMessageStatus.SYSTEM_VARIABLE_NOT_EXIST;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -76,7 +82,7 @@ public void createVariable(Variable variable) {
 
         ApiAlertException.throwIfTrue(
             this.findByVariableCode(variable.getTeamId(), variable.getVariableCode()) != null,
-            "The variable code already exists.");
+            SYSTEM_VARIABLE_ALREADY_EXIST);
 
         variable.setCreatorId(ServiceHelper.getUserId());
         this.save(variable);
@@ -85,7 +91,7 @@ public void createVariable(Variable variable) {
     @Override
     public void remove(Variable variable) {
         ApiAlertException.throwIfTrue(
-            isDependByApplications(variable), "The variable is actually used.");
+            isDependByApplications(variable), SYSTEM_VARIABLE_EXIST_USE);
         this.removeById(variable);
     }
 
@@ -119,12 +125,12 @@ public IPage getDependAppsPage(Variable variable, RestRequest reque
     @Override
     public void updateVariable(Variable variable) {
         // region update variable
-        ApiAlertException.throwIfNull(variable.getId(), "The variable id cannot be null.");
+        ApiAlertException.throwIfNull(variable.getId(), SYSTEM_VARIABLE_ID_NULL_FAILED);
         Variable findVariable = this.baseMapper.selectById(variable.getId());
-        ApiAlertException.throwIfNull(findVariable, "The variable does not exist.");
+        ApiAlertException.throwIfNull(findVariable, SYSTEM_VARIABLE_NOT_EXIST);
         ApiAlertException.throwIfFalse(
             findVariable.getVariableCode().equals(variable.getVariableCode()),
-            "The variable code cannot be updated.");
+            SYSTEM_VARIABLE_CODE_MODIFY_FAILED);
         this.baseMapper.updateById(variable);
         // endregion
 
@@ -177,7 +183,7 @@ public List listByTeamId(Long teamId, String keyword) {
      * Replace variable with defined variable codes.
      *
      * @param teamId
-     * @param mixed Text with placeholders, e.g. "--cluster ${kafka.cluster}"
+     * @param mixed  Text with placeholders, e.g. "--cluster ${kafka.cluster}"
      * @return
      */
     @Override
@@ -238,7 +244,7 @@ private List getDependApplicationsByCode(Variable variable) {
      * Determine whether variableCode is dependent on mixed.
      *
      * @param variableCode Variable code, e.g. "kafka.cluster"
-     * @param mixed Text with placeholders, e.g. "--cluster ${kafka.cluster}"
+     * @param mixed        Text with placeholders, e.g. "--cluster ${kafka.cluster}"
      * @return If mixed can match the variableCode, return true, otherwise return false
      */
     private boolean isDepend(String variableCode, String mixed) {
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java
index b87c4ca441..42dacb0c88 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java
@@ -19,8 +19,8 @@
 
 import org.apache.streampark.common.enums.FlinkExecutionMode;
 import org.apache.streampark.common.enums.SparkExecutionMode;
-import org.apache.streampark.common.util.AssertUtils;
 import org.apache.streampark.console.base.domain.RestRequest;
+import org.apache.streampark.console.base.enums.YarnMessageStatus;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.base.mybatis.pager.MybatisPager;
 import org.apache.streampark.console.core.bean.ResponseResult;
@@ -52,7 +52,19 @@
 import java.util.List;
 import java.util.stream.Collectors;
 
-import static org.apache.streampark.console.core.util.YarnQueueLabelExpression.ERR_FORMAT_HINTS;
+import static org.apache.streampark.console.base.enums.CommonStatus.APPLICATION;
+import static org.apache.streampark.console.base.enums.CommonStatus.FLINK_CLUSTERS;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_NULL_ERROR;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_ID_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_AVAILABLE;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_EXIST;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_FORMAT;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_QUERY_PARAMS_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_QUERY_PARAMS_TEAM_ID_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_USED_FORMAT;
 import static org.apache.streampark.console.core.util.YarnQueueLabelExpression.isValid;
 
 @Slf4j
@@ -63,11 +75,6 @@ public class YarnQueueServiceImpl extends ServiceImpl getPage(YarnQueue yarnQueue, RestRequest request) {
-        AssertUtils.notNull(yarnQueue, "Yarn queue query params mustn't be null.");
-        AssertUtils.notNull(
-            yarnQueue.getTeamId(), "Team id of yarn queue query params mustn't be null.");
+        ApiAlertException.throwIfNull(yarnQueue, YARN_QUEUE_QUERY_PARAMS_NULL);
+        ApiAlertException.throwIfNull(yarnQueue.getTeamId(), YARN_QUEUE_QUERY_PARAMS_TEAM_ID_NULL);
         Page page = MybatisPager.getPage(request);
         return this.baseMapper.selectPage(page, yarnQueue);
     }
@@ -91,21 +97,21 @@ public IPage getPage(YarnQueue yarnQueue, RestRequest request) {
     @Override
     public ResponseResult checkYarnQueue(YarnQueue yarnQueue) {
 
-        AssertUtils.notNull(yarnQueue, "Yarn queue mustn't be empty.");
-        AssertUtils.notNull(yarnQueue.getTeamId(), "Team id mustn't be null.");
+        ApiAlertException.throwIfNull(yarnQueue, YARN_QUEUE_NULL);
+        ApiAlertException.throwIfNull(yarnQueue.getTeamId(), SYSTEM_TEAM_ID_NULL_ERROR);
 
         ResponseResult responseResult = new ResponseResult<>();
 
         if (StringUtils.isBlank(yarnQueue.getQueueLabel())) {
             responseResult.setStatus(3);
-            responseResult.setMsg(QUEUE_EMPTY_HINT);
+            responseResult.setMsg(YARN_QUEUE_LABEL_NULL.getMessage());
             return responseResult;
         }
 
         boolean valid = isValid(yarnQueue.getQueueLabel());
         if (!valid) {
             responseResult.setStatus(2);
-            responseResult.setMsg(ERR_FORMAT_HINTS);
+            responseResult.setMsg(YARN_QUEUE_LABEL_FORMAT.getMessage());
             return responseResult;
         }
 
@@ -113,18 +119,33 @@ public ResponseResult checkYarnQueue(YarnQueue yarnQueue) {
 
         if (existed) {
             responseResult.setStatus(1);
-            responseResult.setMsg(QUEUE_EXISTED_IN_TEAM_HINT);
+            responseResult.setMsg(YARN_QUEUE_LABEL_EXIST.getMessage());
             return responseResult;
         }
         responseResult.setStatus(0);
-        responseResult.setMsg("The queue label is available.");
+        responseResult.setMsg(YARN_QUEUE_LABEL_AVAILABLE.getMessage());
         return responseResult;
     }
 
     @Override
     public boolean createYarnQueue(YarnQueue yarnQueue) {
         ResponseResult checkResponse = checkYarnQueue(yarnQueue);
-        ApiAlertException.throwIfFalse(checkResponse.getStatus() == 0, checkResponse.getMsg());
+
+        YarnMessageStatus status = null;
+        switch (checkResponse.getStatus()) {
+            case 1:
+                status = YARN_QUEUE_LABEL_EXIST;
+                break;
+            case 2:
+                status = YARN_QUEUE_LABEL_FORMAT;
+                break;
+            case 3:
+                status = YARN_QUEUE_LABEL_NULL;
+                break;
+            default:
+                status = YARN_QUEUE_LABEL_AVAILABLE;
+        }
+        ApiAlertException.throwIfFalse(checkResponse.getStatus() == 0, status);
         return save(yarnQueue);
     }
 
@@ -147,7 +168,7 @@ public void updateYarnQueue(YarnQueue yarnQueue) {
         }
 
         // 3 update yarnQueue
-        ApiAlertException.throwIfFalse(isValid(yarnQueue.getQueueLabel()), ERR_FORMAT_HINTS);
+        ApiAlertException.throwIfFalse(isValid(yarnQueue.getQueueLabel()), YARN_QUEUE_LABEL_FORMAT);
 
         checkNotReferencedByApplications(
             queueFromDB.getTeamId(), queueFromDB.getQueueLabel(), "updating");
@@ -176,19 +197,19 @@ public void remove(YarnQueue yarnQueue) {
      * mode or yarn-perjob mode.
      *
      * @param executionModeEnum execution mode.
-     * @param queueLabel queueLabel expression.
+     * @param queueLabel        queueLabel expression.
      */
     @Override
     public void checkQueueLabel(FlinkExecutionMode executionModeEnum, String queueLabel) {
         if (FlinkExecutionMode.isYarnMode(executionModeEnum)) {
-            ApiAlertException.throwIfFalse(isValid(queueLabel, true), ERR_FORMAT_HINTS);
+            ApiAlertException.throwIfFalse(isValid(queueLabel, true), YARN_QUEUE_LABEL_FORMAT);
         }
     }
 
     @Override
     public void checkQueueLabel(SparkExecutionMode executionModeEnum, String queueLabel) {
         if (SparkExecutionMode.isYarnMode(executionModeEnum)) {
-            ApiAlertException.throwIfFalse(isValid(queueLabel, true), ERR_FORMAT_HINTS);
+            ApiAlertException.throwIfFalse(isValid(queueLabel, true), YARN_QUEUE_LABEL_FORMAT);
         }
     }
 
@@ -216,10 +237,10 @@ public boolean existByTeamIdQueueLabel(Long teamId, String queueLabel) {
 
     @VisibleForTesting
     public YarnQueue getYarnQueueByIdWithPreconditions(YarnQueue yarnQueue) {
-        AssertUtils.notNull(yarnQueue, "Yarn queue mustn't be null.");
-        AssertUtils.notNull(yarnQueue.getId(), "Yarn queue id mustn't be null.");
+        ApiAlertException.throwIfNull(yarnQueue, YARN_QUEUE_NULL);
+        ApiAlertException.throwIfNull(yarnQueue.getId(), YARN_QUEUE_ID_NULL);
         YarnQueue queueFromDB = getById(yarnQueue.getId());
-        ApiAlertException.throwIfNull(queueFromDB, "The queue doesn't exist.");
+        ApiAlertException.throwIfNull(queueFromDB, YARN_QUEUE_NOT_EXIST);
         return queueFromDB;
     }
 
@@ -233,7 +254,7 @@ public void checkNotReferencedByFlinkClusters(
             .collect(Collectors.toList());
         ApiAlertException.throwIfFalse(
             CollectionUtils.isEmpty(clustersReferenceYarnQueueLabel),
-            String.format(QUEUE_USED_FORMAT, "flink clusters", operation));
+            YARN_QUEUE_USED_FORMAT, FLINK_CLUSTERS, operation);
     }
 
     @VisibleForTesting
@@ -255,6 +276,6 @@ public void checkNotReferencedByApplications(
             .collect(Collectors.toList());
         ApiAlertException.throwIfFalse(
             CollectionUtils.isEmpty(appsReferenceQueueLabel),
-            String.format(QUEUE_USED_FORMAT, "applications", operation));
+            YARN_QUEUE_USED_FORMAT, APPLICATION, operation);
     }
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/ServiceHelper.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/ServiceHelper.java
index 252b7364e1..9ea3c0be4c 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/ServiceHelper.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/ServiceHelper.java
@@ -35,6 +35,10 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_FILE_OR_DIR_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_SQL_CLIENT_JAR_MULTIPLE_EXIST;
+import static org.apache.streampark.console.base.enums.FlinkMessageStatus.FLINK_ENV_SQL_CLIENT_JAR_NOT_EXIST;
+
 public class ServiceHelper {
 
     private static String flinkSqlClientJar = null;
@@ -62,7 +66,7 @@ public static String getFlinkSqlClientJar(FlinkEnv flinkEnv) {
         if (flinkSqlClientJar == null) {
             File localClient = WebUtils.getAppClientDir();
             ApiAlertException.throwIfFalse(
-                localClient.exists(), "[StreamPark] " + localClient + " no exists. please check.");
+                localClient.exists(), FLINK_ENV_FILE_OR_DIR_NOT_EXIST, localClient);
 
             String regex = String.format("streampark-flink-sqlclient_%s-.*\\.jar", flinkEnv.getScalaVersion());
 
@@ -72,11 +76,11 @@ public static String getFlinkSqlClientJar(FlinkEnv flinkEnv) {
 
             ApiAlertException.throwIfTrue(
                 jars.isEmpty(),
-                "[StreamPark] can't found streampark-flink-sqlclient jar in " + localClient);
+                FLINK_ENV_SQL_CLIENT_JAR_NOT_EXIST, localClient);
 
             ApiAlertException.throwIfTrue(
                 jars.size() > 1,
-                "[StreamPark] found multiple streampark-flink-sqlclient jar in " + localClient);
+                FLINK_ENV_SQL_CLIENT_JAR_MULTIPLE_EXIST, localClient);
             flinkSqlClientJar = jars.get(0);
         }
         return flinkSqlClientJar;
@@ -86,7 +90,7 @@ public static String getSparkSqlClientJar(SparkEnv sparkEnv) {
         if (sparkSqlClientJar == null) {
             File localClient = WebUtils.getAppClientDir();
             ApiAlertException.throwIfFalse(
-                localClient.exists(), "[StreamPark] " + localClient + " no exists. please check.");
+                localClient.exists(), FLINK_ENV_FILE_OR_DIR_NOT_EXIST, localClient);
             List jars = Arrays.stream(Objects.requireNonNull(localClient.list()))
                 .filter(
                     x -> x.matches(
@@ -96,11 +100,11 @@ public static String getSparkSqlClientJar(SparkEnv sparkEnv) {
 
             ApiAlertException.throwIfTrue(
                 jars.isEmpty(),
-                "[StreamPark] can't found streampark-flink-sqlclient jar in " + localClient);
+                FLINK_ENV_SQL_CLIENT_JAR_NOT_EXIST, localClient);
 
             ApiAlertException.throwIfTrue(
                 jars.size() > 1,
-                "[StreamPark] found multiple streampark-flink-sqlclient jar in " + localClient);
+                FLINK_ENV_SQL_CLIENT_JAR_MULTIPLE_EXIST, localClient);
 
             sparkSqlClientJar = jars.get(0);
         }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/YarnQueueLabelExpression.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/YarnQueueLabelExpression.java
index 101fa20089..d01b502c26 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/YarnQueueLabelExpression.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/util/YarnQueueLabelExpression.java
@@ -22,8 +22,6 @@
 
 import org.apache.commons.lang3.StringUtils;
 
-import com.google.common.annotations.VisibleForTesting;
-
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
@@ -32,6 +30,8 @@
 import java.util.Optional;
 import java.util.regex.Pattern;
 
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_FORMAT;
+
 /** Util class for parsing and checking Yarn queue & Label */
 public class YarnQueueLabelExpression {
 
@@ -39,10 +39,6 @@ public class YarnQueueLabelExpression {
 
     private static final String REGEX = "[a-zA-Z0-9_\\-]+";
 
-    @VisibleForTesting
-    public static final String ERR_FORMAT_HINTS =
-        "Yarn queue label format should be in format {queue} or {queue}@{label1,label2}";
-
     private static final Pattern QUEUE_LABEL_PATTERN = Pattern
         .compile(String.format("^(%s)(.%s)*(%s(%s)(,%s)*)?$", REGEX, REGEX, AT, REGEX, REGEX));
 
@@ -85,7 +81,7 @@ public static boolean isValid(String queueLabel) {
 
     // Visible for test.
     public static YarnQueueLabelExpression of(@Nonnull String queueLabelExpr) {
-        ApiAlertException.throwIfFalse(isValid(queueLabelExpr, false), ERR_FORMAT_HINTS);
+        ApiAlertException.throwIfFalse(isValid(queueLabelExpr, false), YARN_QUEUE_LABEL_FORMAT);
         String[] strs = queueLabelExpr.split(AT);
         if (strs.length == 2) {
             return new YarnQueueLabelExpression(strs[0], strs[1]);
@@ -97,7 +93,7 @@ public static YarnQueueLabelExpression of(
                                               @Nonnull String queue, @Nullable String labelExpression) {
         YarnQueueLabelExpression queueLabelExpression = new YarnQueueLabelExpression(queue, labelExpression);
         ApiAlertException.throwIfFalse(
-            isValid(queueLabelExpression.toString(), false), ERR_FORMAT_HINTS);
+            isValid(queueLabelExpression.toString(), false), YARN_QUEUE_LABEL_FORMAT);
         return queueLabelExpression;
     }
 
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
index 4d9b501a79..a3328b9cd1 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
@@ -41,6 +41,9 @@
 
 import java.util.List;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SSO_CONFIG_PRINCIPAL_NAME_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SSO_SINGLE_SIGN_NOT_AVAILABLE;
+
 @Slf4j
 @Controller
 @RequestMapping("sso")
@@ -71,7 +74,7 @@ public RestResponse token() throws Exception {
         // Check SSO enable status
         ApiAlertException.throwIfTrue(
             !ssoEnable,
-            "Single Sign On (SSO) is not available, please contact the administrator to enable");
+            SSO_SINGLE_SIGN_NOT_AVAILABLE);
 
         Subject subject = SecurityUtils.getSubject();
         PrincipalCollection principals = subject.getPrincipals();
@@ -87,7 +90,7 @@ public RestResponse token() throws Exception {
 
         // Check Principal name
         ApiAlertException.throwIfNull(
-            principal.getName(), "Please configure the correct Principal Name Attribute");
+            principal.getName(), SSO_CONFIG_PRINCIPAL_NAME_ERROR);
 
         User user = authenticator.authenticate(principal.getName(), null, LoginTypeEnum.SSO);
 
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java
index 262534daf5..5ca0c603e8 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java
@@ -48,6 +48,8 @@
 import java.util.List;
 import java.util.Map;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_CURRENT_LOGIN_NULL_SET_TEAM_FAILED;
+
 @Slf4j
 @Validated
 @RestController
@@ -130,7 +132,7 @@ public RestResponse setTeam(Long teamId) {
             return RestResponse.fail(ResponseCode.CODE_FAIL_ALERT, "TeamId is invalid, set team failed.");
         }
         User user = ServiceHelper.getLoginUser();
-        ApiAlertException.throwIfNull(user, "Current login user is null, set team failed.");
+        ApiAlertException.throwIfNull(user, SYSTEM_USER_CURRENT_LOGIN_NULL_SET_TEAM_FAILED);
         // 1) set the latest team
         userService.setLastTeam(teamId, user.getUserId());
 
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java
index b315014adc..3e3ca24ecc 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java
@@ -30,6 +30,13 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_ALLOW_LOGIN_TYPE;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_LOGIN_PASSWORD_INCORRECT;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_LOGIN_TYPE_CONSTRAINTS;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_LOGIN_TYPE_NOT_SUPPORT;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_LOGIN_TYPE_NULL;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_NOT_EXIST;
+
 @Component
 public class AuthenticatorImpl implements Authenticator {
 
@@ -41,7 +48,7 @@ public class AuthenticatorImpl implements Authenticator {
     @Override
     public User authenticate(String username, String password, LoginTypeEnum loginType) throws Exception {
         ApiAlertException.throwIfNull(
-            loginType, "the login type is null");
+            loginType, SYSTEM_USER_LOGIN_TYPE_NULL);
 
         switch (loginType) {
             case PASSWORD:
@@ -51,26 +58,26 @@ public User authenticate(String username, String password, LoginTypeEnum loginTy
             case SSO:
                 return ssoAuthenticate(username);
             default:
-                throw new ApiAlertException(
-                    String.format("the login type [%s] is not supported.", loginType));
+                return ApiAlertException.throwException(SYSTEM_USER_LOGIN_TYPE_NOT_SUPPORT, loginType);
         }
     }
 
     private User passwordAuthenticate(String username, String password) {
         User user = usersService.getByUsername(username);
 
-        ApiAlertException.throwIfNull(user, String.format("User [%s] does not exist", username));
+        ApiAlertException.throwIfNull(user, SYSTEM_USER_NOT_EXIST, username);
 
         ApiAlertException.throwIfTrue(
             user.getLoginType() != LoginTypeEnum.PASSWORD,
-            "user [%s] can not login with PASSWORD",
-            username);
+            SYSTEM_USER_ALLOW_LOGIN_TYPE,
+            username,
+            LoginTypeEnum.PASSWORD);
 
         String salt = user.getSalt();
         password = ShaHashUtils.encrypt(salt, password);
 
         ApiAlertException.throwIfFalse(
-            StringUtils.equals(user.getPassword(), password), "Incorrect password");
+            StringUtils.equals(user.getPassword(), password), SYSTEM_USER_LOGIN_PASSWORD_INCORRECT);
 
         return user;
     }
@@ -86,7 +93,7 @@ private User ldapAuthenticate(String username, String password) throws Exception
         if (user != null) {
             ApiAlertException.throwIfTrue(
                 user.getLoginType() != LoginTypeEnum.LDAP,
-                "user [%s] can only sign in with %s",
+                SYSTEM_USER_LOGIN_TYPE_CONSTRAINTS,
                 username,
                 user.getLoginType());
 
@@ -102,7 +109,7 @@ private User ssoAuthenticate(String username) throws Exception {
         if (user != null) {
             ApiAlertException.throwIfTrue(
                 user.getLoginType() != LoginTypeEnum.SSO,
-                "user [%s] can only sign in with %s",
+                SYSTEM_USER_LOGIN_TYPE_CONSTRAINTS,
                 username,
                 user.getLoginType());
             return user;
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/LdapService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/LdapService.java
index 6c4427cab4..1fb6355a19 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/LdapService.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/LdapService.java
@@ -37,6 +37,8 @@
 
 import java.util.Properties;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_LDAP_NOT_ENABLE;
+
 @Component
 @Configuration
 @Slf4j
@@ -74,7 +76,7 @@ public class LdapService {
      */
     public boolean ldapLogin(String userId, String userPwd) {
         ApiAlertException.throwIfFalse(
-            enable, "ldap is not enabled, Please check the configuration: ldap.enable");
+            enable, SYSTEM_LDAP_NOT_ENABLE);
         renderLdapEnv();
         try {
             NamingEnumeration results = getSearchResults(userId);
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MemberServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MemberServiceImpl.java
index fec9f83538..31f3eac197 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MemberServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MemberServiceImpl.java
@@ -17,8 +17,8 @@
 
 package org.apache.streampark.console.system.service.impl;
 
-import org.apache.streampark.common.util.AssertUtils;
 import org.apache.streampark.console.base.domain.RestRequest;
+import org.apache.streampark.console.base.enums.UserMessageStatus;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.base.mybatis.pager.MybatisPager;
 import org.apache.streampark.console.system.entity.Member;
@@ -43,6 +43,14 @@
 import java.util.List;
 import java.util.stream.Collectors;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.MEMBER_ID_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.MEMBER_TEAM_ID_CHANGE_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.MEMBER_USER_ID_CHANGE_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_ROLE_ID_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_CANNOT_NULL;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ID_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_NOT_EXIST;
+
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
 public class MemberServiceImpl extends ServiceImpl implements MemberService {
@@ -76,7 +84,7 @@ public void removeByTeamId(Long teamId) {
 
     @Override
     public IPage getPage(Member member, RestRequest request) {
-        ApiAlertException.throwIfNull(member.getTeamId(), "The team id is required.");
+        ApiAlertException.throwIfNull(member.getTeamId(), SYSTEM_TEAM_ID_CANNOT_NULL);
         Page page = MybatisPager.getPage(request);
         return baseMapper.selectPage(page, member);
     }
@@ -101,7 +109,7 @@ public Member getByTeamIdUserName(Long teamId, String userName) {
     }
 
     private Member findByUserId(Long teamId, Long userId) {
-        ApiAlertException.throwIfNull(teamId, "The team id is required.");
+        ApiAlertException.throwIfNull(teamId, SYSTEM_TEAM_ID_CANNOT_NULL);
         LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper()
             .eq(Member::getTeamId, teamId)
             .eq(Member::getUserId, userId);
@@ -118,15 +126,15 @@ public List listUserIdsByRoleId(Long roleId) {
     @Override
     public void createMember(Member member) {
         User user = userService.getByUsername(member.getUserName());
-        ApiAlertException.throwIfNull(user, "The username [%s] not found", member.getUserName());
+        ApiAlertException.throwIfNull(user, SYSTEM_USER_NOT_EXIST, member.getUserName());
 
         ApiAlertException.throwIfNull(
-            roleService.getById(member.getRoleId()), "The roleId [%s] not found", member.getRoleId());
+            roleService.getById(member.getRoleId()), SYSTEM_ROLE_ID_NOT_EXIST, member.getRoleId());
         Team team = teamService.getById(member.getTeamId());
-        ApiAlertException.throwIfNull(team, "The teamId [%s] not found", member.getTeamId());
+        ApiAlertException.throwIfNull(team, SYSTEM_TEAM_ID_NOT_EXIST, member.getTeamId());
         ApiAlertException.throwIfNotNull(
             findByUserId(member.getTeamId(), user.getUserId()),
-            "The user [%s] has been added the team [%s], please don't add it again.",
+            UserMessageStatus.MEMBER_USER_TEAM_ALREADY_ERROR,
             member.getUserName(),
             team.getTeamName());
 
@@ -139,7 +147,7 @@ public void createMember(Member member) {
     @Override
     public void remove(Long id) {
         Member member = this.getById(id);
-        ApiAlertException.throwIfNull(member, "The member [id=%s] not found", id);
+        ApiAlertException.throwIfNull(member, MEMBER_ID_NOT_EXIST, id);
         this.removeById(member);
         userService.clearLastTeam(member.getUserId(), member.getTeamId());
     }
@@ -147,13 +155,11 @@ public void remove(Long id) {
     @Override
     public void updateMember(Member member) {
         Member oldMember = this.getById(member.getId());
-        ApiAlertException.throwIfNull(oldMember, "The member [id=%s] not found", member.getId());
-        AssertUtils.state(
-            oldMember.getTeamId().equals(member.getTeamId()), "Team id cannot be changed.");
-        AssertUtils.state(
-            oldMember.getUserId().equals(member.getUserId()), "User id cannot be changed.");
+        ApiAlertException.throwIfNull(oldMember, MEMBER_ID_NOT_EXIST, member.getId());
+        ApiAlertException.throwIfFalse(oldMember.getTeamId().equals(member.getTeamId()), MEMBER_TEAM_ID_CHANGE_ERROR);
+        ApiAlertException.throwIfFalse(oldMember.getUserId().equals(member.getUserId()), MEMBER_USER_ID_CHANGE_ERROR);
         ApiAlertException.throwIfNull(
-            roleService.getById(member.getRoleId()), "The roleId [%s] not found", member.getRoleId());
+            roleService.getById(member.getRoleId()), SYSTEM_ROLE_ID_NOT_EXIST, member.getRoleId());
         oldMember.setRoleId(member.getRoleId());
         updateById(oldMember);
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/RoleServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/RoleServiceImpl.java
index d4c2af83bd..74bf0cea45 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/RoleServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/RoleServiceImpl.java
@@ -45,7 +45,9 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Optional;
+
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_ROLE_EXIST_USED_DELETE_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_ROLE_NOT_EXIST;
 
 @Slf4j
 @Service
@@ -82,17 +84,13 @@ public void createRole(Role role) {
 
     @Override
     public void removeById(Long roleId) {
-        Role role = Optional.ofNullable(this.getById(roleId))
-            .orElseThrow(
-                () -> new ApiAlertException(
-                    String.format("Role id [%s] not found. Delete role failed.",
-                        roleId)));
+        Role role = this.getById(roleId);
+        ApiAlertException.throwIfNull(role, SYSTEM_ROLE_NOT_EXIST);
         List userIdsByRoleId = memberService.listUserIdsByRoleId(roleId);
         ApiAlertException.throwIfFalse(
             CollectionUtils.isEmpty(userIdsByRoleId),
-            String.format(
-                "There are some users of role %s, delete role failed, please unbind it first.",
-                role.getRoleName()));
+            SYSTEM_ROLE_EXIST_USED_DELETE_ERROR,
+            role.getRoleName());
         super.removeById(roleId);
         this.roleMenuService.removeByRoleId(roleId);
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java
index 3c698611c3..04790906a9 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java
@@ -44,7 +44,15 @@
 import org.springframework.transaction.annotation.Transactional;
 
 import java.util.List;
-import java.util.Optional;
+
+import static org.apache.streampark.console.base.enums.CommonStatus.APPLICATION;
+import static org.apache.streampark.console.base.enums.CommonStatus.PROJECT;
+import static org.apache.streampark.console.base.enums.CommonStatus.VARIABLE;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_ALREADY_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_EXIST_MODULE_USE_DELETE_ERROR;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_NAME_CAN_NOT_CHANGE;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_TEAM_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_ID_NOT_EXIST;
 
 @Slf4j
 @Service
@@ -83,9 +91,8 @@ public void createTeam(Team team) {
         Team existedTeam = getByName(team.getTeamName());
         ApiAlertException.throwIfFalse(
             existedTeam == null,
-            String.format(
-                "Team name [%s] exists already. Create team failed. Please rename and try again.",
-                team.getTeamName()));
+            SYSTEM_TEAM_ALREADY_EXIST,
+            team.getTeamName());
         team.setId(null);
         this.save(team);
     }
@@ -95,22 +102,25 @@ public void removeById(Long teamId) {
         log.info("{} Proceed delete team[Id={}]", ServiceHelper.getLoginUser().getUsername(), teamId);
         Team team = this.getById(teamId);
 
-        ApiAlertException.throwIfNull(team, "The team[Id=%s] doesn't exist.", teamId);
+        ApiAlertException.throwIfNull(team, SYSTEM_TEAM_NOT_EXIST);
 
         ApiAlertException.throwIfTrue(
             applicationInfoService.existsByTeamId(teamId),
-            "Please delete the applications under the team[name=%s] first!",
-            team.getTeamName());
+            SYSTEM_TEAM_EXIST_MODULE_USE_DELETE_ERROR,
+            team.getTeamName(),
+            APPLICATION.getMessage());
 
         ApiAlertException.throwIfTrue(
             projectService.existsByTeamId(teamId),
-            "Please delete the projects under the team[name=%s] first!",
-            team.getTeamName());
+            SYSTEM_TEAM_EXIST_MODULE_USE_DELETE_ERROR,
+            team.getTeamName(),
+            PROJECT.getMessage());
 
         ApiAlertException.throwIfTrue(
             variableService.existsByTeamId(teamId),
-            "Please delete the variables under the team[name=%s] first!",
-            team.getTeamName());
+            SYSTEM_TEAM_EXIST_MODULE_USE_DELETE_ERROR,
+            team.getTeamName(),
+            VARIABLE.getMessage());
 
         memberService.removeByTeamId(teamId);
         userService.clearLastTeam(teamId);
@@ -119,23 +129,20 @@ public void removeById(Long teamId) {
 
     @Override
     public void updateTeam(Team team) {
-        Team oldTeam = Optional.ofNullable(this.getById(team.getId()))
-            .orElseThrow(
-                () -> new IllegalArgumentException(
-                    String.format("Team id [id=%s] not found", team.getId())));
+        Team oldTeam = this.getById(team.getId());
+        ApiAlertException.throwIfNull(oldTeam, SYSTEM_TEAM_NOT_EXIST);
         ApiAlertException.throwIfFalse(
             oldTeam.getTeamName().equals(team.getTeamName()),
-            "Team name can't be changed. Update team failed.");
+            SYSTEM_TEAM_NAME_CAN_NOT_CHANGE);
         oldTeam.setDescription(team.getDescription());
         updateById(oldTeam);
     }
 
     @Override
     public List listByUserId(Long userId) {
-        User user = Optional.ofNullable(userService.getById(userId))
-            .orElseThrow(
-                () -> new ApiAlertException(
-                    String.format("The userId [%s] not found.", userId)));
+        User user = userService.getById(userId);
+        ApiAlertException.throwIfNull(user, SYSTEM_USER_ID_NOT_EXIST, userId);
+
         // Admin has the permission for all teams.
         if (UserTypeEnum.ADMIN == user.getUserType()) {
             return this.list();
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java
index 3e03e23d43..d501cd15df 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java
@@ -62,6 +62,10 @@
 import java.util.Map;
 import java.util.Set;
 
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_ID_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_OLD_PASSWORD_INCORRECT_UPDATE_PASSWORD_FAILED;
+import static org.apache.streampark.console.base.enums.UserMessageStatus.SYSTEM_USER_UPDATE_PASSWORD_FAILED;
+
 @Slf4j
 @Service
 @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
@@ -147,15 +151,15 @@ private boolean needTransferResource(User existsUser, User user) {
     @Override
     public void updatePassword(User userParam) {
         User user = getById(userParam.getUserId());
-        ApiAlertException.throwIfNull(user, "User is null. Update password failed.");
+        ApiAlertException.throwIfNull(user, SYSTEM_USER_ID_NOT_EXIST);
         ApiAlertException.throwIfFalse(
             user.getLoginType() == LoginTypeEnum.PASSWORD,
-            "Can only update password for user who sign in with PASSWORD");
+            SYSTEM_USER_UPDATE_PASSWORD_FAILED);
 
         String saltPassword = ShaHashUtils.encrypt(user.getSalt(), userParam.getOldPassword());
         ApiAlertException.throwIfFalse(
             StringUtils.equals(user.getPassword(), saltPassword),
-            "Old password error. Update password failed.");
+            SYSTEM_USER_OLD_PASSWORD_INCORRECT_UPDATE_PASSWORD_FAILED);
 
         String salt = ShaHashUtils.getRandomSalt();
         String password = ShaHashUtils.encrypt(salt, userParam.getPassword());
diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
index 2cf57ca9c7..632d63f4c2 100644
--- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
+++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
@@ -79,7 +79,7 @@ public abstract class SpringUnitTestBase {
 
     @BeforeAll
     public static void init(@TempDir File tempPath) throws IOException {
-        // Skip the EnvInitializer#run method by flag in System.properties.
+        // Skip the EnvInitializer#run method by flag in System.properties.
         LOG.info("Start mock EnvInitializer init.");
         String mockedHome = tempPath.getAbsolutePath();
         Path localWorkspace = Files.createDirectories(new File(mockedHome + "/localWorkspace").toPath());
diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java
index fb5655203e..604f3a674a 100644
--- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java
+++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java
@@ -34,11 +34,18 @@
 import org.junit.jupiter.api.parallel.Execution;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import java.text.MessageFormat;
 import java.util.stream.Collectors;
 
-import static org.apache.streampark.console.core.service.impl.YarnQueueServiceImpl.QUEUE_EMPTY_HINT;
-import static org.apache.streampark.console.core.service.impl.YarnQueueServiceImpl.QUEUE_USED_FORMAT;
-import static org.apache.streampark.console.core.util.YarnQueueLabelExpression.ERR_FORMAT_HINTS;
+import static org.apache.streampark.console.base.enums.CommonStatus.APPLICATION;
+import static org.apache.streampark.console.base.enums.CommonStatus.FLINK_CLUSTERS;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_ID_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_EXIST;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_FORMAT;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_NOT_EXIST;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_NULL;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_USED_FORMAT;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.junit.jupiter.api.parallel.ExecutionMode.SAME_THREAD;
@@ -115,13 +122,13 @@ void testCheckYarnQueue() {
         YarnQueue yarnQueue = mockYarnQueue(1L, "queue@");
         ResponseResult result = yarnQueueService.checkYarnQueue(yarnQueue);
         assertThat(result.getStatus()).isEqualTo(2);
-        assertThat(result.getMsg()).isEqualTo(ERR_FORMAT_HINTS);
+        assertThat(result.getMsg()).isEqualTo(YARN_QUEUE_LABEL_FORMAT.getMessage());
 
         // Test for error format with empty.
         yarnQueue.setQueueLabel("");
         result = yarnQueueService.checkYarnQueue(yarnQueue);
         assertThat(result.getStatus()).isEqualTo(3);
-        assertThat(result.getMsg()).isEqualTo(QUEUE_EMPTY_HINT);
+        assertThat(result.getMsg()).isEqualTo(YARN_QUEUE_LABEL_NULL.getMessage());
 
         // Test for existed
         yarnQueue.setQueueLabel("queue1@label1");
@@ -142,13 +149,12 @@ void testCheckYarnQueue() {
         yarnQueue.setQueueLabel("queue1@label1");
         result = yarnQueueService.checkYarnQueue(yarnQueue);
         assertThat(result.getStatus()).isEqualTo(1);
-        assertThat(result.getMsg()).isEqualTo(YarnQueueServiceImpl.QUEUE_EXISTED_IN_TEAM_HINT);
+        assertThat(result.getMsg()).isEqualTo(YARN_QUEUE_LABEL_EXIST.getMessage());
 
         // Test for normal cases.
         yarnQueue.setQueueLabel("q1");
         result = yarnQueueService.checkYarnQueue(yarnQueue);
         assertThat(result.getStatus()).isEqualTo(0);
-        assertThat(result.getMsg()).isEqualTo(YarnQueueServiceImpl.QUEUE_AVAILABLE_HINT);
     }
 
     /**
@@ -176,7 +182,7 @@ void testUpdateYarnQueue() {
         yarnQueue.setQueueLabel("q1@");
         assertThatThrownBy(() -> yarnQueueService.updateYarnQueue(yarnQueue))
             .isInstanceOf(ApiAlertException.class)
-            .hasMessage(ERR_FORMAT_HINTS);
+            .hasMessage(YARN_QUEUE_LABEL_FORMAT.getMessage());
 
         // Test for formal cases.
         yarnQueue.setQueueLabel(newQueue);
@@ -207,21 +213,21 @@ void testGetYarnQueueByIdWithPreconditions() {
 
         // Test for null yarn queue
         assertThatThrownBy(() -> yarnQueueServiceImpl.getYarnQueueByIdWithPreconditions(null))
-            .isInstanceOf(NullPointerException.class)
-            .hasMessage("Yarn queue mustn't be null.");
+            .isInstanceOf(ApiAlertException.class)
+            .hasMessage(YARN_QUEUE_NULL.getMessage());
 
         // Test for null yarn queue id
         YarnQueue yarnQueue = new YarnQueue();
         yarnQueue.setId(null);
         assertThatThrownBy(() -> yarnQueueServiceImpl.getYarnQueueByIdWithPreconditions(yarnQueue))
-            .isInstanceOf(NullPointerException.class)
-            .hasMessage("Yarn queue id mustn't be null.");
+            .isInstanceOf(ApiAlertException.class)
+            .hasMessage(YARN_QUEUE_ID_NULL.getMessage());
 
         // Test for yarn queue non-existed in database.
         yarnQueue.setId(1L);
         assertThatThrownBy(() -> yarnQueueServiceImpl.getYarnQueueByIdWithPreconditions(yarnQueue))
             .isInstanceOf(ApiAlertException.class)
-            .hasMessage("The queue doesn't exist.");
+            .hasMessage(YARN_QUEUE_NOT_EXIST.getMessage());
 
         // Test for expected condition.
         yarnQueue.setQueueLabel(queueLabel);
@@ -250,7 +256,8 @@ void testCheckNotReferencedByFlinkClusters() {
         assertThatThrownBy(
             () -> yarnQueueServiceImpl.checkNotReferencedByFlinkClusters(queueLabel, operation))
                 .isInstanceOf(ApiAlertException.class)
-                .hasMessage(String.format(QUEUE_USED_FORMAT, "flink clusters", operation));
+                .hasMessage(
+                    MessageFormat.format(YARN_QUEUE_USED_FORMAT.getMessage(), FLINK_CLUSTERS.getMessage(), operation));
     }
 
     @Test
@@ -289,7 +296,7 @@ void testCheckNotReferencedByApplications() {
             () -> yarnQueueServiceImpl.checkNotReferencedByApplications(
                 targetTeamId, queueLabel, operation))
                     .isInstanceOf(ApiAlertException.class)
-                    .hasMessage(String.format(QUEUE_USED_FORMAT, "applications",
+                    .hasMessage(MessageFormat.format(YARN_QUEUE_USED_FORMAT.getMessage(), APPLICATION.getMessage(),
                         operation));
     }
 }
diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/utils/YarnQueueLabelExpressionTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/utils/YarnQueueLabelExpressionTest.java
index ea289e474a..e4e4a87435 100644
--- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/utils/YarnQueueLabelExpressionTest.java
+++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/utils/YarnQueueLabelExpressionTest.java
@@ -22,7 +22,7 @@
 
 import org.junit.jupiter.api.Test;
 
-import static org.apache.streampark.console.core.util.YarnQueueLabelExpression.ERR_FORMAT_HINTS;
+import static org.apache.streampark.console.base.enums.YarnMessageStatus.YARN_QUEUE_LABEL_FORMAT;
 import static org.apache.streampark.console.core.util.YarnQueueLabelExpression.isValid;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
@@ -64,6 +64,6 @@ void testOf() {
         assertThat(YarnQueueLabelExpression.of("a").getQueue()).isEqualTo("a");
         assertThatThrownBy(() -> YarnQueueLabelExpression.of("a@"))
             .isInstanceOf(ApiAlertException.class)
-            .hasMessageContaining(ERR_FORMAT_HINTS);
+            .hasMessageContaining(YARN_QUEUE_LABEL_FORMAT.getMessage());
     }
 }
diff --git a/streampark-console/streampark-console-webapp/package.json b/streampark-console/streampark-console-webapp/package.json
index b1ee23fef2..501434cb22 100644
--- a/streampark-console/streampark-console-webapp/package.json
+++ b/streampark-console/streampark-console-webapp/package.json
@@ -51,6 +51,7 @@
     "axios": "^1.4.0",
     "crypto-js": "^4.1.1",
     "dayjs": "^1.11.9",
+    "js-cookie": "^3.0.5",
     "lodash-es": "^4.17.21",
     "monaco-editor": "^0.40.0",
     "nprogress": "^0.2.0",
@@ -71,6 +72,7 @@
     "@iconify/json": "^2.2.89",
     "@purge-icons/generated": "^0.9.0",
     "@types/fs-extra": "^11.0.1",
+    "@types/js-cookie": "^3.0.5",
     "@types/lodash-es": "^4.17.7",
     "@types/node": "^20.4.1",
     "@types/nprogress": "^0.2.0",
diff --git a/streampark-console/streampark-console-webapp/src/store/modules/locale.ts b/streampark-console/streampark-console-webapp/src/store/modules/locale.ts
index 59d99dd698..d98b712803 100644
--- a/streampark-console/streampark-console-webapp/src/store/modules/locale.ts
+++ b/streampark-console/streampark-console-webapp/src/store/modules/locale.ts
@@ -1,5 +1,6 @@
 import type { LocaleSetting, LocaleType } from '/#/config';
 
+import cookies from 'js-cookie';
 import { defineStore } from 'pinia';
 import { store } from '/@/store';
 
@@ -7,6 +8,8 @@ import { LOCALE_KEY } from '/@/enums/cacheEnum';
 import { createLocalStorage } from '/@/utils/cache';
 import { localeSetting } from '/@/settings/localeSetting';
 
+const LANGUAGE = 'language';
+
 const ls = createLocalStorage();
 
 const lsLocaleSetting = (ls.get(LOCALE_KEY) || localeSetting) as LocaleSetting;
@@ -36,6 +39,7 @@ export const useLocaleStore = defineStore({
     setLocaleInfo(info: Partial) {
       this.localInfo = { ...this.localInfo, ...info };
       ls.set(LOCALE_KEY, this.localInfo);
+      cookies.set(LANGUAGE, this.localInfo?.locale || 'en', { path: '/' });
     },
     /**
      * Initialize multilingual information and load the existing configuration from the local cache
diff --git a/streampark-e2e/streampark-e2e-case/src/test/java/org/apache/streampark/e2e/cases/ExternalLinkTest.java b/streampark-e2e/streampark-e2e-case/src/test/java/org/apache/streampark/e2e/cases/ExternalLinkTest.java
index 34a09fd21e..e1ad4d4454 100644
--- a/streampark-e2e/streampark-e2e-case/src/test/java/org/apache/streampark/e2e/cases/ExternalLinkTest.java
+++ b/streampark-e2e/streampark-e2e-case/src/test/java/org/apache/streampark/e2e/cases/ExternalLinkTest.java
@@ -88,7 +88,7 @@ void testCreateDuplicateExternalLink() {
                     .as("Name Duplicated Error message should be displayed")
                     .extracting(WebElement::getText)
                     .anyMatch(it -> it.contains(
-                        String.format("The name: %s is already existing.", newName))));
+                        String.format("badge name:%s is already existing.", newName))));
 
         externalLinkPage.errorMessageConfirmButton().click();
         externalLinkPage.createExternalLinkForm().buttonCancel().click();
diff --git a/streampark-flink/streampark-flink-sql-gateway/streampark-flink-sql-gateway-flink-v1/src/main/java/org/apache/streampark/gateway/flink/FlinkSqlGatewayImpl.java b/streampark-flink/streampark-flink-sql-gateway/streampark-flink-sql-gateway-flink-v1/src/main/java/org/apache/streampark/gateway/flink/FlinkSqlGatewayImpl.java
index ade4822e0e..2509a437d9 100644
--- a/streampark-flink/streampark-flink-sql-gateway/streampark-flink-sql-gateway-flink-v1/src/main/java/org/apache/streampark/gateway/flink/FlinkSqlGatewayImpl.java
+++ b/streampark-flink/streampark-flink-sql-gateway/streampark-flink-sql-gateway-flink-v1/src/main/java/org/apache/streampark/gateway/flink/FlinkSqlGatewayImpl.java
@@ -83,10 +83,8 @@ public SessionHandle openSession(SessionEnvironment environment) throws SqlGatew
                     defaultApi
                         .openSession(
                             new OpenSessionRequestBody()
-                                .sessionName(environment
-                                    .getSessionName())
-                                .properties(environment
-                                    .getSessionConfig()))
+                                .sessionName(environment.getSessionName())
+                                .properties(environment.getSessionConfig()))
                         .getSessionHandle()));
         } catch (ApiException e) {
             throw new SqlGatewayException("Flink native SqlGateWay openSession failed!", e);
@@ -173,13 +171,10 @@ public OperationHandle executeStatement(
                 Objects.requireNonNull(
                     defaultApi
                         .executeStatement(
-                            UUID.fromString(sessionHandle
-                                .getIdentifier()),
+                            UUID.fromString(sessionHandle.getIdentifier()),
                             new ExecuteStatementRequestBody()
                                 .statement(statement)
-                                // currently, sql gateway
-                                // don't support execution
-                                // timeout
+                                // currently, sql gateway doesn't support execution timeout
                                 // .executionTimeout(executionTimeoutMs)
                                 .executionConfig(null))
                         .getOperationHandle()));
@@ -216,8 +211,7 @@ public ResultSet fetchResults(
             resultsColumns.forEach(
                 column -> columns.add(
                     new Column(
-                        column.getName(), column.getLogicalType().toJson(),
-                        column.getComment())));
+                        column.getName(), column.getLogicalType().toJson(), column.getComment())));
 
             resultsData.forEach(row -> data.add(new RowData(row.getKind().getValue(), row.getFields())));