From ecf8b210c7bca25791ead638ba5b3cac05f20385 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Thu, 23 Nov 2023 10:25:11 +0530 Subject: [PATCH 1/6] add validations for the UpdateResultsAPI objects Signed-off-by: Saad Khan --- .../experiment/ExperimentInitiator.java | 17 +++++--- .../utils/PerformanceProfileUtil.java | 43 +++++++++++-------- .../analyzer/services/UpdateResults.java | 16 +++++-- .../utils/AnalyzerErrorConstants.java | 2 + .../remote_monitoring_tests/helpers/utils.py | 2 - 5 files changed, 51 insertions(+), 29 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java index 8f0bddb81..dadc3a12c 100644 --- a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java +++ b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java @@ -40,6 +40,7 @@ import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT; import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT_NAME; /** @@ -136,16 +137,23 @@ public void generateAndAddRecommendations(KruizeObject kruizeObject, List updateResultsAPIObjects) { - List failedDBObjects = new ArrayList<>(); + List failedDBObjects; Validator validator = Validation.byProvider(HibernateValidator.class) .configure() .messageInterpolator(new ParameterMessageInterpolator()) .failFast(true) .buildValidatorFactory() .getValidator(); - Map mainKruizeExperimentMAP = new ConcurrentHashMap(); + Map mainKruizeExperimentMAP = new ConcurrentHashMap<>(); + List errorReasons = new ArrayList<>(); for (UpdateResultsAPIObject object : updateResultsAPIObjects) { String experimentName = object.getExperimentName(); + if (experimentName == null) { + errorReasons.add(String.format("%s", MISSING_EXPERIMENT)); + object.setErrors(getErrorMap(errorReasons)); + failedUpdateResultsAPIObjects.add(object); + continue; + } if (!mainKruizeExperimentMAP.containsKey(experimentName)) { try { new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, experimentName); // TODO try to avoid DB @@ -161,10 +169,9 @@ public void validateAndAddExperimentResults(List updateR if (violations.isEmpty()) { successUpdateResultsAPIObjects.add(object); } else { - List errorReasons = new ArrayList<>(); for (ConstraintViolation violation : violations) { String propertyPath = violation.getPropertyPath().toString(); - if (null != propertyPath && propertyPath.length() != 0) { + if (null != propertyPath && !propertyPath.isEmpty()) { errorReasons.add(getSerializedName(propertyPath, UpdateResultsAPIObject.class) + ": " + violation.getMessage()); } else { errorReasons.add(violation.getMessage()); @@ -176,13 +183,11 @@ public void validateAndAddExperimentResults(List updateR } catch (Exception e) { LOGGER.debug(e.getMessage()); e.printStackTrace(); - List errorReasons = new ArrayList<>(); errorReasons.add(String.format("%s%s", e.getMessage(), experimentName)); object.setErrors(getErrorMap(errorReasons)); failedUpdateResultsAPIObjects.add(object); } } else { - List errorReasons = new ArrayList<>(); errorReasons.add(String.format("%s%s", MISSING_EXPERIMENT_NAME, experimentName)); object.setErrors(getErrorMap(errorReasons)); failedUpdateResultsAPIObjects.add(object); diff --git a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java 
b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java index c40f314d1..8138f0a11 100644 --- a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java +++ b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java @@ -68,6 +68,7 @@ public static ValidationOutputData validateAndAddProfile(Map errorReasons = new ArrayList<>(); String errorMsg = ""; List mandatoryFields = Arrays.asList( AnalyzerConstants.MetricName.cpuUsage, @@ -87,12 +88,12 @@ public static String validateResults(PerformanceProfile performanceProfile, Upda queryList.add(metric.getQuery()); } - // Get the metrics data from the Kruize Object + // Get the metrics data from the Kruize Object and validate it for (KubernetesAPIObject kubernetesAPIObject : updateResultsAPIObject.getKubernetesObjects()) { for (ContainerAPIObject containerAPIObject : kubernetesAPIObject.getContainerAPIObjects()) { // if the metrics data is not present, set corresponding validation message and skip adding the current container data if (containerAPIObject.getMetrics() == null) { - errorMsg = errorMsg.concat(String.format( + errorReasons.add(String.format( AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_METRICS, containerAPIObject.getContainer_name(), updateResultsAPIObject.getExperimentName() @@ -106,6 +107,10 @@ public static String validateResults(PerformanceProfile performanceProfile, Upda // validate the metric values errorMsg = PerformanceProfileUtil.validateMetricsValues(metric.getName(), metric.getMetricResult()); if (!errorMsg.isBlank()) { + errorReasons.add(String.format( + AnalyzerErrorConstants.AutotuneObjectErrors.CONTAINER_AND_EXPERIMENT, + containerAPIObject.getContainer_name(), + updateResultsAPIObject.getExperimentName())); break; } AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metric.getName()); @@ -113,24 +118,26 @@ public static String validateResults(PerformanceProfile performanceProfile, Upda MetricResults metricResults = metric.getMetricResult(); Map aggrInfoClassAsMap; if (!perfProfileAggrFunctions.isEmpty()) { - try { - aggrInfoClassAsMap = convertObjectToMap(metricResults.getAggregationInfoResult()); - errorMsg = validateAggFunction(aggrInfoClassAsMap, perfProfileAggrFunctions); - if (!errorMsg.isBlank()) { - errorMsg = errorMsg.concat(String.format(" for the experiment : %s" - , updateResultsAPIObject.getExperimentName())); - break; - } - } catch(IllegalAccessException | InvocationTargetException e){ - throw new RuntimeException(e); - } - } else{ - // check if query is also absent - if (queryList.isEmpty()) { - errorMsg = AnalyzerErrorConstants.AutotuneObjectErrors.QUERY_FUNCTION_MISSING; + try { + aggrInfoClassAsMap = convertObjectToMap(metricResults.getAggregationInfoResult()); + errorMsg = validateAggFunction(aggrInfoClassAsMap, perfProfileAggrFunctions); + if (!errorMsg.isBlank()) { + errorReasons.add(String.format( + AnalyzerErrorConstants.AutotuneObjectErrors.CONTAINER_AND_EXPERIMENT, + containerAPIObject.getContainer_name(), + updateResultsAPIObject.getExperimentName())); break; } + } catch(IllegalAccessException | InvocationTargetException e){ + throw new RuntimeException(e); + } + } else{ + // check if query is also absent + if (queryList.isEmpty()) { + errorMsg = AnalyzerErrorConstants.AutotuneObjectErrors.QUERY_FUNCTION_MISSING; + break; } + } } catch (IllegalArgumentException e) { LOGGER.error("Error occurred in metrics validation: " + errorMsg); } @@ -146,7 +153,7 @@ public 
static String validateResults(PerformanceProfile performanceProfile, Upda } } } - return errorMsg; + return errorReasons.toString(); } public static void addPerformanceProfile(Map performanceProfileMap, PerformanceProfile performanceProfile) { diff --git a/src/main/java/com/autotune/analyzer/services/UpdateResults.java b/src/main/java/com/autotune/analyzer/services/UpdateResults.java index f26fc0344..a8b1f44a4 100644 --- a/src/main/java/com/autotune/analyzer/services/UpdateResults.java +++ b/src/main/java/com/autotune/analyzer/services/UpdateResults.java @@ -23,11 +23,14 @@ import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; -import com.autotune.common.data.result.ExperimentResultData; import com.autotune.operator.KruizeDeploymentInfo; +import com.autotune.utils.KruizeConstants; import com.autotune.utils.MetricsConfig; import com.google.gson.Gson; +import com.google.gson.JsonParseException; import io.micrometer.core.instrument.Timer; +import org.json.JSONArray; +import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,8 +42,11 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; +import java.text.ParseException; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Date; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; @@ -74,9 +80,13 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // Set the character encoding of the request to UTF-8 request.setCharacterEncoding(CHARACTER_ENCODING); inputData = request.getReader().lines().collect(Collectors.joining()); + List updateResultsAPIObjects = new ArrayList<>(); LOGGER.debug("updateResults API request payload for requestID {} is {}", calCount, inputData); - List experimentResultDataList = new ArrayList<>(); - List updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class)); + try { + updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class)); + } catch (Exception e) { + LOGGER.debug("Parsing errors found: {}", e.getMessage()); + } LOGGER.debug("updateResults API request payload for requestID {} bulk count is {}", calCount, updateResultsAPIObjects.size()); // check for bulk entries and respond accordingly if (updateResultsAPIObjects.size() > KruizeDeploymentInfo.bulk_update_results_limit) { diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java index b75f8643c..9f396800e 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java @@ -91,6 +91,8 @@ public static final class AutotuneObjectErrors { public static final String BLANK_AGGREGATION_INFO_VALUE = " cannot be negative or blank for the metric variable: "; public static final String UNSUPPORTED_FORMAT = " Format value should be among these values: ".concat(KruizeSupportedTypes.SUPPORTED_FORMATS.toString()); public static final String UNSUPPORTED_METRIC = "Metric variable name should be among these values: ".concat(Arrays.toString(AnalyzerConstants.MetricName.values())); + public static final String MISSING_EXPERIMENT = "Experiment name is missing or null!"; 
+ public static final String CONTAINER_AND_EXPERIMENT = " for container : %s for experiment: %s."; private AutotuneObjectErrors() { diff --git a/tests/scripts/remote_monitoring_tests/helpers/utils.py b/tests/scripts/remote_monitoring_tests/helpers/utils.py index aad97c4a4..61e244d57 100644 --- a/tests/scripts/remote_monitoring_tests/helpers/utils.py +++ b/tests/scripts/remote_monitoring_tests/helpers/utils.py @@ -189,8 +189,6 @@ def generate_test_data(csvfile, test_data): test_name = t + "_" + key status_code = 400 - if test_name == "invalid_experiment_name" or test_name == "invalid_cluster_name": - status_code = 201 data.append(test_name) data.append(status_code) From 31ff3d8202adc076344b0a3b1806c11096fcd0c8 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Mon, 27 Nov 2023 13:46:14 +0530 Subject: [PATCH 2/6] updates added for more validation Signed-off-by: Saad Khan --- .../utils/PerformanceProfileUtil.java | 4 +- .../KubernetesElementsValidator.java | 75 ++++++++++++++++++- .../PerformanceProfileValidator.java | 7 +- .../rest_apis/test_update_results.py | 14 ++-- 4 files changed, 86 insertions(+), 14 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java index 8138f0a11..0b3207480 100644 --- a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java +++ b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java @@ -66,7 +66,7 @@ public static ValidationOutputData validateAndAddProfile(Map validateResults(PerformanceProfile performanceProfile, UpdateResultsAPIObject updateResultsAPIObject) { List errorReasons = new ArrayList<>(); String errorMsg = ""; @@ -153,7 +153,7 @@ public static String validateResults(PerformanceProfile performanceProfile, Upda } } } - return errorReasons.toString(); + return errorReasons; } public static void addPerformanceProfile(Map performanceProfileMap, PerformanceProfile performanceProfile) { diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java index af0d49f1f..ea35a771b 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java @@ -16,12 +16,12 @@ package com.autotune.analyzer.serviceObjects.verification.validators; import com.autotune.analyzer.kruizeObject.KruizeObject; -import com.autotune.analyzer.performanceProfiles.PerformanceProfile; import com.autotune.analyzer.serviceObjects.Converters; import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject; import com.autotune.analyzer.serviceObjects.verification.annotators.KubernetesElementsCheck; -import com.autotune.analyzer.services.UpdateResults; +import com.autotune.common.data.result.ContainerData; import com.autotune.common.data.result.ExperimentResultData; +import com.autotune.common.k8sObjects.K8sObject; import jakarta.validation.ConstraintValidator; import jakarta.validation.ConstraintValidatorContext; import org.slf4j.Logger; @@ -29,6 +29,8 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.stream.Collectors; +import java.util.stream.IntStream; public class KubernetesElementsValidator implements 
ConstraintValidator { private static final Logger LOGGER = LoggerFactory.getLogger(KubernetesElementsValidator.class); @@ -45,7 +47,6 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint String errorMessage = ""; try { KruizeObject kruizeObject = updateResultsAPIObject.getKruizeObject(); - PerformanceProfile performanceProfile = UpdateResults.performanceProfilesMap.get(kruizeObject.getPerformanceProfile()); ExperimentResultData resultData = Converters.KruizeObjectConverters.convertUpdateResultsAPIObjToExperimentResultData(updateResultsAPIObject); String expName = kruizeObject.getExperimentName(); String errorMsg = ""; @@ -96,6 +97,55 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint )); } + // Validate container names + if(kruizeObject.getKubernetes_objects().size() == resultData.getKubernetes_objects().size() && + IntStream.range(0, kruizeObject.getKubernetes_objects().size()) + .allMatch(i -> { + K8sObject kruizeK8sObject = kruizeObject.getKubernetes_objects().get(i); + K8sObject resultDataK8sObject = resultData.getKubernetes_objects().get(i); + return kruizeK8sObject.getName().equals(resultDataK8sObject.getName()) && compareContainerNames(kruizeK8sObject, resultDataK8sObject); + })) { + kubeObjsMisMatch = true; + errorMsg = errorMsg.concat( + String.format( + "Kubernetes Object Container names MisMatched. Expected names: %s, Found: %s in Results for experiment: %s \n", + kruizeObject.getKubernetes_objects().stream() + .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) + .map(ContainerData::getContainer_name) + .collect(Collectors.toList()), + resultData.getKubernetes_objects().stream() + .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) + .map(ContainerData::getContainer_name) + .collect(Collectors.toList()), + expName + )); + } + + // Validate container image names + if (kruizeObject.getKubernetes_objects().size() == resultData.getKubernetes_objects().size() && + IntStream.range(0, kruizeObject.getKubernetes_objects().size()) + .allMatch(i -> { + K8sObject kruizeK8sObject = kruizeObject.getKubernetes_objects().get(i); + K8sObject resultDataK8sObject = resultData.getKubernetes_objects().get(i); + return kruizeK8sObject.getName().equals(resultDataK8sObject.getName()) && compareContainerImageNames(kruizeK8sObject, resultDataK8sObject); + })) { + kubeObjsMisMatch = true; + errorMsg = errorMsg.concat( + String.format( + "Kubernetes Object Container names MisMatched. 
Expected names: %s, Found: %s in Results for experiment: %s \n", + kruizeObject.getKubernetes_objects().stream() + .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) + .map(ContainerData::getContainer_image_name) + .collect(Collectors.toList()), + resultData.getKubernetes_objects().stream() + .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) + .map(ContainerData::getContainer_image_name) + .collect(Collectors.toList()), + expName + )); + } + + if (kubeObjsMisMatch) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(errorMsg) @@ -126,4 +176,23 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint LOGGER.debug("KubernetesElementsValidator success : {}", success); return success; } + + private boolean compareContainerImageNames(K8sObject kruizeK8sObject, K8sObject resultDataK8sObject) { + return kruizeK8sObject.getContainerDataMap().size() == resultDataK8sObject.getContainerDataMap().size() && + kruizeK8sObject.getContainerDataMap().values().stream() + .allMatch(containerData -> + resultDataK8sObject.getContainerDataMap().values().stream() + .anyMatch(data -> + data.getContainer_image_name().equals(containerData.getContainer_image_name()) + ) + ); + } + + private boolean compareContainerNames(K8sObject kruizeK8sObject, K8sObject resultDataK8sObject) { + return kruizeK8sObject.getContainerDataMap().size() == resultDataK8sObject.getContainerDataMap().size() && + kruizeK8sObject.getContainerDataMap().keySet().stream() + .allMatch(containerData -> + resultDataK8sObject.getContainerDataMap().containsKey(containerData) + ); + } } diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java index 3ba21a29d..abb80e1ae 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java @@ -29,6 +29,7 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.List; import java.util.concurrent.ConcurrentHashMap; import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_PERF_PROFILE; @@ -63,12 +64,12 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint } // validate the results value present in the updateResultsAPIObject - String errorMsg = PerformanceProfileUtil.validateResults(performanceProfile, updateResultsAPIObject); - if (null == errorMsg || errorMsg.isEmpty()) { + List errorMsg = PerformanceProfileUtil.validateResults(performanceProfile, updateResultsAPIObject); + if (errorMsg.isEmpty()) { success = true; } else { context.disableDefaultConstraintViolation(); - context.buildConstraintViolationWithTemplate(errorMsg) + context.buildConstraintViolationWithTemplate(errorMsg.toString()) .addPropertyNode("Performance profile") .addConstraintViolation(); } diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py index e2692534e..d8023351e 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py @@ -17,11 +17,11 @@ ] missing_metrics = [ - 
("Missing_metrics_single_res_single_container", "../json_files/missing_metrics_jsons/update_results_missing_metrics_single_container.json", "Out of a total of 1 records, 1 failed to save", "kubernetes_objects : Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), - ("Missing_metrics_single_res_all_containers", "../json_files/missing_metrics_jsons/update_results_missing_metrics_all_containers.json", "Out of a total of 1 records, 1 failed to save", "kubernetes_objects : Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), - ("Missing_metrics_bulk_res_single_container", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_single_container.json", "Out of a total of 100 records, 1 failed to save", "kubernetes_objects : Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), - ("Missing_metrics_bulk_res_few_containers", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers.json", "Out of a total of 100 records, 2 failed to save", "kubernetes_objects : Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), - ("Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers_few_individual_metrics_missing.json", "Out of a total of 100 records, 4 failed to save", "Metric data is not present for container") + ("Missing_metrics_single_res_single_container", "../json_files/missing_metrics_jsons/update_results_missing_metrics_single_container.json", "Out of a total of 1 records, 1 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), + ("Missing_metrics_single_res_all_containers", "../json_files/missing_metrics_jsons/update_results_missing_metrics_all_containers.json", "Out of a total of 1 records, 1 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. 
, Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), + ("Missing_metrics_bulk_res_single_container", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_single_container.json", "Out of a total of 100 records, 1 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), + ("Missing_metrics_bulk_res_few_containers", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers.json", "Out of a total of 100 records, 2 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), + ("Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers_few_individual_metrics_missing.json", "Out of a total of 100 records, 2 failed to save", "Metric data is not present for container") ] @@ -180,7 +180,9 @@ def test_update_results_with_missing_metrics_section(test_name, result_json_file for err in error_data: actual_error_message = err["message"] if test_name == "Missing_metrics_bulk_res_few_containers" and d["interval_end_time"] == "2023-04-13T23:29:20.982Z": - error_message = "kubernetes_objects : Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db" + error_message = "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. ] , " \ + "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. 
, " \ + "Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db" if test_name == "Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing" and d["interval_end_time"] == "2023-04-13T23:44:20.982Z" or d["interval_end_time"] == "2023-04-13T23:59:20.982Z": error_message = "Performance profile: Missing one of the following mandatory parameters for experiment - quarkus-resteasy-kruize-min-http-response-time-db : [cpuUsage, memoryUsage, memoryRSS]" assert error_message in actual_error_message From 39d972fc08ebfdace338ae5cba6ab53233ed856c Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Fri, 1 Dec 2023 15:11:22 +0530 Subject: [PATCH 3/6] updates wrt to the container and container image validations, bug fixes Signed-off-by: Saad Khan --- .../utils/PerformanceProfileUtil.java | 15 ++-- .../KubernetesElementsValidator.java | 84 +++++-------------- .../analyzer/services/UpdateResults.java | 16 +--- 3 files changed, 31 insertions(+), 84 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java index 0b3207480..b37dffb3f 100644 --- a/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java +++ b/src/main/java/com/autotune/analyzer/performanceProfiles/utils/PerformanceProfileUtil.java @@ -107,10 +107,10 @@ public static List validateResults(PerformanceProfile performanceProfile // validate the metric values errorMsg = PerformanceProfileUtil.validateMetricsValues(metric.getName(), metric.getMetricResult()); if (!errorMsg.isBlank()) { - errorReasons.add(String.format( + errorReasons.add(errorMsg.concat(String.format( AnalyzerErrorConstants.AutotuneObjectErrors.CONTAINER_AND_EXPERIMENT, containerAPIObject.getContainer_name(), - updateResultsAPIObject.getExperimentName())); + updateResultsAPIObject.getExperimentName()))); break; } AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metric.getName()); @@ -122,10 +122,10 @@ public static List validateResults(PerformanceProfile performanceProfile aggrInfoClassAsMap = convertObjectToMap(metricResults.getAggregationInfoResult()); errorMsg = validateAggFunction(aggrInfoClassAsMap, perfProfileAggrFunctions); if (!errorMsg.isBlank()) { - errorReasons.add(String.format( + errorReasons.add(errorMsg.concat(String.format( AnalyzerErrorConstants.AutotuneObjectErrors.CONTAINER_AND_EXPERIMENT, containerAPIObject.getContainer_name(), - updateResultsAPIObject.getExperimentName())); + updateResultsAPIObject.getExperimentName()))); break; } } catch(IllegalAccessException | InvocationTargetException e){ @@ -134,7 +134,7 @@ public static List validateResults(PerformanceProfile performanceProfile } else{ // check if query is also absent if (queryList.isEmpty()) { - errorMsg = AnalyzerErrorConstants.AutotuneObjectErrors.QUERY_FUNCTION_MISSING; + errorReasons.add(AnalyzerErrorConstants.AutotuneObjectErrors.QUERY_FUNCTION_MISSING); break; } } @@ -142,13 +142,14 @@ public static List validateResults(PerformanceProfile performanceProfile LOGGER.error("Error occurred in metrics validation: " + errorMsg); } } - if (!errorMsg.isBlank()) + if (!errorReasons.isEmpty()) break; LOGGER.debug("perfProfileFunctionVariablesList: {}", perfProfileFunctionVariablesList); LOGGER.debug("kruizeFunctionVariablesList: {}", kruizeFunctionVariablesList); if (!new 
HashSet<>(kruizeFunctionVariablesList).containsAll(mandatoryFields)) { - errorMsg = errorMsg.concat(String.format("Missing one of the following mandatory parameters for experiment - %s : %s", updateResultsAPIObject.getExperimentName(), mandatoryFields)); + errorReasons.add(errorMsg.concat(String.format("Missing one of the following mandatory parameters for experiment - %s : %s", + updateResultsAPIObject.getExperimentName(), mandatoryFields))); break; } } diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java index ea35a771b..5a25d36c7 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java @@ -29,6 +29,7 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -97,55 +98,29 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint )); } - // Validate container names - if(kruizeObject.getKubernetes_objects().size() == resultData.getKubernetes_objects().size() && - IntStream.range(0, kruizeObject.getKubernetes_objects().size()) - .allMatch(i -> { - K8sObject kruizeK8sObject = kruizeObject.getKubernetes_objects().get(i); - K8sObject resultDataK8sObject = resultData.getKubernetes_objects().get(i); - return kruizeK8sObject.getName().equals(resultDataK8sObject.getName()) && compareContainerNames(kruizeK8sObject, resultDataK8sObject); - })) { + // Validate container names and image names + List kruizeContainers = kruizeObject.getKubernetes_objects().get(0).getContainerDataMap().values().stream().toList(); + List resultContainers = resultData.getKubernetes_objects().get(0).getContainerDataMap().values().stream().toList(); + if (kruizeContainers.size() != resultContainers.size()) { kubeObjsMisMatch = true; errorMsg = errorMsg.concat( - String.format( - "Kubernetes Object Container names MisMatched. Expected names: %s, Found: %s in Results for experiment: %s \n", - kruizeObject.getKubernetes_objects().stream() - .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) - .map(ContainerData::getContainer_name) - .collect(Collectors.toList()), - resultData.getKubernetes_objects().stream() - .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) - .map(ContainerData::getContainer_name) - .collect(Collectors.toList()), - expName - )); - } - - // Validate container image names - if (kruizeObject.getKubernetes_objects().size() == resultData.getKubernetes_objects().size() && - IntStream.range(0, kruizeObject.getKubernetes_objects().size()) - .allMatch(i -> { - K8sObject kruizeK8sObject = kruizeObject.getKubernetes_objects().get(i); - K8sObject resultDataK8sObject = resultData.getKubernetes_objects().get(i); - return kruizeK8sObject.getName().equals(resultDataK8sObject.getName()) && compareContainerImageNames(kruizeK8sObject, resultDataK8sObject); - })) { - kubeObjsMisMatch = true; - errorMsg = errorMsg.concat( - String.format( - "Kubernetes Object Container names MisMatched. 
Expected names: %s, Found: %s in Results for experiment: %s \n", - kruizeObject.getKubernetes_objects().stream() - .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) - .map(ContainerData::getContainer_image_name) - .collect(Collectors.toList()), - resultData.getKubernetes_objects().stream() - .flatMap(k8sObject -> k8sObject.getContainerDataMap().values().stream()) - .map(ContainerData::getContainer_image_name) - .collect(Collectors.toList()), - expName - )); + String.format("Containers count MisMatched. Expected: %s, Found: %s in Results for experiment: %s \n", + kruizeContainers.size(), resultContainers.size(), expName)); + } else { + for (int i = 0; i < kruizeContainers.size(); i++) { + ContainerData kruizeContainer = kruizeContainers.get(i); + ContainerData resultContainer = resultContainers.get(i); + if (!kruizeContainer.getContainer_name().equals(resultContainer.getContainer_name()) + || !kruizeContainer.getContainer_image_name().equals(resultContainer.getContainer_image_name())) { + kubeObjsMisMatch = true; + errorMsg = errorMsg.concat( + String.format("Container names or image names MisMatched. Expected: %s - %s, Found: %s - %s in Results for experiment: %s \n", + kruizeContainer.getContainer_name(), kruizeContainer.getContainer_image_name(), + resultContainer.getContainer_name(), resultContainer.getContainer_image_name(), expName)); + } + } } - if (kubeObjsMisMatch) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(errorMsg) @@ -176,23 +151,4 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint LOGGER.debug("KubernetesElementsValidator success : {}", success); return success; } - - private boolean compareContainerImageNames(K8sObject kruizeK8sObject, K8sObject resultDataK8sObject) { - return kruizeK8sObject.getContainerDataMap().size() == resultDataK8sObject.getContainerDataMap().size() && - kruizeK8sObject.getContainerDataMap().values().stream() - .allMatch(containerData -> - resultDataK8sObject.getContainerDataMap().values().stream() - .anyMatch(data -> - data.getContainer_image_name().equals(containerData.getContainer_image_name()) - ) - ); - } - - private boolean compareContainerNames(K8sObject kruizeK8sObject, K8sObject resultDataK8sObject) { - return kruizeK8sObject.getContainerDataMap().size() == resultDataK8sObject.getContainerDataMap().size() && - kruizeK8sObject.getContainerDataMap().keySet().stream() - .allMatch(containerData -> - resultDataK8sObject.getContainerDataMap().containsKey(containerData) - ); - } } diff --git a/src/main/java/com/autotune/analyzer/services/UpdateResults.java b/src/main/java/com/autotune/analyzer/services/UpdateResults.java index a8b1f44a4..f26fc0344 100644 --- a/src/main/java/com/autotune/analyzer/services/UpdateResults.java +++ b/src/main/java/com/autotune/analyzer/services/UpdateResults.java @@ -23,14 +23,11 @@ import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; +import com.autotune.common.data.result.ExperimentResultData; import com.autotune.operator.KruizeDeploymentInfo; -import com.autotune.utils.KruizeConstants; import com.autotune.utils.MetricsConfig; import com.google.gson.Gson; -import com.google.gson.JsonParseException; import io.micrometer.core.instrument.Timer; -import org.json.JSONArray; -import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,11 +39,8 @@ import 
javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; @@ -80,13 +74,9 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // Set the character encoding of the request to UTF-8 request.setCharacterEncoding(CHARACTER_ENCODING); inputData = request.getReader().lines().collect(Collectors.joining()); - List updateResultsAPIObjects = new ArrayList<>(); LOGGER.debug("updateResults API request payload for requestID {} is {}", calCount, inputData); - try { - updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class)); - } catch (Exception e) { - LOGGER.debug("Parsing errors found: {}", e.getMessage()); - } + List experimentResultDataList = new ArrayList<>(); + List updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class)); LOGGER.debug("updateResults API request payload for requestID {} bulk count is {}", calCount, updateResultsAPIObjects.size()); // check for bulk entries and respond accordingly if (updateResultsAPIObjects.size() > KruizeDeploymentInfo.bulk_update_results_limit) { From 86a471149df2f983da039b696c2ccd7b8ad8d22e Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Tue, 5 Dec 2023 13:15:29 +0530 Subject: [PATCH 4/6] update parsing failure validations Signed-off-by: Saad Khan --- .../experiment/ExperimentInitiator.java | 23 ++++++++++++ .../analyzer/services/UpdateResults.java | 36 ++++++++++++++++--- .../utils/AnalyzerErrorConstants.java | 3 ++ .../rest_apis/test_update_results.py | 4 +-- 4 files changed, 60 insertions(+), 6 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java index dadc3a12c..9e70de7bc 100644 --- a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java +++ b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java @@ -162,6 +162,15 @@ public void validateAndAddExperimentResults(List updateR } } if (mainKruizeExperimentMAP.containsKey(experimentName)) { + + // check version + String errorMsg = checkVersion(object, mainKruizeExperimentMAP); + if (errorMsg != null) { + errorReasons.add(errorMsg); + object.setErrors(getErrorMap(errorReasons)); + failedUpdateResultsAPIObjects.add(object); + continue; + } object.setKruizeObject(mainKruizeExperimentMAP.get(object.getExperimentName())); Set> violations = new HashSet<>(); try { @@ -206,6 +215,20 @@ public void validateAndAddExperimentResults(List updateR } } + private String checkVersion(UpdateResultsAPIObject object, Map mainKruizeExperimentMAP) { + try { + KruizeObject kruizeObject = mainKruizeExperimentMAP.get(object.getExperimentName()); + if (!object.getApiVersion().equals(kruizeObject.getApiVersion())) { + return String.format(AnalyzerErrorConstants.AutotuneObjectErrors.VERSION_MISMATCH, + kruizeObject.getApiVersion(), object.getApiVersion()); + } + } catch (Exception e) { + LOGGER.error("Exception occurred while checking version: {}", e.getMessage()); + return null; + } + return null; + } + public String getSerializedName(String fieldName, Class targetClass) { Class currentClass = targetClass; while (currentClass != null) { diff --git 
a/src/main/java/com/autotune/analyzer/services/UpdateResults.java b/src/main/java/com/autotune/analyzer/services/UpdateResults.java index f26fc0344..33390da8a 100644 --- a/src/main/java/com/autotune/analyzer/services/UpdateResults.java +++ b/src/main/java/com/autotune/analyzer/services/UpdateResults.java @@ -23,10 +23,9 @@ import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; -import com.autotune.common.data.result.ExperimentResultData; import com.autotune.operator.KruizeDeploymentInfo; import com.autotune.utils.MetricsConfig; -import com.google.gson.Gson; +import com.google.gson.*; import io.micrometer.core.instrument.Timer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,6 +38,7 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; +import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -74,9 +74,23 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // Set the character encoding of the request to UTF-8 request.setCharacterEncoding(CHARACTER_ENCODING); inputData = request.getReader().lines().collect(Collectors.joining()); + List updateResultsAPIObjects; + Gson gson = new GsonBuilder() + .registerTypeAdapter(Double.class, new CustomNumberDeserializer()) + .registerTypeAdapter(Integer.class, new CustomNumberDeserializer()) + .create(); LOGGER.debug("updateResults API request payload for requestID {} is {}", calCount, inputData); - List experimentResultDataList = new ArrayList<>(); - List updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class)); + try { + updateResultsAPIObjects = Arrays.asList(gson.fromJson(inputData, UpdateResultsAPIObject[].class)); + } catch (JsonParseException e) { + LOGGER.error("{} : {}", AnalyzerErrorConstants.AutotuneObjectErrors.JSON_PARSING_ERROR, e.getMessage()); + sendErrorResponse(inputData, request, response, null, HttpServletResponse.SC_BAD_REQUEST, AnalyzerErrorConstants.AutotuneObjectErrors.JSON_PARSING_ERROR); + return; + } catch (NumberFormatException e) { + LOGGER.error("{}", e.getMessage()); + sendErrorResponse(inputData, request, response, null, HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); + return; + } LOGGER.debug("updateResults API request payload for requestID {} bulk count is {}", calCount, updateResultsAPIObjects.size()); // check for bulk entries and respond accordingly if (updateResultsAPIObjects.size() > KruizeDeploymentInfo.bulk_update_results_limit) { @@ -151,4 +165,18 @@ public void sendErrorResponse(String inputPayload, HttpServletRequest request, H LOGGER.debug("UpdateRequestsAPI input pay load {} ", inputPayload); response.sendError(httpStatusCode, errorMsg); } + + public static class CustomNumberDeserializer implements JsonDeserializer { + + @Override + public Number deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws NumberFormatException { + String value = json.getAsString().trim(); + if (value.isEmpty()) { + throw new NumberFormatException(AnalyzerErrorConstants.AutotuneObjectErrors.AGGREGATION_INFO_INVALID_VALUE); + } else { + return Double.parseDouble(value); + } + } + } + } diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java index 9f396800e..816a72f34 100644 --- 
a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java @@ -93,6 +93,9 @@ public static final class AutotuneObjectErrors { public static final String UNSUPPORTED_METRIC = "Metric variable name should be among these values: ".concat(Arrays.toString(AnalyzerConstants.MetricName.values())); public static final String MISSING_EXPERIMENT = "Experiment name is missing or null!"; public static final String CONTAINER_AND_EXPERIMENT = " for container : %s for experiment: %s."; + public static final String JSON_PARSING_ERROR = "Failed to parse the JSON. Please check the input payload "; + public static final String AGGREGATION_INFO_INVALID_VALUE = "Invalid value type for aggregation_info objects. Expected a numeric value (Double)."; + public static final String VERSION_MISMATCH = "Version number mismatch found. Expected: %s , Found: %s"; private AutotuneObjectErrors() { diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py index d8023351e..33a5f214e 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py @@ -21,7 +21,7 @@ ("Missing_metrics_single_res_all_containers", "../json_files/missing_metrics_jsons/update_results_missing_metrics_all_containers.json", "Out of a total of 1 records, 1 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. , Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), ("Missing_metrics_bulk_res_single_container", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_single_container.json", "Out of a total of 100 records, 1 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), ("Missing_metrics_bulk_res_few_containers", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers.json", "Out of a total of 100 records, 2 failed to save", "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db"), - ("Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers_few_individual_metrics_missing.json", "Out of a total of 100 records, 2 failed to save", "Metric data is not present for container") + ("Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing", "../json_files/missing_metrics_jsons/bulk_update_results_missing_metrics_few_containers_few_individual_metrics_missing.json", "Out of a total of 100 records, 4 failed to save", "Metric data is not present for container") ] @@ -184,7 +184,7 @@ def test_update_results_with_missing_metrics_section(test_name, result_json_file "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. 
, " \ "Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db" if test_name == "Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing" and d["interval_end_time"] == "2023-04-13T23:44:20.982Z" or d["interval_end_time"] == "2023-04-13T23:59:20.982Z": - error_message = "Performance profile: Missing one of the following mandatory parameters for experiment - quarkus-resteasy-kruize-min-http-response-time-db : [cpuUsage, memoryUsage, memoryRSS]" + error_message = "Performance profile: [Missing one of the following mandatory parameters for experiment - quarkus-resteasy-kruize-min-http-response-time-db : [cpuUsage, memoryUsage, memoryRSS]]" assert error_message in actual_error_message response = delete_experiment(input_json_file) From d9ae0443b39fe1a5855156bf4fa2d32fcc038622 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Mon, 8 Jan 2024 16:51:07 +0530 Subject: [PATCH 5/6] fix failures and changes as per review Signed-off-by: Saad Khan --- .../analyzer/experiment/ExperimentInitiator.java | 2 +- .../remote_monitoring_tests/helpers/utils.py | 11 +++++++---- .../rest_apis/test_create_experiment.py | 6 +++--- .../rest_apis/test_list_recommendations.py | 15 ++++++++------- .../rest_apis/test_update_recommendations.py | 4 ++-- .../rest_apis/test_update_results.py | 6 ++---- 6 files changed, 23 insertions(+), 21 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java index 9e70de7bc..25ff49f44 100644 --- a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java +++ b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java @@ -149,7 +149,7 @@ public void validateAndAddExperimentResults(List updateR for (UpdateResultsAPIObject object : updateResultsAPIObjects) { String experimentName = object.getExperimentName(); if (experimentName == null) { - errorReasons.add(String.format("%s", MISSING_EXPERIMENT)); + errorReasons.add(String.format("%s%s", MISSING_EXPERIMENT_NAME, null)); object.setErrors(getErrorMap(errorReasons)); failedUpdateResultsAPIObjects.add(object); continue; diff --git a/tests/scripts/remote_monitoring_tests/helpers/utils.py b/tests/scripts/remote_monitoring_tests/helpers/utils.py index 61e244d57..0e333ed3c 100644 --- a/tests/scripts/remote_monitoring_tests/helpers/utils.py +++ b/tests/scripts/remote_monitoring_tests/helpers/utils.py @@ -30,19 +30,20 @@ ERROR_STATUS = "ERROR" UPDATE_RESULTS_SUCCESS_MSG = "Results added successfully! View saved results at /listExperiments." UPDATE_RESULTS_DATE_PRECEDE_ERROR_MSG = "The Start time should precede the End time!" -UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG = "Performance profile: avg cannot be negative or blank for the metric variable: " -UPDATE_RESULTS_INVALID_METRIC_FORMAT_ERROR_MSG = "Performance profile: Format value should be among these values: [cores, MiB]" +UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG = "Performance profile: [avg cannot be negative or blank for the metric variable: " +UPDATE_RESULTS_INVALID_METRIC_FORMAT_ERROR_MSG = "Performance profile: [ Format value should be among these values: [GiB, Gi, Ei, KiB, E, MiB, G, PiB, K, TiB, M, P, Bytes, cores, T, Ti, MB, KB, Pi, GB, EB, k, m, TB, PB, bytes, kB, Mi, Ki, EiB]" CREATE_EXP_SUCCESS_MSG = "Experiment registered successfully with Kruize. 
View registered experiments at /listExperiments" CREATE_EXP_BULK_ERROR_MSG = "At present, the system does not support bulk entries!" UPDATE_RECOMMENDATIONS_MANDATORY_DEFAULT_MESSAGE = 'experiment_name is mandatory' UPDATE_RECOMMENDATIONS_MANDATORY_INTERVAL_END_DATE = 'interval_end_time is mandatory' -UPDATE_RECOMMENDATIONS_DATA_NOT_FOUND = 'Data not found!' +UPDATE_RECOMMENDATIONS_EXPERIMENT_NOT_FOUND = 'Not Found: experiment_name does not exist: ' UPDATE_RECOMMENDATIONS_START_TIME_PRECEDE_END_TIME = 'The Start time should precede the End time!' UPDATE_RECOMMENDATIONS_START_TIME_END_TIME_GAP_ERROR = 'The gap between the interval_start_time and interval_end_time must be within a maximum of 15 days!' UPDATE_RECOMMENDATIONS_INVALID_DATE_TIME_FORMAT = "Given timestamp - \" %s \" is not a valid timestamp format" RECOMMENDATIONS_AVAILABLE = "Recommendations Are Available" COST_RECOMMENDATIONS_AVAILABLE = "Cost Recommendations Available" PERFORMANCE_RECOMMENDATIONS_AVAILABLE = "Performance Recommendations Available" +CONTAINER_AND_EXPERIMENT_NAME = " for container : %s for experiment: %s.]" # Kruize Recommendations Notification codes NOTIFICATION_CODE_FOR_RECOMMENDATIONS_AVAILABLE = "111000" @@ -177,7 +178,7 @@ test_type = {"blank": "", "null": "null", "invalid": "xyz"} -def generate_test_data(csvfile, test_data): +def generate_test_data(csvfile, test_data, api_name): if os.path.isfile(csvfile): os.remove(csvfile) with open(csvfile, 'a') as f: @@ -189,6 +190,8 @@ def generate_test_data(csvfile, test_data): test_name = t + "_" + key status_code = 400 + if api_name == "create_exp" and (test_name == "invalid_experiment_name" or test_name == "invalid_cluster_name"): + status_code = 201 data.append(test_name) data.append(status_code) diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_create_experiment.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_create_experiment.py index 5df144c5f..e41df7f8b 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_create_experiment.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_create_experiment.py @@ -33,7 +33,7 @@ @pytest.mark.negative @pytest.mark.parametrize( "test_name, expected_status_code, version, experiment_name, cluster_name, performance_profile, mode, target_cluster, kubernetes_obj_type, name, namespace, container_image_name, container_name, measurement_duration, threshold", - generate_test_data(csvfile, create_exp_test_data)) + generate_test_data(csvfile, create_exp_test_data, "create_exp")) def test_create_exp_invalid_tests(test_name, expected_status_code, version, experiment_name, cluster_name, performance_profile, mode, target_cluster, kubernetes_obj_type, name, namespace, container_image_name, container_name, measurement_duration, threshold, cluster_type): @@ -322,7 +322,7 @@ def test_create_exp_with_both_performance_profile_slo(cluster_type): print("delete exp = ", response.status_code) -@pytest.mark.sanity +@pytest.mark.negative def test_create_exp_with_both_deployment_name_selector(cluster_type): """ Test Description: This test validates the creation of an experiment by specifying both deployment name & selector @@ -347,7 +347,7 @@ def test_create_exp_with_both_deployment_name_selector(cluster_type): print("delete exp = ", response.status_code) -@pytest.mark.sanity +@pytest.mark.negative def test_create_exp_with_invalid_header(cluster_type): """ Test Description: This test validates the creation of an experiment by specifying invalid content type in the header diff --git 
a/tests/scripts/remote_monitoring_tests/rest_apis/test_list_recommendations.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_list_recommendations.py index 2db22dfd4..226d911a6 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_list_recommendations.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_list_recommendations.py @@ -1117,17 +1117,18 @@ def test_list_recommendations_notification_codes(cluster_type: str): print("delete exp = ", response.status_code) -def validate_error_msgs(j: int, status_message): +def validate_error_msgs(j: int, status_message, cname, experiment_name): + if j == 96: - assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + CPU_REQUEST + assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + CPU_REQUEST + CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name) elif j == 97: - assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + MEMORY_REQUEST + assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + MEMORY_REQUEST + CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name) elif j == 98: - assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + CPU_LIMIT + assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + CPU_LIMIT + CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name) elif j == 99: - assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + MEMORY_LIMIT + assert status_message == UPDATE_RESULTS_INVALID_METRIC_VALUE_ERROR_MSG + MEMORY_LIMIT + CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name) elif j > 100: - assert status_message == UPDATE_RESULTS_INVALID_METRIC_FORMAT_ERROR_MSG + assert status_message == UPDATE_RESULTS_INVALID_METRIC_FORMAT_ERROR_MSG + CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name) @pytest.mark.negative @@ -1313,7 +1314,7 @@ def test_invalid_list_recommendations_notification_codes(cluster_type: str): if j in range(96, 104): assert response.status_code == ERROR_STATUS_CODE assert data['status'] == ERROR_STATUS - validate_error_msgs(j, data['data'][0]['errors'][0]['message']) + validate_error_msgs(j, data['data'][0]['errors'][0]['message'], container_name_to_update, experiment_name) else: assert response.status_code == SUCCESS_STATUS_CODE assert data['status'] == SUCCESS_STATUS diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_recommendations.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_recommendations.py index 3cce8c238..19debe961 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_recommendations.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_recommendations.py @@ -305,7 +305,7 @@ def test_update_recommendations_with_unknown_experiment_name_and_end_time(cluste response = update_recommendations(experiment_name, None, end_time) data = response.json() assert response.status_code == ERROR_STATUS_CODE - assert data['message'] == UPDATE_RECOMMENDATIONS_DATA_NOT_FOUND + assert data['message'] == UPDATE_RECOMMENDATIONS_EXPERIMENT_NOT_FOUND + experiment_name @pytest.mark.negative @@ -323,7 +323,7 @@ def test_update_recommendations_with_end_time_precede_start_time(cluster_type): assert data['message'] == UPDATE_RECOMMENDATIONS_START_TIME_PRECEDE_END_TIME -@pytest.mark.negative +@pytest.mark.skip(reason="Not enabled interval_start_time yet") def test_update_recommendations_with_end_time_precede_start_time(cluster_type): ''' Update recommendation with start time and end time having difference more 
than 15 days. diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py index 33a5f214e..dcb8410f0 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_update_results.py @@ -28,7 +28,7 @@ @pytest.mark.negative @pytest.mark.parametrize( "test_name, expected_status_code, version, experiment_name, interval_start_time, interval_end_time, kubernetes_obj_type, name, namespace, container_image_name, container_name, cpuRequest_name, cpuRequest_sum, cpuRequest_avg, cpuRequest_format, cpuLimit_name, cpuLimit_sum, cpuLimit_avg, cpuLimit_format, cpuUsage_name, cpuUsage_sum, cpuUsage_max, cpuUsage_avg, cpuUsage_min, cpuUsage_format, cpuThrottle_name, cpuThrottle_sum, cpuThrottle_max, cpuThrottle_avg, cpuThrottle_format, memoryRequest_name, memoryRequest_sum, memoryRequest_avg, memoryRequest_format, memoryLimit_name, memoryLimit_sum, memoryLimit_avg, memoryLimit_format, memoryUsage_name, memoryUsage_sum, memoryUsage_max, memoryUsage_avg, memoryUsage_min, memoryUsage_format, memoryRSS_name, memoryRSS_sum, memoryRSS_max, memoryRSS_avg, memoryRSS_min, memoryRSS_format", - generate_test_data(csvfile, update_results_test_data)) + generate_test_data(csvfile, update_results_test_data, "update_results")) def test_update_results_invalid_tests(test_name, expected_status_code, version, experiment_name, interval_start_time, interval_end_time, kubernetes_obj_type, name, namespace, container_image_name, container_name, cpuRequest_name, cpuRequest_sum, cpuRequest_avg, @@ -180,9 +180,7 @@ def test_update_results_with_missing_metrics_section(test_name, result_json_file for err in error_data: actual_error_message = err["message"] if test_name == "Missing_metrics_bulk_res_few_containers" and d["interval_end_time"] == "2023-04-13T23:29:20.982Z": - error_message = "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. ] , " \ - "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. , " \ - "Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db" + error_message = "Performance profile: [Metric data is not present for container : tfb-server-1 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. , Metric data is not present for container : tfb-server-0 for experiment: quarkus-resteasy-kruize-min-http-response-time-db. 
]" if test_name == "Missing_metrics_bulk_res_few_containers_few_individual_metrics_missing" and d["interval_end_time"] == "2023-04-13T23:44:20.982Z" or d["interval_end_time"] == "2023-04-13T23:59:20.982Z": error_message = "Performance profile: [Missing one of the following mandatory parameters for experiment - quarkus-resteasy-kruize-min-http-response-time-db : [cpuUsage, memoryUsage, memoryRSS]]" assert error_message in actual_error_message From 258b0bfe48a0b0d8b0a32b80933d924f9479ab0a Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Tue, 30 Jan 2024 12:09:25 +0530 Subject: [PATCH 6/6] update docs and other changes as per the review Signed-off-by: Saad Khan --- design/UpdateResults.md | 61 ++++++++++++++++++- .../experiment/ExperimentInitiator.java | 1 - .../KubernetesElementsValidator.java | 28 --------- .../utils/AnalyzerErrorConstants.java | 1 - 4 files changed, 59 insertions(+), 32 deletions(-) diff --git a/design/UpdateResults.md b/design/UpdateResults.md index 783f8bd38..567022234 100644 --- a/design/UpdateResults.md +++ b/design/UpdateResults.md @@ -31,7 +31,7 @@ tuned. * Experiment name not found. ``` { - "message": "Experiment name: not found", + "message": "Not Found: experiment_name does not exist: ", "httpcode": 400, "documentationLink": "", "status": "ERROR" @@ -40,7 +40,7 @@ tuned. * Duplicate Experiment name and Timestamp. ``` { - "message": "Experiment name : already contains result for timestamp : ", + "message": "An entry for this record already exists!", "httpcode": 409, "documentationLink": "", "status": "ERROR" @@ -54,6 +54,63 @@ tuned. "documentationLink": "", "status": "ERROR" } + ``` + * Invalid Input JSON Format. + * For example, `interval_start_time` or `interval_end_time` is not following the correct UTC format or it's blank: + ``` + { + "message": "Failed to parse the JSON. Please check the input payload ", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } + ``` + * Parameters Mismatch + * `version` name passed is different from what was passed while creating the corresponding experiment: + ``` + { + "message": "Version number mismatch found. Expected: , Found: ", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } + ``` + * `namespace` in the `kubernetes_objects` is different from what was passed while creating the corresponding experiment: + ``` + { + "message": "kubernetes_objects : Kubernetes Object Namespaces MisMatched. Expected Namespace: , Found: in Results for experiment: ", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } + ``` + * Similar response will be returned for other parameters when there is a mismatch. + * Invalid Metric variable names. + ``` + { + "message": "Performance profile: [Metric variable name should be among these values: [cpuRequest, cpuLimit, cpuUsage, cpuThrottle, memoryRequest, memoryLimit, memoryUsage, memoryRSS] for container : for experiment: ]", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } + ``` + * Invalid Aggregation_Info Format + ``` + { + "message": "Performance profile: [ Format value should be among these values: [GiB, Gi, Ei, KiB, E, MiB, G, PiB, K, TiB, M, P, Bytes, cores, T, Ti, MB, KB, Pi, GB, EB, k, m, TB, PB, bytes, kB, Mi, Ki, EiB] for container : for experiment: ]", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } + ``` + * Invalid Aggregation_Info Values + ``` + { + "message": "Invalid value type for aggregation_info objects. 
Expected a numeric value (Double).", + "httpcode": 400, + "documentationLink": "", + "status": "ERROR" + } ``` * Any unknown exception on server side ``` diff --git a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java index 25ff49f44..c63bebe61 100644 --- a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java +++ b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java @@ -40,7 +40,6 @@ import java.util.*; import java.util.concurrent.ConcurrentHashMap; -import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT; import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT_NAME; /** diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java index 5a25d36c7..7e496d93d 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java @@ -19,9 +19,7 @@ import com.autotune.analyzer.serviceObjects.Converters; import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject; import com.autotune.analyzer.serviceObjects.verification.annotators.KubernetesElementsCheck; -import com.autotune.common.data.result.ContainerData; import com.autotune.common.data.result.ExperimentResultData; -import com.autotune.common.k8sObjects.K8sObject; import jakarta.validation.ConstraintValidator; import jakarta.validation.ConstraintValidatorContext; import org.slf4j.Logger; @@ -29,9 +27,6 @@ import java.io.PrintWriter; import java.io.StringWriter; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.IntStream; public class KubernetesElementsValidator implements ConstraintValidator { private static final Logger LOGGER = LoggerFactory.getLogger(KubernetesElementsValidator.class); @@ -98,29 +93,6 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint )); } - // Validate container names and image names - List kruizeContainers = kruizeObject.getKubernetes_objects().get(0).getContainerDataMap().values().stream().toList(); - List resultContainers = resultData.getKubernetes_objects().get(0).getContainerDataMap().values().stream().toList(); - if (kruizeContainers.size() != resultContainers.size()) { - kubeObjsMisMatch = true; - errorMsg = errorMsg.concat( - String.format("Containers count MisMatched. Expected: %s, Found: %s in Results for experiment: %s \n", - kruizeContainers.size(), resultContainers.size(), expName)); - } else { - for (int i = 0; i < kruizeContainers.size(); i++) { - ContainerData kruizeContainer = kruizeContainers.get(i); - ContainerData resultContainer = resultContainers.get(i); - if (!kruizeContainer.getContainer_name().equals(resultContainer.getContainer_name()) - || !kruizeContainer.getContainer_image_name().equals(resultContainer.getContainer_image_name())) { - kubeObjsMisMatch = true; - errorMsg = errorMsg.concat( - String.format("Container names or image names MisMatched. 
Expected: %s - %s, Found: %s - %s in Results for experiment: %s \n", - kruizeContainer.getContainer_name(), kruizeContainer.getContainer_image_name(), - resultContainer.getContainer_name(), resultContainer.getContainer_image_name(), expName)); - } - } - } - if (kubeObjsMisMatch) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(errorMsg) diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java index 816a72f34..6f4b6ac18 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java @@ -91,7 +91,6 @@ public static final class AutotuneObjectErrors { public static final String BLANK_AGGREGATION_INFO_VALUE = " cannot be negative or blank for the metric variable: "; public static final String UNSUPPORTED_FORMAT = " Format value should be among these values: ".concat(KruizeSupportedTypes.SUPPORTED_FORMATS.toString()); public static final String UNSUPPORTED_METRIC = "Metric variable name should be among these values: ".concat(Arrays.toString(AnalyzerConstants.MetricName.values())); - public static final String MISSING_EXPERIMENT = "Experiment name is missing or null!"; public static final String CONTAINER_AND_EXPERIMENT = " for container : %s for experiment: %s."; public static final String JSON_PARSING_ERROR = "Failed to parse the JSON. Please check the input payload "; public static final String AGGREGATION_INFO_INVALID_VALUE = "Invalid value type for aggregation_info objects. Expected a numeric value (Double).";
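Not part of the patch, but as a reader aid: a minimal, illustrative Java sketch of how the `UNSUPPORTED_METRIC` and `CONTAINER_AND_EXPERIMENT` constants above are expected to compose into the "Invalid Metric variable names" response documented in UpdateResults.md. The class name `ErrorMessageSketch` and the hard-coded container and experiment names are assumptions for illustration only; the constant values are simplified copies of those in AnalyzerErrorConstants.

```java
// Illustrative sketch only: composes a base validation reason with the
// container/experiment suffix, in the shape documented in UpdateResults.md.
public class ErrorMessageSketch {
    // Simplified copies of the constants from AnalyzerErrorConstants
    static final String UNSUPPORTED_METRIC =
            "Metric variable name should be among these values: "
            + "[cpuRequest, cpuLimit, cpuUsage, cpuThrottle, memoryRequest, memoryLimit, memoryUsage, memoryRSS]";
    static final String CONTAINER_AND_EXPERIMENT = " for container : %s for experiment: %s.";

    public static void main(String[] args) {
        // Example values taken from the test data used elsewhere in this patch
        String containerName = "tfb-server-0";
        String experimentName = "quarkus-resteasy-kruize-min-http-response-time-db";

        // Append the container/experiment context to the base reason
        String reason = UNSUPPORTED_METRIC
                + String.format(CONTAINER_AND_EXPERIMENT, containerName, experimentName);

        // Printed here with the "Performance profile: [...]" wrapper shown in the
        // documented error responses above
        System.out.println("Performance profile: [" + reason + "]");
    }
}
```

The point of appending this suffix to every metric-validation reason is traceability: when a bulk update_results payload fails, the message pins the failure to a specific container and experiment, which is exactly what the updated tests assert by concatenating `CONTAINER_AND_EXPERIMENT_NAME % (cname, experiment_name)` onto the expected error strings.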